diff --git a/build_docs.py b/build_docs.py index 474bf868d2..f50281fce0 100644 --- a/build_docs.py +++ b/build_docs.py @@ -29,6 +29,8 @@ ################################################################################ # Distutils Command class for installing nipype to a temporary location. + + class TempInstall(Command): temp_install_dir = os.path.join('build', 'install') @@ -36,11 +38,11 @@ def run(self): """ build and install nipype in a temporary location. """ install = self.distribution.get_command_obj('install') install.install_scripts = self.temp_install_dir - install.install_base = self.temp_install_dir + install.install_base = self.temp_install_dir install.install_platlib = self.temp_install_dir install.install_purelib = self.temp_install_dir - install.install_data = self.temp_install_dir - install.install_lib = self.temp_install_dir + install.install_data = self.temp_install_dir + install.install_lib = self.temp_install_dir install.install_headers = self.temp_install_dir install.run() @@ -64,13 +66,12 @@ def finalize_options(self): # Distutils Command class for API generation class APIDocs(TempInstall): description = \ - """generate API docs """ + """generate API docs """ user_options = [ ('None', None, 'this command has no options'), ] - def run(self): # First build the project and install it to a temporary location. TempInstall.run(self) @@ -141,22 +142,21 @@ def zip_docs(self): # require zlib. try: zf = zipfile.ZipFile(target_file, 'w', - compression=zipfile.ZIP_DEFLATED) + compression=zipfile.ZIP_DEFLATED) except RuntimeError: warnings.warn('zlib not installed, storing the docs ' - 'without compression') + 'without compression') zf = zipfile.ZipFile(target_file, 'w', - compression=zipfile.ZIP_STORED) + compression=zipfile.ZIP_STORED) for root, dirs, files in os.walk(DOC_BUILD_DIR): relative = relative_path(root) if not relative.startswith('.doctrees'): for f in files: zf.write(os.path.join(root, f), - os.path.join(relative, 'html_docs', f)) + os.path.join(relative, 'html_docs', f)) zf.close() - def finalize_options(self): """ Override the default for the documentation build directory. @@ -166,6 +166,8 @@ def finalize_options(self): ################################################################################ # Distutils Command class to clean + + class Clean(clean): def run(self): diff --git a/doc/conf.py b/doc/conf.py index 694d4aefaf..5bfb1f402f 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -15,7 +15,7 @@ import sys, os nipypepath = os.path.abspath('..') -sys.path.insert(1,nipypepath) +sys.path.insert(1, nipypepath) import nipype @@ -168,9 +168,9 @@ #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -html_sidebars = {'**': ['gse.html','localtoc.html', 'sidebar_versions.html', 'indexsidebar.html'], - 'searchresults' : ['sidebar_versions.html', 'indexsidebar.html'], - 'version' : []} +html_sidebars = {'**': ['gse.html', 'localtoc.html', 'sidebar_versions.html', 'indexsidebar.html'], + 'searchresults': ['sidebar_versions.html', 'indexsidebar.html'], + 'version': []} # Additional templates that should be rendered to pages, maps page names to # template names. 
diff --git a/doc/sphinxext/autosummary_generate.py b/doc/sphinxext/autosummary_generate.py index 7fd1e94459..0b562f9975 100755 --- a/doc/sphinxext/autosummary_generate.py +++ b/doc/sphinxext/autosummary_generate.py @@ -26,6 +26,7 @@ except ImportError: import_phantom_module = lambda x: x + def main(): p = optparse.OptionParser(__doc__.strip()) p.add_option("-p", "--phantom", action="store", type="string", @@ -95,16 +96,19 @@ def main(): finally: f.close() + def format_modulemember(name, directive): parts = name.split('.') mod, name = '.'.join(parts[:-1]), parts[-1] return ".. currentmodule:: %s\n\n.. %s:: %s\n" % (mod, directive, name) + def format_classmember(name, directive): parts = name.split('.') mod, name = '.'.join(parts[:-2]), '.'.join(parts[-2:]) return ".. currentmodule:: %s\n\n.. %s:: %s\n" % (mod, directive, name) + def get_documented(filenames): """ Find out what items are documented in source/*.rst @@ -119,6 +123,7 @@ def get_documented(filenames): f.close() return documented + def get_documented_in_docstring(name, module=None, filename=None): """ Find out what items are documented in the given object's docstring. @@ -135,6 +140,7 @@ def get_documented_in_docstring(name, module=None, filename=None): print("Failed to import '%s': %s" % (name, e)) return {} + def get_documented_in_lines(lines, module=None, filename=None): """ Find out what items are documented in the given lines @@ -173,7 +179,7 @@ def get_documented_in_lines(lines, module=None, filename=None): continue if line.strip().startswith(':'): - continue # skip options + continue # skip options m = autosummary_item_re.match(line) if m: diff --git a/doc/sphinxext/ipython_console_highlighting.py b/doc/sphinxext/ipython_console_highlighting.py index 7d024b5b47..6720056dc8 100644 --- a/doc/sphinxext/ipython_console_highlighting.py +++ b/doc/sphinxext/ipython_console_highlighting.py @@ -3,7 +3,7 @@ """reST directive for syntax-highlighting ipython interactive sessions. """ -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Needed modules # Standard library @@ -18,13 +18,14 @@ from sphinx import highlighting -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Global constants line_re = re.compile('.*?\n') -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Code begins - classes and functions + class IPythonConsoleLexer(Lexer): """ For IPython console output or doctests, such as: @@ -95,6 +96,6 @@ def get_tokens_unprocessed(self, text): pylexer.get_tokens_unprocessed(curcode)): yield item -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Register the extension as a valid pygments lexer highlighting.lexers['ipython'] = IPythonConsoleLexer() diff --git a/doc/sphinxext/numpy_ext/docscrape.py b/doc/sphinxext/numpy_ext/docscrape.py index ba35086cb6..71beb6bbc9 100644 --- a/doc/sphinxext/numpy_ext/docscrape.py +++ b/doc/sphinxext/numpy_ext/docscrape.py @@ -20,6 +20,7 @@ class Reader(object): """A line-based string reader. """ + def __init__(self, data): """ Parameters @@ -28,10 +29,10 @@ def __init__(self, data): String with lines separated by '\n'. 
""" - if isinstance(data,list): + if isinstance(data, list): self._str = data else: - self._str = data.split('\n') # store string as list of lines + self._str = data.split('\n') # store string as list of lines self.reset() @@ -39,7 +40,7 @@ def __getitem__(self, n): return self._str[n] def reset(self): - self._l = 0 # current line nr + self._l = 0 # current line nr def read(self): if not self.eof(): @@ -71,6 +72,7 @@ def read_to_condition(self, condition_func): def read_to_next_empty_line(self): self.seek_next_non_empty_line() + def is_empty(line): return not line.strip() return self.read_to_condition(is_empty) @@ -80,7 +82,7 @@ def is_unindented(line): return (line.strip() and (len(line.lstrip()) == len(line))) return self.read_to_condition(is_unindented) - def peek(self,n=0): + def peek(self, n=0): if self._l + n < len(self._str): return self[self._l + n] else: @@ -116,10 +118,10 @@ def __init__(self, docstring, config={}): self._parse() - def __getitem__(self,key): + def __getitem__(self, key): return self._parsed_data[key] - def __setitem__(self,key,val): + def __setitem__(self, key, val): if key not in self._parsed_data: warn("Unknown section %s" % key) else: @@ -136,16 +138,16 @@ def _is_at_section(self): if l1.startswith('.. index::'): return True - l2 = self._doc.peek(1).strip() # ---------- or ========== + l2 = self._doc.peek(1).strip() # ---------- or ========== return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) - def _strip(self,doc): + def _strip(self, doc): i = 0 j = 0 - for i,line in enumerate(doc): + for i, line in enumerate(doc): if line.strip(): break - for j,line in enumerate(doc[::-1]): + for j, line in enumerate(doc[::-1]): if line.strip(): break return doc[i:len(doc)-j] @@ -154,7 +156,7 @@ def _read_to_next_section(self): section = self._doc.read_to_next_empty_line() while not self._is_at_section() and not self._doc.eof(): - if not self._doc.peek(-1).strip(): # previous line was empty + if not self._doc.peek(-1).strip(): # previous line was empty section += [''] section += self._doc.read_to_next_empty_line() @@ -166,14 +168,14 @@ def _read_sections(self): data = self._read_to_next_section() name = data[0].strip() - if name.startswith('..'): # index section + if name.startswith('..'): # index section yield name, data[1:] elif len(data) < 2: yield StopIteration else: yield name, self._strip(data[2:]) - def _parse_param_list(self,content): + def _parse_param_list(self, content): r = Reader(content) params = [] while not r.eof(): @@ -186,13 +188,13 @@ def _parse_param_list(self,content): desc = r.read_to_next_unindented_line() desc = dedent_lines(desc) - params.append((arg_name,arg_type,desc)) + params.append((arg_name, arg_type, desc)) return params - _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) + def _parse_see_also(self, content): """ func_name : Descriptive text @@ -288,7 +290,7 @@ def _parse(self): self._doc.reset() self._parse_summary() - for (section,content) in self._read_sections(): + for (section, content) in self._read_sections(): if not section.startswith('..'): section = ' '.join([s.capitalize() for s in section.split(' ')]) if section in ('Parameters', 'Returns', 'Raises', 'Warns', @@ -314,7 +316,7 @@ def _str_indent(self, doc, indent=4): def _str_signature(self): if self['Signature']: - return [self['Signature'].replace('*','\*')] + [''] + return [self['Signature'].replace('*', '\*')] + [''] else: return [''] @@ -334,7 +336,7 @@ def _str_param_list(self, name): out = [] if self[name]: out += 
self._str_header(name) - for param,param_type,desc in self[name]: + for param, param_type, desc in self[name]: out += ['%s : %s' % (param, param_type)] out += self._str_indent(desc) out += [''] @@ -376,7 +378,7 @@ def _str_see_also(self, func_role): def _str_index(self): idx = self['index'] out = [] - out += ['.. index:: %s' % idx.get('default','')] + out += ['.. index:: %s' % idx.get('default', '')] for section, references in list(idx.items()): if section == 'default': continue @@ -393,7 +395,7 @@ def __str__(self, func_role=''): out += self._str_param_list(param_list) out += self._str_section('Warnings') out += self._str_see_also(func_role) - for s in ('Notes','References','Examples'): + for s in ('Notes', 'References', 'Examples'): out += self._str_section(s) for param_list in ('Attributes', 'Methods'): out += self._str_param_list(param_list) @@ -401,17 +403,19 @@ def __str__(self, func_role=''): return '\n'.join(out) -def indent(str,indent=4): +def indent(str, indent=4): indent_str = ' '*indent if str is None: return indent_str lines = str.split('\n') return '\n'.join(indent_str + l for l in lines) + def dedent_lines(lines): """Deindent a list of lines maximally""" return textwrap.dedent("\n".join(lines)).split("\n") + def header(text, style='-'): return text + '\n' + style*len(text) + '\n' @@ -419,7 +423,7 @@ def header(text, style='-'): class FunctionDoc(NumpyDocString): def __init__(self, func, role='func', doc=None, config={}): self._f = func - self._role = role # e.g. "func" or "meth" + self._role = role # e.g. "func" or "meth" if doc is None: if func is None: @@ -433,7 +437,7 @@ def __init__(self, func, role='func', doc=None, config={}): # try to read signature argspec = inspect.getargspec(func) argspec = inspect.formatargspec(*argspec) - argspec = argspec.replace('*','\*') + argspec = argspec.replace('*', '\*') signature = '%s%s' % (func_name, argspec) except TypeError as e: signature = '%s()' % func_name @@ -459,7 +463,7 @@ def __str__(self): if self._role: if self._role not in roles: print("Warning: invalid role %s" % self._role) - out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), + out += '.. 
%s:: %s\n \n\n' % (roles.get(self._role, ''), func_name) out += super(FunctionDoc, self).__str__(func_role=self._role) @@ -499,7 +503,7 @@ def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc, def methods(self): if self._cls is None: return [] - return [name for name,func in inspect.getmembers(self._cls) + return [name for name, func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) and callable(func))] @@ -508,5 +512,5 @@ def methods(self): def properties(self): if self._cls is None: return [] - return [name for name,func in inspect.getmembers(self._cls) + return [name for name, func in inspect.getmembers(self._cls) if not name.startswith('_') and func is None] diff --git a/doc/sphinxext/numpy_ext/docscrape_sphinx.py b/doc/sphinxext/numpy_ext/docscrape_sphinx.py index 9d76da3b27..70f7ecd695 100644 --- a/doc/sphinxext/numpy_ext/docscrape_sphinx.py +++ b/doc/sphinxext/numpy_ext/docscrape_sphinx.py @@ -5,7 +5,6 @@ from nipype.external.six import string_types - class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): self.use_plots = config.get('use_plots', False) @@ -42,11 +41,11 @@ def _str_param_list(self, name): if self[name]: out += self._str_field_list(name) out += [''] - for param,param_type,desc in self[name]: + for param, param_type, desc in self[name]: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) out += [''] - out += self._str_indent(desc,8) + out += self._str_indent(desc, 8) out += [''] return out @@ -130,7 +129,7 @@ def _str_index(self): if len(idx) == 0: return out - out += ['.. index:: %s' % idx.get('default','')] + out += ['.. index:: %s' % idx.get('default', '')] for section, references in list(idx.items()): if section == 'default': continue @@ -151,9 +150,9 @@ def _str_references(self): # Latex collects all references to a separate bibliography, # so we need to insert links to it if sphinx.__version__ >= "0.6": - out += ['.. only:: latex',''] + out += ['.. only:: latex', ''] else: - out += ['.. latexonly::',''] + out += ['.. latexonly::', ''] items = [] for line in self['References']: m = re.match(r'.. 
\[([a-z0-9._-]+)\]', line, re.I) @@ -192,24 +191,28 @@ def __str__(self, indent=0, func_role="obj"): out += self._str_examples() for param_list in ('Attributes', 'Methods'): out += self._str_member_list(param_list) - out = self._str_indent(out,indent) + out = self._str_indent(out, indent) return '\n'.join(out) + class SphinxFunctionDoc(SphinxDocString, FunctionDoc): def __init__(self, obj, doc=None, config={}): self.use_plots = config.get('use_plots', False) FunctionDoc.__init__(self, obj, doc=doc, config=config) + class SphinxClassDoc(SphinxDocString, ClassDoc): def __init__(self, obj, doc=None, func_doc=None, config={}): self.use_plots = config.get('use_plots', False) ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) + class SphinxObjDoc(SphinxDocString): def __init__(self, obj, doc=None, config={}): self._f = obj SphinxDocString.__init__(self, doc, config=config) + def get_doc_object(obj, what=None, doc=None, config={}): if what is None: if inspect.isclass(obj): diff --git a/doc/sphinxext/numpy_ext/numpydoc.py b/doc/sphinxext/numpy_ext/numpydoc.py index f55c4d4c1c..981c1d0270 100644 --- a/doc/sphinxext/numpy_ext/numpydoc.py +++ b/doc/sphinxext/numpy_ext/numpydoc.py @@ -29,6 +29,7 @@ from sphinx.util.compat import Directive import inspect + def mangle_docstrings(app, what, name, obj, options, lines, reference_offset=[0]): @@ -38,14 +39,14 @@ def mangle_docstrings(app, what, name, obj, options, lines, if what == 'module': # Strip top title title_re = re.compile(ur'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*', - re.I|re.S) + re.I |re.S) lines[:] = title_re.sub(u'', u"\n".join(lines)).split(u"\n") else: doc = get_doc_object(obj, what, u"\n".join(lines), config=cfg) lines[:] = str(doc).split(u"\n") if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ - obj.__name__: + obj.__name__: if hasattr(obj, '__module__'): v = dict(full_name=u"%s.%s" % (obj.__module__, obj.__name__)) else: @@ -78,11 +79,12 @@ def mangle_docstrings(app, what, name, obj, options, lines, reference_offset[0] += len(references) + def mangle_signature(app, what, name, obj, options, sig, retann): # Do not try to inspect classes that don't define `__init__` if (inspect.isclass(obj) and - (not hasattr(obj, '__init__') or - 'initializes x; see ' in pydoc.getdoc(obj.__init__))): + (not hasattr(obj, '__init__') or + 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return @@ -93,6 +95,7 @@ def mangle_signature(app, what, name, obj, options, sig, retann): sig = re.sub(u"^[^(]*", u"", doc['Signature']) return sig, u'' + def setup(app, get_doc_object_=get_doc_object): global get_doc_object get_doc_object = get_doc_object_ @@ -107,14 +110,15 @@ def setup(app, get_doc_object_=get_doc_object): app.add_domain(NumpyPythonDomain) app.add_domain(NumpyCDomain) -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Docstring-mangling domains -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ from docutils.statemachine import ViewList from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain + class ManglingDomainBase(object): directive_mangling_map = {} @@ -127,6 +131,7 @@ def wrap_mangling_directives(self): self.directives[name] = wrap_mangling_directive( self.directives[name], objtype) 
+ class NumpyPythonDomain(ManglingDomainBase, PythonDomain): name = 'np' directive_mangling_map = { @@ -139,6 +144,7 @@ class NumpyPythonDomain(ManglingDomainBase, PythonDomain): 'attribute': 'attribute', } + class NumpyCDomain(ManglingDomainBase, CDomain): name = 'np-c' directive_mangling_map = { @@ -149,6 +155,7 @@ class NumpyCDomain(ManglingDomainBase, CDomain): 'var': 'object', } + def wrap_mangling_directive(base_directive, objtype): class directive(base_directive): def run(self): diff --git a/examples/dmri_camino_dti.py b/examples/dmri_camino_dti.py index 3c3e358dcc..4c81eebf09 100755 --- a/examples/dmri_camino_dti.py +++ b/examples/dmri_camino_dti.py @@ -33,6 +33,7 @@ (fmri_spm_face.py) also implements this inferral of voxel size from the data. """ + def get_vox_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -42,6 +43,7 @@ def get_vox_dims(volume): voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] + def get_data_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -51,6 +53,7 @@ def get_data_dims(volume): datadims = hdr.get_data_shape() return [int(datadims[0]), int(datadims[1]), int(datadims[2])] + def get_affine(volume): import nibabel as nb nii = nb.load(volume) @@ -65,8 +68,8 @@ def get_affine(volume): """ info = dict(dwi=[['subject_id', 'data']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -92,7 +95,7 @@ def get_affine(volume): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" @@ -125,7 +128,7 @@ def get_affine(volume): Second, diffusion tensors are fit to the voxel-order data. """ -dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit') +dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit') """ Next, a lookup table is generated from the schemefile and the @@ -196,15 +199,15 @@ def get_affine(volume): fractional anisotropy and diffusivity trace maps and their associated headers. 
""" -fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa') -trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace') +fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa') +trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace') dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') -analyzeheader_fa = pe.Node(interface= camino.AnalyzeHeader(), name = "analyzeheader_fa") +analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(), name="analyzeheader_fa") analyzeheader_fa.inputs.datatype = "double" analyzeheader_trace = analyzeheader_fa.clone('analyzeheader_trace') -fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii') +fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii') trace2nii = fa2nii.clone("trace2nii") """ @@ -213,7 +216,7 @@ def get_affine(volume): tractography = pe.Workflow(name='tractography') -tractography.connect([(inputnode, bet,[("dwi","in_file")])]) +tractography.connect([(inputnode, bet, [("dwi", "in_file")])]) """ File format conversion @@ -228,29 +231,29 @@ def get_affine(volume): Tensor fitting """ -tractography.connect([(image2voxel, dtifit,[['voxel_order','in_file']]), - (fsl2scheme, dtifit,[['scheme','scheme_file']]) - ]) +tractography.connect([(image2voxel, dtifit, [['voxel_order', 'in_file']]), + (fsl2scheme, dtifit, [['scheme', 'scheme_file']]) + ]) """ Workflow for applying DT streamline tractogpahy """ -tractography.connect([(bet, trackdt,[("mask_file","seed_file")])]) -tractography.connect([(dtifit, trackdt,[("tensor_fitted","in_file")])]) +tractography.connect([(bet, trackdt, [("mask_file", "seed_file")])]) +tractography.connect([(dtifit, trackdt, [("tensor_fitted", "in_file")])]) """ Workflow for applying PICo """ -tractography.connect([(bet, trackpico,[("mask_file","seed_file")])]) -tractography.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])]) -tractography.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])]) -tractography.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])]) -tractography.connect([(picopdfs, trackpico,[("pdfs","in_file")])]) +tractography.connect([(bet, trackpico, [("mask_file", "seed_file")])]) +tractography.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])]) +tractography.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])]) +tractography.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])]) +tractography.connect([(picopdfs, trackpico, [("pdfs", "in_file")])]) # ProcStreamlines might throw memory errors - comment this line out in such case -tractography.connect([(trackdt, procstreamlines,[("tracked","in_file")])]) +tractography.connect([(trackdt, procstreamlines, [("tracked", "in_file")])]) """ @@ -262,31 +265,31 @@ def get_affine(volume): will be correct and readable. 
""" -tractography.connect([(dtifit, fa,[("tensor_fitted","in_file")])]) -tractography.connect([(fa, analyzeheader_fa,[("fa","in_file")])]) -tractography.connect([(inputnode, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'), -(('dwi', get_data_dims), 'data_dims')])]) -tractography.connect([(fa, fa2nii,[('fa','data_file')])]) -tractography.connect([(inputnode, fa2nii,[(('dwi', get_affine), 'affine')])]) -tractography.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])]) +tractography.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) +tractography.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) +tractography.connect([(inputnode, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) +tractography.connect([(fa, fa2nii, [('fa', 'data_file')])]) +tractography.connect([(inputnode, fa2nii, [(('dwi', get_affine), 'affine')])]) +tractography.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) -tractography.connect([(dtifit, trace,[("tensor_fitted","in_file")])]) -tractography.connect([(trace, analyzeheader_trace,[("trace","in_file")])]) -tractography.connect([(inputnode, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'), -(('dwi', get_data_dims), 'data_dims')])]) -tractography.connect([(trace, trace2nii,[('trace','data_file')])]) -tractography.connect([(inputnode, trace2nii,[(('dwi', get_affine), 'affine')])]) -tractography.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])]) +tractography.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) +tractography.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) +tractography.connect([(inputnode, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) +tractography.connect([(trace, trace2nii, [('trace', 'data_file')])]) +tractography.connect([(inputnode, trace2nii, [(('dwi', get_affine), 'affine')])]) +tractography.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])]) -tractography.connect([(dtifit, dteig,[("tensor_fitted","in_file")])]) +tractography.connect([(dtifit, dteig, [("tensor_fitted", "in_file")])]) -tractography.connect([(trackpico, cam2trk_pico, [('tracked','in_file')])]) -tractography.connect([(trackdt, cam2trk_dt, [('tracked','in_file')])]) -tractography.connect([(inputnode, cam2trk_pico,[(('dwi', get_vox_dims), 'voxel_dims'), +tractography.connect([(trackpico, cam2trk_pico, [('tracked', 'in_file')])]) +tractography.connect([(trackdt, cam2trk_dt, [('tracked', 'in_file')])]) +tractography.connect([(inputnode, cam2trk_pico, [(('dwi', get_vox_dims), 'voxel_dims'), (('dwi', get_data_dims), 'data_dims')])]) -tractography.connect([(inputnode, cam2trk_dt,[(('dwi', get_vox_dims), 'voxel_dims'), +tractography.connect([(inputnode, cam2trk_dt, [(('dwi', get_vox_dims), 'voxel_dims'), (('dwi', get_data_dims), 'data_dims')])]) @@ -298,12 +301,12 @@ def get_affine(volume): workflow = pe.Workflow(name="workflow") workflow.base_dir = os.path.abspath('camino_dti_tutorial') -workflow.connect([(infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,tractography,[('dwi','inputnode.dwi'), - ('bvals','inputnode.bvals'), - ('bvecs','inputnode.bvecs') - ]) - ]) +workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, tractography, [('dwi', 'inputnode.dwi'), + ('bvals', 'inputnode.bvals'), + ('bvecs', 'inputnode.bvecs') + ]) + ]) """ The following functions run the whole workflow and produce a .dot and .png graph 
of the processing pipeline. """ diff --git a/examples/dmri_connectivity.py b/examples/dmri_connectivity.py index c4ef738153..1206e1821e 100755 --- a/examples/dmri_connectivity.py +++ b/examples/dmri_connectivity.py @@ -71,6 +71,7 @@ regions. """ + def get_vox_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -80,6 +81,7 @@ def get_vox_dims(volume): voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] + def get_data_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -89,17 +91,20 @@ def get_data_dims(volume): datadims = hdr.get_data_shape() return [int(datadims[0]), int(datadims[1]), int(datadims[2])] + def get_affine(volume): import nibabel as nb nii = nb.load(volume) return nii.get_affine() + def select_aparc(list_of_files): for in_file in list_of_files: if 'aparc+aseg.mgz' in in_file: idx = list_of_files.index(in_file) return list_of_files[idx] + def select_aparc_annot(list_of_files): for in_file in list_of_files: if '.aparc.annot' in in_file: @@ -113,7 +118,7 @@ def select_aparc_annot(list_of_files): """ fs_dir = op.abspath('/usr/local/freesurfer') -subjects_dir = op.abspath(op.join(op.curdir,'./subjects')) +subjects_dir = op.abspath(op.join(op.curdir, './subjects')) fsl.FSLCommand.set_default_output_type('NIFTI') """ @@ -136,8 +141,8 @@ def select_aparc_annot(list_of_files): infosource.iterables = ('subject_id', subject_list) info = dict(dwi=[['subject_id', 'data']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) """ A datasource node is used to perform the actual data grabbing. @@ -147,7 +152,7 @@ def select_aparc_annot(list_of_files): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" datasource.inputs.base_directory = data_dir @@ -185,7 +190,7 @@ def select_aparc_annot(list_of_files): FSL's Brain Extraction tool is used to create a mask from the b0 image """ -b0Strip = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0') +b0Strip = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') """ FSL's FLIRT function is used to coregister the b0 mask and the structural image. @@ -193,16 +198,16 @@ def select_aparc_annot(list_of_files): FLIRT is used once again to apply the inverse transformation to the parcellated brain image. """ -coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister') +coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister') coregister.inputs.cost = ('corratio') -convertxfm = pe.Node(interface=fsl.ConvertXFM(), name = 'convertxfm') +convertxfm = pe.Node(interface=fsl.ConvertXFM(), name='convertxfm') convertxfm.inputs.invert_xfm = True -inverse = pe.Node(interface=fsl.FLIRT(), name = 'inverse') +inverse = pe.Node(interface=fsl.FLIRT(), name='inverse') inverse.inputs.interp = ('nearestneighbour') -inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name = 'inverse_AparcAseg') +inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name='inverse_AparcAseg') inverse_AparcAseg.inputs.interp = ('nearestneighbour') """ @@ -254,7 +259,7 @@ def select_aparc_annot(list_of_files): If desired, these tensors can be converted to a Nifti tensor image using the DT2NIfTI interface. 
""" -dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit') +dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit') """ Next, a lookup table is generated from the schemefile and the @@ -310,16 +315,16 @@ def select_aparc_annot(list_of_files): into a single .nii file. """ -fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa') -trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace') +fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa') +trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace') dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') -analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_fa') +analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(), name='analyzeheader_fa') analyzeheader_fa.inputs.datatype = 'double' -analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_trace') +analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(), name='analyzeheader_trace') analyzeheader_trace.inputs.datatype = 'double' -fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii') +fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii') trace2nii = fa2nii.clone("trace2nii") """ @@ -334,7 +339,7 @@ def select_aparc_annot(list_of_files): """ roigen = pe.Node(interface=cmtk.ROIGen(), name="ROIGen") -cmp_config = cmp.configuration.PipelineConfiguration(parcellation_scheme = "NativeFreesurfer") +cmp_config = cmp.configuration.PipelineConfiguration(parcellation_scheme="NativeFreesurfer") cmp_config.parcellation_scheme = "NativeFreesurfer" roigen.inputs.LUT_file = cmp_config.get_freeview_lut("NativeFreesurfer")['freesurferaparc'] roigen_structspace = roigen.clone('ROIGen_structspace') @@ -377,41 +382,41 @@ def select_aparc_annot(list_of_files): FreeSurfer input nodes: """ -mapping.connect([(inputnode, FreeSurferSource,[("subject_id","subject_id")])]) -mapping.connect([(inputnode, FreeSurferSourceLH,[("subject_id","subject_id")])]) -mapping.connect([(inputnode, FreeSurferSourceRH,[("subject_id","subject_id")])]) +mapping.connect([(inputnode, FreeSurferSource, [("subject_id", "subject_id")])]) +mapping.connect([(inputnode, FreeSurferSourceLH, [("subject_id", "subject_id")])]) +mapping.connect([(inputnode, FreeSurferSourceRH, [("subject_id", "subject_id")])]) """ Required conversions for processing in Camino: """ mapping.connect([(inputnode, image2voxel, [("dwi", "in_file")]), - (inputnode, fsl2scheme, [("bvecs", "bvec_file"), - ("bvals", "bval_file")]), - (image2voxel, dtifit,[['voxel_order','in_file']]), - (fsl2scheme, dtifit,[['scheme','scheme_file']]) - ]) + (inputnode, fsl2scheme, [("bvecs", "bvec_file"), + ("bvals", "bval_file")]), + (image2voxel, dtifit, [['voxel_order', 'in_file']]), + (fsl2scheme, dtifit, [['scheme', 'scheme_file']]) + ]) """ Nifti conversions for the parcellated white matter image (used in Camino's conmap), and the subject's stripped brain image from Freesurfer: """ -mapping.connect([(FreeSurferSource, mri_convert_WMParc,[('wmparc','in_file')])]) -mapping.connect([(FreeSurferSource, mri_convert_Brain,[('brain','in_file')])]) +mapping.connect([(FreeSurferSource, mri_convert_WMParc, [('wmparc', 'in_file')])]) +mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', 'in_file')])]) """ Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) """ -mapping.connect([(FreeSurferSourceLH, mris_convertLH,[('pial','in_file')])]) -mapping.connect([(FreeSurferSourceRH, 
mris_convertRH,[('pial','in_file')])]) -mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite,[('white','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite,[('white','in_file')])]) -mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated,[('inflated','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated,[('inflated','in_file')])]) -mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere,[('sphere','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere,[('sphere','in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, [('inflated', 'in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, [('sphere', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, [('sphere', 'in_file')])]) """ The annotation files are converted using the pial surface as a map via the MRIsConvert interface. @@ -419,8 +424,8 @@ def select_aparc_annot(list_of_files): specifically (rather than i.e. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. """ -mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,[('pial','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,[('pial','in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [('pial', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [('pial', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) @@ -430,24 +435,24 @@ def select_aparc_annot(list_of_files): code that have presented some users with errors. """ -mapping.connect([(inputnode, b0Strip,[('dwi','in_file')])]) -mapping.connect([(b0Strip, coregister,[('out_file','in_file')])]) -mapping.connect([(mri_convert_Brain, coregister,[('out_file','reference')])]) -mapping.connect([(coregister, convertxfm,[('out_matrix_file','in_file')])]) -mapping.connect([(b0Strip, inverse,[('out_file','reference')])]) -mapping.connect([(convertxfm, inverse,[('out_file','in_matrix_file')])]) -mapping.connect([(mri_convert_WMParc, inverse,[('out_file','in_file')])]) +mapping.connect([(inputnode, b0Strip, [('dwi', 'in_file')])]) +mapping.connect([(b0Strip, coregister, [('out_file', 'in_file')])]) +mapping.connect([(mri_convert_Brain, coregister, [('out_file', 'reference')])]) +mapping.connect([(coregister, convertxfm, [('out_matrix_file', 'in_file')])]) +mapping.connect([(b0Strip, inverse, [('out_file', 'reference')])]) +mapping.connect([(convertxfm, inverse, [('out_file', 'in_matrix_file')])]) +mapping.connect([(mri_convert_WMParc, inverse, [('out_file', 'in_file')])]) """ The tractography pipeline consists of the following nodes. Further information about the tractography can be found in nipype/examples/dmri_camino_dti.py. 
""" -mapping.connect([(b0Strip, track,[("mask_file","seed_file")])]) -mapping.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])]) -mapping.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])]) -mapping.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])]) -mapping.connect([(picopdfs, track,[("pdfs","in_file")])]) +mapping.connect([(b0Strip, track, [("mask_file", "seed_file")])]) +mapping.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])]) +mapping.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])]) +mapping.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])]) +mapping.connect([(picopdfs, track, [("pdfs", "in_file")])]) """ Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the @@ -456,36 +461,36 @@ def select_aparc_annot(list_of_files): files will be correct and readable. """ -mapping.connect([(dtifit, fa,[("tensor_fitted","in_file")])]) -mapping.connect([(fa, analyzeheader_fa,[("fa","in_file")])]) -mapping.connect([(inputnode, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) -mapping.connect([(fa, fa2nii,[('fa','data_file')])]) -mapping.connect([(inputnode, fa2nii,[(('dwi', get_affine), 'affine')])]) -mapping.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])]) +mapping.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) +mapping.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) +mapping.connect([(inputnode, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) +mapping.connect([(fa, fa2nii, [('fa', 'data_file')])]) +mapping.connect([(inputnode, fa2nii, [(('dwi', get_affine), 'affine')])]) +mapping.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) -mapping.connect([(dtifit, trace,[("tensor_fitted","in_file")])]) -mapping.connect([(trace, analyzeheader_trace,[("trace","in_file")])]) -mapping.connect([(inputnode, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) -mapping.connect([(trace, trace2nii,[('trace','data_file')])]) -mapping.connect([(inputnode, trace2nii,[(('dwi', get_affine), 'affine')])]) -mapping.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])]) +mapping.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) +mapping.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) +mapping.connect([(inputnode, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) +mapping.connect([(trace, trace2nii, [('trace', 'data_file')])]) +mapping.connect([(inputnode, trace2nii, [(('dwi', get_affine), 'affine')])]) +mapping.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])]) -mapping.connect([(dtifit, dteig,[("tensor_fitted","in_file")])]) +mapping.connect([(dtifit, dteig, [("tensor_fitted", "in_file")])]) """ The output tracts are converted to Trackvis format (and back). Here we also use the voxel- and data-grabbing functions defined at the beginning of the pipeline. 
""" -mapping.connect([(track, camino2trackvis, [('tracked','in_file')]), - (track, vtkstreamlines,[['tracked','in_file']]), - (camino2trackvis, trk2camino,[['trackvis','in_file']]) - ]) -mapping.connect([(inputnode, camino2trackvis,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) +mapping.connect([(track, camino2trackvis, [('tracked', 'in_file')]), + (track, vtkstreamlines, [['tracked', 'in_file']]), + (camino2trackvis, trk2camino, [['trackvis', 'in_file']]) + ]) +mapping.connect([(inputnode, camino2trackvis, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) """ Here the CMTK connectivity mapping nodes are connected. @@ -498,45 +503,45 @@ def select_aparc_annot(list_of_files): creatematrix, 'resolution_network_file') mapping.connect([(FreeSurferSource, mri_convert_AparcAseg, [(('aparc_aseg', select_aparc), 'in_file')])]) -mapping.connect([(b0Strip, inverse_AparcAseg,[('out_file','reference')])]) -mapping.connect([(convertxfm, inverse_AparcAseg,[('out_file','in_matrix_file')])]) -mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg,[('out_file','in_file')])]) -mapping.connect([(mri_convert_AparcAseg, roigen_structspace,[('out_file','aparc_aseg_file')])]) -mapping.connect([(roigen_structspace, createnodes,[("roi_file","roi_file")])]) +mapping.connect([(b0Strip, inverse_AparcAseg, [('out_file', 'reference')])]) +mapping.connect([(convertxfm, inverse_AparcAseg, [('out_file', 'in_matrix_file')])]) +mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg, [('out_file', 'in_file')])]) +mapping.connect([(mri_convert_AparcAseg, roigen_structspace, [('out_file', 'aparc_aseg_file')])]) +mapping.connect([(roigen_structspace, createnodes, [("roi_file", "roi_file")])]) -mapping.connect([(inverse_AparcAseg, roigen,[("out_file","aparc_aseg_file")])]) -mapping.connect([(roigen, creatematrix,[("roi_file","roi_file")])]) -mapping.connect([(camino2trackvis, creatematrix,[("trackvis","tract_file")])]) -mapping.connect([(inputnode, creatematrix,[("subject_id","out_matrix_file")])]) -mapping.connect([(inputnode, creatematrix,[("subject_id","out_matrix_mat_file")])]) +mapping.connect([(inverse_AparcAseg, roigen, [("out_file", "aparc_aseg_file")])]) +mapping.connect([(roigen, creatematrix, [("roi_file", "roi_file")])]) +mapping.connect([(camino2trackvis, creatematrix, [("trackvis", "tract_file")])]) +mapping.connect([(inputnode, creatematrix, [("subject_id", "out_matrix_file")])]) +mapping.connect([(inputnode, creatematrix, [("subject_id", "out_matrix_mat_file")])]) """ The merge nodes defined earlier are used here to create lists of the files which are destined for the CFFConverter. 
""" -mapping.connect([(creatematrix, gpickledNetworks,[("matrix_files","in1")])]) +mapping.connect([(creatematrix, gpickledNetworks, [("matrix_files", "in1")])]) -mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])]) -mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])]) -mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])]) -mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])]) -mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])]) -mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])]) -mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])]) -mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])]) +mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) +mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) +mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", "in3")])]) +mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", "in4")])]) +mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", "in5")])]) +mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", "in6")])]) +mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", "in7")])]) +mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", "in8")])]) -mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])]) -mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])]) +mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", "in1")])]) +mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", "in2")])]) -mapping.connect([(roigen, niftiVolumes,[("roi_file","in1")])]) -mapping.connect([(inputnode, niftiVolumes,[("dwi","in2")])]) -mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])]) +mapping.connect([(roigen, niftiVolumes, [("roi_file", "in1")])]) +mapping.connect([(inputnode, niftiVolumes, [("dwi", "in2")])]) +mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", "in1")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", "in2")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", "in3")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", "in4")])]) """ This block actually connects the merged lists to the CFF converter. 
We pass the surfaces @@ -547,13 +552,13 @@ def select_aparc_annot(list_of_files): """ CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe())) -mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])]) -mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])]) -mapping.connect([(gpickledNetworks, CFFConverter,[("out","gpickled_networks")])]) -mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])]) -mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])]) -mapping.connect([(creatematrix, CFFConverter,[("filtered_tractographies","tract_files")])]) -mapping.connect([(inputnode, CFFConverter,[("subject_id","title")])]) +mapping.connect([(giftiSurfaces, CFFConverter, [("out", "gifti_surfaces")])]) +mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) +mapping.connect([(gpickledNetworks, CFFConverter, [("out", "gpickled_networks")])]) +mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) +mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) +mapping.connect([(creatematrix, CFFConverter, [("filtered_tractographies", "tract_files")])]) +mapping.connect([(inputnode, CFFConverter, [("subject_id", "title")])]) """ Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes @@ -564,12 +569,12 @@ def select_aparc_annot(list_of_files): connectivity = pe.Workflow(name="connectivity") connectivity.base_dir = op.abspath('dmri_connectivity') connectivity.connect([ - (infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,mapping,[('dwi','inputnode.dwi'), - ('bvals','inputnode.bvals'), - ('bvecs','inputnode.bvecs') - ]), - (infosource,mapping,[('subject_id','inputnode.subject_id')]) + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, mapping, [('dwi', 'inputnode.dwi'), + ('bvals', 'inputnode.bvals'), + ('bvecs', 'inputnode.bvecs') + ]), + (infosource, mapping, [('subject_id', 'inputnode.subject_id')]) ]) """ diff --git a/examples/dmri_connectivity_advanced.py b/examples/dmri_connectivity_advanced.py index edad0d13ec..bbf648aeb7 100755 --- a/examples/dmri_connectivity_advanced.py +++ b/examples/dmri_connectivity_advanced.py @@ -30,15 +30,15 @@ .. seealso:: - connectivity_tutorial.py - Original tutorial using Camino and the NativeFreesurfer Parcellation Scheme + connectivity_tutorial.py + Original tutorial using Camino and the NativeFreesurfer Parcellation Scheme - www.cmtk.org - For more info about the parcellation scheme + www.cmtk.org + For more info about the parcellation scheme .. warning:: - The ConnectomeMapper (https://github.com/LTS5/cmp or www.cmtk.org) must be installed for this tutorial to function! + The ConnectomeMapper (https://github.com/LTS5/cmp or www.cmtk.org) must be installed for this tutorial to function! 
Packages and Data Setup ======================= @@ -75,25 +75,25 @@ This needs to point to the freesurfer subjects directory (Recon-all must have been run on subj1 from the FSL course data) Alternatively, the reconstructed subject data can be downloaded from: - * http://dl.dropbox.com/u/315714/subj1.zip + * http://dl.dropbox.com/u/315714/subj1.zip """ -subjects_dir = op.abspath(op.join(op.curdir,'./subjects')) +subjects_dir = op.abspath(op.join(op.curdir, './subjects')) fs.FSCommand.set_default_subjects_dir(subjects_dir) fsl.FSLCommand.set_default_output_type('NIFTI') fs_dir = os.environ['FREESURFER_HOME'] -lookup_file = op.join(fs_dir,'FreeSurferColorLUT.txt') +lookup_file = op.join(fs_dir, 'FreeSurferColorLUT.txt') """ This needs to point to the fdt folder you can find after extracting - * http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz + * http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz """ -data_dir = op.abspath(op.join(op.curdir,'exdata/')) +data_dir = op.abspath(op.join(op.curdir, 'exdata/')) subject_list = ['subj1'] """ @@ -105,8 +105,8 @@ infosource.iterables = ('subject_id', subject_list) info = dict(dwi=[['subject_id', 'data']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) """ Use datasource node to perform the actual data grabbing. @@ -115,7 +115,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" datasource.inputs.base_directory = data_dir @@ -128,7 +128,7 @@ conduits for the raw data to the rest of the processing pipeline. """ -inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id","dwi", "bvecs", "bvals", "subjects_dir"]), name="inputnode") +inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir"]), name="inputnode") inputnode.inputs.subjects_dir = subjects_dir FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource') @@ -186,7 +186,7 @@ b-values and b-vectors stored in FSL's format are converted into a single encoding file for MRTrix. """ -fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(),name='fsl2mrtrix') +fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') """ Distortions induced by eddy currents are corrected prior to fitting the tensors. @@ -204,11 +204,11 @@ * Fractional anisotropy """ -dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(),name='dwi2tensor') -tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(),name='tensor2vector') -tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(),name='tensor2adc') -tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(),name='tensor2fa') -MRconvert_fa = pe.Node(interface=mrtrix.MRConvert(),name='MRconvert_fa') +dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') +tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(), name='tensor2vector') +tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(), name='tensor2adc') +tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(), name='tensor2fa') +MRconvert_fa = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert_fa') MRconvert_fa.inputs.extension = 'nii' """ @@ -218,11 +218,11 @@ put through a simple thresholding routine, and smoothed using a 3x3 median filter. 
""" -MRconvert = pe.Node(interface=mrtrix.MRConvert(),name='MRconvert') +MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert') MRconvert.inputs.extract_at_axis = 3 MRconvert.inputs.extract_at_coordinate = [0] -threshold_b0 = pe.Node(interface=mrtrix.Threshold(),name='threshold_b0') -median3d = pe.Node(interface=mrtrix.MedianFilter3D(),name='median3d') +threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0') +median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3d') """ The brain mask is also used to help identify single-fiber voxels. @@ -231,11 +231,11 @@ thresholding the result to obtain some highly anisotropic within-brain voxels. """ -erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_firstpass') -erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_secondpass') -MRmultiply = pe.Node(interface=mrtrix.MRMultiply(),name='MRmultiply') +erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_firstpass') +erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_secondpass') +MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply') MRmult_merge = pe.Node(interface=util.Merge(2), name='MRmultiply_merge') -threshold_FA = pe.Node(interface=mrtrix.Threshold(),name='threshold_FA') +threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA') threshold_FA.inputs.absolute_threshold_value = 0.7 """ @@ -244,9 +244,9 @@ thresholding it at a reasonably high level. """ -bet = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0') -gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(),name='gen_WM_mask') -threshold_wmmask = pe.Node(interface=mrtrix.Threshold(),name='threshold_wmmask') +bet = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') +gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(), name='gen_WM_mask') +threshold_wmmask = pe.Node(interface=mrtrix.Threshold(), name='threshold_wmmask') threshold_wmmask.inputs.absolute_threshold_value = 0.4 """ @@ -259,9 +259,9 @@ """ -estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(),name='estimateresponse') +estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(), name='estimateresponse') estimateresponse.inputs.maximum_harmonic_order = 6 -csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(),name='csdeconv') +csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(), name='csdeconv') csdeconv.inputs.maximum_harmonic_order = 6 """ @@ -269,14 +269,14 @@ The tracts are then used to generate a tract-density image, and they are also converted to TrackVis format. 
""" -probCSDstreamtrack = pe.Node(interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(),name='probCSDstreamtrack') +probCSDstreamtrack = pe.Node(interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(), name='probCSDstreamtrack') probCSDstreamtrack.inputs.inputmodel = 'SD_PROB' probCSDstreamtrack.inputs.desired_number_of_tracks = 150000 -tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(),name='tracks2prob') +tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob') tracks2prob.inputs.colour = True MRconvert_tracks2prob = MRconvert_fa.clone(name='MRconvert_tracks2prob') -tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(),name='tck2trk') -trk2tdi = pe.Node(interface=dipy.TrackDensityMap(),name='trk2tdi') +tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk') +trk2tdi = pe.Node(interface=dipy.TrackDensityMap(), name='trk2tdi') """ Structural segmentation nodes @@ -287,7 +287,7 @@ so that they are in the same space as the regions of interest. """ -coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister') +coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister') coregister.inputs.cost = ('normmi') """ @@ -363,40 +363,40 @@ First, we connect the input node to the FreeSurfer input nodes. """ -mapping.connect([(inputnode, FreeSurferSource,[("subjects_dir","subjects_dir")])]) -mapping.connect([(inputnode, FreeSurferSource,[("subject_id","subject_id")])]) +mapping.connect([(inputnode, FreeSurferSource, [("subjects_dir", "subjects_dir")])]) +mapping.connect([(inputnode, FreeSurferSource, [("subject_id", "subject_id")])]) -mapping.connect([(inputnode, FreeSurferSourceLH,[("subjects_dir","subjects_dir")])]) -mapping.connect([(inputnode, FreeSurferSourceLH,[("subject_id","subject_id")])]) +mapping.connect([(inputnode, FreeSurferSourceLH, [("subjects_dir", "subjects_dir")])]) +mapping.connect([(inputnode, FreeSurferSourceLH, [("subject_id", "subject_id")])]) -mapping.connect([(inputnode, FreeSurferSourceRH,[("subjects_dir","subjects_dir")])]) -mapping.connect([(inputnode, FreeSurferSourceRH,[("subject_id","subject_id")])]) +mapping.connect([(inputnode, FreeSurferSourceRH, [("subjects_dir", "subjects_dir")])]) +mapping.connect([(inputnode, FreeSurferSourceRH, [("subject_id", "subject_id")])]) -mapping.connect([(inputnode, tessflow,[("subjects_dir","inputspec.subjects_dir")])]) -mapping.connect([(inputnode, tessflow,[("subject_id","inputspec.subject_id")])]) +mapping.connect([(inputnode, tessflow, [("subjects_dir", "inputspec.subjects_dir")])]) +mapping.connect([(inputnode, tessflow, [("subject_id", "inputspec.subject_id")])]) -mapping.connect([(inputnode, parcellate,[("subjects_dir","subjects_dir")])]) -mapping.connect([(inputnode, parcellate,[("subject_id","subject_id")])]) -mapping.connect([(parcellate, mri_convert_ROI_scale500,[('roi_file','in_file')])]) +mapping.connect([(inputnode, parcellate, [("subjects_dir", "subjects_dir")])]) +mapping.connect([(inputnode, parcellate, [("subject_id", "subject_id")])]) +mapping.connect([(parcellate, mri_convert_ROI_scale500, [('roi_file', 'in_file')])]) """ Nifti conversion for subject's stripped brain image from Freesurfer: """ -mapping.connect([(FreeSurferSource, mri_convert_Brain,[('brain','in_file')])]) +mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', 'in_file')])]) """ Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) """ -mapping.connect([(FreeSurferSourceLH, mris_convertLH,[('pial','in_file')])]) 
-mapping.connect([(FreeSurferSourceRH, mris_convertRH,[('pial','in_file')])]) -mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite,[('white','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite,[('white','in_file')])]) -mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated,[('inflated','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated,[('inflated','in_file')])]) -mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere,[('sphere','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere,[('sphere','in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, [('inflated', 'in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, [('sphere', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, [('sphere', 'in_file')])]) """ The annotation files are converted using the pial surface as a map via the MRIsConvert interface. @@ -404,8 +404,8 @@ specifically (rather than e.g. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. """ -mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,[('pial','in_file')])]) -mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,[('pial','in_file')])]) +mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [('pial', 'in_file')])]) +mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [('pial', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) @@ -417,68 +417,68 @@ """ mapping.connect([(inputnode, fsl2mrtrix, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) -mapping.connect([(inputnode, eddycorrect,[("dwi","inputnode.in_file")])]) -mapping.connect([(eddycorrect, dwi2tensor,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(fsl2mrtrix, dwi2tensor,[("encoding_file","encoding_file")])]) + ("bvals", "bval_file")])]) +mapping.connect([(inputnode, eddycorrect, [("dwi", "inputnode.in_file")])]) +mapping.connect([(eddycorrect, dwi2tensor, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", "encoding_file")])]) -mapping.connect([(dwi2tensor, tensor2vector,[['tensor','in_file']]), - (dwi2tensor, tensor2adc,[['tensor','in_file']]), - (dwi2tensor, tensor2fa,[['tensor','in_file']]), - ]) -mapping.connect([(tensor2fa, MRmult_merge,[("FA","in1")])]) -mapping.connect([(tensor2fa, MRconvert_fa,[("FA","in_file")])]) +mapping.connect([(dwi2tensor, tensor2vector, [['tensor', 'in_file']]), + (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), + (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), + ]) +mapping.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) +mapping.connect([(tensor2fa, MRconvert_fa, [("FA", "in_file")])]) """ This block creates the rough brain mask to be multiplied, mulitplies it with the fractional anisotropy image, and thresholds it to get the single-fiber voxels. 
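One detail of the multiplication step is easy to miss: MRMultiply takes its operands as a
single ``in_files`` list, so a small ``util.Merge(2)`` node gathers the FA image and the
eroded mask into that list before they are multiplied. A stripped-down, self-contained
sketch of just this pattern (node and workflow names here are illustrative only)::

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.mrtrix as mrtrix

    merge = pe.Node(interface=util.Merge(2), name='merge_sketch')
    multiply = pe.Node(interface=mrtrix.MRMultiply(), name='multiply_sketch')

    wf = pe.Workflow(name='mask_times_fa_sketch')
    # whatever arrives on ``in1`` and ``in2`` leaves the merge node as a
    # two-element list on ``out``, which MRMultiply consumes as ``in_files``
    wf.connect(merge, 'out', multiply, 'in_files')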
""" -mapping.connect([(eddycorrect, MRconvert,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(MRconvert, threshold_b0,[("converted","in_file")])]) -mapping.connect([(threshold_b0, median3d,[("out_file","in_file")])]) -mapping.connect([(median3d, erode_mask_firstpass,[("out_file","in_file")])]) -mapping.connect([(erode_mask_firstpass, erode_mask_secondpass,[("out_file","in_file")])]) -mapping.connect([(erode_mask_secondpass, MRmult_merge,[("out_file","in2")])]) -mapping.connect([(MRmult_merge, MRmultiply,[("out","in_files")])]) -mapping.connect([(MRmultiply, threshold_FA,[("out_file","in_file")])]) +mapping.connect([(eddycorrect, MRconvert, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) +mapping.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) +mapping.connect([(median3d, erode_mask_firstpass, [("out_file", "in_file")])]) +mapping.connect([(erode_mask_firstpass, erode_mask_secondpass, [("out_file", "in_file")])]) +mapping.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", "in2")])]) +mapping.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) +mapping.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) """ Here the thresholded white matter mask is created for seeding the tractography. """ -mapping.connect([(eddycorrect, bet,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(eddycorrect, gen_WM_mask,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(bet, gen_WM_mask,[("mask_file","binary_mask")])]) -mapping.connect([(fsl2mrtrix, gen_WM_mask,[("encoding_file","encoding_file")])]) -mapping.connect([(gen_WM_mask, threshold_wmmask,[("WMprobabilitymap","in_file")])]) +mapping.connect([(eddycorrect, bet, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(eddycorrect, gen_WM_mask, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) +mapping.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", "encoding_file")])]) +mapping.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", "in_file")])]) """ Next we estimate the fiber response distribution. """ -mapping.connect([(eddycorrect, estimateresponse,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(fsl2mrtrix, estimateresponse,[("encoding_file","encoding_file")])]) -mapping.connect([(threshold_FA, estimateresponse,[("out_file","mask_image")])]) +mapping.connect([(eddycorrect, estimateresponse, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", "encoding_file")])]) +mapping.connect([(threshold_FA, estimateresponse, [("out_file", "mask_image")])]) """ Run constrained spherical deconvolution. """ -mapping.connect([(eddycorrect, csdeconv,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(gen_WM_mask, csdeconv,[("WMprobabilitymap","mask_image")])]) -mapping.connect([(estimateresponse, csdeconv,[("response","response_file")])]) -mapping.connect([(fsl2mrtrix, csdeconv,[("encoding_file","encoding_file")])]) +mapping.connect([(eddycorrect, csdeconv, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", "mask_image")])]) +mapping.connect([(estimateresponse, csdeconv, [("response", "response_file")])]) +mapping.connect([(fsl2mrtrix, csdeconv, [("encoding_file", "encoding_file")])]) """ Connect the tractography and compute the tract density image. 
""" -mapping.connect([(threshold_wmmask, probCSDstreamtrack,[("out_file","seed_file")])]) -mapping.connect([(csdeconv, probCSDstreamtrack,[("spherical_harmonics_image","in_file")])]) -mapping.connect([(probCSDstreamtrack, tracks2prob,[("tracked","in_file")])]) -mapping.connect([(eddycorrect, tracks2prob,[("outputnode.eddy_corrected","template_file")])]) -mapping.connect([(tracks2prob, MRconvert_tracks2prob,[("tract_image","in_file")])]) +mapping.connect([(threshold_wmmask, probCSDstreamtrack, [("out_file", "seed_file")])]) +mapping.connect([(csdeconv, probCSDstreamtrack, [("spherical_harmonics_image", "in_file")])]) +mapping.connect([(probCSDstreamtrack, tracks2prob, [("tracked", "in_file")])]) +mapping.connect([(eddycorrect, tracks2prob, [("outputnode.eddy_corrected", "template_file")])]) +mapping.connect([(tracks2prob, MRconvert_tracks2prob, [("tract_image", "in_file")])]) """ Structural Processing @@ -486,52 +486,52 @@ First, we coregister the diffusion image to the structural image """ -mapping.connect([(eddycorrect, coregister,[("outputnode.eddy_corrected","in_file")])]) -mapping.connect([(mri_convert_Brain, coregister,[('out_file','reference')])]) +mapping.connect([(eddycorrect, coregister, [("outputnode.eddy_corrected", "in_file")])]) +mapping.connect([(mri_convert_Brain, coregister, [('out_file', 'reference')])]) """ The MRtrix-tracked fibers are converted to TrackVis format (with voxel and data dimensions grabbed from the DWI). The connectivity matrix is created with the transformed .trk fibers and the parcellation file. """ -mapping.connect([(eddycorrect, tck2trk,[("outputnode.eddy_corrected","image_file")])]) -mapping.connect([(mri_convert_Brain, tck2trk,[("out_file","registration_image_file")])]) -mapping.connect([(coregister, tck2trk,[("out_matrix_file","matrix_file")])]) -mapping.connect([(probCSDstreamtrack, tck2trk,[("tracked","in_file")])]) -mapping.connect([(tck2trk, creatematrix,[("out_file","tract_file")])]) -mapping.connect([(tck2trk, trk2tdi,[("out_file","in_file")])]) -mapping.connect([(inputnode, creatematrix,[("subject_id","out_matrix_file")])]) -mapping.connect([(inputnode, creatematrix,[("subject_id","out_matrix_mat_file")])]) -mapping.connect([(parcellate, creatematrix,[("roi_file","roi_file")])]) -mapping.connect([(parcellate, createnodes,[("roi_file","roi_file")])]) -mapping.connect([(createnodes, creatematrix,[("node_network","resolution_network_file")])]) +mapping.connect([(eddycorrect, tck2trk, [("outputnode.eddy_corrected", "image_file")])]) +mapping.connect([(mri_convert_Brain, tck2trk, [("out_file", "registration_image_file")])]) +mapping.connect([(coregister, tck2trk, [("out_matrix_file", "matrix_file")])]) +mapping.connect([(probCSDstreamtrack, tck2trk, [("tracked", "in_file")])]) +mapping.connect([(tck2trk, creatematrix, [("out_file", "tract_file")])]) +mapping.connect([(tck2trk, trk2tdi, [("out_file", "in_file")])]) +mapping.connect([(inputnode, creatematrix, [("subject_id", "out_matrix_file")])]) +mapping.connect([(inputnode, creatematrix, [("subject_id", "out_matrix_mat_file")])]) +mapping.connect([(parcellate, creatematrix, [("roi_file", "roi_file")])]) +mapping.connect([(parcellate, createnodes, [("roi_file", "roi_file")])]) +mapping.connect([(createnodes, creatematrix, [("node_network", "resolution_network_file")])]) """ The merge nodes defined earlier are used here to create lists of the files which are destined for the CFFConverter. 
""" -mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])]) -mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])]) -mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])]) -mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])]) -mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])]) -mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])]) -mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])]) -mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])]) -mapping.connect([(tessflow, giftiSurfaces,[("outputspec.meshes","in9")])]) +mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) +mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) +mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", "in3")])]) +mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", "in4")])]) +mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", "in5")])]) +mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", "in6")])]) +mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", "in7")])]) +mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", "in8")])]) +mapping.connect([(tessflow, giftiSurfaces, [("outputspec.meshes", "in9")])]) -mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])]) -mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])]) +mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", "in1")])]) +mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", "in2")])]) -mapping.connect([(parcellate, niftiVolumes,[("roi_file","in1")])]) -mapping.connect([(eddycorrect, niftiVolumes,[("outputnode.eddy_corrected","in2")])]) -mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])]) +mapping.connect([(parcellate, niftiVolumes, [("roi_file", "in1")])]) +mapping.connect([(eddycorrect, niftiVolumes, [("outputnode.eddy_corrected", "in2")])]) +mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])]) -mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", "in1")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", "in2")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", "in3")])]) +mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", "in4")])]) """ This block actually connects the merged lists to the CFF converter. We pass the surfaces @@ -541,30 +541,30 @@ product. 
""" -mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])]) -mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])]) -mapping.connect([(creatematrix, CFFConverter,[("matrix_files","gpickled_networks")])]) -mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])]) -mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])]) -mapping.connect([(creatematrix, CFFConverter,[("filtered_tractographies","tract_files")])]) -mapping.connect([(inputnode, CFFConverter,[("subject_id","title")])]) +mapping.connect([(giftiSurfaces, CFFConverter, [("out", "gifti_surfaces")])]) +mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) +mapping.connect([(creatematrix, CFFConverter, [("matrix_files", "gpickled_networks")])]) +mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) +mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) +mapping.connect([(creatematrix, CFFConverter, [("filtered_tractographies", "tract_files")])]) +mapping.connect([(inputnode, CFFConverter, [("subject_id", "title")])]) """ The graph theoretical metrics are computed using the networkx workflow and placed in another CFF file """ -mapping.connect([(inputnode, networkx,[("subject_id","inputnode.extra_field")])]) -mapping.connect([(creatematrix, networkx,[("intersection_matrix_file","inputnode.network_file")])]) +mapping.connect([(inputnode, networkx, [("subject_id", "inputnode.extra_field")])]) +mapping.connect([(creatematrix, networkx, [("intersection_matrix_file", "inputnode.network_file")])]) -mapping.connect([(networkx, NxStatsCFFConverter,[("outputnode.network_files","gpickled_networks")])]) -mapping.connect([(giftiSurfaces, NxStatsCFFConverter,[("out","gifti_surfaces")])]) -mapping.connect([(giftiLabels, NxStatsCFFConverter,[("out","gifti_labels")])]) -mapping.connect([(niftiVolumes, NxStatsCFFConverter,[("out","nifti_volumes")])]) -mapping.connect([(fiberDataArrays, NxStatsCFFConverter,[("out","data_files")])]) -mapping.connect([(inputnode, NxStatsCFFConverter,[("subject_id","title")])]) +mapping.connect([(networkx, NxStatsCFFConverter, [("outputnode.network_files", "gpickled_networks")])]) +mapping.connect([(giftiSurfaces, NxStatsCFFConverter, [("out", "gifti_surfaces")])]) +mapping.connect([(giftiLabels, NxStatsCFFConverter, [("out", "gifti_labels")])]) +mapping.connect([(niftiVolumes, NxStatsCFFConverter, [("out", "nifti_volumes")])]) +mapping.connect([(fiberDataArrays, NxStatsCFFConverter, [("out", "data_files")])]) +mapping.connect([(inputnode, NxStatsCFFConverter, [("subject_id", "title")])]) -mapping.connect([(inputnode, cmats_to_csv,[("subject_id","inputnode.extra_field")])]) -mapping.connect([(creatematrix, cmats_to_csv,[("matlab_matrix_files","inputnode.matlab_matrix_files")])]) +mapping.connect([(inputnode, cmats_to_csv, [("subject_id", "inputnode.extra_field")])]) +mapping.connect([(creatematrix, cmats_to_csv, [("matlab_matrix_files", "inputnode.matlab_matrix_files")])]) """ Create a higher-level workflow @@ -578,12 +578,12 @@ connectivity.base_dir = op.abspath('dmri_connectivity_advanced') connectivity.connect([ - (infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,mapping,[('dwi','inputnode.dwi'), - ('bvals','inputnode.bvals'), - ('bvecs','inputnode.bvecs') - ]), - (infosource,mapping,[('subject_id','inputnode.subject_id')]) + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, mapping, [('dwi', 'inputnode.dwi'), + ('bvals', 'inputnode.bvals'), + 
('bvecs', 'inputnode.bvecs') + ]), + (infosource, mapping, [('subject_id', 'inputnode.subject_id')]) ]) """ diff --git a/examples/dmri_dtk_dti.py b/examples/dmri_dtk_dti.py index ee755d0e12..f3c5b3e0dc 100755 --- a/examples/dmri_dtk_dti.py +++ b/examples/dmri_dtk_dti.py @@ -72,8 +72,8 @@ """ info = dict(dwi=[['subject_id', 'data']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -99,7 +99,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" @@ -124,42 +124,41 @@ extract the volume with b=0 (nodif_brain) """ -fslroi = pe.Node(interface=fsl.ExtractROI(),name='fslroi') -fslroi.inputs.t_min=0 -fslroi.inputs.t_size=1 +fslroi = pe.Node(interface=fsl.ExtractROI(), name='fslroi') +fslroi.inputs.t_min = 0 +fslroi.inputs.t_size = 1 """ create a brain mask from the nodif_brain """ -bet = pe.Node(interface=fsl.BET(),name='bet') -bet.inputs.mask=True -bet.inputs.frac=0.34 +bet = pe.Node(interface=fsl.BET(), name='bet') +bet.inputs.mask = True +bet.inputs.frac = 0.34 """ correct the diffusion weighted images for eddy_currents """ eddycorrect = create_eddy_correct_pipeline('eddycorrect') -eddycorrect.inputs.inputnode.ref_num=0 +eddycorrect.inputs.inputnode.ref_num = 0 """ compute the diffusion tensor in each voxel """ -dtifit = pe.Node(interface=dtk.DTIRecon(),name='dtifit') +dtifit = pe.Node(interface=dtk.DTIRecon(), name='dtifit') """ connect all the nodes for this workflow """ computeTensor.connect([ - (fslroi,bet,[('roi_file','in_file')]), - (eddycorrect,dtifit,[('outputnode.eddy_corrected','DWI')]) + (fslroi, bet, [('roi_file', 'in_file')]), + (eddycorrect, dtifit, [('outputnode.eddy_corrected', 'DWI')]) ]) - """ Setup for Tracktography ----------------------- @@ -186,11 +185,12 @@ Setup data storage area """ -datasink = pe.Node(interface=nio.DataSink(),name='datasink') +datasink = pe.Node(interface=nio.DataSink(), name='datasink') datasink.inputs.base_directory = os.path.abspath('dtiresults') + def getstripdir(subject_id): - return os.path.join(os.path.abspath('data/workingdir/dwiproc'),'_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('data/workingdir/dwiproc'), '_subject_id_%s' % subject_id) """ @@ -201,13 +201,13 @@ def getstripdir(subject_id): dwiproc = pe.Workflow(name="dwiproc") dwiproc.base_dir = os.path.abspath('dtk_dti_tutorial') dwiproc.connect([ - (infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,computeTensor,[('dwi','fslroi.in_file'), - ('bvals','dtifit.bvals'), - ('bvecs','dtifit.bvecs'), - ('dwi','eddycorrect.inputnode.in_file')]), - (computeTensor,tractography,[('bet.mask_file','dtk_tracker.mask1_file'), - ('dtifit.tensor','dtk_tracker.tensor_file') + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, computeTensor, [('dwi', 'fslroi.in_file'), + ('bvals', 'dtifit.bvals'), + ('bvecs', 'dtifit.bvecs'), + ('dwi', 'eddycorrect.inputnode.in_file')]), + (computeTensor, tractography, [('bet.mask_file', 'dtk_tracker.mask1_file'), + ('dtifit.tensor', 'dtk_tracker.tensor_file') ]) ]) diff --git a/examples/dmri_dtk_odf.py b/examples/dmri_dtk_odf.py index 3ca87df9bc..2cc4e6ea1d 100755 --- a/examples/dmri_dtk_odf.py +++ b/examples/dmri_dtk_odf.py @@ -72,8 +72,8 @@ """ info = dict(dwi=[['subject_id', 
'siemens_hardi_test_data']], - bvecs=[['subject_id','siemens_hardi_test_data.bvec']], - bvals=[['subject_id','siemens_hardi_test_data.bval']]) + bvecs=[['subject_id', 'siemens_hardi_test_data.bvec']], + bvals=[['subject_id', 'siemens_hardi_test_data.bval']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -99,7 +99,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" @@ -124,43 +124,42 @@ extract the volume with b=0 (nodif_brain) """ -fslroi = pe.Node(interface=fsl.ExtractROI(),name='fslroi') -fslroi.inputs.t_min=0 -fslroi.inputs.t_size=1 +fslroi = pe.Node(interface=fsl.ExtractROI(), name='fslroi') +fslroi.inputs.t_min = 0 +fslroi.inputs.t_size = 1 """ create a brain mask from the nodif_brain """ -bet = pe.Node(interface=fsl.BET(),name='bet') -bet.inputs.mask=True -bet.inputs.frac=0.34 +bet = pe.Node(interface=fsl.BET(), name='bet') +bet.inputs.mask = True +bet.inputs.frac = 0.34 """ correct the diffusion weighted images for eddy_currents """ eddycorrect = create_eddy_correct_pipeline('eddycorrect') -eddycorrect.inputs.inputnode.ref_num=0 +eddycorrect.inputs.inputnode.ref_num = 0 -hardi_mat = pe.Node(interface=dtk.HARDIMat(),name='hardi_mat') +hardi_mat = pe.Node(interface=dtk.HARDIMat(), name='hardi_mat') -odf_recon = pe.Node(interface=dtk.ODFRecon(),name='odf_recon') +odf_recon = pe.Node(interface=dtk.ODFRecon(), name='odf_recon') """ connect all the nodes for this workflow """ compute_ODF.connect([ - (fslroi,bet,[('roi_file','in_file')]), - (eddycorrect, odf_recon,[('outputnode.eddy_corrected','DWI')]), - (eddycorrect, hardi_mat,[('outputnode.eddy_corrected','reference_file')]), + (fslroi, bet, [('roi_file', 'in_file')]), + (eddycorrect, odf_recon, [('outputnode.eddy_corrected', 'DWI')]), + (eddycorrect, hardi_mat, [('outputnode.eddy_corrected', 'reference_file')]), (hardi_mat, odf_recon, [('out_file', 'matrix')]) ]) - """ Setup for Tracktography ----------------------- @@ -190,15 +189,15 @@ dwiproc = pe.Workflow(name="dwiproc") dwiproc.base_dir = os.path.abspath('dtk_odf_tutorial') dwiproc.connect([ - (infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,compute_ODF,[('dwi','fslroi.in_file'), - ('bvals','hardi_mat.bvals'), - ('bvecs','hardi_mat.bvecs'), - ('dwi','eddycorrect.inputnode.in_file')]), - (compute_ODF,tractography,[('bet.mask_file','odf_tracker.mask1_file'), - ('odf_recon.ODF','odf_tracker.ODF'), - ('odf_recon.max','odf_tracker.max') - ]) + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, compute_ODF, [('dwi', 'fslroi.in_file'), + ('bvals', 'hardi_mat.bvals'), + ('bvecs', 'hardi_mat.bvecs'), + ('dwi', 'eddycorrect.inputnode.in_file')]), + (compute_ODF, tractography, [('bet.mask_file', 'odf_tracker.mask1_file'), + ('odf_recon.ODF', 'odf_tracker.ODF'), + ('odf_recon.max', 'odf_tracker.max') + ]) ]) dwiproc.inputs.compute_ODF.hardi_mat.oblique_correction = True diff --git a/examples/dmri_fsl_dti.py b/examples/dmri_fsl_dti.py index a9b2db60dd..879c5b8ec5 100755 --- a/examples/dmri_fsl_dti.py +++ b/examples/dmri_fsl_dti.py @@ -72,10 +72,10 @@ """ info = dict(dwi=[['subject_id', 'data']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']], - seed_file = [['subject_id','MASK_average_thal_right']], - target_masks = [['subject_id',['MASK_average_M1_right', + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']], + 
seed_file=[['subject_id', 'MASK_average_thal_right']], + target_masks=[['subject_id', ['MASK_average_M1_right', 'MASK_average_S1_right', 'MASK_average_occipital_right', 'MASK_average_pfc_right', @@ -108,7 +108,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" @@ -135,44 +135,43 @@ extract the volume with b=0 (nodif_brain) """ -fslroi = pe.Node(interface=fsl.ExtractROI(),name='fslroi') -fslroi.inputs.t_min=0 -fslroi.inputs.t_size=1 +fslroi = pe.Node(interface=fsl.ExtractROI(), name='fslroi') +fslroi.inputs.t_min = 0 +fslroi.inputs.t_size = 1 """ create a brain mask from the nodif_brain """ -bet = pe.Node(interface=fsl.BET(),name='bet') -bet.inputs.mask=True -bet.inputs.frac=0.34 +bet = pe.Node(interface=fsl.BET(), name='bet') +bet.inputs.mask = True +bet.inputs.frac = 0.34 """ correct the diffusion weighted images for eddy_currents """ eddycorrect = create_eddy_correct_pipeline('eddycorrect') -eddycorrect.inputs.inputnode.ref_num=0 +eddycorrect.inputs.inputnode.ref_num = 0 """ compute the diffusion tensor in each voxel """ -dtifit = pe.Node(interface=fsl.DTIFit(),name='dtifit') +dtifit = pe.Node(interface=fsl.DTIFit(), name='dtifit') """ connect all the nodes for this workflow """ computeTensor.connect([ - (fslroi,bet,[('roi_file','in_file')]), - (eddycorrect, dtifit,[('outputnode.eddy_corrected','dwi')]), - (infosource, dtifit,[['subject_id','base_name']]), - (bet,dtifit,[('mask_file','mask')]) + (fslroi, bet, [('roi_file', 'in_file')]), + (eddycorrect, dtifit, [('outputnode.eddy_corrected', 'dwi')]), + (infosource, dtifit, [['subject_id', 'base_name']]), + (bet, dtifit, [('mask_file', 'mask')]) ]) - """ Setup for Tracktography ----------------------- @@ -189,7 +188,7 @@ """ bedpostx = create_bedpostx_pipeline() -bedpostx.get_node("xfibres").iterables = ("n_fibres",[1,2]) +bedpostx.get_node("xfibres").iterables = ("n_fibres", [1, 2]) flirt = pe.Node(interface=fsl.FLIRT(), name='flirt') @@ -200,22 +199,22 @@ perform probabilistic tracktography """ -probtrackx = pe.Node(interface=fsl.ProbTrackX(),name='probtrackx') -probtrackx.inputs.mode='seedmask' +probtrackx = pe.Node(interface=fsl.ProbTrackX(), name='probtrackx') +probtrackx.inputs.mode = 'seedmask' probtrackx.inputs.c_thresh = 0.2 -probtrackx.inputs.n_steps=2000 -probtrackx.inputs.step_length=0.5 -probtrackx.inputs.n_samples=5000 -probtrackx.inputs.opd=True -probtrackx.inputs.os2t=True -probtrackx.inputs.loop_check=True +probtrackx.inputs.n_steps = 2000 +probtrackx.inputs.step_length = 0.5 +probtrackx.inputs.n_samples = 5000 +probtrackx.inputs.opd = True +probtrackx.inputs.os2t = True +probtrackx.inputs.loop_check = True """ perform hard segmentation on the output of probtrackx """ -findthebiggest = pe.Node(interface=fsl.FindTheBiggest(),name='findthebiggest') +findthebiggest = pe.Node(interface=fsl.FindTheBiggest(), name='findthebiggest') """ @@ -223,25 +222,26 @@ """ tractography.add_nodes([bedpostx, flirt]) -tractography.connect([(bedpostx,probtrackx,[('outputnode.thsamples','thsamples'), - ('outputnode.phsamples','phsamples'), - ('outputnode.fsamples','fsamples') +tractography.connect([(bedpostx, probtrackx, [('outputnode.thsamples', 'thsamples'), + ('outputnode.phsamples', 'phsamples'), + ('outputnode.fsamples', 'fsamples') ]), - (probtrackx,findthebiggest,[('targets','in_files')]), - (flirt, probtrackx, [('out_matrix_file','xfm')]) - ]) + (probtrackx, findthebiggest, [('targets', 'in_files')]), 
+ (flirt, probtrackx, [('out_matrix_file', 'xfm')]) + ]) """ Setup data storage area """ -datasink = pe.Node(interface=nio.DataSink(),name='datasink') +datasink = pe.Node(interface=nio.DataSink(), name='datasink') datasink.inputs.base_directory = os.path.abspath('dtiresults') + def getstripdir(subject_id): import os - return os.path.join(os.path.abspath('data/workingdir/dwiproc'),'_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('data/workingdir/dwiproc'), '_subject_id_%s' % subject_id) """ @@ -252,23 +252,23 @@ def getstripdir(subject_id): dwiproc = pe.Workflow(name="dwiproc") dwiproc.base_dir = os.path.abspath('fsl_dti_tutorial') dwiproc.connect([ - (infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,computeTensor,[('dwi','fslroi.in_file'), - ('bvals','dtifit.bvals'), - ('bvecs','dtifit.bvecs'), - ('dwi','eddycorrect.inputnode.in_file')]), - (datasource,tractography,[('bvals','bedpostx.inputnode.bvals'), - ('bvecs','bedpostx.inputnode.bvecs'), - ('seed_file','probtrackx.seed'), - ('target_masks','probtrackx.target_masks') + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, computeTensor, [('dwi', 'fslroi.in_file'), + ('bvals', 'dtifit.bvals'), + ('bvecs', 'dtifit.bvecs'), + ('dwi', 'eddycorrect.inputnode.in_file')]), + (datasource, tractography, [('bvals', 'bedpostx.inputnode.bvals'), + ('bvecs', 'bedpostx.inputnode.bvecs'), + ('seed_file', 'probtrackx.seed'), + ('target_masks', 'probtrackx.target_masks') ]), - (computeTensor,tractography,[('eddycorrect.outputnode.eddy_corrected','bedpostx.inputnode.dwi'), - ('bet.mask_file','bedpostx.inputnode.mask'), - ('bet.mask_file','probtrackx.mask'), - ('fslroi.roi_file','flirt.reference')]), - (infosource, datasink,[('subject_id','container'), - (('subject_id', getstripdir),'strip_dir')]), - (tractography,datasink,[('findthebiggest.out_file','fbiggest.@biggestsegmentation')]) + (computeTensor, tractography, [('eddycorrect.outputnode.eddy_corrected', 'bedpostx.inputnode.dwi'), + ('bet.mask_file', 'bedpostx.inputnode.mask'), + ('bet.mask_file', 'probtrackx.mask'), + ('fslroi.roi_file', 'flirt.reference')]), + (infosource, datasink, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (tractography, datasink, [('findthebiggest.out_file', 'fbiggest.@biggestsegmentation')]) ]) if __name__ == '__main__': diff --git a/examples/dmri_group_connectivity_camino.py b/examples/dmri_group_connectivity_camino.py index dac4ab90c4..afb2ad1e60 100644 --- a/examples/dmri_group_connectivity_camino.py +++ b/examples/dmri_group_connectivity_camino.py @@ -59,7 +59,7 @@ import cmp from nipype.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline from nipype.workflows.dmri.connectivity.group_connectivity import (create_merge_networks_by_group_workflow, -create_merge_group_networks_workflow, create_average_networks_by_group_workflow) + create_merge_group_networks_workflow, create_average_networks_by_group_workflow) """ Set the proper directories diff --git a/examples/dmri_group_connectivity_mrtrix.py b/examples/dmri_group_connectivity_mrtrix.py index 10f535c042..4bdc0ae2b2 100644 --- a/examples/dmri_group_connectivity_mrtrix.py +++ b/examples/dmri_group_connectivity_mrtrix.py @@ -55,7 +55,7 @@ import nipype.interfaces.fsl as fsl import nipype.interfaces.freesurfer as fs # freesurfer -import os.path as op # system functions +import os.path as op # system functions import cmp from nipype.workflows.dmri.mrtrix.group_connectivity import 
create_group_connectivity_pipeline from nipype.workflows.dmri.connectivity.group_connectivity import (create_merge_network_results_by_group_workflow, create_merge_group_network_results_workflow, create_average_networks_by_group_workflow) diff --git a/examples/dmri_mrtrix_dti.py b/examples/dmri_mrtrix_dti.py index a0f44d1d69..d010518874 100755 --- a/examples/dmri_mrtrix_dti.py +++ b/examples/dmri_mrtrix_dti.py @@ -22,7 +22,7 @@ import nipype.interfaces.io as nio # Data i/o import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine -import nipype.interfaces.mrtrix as mrtrix #<---- The important new part! +import nipype.interfaces.mrtrix as mrtrix # <---- The important new part! import nipype.interfaces.fsl as fsl import nipype.algorithms.misc as misc import os, os.path as op # system functions @@ -32,11 +32,11 @@ """ This needs to point to the fdt folder you can find after extracting - * http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz + * http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz """ -data_dir = op.abspath(op.join(op.curdir,'exdata/')) +data_dir = op.abspath(op.join(op.curdir, 'exdata/')) subject_list = ['subj1'] """ @@ -48,8 +48,8 @@ infosource.iterables = ('subject_id', subject_list) info = dict(dwi=[['subject_id', 'data']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) """ Use datasource node to perform the actual data grabbing. @@ -58,7 +58,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" datasource.inputs.base_directory = data_dir @@ -87,21 +87,21 @@ b-values and b-vectors stored in FSL's format are converted into a single encoding file for MRTrix. """ -fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(),name='fsl2mrtrix') +fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') """ Tensors are fitted to each voxel in the diffusion-weighted image and from these three maps are created: - * Major eigenvector in each voxel - * Apparent diffusion coefficient - * Fractional anisotropy + * Major eigenvector in each voxel + * Apparent diffusion coefficient + * Fractional anisotropy """ gunzip = pe.Node(interface=misc.Gunzip(), name='gunzip') -dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(),name='dwi2tensor') -tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(),name='tensor2vector') -tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(),name='tensor2adc') -tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(),name='tensor2fa') +dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') +tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(), name='tensor2vector') +tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(), name='tensor2adc') +tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(), name='tensor2fa') """ These nodes are used to create a rough brain mask from the b0 image. @@ -109,11 +109,11 @@ put through a simple thresholding routine, and smoothed using a 3x3 median filter. 
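The extraction relies on the two ``extract_at_*`` settings of MRConvert shown below,
which together pull the b=0 volume out of the 4-D diffusion series. A minimal sketch of
that node on its own (the node name is illustrative; ``in_file`` is deliberately left
unset because in the workflow it arrives over a connection from the unzipped DWI)::

    import nipype.pipeline.engine as pe
    import nipype.interfaces.mrtrix as mrtrix

    extract_b0 = pe.Node(interface=mrtrix.MRConvert(), name='extract_b0_sketch')
    extract_b0.inputs.extract_at_axis = 3          # same settings as MRconvert below
    extract_b0.inputs.extract_at_coordinate = [0]  # keep only the b=0 volume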
""" -MRconvert = pe.Node(interface=mrtrix.MRConvert(),name='MRconvert') +MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert') MRconvert.inputs.extract_at_axis = 3 MRconvert.inputs.extract_at_coordinate = [0] -threshold_b0 = pe.Node(interface=mrtrix.Threshold(),name='threshold_b0') -median3d = pe.Node(interface=mrtrix.MedianFilter3D(),name='median3d') +threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0') +median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3d') """ The brain mask is also used to help identify single-fiber voxels. @@ -122,11 +122,11 @@ thresholding the result to obtain some highly anisotropic within-brain voxels. """ -erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_firstpass') -erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_secondpass') -MRmultiply = pe.Node(interface=mrtrix.MRMultiply(),name='MRmultiply') +erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_firstpass') +erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_secondpass') +MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply') MRmult_merge = pe.Node(interface=util.Merge(2), name="MRmultiply_merge") -threshold_FA = pe.Node(interface=mrtrix.Threshold(),name='threshold_FA') +threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA') threshold_FA.inputs.absolute_threshold_value = 0.7 """ @@ -135,9 +135,9 @@ thresholding it at a reasonably high level. """ -bet = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0') -gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(),name='gen_WM_mask') -threshold_wmmask = pe.Node(interface=mrtrix.Threshold(),name='threshold_wmmask') +bet = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') +gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(), name='gen_WM_mask') +threshold_wmmask = pe.Node(interface=mrtrix.Threshold(), name='threshold_wmmask') threshold_wmmask.inputs.absolute_threshold_value = 0.4 """ @@ -150,9 +150,9 @@ """ -estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(),name='estimateresponse') +estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(), name='estimateresponse') estimateresponse.inputs.maximum_harmonic_order = 6 -csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(),name='csdeconv') +csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(), name='csdeconv') csdeconv.inputs.maximum_harmonic_order = 6 """ @@ -160,12 +160,12 @@ The tracts are then used to generate a tract-density image, and they are also converted to TrackVis format. 
""" -probCSDstreamtrack = pe.Node(interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(),name='probCSDstreamtrack') +probCSDstreamtrack = pe.Node(interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(), name='probCSDstreamtrack') probCSDstreamtrack.inputs.inputmodel = 'SD_PROB' probCSDstreamtrack.inputs.maximum_number_of_tracks = 150000 -tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(),name='tracks2prob') +tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob') tracks2prob.inputs.colour = True -tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(),name='tck2trk') +tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk') """ Creating the workflow @@ -176,69 +176,69 @@ tractography = pe.Workflow(name='tractography') tractography.connect([(inputnode, fsl2mrtrix, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) -tractography.connect([(inputnode, gunzip,[("dwi","in_file")])]) -tractography.connect([(gunzip, dwi2tensor,[("out_file","in_file")])]) -tractography.connect([(fsl2mrtrix, dwi2tensor,[("encoding_file","encoding_file")])]) + ("bvals", "bval_file")])]) +tractography.connect([(inputnode, gunzip, [("dwi", "in_file")])]) +tractography.connect([(gunzip, dwi2tensor, [("out_file", "in_file")])]) +tractography.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", "encoding_file")])]) -tractography.connect([(dwi2tensor, tensor2vector,[['tensor','in_file']]), - (dwi2tensor, tensor2adc,[['tensor','in_file']]), - (dwi2tensor, tensor2fa,[['tensor','in_file']]), - ]) -tractography.connect([(tensor2fa, MRmult_merge,[("FA","in1")])]) +tractography.connect([(dwi2tensor, tensor2vector, [['tensor', 'in_file']]), + (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), + (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), + ]) +tractography.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) """ This block creates the rough brain mask to be multiplied, mulitplies it with the fractional anisotropy image, and thresholds it to get the single-fiber voxels. """ -tractography.connect([(gunzip, MRconvert,[("out_file","in_file")])]) -tractography.connect([(MRconvert, threshold_b0,[("converted","in_file")])]) -tractography.connect([(threshold_b0, median3d,[("out_file","in_file")])]) -tractography.connect([(median3d, erode_mask_firstpass,[("out_file","in_file")])]) -tractography.connect([(erode_mask_firstpass, erode_mask_secondpass,[("out_file","in_file")])]) -tractography.connect([(erode_mask_secondpass, MRmult_merge,[("out_file","in2")])]) -tractography.connect([(MRmult_merge, MRmultiply,[("out","in_files")])]) -tractography.connect([(MRmultiply, threshold_FA,[("out_file","in_file")])]) +tractography.connect([(gunzip, MRconvert, [("out_file", "in_file")])]) +tractography.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) +tractography.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) +tractography.connect([(median3d, erode_mask_firstpass, [("out_file", "in_file")])]) +tractography.connect([(erode_mask_firstpass, erode_mask_secondpass, [("out_file", "in_file")])]) +tractography.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", "in2")])]) +tractography.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) +tractography.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) """ Here the thresholded white matter mask is created for seeding the tractography. 
""" -tractography.connect([(gunzip, bet,[("out_file","in_file")])]) -tractography.connect([(gunzip, gen_WM_mask,[("out_file","in_file")])]) -tractography.connect([(bet, gen_WM_mask,[("mask_file","binary_mask")])]) -tractography.connect([(fsl2mrtrix, gen_WM_mask,[("encoding_file","encoding_file")])]) -tractography.connect([(gen_WM_mask, threshold_wmmask,[("WMprobabilitymap","in_file")])]) +tractography.connect([(gunzip, bet, [("out_file", "in_file")])]) +tractography.connect([(gunzip, gen_WM_mask, [("out_file", "in_file")])]) +tractography.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) +tractography.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", "encoding_file")])]) +tractography.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", "in_file")])]) """ Next we estimate the fiber response distribution. """ -tractography.connect([(gunzip, estimateresponse,[("out_file","in_file")])]) -tractography.connect([(fsl2mrtrix, estimateresponse,[("encoding_file","encoding_file")])]) -tractography.connect([(threshold_FA, estimateresponse,[("out_file","mask_image")])]) +tractography.connect([(gunzip, estimateresponse, [("out_file", "in_file")])]) +tractography.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", "encoding_file")])]) +tractography.connect([(threshold_FA, estimateresponse, [("out_file", "mask_image")])]) """ Run constrained spherical deconvolution. """ -tractography.connect([(gunzip, csdeconv,[("out_file","in_file")])]) -tractography.connect([(gen_WM_mask, csdeconv,[("WMprobabilitymap","mask_image")])]) -tractography.connect([(estimateresponse, csdeconv,[("response","response_file")])]) -tractography.connect([(fsl2mrtrix, csdeconv,[("encoding_file","encoding_file")])]) +tractography.connect([(gunzip, csdeconv, [("out_file", "in_file")])]) +tractography.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", "mask_image")])]) +tractography.connect([(estimateresponse, csdeconv, [("response", "response_file")])]) +tractography.connect([(fsl2mrtrix, csdeconv, [("encoding_file", "encoding_file")])]) """ Connect the tractography and compute the tract density image. 
""" -tractography.connect([(threshold_wmmask, probCSDstreamtrack,[("out_file","seed_file")])]) -tractography.connect([(csdeconv, probCSDstreamtrack,[("spherical_harmonics_image","in_file")])]) -tractography.connect([(probCSDstreamtrack, tracks2prob,[("tracked","in_file")])]) -tractography.connect([(gunzip, tracks2prob,[("out_file","template_file")])]) +tractography.connect([(threshold_wmmask, probCSDstreamtrack, [("out_file", "seed_file")])]) +tractography.connect([(csdeconv, probCSDstreamtrack, [("spherical_harmonics_image", "in_file")])]) +tractography.connect([(probCSDstreamtrack, tracks2prob, [("tracked", "in_file")])]) +tractography.connect([(gunzip, tracks2prob, [("out_file", "template_file")])]) -tractography.connect([(gunzip, tck2trk,[("out_file","image_file")])]) -tractography.connect([(probCSDstreamtrack, tck2trk,[("tracked","in_file")])]) +tractography.connect([(gunzip, tck2trk, [("out_file", "image_file")])]) +tractography.connect([(probCSDstreamtrack, tck2trk, [("tracked", "in_file")])]) """ Finally, we create another higher-level workflow to connect our tractography workflow with the info and datagrabbing nodes @@ -249,11 +249,11 @@ dwiproc = pe.Workflow(name="dwiproc") dwiproc.base_dir = os.path.abspath('dmri_mrtrix_dti') dwiproc.connect([ - (infosource,datasource,[('subject_id', 'subject_id')]), - (datasource,tractography,[('dwi','inputnode.dwi'), - ('bvals','inputnode.bvals'), - ('bvecs','inputnode.bvecs') - ]) + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, tractography, [('dwi', 'inputnode.dwi'), + ('bvals', 'inputnode.bvals'), + ('bvecs', 'inputnode.bvecs') + ]) ]) if __name__ == '__main__': diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py index bbf6da096f..22676d6093 100644 --- a/examples/dmri_preprocessing.py +++ b/examples/dmri_preprocessing.py @@ -155,14 +155,14 @@ wf = pe.Workflow(name="dMRI_Preprocessing") wf.base_dir = os.path.abspath('preprocessing_dmri_tutorial') wf.connect([ - (infosource, datasource, [('subject_id', 'subject_id')]) - ,(datasource, prep, [('dwi', 'inputnode.in_file'), + (infosource, datasource, [('subject_id', 'subject_id')]), + (datasource, prep, [('dwi', 'inputnode.in_file'), ('dwi_rev', 'inputnode.alt_file'), ('bvals', 'inputnode.in_bval'), - ('bvecs', 'inputnode.in_bvec')]) - ,(prep, bias, [('outputnode.out_file', 'inputnode.in_file'), - ('outputnode.out_mask', 'inputnode.in_mask')]) - ,(datasource, bias, [('bvals', 'inputnode.in_bval')]) + ('bvecs', 'inputnode.in_bvec')]), + (prep, bias, [('outputnode.out_file', 'inputnode.in_file'), + ('outputnode.out_mask', 'inputnode.in_mask')]), + (datasource, bias, [('bvals', 'inputnode.in_bval')]) ]) diff --git a/examples/dmri_tbss_nki.py b/examples/dmri_tbss_nki.py index aa582df5b4..67f3719b2f 100755 --- a/examples/dmri_tbss_nki.py +++ b/examples/dmri_tbss_nki.py @@ -120,7 +120,7 @@ 'inputnode.distance_map')]), (tbss_source, tbss_MD, [('md_list', 'inputnode.file_list')]), - ]) + ]) if __name__ == '__main__': tbssproc.write_graph() diff --git a/examples/fmri_ants_openfmri.py b/examples/fmri_ants_openfmri.py index 2acc96bc0c..7a42ee8432 100755 --- a/examples/fmri_ants_openfmri.py +++ b/examples/fmri_ants_openfmri.py @@ -45,7 +45,7 @@ version = 0 if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): + LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): version = 507 fsl.FSLCommand.set_default_output_type('NIFTI_GZ') @@ -58,6 +58,7 @@ 'from scipy.special import legendre' ] + def median(in_files): """Computes 
an average of the median of each realigned timeseries @@ -111,11 +112,11 @@ def create_reg_workflow(name='registration'): register = pe.Workflow(name=name) inputnode = pe.Node(interface=niu.IdentityInterface(fields=['source_files', - 'mean_image', - 'anatomical_image', - 'target_image', - 'target_image_brain', - 'config_file']), + 'mean_image', + 'anatomical_image', + 'target_image', + 'target_image_brain', + 'config_file']), name='inputspec') outputnode = pe.Node(interface=niu.IdentityInterface(fields=['func2anat_transform', 'anat2target_transform', @@ -186,7 +187,7 @@ def create_reg_workflow(name='registration'): convert2itk.inputs.fsl2ras = True convert2itk.inputs.itk_transform = True register.connect(mean2anatbbr, 'out_matrix_file', convert2itk, 'transform_file') - register.connect(inputnode, 'mean_image',convert2itk, 'source_file') + register.connect(inputnode, 'mean_image', convert2itk, 'source_file') register.connect(stripper, 'out_file', convert2itk, 'reference_file') """ @@ -227,7 +228,7 @@ def create_reg_workflow(name='registration'): reg.plugin_args = {'qsub_args': '-pe orte 4', 'sbatch_args': '--mem=6G -c 4'} register.connect(stripper, 'out_file', reg, 'moving_image') - register.connect(inputnode,'target_image_brain', reg,'fixed_image') + register.connect(inputnode, 'target_image_brain', reg, 'fixed_image') """ Concatenate the affine and ants transforms into a list @@ -250,7 +251,7 @@ def create_reg_workflow(name='registration'): warpmean.inputs.invert_transform_flags = [False, False] warpmean.inputs.terminal_output = 'file' - register.connect(inputnode,'target_image_brain', warpmean,'reference_image') + register.connect(inputnode, 'target_image_brain', warpmean, 'reference_image') register.connect(inputnode, 'mean_image', warpmean, 'input_image') register.connect(merge, 'out', warpmean, 'transforms') @@ -266,8 +267,8 @@ def create_reg_workflow(name='registration'): warpall.inputs.invert_transform_flags = [False, False] warpall.inputs.terminal_output = 'file' - register.connect(inputnode,'target_image_brain',warpall,'reference_image') - register.connect(inputnode,'source_files', warpall, 'input_image') + register.connect(inputnode, 'target_image_brain', warpall, 'reference_image') + register.connect(inputnode, 'source_files', warpall, 'input_image') register.connect(merge, 'out', warpall, 'transforms') """ @@ -286,6 +287,7 @@ def create_reg_workflow(name='registration'): return register + def get_aparc_aseg(files): """Return the aparc+aseg.mgz file""" for name in files: @@ -293,6 +295,7 @@ def get_aparc_aseg(files): return name raise ValueError('aparc+aseg.mgz not found') + def create_fs_reg_workflow(name='registration'): """Create a FEAT preprocessing workflow together with freesurfer @@ -350,7 +353,7 @@ def create_fs_reg_workflow(name='registration'): # Coregister the median to the surface bbregister = Node(freesurfer.BBRegister(registered_file=True), - name='bbregister') + name='bbregister') bbregister.inputs.init = 'fsl' bbregister.inputs.contrast_type = 't2' bbregister.inputs.out_fsl_file = True @@ -370,7 +373,7 @@ def create_fs_reg_workflow(name='registration'): binarize = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1), name="binarize_aparc") register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, "in_file") - stripper = Node(fsl.ApplyMask(), name ='stripper') + stripper = Node(fsl.ApplyMask(), name='stripper') register.connect(binarize, "binary_file", stripper, "mask_file") register.connect(convert, 'out_file', stripper, 'in_file') @@ -395,7 +398,7 
@@ def create_fs_reg_workflow(name='registration'): convert2itk.inputs.fsl2ras = True convert2itk.inputs.itk_transform = True register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file') - register.connect(inputnode, 'mean_image',convert2itk, 'source_file') + register.connect(inputnode, 'mean_image', convert2itk, 'source_file') register.connect(stripper, 'out_file', convert2itk, 'reference_file') """ @@ -436,7 +439,7 @@ def create_fs_reg_workflow(name='registration'): reg.plugin_args = {'qsub_args': '-pe orte 4', 'sbatch_args': '--mem=6G -c 4'} register.connect(stripper, 'out_file', reg, 'moving_image') - register.connect(inputnode,'target_image', reg,'fixed_image') + register.connect(inputnode, 'target_image', reg, 'fixed_image') """ Concatenate the affine and ants transforms into a list @@ -458,8 +461,8 @@ def create_fs_reg_workflow(name='registration'): warpmean.inputs.invert_transform_flags = [False, False] warpmean.inputs.terminal_output = 'file' warpmean.inputs.args = '--float' - #warpmean.inputs.num_threads = 4 - #warpmean.plugin_args = {'sbatch_args': '--mem=4G -c 4'} + # warpmean.inputs.num_threads = 4 + # warpmean.plugin_args = {'sbatch_args': '--mem=4G -c 4'} """ Transform the remaining images. First to anatomical and then to target @@ -483,11 +486,11 @@ def create_fs_reg_workflow(name='registration'): register.connect(warpmean, 'output_image', outputnode, 'transformed_mean') register.connect(warpall, 'output_image', outputnode, 'transformed_files') - register.connect(inputnode,'target_image', warpmean,'reference_image') + register.connect(inputnode, 'target_image', warpmean, 'reference_image') register.connect(inputnode, 'mean_image', warpmean, 'input_image') register.connect(merge, 'out', warpmean, 'transforms') - register.connect(inputnode,'target_image', warpall,'reference_image') - register.connect(inputnode,'source_files', warpall, 'input_image') + register.connect(inputnode, 'target_image', warpall, 'reference_image') + register.connect(inputnode, 'source_files', warpall, 'input_image') register.connect(merge, 'out', warpall, 'transforms') """ @@ -516,6 +519,7 @@ def create_fs_reg_workflow(name='registration'): Get info for a given subject """ + def get_subjectinfo(subject_id, base_dir, task_id, model_id): """Get info for a given subject @@ -560,7 +564,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id): for idx in range(n_tasks): taskidx = np.where(taskinfo[:, 0] == 'task%03d' % (idx + 1)) conds.append([condition.replace(' ', '_') for condition - in taskinfo[taskidx[0], 2]]) # if 'junk' not in condition]) + in taskinfo[taskidx[0], 2]]) # if 'junk' not in condition]) files = sorted(glob(os.path.join(base_dir, subject_id, 'BOLD', @@ -568,8 +572,8 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id): runs = [int(val[-3:]) for val in files] run_ids.insert(idx, runs) json_info = os.path.join(base_dir, subject_id, 'BOLD', - 'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]), - 'bold_scaninfo.json') + 'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]), + 'bold_scaninfo.json') if os.path.exists(json_info): import json with open(json_info, 'rt') as fp: @@ -577,8 +581,8 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id): TR = data['global']['const']['RepetitionTime'] / 1000. 
else: task_scan_key = os.path.join(base_dir, subject_id, 'BOLD', - 'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]), - 'scan_key.txt') + 'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]), + 'scan_key.txt') if os.path.exists(task_scan_key): TR = np.genfromtxt(task_scan_key)[1] else: @@ -589,6 +593,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id): Analyzes an open fmri dataset """ + def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, task_id=None, output_dir=None, subj_prefix='*', hpcutoff=120., use_derivatives=True, @@ -662,15 +667,15 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, has_contrast = os.path.exists(contrast_file) if has_contrast: datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id', - 'task_id', 'model_id'], - outfields=['anat', 'bold', 'behav', - 'contrasts']), - name='datasource') + 'task_id', 'model_id'], + outfields=['anat', 'bold', 'behav', + 'contrasts']), + name='datasource') else: datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id', - 'task_id', 'model_id'], - outfields=['anat', 'bold', 'behav']), - name='datasource') + 'task_id', 'model_id'], + outfields=['anat', 'bold', 'behav']), + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '*' @@ -682,19 +687,19 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, 'contrasts': ('models/model%03d/' 'task_contrasts.txt')} datasource.inputs.template_args = {'anat': [['subject_id']], - 'bold': [['subject_id', 'task_id']], - 'behav': [['subject_id', 'model_id', - 'task_id', 'run_id']], - 'contrasts': [['model_id']]} + 'bold': [['subject_id', 'task_id']], + 'behav': [['subject_id', 'model_id', + 'task_id', 'run_id']], + 'contrasts': [['model_id']]} else: datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz', 'bold': '%s/BOLD/task%03d_r*/bold.nii.gz', 'behav': ('%s/model/model%03d/onsets/task%03d_' 'run%03d/cond*.txt')} datasource.inputs.template_args = {'anat': [['subject_id']], - 'bold': [['subject_id', 'task_id']], - 'behav': [['subject_id', 'model_id', - 'task_id', 'run_id']]} + 'bold': [['subject_id', 'task_id']], + 'behav': [['subject_id', 'model_id', + 'task_id', 'run_id']]} datasource.inputs.sort_filelist = True @@ -737,7 +742,7 @@ def get_contrasts(contrast_file, task_id, conds): for row in contrast_def: if row[0] != 'task%03d' % task_id: continue - con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))], + con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))], row[2:].astype(float).tolist()] contrasts.append(con) # add auto contrasts for each column @@ -763,7 +768,7 @@ def get_contrasts(contrast_file, task_id, conds): name="art") modelspec = pe.Node(interface=model.SpecifyModel(), - name="modelspec") + name="modelspec") modelspec.inputs.input_units = 'secs' def check_behav_list(behav, run_id, conds): @@ -777,9 +782,9 @@ def check_behav_list(behav, run_id, conds): num_conds).tolist() reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id', 'conds'], - output_names=['behav'], - function=check_behav_list), - name='reshape_behav') + output_names=['behav'], + function=check_behav_list), + name='reshape_behav') wf.connect(subjinfo, 'TR', modelspec, 'time_repetition') wf.connect(datasource, 'behav', reshape_behav, 'behav') @@ -861,7 +866,7 @@ def sort_copes(copes, varcopes, contrasts): ('varcopes', 'inputspec.varcopes'), ('n_runs', 'l2model.num_copes')]), (modelfit, fixed_fx, [('outputspec.dof_file', - 
'inputspec.dof_files'), + 'inputspec.dof_files'), ]) ]) @@ -891,13 +896,13 @@ def merge_files(copes, varcopes, zstats): mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes', 'zstats'], - output_names=['out_files', 'splits'], - function=merge_files), - name='merge_files') + output_names=['out_files', 'splits'], + function=merge_files), + name='merge_files') wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, - [('copes', 'copes'), - ('varcopes', 'varcopes'), - ('zstats', 'zstats'), + [('copes', 'copes'), + ('varcopes', 'varcopes'), + ('zstats', 'zstats'), ])]) wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') @@ -911,7 +916,7 @@ def split_files(in_files, splits): output_names=['copes', 'varcopes', 'zstats'], function=split_files), - name='split_files') + name='split_files') wf.connect(mergefunc, 'splits', splitfunc, 'splits') wf.connect(registration, 'outputspec.transformed_files', splitfunc, 'in_files') @@ -935,12 +940,12 @@ def split_files(in_files, splits): def get_subs(subject_id, conds, run_id, model_id, task_id): subs = [('_subject_id_%s_' % subject_id, '')] - subs.append(('_model_id_%d' % model_id, 'model%03d' %model_id)) + subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id)) subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id)) subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp', - 'mean')) + 'mean')) subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt', - 'affine')) + 'affine')) for i in range(len(conds)): subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1))) @@ -992,17 +997,17 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): wf.connect(contrastgen, 'contrasts', subsgen, 'conds') wf.connect(subsgen, 'substitutions', datasink, 'substitutions') wf.connect([(fixed_fx.get_node('outputspec'), datasink, - [('res4d', 'res4d'), - ('copes', 'copes'), - ('varcopes', 'varcopes'), - ('zstats', 'zstats'), - ('tstats', 'tstats')]) - ]) + [('res4d', 'res4d'), + ('copes', 'copes'), + ('varcopes', 'varcopes'), + ('zstats', 'zstats'), + ('tstats', 'tstats')]) + ]) wf.connect([(modelfit.get_node('modelgen'), datasink, - [('design_cov', 'qa.model'), - ('design_image', 'qa.model.@matrix_image'), - ('design_file', 'qa.model.@matrix'), - ])]) + [('design_cov', 'qa.model'), + ('design_image', 'qa.model.@matrix_image'), + ('design_file', 'qa.model.@matrix'), + ])]) wf.connect([(preproc, datasink, [('outputspec.motion_parameters', 'qa.motion'), ('outputspec.motion_plots', @@ -1061,7 +1066,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): help="Model index" + defstr) parser.add_argument('-x', '--subjectprefix', default='sub*', help="Subject prefix" + defstr) - parser.add_argument('-t', '--task', default=1, #nargs='+', + parser.add_argument('-t', '--task', default=1, # nargs='+', type=int, help="Task index" + defstr) parser.add_argument('--hpfilter', default=120., type=float, help="High pass filter cutoff (in secs)" + defstr) @@ -1109,7 +1114,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id): fwhm=args.fwhm, subjects_dir=args.subjects_dir, target=args.target_file) - #wf.config['execution']['remove_unnecessary_outputs'] = False + # wf.config['execution']['remove_unnecessary_outputs'] = False wf.base_dir = work_dir if args.plugin_args: diff --git a/examples/fmri_freesurfer_smooth.py b/examples/fmri_freesurfer_smooth.py index 0bb5cd7601..315030d3e6 100755 --- a/examples/fmri_freesurfer_smooth.py +++ b/examples/fmri_freesurfer_smooth.py @@ -102,19 +102,19 @@ """ art 
= pe.Node(interface=ra.ArtifactDetect(), name="art") -art.inputs.use_differences = [True, False] -art.inputs.use_norm = True -art.inputs.norm_threshold = 1 +art.inputs.use_differences = [True, False] +art.inputs.use_norm = True +art.inputs.norm_threshold = 1 art.inputs.zintensity_threshold = 3 -art.inputs.mask_type = 'file' -art.inputs.parameter_source = 'SPM' +art.inputs.mask_type = 'file' +art.inputs.parameter_source = 'SPM' """ Use :class:`nipype.interfaces.freesurfer.BBRegister` to coregister the mean functional image generated by realign to the subjects' surfaces. """ -surfregister = pe.Node(interface=fs.BBRegister(),name='surfregister') +surfregister = pe.Node(interface=fs.BBRegister(), name='surfregister') surfregister.inputs.init = 'fsl' surfregister.inputs.contrast_type = 't2' @@ -139,7 +139,7 @@ mask. """ -Threshold = pe.Node(interface=fs.Binarize(),name='threshold') +Threshold = pe.Node(interface=fs.Binarize(), name='threshold') Threshold.inputs.min = 10 Threshold.inputs.out_type = 'nii' @@ -163,25 +163,25 @@ """ -volsmooth = pe.Node(interface=spm.Smooth(), name = "volsmooth") -surfsmooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0,1,0.1)), name = "surfsmooth", +volsmooth = pe.Node(interface=spm.Smooth(), name="volsmooth") +surfsmooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)), name="surfsmooth", iterfield=['in_file']) """ We connect up the different nodes to implement the preprocessing workflow. """ -preproc.connect([(realign, surfregister,[('mean_image', 'source_file')]), - (FreeSurferSource, ApplyVolTransform,[('brainmask','target_file')]), - (surfregister, ApplyVolTransform,[('out_reg_file','reg_file')]), - (realign, ApplyVolTransform,[('mean_image', 'source_file')]), - (ApplyVolTransform, Threshold,[('transformed_file','in_file')]), - (realign, art,[('realignment_parameters','realignment_parameters'), - ('realigned_files','realigned_files')]), +preproc.connect([(realign, surfregister, [('mean_image', 'source_file')]), + (FreeSurferSource, ApplyVolTransform, [('brainmask', 'target_file')]), + (surfregister, ApplyVolTransform, [('out_reg_file', 'reg_file')]), + (realign, ApplyVolTransform, [('mean_image', 'source_file')]), + (ApplyVolTransform, Threshold, [('transformed_file', 'in_file')]), + (realign, art, [('realignment_parameters', 'realignment_parameters'), + ('realigned_files', 'realigned_files')]), (Threshold, art, [('binary_file', 'mask_file')]), (realign, volsmooth, [('realigned_files', 'in_files')]), (realign, surfsmooth, [('realigned_files', 'in_file')]), - (surfregister, surfsmooth, [('out_reg_file','reg_file')]), + (surfregister, surfsmooth, [('out_reg_file', 'reg_file')]), ]) @@ -198,16 +198,16 @@ :class:`nipype.interfaces.spm.SpecifyModel`. """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") -modelspec.inputs.concatenate_runs = True +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") +modelspec.inputs.concatenate_runs = True """ Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. 
""" -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} """ Use :class:`nipype.interfaces.spm.EstimateModel` to determine the @@ -215,20 +215,20 @@ """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} """ Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ -contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") -volanalysis.connect([(modelspec,level1design,[('session_info','session_info')]), - (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]), - (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), +volanalysis.connect([(modelspec, level1design, [('session_info', 'session_info')]), + (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), + (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), ]) """ @@ -257,7 +257,7 @@ nifti images. """ -convert = pe.Node(interface=fs.MRIConvert(out_type='nii'),name='convert2nii') +convert = pe.Node(interface=fs.MRIConvert(out_type='nii'), name='convert2nii') convert2 = pe.MapNode(interface=fs.MRIConvert(out_type='nii'), iterfield=['in_file'], name='convertimg2nii') @@ -295,10 +295,10 @@ Connect up the volume normalization components """ -volnorm.connect([(convert, segment, [('out_file','data')]), +volnorm.connect([(convert, segment, [('out_file', 'data')]), (convert2, normwreg, [('out_file', 'source_file')]), (segment, normalize, [('transformation_mat', 'parameter_file')]), - (normwreg, normalize, [('transformed_file','apply_to_files')]), + (normwreg, normalize, [('transformed_file', 'apply_to_files')]), ]) """ @@ -326,25 +326,25 @@ """ l1pipeline = pe.Workflow(name='firstlevel') -l1pipeline.connect([(inputnode,preproc,[('func','realign.in_files'), - ('subject_id','surfregister.subject_id'), - ('subject_id','fssource.subject_id'), +l1pipeline.connect([(inputnode, preproc, [('func', 'realign.in_files'), + ('subject_id', 'surfregister.subject_id'), + ('subject_id', 'fssource.subject_id'), ]), - (inputnode, volanalysis,[('session_info','modelspec.subject_info'), - ('contrasts','contrastestimate.contrasts')]), - (inputnode, surfanalysis,[('session_info','modelspec.subject_info'), - ('contrasts','contrastestimate.contrasts')]), + (inputnode, volanalysis, [('session_info', 'modelspec.subject_info'), + ('contrasts', 'contrastestimate.contrasts')]), + (inputnode, surfanalysis, [('session_info', 'modelspec.subject_info'), + ('contrasts', 'contrastestimate.contrasts')]), ]) # attach volume and surface model specification and estimation components l1pipeline.connect([(preproc, volanalysis, [('realign.realignment_parameters', - 'modelspec.realignment_parameters'), - ('volsmooth.smoothed_files', - 'modelspec.functional_runs'), - ('art.outlier_files', - 'modelspec.outlier_files'), - ('threshold.binary_file', - 'level1design.mask_image')]), + 'modelspec.realignment_parameters'), + ('volsmooth.smoothed_files', + 
'modelspec.functional_runs'), + ('art.outlier_files', + 'modelspec.outlier_files'), + ('threshold.binary_file', + 'level1design.mask_image')]), (preproc, surfanalysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters'), ('surfsmooth.smoothed_file', @@ -356,13 +356,13 @@ ]) # attach volume contrast normalization components -l1pipeline.connect([(preproc, volnorm, [('fssource.orig','convert2nii.in_file'), - ('surfregister.out_reg_file','applyreg2con.reg_file'), - ('fssource.orig','applyreg2con.target_file')]), +l1pipeline.connect([(preproc, volnorm, [('fssource.orig', 'convert2nii.in_file'), + ('surfregister.out_reg_file', 'applyreg2con.reg_file'), + ('fssource.orig', 'applyreg2con.target_file')]), (volanalysis, volnorm, [('contrastestimate.con_images', 'convertimg2nii.in_file'), ]) - ]) + ]) """ @@ -391,8 +391,8 @@ # Specify the subject directories subject_list = ['s1', 's3'] # Map field names to individual subject runs. -info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -417,7 +417,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -446,14 +446,15 @@ paradigm was used for every participant. """ + def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n"%str(subject_id)) + print("Subject ID: %s\n" %str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -468,9 +469,9 @@ def subjectinfo(subject_id): in the `subjectinfo` function described above. """ -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) -contrasts = [cont1,cont2] +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) +contrasts = [cont1, cont2] """ Set up node specific inputs @@ -481,21 +482,21 @@ def subjectinfo(subject_id): """ modelspecref = l1pipeline.inputs.volanalysis.modelspec -modelspecref.input_units = 'secs' -modelspecref.time_repetition = 3. +modelspecref.input_units = 'secs' +modelspecref.time_repetition = 3. modelspecref.high_pass_filter_cutoff = 120 modelspecref = l1pipeline.inputs.surfanalysis.modelspec -modelspecref.input_units = 'secs' -modelspecref.time_repetition = 3. +modelspecref.input_units = 'secs' +modelspecref.time_repetition = 3. 
modelspecref.high_pass_filter_cutoff = 120 l1designref = l1pipeline.inputs.volanalysis.level1design -l1designref.timing_units = modelspecref.output_units +l1designref.timing_units = modelspecref.output_units l1designref.interscan_interval = modelspecref.time_repetition l1designref = l1pipeline.inputs.surfanalysis.level1design -l1designref.timing_units = modelspecref.output_units +l1designref.timing_units = modelspecref.output_units l1designref.interscan_interval = modelspecref.time_repetition l1pipeline.inputs.inputnode.contrasts = contrasts @@ -518,10 +519,10 @@ def subjectinfo(subject_id): level1.base_dir = os.path.abspath('volsurf_tutorial/workingdir') level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,l1pipeline,[('func','inputnode.func')]), - (infosource,l1pipeline,[('subject_id','inputnode.subject_id'), - (('subject_id', subjectinfo), - 'inputnode.session_info')]), + (datasource, l1pipeline, [('func', 'inputnode.func')]), + (infosource, l1pipeline, [('subject_id', 'inputnode.subject_id'), + (('subject_id', subjectinfo), + 'inputnode.session_info')]), ]) @@ -536,16 +537,17 @@ def subjectinfo(subject_id): datasink.inputs.base_directory = os.path.abspath('volsurf_tutorial/l1out') datasink.inputs.substitutions = [] + def getsubs(subject_id): - subs = [('_subject_id_%s/'%subject_id,'')] + subs = [('_subject_id_%s/' %subject_id, '')] return subs # store relevant outputs from various stages of the 1st level analysis -level1.connect([(infosource, datasink,[('subject_id','container'), +level1.connect([(infosource, datasink, [('subject_id', 'container'), (('subject_id', getsubs), 'substitutions') ]), - (l1pipeline, datasink,[('surfanalysis.contrastestimate.con_images','contrasts'), - ('preproc.surfregister.out_reg_file','registrations'), + (l1pipeline, datasink, [('surfanalysis.contrastestimate.con_images', 'contrasts'), + ('preproc.surfregister.out_reg_file', 'registrations'), ]) ]) @@ -577,21 +579,21 @@ def getsubs(subject_id): l2inputnode = pe.Node(interface=util.IdentityInterface(fields=['contrasts', 'hemi']), name='inputnode') -l2inputnode.iterables = [('contrasts', list(range(1,len(contrasts)+1))), - ('hemi', ['lh','rh'])] +l2inputnode.iterables = [('contrasts', list(range(1, len(contrasts)+1))), + ('hemi', ['lh', 'rh'])] """ Use a datagrabber node to collect contrast images and registration files """ l2source = pe.Node(interface=nio.DataGrabber(infields=['con_id'], - outfields=['con','reg']), + outfields=['con', 'reg']), name='l2source') l2source.inputs.base_directory = os.path.abspath('volsurf_tutorial/l1out') l2source.inputs.template = '*' l2source.inputs.field_template = dict(con='*/contrasts/con_%04d.img', reg='*/registrations/*.dat') -l2source.inputs.template_args = dict(con=[['con_id']],reg=[[]]) +l2source.inputs.template_args = dict(con=[['con_id']], reg=[[]]) l2source.inputs.sort_filelist = True l2flow.connect(l2inputnode, 'contrasts', l2source, 'con_id') @@ -603,18 +605,19 @@ def getsubs(subject_id): mergenode = pe.Node(interface=util.Merge(2, axis='hstack'), name='merge') + def ordersubjects(files, subj_list): outlist = [] for s in subj_list: for f in files: - if '/%s/'%s in f: + if '/%s/' %s in f: outlist.append(f) continue print(outlist) return outlist -l2flow.connect(l2source,('con', ordersubjects, subject_list), mergenode, 'in1') -l2flow.connect(l2source,('reg', ordersubjects, subject_list), mergenode, 'in2') +l2flow.connect(l2source, ('con', ordersubjects, subject_list), mergenode, 'in1') +l2flow.connect(l2source, ('reg', 
ordersubjects, subject_list), mergenode, 'in2') """ Concatenate contrast images projected to fsaverage @@ -624,6 +627,7 @@ def ordersubjects(files, subj_list): l2concat.inputs.target = 'fsaverage' l2concat.inputs.fwhm = 5 + def list2tuple(listoflist): return [tuple(x) for x in listoflist] l2flow.connect(l2inputnode, 'hemi', l2concat, 'hemi') diff --git a/examples/fmri_fsl.py b/examples/fmri_fsl.py index b4a1225db1..285fc81d78 100755 --- a/examples/fmri_fsl.py +++ b/examples/fmri_fsl.py @@ -29,7 +29,6 @@ import nipype.algorithms.rapidart as ra # artifact detection - """ Preliminaries ------------- @@ -64,7 +63,7 @@ """ inputnode = pe.Node(interface=util.IdentityInterface(fields=['func', - 'struct',]), + 'struct', ]), name='inputspec') """ @@ -73,8 +72,8 @@ """ img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float', - op_string = '', - suffix='_dtype'), + op_string='', + suffix='_dtype'), iterfield=['in_file'], name='img2float') preproc.connect(inputnode, 'func', img2float, 'in_file') @@ -84,12 +83,13 @@ """ extract_ref = pe.Node(interface=fsl.ExtractROI(t_size=1), - name = 'extractref') + name='extractref') """ Define a function to pick the first file from a list of files """ + def pickfirst(files): if isinstance(files, list): return files[0] @@ -102,12 +102,13 @@ def pickfirst(files): Define a function to return the 1 based index of the middle volume """ + def getmiddlevolume(func): from nibabel import load funcfile = func if isinstance(func, list): funcfile = func[0] - _,_,_,timepoints = load(funcfile).get_shape() + _, _, _, timepoints = load(funcfile).get_shape() return int(timepoints / 2) - 1 preproc.connect(inputnode, ('func', getmiddlevolume), extract_ref, 't_min') @@ -116,10 +117,10 @@ def getmiddlevolume(func): Realign the functional runs to the middle volume of the first run """ -motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True, - save_plots = True), +motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats=True, + save_plots=True), name='realign', - iterfield = ['in_file']) + iterfield=['in_file']) preproc.connect(img2float, 'out_file', motion_correct, 'in_file') preproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file') @@ -129,8 +130,8 @@ def getmiddlevolume(func): """ plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) + name='plot_motion', + iterfield=['in_file']) plot_motion.iterables = ('plot_type', ['rotations', 'translations']) preproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') @@ -138,7 +139,7 @@ def getmiddlevolume(func): Extract the mean volume of the first functional run """ -meanfunc = pe.Node(interface=fsl.ImageMaths(op_string = '-Tmean', +meanfunc = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), name='meanfunc') preproc.connect(motion_correct, ('out_file', pickfirst), meanfunc, 'in_file') @@ -147,10 +148,10 @@ def getmiddlevolume(func): Strip the skull from the mean functional to generate a mask """ -meanfuncmask = pe.Node(interface=fsl.BET(mask = True, +meanfuncmask = pe.Node(interface=fsl.BET(mask=True, no_output=True, - frac = 0.3), - name = 'meanfuncmask') + frac=0.3), + name='meanfuncmask') preproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file') """ @@ -160,7 +161,7 @@ def getmiddlevolume(func): maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), iterfield=['in_file'], - name = 'maskfunc') + name='maskfunc') preproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') 
preproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2') @@ -170,7 +171,7 @@ def getmiddlevolume(func): """ getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'), - iterfield = ['in_file'], + iterfield=['in_file'], name='getthreshold') preproc.connect(maskfunc, 'out_file', getthresh, 'in_file') @@ -188,8 +189,9 @@ def getmiddlevolume(func): Define a function to get 10% of the intensity """ + def getthreshop(thresh): - return '-thr %.10f -Tmin -bin'%(0.1*thresh[0][1]) + return '-thr %.10f -Tmin -bin' %(0.1*thresh[0][1]) preproc.connect(getthresh, ('out_stat', getthreshop), threshold, 'op_string') """ @@ -197,7 +199,7 @@ def getthreshop(thresh): """ medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield = ['in_file'], + iterfield=['in_file'], name='medianval') preproc.connect(motion_correct, 'out_file', medianval, 'in_file') preproc.connect(threshold, 'out_file', medianval, 'mask_file') @@ -238,8 +240,8 @@ def getthreshop(thresh): mergenode = pe.Node(interface=util.Merge(2, axis='hstack'), name='merge') -preproc.connect(meanfunc2,'out_file', mergenode, 'in1') -preproc.connect(medianval,'out_stat', mergenode, 'in2') +preproc.connect(meanfunc2, 'out_file', mergenode, 'in1') +preproc.connect(medianval, 'out_stat', mergenode, 'in2') """ @@ -248,18 +250,20 @@ def getthreshop(thresh): """ smooth = pe.MapNode(interface=fsl.SUSAN(), - iterfield=['in_file', 'brightness_threshold','usans'], + iterfield=['in_file', 'brightness_threshold', 'usans'], name='smooth') """ Define a function to get the brightness threshold for SUSAN """ + def getbtthresh(medianvals): return [0.75*val for val in medianvals] + def getusans(x): - return [[tuple([val[0],0.75*val[1]])] for val in x] + return [[tuple([val[0], 0.75*val[1]])] for val in x] preproc.connect(maskfunc2, 'out_file', smooth, 'in_file') preproc.connect(medianval, ('out_stat', getbtthresh), smooth, 'brightness_threshold') @@ -281,7 +285,7 @@ def getusans(x): """ intnorm = pe.MapNode(interface=fsl.ImageMaths(suffix='_intnorm'), - iterfield=['in_file','op_string'], + iterfield=['in_file', 'op_string'], name='intnorm') preproc.connect(maskfunc3, 'out_file', intnorm, 'in_file') @@ -289,6 +293,7 @@ def getusans(x): Define a function to get the scaling factor for intensity normalization """ + def getinormscale(medianvals): return ['-mul %.10f' % (10000. / val) for val in medianvals] preproc.connect(medianval, ('out_stat', getinormscale), intnorm, 'op_string') @@ -318,12 +323,12 @@ def getinormscale(medianvals): """ nosestrip = pe.Node(interface=fsl.BET(frac=0.3), - name = 'nosestrip') -skullstrip = pe.Node(interface=fsl.BET(mask = True), - name = 'stripstruct') + name='nosestrip') +skullstrip = pe.Node(interface=fsl.BET(mask=True), + name='stripstruct') coregister = pe.Node(interface=fsl.FLIRT(dof=6), - name = 'coregister') + name='coregister') """ Use :class:`nipype.algorithms.rapidart` to determine which of the @@ -331,22 +336,22 @@ def getinormscale(medianvals): intensity and/or movement. 
""" -art = pe.MapNode(interface=ra.ArtifactDetect(use_differences = [True, False], - use_norm = True, - norm_threshold = 1, - zintensity_threshold = 3, - parameter_source = 'FSL', - mask_type = 'file'), +art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False], + use_norm=True, + norm_threshold=1, + zintensity_threshold=3, + parameter_source='FSL', + mask_type='file'), iterfield=['realigned_files', 'realignment_parameters'], name="art") -preproc.connect([(inputnode, nosestrip,[('struct','in_file')]), - (nosestrip, skullstrip, [('out_file','in_file')]), - (skullstrip, coregister,[('out_file','in_file')]), - (meanfunc2, coregister,[(('out_file',pickfirst),'reference')]), - (motion_correct, art, [('par_file','realignment_parameters')]), - (maskfunc2, art, [('out_file','realigned_files')]), +preproc.connect([(inputnode, nosestrip, [('struct', 'in_file')]), + (nosestrip, skullstrip, [('out_file', 'in_file')]), + (skullstrip, coregister, [('out_file', 'in_file')]), + (meanfunc2, coregister, [(('out_file', pickfirst), 'reference')]), + (motion_correct, art, [('par_file', 'realignment_parameters')]), + (maskfunc2, art, [('out_file', 'realigned_files')]), (dilatemask, art, [('out_file', 'mask_file')]), ]) @@ -377,7 +382,7 @@ def getinormscale(medianvals): """ modelgen = pe.MapNode(interface=fsl.FEATModel(), name='modelgen', - iterfield = ['fsf_file', 'ev_files']) + iterfield=['fsf_file', 'ev_files']) """ @@ -389,27 +394,27 @@ def getinormscale(medianvals): mask_size=5, threshold=1000), name='modelestimate', - iterfield = ['design_file','in_file']) + iterfield=['design_file', 'in_file']) """ Use :class:`nipype.interfaces.fsl.ContrastMgr` to generate contrast estimates """ conestimate = pe.MapNode(interface=fsl.ContrastMgr(), name='conestimate', - iterfield = ['tcon_file','param_estimates', + iterfield=['tcon_file', 'param_estimates', 'sigmasquareds', 'corrections', 'dof_file']) modelfit.connect([ - (modelspec,level1design,[('session_info','session_info')]), - (level1design,modelgen,[('fsf_files', 'fsf_file'), + (modelspec, level1design, [('session_info', 'session_info')]), + (level1design, modelgen, [('fsf_files', 'fsf_file'), ('ev_files', 'ev_files')]), - (modelgen,modelestimate,[('design_file','design_file')]), - (modelgen,conestimate,[('con_file','tcon_file')]), - (modelestimate,conestimate,[('param_estimates','param_estimates'), + (modelgen, modelestimate, [('design_file', 'design_file')]), + (modelgen, conestimate, [('con_file', 'tcon_file')]), + (modelestimate, conestimate, [('param_estimates', 'param_estimates'), ('sigmasquareds', 'sigmasquareds'), - ('corrections','corrections'), - ('dof_file','dof_file')]), + ('corrections', 'corrections'), + ('dof_file', 'dof_file')]), ]) """ @@ -425,13 +430,13 @@ def getinormscale(medianvals): varcopes for each condition """ -copemerge = pe.MapNode(interface=fsl.Merge(dimension='t'), +copemerge = pe.MapNode(interface=fsl.Merge(dimension='t'), iterfield=['in_files'], name="copemerge") varcopemerge = pe.MapNode(interface=fsl.Merge(dimension='t'), - iterfield=['in_files'], - name="varcopemerge") + iterfield=['in_files'], + name="varcopemerge") """ Use :class:`nipype.interfaces.fsl.L2Model` to generate subject and condition @@ -446,13 +451,13 @@ def getinormscale(medianvals): """ flameo = pe.MapNode(interface=fsl.FLAMEO(run_mode='fe'), name="flameo", - iterfield=['cope_file','var_cope_file']) + iterfield=['cope_file', 'var_cope_file']) -fixed_fx.connect([(copemerge,flameo,[('merged_file','cope_file')]), - 
(varcopemerge,flameo,[('merged_file','var_cope_file')]), - (level2model,flameo, [('design_mat','design_file'), - ('design_con','t_con_file'), - ('design_grp','cov_split_file')]), +fixed_fx.connect([(copemerge, flameo, [('merged_file', 'cope_file')]), + (varcopemerge, flameo, [('merged_file', 'var_cope_file')]), + (level2model, flameo, [('design_mat', 'design_file'), + ('design_con', 't_con_file'), + ('design_grp', 'cov_split_file')]), ]) @@ -462,26 +467,28 @@ def getinormscale(medianvals): """ + def sort_copes(files): numelements = len(files[0]) outfiles = [] for i in range(numelements): - outfiles.insert(i,[]) + outfiles.insert(i, []) for j, elements in enumerate(files): outfiles[i].append(elements[i]) return outfiles + def num_copes(files): return len(files) firstlevel = pe.Workflow(name='firstlevel') firstlevel.connect([(preproc, modelfit, [('highpass.out_file', 'modelspec.functional_runs'), ('art.outlier_files', 'modelspec.outlier_files'), - ('highpass.out_file','modelestimate.in_file')]), + ('highpass.out_file', 'modelestimate.in_file')]), (preproc, fixed_fx, [('coregister.out_file', 'flameo.mask_file')]), - (modelfit, fixed_fx,[(('conestimate.copes', sort_copes),'copemerge.in_files'), - (('conestimate.varcopes', sort_copes),'varcopemerge.in_files'), - (('conestimate.copes', num_copes),'l2model.num_copes'), + (modelfit, fixed_fx, [(('conestimate.copes', sort_copes), 'copemerge.in_files'), + (('conestimate.varcopes', sort_copes), 'varcopemerge.in_files'), + (('conestimate.copes', num_copes), 'l2model.num_copes'), ]) ]) @@ -510,10 +517,10 @@ def num_copes(files): # Specify the location of the data. data_dir = os.path.abspath('data') # Specify the subject directories -subject_list = ['s1'] #, 's3'] +subject_list = ['s1'] # , 's3'] # Map field names to individual subject runs. -info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -539,7 +546,7 @@ def num_copes(files): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -551,8 +558,8 @@ def num_copes(files): """ smoothnode = firstlevel.get_node('preproc.smooth') -assert(str(smoothnode)=='preproc.smooth') -smoothnode.iterables = ('fwhm', [5.,10.]) +assert(str(smoothnode) == 'preproc.smooth') +smoothnode.iterables = ('fwhm', [5., 10.]) hpcutoff = 120 TR = 3. # ensure float @@ -569,14 +576,15 @@ def num_copes(files): `doc/examples` folder. Note: Python knowledge required here. """ + def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy print("Subject ID: %s\n" % str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -596,17 +604,17 @@ def subjectinfo(subject_id): described above. 
""" -cont1 = ['Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]] -cont2 = ['Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]] -cont3 = ['Task','F', [cont1, cont2]] -contrasts = [cont1,cont2] +cont1 = ['Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]] +cont2 = ['Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]] +cont3 = ['Task', 'F', [cont1, cont2]] +contrasts = [cont1, cont2] firstlevel.inputs.modelfit.modelspec.input_units = 'secs' firstlevel.inputs.modelfit.modelspec.time_repetition = TR firstlevel.inputs.modelfit.modelspec.high_pass_filter_cutoff = hpcutoff firstlevel.inputs.modelfit.level1design.interscan_interval = TR -firstlevel.inputs.modelfit.level1design.bases = {'dgamma':{'derivs': False}} +firstlevel.inputs.modelfit.level1design.bases = {'dgamma': {'derivs': False}} firstlevel.inputs.modelfit.level1design.contrasts = contrasts firstlevel.inputs.modelfit.level1design.model_serial_correlations = True @@ -615,13 +623,13 @@ def subjectinfo(subject_id): ======================== """ -l1pipeline = pe.Workflow(name= "level1") +l1pipeline = pe.Workflow(name="level1") l1pipeline.base_dir = os.path.abspath('./fsl/workingdir') -l1pipeline.config = {"execution": {"crashdump_dir":os.path.abspath('./fsl/crashdumps')}} +l1pipeline.config = {"execution": {"crashdump_dir": os.path.abspath('./fsl/crashdumps')}} l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]), (infosource, firstlevel, [(('subject_id', subjectinfo), 'modelfit.modelspec.subject_info')]), - (datasource, firstlevel, [('struct','preproc.inputspec.struct'), + (datasource, firstlevel, [('struct', 'preproc.inputspec.struct'), ('func', 'preproc.inputspec.func'), ]), ]) @@ -639,6 +647,6 @@ def subjectinfo(subject_id): if __name__ == '__main__': l1pipeline.write_graph() outgraph = l1pipeline.run() - #l1pipeline.run(plugin='MultiProc', plugin_args={'n_procs':2}) + # l1pipeline.run(plugin='MultiProc', plugin_args={'n_procs':2}) diff --git a/examples/fmri_fsl_feeds.py b/examples/fmri_fsl_feeds.py index c065146bb2..f564074d20 100755 --- a/examples/fmri_fsl_feeds.py +++ b/examples/fmri_fsl_feeds.py @@ -62,7 +62,7 @@ """ datasource = pe.Node(interface=nio.DataGrabber(outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = feeds_data_dir datasource.inputs.template = '%s.nii.gz' datasource.inputs.template_args = info @@ -78,8 +78,8 @@ modelspec.inputs.input_units = 'secs' modelspec.inputs.time_repetition = TR modelspec.inputs.high_pass_filter_cutoff = 100 -modelspec.inputs.subject_info = [Bunch(conditions=['Visual','Auditory'], - onsets=[list(range(0,int(180*TR),60)),list(range(0,int(180*TR),90))], +modelspec.inputs.subject_info = [Bunch(conditions=['Visual', 'Auditory'], + onsets=[list(range(0, int(180*TR), 60)), list(range(0, int(180*TR), 90))], durations=[[30], [45]], amplitudes=None, tmod=None, @@ -91,10 +91,10 @@ modelfit.inputs.inputspec.interscan_interval = TR modelfit.inputs.inputspec.model_serial_correlations = True modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}} -cont1 = ['Visual>Baseline','T', ['Visual','Auditory'],[1,0]] -cont2 = ['Auditory>Baseline','T', ['Visual','Auditory'],[0,1]] -cont3 = ['Task','F', [cont1, cont2]] -modelfit.inputs.inputspec.contrasts = [cont1,cont2,cont3] +cont1 = ['Visual>Baseline', 'T', ['Visual', 'Auditory'], [1, 0]] +cont2 = ['Auditory>Baseline', 'T', ['Visual', 'Auditory'], [0, 1]] +cont3 = ['Task', 'F', [cont1, cont2]] +modelfit.inputs.inputspec.contrasts = [cont1, cont2, 
cont3] registration = create_reg_workflow() registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz') @@ -106,11 +106,11 @@ ======================== """ -l1pipeline = pe.Workflow(name= "level1") +l1pipeline = pe.Workflow(name="level1") l1pipeline.base_dir = os.path.abspath('./fsl_feeds/workingdir') -l1pipeline.config = {"execution": {"crashdump_dir":os.path.abspath('./fsl_feeds/crashdumps')}} +l1pipeline.config = {"execution": {"crashdump_dir": os.path.abspath('./fsl_feeds/crashdumps')}} -l1pipeline.connect(datasource, 'func', preproc,'inputspec.func') +l1pipeline.connect(datasource, 'func', preproc, 'inputspec.func') l1pipeline.connect(preproc, 'outputspec.highpassed_files', modelspec, 'functional_runs') l1pipeline.connect(preproc, 'outputspec.motion_parameters', modelspec, 'realignment_parameters') l1pipeline.connect(modelspec, 'session_info', modelfit, 'inputspec.session_info') diff --git a/examples/fmri_fsl_reuse.py b/examples/fmri_fsl_reuse.py index 8d0ea99e34..8dd05b19ab 100755 --- a/examples/fmri_fsl_reuse.py +++ b/examples/fmri_fsl_reuse.py @@ -56,12 +56,12 @@ and modelfitting workflows. """ -art = pe.MapNode(interface=ra.ArtifactDetect(use_differences = [True, False], - use_norm = True, - norm_threshold = 1, - zintensity_threshold = 3, - parameter_source = 'FSL', - mask_type = 'file'), +art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False], + use_norm=True, + norm_threshold=1, + zintensity_threshold=3, + parameter_source='FSL', + mask_type='file'), iterfield=['realigned_files', 'realignment_parameters', 'mask_file'], name="art") @@ -69,9 +69,9 @@ level1_workflow.connect([(preproc, art, [('outputspec.motion_parameters', 'realignment_parameters'), - ('outputspec.realigned_files', - 'realigned_files'), - ('outputspec.mask', 'mask_file')]), + ('outputspec.realigned_files', + 'realigned_files'), + ('outputspec.mask', 'mask_file')]), (preproc, modelspec, [('outputspec.highpassed_files', 'functional_runs'), ('outputspec.motion_parameters', @@ -79,7 +79,7 @@ (art, modelspec, [('outlier_files', 'outlier_files')]), (modelspec, modelfit, [('session_info', 'inputspec.session_info')]), (preproc, modelfit, [('outputspec.highpassed_files', 'inputspec.functional_data')]) - ]) + ]) """ @@ -88,26 +88,28 @@ """ + def sort_copes(files): numelements = len(files[0]) outfiles = [] for i in range(numelements): - outfiles.insert(i,[]) + outfiles.insert(i, []) for j, elements in enumerate(files): outfiles[i].append(elements[i]) return outfiles + def num_copes(files): return len(files) -pickfirst = lambda x : x[0] +pickfirst = lambda x: x[0] level1_workflow.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst), 'flameo.mask_file')]), (modelfit, fixed_fx, [(('outputspec.copes', sort_copes), 'inputspec.copes'), - ('outputspec.dof_file', - 'inputspec.dof_files'), + ('outputspec.dof_file', + 'inputspec.dof_files'), (('outputspec.varcopes', sort_copes), 'inputspec.varcopes'), @@ -140,10 +142,10 @@ def num_copes(files): # Specify the location of the data. data_dir = os.path.abspath('data') # Specify the subject directories -subject_list = ['s1'] #, 's3'] +subject_list = ['s1'] # , 's3'] # Map field names to individual subject runs. 
-info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -169,7 +171,7 @@ def num_copes(files): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -181,7 +183,7 @@ def num_copes(files): """ inputnode = level1_workflow.get_node('featpreproc.inputspec') -inputnode.iterables = ('fwhm', [5.,10.]) +inputnode.iterables = ('fwhm', [5., 10.]) hpcutoff = 120. TR = 3. @@ -196,14 +198,15 @@ def num_copes(files): `doc/examples` folder. Note: Python knowledge required here. """ + def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n"%str(subject_id)) + print("Subject ID: %s\n" %str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -218,17 +221,17 @@ def subjectinfo(subject_id): described above. """ -cont1 = ['Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]] -cont2 = ['Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]] -cont3 = ['Task','F', [cont1, cont2]] -contrasts = [cont1,cont2] +cont1 = ['Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]] +cont2 = ['Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]] +cont3 = ['Task', 'F', [cont1, cont2]] +contrasts = [cont1, cont2] modelspec.inputs.input_units = 'secs' modelspec.inputs.time_repetition = TR modelspec.inputs.high_pass_filter_cutoff = hpcutoff modelfit.inputs.inputspec.interscan_interval = TR -modelfit.inputs.inputspec.bases = {'dgamma':{'derivs': False}} +modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': False}} modelfit.inputs.inputspec.contrasts = contrasts modelfit.inputs.inputspec.model_serial_correlations = True modelfit.inputs.inputspec.film_threshold = 1000 @@ -240,7 +243,7 @@ def subjectinfo(subject_id): (infosource, modelspec, [(('subject_id', subjectinfo), 'subject_info')]), (datasource, preproc, [('func', 'inputspec.func')]), - ]) + ]) """ Execute the pipeline @@ -253,8 +256,8 @@ def subjectinfo(subject_id): """ if __name__ == '__main__': - #level1_workflow.write_graph() + # level1_workflow.write_graph() level1_workflow.run() - #level1_workflow.run(plugin='MultiProc', plugin_args={'n_procs':2}) + # level1_workflow.run(plugin='MultiProc', plugin_args={'n_procs':2}) diff --git a/examples/fmri_nipy_glm.py b/examples/fmri_nipy_glm.py index b1416634cc..f97d7e7189 100755 --- a/examples/fmri_nipy_glm.py +++ b/examples/fmri_nipy_glm.py @@ -70,8 +70,8 @@ # Specify the subject directories subject_list = ['s1'] # Map field names to individual subject runs. 
-info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -100,7 +100,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -121,12 +121,12 @@ """ art = pe.Node(interface=ra.ArtifactDetect(), name="art") -art.inputs.use_differences = [True, False] -art.inputs.use_norm = True -art.inputs.norm_threshold = 1 +art.inputs.use_differences = [True, False] +art.inputs.use_norm = True +art.inputs.norm_threshold = 1 art.inputs.zintensity_threshold = 3 -art.inputs.mask_type = 'file' -art.inputs.parameter_source = 'SPM' +art.inputs.mask_type = 'file' +art.inputs.parameter_source = 'SPM' """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid @@ -141,7 +141,7 @@ :class:`nipype.interfaces.spm.Smooth`. """ -smooth = pe.Node(interface=spm.Smooth(), name = "smooth") +smooth = pe.Node(interface=spm.Smooth(), name="smooth") smooth.inputs.fwhm = 4 """ @@ -159,11 +159,11 @@ def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n"%str(subject_id)) + print("Subject ID: %s\n" %str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -182,20 +182,20 @@ def subjectinfo(subject_id): in the `subjectinfo` function described above. """ -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) -contrasts = [cont1,cont2] +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) +contrasts = [cont1, cont2] """Generate design information using :class:`nipype.interfaces.spm.SpecifyModel`. nipy accepts only design specified in seconds so "output_units" has always have to be set to "secs". """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") -modelspec.inputs.concatenate_runs = True -modelspec.inputs.input_units = 'secs' -modelspec.inputs.output_units = 'secs' -modelspec.inputs.time_repetition = 3. +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") +modelspec.inputs.concatenate_runs = True +modelspec.inputs.input_units = 'secs' +modelspec.inputs.output_units = 'secs' +modelspec.inputs.time_repetition = 3. 
modelspec.inputs.high_pass_filter_cutoff = 120 """Fit the GLM model using nipy and ordinary least square method @@ -211,9 +211,9 @@ def subjectinfo(subject_id): """ contrast_estimate = pe.Node(interface=EstimateContrast(), name="contrast_estimate") -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) -contrast_estimate.inputs.contrasts = [cont1,cont2] +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) +contrast_estimate.inputs.contrasts = [cont1, cont2] """ Setup the pipeline @@ -242,28 +242,28 @@ def subjectinfo(subject_id): l1pipeline.base_dir = os.path.abspath('nipy_tutorial/workingdir') l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,realign,[('func','in_files')]), - (realign, compute_mask, [('mean_image','mean_volume')]), - (realign, coregister,[('mean_image', 'source'), - ('realigned_files','apply_to_files')]), - (datasource, coregister,[('struct', 'target')]), - (coregister, smooth, [('coregistered_files', 'in_files')]), - (realign, modelspec,[('realignment_parameters','realignment_parameters')]), - (smooth, modelspec,[('smoothed_files','functional_runs')]), - (realign, art,[('realignment_parameters','realignment_parameters')]), - (coregister, art,[('coregistered_files','realigned_files')]), - (compute_mask,art,[('brain_mask','mask_file')]), - (art, modelspec,[('outlier_files','outlier_files')]), - (infosource, modelspec, [(("subject_id", subjectinfo), "subject_info")]), - (modelspec, model_estimate,[('session_info','session_info')]), - (compute_mask, model_estimate, [('brain_mask','mask')]), - (model_estimate, contrast_estimate, [("beta","beta"), - ("nvbeta","nvbeta"), - ("s2","s2"), - ("dof", "dof"), - ("axis", "axis"), - ("constants", "constants"), - ("reg_names", "reg_names")]) + (datasource, realign, [('func', 'in_files')]), + (realign, compute_mask, [('mean_image', 'mean_volume')]), + (realign, coregister, [('mean_image', 'source'), + ('realigned_files', 'apply_to_files')]), + (datasource, coregister, [('struct', 'target')]), + (coregister, smooth, [('coregistered_files', 'in_files')]), + (realign, modelspec, [('realignment_parameters', 'realignment_parameters')]), + (smooth, modelspec, [('smoothed_files', 'functional_runs')]), + (realign, art, [('realignment_parameters', 'realignment_parameters')]), + (coregister, art, [('coregistered_files', 'realigned_files')]), + (compute_mask, art, [('brain_mask', 'mask_file')]), + (art, modelspec, [('outlier_files', 'outlier_files')]), + (infosource, modelspec, [(("subject_id", subjectinfo), "subject_info")]), + (modelspec, model_estimate, [('session_info', 'session_info')]), + (compute_mask, model_estimate, [('brain_mask', 'mask')]), + (model_estimate, contrast_estimate, [("beta", "beta"), + ("nvbeta", "nvbeta"), + ("s2", "s2"), + ("dof", "dof"), + ("axis", "axis"), + ("constants", "constants"), + ("reg_names", "reg_names")]) ]) if __name__ == '__main__': diff --git a/examples/fmri_openfmri.py b/examples/fmri_openfmri.py index 70f9c25bc9..dddfcda605 100755 --- a/examples/fmri_openfmri.py +++ b/examples/fmri_openfmri.py @@ -161,11 +161,11 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, datasource.inputs.base_directory = data_dir datasource.inputs.template = '*' datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz', - 'bold': '%s/BOLD/task%03d_r*/bold.nii.gz', - 'behav': 
('%s/model/model%03d/onsets/task%03d_' - 'run%03d/cond*.txt'), - 'contrasts': ('models/model%03d/' - 'task_contrasts.txt')} + 'bold': '%s/BOLD/task%03d_r*/bold.nii.gz', + 'behav': ('%s/model/model%03d/onsets/task%03d_' + 'run%03d/cond*.txt'), + 'contrasts': ('models/model%03d/' + 'task_contrasts.txt')} datasource.inputs.template_args = {'anat': [['subject_id']], 'bold': [['subject_id', 'task_id']], 'behav': [['subject_id', 'model_id', @@ -210,7 +210,7 @@ def get_contrasts(contrast_file, task_id, conds): for row in contrast_def: if row[0] != 'task%03d' % task_id: continue - con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))], + con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))], row[2:].astype(float).tolist()] contrasts.append(con) # add auto contrasts for each column @@ -236,7 +236,7 @@ def get_contrasts(contrast_file, task_id, conds): name="art") modelspec = pe.Node(interface=model.SpecifyModel(), - name="modelspec") + name="modelspec") modelspec.inputs.input_units = 'secs' def check_behav_list(behav): @@ -296,14 +296,14 @@ def num_copes(files): 'flameo.mask_file')]), (modelfit, fixed_fx, [(('outputspec.copes', sort_copes), 'inputspec.copes'), - ('outputspec.dof_file', - 'inputspec.dof_files'), - (('outputspec.varcopes', - sort_copes), - 'inputspec.varcopes'), - (('outputspec.copes', num_copes), - 'l2model.num_copes'), - ]) + ('outputspec.dof_file', + 'inputspec.dof_files'), + (('outputspec.varcopes', + sort_copes), + 'inputspec.varcopes'), + (('outputspec.copes', num_copes), + 'l2model.num_copes'), + ]) ]) wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image') @@ -325,13 +325,13 @@ def merge_files(copes, varcopes, zstats): mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes', 'zstats'], - output_names=['out_files', 'splits'], - function=merge_files), - name='merge_files') + output_names=['out_files', 'splits'], + function=merge_files), + name='merge_files') wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, - [('copes', 'copes'), - ('varcopes', 'varcopes'), - ('zstats', 'zstats'), + [('copes', 'copes'), + ('varcopes', 'varcopes'), + ('zstats', 'zstats'), ])]) wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') @@ -345,24 +345,23 @@ def split_files(in_files, splits): output_names=['copes', 'varcopes', 'zstats'], function=split_files), - name='split_files') + name='split_files') wf.connect(mergefunc, 'splits', splitfunc, 'splits') wf.connect(registration, 'outputspec.transformed_files', splitfunc, 'in_files') - """ Connect to a datasink """ def get_subs(subject_id, conds, model_id, task_id): subs = [('_subject_id_%s_' % subject_id, '')] - subs.append(('_model_id_%d' % model_id, 'model%03d' %model_id)) + subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id)) subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id)) subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp', - 'mean')) + 'mean')) subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt', - 'affine')) + 'affine')) for i in range(len(conds)): subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' 
% (i + 1))) @@ -393,12 +392,12 @@ def get_subs(subject_id, conds, model_id, task_id): wf.connect(contrastgen, 'contrasts', subsgen, 'conds') wf.connect(subsgen, 'substitutions', datasink, 'substitutions') wf.connect([(fixed_fx.get_node('outputspec'), datasink, - [('res4d', 'res4d'), - ('copes', 'copes'), - ('varcopes', 'varcopes'), - ('zstats', 'zstats'), - ('tstats', 'tstats')]) - ]) + [('res4d', 'res4d'), + ('copes', 'copes'), + ('varcopes', 'varcopes'), + ('zstats', 'zstats'), + ('tstats', 'tstats')]) + ]) wf.connect([(splitfunc, datasink, [('copes', 'copes.mni'), ('varcopes', 'varcopes.mni'), @@ -440,7 +439,7 @@ def get_subs(subject_id, conds, model_id, task_id): help="Model index" + defstr) parser.add_argument('-x', '--subjectprefix', default='sub*', help="Subject prefix" + defstr) - parser.add_argument('-t', '--task', default=1, #nargs='+', + parser.add_argument('-t', '--task', default=1, # nargs='+', type=int, help="Task index" + defstr) parser.add_argument("-o", "--output_dir", dest="outdir", help="Output directory base") @@ -463,11 +462,11 @@ def get_subs(subject_id, conds, model_id, task_id): outdir = os.path.join(outdir, 'model%02d' % int(args.model), 'task%03d' % int(args.task)) wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir), - subject=args.subject, - model_id=int(args.model), - task_id=[int(args.task)], - subj_prefix=args.subjectprefix, - output_dir=outdir) + subject=args.subject, + model_id=int(args.model), + task_id=[int(args.task)], + subj_prefix=args.subjectprefix, + output_dir=outdir) wf.base_dir = work_dir if args.plugin_args: wf.run(args.plugin, plugin_args=eval(args.plugin_args)) diff --git a/examples/fmri_slicer_coregistration.py b/examples/fmri_slicer_coregistration.py index 1acf5837f5..daf5bbb9e7 100755 --- a/examples/fmri_slicer_coregistration.py +++ b/examples/fmri_slicer_coregistration.py @@ -13,7 +13,7 @@ """ -#raise RuntimeWarning, 'Slicer not fully implmented' +# raise RuntimeWarning, 'Slicer not fully implmented' from nipype.interfaces.slicer import BRAINSFit, BRAINSResample @@ -63,7 +63,7 @@ subject_list = ['s1', 's3'] # Map field names to individual subject runs. 
info = dict(func=[['subject_id', 'f3']], - struct=[['subject_id','struct']]) + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -92,7 +92,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -110,11 +110,11 @@ pipeline.base_dir = os.path.abspath('slicer_tutorial/workingdir') pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,coregister,[('func','movingVolume')]), - (datasource,coregister,[('struct','fixedVolume')]), - (coregister,reslice,[('outputTransform', 'warpTransform')]), - (datasource,reslice,[('func','inputVolume')]), - (datasource,reslice,[('struct','referenceVolume')]) + (datasource, coregister, [('func', 'movingVolume')]), + (datasource, coregister, [('struct', 'fixedVolume')]), + (coregister, reslice, [('outputTransform', 'warpTransform')]), + (datasource, reslice, [('func', 'inputVolume')]), + (datasource, reslice, [('struct', 'referenceVolume')]) ]) if __name__ == '__main__': diff --git a/examples/fmri_spm.py b/examples/fmri_spm.py index 16538cc124..b3fe92beb3 100755 --- a/examples/fmri_spm.py +++ b/examples/fmri_spm.py @@ -21,7 +21,7 @@ import os # system functions from nipype import config -#config.enable_provenance() +# config.enable_provenance() from nipype.interfaces import spm, fsl @@ -80,8 +80,8 @@ # Specify the subject directories subject_list = ['s1', 's3'] # Map field names to individual subject runs. -info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -110,7 +110,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -129,12 +129,12 @@ """ art = pe.Node(interface=ra.ArtifactDetect(), name="art") -art.inputs.use_differences = [True, False] -art.inputs.use_norm = True -art.inputs.norm_threshold = 1 +art.inputs.use_differences = [True, False] +art.inputs.use_norm = True +art.inputs.norm_threshold = 1 art.inputs.zintensity_threshold = 3 -art.inputs.mask_type = 'file' -art.inputs.parameter_source = 'SPM' +art.inputs.mask_type = 'file' +art.inputs.parameter_source = 'SPM' """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. @@ -156,7 +156,7 @@ includes the template image, T1.nii. """ -normalize = pe.Node(interface=spm.Normalize(), name = "normalize") +normalize = pe.Node(interface=spm.Normalize(), name="normalize") normalize.inputs.template = os.path.abspath('data/T1.nii') @@ -164,9 +164,9 @@ :class:`nipype.interfaces.spm.Smooth`. 
""" -smooth = pe.Node(interface=spm.Smooth(), name = "smooth") +smooth = pe.Node(interface=spm.Smooth(), name="smooth") fwhmlist = [4] -smooth.iterables = ('fwhm',fwhmlist) +smooth.iterables = ('fwhm', fwhmlist) """ Set up analysis components @@ -183,11 +183,11 @@ def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n"%str(subject_id)) + print("Subject ID: %s\n" %str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -201,29 +201,29 @@ def subjectinfo(subject_id): in the `subjectinfo` function described above. """ -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) -contrasts = [cont1,cont2] +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) +contrasts = [cont1, cont2] """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") -modelspec.inputs.concatenate_runs = False -modelspec.inputs.input_units = 'secs' -modelspec.inputs.output_units = 'secs' -modelspec.inputs.time_repetition = 3. +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") +modelspec.inputs.concatenate_runs = False +modelspec.inputs.input_units = 'secs' +modelspec.inputs.output_units = 'secs' +modelspec.inputs.time_repetition = 3. modelspec.inputs.high_pass_filter_cutoff = 120 """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.timing_units = modelspec.inputs.output_units +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.timing_units = modelspec.inputs.output_units level1design.inputs.interscan_interval = modelspec.inputs.time_repetition -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the @@ -231,13 +231,13 @@ def subjectinfo(subject_id): """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. 
""" -contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") contrastestimate.inputs.contrasts = contrasts contrastestimate.overwrite = True contrastestimate.config = {'execution': {'remove_unnecessary_outputs': False}} @@ -269,28 +269,28 @@ def subjectinfo(subject_id): l1pipeline.base_dir = os.path.abspath('spm_tutorial/workingdir') l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,realign,[('func','in_files')]), - (realign,coregister,[('mean_image', 'source'), - ('realigned_files','apply_to_files')]), - (datasource,coregister,[('struct', 'target')]), - (datasource,normalize,[('struct', 'source')]), - (coregister, normalize, [('coregistered_files','apply_to_files')]), - (normalize, smooth, [('normalized_files', 'in_files')]), - (infosource,modelspec,[(('subject_id', subjectinfo), - 'subject_info')]), - (realign,modelspec,[('realignment_parameters','realignment_parameters')]), - (smooth,modelspec,[('smoothed_files','functional_runs')]), - (normalize,skullstrip,[('normalized_source','in_file')]), - (realign,art,[('realignment_parameters','realignment_parameters')]), - (normalize,art,[('normalized_files','realigned_files')]), - (skullstrip,art,[('mask_file','mask_file')]), - (art,modelspec,[('outlier_files','outlier_files')]), - (modelspec,level1design,[('session_info','session_info')]), - (skullstrip,level1design,[('mask_file','mask_image')]), - (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]), - (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), + (datasource, realign, [('func', 'in_files')]), + (realign, coregister, [('mean_image', 'source'), + ('realigned_files', 'apply_to_files')]), + (datasource, coregister, [('struct', 'target')]), + (datasource, normalize, [('struct', 'source')]), + (coregister, normalize, [('coregistered_files', 'apply_to_files')]), + (normalize, smooth, [('normalized_files', 'in_files')]), + (infosource, modelspec, [(('subject_id', subjectinfo), + 'subject_info')]), + (realign, modelspec, [('realignment_parameters', 'realignment_parameters')]), + (smooth, modelspec, [('smoothed_files', 'functional_runs')]), + (normalize, skullstrip, [('normalized_source', 'in_file')]), + (realign, art, [('realignment_parameters', 'realignment_parameters')]), + (normalize, art, [('normalized_files', 'realigned_files')]), + (skullstrip, art, [('mask_file', 'mask_file')]), + (art, modelspec, [('outlier_files', 'outlier_files')]), + (modelspec, level1design, [('session_info', 'session_info')]), + (skullstrip, level1design, [('mask_file', 'mask_image')]), + (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), + (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), ]) @@ -317,25 +317,26 @@ def subjectinfo(subject_id): datasink = pe.Node(interface=nio.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.abspath('spm_tutorial/l1output') + def getstripdir(subject_id): import os - return os.path.join(os.path.abspath('spm_tutorial/workingdir'),'_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('spm_tutorial/workingdir'), '_subject_id_%s' % subject_id) # store relevant outputs from various stages of the 1st level analysis -l1pipeline.connect([(infosource,datasink,[('subject_id','container'), - 
(('subject_id', getstripdir),'strip_dir')]), - (realign,datasink,[('mean_image','realign.@mean'), - ('realignment_parameters','realign.@param')]), - (art,datasink,[('outlier_files','art.@outliers'), - ('statistic_files','art.@stats')]), - (level1design,datasink,[('spm_mat_file','model.pre-estimate')]), - (level1estimate,datasink,[('spm_mat_file','model.@spm'), - ('beta_images','model.@beta'), - ('mask_image','model.@mask'), - ('residual_image','model.@res'), - ('RPVimage','model.@rpv')]), - (contrastestimate,datasink,[('con_images','contrasts.@con'), - ('spmT_images','contrasts.@T')]), +l1pipeline.connect([(infosource, datasink, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (realign, datasink, [('mean_image', 'realign.@mean'), + ('realignment_parameters', 'realign.@param')]), + (art, datasink, [('outlier_files', 'art.@outliers'), + ('statistic_files', 'art.@stats')]), + (level1design, datasink, [('spm_mat_file', 'model.pre-estimate')]), + (level1estimate, datasink, [('spm_mat_file', 'model.@spm'), + ('beta_images', 'model.@beta'), + ('mask_image', 'model.@mask'), + ('residual_image', 'model.@res'), + ('RPVimage', 'model.@rpv')]), + (contrastestimate, datasink, [('con_images', 'contrasts.@con'), + ('spmT_images', 'contrasts.@T')]), ]) @@ -350,13 +351,13 @@ def getstripdir(subject_id): """ # collect all the con images for each contrast. -contrast_ids = list(range(1,len(contrasts)+1)) +contrast_ids = list(range(1, len(contrasts)+1)) l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source") # we use .*i* to capture both .img (SPM8) and .nii (SPM12) -l2source.inputs.template=os.path.abspath('spm_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') +l2source.inputs.template = os.path.abspath('spm_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') # iterate over all contrast images -l2source.iterables = [('fwhm',fwhmlist), - ('con',contrast_ids)] +l2source.iterables = [('fwhm', fwhmlist), + ('con', contrast_ids)] l2source.inputs.sort_filelist = True @@ -368,9 +369,9 @@ def getstripdir(subject_id): # setup a 1-sample t-test node onesamplettestdes = pe.Node(interface=spm.OneSampleTTestDesign(), name="onesampttestdes") l2estimate = pe.Node(interface=spm.EstimateModel(), name="level2estimate") -l2estimate.inputs.estimation_method = {'Classical' : 1} -l2conestimate = pe.Node(interface = spm.EstimateContrast(), name="level2conestimate") -cont1 = ('Group','T', ['mean'],[1]) +l2estimate.inputs.estimation_method = {'Classical': 1} +l2conestimate = pe.Node(interface=spm.EstimateContrast(), name="level2conestimate") +cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True @@ -381,11 +382,11 @@ def getstripdir(subject_id): l2pipeline = pe.Workflow(name="level2") l2pipeline.base_dir = os.path.abspath('spm_tutorial/l2output') -l2pipeline.connect([(l2source,onesamplettestdes,[('outfiles','in_files')]), - (onesamplettestdes,l2estimate,[('spm_mat_file','spm_mat_file')]), - (l2estimate,l2conestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), +l2pipeline.connect([(l2source, onesamplettestdes, [('outfiles', 'in_files')]), + (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]), + (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), ]) """ diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 
f75b3cc43a..1b34cb6b49 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -70,7 +70,7 @@ if merge_to_4d: merge = pe.Node(interface=fsl.Merge(), name="merge") - merge.inputs.dimension="t" + merge.inputs.dimension = "t" """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. @@ -87,23 +87,22 @@ coregister.inputs.jobtype = 'estimate' - segment = pe.Node(interface=spm.Segment(), name="segment") """Uncomment the following line for faster execution """ -#segment.inputs.gaussians_per_class = [1, 1, 1, 4] +# segment.inputs.gaussians_per_class = [1, 1, 1, 4] """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. """ -normalize_func = pe.Node(interface=spm.Normalize(), name = "normalize_func") +normalize_func = pe.Node(interface=spm.Normalize(), name="normalize_func") normalize_func.inputs.jobtype = "write" -normalize_struc = pe.Node(interface=spm.Normalize(), name = "normalize_struc") +normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" @@ -111,13 +110,14 @@ :class:`nipype.interfaces.spm.Smooth`. """ -smooth = pe.Node(interface=spm.Smooth(), name = "smooth") +smooth = pe.Node(interface=spm.Smooth(), name="smooth") """`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data using the following function: """ + def get_vox_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -133,12 +133,12 @@ def get_vox_dims(volume): """ if merge_to_4d: - preproc.connect([(merge, realign,[('merged_file', 'in_files')])]) + preproc.connect([(merge, realign, [('merged_file', 'in_files')])]) -preproc.connect([(realign,coregister,[('mean_image', 'target')]), - (coregister, segment,[('coregistered_source','data')]), - (segment, normalize_func, [('transformation_mat','parameter_file')]), - (segment, normalize_struc, [('transformation_mat','parameter_file'), +preproc.connect([(realign, coregister, [('mean_image', 'target')]), + (coregister, segment, [('coregistered_source', 'data')]), + (segment, normalize_func, [('transformation_mat', 'parameter_file')]), + (segment, normalize_struc, [('transformation_mat', 'parameter_file'), ('modulated_input_image', 'apply_to_files'), (('modulated_input_image', get_vox_dims), 'write_voxel_sizes')]), (realign, normalize_func, [('realigned_files', 'apply_to_files'), @@ -159,21 +159,21 @@ def get_vox_dims(volume): :class:`nipype.interfaces.spm.SpecifyModel`. """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. 
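This is the estimation chain that every first-level script in this patch repeats: Level1Design writes the SPM.mat file, EstimateModel fills in the model parameters, and EstimateContrast (set up a few lines further down) consumes both. A minimal, self-contained sketch of that chain, with illustrative node names and a placeholder contrast that are not taken from the patched files:

import nipype.pipeline.engine as pe
import nipype.interfaces.spm as spm

design = pe.Node(interface=spm.Level1Design(), name="design")
design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
design.inputs.timing_units = 'secs'
design.inputs.interscan_interval = 3.
# design also needs 'session_info', normally wired in from SpecifySPMModel.

estimate = pe.Node(interface=spm.EstimateModel(), name="estimate")
estimate.inputs.estimation_method = {'Classical': 1}

conestimate = pe.Node(interface=spm.EstimateContrast(), name="conestimate")
conestimate.inputs.contrasts = [('Task > Baseline', 'T', ['Task'], [1])]

wf = pe.Workflow(name="estimation_sketch")
wf.connect([(design, estimate, [('spm_mat_file', 'spm_mat_file')]),
            (estimate, conestimate, [('spm_mat_file', 'spm_mat_file'),
                                     ('beta_images', 'beta_images'),
                                     ('residual_image', 'residual_image')])])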
""" level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") @@ -182,15 +182,15 @@ def get_vox_dims(volume): first level contrasts specified in a few steps above. """ -contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") -l1analysis.connect([(modelspec,level1design,[('session_info','session_info')]), - (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]), - (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), - (contrastestimate, threshold,[('spm_mat_file','spm_mat_file'), - ('spmT_images', 'stat_image')]), +l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]), + (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), + (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), + (contrastestimate, threshold, [('spm_mat_file', 'spm_mat_file'), + ('spmT_images', 'stat_image')]), ]) """ @@ -200,7 +200,6 @@ def get_vox_dims(volume): """ - l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) @@ -216,11 +215,10 @@ def get_vox_dims(volume): else: def makelist(item): return [item] - l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files',makelist), + l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) - """ Data specific components ------------------------ @@ -239,7 +237,7 @@ def makelist(item): # Specify the subject directories subject_list = ['M00223'] # Map field names to individual subject runs. -info = dict(func=[['f', 'subject_id', 'f', 'subject_id', list(range(16,100))]], +info = dict(func=[['f', 'subject_id', 'f', 'subject_id', list(range(16, 100))]], struct=[['s', 'subject_id', 's', 'subject_id', 2]]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -265,7 +263,7 @@ def makelist(item): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s%s/%s%s_%03d.img' datasource.inputs.template_args = info @@ -284,8 +282,8 @@ def makelist(item): from nipype.interfaces.base import Bunch subjectinfo = [Bunch(conditions=['Task'], - onsets=[list(range(6,84,12))], - durations=[[6]])] + onsets=[list(range(6, 84, 12))], + durations=[[6]])] """Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the @@ -294,18 +292,18 @@ def makelist(item): in the `subjectinfo` function described above. 
""" -cont1 = ('active > rest','T', ['Task'],[1]) +cont1 = ('active > rest', 'T', ['Task'], [1]) contrasts = [cont1] # set up node specific inputs modelspecref = l1pipeline.inputs.analysis.modelspec -modelspecref.input_units = 'scans' -modelspecref.output_units = 'scans' -modelspecref.time_repetition = 7 +modelspecref.input_units = 'scans' +modelspecref.output_units = 'scans' +modelspecref.time_repetition = 7 modelspecref.high_pass_filter_cutoff = 120 l1designref = l1pipeline.inputs.analysis.level1design -l1designref.timing_units = modelspecref.output_units +l1designref.timing_units = modelspecref.output_units l1designref.interscan_interval = modelspecref.time_repetition l1pipeline.inputs.preproc.smooth.fwhm = [6, 6, 6] @@ -340,12 +338,12 @@ def makelist(item): level1.base_dir = os.path.abspath('spm_auditory_tutorial/workingdir') level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,l1pipeline,[('struct', 'preproc.coregister.source')]) + (datasource, l1pipeline, [('struct', 'preproc.coregister.source')]) ]) if merge_to_4d: - level1.connect([(datasource,l1pipeline,[('func','preproc.merge.in_files')])]) + level1.connect([(datasource, l1pipeline, [('func', 'preproc.merge.in_files')])]) else: - level1.connect([(datasource,l1pipeline,[('func','preproc.realign.in_files')])]) + level1.connect([(datasource, l1pipeline, [('func', 'preproc.realign.in_files')])]) """ @@ -371,15 +369,16 @@ def makelist(item): datasink = pe.Node(interface=nio.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.abspath('spm_auditory_tutorial/l1output') + def getstripdir(subject_id): import os - return os.path.join(os.path.abspath('spm_auditory_tutorial/workingdir'),'_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('spm_auditory_tutorial/workingdir'), '_subject_id_%s' % subject_id) # store relevant outputs from various stages of the 1st level analysis -level1.connect([(infosource, datasink,[('subject_id','container'), - (('subject_id', getstripdir),'strip_dir')]), - (l1pipeline, datasink,[('analysis.contrastestimate.con_images','contrasts.@con'), - ('analysis.contrastestimate.spmT_images','contrasts.@T')]), +level1.connect([(infosource, datasink, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'), + ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]), ]) diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py index 7bd25ff5a7..0c150999ca 100755 --- a/examples/fmri_spm_dartel.py +++ b/examples/fmri_spm_dartel.py @@ -43,8 +43,8 @@ fsl.FSLCommand.set_default_output_type('NIFTI') # Set the way matlab should be called -#mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") -#mlab.MatlabCommand.set_default_paths('/software/spm8') +# mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") +# mlab.MatlabCommand.set_default_paths('/software/spm8') """ Setting up workflows @@ -77,12 +77,12 @@ """ art = pe.Node(interface=ra.ArtifactDetect(), name="art") -art.inputs.use_differences = [True, False] -art.inputs.use_norm = True -art.inputs.norm_threshold = 1 +art.inputs.use_differences = [True, False] +art.inputs.use_norm = True +art.inputs.norm_threshold = 1 art.inputs.zintensity_threshold = 3 -art.inputs.mask_type = 'file' -art.inputs.parameter_source = 'SPM' +art.inputs.mask_type = 'file' +art.inputs.parameter_source = 'SPM' """Skull strip structural images using 
:class:`nipype.interfaces.fsl.BET`. @@ -104,7 +104,7 @@ normalize_and_smooth_func = pe.Node(spm.DARTELNorm2MNI(modulate=True), name='normalize_and_smooth_func') fwhmlist = [4] -normalize_and_smooth_func.iterables = ('fwhm',fwhmlist) +normalize_and_smooth_func.iterables = ('fwhm', fwhmlist) """Normalize structural data using DARTEL template """ @@ -112,13 +112,13 @@ normalize_struct = pe.Node(spm.DARTELNorm2MNI(modulate=True), name='normalize_struct') normalize_struct.inputs.fwhm = 2 -preproc.connect([(realign,coregister,[('mean_image', 'source'), - ('realigned_files','apply_to_files')]), - (coregister, normalize_and_smooth_func, [('coregistered_files','apply_to_files')]), - (normalize_struct,skullstrip,[('normalized_files','in_file')]), - (realign,art,[('realignment_parameters','realignment_parameters')]), - (normalize_and_smooth_func,art,[('normalized_files','realigned_files')]), - (skullstrip,art,[('mask_file','mask_file')]), +preproc.connect([(realign, coregister, [('mean_image', 'source'), + ('realigned_files', 'apply_to_files')]), + (coregister, normalize_and_smooth_func, [('coregistered_files', 'apply_to_files')]), + (normalize_struct, skullstrip, [('normalized_files', 'in_file')]), + (realign, art, [('realignment_parameters', 'realignment_parameters')]), + (normalize_and_smooth_func, art, [('normalized_files', 'realigned_files')]), + (skullstrip, art, [('mask_file', 'mask_file')]), ]) @@ -134,28 +134,28 @@ :class:`nipype.interfaces.spm.SpecifyModel`. """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") -modelspec.inputs.concatenate_runs = True +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") +modelspec.inputs.concatenate_runs = True """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ -contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. @@ -168,9 +168,9 @@ """ overlaystats = pe.Node(interface=fsl.Overlay(), name="overlaystats") -overlaystats.inputs.stat_thresh = (3,10) -overlaystats.inputs.show_negative_stats=True -overlaystats.inputs.auto_thresh_bg=True +overlaystats.inputs.stat_thresh = (3, 10) +overlaystats.inputs.show_negative_stats = True +overlaystats.inputs.auto_thresh_bg = True """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. 
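The reporting tail is small enough to read on its own: Overlay paints the thresholded statistic onto the normalized structural image, and Slicer renders an axial mosaic from the result. A minimal sketch with illustrative node names; the input values mirror the ones set in this hunk, and the background and statistic images arrive from upstream nodes:

import nipype.pipeline.engine as pe
import nipype.interfaces.fsl as fsl

overlaystats = pe.Node(interface=fsl.Overlay(), name="overlaystats")
overlaystats.inputs.stat_thresh = (3, 10)        # display t values between 3 and 10
overlaystats.inputs.show_negative_stats = True
overlaystats.inputs.auto_thresh_bg = True

slicestats = pe.Node(interface=fsl.Slicer(), name="slicestats")
slicestats.inputs.all_axial = True
slicestats.inputs.image_width = 750

report = pe.Workflow(name="report_sketch")
report.connect([(overlaystats, slicestats, [('out_file', 'in_file')])])
# 'background_image' and 'stat_image' on overlaystats are connected upstream,
# e.g. from normalize_struct and the selected spmT image.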
@@ -180,14 +180,14 @@ slicestats.inputs.all_axial = True slicestats.inputs.image_width = 750 -l1analysis.connect([(modelspec,level1design,[('session_info','session_info')]), - (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]), - (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), - (contrastestimate,selectcontrast,[('spmT_images','inlist')]), - (selectcontrast,overlaystats,[('out','stat_image')]), - (overlaystats,slicestats,[('out_file','in_file')]) +l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]), + (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), + (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), + (contrastestimate, selectcontrast, [('spmT_images', 'inlist')]), + (selectcontrast, overlaystats, [('out', 'stat_image')]), + (overlaystats, slicestats, [('out_file', 'in_file')]) ]) """ @@ -207,7 +207,7 @@ 'level1design.mask_image'), ('normalize_struct.normalized_files', 'overlaystats.background_image')]), - ]) + ]) """ @@ -236,8 +236,8 @@ # Specify the subject directories subject_list = ['s1', 's3'] # Map field names to individual subject runs. -info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -262,7 +262,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -273,11 +273,11 @@ datasource_dartel = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct']), - name = 'datasource_dartel', - iterfield = ['subject_id']) + name='datasource_dartel', + iterfield=['subject_id']) datasource_dartel.inputs.base_directory = data_dir datasource_dartel.inputs.template = '%s/%s.nii' -datasource_dartel.inputs.template_args = dict(struct=[['subject_id','struct']]) +datasource_dartel.inputs.template_args = dict(struct=[['subject_id', 'struct']]) datasource_dartel.inputs.sort_filelist = True datasource_dartel.inputs.subject_id = subject_list @@ -287,7 +287,7 @@ rename_dartel = pe.MapNode(util.Rename(format_string="subject_id_%(subject_id)s_struct"), iterfield=['in_file', 'subject_id'], - name = 'rename_dartel') + name='rename_dartel') rename_dartel.inputs.subject_id = subject_list rename_dartel.inputs.keep_ext = True @@ -297,11 +297,12 @@ """This function will allow to pick the right field flow for each subject """ + def pickFieldFlow(dartel_flow_fields, subject_id): from nipype.utils.filemanip import split_filename for f in dartel_flow_fields: _, name, _ = split_filename(f) - if name.find("subject_id_%s"%subject_id): + if name.find("subject_id_%s" %subject_id): return f raise Exception @@ -309,8 +310,8 @@ def pickFieldFlow(dartel_flow_fields, subject_id): pick_flow = pe.Node(util.Function(input_names=['dartel_flow_fields', 'subject_id'], output_names=['dartel_flow_field'], - function = pickFieldFlow), - name = "pick_flow") + function=pickFieldFlow), + name="pick_flow") """ Experimental paradigm specific components @@ -323,14 +324,15 @@ def pickFieldFlow(dartel_flow_fields, 
subject_id): paradigm was used for every participant. """ + def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n"%str(subject_id)) + print("Subject ID: %s\n" %str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -349,19 +351,19 @@ def subjectinfo(subject_id): in the `subjectinfo` function described above. """ -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) -contrasts = [cont1,cont2] +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) +contrasts = [cont1, cont2] # set up node specific inputs modelspecref = l1pipeline.inputs.analysis.modelspec -modelspecref.input_units = 'secs' -modelspecref.output_units = 'secs' -modelspecref.time_repetition = 3. +modelspecref.input_units = 'secs' +modelspecref.output_units = 'secs' +modelspecref.time_repetition = 3. modelspecref.high_pass_filter_cutoff = 120 l1designref = l1pipeline.inputs.analysis.level1design -l1designref.timing_units = modelspecref.output_units +l1designref.timing_units = modelspecref.output_units l1designref.interscan_interval = modelspecref.time_repetition @@ -369,7 +371,7 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. -selectcontrast.iterables = ('index',[[i] for i in range(len(contrasts))]) +selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) """ Setup the pipeline @@ -398,20 +400,20 @@ def subjectinfo(subject_id): level1.base_dir = os.path.abspath('spm_dartel_tutorial/workingdir') level1.connect([(datasource_dartel, rename_dartel, [('struct', 'in_file')]), - (rename_dartel, dartel_workflow, [('out_file','inputspec.structural_files')]), + (rename_dartel, dartel_workflow, [('out_file', 'inputspec.structural_files')]), (infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,l1pipeline,[('func','preproc.realign.in_files'), + (datasource, l1pipeline, [('func', 'preproc.realign.in_files'), ('struct', 'preproc.coregister.target'), ('struct', 'preproc.normalize_struct.apply_to_files')]), (dartel_workflow, l1pipeline, [('outputspec.template_file', 'preproc.normalize_struct.template_file'), - ('outputspec.template_file', 'preproc.normalize_and_smooth_func.template_file')]), + ('outputspec.template_file', 'preproc.normalize_and_smooth_func.template_file')]), (infosource, pick_flow, [('subject_id', 'subject_id')]), (dartel_workflow, pick_flow, [('outputspec.flow_fields', 'dartel_flow_fields')]), (pick_flow, l1pipeline, [('dartel_flow_field', 'preproc.normalize_struct.flowfield_files'), ('dartel_flow_field', 'preproc.normalize_and_smooth_func.flowfield_files')]), - (infosource,l1pipeline,[(('subject_id', subjectinfo), - 'analysis.modelspec.subject_info')]), + (infosource, l1pipeline, [(('subject_id', subjectinfo), + 'analysis.modelspec.subject_info')]), ]) @@ -441,18 +443,19 @@ def subjectinfo(subject_id): report.inputs.base_directory = os.path.abspath('spm_dartel_tutorial/report') report.inputs.parameterization = False + def getstripdir(subject_id): import os - return os.path.join(os.path.abspath('spm_dartel_tutorial/workingdir'),'_subject_id_%s' % subject_id) + return 
os.path.join(os.path.abspath('spm_dartel_tutorial/workingdir'), '_subject_id_%s' % subject_id) # store relevant outputs from various stages of the 1st level analysis -level1.connect([(infosource, datasink,[('subject_id','container'), - (('subject_id', getstripdir),'strip_dir')]), - (l1pipeline, datasink,[('analysis.contrastestimate.con_images','contrasts.@con'), - ('analysis.contrastestimate.spmT_images','contrasts.@T')]), - (infosource, report,[('subject_id', 'container'), - (('subject_id', getstripdir),'strip_dir')]), - (l1pipeline, report,[('analysis.slicestats.out_file', '@report')]), +level1.connect([(infosource, datasink, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'), + ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]), + (infosource, report, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) @@ -482,13 +485,13 @@ def getstripdir(subject_id): """ # collect all the con images for each contrast. -contrast_ids = list(range(1,len(contrasts)+1)) +contrast_ids = list(range(1, len(contrasts)+1)) l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source") # we use .*i* to capture both .img (SPM8) and .nii (SPM12) -l2source.inputs.template=os.path.abspath('spm_dartel_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') +l2source.inputs.template = os.path.abspath('spm_dartel_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') # iterate over all contrast images -l2source.iterables = [('fwhm',fwhmlist), - ('con',contrast_ids)] +l2source.iterables = [('fwhm', fwhmlist), + ('con', contrast_ids)] l2source.inputs.sort_filelist = True @@ -500,9 +503,9 @@ def getstripdir(subject_id): # setup a 1-sample t-test node onesamplettestdes = pe.Node(interface=spm.OneSampleTTestDesign(), name="onesampttestdes") l2estimate = pe.Node(interface=spm.EstimateModel(), name="level2estimate") -l2estimate.inputs.estimation_method = {'Classical' : 1} -l2conestimate = pe.Node(interface = spm.EstimateContrast(), name="level2conestimate") -cont1 = ('Group','T', ['mean'],[1]) +l2estimate.inputs.estimation_method = {'Classical': 1} +l2conestimate = pe.Node(interface=spm.EstimateContrast(), name="level2conestimate") +cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True @@ -513,11 +516,11 @@ def getstripdir(subject_id): l2pipeline = pe.Workflow(name="level2") l2pipeline.base_dir = os.path.abspath('spm_dartel_tutorial/l2output') -l2pipeline.connect([(l2source,onesamplettestdes,[('outfiles','in_files')]), - (onesamplettestdes,l2estimate,[('spm_mat_file','spm_mat_file')]), - (l2estimate,l2conestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), +l2pipeline.connect([(l2source, onesamplettestdes, [('outfiles', 'in_files')]), + (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]), + (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), ]) """ diff --git a/examples/fmri_spm_face.py b/examples/fmri_spm_face.py index ffdd745bc2..948475e8f5 100755 --- a/examples/fmri_spm_face.py +++ b/examples/fmri_spm_face.py @@ -80,24 +80,23 @@ coregister.inputs.jobtype = 'estimate' - segment = pe.Node(interface=spm.Segment(), name="segment") segment.inputs.save_bias_corrected = True 
"""Uncomment the following line for faster execution """ -#segment.inputs.gaussians_per_class = [1, 1, 1, 4] +# segment.inputs.gaussians_per_class = [1, 1, 1, 4] """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. """ -normalize_func = pe.Node(interface=spm.Normalize(), name = "normalize_func") +normalize_func = pe.Node(interface=spm.Normalize(), name="normalize_func") normalize_func.inputs.jobtype = "write" -normalize_struc = pe.Node(interface=spm.Normalize(), name = "normalize_struc") +normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" @@ -105,13 +104,14 @@ :class:`nipype.interfaces.spm.Smooth`. """ -smooth = pe.Node(interface=spm.Smooth(), name = "smooth") +smooth = pe.Node(interface=spm.Smooth(), name="smooth") """`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data using the following function: """ + def get_vox_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -126,15 +126,15 @@ def get_vox_dims(volume): voxel sizes. """ -preproc.connect([(realign,coregister,[('mean_image', 'target')]), - (coregister, segment,[('coregistered_source','data')]), - (segment, normalize_func, [('transformation_mat','parameter_file')]), - (segment, normalize_struc, [('transformation_mat','parameter_file'), +preproc.connect([(realign, coregister, [('mean_image', 'target')]), + (coregister, segment, [('coregistered_source', 'data')]), + (segment, normalize_func, [('transformation_mat', 'parameter_file')]), + (segment, normalize_struc, [('transformation_mat', 'parameter_file'), ('bias_corrected_image', 'apply_to_files'), (('bias_corrected_image', get_vox_dims), 'write_voxel_sizes')]), (realign, slice_timing, [('realigned_files', 'in_files')]), (slice_timing, normalize_func, [('timecorrected_files', 'apply_to_files'), - (('timecorrected_files', get_vox_dims), 'write_voxel_sizes')]), + (('timecorrected_files', get_vox_dims), 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) @@ -151,20 +151,20 @@ def get_vox_dims(volume): :class:`nipype.interfaces.spm.SpecifyModel`. """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") @@ -173,18 +173,19 @@ def get_vox_dims(volume): first level contrasts specified in a few steps above. 
""" -contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") + def pickfirst(l): return l[0] -l1analysis.connect([(modelspec,level1design,[('session_info','session_info')]), - (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]), - (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), - (contrastestimate, threshold,[('spm_mat_file','spm_mat_file'), - (('spmT_images', pickfirst), 'stat_image')]), +l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]), + (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), + (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), + (contrastestimate, threshold, [('spm_mat_file', 'spm_mat_file'), + (('spmT_images', pickfirst), 'stat_image')]), ]) """ @@ -202,9 +203,10 @@ def pickfirst(l): to make one we need a helper function. """ + def makelist(item): return [item] -l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files',makelist), +l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) @@ -226,7 +228,7 @@ def makelist(item): # Specify the subject directories subject_list = ['M03953'] # Map field names to individual subject runs. -info = dict(func=[['RawEPI', 'subject_id', 5, ["_%04d"%i for i in range(6,357)]]], +info = dict(func=[['RawEPI', 'subject_id', 5, ["_%04d" %i for i in range(6, 357)]]], struct=[['Structural', 'subject_id', 7, '']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), @@ -253,7 +255,7 @@ def makelist(item): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/s%s_%04d%s.img' datasource.inputs.template_args = info @@ -282,13 +284,13 @@ def makelist(item): itemlag = mat['itemlag'][0] subjectinfo = [Bunch(conditions=['N1', 'N2', 'F1', 'F2'], - onsets=[sot[0], sot[1], sot[2], sot[3]], - durations=[[0], [0], [0], [0]], - amplitudes=None, - tmod=None, - pmod=None, - regressor_names=None, - regressors=None)] + onsets=[sot[0], sot[1], sot[2], sot[3]], + durations=[[0], [0], [0], [0]], + amplitudes=None, + tmod=None, + pmod=None, + regressor_names=None, + regressors=None)] """Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the @@ -297,26 +299,26 @@ def makelist(item): in the `subjectinfo` function described above. 
""" -cond1 = ('positive effect of condition','T', ['N1*bf(1)','N2*bf(1)','F1*bf(1)','F2*bf(1)'],[1,1,1,1]) -cond2 = ('positive effect of condition_dtemo','T', ['N1*bf(2)','N2*bf(2)','F1*bf(2)','F2*bf(2)'],[1,1,1,1]) -cond3 = ('positive effect of condition_ddisp','T', ['N1*bf(3)','N2*bf(3)','F1*bf(3)','F2*bf(3)'],[1,1,1,1]) +cond1 = ('positive effect of condition', 'T', ['N1*bf(1)', 'N2*bf(1)', 'F1*bf(1)', 'F2*bf(1)'], [1, 1, 1, 1]) +cond2 = ('positive effect of condition_dtemo', 'T', ['N1*bf(2)', 'N2*bf(2)', 'F1*bf(2)', 'F2*bf(2)'], [1, 1, 1, 1]) +cond3 = ('positive effect of condition_ddisp', 'T', ['N1*bf(3)', 'N2*bf(3)', 'F1*bf(3)', 'F2*bf(3)'], [1, 1, 1, 1]) # non-famous > famous -fam1 = ('positive effect of Fame','T', ['N1*bf(1)','N2*bf(1)','F1*bf(1)','F2*bf(1)'],[1,1,-1,-1]) -fam2 = ('positive effect of Fame_dtemp','T', ['N1*bf(2)','N2*bf(2)','F1*bf(2)','F2*bf(2)'],[1,1,-1,-1]) -fam3 = ('positive effect of Fame_ddisp','T', ['N1*bf(3)','N2*bf(3)','F1*bf(3)','F2*bf(3)'],[1,1,-1,-1]) +fam1 = ('positive effect of Fame', 'T', ['N1*bf(1)', 'N2*bf(1)', 'F1*bf(1)', 'F2*bf(1)'], [1, 1, -1, -1]) +fam2 = ('positive effect of Fame_dtemp', 'T', ['N1*bf(2)', 'N2*bf(2)', 'F1*bf(2)', 'F2*bf(2)'], [1, 1, -1, -1]) +fam3 = ('positive effect of Fame_ddisp', 'T', ['N1*bf(3)', 'N2*bf(3)', 'F1*bf(3)', 'F2*bf(3)'], [1, 1, -1, -1]) # rep1 > rep2 -rep1 = ('positive effect of Rep','T', ['N1*bf(1)','N2*bf(1)','F1*bf(1)','F2*bf(1)'],[1,-1,1,-1]) -rep2 = ('positive effect of Rep_dtemp','T', ['N1*bf(2)','N2*bf(2)','F1*bf(2)','F2*bf(2)'],[1,-1,1,-1]) -rep3 = ('positive effect of Rep_ddisp','T', ['N1*bf(3)','N2*bf(3)','F1*bf(3)','F2*bf(3)'],[1,-1,1,-1]) -int1 = ('positive interaction of Fame x Rep','T', ['N1*bf(1)','N2*bf(1)','F1*bf(1)','F2*bf(1)'],[-1,-1,-1,1]) -int2 = ('positive interaction of Fame x Rep_dtemp','T', ['N1*bf(2)','N2*bf(2)','F1*bf(2)','F2*bf(2)'],[1,-1,-1,1]) -int3 = ('positive interaction of Fame x Rep_ddisp','T', ['N1*bf(3)','N2*bf(3)','F1*bf(3)','F2*bf(3)'],[1,-1,-1,1]) - -contf1 = ['average effect condition','F', [cond1, cond2, cond3]] +rep1 = ('positive effect of Rep', 'T', ['N1*bf(1)', 'N2*bf(1)', 'F1*bf(1)', 'F2*bf(1)'], [1, -1, 1, -1]) +rep2 = ('positive effect of Rep_dtemp', 'T', ['N1*bf(2)', 'N2*bf(2)', 'F1*bf(2)', 'F2*bf(2)'], [1, -1, 1, -1]) +rep3 = ('positive effect of Rep_ddisp', 'T', ['N1*bf(3)', 'N2*bf(3)', 'F1*bf(3)', 'F2*bf(3)'], [1, -1, 1, -1]) +int1 = ('positive interaction of Fame x Rep', 'T', ['N1*bf(1)', 'N2*bf(1)', 'F1*bf(1)', 'F2*bf(1)'], [-1, -1, -1, 1]) +int2 = ('positive interaction of Fame x Rep_dtemp', 'T', ['N1*bf(2)', 'N2*bf(2)', 'F1*bf(2)', 'F2*bf(2)'], [1, -1, -1, 1]) +int3 = ('positive interaction of Fame x Rep_ddisp', 'T', ['N1*bf(3)', 'N2*bf(3)', 'F1*bf(3)', 'F2*bf(3)'], [1, -1, -1, 1]) + +contf1 = ['average effect condition', 'F', [cond1, cond2, cond3]] contf2 = ['main effect Fam', 'F', [fam1, fam2, fam3]] contf3 = ['main effect Rep', 'F', [rep1, rep2, rep3]] contf4 = ['interaction: Fam x Rep', 'F', [int1, int2, int3]] -contrasts = [cond1, cond2, cond3, fam1, fam2, fam3, rep1, rep2, rep3, int1, int2, int3, contf1, contf2,contf3,contf4] +contrasts = [cond1, cond2, cond3, fam1, fam2, fam3, rep1, rep2, rep3, int1, int2, int3, contf1, contf2, contf3, contf4] """Setting up nodes inputs """ @@ -328,31 +330,31 @@ def makelist(item): slice_timingref.num_slices = num_slices slice_timingref.time_repetition = TR slice_timingref.time_acquisition = TR - TR / float(num_slices) -slice_timingref.slice_order = list(range(num_slices,0,-1)) +slice_timingref.slice_order = 
list(range(num_slices, 0, -1)) slice_timingref.ref_slice = int(num_slices / 2) l1pipeline.inputs.preproc.smooth.fwhm = [8, 8, 8] # set up node specific inputs modelspecref = l1pipeline.inputs.analysis.modelspec -modelspecref.input_units = 'scans' -modelspecref.output_units = 'scans' -modelspecref.time_repetition = TR +modelspecref.input_units = 'scans' +modelspecref.output_units = 'scans' +modelspecref.time_repetition = TR modelspecref.high_pass_filter_cutoff = 120 l1designref = l1pipeline.inputs.analysis.level1design -l1designref.timing_units = modelspecref.output_units +l1designref.timing_units = modelspecref.output_units l1designref.interscan_interval = modelspecref.time_repetition l1designref.microtime_resolution = slice_timingref.num_slices l1designref.microtime_onset = slice_timingref.ref_slice -l1designref.bases = {'hrf':{'derivs': [1,1]}} +l1designref.bases = {'hrf': {'derivs': [1, 1]}} """ The following lines automatically inform SPM to create a default set of contrats for a factorial design. """ -#l1designref.factor_info = [dict(name = 'Fame', levels = 2), +# l1designref.factor_info = [dict(name = 'Fame', levels = 2), # dict(name = 'Rep', levels = 2)] l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo @@ -370,36 +372,36 @@ def makelist(item): """ subjectinfo_param = [Bunch(conditions=['N1', 'N2', 'F1', 'F2'], - onsets=[sot[0], sot[1], sot[2], sot[3]], - durations=[[0], [0], [0], [0]], - amplitudes=None, - tmod=None, - pmod=[None, - Bunch(name=['Lag'], - param=itemlag[1].tolist(), - poly=[2]), - None, - Bunch(name=['Lag'], - param=itemlag[3].tolist(), - poly=[2])], - regressor_names=None, - regressors=None)] - -cont1 = ('Famous_lag1','T', ['F2xLag^1'],[1]) -cont2 = ('Famous_lag2','T', ['F2xLag^2'],[1]) + onsets=[sot[0], sot[1], sot[2], sot[3]], + durations=[[0], [0], [0], [0]], + amplitudes=None, + tmod=None, + pmod=[None, + Bunch(name=['Lag'], + param=itemlag[1].tolist(), + poly=[2]), + None, + Bunch(name=['Lag'], + param=itemlag[3].tolist(), + poly=[2])], + regressor_names=None, + regressors=None)] + +cont1 = ('Famous_lag1', 'T', ['F2xLag^1'], [1]) +cont2 = ('Famous_lag2', 'T', ['F2xLag^2'], [1]) fcont1 = ('Famous Lag', 'F', [cont1, cont2]) paramcontrasts = [cont1, cont2, fcont1] paramanalysis = l1analysis.clone(name='paramanalysis') -paramanalysis.inputs.level1design.bases = {'hrf':{'derivs': [0,0]}} +paramanalysis.inputs.level1design.bases = {'hrf': {'derivs': [0, 0]}} paramanalysis.inputs.modelspec.subject_info = subjectinfo_param paramanalysis.inputs.contrastestimate.contrasts = paramcontrasts paramanalysis.inputs.contrastestimate.use_derivs = False l1pipeline.connect([(preproc, paramanalysis, [('realign.realignment_parameters', - 'modelspec.realignment_parameters'), - (('smooth.smoothed_files',makelist), + 'modelspec.realignment_parameters'), + (('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) """ @@ -429,8 +431,8 @@ def makelist(item): level1.base_dir = os.path.abspath('spm_face_tutorial/workingdir') level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,l1pipeline,[('struct', 'preproc.coregister.source'), - ('func','preproc.realign.in_files')]) + (datasource, l1pipeline, [('struct', 'preproc.coregister.source'), + ('func', 'preproc.realign.in_files')]) ]) @@ -457,17 +459,18 @@ def makelist(item): datasink = pe.Node(interface=nio.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.abspath('spm_auditory_tutorial/l1output') + def getstripdir(subject_id): import os - return 
os.path.join(os.path.abspath('spm_auditory_tutorial/workingdir'),'_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('spm_auditory_tutorial/workingdir'), '_subject_id_%s' % subject_id) # store relevant outputs from various stages of the 1st level analysis -level1.connect([(infosource, datasink,[('subject_id','container'), - (('subject_id', getstripdir),'strip_dir')]), - (l1pipeline, datasink,[('analysis.contrastestimate.con_images','contrasts.@con'), - ('analysis.contrastestimate.spmT_images','contrasts.@T'), - ('paramanalysis.contrastestimate.con_images','paramcontrasts.@con'), - ('paramanalysis.contrastestimate.spmT_images','paramcontrasts.@T')]), +level1.connect([(infosource, datasink, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'), + ('analysis.contrastestimate.spmT_images', 'contrasts.@T'), + ('paramanalysis.contrastestimate.con_images', 'paramcontrasts.@con'), + ('paramanalysis.contrastestimate.spmT_images', 'paramcontrasts.@T')]), ]) diff --git a/examples/fmri_spm_nested.py b/examples/fmri_spm_nested.py index 738a97aded..28d3d0e755 100755 --- a/examples/fmri_spm_nested.py +++ b/examples/fmri_spm_nested.py @@ -43,8 +43,8 @@ fsl.FSLCommand.set_default_output_type('NIFTI') # Set the way matlab should be called -#mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") -#mlab.MatlabCommand.set_default_paths('/software/spm8') +# mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") +# mlab.MatlabCommand.set_default_paths('/software/spm8') """ @@ -78,12 +78,12 @@ """ art = pe.Node(interface=ra.ArtifactDetect(), name="art") -art.inputs.use_differences = [True, False] -art.inputs.use_norm = True -art.inputs.norm_threshold = 1 +art.inputs.use_differences = [True, False] +art.inputs.use_norm = True +art.inputs.norm_threshold = 1 art.inputs.zintensity_threshold = 3 -art.inputs.mask_type = 'file' -art.inputs.parameter_source = 'SPM' +art.inputs.mask_type = 'file' +art.inputs.parameter_source = 'SPM' """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. @@ -105,7 +105,7 @@ includes the template image, T1.nii. """ -normalize = pe.Node(interface=spm.Normalize(), name = "normalize") +normalize = pe.Node(interface=spm.Normalize(), name="normalize") normalize.inputs.template = os.path.abspath('data/T1.nii') @@ -113,18 +113,18 @@ :class:`nipype.interfaces.spm.Smooth`. 
""" -smooth = pe.Node(interface=spm.Smooth(), name = "smooth") +smooth = pe.Node(interface=spm.Smooth(), name="smooth") fwhmlist = [4] -smooth.iterables = ('fwhm',fwhmlist) +smooth.iterables = ('fwhm', fwhmlist) -preproc.connect([(realign,coregister,[('mean_image', 'source'), - ('realigned_files','apply_to_files')]), - (coregister, normalize, [('coregistered_files','apply_to_files')]), +preproc.connect([(realign, coregister, [('mean_image', 'source'), + ('realigned_files', 'apply_to_files')]), + (coregister, normalize, [('coregistered_files', 'apply_to_files')]), (normalize, smooth, [('normalized_files', 'in_files')]), - (normalize,skullstrip,[('normalized_source','in_file')]), - (realign,art,[('realignment_parameters','realignment_parameters')]), - (normalize,art,[('normalized_files','realigned_files')]), - (skullstrip,art,[('mask_file','mask_file')]), + (normalize, skullstrip, [('normalized_source', 'in_file')]), + (realign, art, [('realignment_parameters', 'realignment_parameters')]), + (normalize, art, [('normalized_files', 'realigned_files')]), + (skullstrip, art, [('mask_file', 'mask_file')]), ]) @@ -140,28 +140,28 @@ :class:`nipype.interfaces.spm.SpecifyModel`. """ -modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec") -modelspec.inputs.concatenate_runs = True +modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") +modelspec.inputs.concatenate_runs = True """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ -contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate") +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. @@ -174,9 +174,9 @@ """ overlaystats = pe.Node(interface=fsl.Overlay(), name="overlaystats") -overlaystats.inputs.stat_thresh = (3,10) -overlaystats.inputs.show_negative_stats=True -overlaystats.inputs.auto_thresh_bg=True +overlaystats.inputs.stat_thresh = (3, 10) +overlaystats.inputs.show_negative_stats = True +overlaystats.inputs.auto_thresh_bg = True """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. 
@@ -186,14 +186,14 @@ slicestats.inputs.all_axial = True slicestats.inputs.image_width = 750 -l1analysis.connect([(modelspec,level1design,[('session_info','session_info')]), - (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]), - (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), - (contrastestimate,selectcontrast,[('spmT_images','inlist')]), - (selectcontrast,overlaystats,[('out','stat_image')]), - (overlaystats,slicestats,[('out_file','in_file')]) +l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]), + (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), + (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), + (contrastestimate, selectcontrast, [('spmT_images', 'inlist')]), + (selectcontrast, overlaystats, [('out', 'stat_image')]), + (overlaystats, slicestats, [('out_file', 'in_file')]) ]) """ @@ -213,7 +213,7 @@ 'level1design.mask_image'), ('normalize.normalized_source', 'overlaystats.background_image')]), - ]) + ]) """ @@ -242,8 +242,8 @@ # Specify the subject directories subject_list = ['s1', 's3'] # Map field names to individual subject runs. -info = dict(func=[['subject_id', ['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) +info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource") @@ -268,7 +268,7 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = data_dir datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info @@ -286,14 +286,15 @@ paradigm was used for every participant. """ + def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n"%str(subject_id)) + print("Subject ID: %s\n" %str(subject_id)) output = [] - names = ['Task-Odd','Task-Even'] + names = ['Task-Odd', 'Task-Even'] for r in range(4): - onsets = [list(range(15,240,60)),list(range(45,240,60))] + onsets = [list(range(15, 240, 60)), list(range(45, 240, 60))] output.insert(r, Bunch(conditions=names, onsets=deepcopy(onsets), @@ -312,19 +313,19 @@ def subjectinfo(subject_id): in the `subjectinfo` function described above. """ -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) -contrasts = [cont1,cont2] +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) +contrasts = [cont1, cont2] # set up node specific inputs modelspecref = l1pipeline.inputs.analysis.modelspec -modelspecref.input_units = 'secs' -modelspecref.output_units = 'secs' -modelspecref.time_repetition = 3. +modelspecref.input_units = 'secs' +modelspecref.output_units = 'secs' +modelspecref.time_repetition = 3. modelspecref.high_pass_filter_cutoff = 120 l1designref = l1pipeline.inputs.analysis.level1design -l1designref.timing_units = modelspecref.output_units +l1designref.timing_units = modelspecref.output_units l1designref.interscan_interval = modelspecref.time_repetition @@ -332,7 +333,7 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. 
-selectcontrast.iterables = ('index',[[i] for i in range(len(contrasts))]) +selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) """ Setup the pipeline @@ -361,11 +362,11 @@ def subjectinfo(subject_id): level1.base_dir = os.path.abspath('spm_tutorial2/workingdir') level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource,l1pipeline,[('func','preproc.realign.in_files'), + (datasource, l1pipeline, [('func', 'preproc.realign.in_files'), ('struct', 'preproc.coregister.target'), ('struct', 'preproc.normalize.source')]), - (infosource,l1pipeline,[(('subject_id', subjectinfo), - 'analysis.modelspec.subject_info')]), + (infosource, l1pipeline, [(('subject_id', subjectinfo), + 'analysis.modelspec.subject_info')]), ]) @@ -395,18 +396,19 @@ def subjectinfo(subject_id): report.inputs.base_directory = os.path.abspath('spm_tutorial2/report') report.inputs.parameterization = False + def getstripdir(subject_id): import os - return os.path.join(os.path.abspath('spm_tutorial2/workingdir'),'_subject_id_%s' % subject_id) + return os.path.join(os.path.abspath('spm_tutorial2/workingdir'), '_subject_id_%s' % subject_id) # store relevant outputs from various stages of the 1st level analysis -level1.connect([(infosource, datasink,[('subject_id','container'), - (('subject_id', getstripdir),'strip_dir')]), - (l1pipeline, datasink,[('analysis.contrastestimate.con_images','contrasts.@con'), - ('analysis.contrastestimate.spmT_images','contrasts.@T')]), - (infosource, report,[('subject_id', 'container'), - (('subject_id', getstripdir),'strip_dir')]), - (l1pipeline, report,[('analysis.slicestats.out_file', '@report')]), +level1.connect([(infosource, datasink, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'), + ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]), + (infosource, report, [('subject_id', 'container'), + (('subject_id', getstripdir), 'strip_dir')]), + (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) @@ -436,13 +438,13 @@ def getstripdir(subject_id): """ # collect all the con images for each contrast. 
-contrast_ids = list(range(1,len(contrasts)+1)) +contrast_ids = list(range(1, len(contrasts)+1)) l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source") # we use .*i* to capture both .img (SPM8) and .nii (SPM12) -l2source.inputs.template=os.path.abspath('spm_tutorial2/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') +l2source.inputs.template = os.path.abspath('spm_tutorial2/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') # iterate over all contrast images -l2source.iterables = [('fwhm',fwhmlist), - ('con',contrast_ids)] +l2source.iterables = [('fwhm', fwhmlist), + ('con', contrast_ids)] l2source.inputs.sort_filelist = True @@ -454,9 +456,9 @@ def getstripdir(subject_id): # setup a 1-sample t-test node onesamplettestdes = pe.Node(interface=spm.OneSampleTTestDesign(), name="onesampttestdes") l2estimate = pe.Node(interface=spm.EstimateModel(), name="level2estimate") -l2estimate.inputs.estimation_method = {'Classical' : 1} -l2conestimate = pe.Node(interface = spm.EstimateContrast(), name="level2conestimate") -cont1 = ('Group','T', ['mean'],[1]) +l2estimate.inputs.estimation_method = {'Classical': 1} +l2conestimate = pe.Node(interface=spm.EstimateContrast(), name="level2conestimate") +cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True @@ -467,11 +469,11 @@ def getstripdir(subject_id): l2pipeline = pe.Workflow(name="level2") l2pipeline.base_dir = os.path.abspath('spm_tutorial2/l2output') -l2pipeline.connect([(l2source,onesamplettestdes,[('outfiles','in_files')]), - (onesamplettestdes,l2estimate,[('spm_mat_file','spm_mat_file')]), - (l2estimate,l2conestimate,[('spm_mat_file','spm_mat_file'), - ('beta_images','beta_images'), - ('residual_image','residual_image')]), +l2pipeline.connect([(l2source, onesamplettestdes, [('outfiles', 'in_files')]), + (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]), + (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), ]) """ diff --git a/examples/frontiers_paper/smoothing_comparison.py b/examples/frontiers_paper/smoothing_comparison.py index 46b4b8bc5e..79bf3e2aeb 100644 --- a/examples/frontiers_paper/smoothing_comparison.py +++ b/examples/frontiers_paper/smoothing_comparison.py @@ -26,10 +26,10 @@ iter_fwhm.iterables = [('fwhm', [4, 8])] iter_smoothing_method = pe.Node(interface=util.IdentityInterface(fields=["smoothing_method"]), - name="iter_smoothing_method") -iter_smoothing_method.iterables = [('smoothing_method',['isotropic_voxel', - 'anisotropic_voxel', - 'isotropic_surface'])] + name="iter_smoothing_method") +iter_smoothing_method.iterables = [('smoothing_method', ['isotropic_voxel', + 'anisotropic_voxel', + 'isotropic_surface'])] realign = pe.Node(interface=spm.Realign(), name="realign") realign.inputs.register_to_mean = True @@ -54,17 +54,16 @@ 'inputnode.mask_file') +recon_all = pe.Node(interface=fs.ReconAll(), name="recon_all") -recon_all = pe.Node(interface=fs.ReconAll(), name = "recon_all") - -surfregister = pe.Node(interface=fs.BBRegister(),name='surfregister') +surfregister = pe.Node(interface=fs.BBRegister(), name='surfregister') surfregister.inputs.init = 'fsl' surfregister.inputs.contrast_type = 't2' preprocessing.connect(realign, 'mean_image', surfregister, 'source_file') preprocessing.connect(recon_all, 'subject_id', surfregister, 'subject_id') preprocessing.connect(recon_all, 'subjects_dir', surfregister, 'subjects_dir') -isotropic_surface_smooth = 
pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0,1,0.1)), +isotropic_surface_smooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)), iterfield=['in_file'], name="isotropic_surface_smooth") preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth, @@ -91,9 +90,10 @@ preprocessing.connect(merge_smoothed_files, 'out', select_smoothed_files, 'inlist') + def chooseindex(roi): - return {'isotropic_voxel':list(range(0,4)), 'anisotropic_voxel':list(range(4,8)), - 'isotropic_surface':list(range(8,12))}[roi] + return {'isotropic_voxel': list(range(0, 4)), 'anisotropic_voxel': list(range(4, 8)), + 'isotropic_surface': list(range(8, 12))}[roi] preprocessing.connect(iter_smoothing_method, ("smoothing_method", chooseindex), select_smoothed_files, 'index') @@ -105,34 +105,34 @@ def chooseindex(roi): preprocessing.connect(select_smoothed_files, 'out', rename, 'in_file') specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model") -specify_model.inputs.input_units = 'secs' -specify_model.inputs.time_repetition = 3. +specify_model.inputs.input_units = 'secs' +specify_model.inputs.time_repetition = 3. specify_model.inputs.high_pass_filter_cutoff = 120 -specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd','Task-Even'], - onsets=[list(range(15,240,60)), - list(range(45,240,60))], +specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'], + onsets=[list(range(15, 240, 60)), + list(range(45, 240, 60))], durations=[[15], [15]])]*4 -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} level1design.inputs.timing_units = 'secs' level1design.inputs.interscan_interval = specify_model.inputs.time_repetition level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} -contrastestimate = pe.Node(interface = spm.EstimateContrast(), +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") -contrastestimate.inputs.contrasts = [('Task>Baseline','T', - ['Task-Odd','Task-Even'],[0.5,0.5])] +contrastestimate.inputs.contrasts = [('Task>Baseline', 'T', + ['Task-Odd', 'Task-Even'], [0.5, 0.5])] modelling = pe.Workflow(name="modelling") modelling.connect(specify_model, 'session_info', level1design, 'session_info') modelling.connect(level1design, 'spm_mat_file', level1estimate, 'spm_mat_file') -modelling.connect(level1estimate,'spm_mat_file', contrastestimate, +modelling.connect(level1estimate, 'spm_mat_file', contrastestimate, 'spm_mat_file') -modelling.connect(level1estimate,'beta_images', contrastestimate,'beta_images') -modelling.connect(level1estimate,'residual_image', contrastestimate, +modelling.connect(level1estimate, 'beta_images', contrastestimate, 'beta_images') +modelling.connect(level1estimate, 'residual_image', contrastestimate, 'residual_image') main_workflow = pe.Workflow(name="main_workflow") @@ -146,12 +146,12 @@ def chooseindex(roi): datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = os.path.abspath('data') datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = info = dict(func=[['subject_id', - 
['f3','f5','f7','f10']]], - struct=[['subject_id','struct']]) + ['f3', 'f5', 'f7', 'f10']]], + struct=[['subject_id', 'struct']]) datasource.inputs.subject_id = 's1' datasource.inputs.sort_filelist = True diff --git a/examples/frontiers_paper/workflow_from_scratch.py b/examples/frontiers_paper/workflow_from_scratch.py index 9560737651..533da455fc 100644 --- a/examples/frontiers_paper/workflow_from_scratch.py +++ b/examples/frontiers_paper/workflow_from_scratch.py @@ -63,36 +63,36 @@ the same names of regressors as defined in the SpecifyModel.""" specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model") -specify_model.inputs.input_units = 'secs' -specify_model.inputs.time_repetition = 3. +specify_model.inputs.input_units = 'secs' +specify_model.inputs.time_repetition = 3. specify_model.inputs.high_pass_filter_cutoff = 120 -specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd','Task-Even'], - onsets=[list(range(15,240,60)), - list(range(45,240,60))], +specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'], + onsets=[list(range(15, 240, 60)), + list(range(45, 240, 60))], durations=[[15], [15]])]*4 -level1design = pe.Node(interface=spm.Level1Design(), name= "level1design") -level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} +level1design = pe.Node(interface=spm.Level1Design(), name="level1design") +level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} level1design.inputs.timing_units = 'secs' level1design.inputs.interscan_interval = specify_model.inputs.time_repetition level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") -level1estimate.inputs.estimation_method = {'Classical' : 1} +level1estimate.inputs.estimation_method = {'Classical': 1} -contrastestimate = pe.Node(interface = spm.EstimateContrast(), +contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate") -cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) -cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) +cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5]) +cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1]) contrastestimate.inputs.contrasts = [cont1, cont2] modelling = pe.Workflow(name="modelling") modelling.connect(specify_model, 'session_info', level1design, 'session_info') modelling.connect(level1design, 'spm_mat_file', level1estimate, 'spm_mat_file') -modelling.connect(level1estimate,'spm_mat_file', - contrastestimate,'spm_mat_file') -modelling.connect(level1estimate,'beta_images', contrastestimate,'beta_images') -modelling.connect(level1estimate,'residual_image', - contrastestimate,'residual_image') +modelling.connect(level1estimate, 'spm_mat_file', + contrastestimate, 'spm_mat_file') +modelling.connect(level1estimate, 'beta_images', contrastestimate, 'beta_images') +modelling.connect(level1estimate, 'residual_image', + contrastestimate, 'residual_image') """Having preprocessing and modelling workflows we need to connect them together, add data grabbing facility and save the results. 
For this we will @@ -119,11 +119,11 @@ datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['func']), - name = 'datasource') + name='datasource') datasource.inputs.base_directory = os.path.abspath('data') datasource.inputs.template = '%s/%s.nii' datasource.inputs.template_args = dict(func=[['subject_id', - ['f3','f5','f7','f10']]]) + ['f3', 'f5', 'f7', 'f10']]]) datasource.inputs.subject_id = 's1' datasource.inputs.sort_filelist = True diff --git a/examples/howto_caching_example.py b/examples/howto_caching_example.py index 8a992a70a9..41f9bb5ccd 100644 --- a/examples/howto_caching_example.py +++ b/examples/howto_caching_example.py @@ -31,21 +31,21 @@ # Apply an arbitrary (and pointless, here) threshold to the files) threshold = [mem.cache(fsl.Threshold)(in_file=f, thresh=i) - for i, f in enumerate(in_files)] + for i, f in enumerate(in_files)] # Merge all these files along the time dimension out_merge = mem.cache(fsl.Merge)(dimension="t", - in_files=[t.outputs.out_file for t in threshold], - ) + in_files=[t.outputs.out_file for t in threshold], + ) # And finally compute the mean out_mean = mem.cache(fsl.MeanImage)(in_file=out_merge.outputs.merged_file) # To avoid having increasing disk size we can keep only what was touched # in this run -#mem.clear_previous_runs() +# mem.clear_previous_runs() # or what wasn't used since the start of 2011 -#mem.clear_runs_since(year=2011) +# mem.clear_runs_since(year=2011) diff --git a/examples/rsfmri_vol_surface_preprocessing.py b/examples/rsfmri_vol_surface_preprocessing.py index 408bfd582a..35de677abe 100644 --- a/examples/rsfmri_vol_surface_preprocessing.py +++ b/examples/rsfmri_vol_surface_preprocessing.py @@ -422,7 +422,7 @@ def create_reg_workflow(name='registration'): # Coregister the median to the surface bbregister = Node(freesurfer.BBRegister(), - name='bbregister') + name='bbregister') bbregister.inputs.init = 'fsl' bbregister.inputs.contrast_type = 't2' bbregister.inputs.out_fsl_file = True @@ -484,7 +484,7 @@ def create_reg_workflow(name='registration'): convert2itk.inputs.fsl2ras = True convert2itk.inputs.itk_transform = True register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file') - register.connect(inputnode, 'mean_image',convert2itk, 'source_file') + register.connect(inputnode, 'mean_image', convert2itk, 'source_file') register.connect(stripper, 'out_file', convert2itk, 'reference_file') """ @@ -524,7 +524,7 @@ def create_reg_workflow(name='registration'): reg.inputs.num_threads = 4 reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'} register.connect(stripper, 'out_file', reg, 'moving_image') - register.connect(inputnode,'target_image', reg,'fixed_image') + register.connect(inputnode, 'target_image', reg, 'fixed_image') """ Concatenate the affine and ants transforms into a list @@ -546,7 +546,7 @@ def create_reg_workflow(name='registration'): warpmean.inputs.args = '--float' warpmean.inputs.num_threads = 4 - register.connect(inputnode,'target_image', warpmean,'reference_image') + register.connect(inputnode, 'target_image', warpmean, 'reference_image') register.connect(inputnode, 'mean_image', warpmean, 'input_image') register.connect(merge, 'out', warpmean, 'transforms') @@ -634,7 +634,6 @@ def create_workflow(files, registration.inputs.inputspec.subjects_dir = subjects_dir registration.inputs.inputspec.target_image = target_file - """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. 
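As an illustration of the rapidart step that the docstring above describes (detecting outlier volumes from intensity and motion deviations), a minimal sketch of configuring such a node follows. The threshold values and the node name are assumptions for illustration only; they are not settings taken from this changeset.

import nipype.pipeline.engine as pe
from nipype.algorithms.rapidart import ArtifactDetect

art = pe.Node(ArtifactDetect(), name='art')
art.inputs.use_differences = [True, False]  # scan-to-scan differences for motion, absolute values for intensity
art.inputs.use_norm = True                  # combine the six motion parameters into one composite norm
art.inputs.norm_threshold = 1               # assumed: composite motion (mm) above which a volume is flagged
art.inputs.zintensity_threshold = 3         # assumed: global-intensity z-score cutoff
art.inputs.mask_type = 'spm_global'         # derive the brain mask the way SPM's global calculation does
art.inputs.parameter_source = 'SPM'         # realignment parameters follow SPM's convention

The node's 'outlier_files' output would then feed the nuisance-regressor builder, as the wf.connect(art, 'outlier_files', createfilter1, 'outliers') calls in the surrounding hunks do.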
@@ -648,7 +647,6 @@ def create_workflow(files, art.inputs.mask_type = 'spm_global' art.inputs.parameter_source = 'SPM' - """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal voxel sizes. @@ -698,7 +696,6 @@ def merge_files(in1, in2): wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') - filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii', out_pf_name='pF_mcart.nii', demean=True), @@ -710,7 +707,6 @@ def merge_files(in1, in2): filter1, 'out_res_name') wf.connect(createfilter1, 'out_files', filter1, 'design') - createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'], @@ -726,7 +722,6 @@ def merge_files(in1, in2): wf.connect(registration, ('outputspec.segmentation_files', selectindex, [0, 2]), createfilter2, 'mask_file') - filter2 = MapNode(fsl.GLM(out_f_name='F.nii', out_pf_name='pF.nii', demean=True), @@ -739,11 +734,11 @@ def merge_files(in1, in2): wf.connect(mask, 'mask_file', filter2, 'mask') bandpass = Node(Function(input_names=['files', 'lowpass_freq', - 'highpass_freq', 'fs'], - output_names=['out_files'], - function=bandpass_filter, - imports=imports), - name='bandpass_unsmooth') + 'highpass_freq', 'fs'], + output_names=['out_files'], + function=bandpass_filter, + imports=imports), + name='bandpass_unsmooth') bandpass.inputs.fs = 1. / TR bandpass.inputs.highpass_freq = highpass_freq bandpass.inputs.lowpass_freq = lowpass_freq @@ -839,7 +834,7 @@ def get_names(files, suffix): samplerlh.inputs.sampling_units = "frac" samplerlh.inputs.interp_method = "trilinear" samplerlh.inputs.smooth_surf = surf_fwhm - #samplerlh.inputs.cortex_mask = True + # samplerlh.inputs.cortex_mask = True samplerlh.inputs.out_type = 'niigz' samplerlh.inputs.subjects_dir = subjects_dir @@ -874,7 +869,7 @@ def get_names(files, suffix): iterfield=['timeseries_file'], name='getsubcortts') ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\ - list(range(49, 55)) + [58] + list(range(49, 55)) + [58] ts2txt.inputs.label_file = \ os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_' '2mm_v2.nii.gz')) @@ -903,7 +898,7 @@ def get_names(files, suffix): datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id datasink.inputs.substitutions = substitutions - datasink.inputs.regexp_substitutions = regex_subs #(r'(/_.*(\d+/))', r'/run\2') + datasink.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') wf.connect(realign, 'realignment_parameters', datasink, 'resting.qa.motion') wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') @@ -934,7 +929,7 @@ def get_names(files, suffix): datasink2.inputs.base_directory = sink_directory datasink2.inputs.container = subject_id datasink2.inputs.substitutions = substitutions - datasink2.inputs.regexp_substitutions = regex_subs #(r'(/_.*(\d+/))', r'/run\2') + datasink2.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') wf.connect(combiner, 'out_file', datasink2, 'resting.parcellations.grayo.@surface') return wf diff --git a/examples/rsfmri_vol_surface_preprocessing_nipy.py b/examples/rsfmri_vol_surface_preprocessing_nipy.py index 9055a98866..12abda6987 100644 --- a/examples/rsfmri_vol_surface_preprocessing_nipy.py +++ 
b/examples/rsfmri_vol_surface_preprocessing_nipy.py @@ -55,7 +55,7 @@ from dcmstack.extract import default_extractor from dicom import read_file -from nipype.interfaces import (fsl, Function, ants, freesurfer,nipy) +from nipype.interfaces import (fsl, Function, ants, freesurfer, nipy) from nipype.interfaces.c3 import C3dAffineTool fsl.FSLCommand.set_default_output_type('NIFTI_GZ') @@ -401,7 +401,7 @@ def create_reg_workflow(name='registration'): # Coregister the median to the surface bbregister = Node(freesurfer.BBRegister(), - name='bbregister') + name='bbregister') bbregister.inputs.init = 'fsl' bbregister.inputs.contrast_type = 't2' bbregister.inputs.out_fsl_file = True @@ -416,7 +416,7 @@ def create_reg_workflow(name='registration'): binarize = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1), name="binarize_aparc") register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, "in_file") - stripper = Node(fsl.ApplyMask(), name ='stripper') + stripper = Node(fsl.ApplyMask(), name='stripper') register.connect(binarize, "binary_file", stripper, "mask_file") register.connect(convert, 'out_file', stripper, 'in_file') @@ -466,7 +466,7 @@ def create_reg_workflow(name='registration'): convert2itk.inputs.fsl2ras = True convert2itk.inputs.itk_transform = True register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file') - register.connect(inputnode, 'mean_image',convert2itk, 'source_file') + register.connect(inputnode, 'mean_image', convert2itk, 'source_file') register.connect(stripper, 'out_file', convert2itk, 'reference_file') """ @@ -506,8 +506,7 @@ def create_reg_workflow(name='registration'): reg.inputs.num_threads = 4 reg.plugin_args = {'sbatch_args': '-c%d' % 4} register.connect(stripper, 'out_file', reg, 'moving_image') - register.connect(inputnode,'target_image', reg,'fixed_image') - + register.connect(inputnode, 'target_image', reg, 'fixed_image') """ Concatenate the affine and ants transforms into a list @@ -517,7 +516,6 @@ def create_reg_workflow(name='registration'): register.connect(convert2itk, 'itk_transform', merge, 'in2') register.connect(reg, 'composite_transform', merge, 'in1') - """ Transform the mean image. First to anatomical and then to target """ @@ -531,11 +529,10 @@ def create_reg_workflow(name='registration'): warpmean.inputs.num_threads = 4 warpmean.plugin_args = {'sbatch_args': '-c%d' % 4} - register.connect(inputnode,'target_image', warpmean,'reference_image') + register.connect(inputnode, 'target_image', warpmean, 'reference_image') register.connect(inputnode, 'mean_image', warpmean, 'input_image') register.connect(merge, 'out', warpmean, 'transforms') - """ Assign all the output files """ @@ -596,10 +593,9 @@ def create_workflow(files, realign.inputs.slice_info = 2 realign.plugin_args = {'sbatch_args': '-c%d' % 4} - # Comute TSNR on realigned data regressing polynomials upto order 2 tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr') - wf.connect(realign,"out_file", tsnr, "in_file") + wf.connect(realign, "out_file", tsnr, "in_file") # Compute the median image across runs calc_median = Node(Function(input_names=['in_files'], @@ -640,7 +636,6 @@ def create_workflow(files, art.inputs.mask_type = 'spm_global' art.inputs.parameter_source = 'NiPy' - """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal voxel sizes. 
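The docstring above notes that the get_vox_dims helper is passed along the connection into normalise so that the written voxel sizes match the input data. A minimal, self-contained sketch of that connect-with-function pattern follows; the node names, the Realign/Normalize settings, and the workflow name are assumptions for illustration, not code from this changeset.

import nipype.pipeline.engine as pe
import nipype.interfaces.spm as spm


def get_vox_dims(volume):
    # Read the voxel sizes of the (first) input volume so normalise writes
    # images at the same resolution as the functional data.
    import nibabel as nb
    if isinstance(volume, list):
        volume = volume[0]
    return [float(v) for v in nb.load(volume).get_header().get_zooms()[:3]]


realign = pe.Node(spm.Realign(register_to_mean=True), name='realign')
normalise = pe.Node(spm.Normalize(jobtype='write'), name='normalise')

wf = pe.Workflow(name='voxdims_sketch')
# The (output, function) tuple applies get_vox_dims to 'realigned_files'
# before the result is handed to normalise.write_voxel_sizes.
wf.connect(realign, ('realigned_files', get_vox_dims), normalise, 'write_voxel_sizes')
wf.connect(realign, 'realigned_files', normalise, 'apply_to_files')

Passing the helper inside the connection tuple keeps it out of the graph as a separate node while still evaluating it at run time, which is the pattern both resting-state preprocessing scripts in this diff refer to.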
@@ -689,7 +684,6 @@ def merge_files(in1, in2): wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') - filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii.gz', out_pf_name='pF_mcart.nii.gz', demean=True), @@ -701,7 +695,6 @@ def merge_files(in1, in2): filter1, 'out_res_name') wf.connect(createfilter1, 'out_files', filter1, 'design') - createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'], @@ -717,7 +710,6 @@ def merge_files(in1, in2): wf.connect(registration, ('outputspec.segmentation_files', selectindex, [0, 2]), createfilter2, 'mask_file') - filter2 = MapNode(fsl.GLM(out_f_name='F.nii.gz', out_pf_name='pF.nii.gz', demean=True), @@ -730,11 +722,11 @@ def merge_files(in1, in2): wf.connect(mask, 'mask_file', filter2, 'mask') bandpass = Node(Function(input_names=['files', 'lowpass_freq', - 'highpass_freq', 'fs'], - output_names=['out_files'], - function=bandpass_filter, - imports=imports), - name='bandpass_unsmooth') + 'highpass_freq', 'fs'], + output_names=['out_files'], + function=bandpass_filter, + imports=imports), + name='bandpass_unsmooth') bandpass.inputs.fs = 1. / TR bandpass.inputs.highpass_freq = highpass_freq bandpass.inputs.lowpass_freq = lowpass_freq @@ -811,7 +803,7 @@ def get_names(files, suffix): out_names = [] for filename in files: path, name, _ = split_filename(filename) - out_names.append(os.path.join(path,name + suffix)) + out_names.append(os.path.join(path, name + suffix)) return list_to_filename(out_names) wf.connect(collector, ('out', get_names, '_avgwf.txt'), @@ -832,7 +824,7 @@ def get_names(files, suffix): samplerlh.inputs.sampling_units = "frac" samplerlh.inputs.interp_method = "trilinear" samplerlh.inputs.smooth_surf = surf_fwhm - #samplerlh.inputs.cortex_mask = True + # samplerlh.inputs.cortex_mask = True samplerlh.inputs.out_type = 'niigz' samplerlh.inputs.subjects_dir = subjects_dir @@ -867,7 +859,7 @@ def get_names(files, suffix): iterfield=['timeseries_file'], name='getsubcortts') ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\ - list(range(49, 55)) + [58] + list(range(49, 55)) + [58] ts2txt.inputs.label_file = \ os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_' '2mm_v2.nii.gz')) @@ -879,13 +871,13 @@ def get_names(files, suffix): ('_filtermotart_cleaned_bp_trans_masked', ''), ('_filtermotart_cleaned_bp', ''), ] - substitutions += [("_smooth%d" % i,"") for i in range(11)[::-1]] - substitutions += [("_ts_masker%d" % i,"") for i in range(11)[::-1]] - substitutions += [("_getsubcortts%d" % i,"") for i in range(11)[::-1]] - substitutions += [("_combiner%d" % i,"") for i in range(11)[::-1]] - substitutions += [("_filtermotion%d" % i,"") for i in range(11)[::-1]] - substitutions += [("_filter_noise_nosmooth%d" % i,"") for i in range(11)[::-1]] - substitutions += [("_makecompcorfilter%d" % i,"") for i in range(11)[::-1]] + substitutions += [("_smooth%d" % i, "") for i in range(11)[::-1]] + substitutions += [("_ts_masker%d" % i, "") for i in range(11)[::-1]] + substitutions += [("_getsubcortts%d" % i, "") for i in range(11)[::-1]] + substitutions += [("_combiner%d" % i, "") for i in range(11)[::-1]] + substitutions += [("_filtermotion%d" % i, "") for i in range(11)[::-1]] + substitutions += [("_filter_noise_nosmooth%d" % i, "") for i in range(11)[::-1]] + substitutions += [("_makecompcorfilter%d" % i, "") for i in range(11)[::-1]] substitutions += [("_get_aparc_tsnr%d/" % i, "run%d_" % (i + 1)) for i 
in range(11)[::-1]] substitutions += [("T1_out_brain_pve_0_maths_warped", "compcor_csf"), @@ -910,7 +902,7 @@ def get_names(files, suffix): datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id datasink.inputs.substitutions = substitutions - datasink.inputs.regexp_substitutions = regex_subs #(r'(/_.*(\d+/))', r'/run\2') + datasink.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') @@ -946,7 +938,7 @@ def get_names(files, suffix): datasink2.inputs.base_directory = sink_directory datasink2.inputs.container = subject_id datasink2.inputs.substitutions = substitutions - datasink2.inputs.regexp_substitutions = regex_subs #(r'(/_.*(\d+/))', r'/run\2') + datasink2.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') wf.connect(combiner, 'out_file', datasink2, 'resting.parcellations.grayo.@surface') return wf @@ -956,6 +948,7 @@ def get_names(files, suffix): Creates the full workflow including getting information from dicom files """ + def create_resting_workflow(args, name=None): TR = args.TR slice_times = args.slice_times diff --git a/examples/smri_ants_build_template.py b/examples/smri_ants_build_template.py index 65797b3943..49100be967 100644 --- a/examples/smri_ants_build_template.py +++ b/examples/smri_ants_build_template.py @@ -29,24 +29,24 @@ """ import urllib.request, urllib.error, urllib.parse -homeDir=os.getenv("HOME") -requestedPath=os.path.join(homeDir,'nipypeTestPath') -mydatadir=os.path.realpath(requestedPath) +homeDir = os.getenv("HOME") +requestedPath = os.path.join(homeDir, 'nipypeTestPath') +mydatadir = os.path.realpath(requestedPath) if not os.path.exists(mydatadir): os.makedirs(mydatadir) print(mydatadir) -MyFileURLs=[ - ('http://slicer.kitware.com/midas3/download?bitstream=13121','01_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13122','02_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13124','03_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13128','01_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13123','02_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13125','03_T1_inv_half.nii.gz'), +MyFileURLs = [ + ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13124', '03_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13128', '01_T1_inv_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13123', '02_T1_inv_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13125', '03_T1_inv_half.nii.gz'), ] for tt in MyFileURLs: - myURL=tt[0] - localFilename=os.path.join(mydatadir,tt[1]) + myURL = tt[0] + localFilename = os.path.join(mydatadir, tt[1]) if not os.path.exists(localFilename): remotefile = urllib.request.urlopen(myURL) @@ -57,15 +57,15 @@ else: print("File previously downloaded {0}".format(localFilename)) -input_images=[ -os.path.join(mydatadir,'01_T1_half.nii.gz'), -os.path.join(mydatadir,'02_T1_half.nii.gz'), -os.path.join(mydatadir,'03_T1_half.nii.gz') +input_images = [ + os.path.join(mydatadir, '01_T1_half.nii.gz'), + 
os.path.join(mydatadir, '02_T1_half.nii.gz'), + os.path.join(mydatadir, '03_T1_half.nii.gz') ] -input_passive_images=[ -{'INV_T1':os.path.join(mydatadir,'01_T1_inv_half.nii.gz')}, -{'INV_T1':os.path.join(mydatadir,'02_T1_inv_half.nii.gz')}, -{'INV_T1':os.path.join(mydatadir,'03_T1_inv_half.nii.gz')} +input_passive_images = [ + {'INV_T1': os.path.join(mydatadir, '01_T1_inv_half.nii.gz')}, + {'INV_T1': os.path.join(mydatadir, '02_T1_inv_half.nii.gz')}, + {'INV_T1': os.path.join(mydatadir, '03_T1_inv_half.nii.gz')} ] @@ -73,26 +73,25 @@ 3. Define the workflow and its working directory """ -tbuilder=pe.Workflow(name="ANTSTemplateBuilder") -tbuilder.base_dir=requestedPath +tbuilder = pe.Workflow(name="ANTSTemplateBuilder") +tbuilder.base_dir = requestedPath """ 4. Define data sources. In real life these would be replace by DataGrabbers """ -datasource = pe.Node(interface=util.IdentityInterface(fields= - ['imageList', 'passiveImagesDictionariesList']), - run_without_submitting=True, - name='InputImages' ) -datasource.inputs.imageList=input_images -datasource.inputs.passiveImagesDictionariesList=input_passive_images +datasource = pe.Node(interface=util.IdentityInterface(fields=['imageList', 'passiveImagesDictionariesList']), + run_without_submitting=True, + name='InputImages') +datasource.inputs.imageList = input_images +datasource.inputs.passiveImagesDictionariesList = input_passive_images datasource.inputs.sort_filelist = True """ 5. Template is initialized by a simple average """ -initAvg = pe.Node(interface=ants.AverageImages(), name ='initAvg') +initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg') initAvg.inputs.dimension = 3 initAvg.inputs.normalize = True @@ -102,7 +101,7 @@ 6. Define the first iteration of template building """ -buildTemplateIteration1=ANTSTemplateBuildSingleIterationWF('iteration01') +buildTemplateIteration1 = ANTSTemplateBuildSingleIterationWF('iteration01') tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1, 'inputspec.fixed_image') tbuilder.connect(datasource, 'imageList', buildTemplateIteration1, 'inputspec.images') tbuilder.connect(datasource, 'passiveImagesDictionariesList', buildTemplateIteration1, 'inputspec.ListOfPassiveImagesDictionaries') @@ -123,9 +122,9 @@ datasink = pe.Node(io.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.join(requestedPath, "results") -tbuilder.connect(buildTemplateIteration2, 'outputspec.template',datasink,'PrimaryTemplate') -tbuilder.connect(buildTemplateIteration2, 'outputspec.passive_deformed_templates',datasink,'PassiveTemplate') -tbuilder.connect(initAvg, 'output_average_image', datasink,'PreRegisterAverage') +tbuilder.connect(buildTemplateIteration2, 'outputspec.template', datasink, 'PrimaryTemplate') +tbuilder.connect(buildTemplateIteration2, 'outputspec.passive_deformed_templates', datasink, 'PassiveTemplate') +tbuilder.connect(initAvg, 'output_average_image', datasink, 'PreRegisterAverage') """ 8. Run the workflow diff --git a/examples/smri_ants_registration.py b/examples/smri_ants_registration.py index c1c3044977..d77fd1b250 100644 --- a/examples/smri_ants_registration.py +++ b/examples/smri_ants_registration.py @@ -24,20 +24,20 @@ 2. 
Download T1 volumes into home directory """ -homeDir=os.getenv("HOME") -requestedPath=os.path.join(homeDir,'nipypeTestPath') -mydatadir=os.path.realpath(requestedPath) +homeDir = os.getenv("HOME") +requestedPath = os.path.join(homeDir, 'nipypeTestPath') +mydatadir = os.path.realpath(requestedPath) if not os.path.exists(mydatadir): os.makedirs(mydatadir) print(mydatadir) -MyFileURLs=[ - ('http://slicer.kitware.com/midas3/download?bitstream=13121','01_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13122','02_T1_half.nii.gz'), +MyFileURLs = [ + ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'), ] for tt in MyFileURLs: - myURL=tt[0] - localFilename=os.path.join(mydatadir,tt[1]) + myURL = tt[0] + localFilename = os.path.join(mydatadir, tt[1]) if not os.path.exists(localFilename): remotefile = urllib.request.urlopen(myURL) @@ -48,9 +48,9 @@ else: print("File previously downloaded {0}".format(localFilename)) -input_images=[ -os.path.join(mydatadir,'01_T1_half.nii.gz'), -os.path.join(mydatadir,'02_T1_half.nii.gz'), +input_images = [ + os.path.join(mydatadir, '01_T1_half.nii.gz'), + os.path.join(mydatadir, '02_T1_half.nii.gz'), ] """ @@ -58,7 +58,7 @@ """ reg = Registration() -reg.inputs.fixed_image = input_images[0] +reg.inputs.fixed_image = input_images[0] reg.inputs.moving_image = input_images[1] reg.inputs.output_transform_prefix = 'thisTransform' reg.inputs.output_warped_image = 'INTERNAL_WARPED.nii.gz' @@ -67,7 +67,7 @@ reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN'] reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)] reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 + - [[100, 50, 30]]) + [[100, 50, 30]]) reg.inputs.dimension = 3 reg.inputs.write_composite_transform = True reg.inputs.collapse_output_transforms = False diff --git a/examples/smri_antsregistration_build_template.py b/examples/smri_antsregistration_build_template.py index 2b1f8b6130..1e49a2c0bb 100644 --- a/examples/smri_antsregistration_build_template.py +++ b/examples/smri_antsregistration_build_template.py @@ -29,24 +29,24 @@ """ import urllib.request, urllib.error, urllib.parse -homeDir=os.getenv("HOME") -requestedPath=os.path.join(homeDir,'nipypeTestPath') -mydatadir=os.path.realpath(requestedPath) +homeDir = os.getenv("HOME") +requestedPath = os.path.join(homeDir, 'nipypeTestPath') +mydatadir = os.path.realpath(requestedPath) if not os.path.exists(mydatadir): os.makedirs(mydatadir) print(mydatadir) -MyFileURLs=[ - ('http://slicer.kitware.com/midas3/download?bitstream=13121','01_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13122','02_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13124','03_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13128','01_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13123','02_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13125','03_T1_inv_half.nii.gz'), +MyFileURLs = [ + ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13124', '03_T1_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13128', '01_T1_inv_half.nii.gz'), + 
('http://slicer.kitware.com/midas3/download?bitstream=13123', '02_T1_inv_half.nii.gz'), + ('http://slicer.kitware.com/midas3/download?bitstream=13125', '03_T1_inv_half.nii.gz'), ] for tt in MyFileURLs: - myURL=tt[0] - localFilename=os.path.join(mydatadir,tt[1]) + myURL = tt[0] + localFilename = os.path.join(mydatadir, tt[1]) if not os.path.exists(localFilename): remotefile = urllib.request.urlopen(myURL) @@ -64,15 +64,15 @@ co-aligned images for that one scan session """ -ListOfImagesDictionaries=[ -{'T1':os.path.join(mydatadir,'01_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'01_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'01_T1_inv_half.nii.gz')}, -{'T1':os.path.join(mydatadir,'02_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'02_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'02_T1_inv_half.nii.gz')}, -{'T1':os.path.join(mydatadir,'03_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'03_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'03_T1_inv_half.nii.gz')} +ListOfImagesDictionaries = [ + {'T1': os.path.join(mydatadir, '01_T1_half.nii.gz'), 'INV_T1': os.path.join(mydatadir, '01_T1_inv_half.nii.gz'), 'LABEL_MAP': os.path.join(mydatadir, '01_T1_inv_half.nii.gz')}, + {'T1': os.path.join(mydatadir, '02_T1_half.nii.gz'), 'INV_T1': os.path.join(mydatadir, '02_T1_inv_half.nii.gz'), 'LABEL_MAP': os.path.join(mydatadir, '02_T1_inv_half.nii.gz')}, + {'T1': os.path.join(mydatadir, '03_T1_half.nii.gz'), 'INV_T1': os.path.join(mydatadir, '03_T1_inv_half.nii.gz'), 'LABEL_MAP': os.path.join(mydatadir, '03_T1_inv_half.nii.gz')} ] -input_passive_images=[ -{'INV_T1':os.path.join(mydatadir,'01_T1_inv_half.nii.gz')}, -{'INV_T1':os.path.join(mydatadir,'02_T1_inv_half.nii.gz')}, -{'INV_T1':os.path.join(mydatadir,'03_T1_inv_half.nii.gz')} +input_passive_images = [ + {'INV_T1': os.path.join(mydatadir, '01_T1_inv_half.nii.gz')}, + {'INV_T1': os.path.join(mydatadir, '02_T1_inv_half.nii.gz')}, + {'INV_T1': os.path.join(mydatadir, '03_T1_inv_half.nii.gz')} ] """ @@ -82,7 +82,7 @@ ['T1','T2'] """ -registrationImageTypes=['T1'] +registrationImageTypes = ['T1'] """ interpolationMap - A map of image types to interpolation modes. If an @@ -90,30 +90,29 @@ { 'labelmap':'NearestNeighbor', 'FLAIR':'WindowedSinc' } """ -interpolationMapping={'INV_T1':'LanczosWindowedSinc','LABEL_MAP':'NearestNeighbor','T1':'Linear'} +interpolationMapping = {'INV_T1': 'LanczosWindowedSinc', 'LABEL_MAP': 'NearestNeighbor', 'T1': 'Linear'} """ 3. Define the workflow and its working directory """ -tbuilder=pe.Workflow(name="antsRegistrationTemplateBuilder") -tbuilder.base_dir=requestedPath +tbuilder = pe.Workflow(name="antsRegistrationTemplateBuilder") +tbuilder.base_dir = requestedPath """ 4. Define data sources. 
In real life these would be replace by DataGrabbers """ -InitialTemplateInputs=[ mdict['T1'] for mdict in ListOfImagesDictionaries ] +InitialTemplateInputs = [mdict['T1'] for mdict in ListOfImagesDictionaries] -datasource = pe.Node(interface=util.IdentityInterface(fields= - ['InitialTemplateInputs', 'ListOfImagesDictionaries', - 'registrationImageTypes','interpolationMapping']), - run_without_submitting=True, - name='InputImages' ) -datasource.inputs.InitialTemplateInputs=InitialTemplateInputs -datasource.inputs.ListOfImagesDictionaries=ListOfImagesDictionaries -datasource.inputs.registrationImageTypes=registrationImageTypes -datasource.inputs.interpolationMapping=interpolationMapping +datasource = pe.Node(interface=util.IdentityInterface(fields=['InitialTemplateInputs', 'ListOfImagesDictionaries', + 'registrationImageTypes', 'interpolationMapping']), + run_without_submitting=True, + name='InputImages') +datasource.inputs.InitialTemplateInputs = InitialTemplateInputs +datasource.inputs.ListOfImagesDictionaries = ListOfImagesDictionaries +datasource.inputs.registrationImageTypes = registrationImageTypes +datasource.inputs.interpolationMapping = interpolationMapping datasource.inputs.sort_filelist = True """ @@ -121,7 +120,7 @@ any reference image could be used (i.e. a previously created template) """ -initAvg = pe.Node(interface=ants.AverageImages(), name ='initAvg') +initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg') initAvg.inputs.dimension = 3 initAvg.inputs.normalize = True @@ -131,14 +130,14 @@ 6. Define the first iteration of template building """ -buildTemplateIteration1=antsRegistrationTemplateBuildSingleIterationWF('iteration01') +buildTemplateIteration1 = antsRegistrationTemplateBuildSingleIterationWF('iteration01') """ Here we are fine tuning parameters of the SGE job (memory limit, numebr of cores etc.) 
""" BeginANTS = buildTemplateIteration1.get_node("BeginANTS") -BeginANTS.plugin_args={'qsub_args': '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', 'overwrite': True} +BeginANTS.plugin_args = {'qsub_args': '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', 'overwrite': True} tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1, 'inputspec.fixed_image') tbuilder.connect(datasource, 'ListOfImagesDictionaries', buildTemplateIteration1, 'inputspec.ListOfImagesDictionaries') @@ -151,7 +150,7 @@ buildTemplateIteration2 = antsRegistrationTemplateBuildSingleIterationWF('iteration02') BeginANTS = buildTemplateIteration2.get_node("BeginANTS") -BeginANTS.plugin_args={'qsub_args': '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', 'overwrite': True} +BeginANTS.plugin_args = {'qsub_args': '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', 'overwrite': True} tbuilder.connect(buildTemplateIteration1, 'outputspec.template', buildTemplateIteration2, 'inputspec.fixed_image') tbuilder.connect(datasource, 'ListOfImagesDictionaries', buildTemplateIteration2, 'inputspec.ListOfImagesDictionaries') tbuilder.connect(datasource, 'registrationImageTypes', buildTemplateIteration2, 'inputspec.registrationImageTypes') @@ -164,9 +163,9 @@ datasink = pe.Node(io.DataSink(), name="datasink") datasink.inputs.base_directory = os.path.join(requestedPath, "results") -tbuilder.connect(buildTemplateIteration2, 'outputspec.template',datasink,'PrimaryTemplate') -tbuilder.connect(buildTemplateIteration2, 'outputspec.passive_deformed_templates',datasink,'PassiveTemplate') -tbuilder.connect(initAvg, 'output_average_image', datasink,'PreRegisterAverage') +tbuilder.connect(buildTemplateIteration2, 'outputspec.template', datasink, 'PrimaryTemplate') +tbuilder.connect(buildTemplateIteration2, 'outputspec.passive_deformed_templates', datasink, 'PassiveTemplate') +tbuilder.connect(initAvg, 'output_average_image', datasink, 'PreRegisterAverage') """ 9. Run the workflow diff --git a/examples/tessellation_tutorial.py b/examples/tessellation_tutorial.py index ad9d51cf97..6ca738e0d3 100644 --- a/examples/tessellation_tutorial.py +++ b/examples/tessellation_tutorial.py @@ -15,18 +15,18 @@ .. seealso:: - ConnectomeViewer - The Connectome Viewer connects Multi-Modal Multi-Scale Neuroimaging and Network Datasets For Analysis and Visualization in Python. + ConnectomeViewer + The Connectome Viewer connects Multi-Modal Multi-Scale Neuroimaging and Network Datasets For Analysis and Visualization in Python. - http://www.geuz.org/gmsh/ - Gmsh: a three-dimensional finite element mesh generator with built-in pre- and post-processing facilities + http://www.geuz.org/gmsh/ + Gmsh: a three-dimensional finite element mesh generator with built-in pre- and post-processing facilities - http://www.blender.org/ - Blender is the free open source 3D content creation suite, available for all major operating systems under the GNU General Public License. + http://www.blender.org/ + Blender is the free open source 3D content creation suite, available for all major operating systems under the GNU General Public License. .. warning:: - This workflow will take several hours to finish entirely, since smoothing the larger cortical surfaces is very time consuming. + This workflow will take several hours to finish entirely, since smoothing the larger cortical surfaces is very time consuming. 
Packages and Data Setup ======================= @@ -48,7 +48,7 @@ """ fs_dir = os.environ['FREESURFER_HOME'] -lookup_file = op.join(fs_dir,'FreeSurferColorLUT.txt') +lookup_file = op.join(fs_dir, 'FreeSurferColorLUT.txt') subjects_dir = op.join(fs_dir, 'subjects/') output_dir = './tessellate_tutorial' @@ -99,7 +99,7 @@ tesspipe = pe.Workflow(name='tessellate_tutorial') tesspipe.base_dir = output_dir -tesspipe.connect([(tessflow, datasink,[('outputspec.meshes', '@meshes.all')])]) +tesspipe.connect([(tessflow, datasink, [('outputspec.meshes', '@meshes.all')])]) """ If the surfaces are to be packaged, this will connect the CFFConverter @@ -107,7 +107,7 @@ """ if cff: - tesspipe.connect([(tessflow, cff,[('outputspec.meshes', 'gifti_surfaces')])]) - tesspipe.connect([(cff, datasink,[('connectome_file', '@cff')])]) + tesspipe.connect([(tessflow, cff, [('outputspec.meshes', 'gifti_surfaces')])]) + tesspipe.connect([(cff, datasink, [('connectome_file', '@cff')])]) tesspipe.run() diff --git a/examples/test_spm.py b/examples/test_spm.py index e524b63a28..4162a39769 100644 --- a/examples/test_spm.py +++ b/examples/test_spm.py @@ -15,7 +15,7 @@ stc.inputs.num_slices = 21 stc.inputs.time_repetition = 1.0 stc.inputs.time_acquisition = 2. - 2. / 32 -stc.inputs.slice_order = list(range(21,0,-1)) +stc.inputs.slice_order = list(range(21, 0, -1)) stc.inputs.ref_slice = 10 realign_estimate = pe.Node(interface=spm.Realign(), name='realign_estimate') @@ -28,17 +28,17 @@ realign_estwrite.inputs.jobtype = "estwrite" realign_estwrite.inputs.register_to_mean = True -smooth = pe.Node(interface=spm.Smooth(), name = 'smooth') +smooth = pe.Node(interface=spm.Smooth(), name='smooth') smooth.inputs.fwhm = [6, 6, 6] workflow3d = pe.Workflow(name='test_3d') workflow3d.base_dir = "/tmp" workflow3d.connect([(split, stc, [("out_files", "in_files")]), - (stc, realign_estimate, [('timecorrected_files','in_files')]), - (realign_estimate, realign_write, [('modified_in_files','in_files')]), - (stc, realign_estwrite, [('timecorrected_files','in_files')]), - (realign_write, smooth, [('realigned_files','in_files')])]) + (stc, realign_estimate, [('timecorrected_files', 'in_files')]), + (realign_estimate, realign_write, [('modified_in_files', 'in_files')]), + (stc, realign_estwrite, [('timecorrected_files', 'in_files')]), + (realign_write, smooth, [('realigned_files', 'in_files')])]) workflow3d.run() @@ -50,7 +50,7 @@ stc.inputs.num_slices = 21 stc.inputs.time_repetition = 1.0 stc.inputs.time_acquisition = 2. - 2. 
/ 32 -stc.inputs.slice_order = list(range(21,0,-1)) +stc.inputs.slice_order = list(range(21, 0, -1)) stc.inputs.ref_slice = 10 realign_estimate = pe.Node(interface=spm.Realign(), name='realign_estimate') @@ -62,16 +62,16 @@ realign_estwrite = pe.Node(interface=spm.Realign(), name='realign_estwrite') realign_estwrite.inputs.jobtype = "estwrite" -smooth = pe.Node(interface=spm.Smooth(), name = 'smooth') +smooth = pe.Node(interface=spm.Smooth(), name='smooth') smooth.inputs.fwhm = [6, 6, 6] workflow4d = pe.Workflow(name='test_4d') workflow4d.base_dir = "/tmp" workflow4d.connect([(gunzip, stc, [("out_file", "in_files")]), - (stc, realign_estimate, [('timecorrected_files','in_files')]), - (realign_estimate, realign_write, [('modified_in_files','in_files')]), - (stc, realign_estwrite, [('timecorrected_files','in_files')]), - (realign_write, smooth, [('realigned_files','in_files')])]) + (stc, realign_estimate, [('timecorrected_files', 'in_files')]), + (realign_estimate, realign_write, [('modified_in_files', 'in_files')]), + (stc, realign_estwrite, [('timecorrected_files', 'in_files')]), + (realign_write, smooth, [('realigned_files', 'in_files')])]) -workflow4d.run() \ No newline at end of file +workflow4d.run() diff --git a/examples/workshop_dartmouth_2010.py b/examples/workshop_dartmouth_2010.py index 6a035c4924..779fec9045 100644 --- a/examples/workshop_dartmouth_2010.py +++ b/examples/workshop_dartmouth_2010.py @@ -131,7 +131,7 @@ smooth = pe.Node(interface=fsl.Smooth(fwhm=6), name="smooth") motion_correct_and_smooth = pe.Workflow(name="motion_correct_and_smooth") -motion_correct_and_smooth.base_dir = os.path.abspath('.') # define where will be the root folder for the workflow +motion_correct_and_smooth.base_dir = os.path.abspath('.') # define where will be the root folder for the workflow motion_correct_and_smooth.connect([ (motion_correct, smooth, [('out_file', 'in_file')]) ]) @@ -212,7 +212,7 @@ preproc.inputs.inputspec.func = os.path.abspath('data/s1/f3.nii') preproc.inputs.inputspec.struct = os.path.abspath('data/s1/struct.nii') -datasink = pe.Node(interface=nio.DataSink(),name='sinker') +datasink = pe.Node(interface=nio.DataSink(), name='sinker') preprocess = pe.Workflow(name='preprocout') preprocess.base_dir = os.path.abspath('.') preprocess.connect([ @@ -272,7 +272,7 @@ datasource = pe.Node(nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), name="datasource") datasource.inputs.template = '%s/%s.nii' datasource.inputs.base_directory = os.path.abspath('data') -datasource.inputs.template_args = dict(func=[['subject_id','f3']], struct=[['subject_id','struct']]) +datasource.inputs.template_args = dict(func=[['subject_id', 'f3']], struct=[['subject_id', 'struct']]) datasource.inputs.sort_filelist = True my_workflow = pe.Workflow(name="my_workflow") @@ -280,7 +280,7 @@ my_workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]), (datasource, preproc, [('func', 'inputspec.func'), - ('struct', 'inputspec.struct')])]) + ('struct', 'inputspec.struct')])]) my_workflow.run() @@ -290,8 +290,8 @@ """ smoothnode = my_workflow.get_node('preproc.smooth') -assert(str(smoothnode)=='preproc.smooth') -smoothnode.iterables = ('fwhm', [5.,10.]) +assert(str(smoothnode) == 'preproc.smooth') +smoothnode.iterables = ('fwhm', [5., 10.]) my_workflow.run() """ diff --git a/nipype/__init__.py b/nipype/__init__.py index bfe2ab0563..bc86315987 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -5,9 +5,9 @@ import os from .info import (LONG_DESCRIPTION as __doc__, - URL as 
__url__, - STATUS as __status__, - __version__) + URL as __url__, + STATUS as __status__, + __version__) from .utils.config import NipypeConfig config = NipypeConfig() from .utils.logger import Logging @@ -17,6 +17,7 @@ from .fixes.numpy.testing import nosetester + class _NoseTester(nosetester.NoseTester): """ Subclass numpy's NoseTester to add doctests by default """ @@ -57,7 +58,7 @@ def _test_local_install(): os.path.abspath(__file__).split(os.sep)[:-2]): import warnings warnings.warn('Running the tests from the install directory may ' - 'trigger some failures') + 'trigger some failures') _test_local_install() @@ -78,4 +79,4 @@ def _test_local_install(): from .pipeline import Node, MapNode, JoinNode, Workflow from .interfaces import (DataGrabber, DataSink, SelectFiles, - IdentityInterface, Rename, Function, Select, Merge) + IdentityInterface, Rename, Function, Select, Merge) diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py index d2a018cd37..f30697d584 100644 --- a/nipype/algorithms/icc.py +++ b/nipype/algorithms/icc.py @@ -11,8 +11,8 @@ class ICCInputSpec(BaseInterfaceInputSpec): subjects_sessions = traits.List(traits.List(File(exists=True)), - desc="n subjects m sessions 3D stat files", - mandatory=True) + desc="n subjects m sessions 3D stat files", + mandatory=True) mask = File(exists=True, mandatory=True) diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index 9d7d8dc66f..5284b1f201 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -100,7 +100,7 @@ def _eucl_min(self, nii1, nii2): (point1, point2) = np.unravel_index( np.argmin(dist_matrix), dist_matrix.shape) return (euclidean(set1_coordinates.T[point1, :], - set2_coordinates.T[point2, :]), + set2_coordinates.T[point2, :]), set1_coordinates.T[point1, :], set2_coordinates.T[point2, :]) @@ -117,7 +117,7 @@ def _eucl_cog(self, nii1, nii2): for i in range(n_labels): cogs[:3, i] = np.array(center_of_mass(origdata2, - labeled_data, i + 1)) + labeled_data, i + 1)) cogs_coor = np.dot(nii2.get_affine(), cogs)[:3, :] @@ -312,7 +312,7 @@ def _run_interface(self, runtime): for l in labels: res.append(self._bool_vec_dissimilarity(data1 == l, - data2 == l, method='jaccard')) + data2 == l, method='jaccard')) volumes1.append(scale * len(data1[data1 == l])) volumes2.append(scale * len(data2[data2 == l])) @@ -331,7 +331,7 @@ def _run_interface(self, runtime): both_data[(data1 - data2) != 0] = 1 nb.save(nb.Nifti1Image(both_data, nii1.get_affine(), - nii1.get_header()), self.inputs.out_file) + nii1.get_header()), self.inputs.out_file) self._labels = labels self._ove_rois = results @@ -359,10 +359,10 @@ def _list_outputs(self): class FuzzyOverlapInputSpec(BaseInterfaceInputSpec): - in_ref = InputMultiPath( File(exists=True), mandatory=True, - desc='Reference image. Requires the same dimensions as in_tst.') - in_tst = InputMultiPath( File(exists=True), mandatory=True, - desc='Test image. Requires the same dimensions as in_ref.') + in_ref = InputMultiPath(File(exists=True), mandatory=True, + desc='Reference image. Requires the same dimensions as in_tst.') + in_tst = InputMultiPath(File(exists=True), mandatory=True, + desc='Test image. Requires the same dimensions as in_ref.') weighting = traits.Enum('none', 'volume', 'squared_vol', usedefault=True, desc=('\'none\': no class-overlap weighting is ' 'performed. 
\'volume\': computed class-' @@ -374,11 +374,11 @@ class FuzzyOverlapInputSpec(BaseInterfaceInputSpec): class FuzzyOverlapOutputSpec(TraitedSpec): - jaccard = traits.Float( desc='Fuzzy Jaccard Index (fJI), all the classes' ) - dice = traits.Float( desc='Fuzzy Dice Index (fDI), all the classes' ) - diff_file = File(exists=True, desc='resulting difference-map of all classes, using the chosen weighting' ) - class_fji = traits.List( traits.Float(), desc='Array containing the fJIs of each computed class' ) - class_fdi = traits.List( traits.Float(), desc='Array containing the fDIs of each computed class' ) + jaccard = traits.Float(desc='Fuzzy Jaccard Index (fJI), all the classes') + dice = traits.Float(desc='Fuzzy Dice Index (fDI), all the classes') + diff_file = File(exists=True, desc='resulting difference-map of all classes, using the chosen weighting') + class_fji = traits.List(traits.Float(), desc='Array containing the fJIs of each computed class') + class_fdi = traits.List(traits.Float(), desc='Array containing the fDIs of each computed class') class FuzzyOverlap(BaseInterface): @@ -401,36 +401,35 @@ class FuzzyOverlap(BaseInterface): >>> res = overlap.run() # doctest: +SKIP """ - input_spec = FuzzyOverlapInputSpec + input_spec = FuzzyOverlapInputSpec output_spec = FuzzyOverlapOutputSpec def _run_interface(self, runtime): ncomp = len(self.inputs.in_ref) - assert( ncomp == len(self.inputs.in_tst) ) - weights = np.ones( shape=ncomp ) - - img_ref = np.array( [ nb.load( fname ).get_data() for fname in self.inputs.in_ref ] ) - img_tst = np.array( [ nb.load( fname ).get_data() for fname in self.inputs.in_tst ] ) + assert(ncomp == len(self.inputs.in_tst)) + weights = np.ones(shape=ncomp) + img_ref = np.array([nb.load(fname).get_data() for fname in self.inputs.in_ref]) + img_tst = np.array([nb.load(fname).get_data() for fname in self.inputs.in_tst]) msk = np.sum(img_ref, axis=0) - msk[msk>0] = 1.0 + msk[msk > 0] = 1.0 tst_msk = np.sum(img_tst, axis=0) - tst_msk[tst_msk>0] = 1.0 + tst_msk[tst_msk > 0] = 1.0 - #check that volumes are normalized - #img_ref[:][msk>0] = img_ref[:][msk>0] / (np.sum( img_ref, axis=0 ))[msk>0] - #img_tst[tst_msk>0] = img_tst[tst_msk>0] / np.sum( img_tst, axis=0 )[tst_msk>0] + # check that volumes are normalized + # img_ref[:][msk>0] = img_ref[:][msk>0] / (np.sum( img_ref, axis=0 ))[msk>0] + # img_tst[tst_msk>0] = img_tst[tst_msk>0] / np.sum( img_tst, axis=0 )[tst_msk>0] self._jaccards = [] volumes = [] - diff_im = np.zeros( img_ref.shape ) + diff_im = np.zeros(img_ref.shape) - for ref_comp, tst_comp, diff_comp in zip( img_ref, img_tst, diff_im ): - num = np.minimum( ref_comp, tst_comp ) - ddr = np.maximum( ref_comp, tst_comp ) - diff_comp[ddr>0] += 1.0 - (num[ddr>0] / ddr[ddr>0]) + for ref_comp, tst_comp, diff_comp in zip(img_ref, img_tst, diff_im): + num = np.minimum(ref_comp, tst_comp) + ddr = np.maximum(ref_comp, tst_comp) + diff_comp[ddr > 0] += 1.0 - (num[ddr > 0] / ddr[ddr > 0]) self._jaccards.append(np.sum(num) / np.sum(ddr)) volumes.append(np.sum(ref_comp)) @@ -444,19 +443,17 @@ def _run_interface(self, runtime): weights = weights / np.sum(weights) - setattr(self, '_jaccard', np.sum( weights * self._jaccards ) ) - setattr(self, '_dice', np.sum( weights * self._dices ) ) - + setattr(self, '_jaccard', np.sum(weights * self._jaccards)) + setattr(self, '_dice', np.sum(weights * self._dices)) - diff = np.zeros( diff_im[0].shape ) + diff = np.zeros(diff_im[0].shape) - for w,ch in zip(weights,diff_im): - ch[msk==0] = 0 - diff+= w* ch - - nb.save(nb.Nifti1Image(diff, 
nb.load( self.inputs.in_ref[0]).get_affine(), - nb.load(self.inputs.in_ref[0]).get_header()), self.inputs.out_file) + for w, ch in zip(weights, diff_im): + ch[msk == 0] = 0 + diff += w * ch + nb.save(nb.Nifti1Image(diff, nb.load(self.inputs.in_ref[0]).get_affine(), + nb.load(self.inputs.in_ref[0]).get_header()), self.inputs.out_file) return runtime @@ -464,18 +461,18 @@ def _list_outputs(self): outputs = self._outputs().get() for method in ("dice", "jaccard"): outputs[method] = getattr(self, '_' + method) - #outputs['volume_difference'] = self._volume + # outputs['volume_difference'] = self._volume outputs['diff_file'] = os.path.abspath(self.inputs.out_file) - outputs['class_fji'] = np.array(self._jaccards).astype(float).tolist(); - outputs['class_fdi']= self._dices.astype(float).tolist(); + outputs['class_fji'] = np.array(self._jaccards).astype(float).tolist(); + outputs['class_fdi'] = self._dices.astype(float).tolist(); return outputs class ErrorMapInputSpec(BaseInterfaceInputSpec): in_ref = File(exists=True, mandatory=True, - desc="Reference image. Requires the same dimensions as in_tst.") + desc="Reference image. Requires the same dimensions as in_tst.") in_tst = File(exists=True, mandatory=True, - desc="Test image. Requires the same dimensions as in_ref.") + desc="Test image. Requires the same dimensions as in_ref.") mask = File(exists=True, desc="calculate overlap only within this mask.") metric = traits.Enum("sqeuclidean", "euclidean", desc='error map metric (as implemented in scipy cdist)', @@ -519,7 +516,7 @@ def _run_interface(self, runtime): mapshape = ref_data.shape[:-1] if isdefined(self.inputs.mask): - msk = nb.load( self.inputs.mask ).get_data() + msk = nb.load(self.inputs.mask).get_data() if (mapshape != msk.shape): raise RuntimeError("Mask should match volume shape, \ mask is %s and volumes are %s" % @@ -529,9 +526,9 @@ def _run_interface(self, runtime): # Flatten both volumes and make the pixel differennce mskvector = msk.reshape(-1) - msk_idxs = np.where(mskvector==1) - refvector = ref_data.reshape(-1,comps)[msk_idxs].astype(np.float32) - tstvector = tst_data.reshape(-1,comps)[msk_idxs].astype(np.float32) + msk_idxs = np.where(mskvector == 1) + refvector = ref_data.reshape(-1, comps)[msk_idxs].astype(np.float32) + tstvector = tst_data.reshape(-1, comps)[msk_idxs].astype(np.float32) diffvector = (refvector-tstvector) # Scale the difference @@ -544,11 +541,11 @@ def _run_interface(self, runtime): elif self.inputs.metric == 'euclidean': errvector = np.linalg.norm(diffvector, axis=1) - errvectorexp = np.zeros_like(mskvector, dtype=np.float32) # The default type is uint8 + errvectorexp = np.zeros_like(mskvector, dtype=np.float32) # The default type is uint8 errvectorexp[msk_idxs] = errvector # Get averaged error - self._distance = np.average(errvector) # Only average the masked voxels + self._distance = np.average(errvector) # Only average the masked voxels errmap = errvectorexp.reshape(mapshape) @@ -558,9 +555,9 @@ def _run_interface(self, runtime): hdr.set_data_shape(mapshape) if not isdefined(self.inputs.out_map): - fname,ext = op.splitext(op.basename(self.inputs.in_tst)) - if ext=='.gz': - fname,ext2 = op.splitext(fname) + fname, ext = op.splitext(op.basename(self.inputs.in_tst)) + if ext == '.gz': + fname, ext2 = op.splitext(fname) ext = ext2 + ext self._out_file = op.abspath(fname + "_errmap" + ext) else: @@ -584,8 +581,8 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = 
traits.Either(traits.Enum('cc', 'cr', 'crl1', 'mi', 'nmi', 'slr'), - traits.Callable(), - desc="""str or callable + traits.Callable(), + desc="""str or callable Cost-function for assessing image similarity. If a string, one of 'cc': correlation coefficient, 'cr': correlation ratio, 'crl1': L1-norm based correlation ratio, 'mi': mutual @@ -596,7 +593,7 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): class SimilarityOutputSpec(TraitedSpec): - similarity = traits.List( traits.Float(desc="Similarity between volume 1 and 2, frame by frame")) + similarity = traits.List(traits.Float(desc="Similarity between volume 1 and 2, frame by frame")) class Similarity(BaseInterface): @@ -629,8 +626,7 @@ def __init__(self, **inputs): package_check('nipy') except Exception as e: self._have_nipy = False - super(Similarity,self).__init__(**inputs) - + super(Similarity, self).__init__(**inputs) def _run_interface(self, runtime): if not self._have_nipy: @@ -644,15 +640,15 @@ def _run_interface(self, runtime): dims = vol1_nii.get_data().ndim - if dims==3 or dims==2: - vols1 = [ vol1_nii ] - vols2 = [ vol2_nii ] - if dims==4: - vols1 = nb.four_to_three( vol1_nii ) - vols2 = nb.four_to_three( vol2_nii ) + if dims == 3 or dims == 2: + vols1 = [vol1_nii] + vols2 = [vol2_nii] + if dims == 4: + vols1 = nb.four_to_three(vol1_nii) + vols2 = nb.four_to_three(vol2_nii) - if dims<2 or dims>4: - raise RuntimeError( 'Image dimensions not supported (detected %dD file)' % dims ) + if dims < 2 or dims > 4: + raise RuntimeError('Image dimensions not supported (detected %dD file)' % dims) if isdefined(self.inputs.mask1): mask1 = nb.load(self.inputs.mask1).get_data() == 1 @@ -666,13 +662,13 @@ def _run_interface(self, runtime): self._similarity = [] - for ts1,ts2 in zip( vols1, vols2 ): - histreg = HistogramRegistration(from_img = ts1, - to_img = ts2, + for ts1, ts2 in zip(vols1, vols2): + histreg = HistogramRegistration(from_img=ts1, + to_img=ts2, similarity=self.inputs.metric, - from_mask = mask1, - to_mask = mask2) - self._similarity.append( histreg.eval(Affine()) ) + from_mask=mask1, + to_mask=mask2) + self._similarity.append(histreg.eval(Affine())) return runtime diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 2394818434..6d5aaf1dc0 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -100,7 +100,7 @@ def _get_brodmann_area(self): for lab in labels: newdata[origdata == lab] = 1 if self.inputs.hemi == 'right': - newdata[int(floor(float(origdata.shape[0]) / 2)):, :, :] = 0 + newdata[int(floor(float(origdata.shape[0]) / 2)):, :, :] = 0 elif self.inputs.hemi == 'left': newdata[:int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0 @@ -205,7 +205,7 @@ def _run_interface(self, runtime): affine = np.dot(self.inputs.transformation_matrix, affine) nb.save(nb.Nifti1Image(img.get_data(), affine, - img.get_header()), self._gen_output_filename(fname)) + img.get_header()), self._gen_output_filename(fname)) return runtime @@ -979,13 +979,13 @@ class AddNoiseInputSpec(TraitedSpec): in_file = File(exists=True, mandatory=True, desc='input image that will be corrupted with noise') in_mask = File(exists=True, desc=('input mask, voxels outside this mask ' - 'will be considered background')) + 'will be considered background')) snr = traits.Float(10.0, desc='desired output SNR in dB', usedefault=True) dist = traits.Enum('normal', 'rician', usedefault=True, mandatory=True, desc=('desired noise distribution')) bg_dist = traits.Enum('normal', 'rayleigh', usedefault=True, mandatory=True, desc=('desired 
noise distribution, currently ' - 'only normal is implemented')) + 'only normal is implemented')) out_file = File(desc='desired output filename') @@ -1080,14 +1080,14 @@ def gen_noise(self, image, mask=None, snr_db=10.0, dist='normal', bg_dist='norma im_noise = np.sqrt((image + stde_1)**2 + (stde_2)**2) else: raise NotImplementedError(('Only normal and rician distributions ' - 'are supported')) + 'are supported')) return im_noise class NormalizeProbabilityMapSetInputSpec(TraitedSpec): in_files = InputMultiPath(File(exists=True, mandatory=True, - desc='The tpms to be normalized')) + desc='The tpms to be normalized')) in_mask = File(exists=True, desc='Masked voxels must sum up 1.0, 0.0 otherwise.') @@ -1243,47 +1243,46 @@ def normalize_tpms(in_files, in_mask=None, out_files=[]): in_files = np.atleast_1d(in_files).tolist() if len(out_files) != len(in_files): - for i,finname in enumerate(in_files): - fname,fext = op.splitext(op.basename(finname)) + for i, finname in enumerate(in_files): + fname, fext = op.splitext(op.basename(finname)) if fext == '.gz': - fname,fext2 = op.splitext(fname) + fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath('%s_norm_%02d%s' % (fname,i,fext)) - out_files+= [out_file] + out_file = op.abspath('%s_norm_%02d%s' % (fname, i, fext)) + out_files += [out_file] imgs = [nib.load(fim) for fim in in_files] - if len(in_files)==1: + if len(in_files) == 1: img_data = imgs[0].get_data() - img_data[img_data>0.0] = 1.0 + img_data[img_data > 0.0] = 1.0 hdr = imgs[0].get_header().copy() - hdr['data_type']= 16 + hdr['data_type'] = 16 hdr.set_data_dtype(np.float32) nib.save(nib.Nifti1Image(img_data.astype(np.float32), imgs[0].get_affine(), hdr), out_files[0]) return out_files[0] img_data = np.array([im.get_data() for im in imgs]).astype(np.float32) - #img_data[img_data>1.0] = 1.0 - img_data[img_data<0.0] = 0.0 + # img_data[img_data>1.0] = 1.0 + img_data[img_data < 0.0] = 0.0 weights = np.sum(img_data, axis=0) msk = np.ones_like(imgs[0].get_data()) - msk[ weights<= 0 ] = 0 + msk[weights <= 0] = 0 if not in_mask is None: msk = nib.load(in_mask).get_data() - msk[ msk<=0 ] = 0 - msk[ msk>0 ] = 1 + msk[msk <= 0] = 0 + msk[msk > 0] = 1 msk = np.ma.masked_equal(msk, 0) - - for i,out_file in enumerate(out_files): + for i, out_file in enumerate(out_files): data = np.ma.masked_equal(img_data[i], 0) probmap = data / weights hdr = imgs[i].get_header().copy() - hdr['data_type']= 16 + hdr['data_type'] = 16 hdr.set_data_dtype('float32') nib.save(nib.Nifti1Image(probmap.astype(np.float32), imgs[i].get_affine(), hdr), out_file) @@ -1419,8 +1418,8 @@ def merge_rois(in_files, in_idxs, in_ref, data[idata, ...] = cdata[0:nels, ...] 
except: print(('Consistency between indexes and chunks was ' - 'lost: data=%s, chunk=%s') % (str(data.shape), - str(cdata.shape))) + 'lost: data=%s, chunk=%s') % (str(data.shape), + str(cdata.shape))) raise hdr.set_data_shape(newshape) @@ -1466,7 +1465,7 @@ class Distance(nam.Distance): def __init__(self, **inputs): super(nam.Distance, self).__init__(**inputs) warnings.warn(("This interface has been deprecated since 0.10.0," - " please use nipype.algorithms.metrics.Distance"), + " please use nipype.algorithms.metrics.Distance"), DeprecationWarning) @@ -1479,7 +1478,7 @@ class Overlap(nam.Overlap): def __init__(self, **inputs): super(nam.Overlap, self).__init__(**inputs) warnings.warn(("This interface has been deprecated since 0.10.0," - " please use nipype.algorithms.metrics.Overlap"), + " please use nipype.algorithms.metrics.Overlap"), DeprecationWarning) @@ -1493,5 +1492,5 @@ class FuzzyOverlap(nam.FuzzyOverlap): def __init__(self, **inputs): super(nam.FuzzyOverlap, self).__init__(**inputs) warnings.warn(("This interface has been deprecated since 0.10.0," - " please use nipype.algorithms.metrics.FuzzyOverlap"), + " please use nipype.algorithms.metrics.FuzzyOverlap"), DeprecationWarning) diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index 8ef80c4976..cb7e2ae4e8 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -136,7 +136,7 @@ def scale_timings(timelist, input_units, output_units, time_repetition): time_repetition: float in seconds """ - if input_units==output_units: + if input_units == output_units: _scalefactor = 1. if (input_units == 'scans') and (output_units == 'secs'): _scalefactor = time_repetition @@ -176,33 +176,33 @@ def gen_info(run_event_files): class SpecifyModelInputSpec(BaseInterfaceInputSpec): subject_info = InputMultiPath(Bunch, mandatory=True, xor=['subject_info', 'event_files'], - desc=("Bunch or List(Bunch) subject specific condition information. " - "see :ref:`SpecifyModel` or SpecifyModel.__doc__ for details")) + desc=("Bunch or List(Bunch) subject specific condition information. " + "see :ref:`SpecifyModel` or SpecifyModel.__doc__ for details")) event_files = InputMultiPath(traits.List(File(exists=True)), mandatory=True, xor=['subject_info', 'event_files'], - desc=('list of event description files 1, 2 or 3 column format ' - 'corresponding to onsets, durations and amplitudes')) + desc=('list of event description files 1, 2 or 3 column format ' + 'corresponding to onsets, durations and amplitudes')) realignment_parameters = InputMultiPath(File(exists=True), - desc="Realignment parameters returned by motion correction algorithm", - copyfile=False) + desc="Realignment parameters returned by motion correction algorithm", + copyfile=False) outlier_files = InputMultiPath(File(exists=True), - desc="Files containing scan outlier indices that should be tossed", - copyfile=False) + desc="Files containing scan outlier indices that should be tossed", + copyfile=False) functional_runs = InputMultiPath(traits.Either(traits.List(File(exists=True)), File(exists=True)), mandatory=True, - desc=("Data files for model. List of 4D files or list of list of 3D " - "files per session"), copyfile=False) + desc=("Data files for model. List of 4D files or list of list of 3D " + "files per session"), copyfile=False) input_units = traits.Enum('secs', 'scans', mandatory=True, - desc=("Units of event onsets and durations (secs or scans). 
Output " - "units are always in secs")) + desc=("Units of event onsets and durations (secs or scans). Output " + "units are always in secs")) high_pass_filter_cutoff = traits.Float(mandatory=True, - desc="High-pass filter cutoff in secs") + desc="High-pass filter cutoff in secs") time_repetition = traits.Float(mandatory=True, - desc=("Time between the start of one volume to the start of " - "the next image volume.")) + desc=("Time between the start of one volume to the start of " + "the next image volume.")) # Not implemented yet - #polynomial_order = traits.Range(0, low=0, + # polynomial_order = traits.Range(0, low=0, # desc ="Number of polynomial functions to model high pass filter.") @@ -315,10 +315,10 @@ def _generate_standard_design(self, infolist, sessinfo[i]['cond'][cid]['amplitudes'] = \ info.amplitudes[cid] if hasattr(info, 'tmod') and info.tmod and \ - len(info.tmod) > cid: + len(info.tmod) > cid: sessinfo[i]['cond'][cid]['tmod'] = info.tmod[cid] if hasattr(info, 'pmod') and info.pmod and \ - len(info.pmod) > cid: + len(info.pmod) > cid: if info.pmod[cid]: sessinfo[i]['cond'][cid]['pmod'] = [] for j, name in enumerate(info.pmod[cid].name): @@ -329,12 +329,12 @@ def _generate_standard_design(self, infolist, info.pmod[cid].poly[j] sessinfo[i]['cond'][cid]['pmod'][j]['param'] = \ info.pmod[cid].param[j] - sessinfo[i]['regress']= [] + sessinfo[i]['regress'] = [] if hasattr(info, 'regressors') and info.regressors is not None: for j, r in enumerate(info.regressors): sessinfo[i]['regress'].insert(j, dict(name='', val=[])) if hasattr(info, 'regressor_names') and \ - info.regressor_names is not None: + info.regressor_names is not None: sessinfo[i]['regress'][j]['name'] = \ info.regressor_names[j] else: @@ -364,7 +364,7 @@ def _generate_standard_design(self, infolist, for j, scanno in enumerate(out): colidx = len(sessinfo[i]['regress']) sessinfo[i]['regress'].insert(colidx, dict(name='', val=[])) - sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d'%(j+1) + sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d' %(j+1) sessinfo[i]['regress'][colidx]['val'] = \ np.zeros((1, numscans))[0].tolist() sessinfo[i]['regress'][colidx]['val'][int(scanno)] = 1 @@ -395,9 +395,9 @@ def _generate_design(self, infolist=None): else: infolist = gen_info(self.inputs.event_files) self._sessinfo = self._generate_standard_design(infolist, - functional_runs=self.inputs.functional_runs, - realignment_parameters=realignment_parameters, - outliers=outliers) + functional_runs=self.inputs.functional_runs, + realignment_parameters=realignment_parameters, + outliers=outliers) def _run_interface(self, runtime): """ @@ -417,9 +417,9 @@ def _list_outputs(self): class SpecifySPMModelInputSpec(SpecifyModelInputSpec): concatenate_runs = traits.Bool(False, usedefault=True, - desc="Concatenate all runs to look like a single session.") + desc="Concatenate all runs to look like a single session.") output_units = traits.Enum('secs', 'scans', usedefault=True, - desc="Units of design event onsets and durations (secs or scans)") + desc="Units of design event onsets and durations (secs or scans)") class SpecifySPMModel(SpecifyModel): @@ -470,7 +470,7 @@ def _concatenate_info(self, infolist): infoout.durations[j] = (infolist[0].durations[j] * len(infolist[0].onsets[j])) for i, info in enumerate(infolist[1:]): - #info.[conditions, tmod] remain the same + # info.[conditions, tmod] remain the same if info.onsets: for j, val in enumerate(info.onsets): if self.inputs.input_units == 'secs': @@ -501,11 +501,11 @@ def _concatenate_info(self, 
infolist): for key, data in enumerate(val.param): infoout.pmod[j].param[key].extend(data) if hasattr(info, 'regressors') and info.regressors: - #assumes same ordering of regressors across different - #runs and the same names for the regressors + # assumes same ordering of regressors across different + # runs and the same names for the regressors for j, v in enumerate(info.regressors): infoout.regressors[j].extend(info.regressors[j]) - #insert session regressors + # insert session regressors if not hasattr(infoout, 'regressors') or not infoout.regressors: infoout.regressors = [] onelist = np.zeros((1, sum(nscans))) @@ -551,27 +551,27 @@ def _generate_design(self, infolist=None): outliers[0].extend((np.array(out) + sum(nscans[0:i])).tolist()) self._sessinfo = self._generate_standard_design(concatlist, - functional_runs=functional_runs, - realignment_parameters=realignment_parameters, - outliers=outliers) + functional_runs=functional_runs, + realignment_parameters=realignment_parameters, + outliers=outliers) class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float(0, mandatory=True, - desc="Time in seconds to acquire a single image volume") - volumes_in_cluster=traits.Range(1, usedefault=True, + desc="Time in seconds to acquire a single image volume") + volumes_in_cluster = traits.Range(1, usedefault=True, desc="Number of scan volumes in a cluster") model_hrf = traits.Bool(desc="model sparse events with hrf") stimuli_as_impulses = traits.Bool(True, - desc="Treat each stimulus to be impulse like.", - usedefault=True) + desc="Treat each stimulus to be impulse like.", + usedefault=True) use_temporal_deriv = traits.Bool(requires=['model_hrf'], - desc="Create a temporal derivative in addition to regular regressor") + desc="Create a temporal derivative in addition to regular regressor") scale_regressors = traits.Bool(True, desc="Scale regressors by the peak", usedefault=True) scan_onset = traits.Float(0.0, - desc="Start of scanning relative to onset of run in secs", - usedefault=True) + desc="Start of scanning relative to onset of run in secs", + usedefault=True) save_plot = traits.Bool(desc=('save plot of sparse design calculation ' '(Requires matplotlib)')) @@ -617,15 +617,15 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): """ bplot = False if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - bplot=True + bplot = True import matplotlib matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt TR = np.round(self.inputs.time_repetition * 1000) # in ms if self.inputs.time_acquisition: - TA = np.round(self.inputs.time_acquisition * 1000) # in ms + TA = np.round(self.inputs.time_acquisition * 1000) # in ms else: - TA = TR # in ms + TA = TR # in ms nvol = self.inputs.volumes_in_cluster SCANONSET = np.round(self.inputs.scan_onset * 1000) total_time = TR * (nscans - nvol) / nvol + TA * nvol + SCANONSET @@ -688,7 +688,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): timeline = np.convolve(timeline, hrf)[0:len(timeline)] if isdefined(self.inputs.use_temporal_deriv) and \ self.inputs.use_temporal_deriv: - #create temporal deriv + # create temporal deriv timederiv = np.concatenate(([0], np.diff(timeline))) if bplot: plt.subplot(4, 1, 3) @@ -782,7 +782,7 @@ def _generate_clustered_design(self, infolist): if hasattr(infoout[i], 'regressors') and infoout[i].regressors: if not infoout[i].regressor_names: infoout[i].regressor_names = \ - ['R%d'%j for j in 
range(len(infoout[i].regressors))] + ['R%d' % j for j in range(len(infoout[i].regressors))] else: infoout[i].regressors = [] infoout[i].regressor_names = [] @@ -798,7 +798,7 @@ def _generate_design(self, infolist=None): else: infolist = gen_info(self.inputs.event_files) sparselist = self._generate_clustered_design(infolist) - super(SpecifySparseModel, self)._generate_design(infolist = sparselist) + super(SpecifySparseModel, self)._generate_design(infolist=sparselist) def _list_outputs(self): outputs = self._outputs().get() diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index b565a23d1a..6f90c70e75 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -31,8 +31,8 @@ from ..external.six import string_types from ..interfaces.base import (BaseInterface, traits, InputMultiPath, - OutputMultiPath, TraitedSpec, File, - BaseInterfaceInputSpec, isdefined) + OutputMultiPath, TraitedSpec, File, + BaseInterfaceInputSpec, isdefined) from ..utils.filemanip import filename_to_list, save_json, split_filename from ..utils.misc import find_indices from .. import logging, config @@ -55,7 +55,7 @@ def _get_affine_matrix(params, source): # nipy does not store typical euler angles, use nipy to convert from nipy.algorithms.registration import to_matrix44 return to_matrix44(params) - #process for FSL, SPM, AFNI and FSFAST + # process for FSL, SPM, AFNI and FSFAST rotfunc = lambda x: np.array([[np.cos(x), np.sin(x)], [-np.sin(x), np.cos(x)]]) q = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0]) @@ -155,19 +155,19 @@ def _nanmean(a, axis=None): class ArtifactDetectInputSpec(BaseInterfaceInputSpec): realigned_files = InputMultiPath(File(exists=True), - desc="Names of realigned functional data files", + desc="Names of realigned functional data files", mandatory=True) realignment_parameters = InputMultiPath(File(exists=True), mandatory=True, - desc=("Names of realignment parameters" - "corresponding to the functional data files")) + desc=("Names of realignment parameters " + "corresponding to the functional data files")) parameter_source = traits.Enum("SPM", "FSL", "AFNI", "NiPy", "FSFAST", desc="Source of movement parameters", mandatory=True) use_differences = traits.ListBool([True, False], minlen=2, maxlen=2, usedefault=True, - desc=("Use differences between successive motion (first element)" - "and intensity paramter (second element) estimates in order" - "to determine outliers. (default is [True, False])")) + desc=("Use differences between successive motion (first element) " + "and intensity parameter (second element) estimates in order " + "to determine outliers. 
(default is [True, False])")) use_norm = traits.Bool(True, requires=['norm_threshold'], desc=("Uses a composite of the motion parameters in " "order to determine outliers."), @@ -178,21 +178,21 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): xor=['rotation_threshold', 'translation_threshold']) rotation_threshold = traits.Float(mandatory=True, xor=['norm_threshold'], - desc=("Threshold (in radians) to use to detect rotation-related " - "outliers")) + desc=("Threshold (in radians) to use to detect rotation-related " + "outliers")) translation_threshold = traits.Float(mandatory=True, xor=['norm_threshold'], - desc=("Threshold (in mm) to use to detect translation-related " - "outliers")) + desc=("Threshold (in mm) to use to detect translation-related " + "outliers")) zintensity_threshold = traits.Float(mandatory=True, - desc=("Intensity Z-threshold use to detection images that deviate " - "from the mean")) + desc=("Intensity Z-threshold use to detection images that deviate " + "from the mean")) mask_type = traits.Enum('spm_global', 'file', 'thresh', - desc=("Type of mask that should be used to mask the functional " - "data. *spm_global* uses an spm_global like calculation to " - "determine the brain mask. *file* specifies a brain mask " - "file (should be an image file consisting of 0s and 1s). " - "*thresh* specifies a threshold to use. By default all voxels" - "are used, unless one of these mask types are defined."), + desc=("Type of mask that should be used to mask the functional " + "data. *spm_global* uses an spm_global like calculation to " + "determine the brain mask. *file* specifies a brain mask " + "file (should be an image file consisting of 0s and 1s). " + "*thresh* specifies a threshold to use. By default all voxels" + "are used, unless one of these mask types are defined."), mandatory=True) mask_file = File(exists=True, desc="Mask file to be used if mask_type is 'file'.") @@ -207,10 +207,10 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): desc="file type of the outlier plot", usedefault=True) bound_by_brainmask = traits.Bool(False, desc=("use the brain mask to " - "determine bounding box" - "for composite norm (works" - "for SPM and Nipy - currently" - "inaccurate for FSL, AFNI"), + "determine bounding box" + "for composite norm (works" + "for SPM and Nipy - currently" + "inaccurate for FSL, AFNI"), usedefault=True) global_threshold = traits.Float(8.0, desc=("use this threshold when mask " "type equal's spm_global"), @@ -219,28 +219,28 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): class ArtifactDetectOutputSpec(TraitedSpec): outlier_files = OutputMultiPath(File(exists=True), - desc=("One file for each functional run containing a list of " - "0-based indices corresponding to outlier volumes")) + desc=("One file for each functional run containing a list of " + "0-based indices corresponding to outlier volumes")) intensity_files = OutputMultiPath(File(exists=True), - desc=("One file for each functional run containing the global " - "intensity values determined from the brainmask")) + desc=("One file for each functional run containing the global " + "intensity values determined from the brainmask")) norm_files = OutputMultiPath(File, - desc=("One file for each functional run containing the composite " - "norm")) + desc=("One file for each functional run containing the composite " + "norm")) statistic_files = OutputMultiPath(File(exists=True), - desc=("One file for each functional run containing information " - "about the different types of artifacts and 
if design info is" - " provided then details of stimulus correlated motion and a " - "listing or artifacts by event type.")) + desc=("One file for each functional run containing information " + "about the different types of artifacts and if design info is" + " provided then details of stimulus correlated motion and a " + "listing or artifacts by event type.")) plot_files = OutputMultiPath(File, - desc=("One image file for each functional run containing the " - "detected outliers")) + desc=("One image file for each functional run containing the " + "detected outliers")) mask_files = OutputMultiPath(File, - desc=("One image file for each functional run containing the mask" - "used for global signal calculation")) + desc=("One image file for each functional run containing the mask" + "used for global signal calculation")) displacement_files = OutputMultiPath(File, - desc=("One image file for each functional run containing the voxel" - "displacement timeseries")) + desc=("One image file for each functional run containing the voxel" + "displacement timeseries")) class ArtifactDetect(BaseInterface): @@ -377,7 +377,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): vol = data[:, :, :, t0] # Use an SPM like approach mask_tmp = vol > \ - (_nanmean(vol) / self.inputs.global_threshold) + (_nanmean(vol) / self.inputs.global_threshold) mask = mask * mask_tmp for t0 in range(timepoints): vol = data[:, :, :, t0] @@ -391,7 +391,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): for t0 in range(timepoints): vol = data[:, :, :, t0] mask_tmp = vol > \ - (_nanmean(vol) / self.inputs.global_threshold) + (_nanmean(vol) / self.inputs.global_threshold) mask[:, :, :, t0] = mask_tmp g[t0] = np.nansum(vol * mask_tmp) / np.nansum(mask_tmp) elif masktype == 'file': # uses a mask image to determine intensity @@ -509,25 +509,25 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): motion_outliers)), 'motion_outliers': len(np.setdiff1d(motion_outliers, iidx)), }, - {'motion': [{'using differences': self.inputs.use_differences[0]}, - {'mean': np.mean(mc_in, axis=0).tolist(), - 'min': np.min(mc_in, axis=0).tolist(), - 'max': np.max(mc_in, axis=0).tolist(), - 'std': np.std(mc_in, axis=0).tolist()}, - ]}, - {'intensity': [{'using differences': self.inputs.use_differences[1]}, - {'mean': np.mean(gz, axis=0).tolist(), - 'min': np.min(gz, axis=0).tolist(), - 'max': np.max(gz, axis=0).tolist(), - 'std': np.std(gz, axis=0).tolist()}, - ]}, + {'motion': [{'using differences': self.inputs.use_differences[0]}, + {'mean': np.mean(mc_in, axis=0).tolist(), + 'min': np.min(mc_in, axis=0).tolist(), + 'max': np.max(mc_in, axis=0).tolist(), + 'std': np.std(mc_in, axis=0).tolist()}, + ]}, + {'intensity': [{'using differences': self.inputs.use_differences[1]}, + {'mean': np.mean(gz, axis=0).tolist(), + 'min': np.min(gz, axis=0).tolist(), + 'max': np.max(gz, axis=0).tolist(), + 'std': np.std(gz, axis=0).tolist()}, + ]}, ] if self.inputs.use_norm: stats.insert(3, {'motion_norm': - {'mean': np.mean(normval, axis=0).tolist(), - 'min': np.min(normval, axis=0).tolist(), - 'max': np.max(normval, axis=0).tolist(), - 'std': np.std(normval, axis=0).tolist(), + {'mean': np.mean(normval, axis=0).tolist(), + 'min': np.min(normval, axis=0).tolist(), + 'max': np.max(normval, axis=0).tolist(), + 'std': np.std(normval, axis=0).tolist(), }}) save_json(statsfile, stats) @@ -544,19 +544,19 @@ def _run_interface(self, runtime): class StimCorrInputSpec(BaseInterfaceInputSpec): 
realignment_parameters = InputMultiPath(File(exists=True), mandatory=True, - desc=('Names of realignment parameters corresponding to the functional ' - 'data files')) + desc=('Names of realignment parameters corresponding to the functional ' + 'data files')) intensity_values = InputMultiPath(File(exists=True), mandatory=True, - desc='Name of file containing intensity values') + desc='Name of file containing intensity values') spm_mat_file = File(exists=True, mandatory=True, desc='SPM mat file (use pre-estimate SPM.mat file)') concatenated_design = traits.Bool(mandatory=True, - desc='state if the design matrix contains concatenated sessions') + desc='state if the design matrix contains concatenated sessions') class StimCorrOutputSpec(TraitedSpec): stimcorr_files = OutputMultiPath(File(exists=True), - desc='List of files containing correlation values') + desc='List of files containing correlation values') class StimulusCorrelation(BaseInterface): diff --git a/nipype/algorithms/tests/test_errormap.py b/nipype/algorithms/tests/test_errormap.py index 46f23a9ed9..3dea795ee0 100644 --- a/nipype/algorithms/tests/test_errormap.py +++ b/nipype/algorithms/tests/test_errormap.py @@ -8,13 +8,14 @@ from tempfile import mkdtemp import os + def test_errormap(): tempdir = mkdtemp() # Single-Spectual # Make two fake 2*2*2 voxel volumes - volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # John von Neumann's birthday - volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) # Alan Turing's birthday + volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # John von Neumann's birthday + volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) # Alan Turing's birthday mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]]) img1 = nib.Nifti1Image(volume1, np.eye(4)) @@ -25,7 +26,6 @@ def test_errormap(): nib.save(img2, os.path.join(tempdir, 'alan.nii.gz')) nib.save(maskimg, os.path.join(tempdir, 'mask.nii.gz')) - # Default metric errmap = ErrorMap() errmap.inputs.in_tst = os.path.join(tempdir, 'von.nii.gz') @@ -49,17 +49,17 @@ def test_errormap(): result = errmap.run() yield assert_equal, result.outputs.distance, 1.0 - ## Multi-Spectual - volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) # Raymond Vahan Damadian's birthday + # Multi-Spectual + volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) # Raymond Vahan Damadian's birthday - msvolume1 = np.zeros(shape=(2,2,2,2)) - msvolume1[:,:,:,0] = volume1 - msvolume1[:,:,:,1] = volume3 + msvolume1 = np.zeros(shape=(2, 2, 2, 2)) + msvolume1[:, :, :, 0] = volume1 + msvolume1[:, :, :, 1] = volume3 msimg1 = nib.Nifti1Image(msvolume1, np.eye(4)) - msvolume2 = np.zeros(shape=(2,2,2,2)) - msvolume2[:,:,:,0] = volume3 - msvolume2[:,:,:,1] = volume1 + msvolume2 = np.zeros(shape=(2, 2, 2, 2)) + msvolume2[:, :, :, 0] = volume3 + msvolume2[:, :, :, 1] = volume1 msimg2 = nib.Nifti1Image(msvolume2, np.eye(4)) nib.save(msimg1, os.path.join(tempdir, 'von-ray.nii.gz')) diff --git a/nipype/algorithms/tests/test_icc_anova.py b/nipype/algorithms/tests/test_icc_anova.py index 21b6db7eca..78f5f515e6 100644 --- a/nipype/algorithms/tests/test_icc_anova.py +++ b/nipype/algorithms/tests/test_icc_anova.py @@ -5,7 +5,7 @@ def test_ICC_rep_anova(): - #see table 2 in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in + # see table 2 in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in # Assessing Rater Reliability". 
Psychological Bulletin 86 (2): 420-428 Y = np.array([[9, 2, 5, 8], [6, 1, 3, 2], @@ -14,8 +14,8 @@ def test_ICC_rep_anova(): [10, 5, 6, 9], [6, 2, 4, 7]]) - icc, r_var, e_var , _, dfc, dfe = ICC_rep_anova(Y) - #see table 4 + icc, r_var, e_var, _, dfc, dfe = ICC_rep_anova(Y) + # see table 4 yield assert_equal, round(icc, 2), 0.71 yield assert_equal, dfc, 3 yield assert_equal, dfe, 15 diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 53c4c5fdc2..031881b422 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -22,6 +22,7 @@ except ImportError: pass + @skipif(notvtk) def test_ident_distances(): tempdir = mkdtemp() diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py index 4ebd7bd030..05d42db4a3 100644 --- a/nipype/algorithms/tests/test_modelgen.py +++ b/nipype/algorithms/tests/test_modelgen.py @@ -52,7 +52,7 @@ def test_modelgen1(): s.inputs.input_units = 'scans' res = s.run() yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][0]['duration']), np.array([6., 6.]) - yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][1]['duration']), np.array([6.,]) + yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][1]['duration']), np.array([6., ]) yield assert_almost_equal, np.array(res.outputs.session_info[1]['cond'][1]['duration']), np.array([6., 6.]) rmtree(tempdir) @@ -104,9 +104,9 @@ def test_modelgen_spm_concat(): s.inputs.subject_info = deepcopy(info) res = s.run() yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][0]['duration']), np.array([1., 1.]) - yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][1]['duration']), np.array([1.,]) + yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][1]['duration']), np.array([1., ]) yield assert_almost_equal, np.array(res.outputs.session_info[1]['cond'][1]['duration']), np.array([1., 1.]) - yield assert_almost_equal, np.array(res.outputs.session_info[2]['cond'][1]['duration']), np.array([1.,]) + yield assert_almost_equal, np.array(res.outputs.session_info[2]['cond'][1]['duration']), np.array([1., ]) # Test case for variable number of events in concatenated runs, sometimes unique. 
s.inputs.concatenate_runs = True info = [Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]), diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py index f74c8f4257..56bd4e7ebf 100644 --- a/nipype/algorithms/tests/test_normalize_tpms.py +++ b/nipype/algorithms/tests/test_normalize_tpms.py @@ -7,8 +7,8 @@ from shutil import rmtree from tempfile import mkdtemp -from nipype.testing import (assert_equal,assert_raises, - assert_almost_equal,example_data ) +from nipype.testing import (assert_equal, assert_raises, + assert_almost_equal, example_data) import numpy as np import nibabel as nb @@ -16,12 +16,13 @@ from nipype.algorithms.misc import normalize_tpms + def test_normalize_tpms(): tempdir = mkdtemp() - in_mask = example_data('tpms_msk.nii.gz' ) - mskdata = nb.load( in_mask ).get_data() - mskdata[mskdata>0.0] = 1.0 + in_mask = example_data('tpms_msk.nii.gz') + mskdata = nb.load(in_mask).get_data() + mskdata[mskdata > 0.0] = 1.0 mapdata = [] in_files = [] @@ -29,27 +30,27 @@ def test_normalize_tpms(): for i in range(3): mapname = example_data('tpm_%02d.nii.gz' % i) - filename = os.path.join(tempdir, 'modtpm_%02d.nii.gz' % i ) - out_files.append(os.path.join(tempdir, 'normtpm_%02d.nii.gz' % i )) + filename = os.path.join(tempdir, 'modtpm_%02d.nii.gz' % i) + out_files.append(os.path.join(tempdir, 'normtpm_%02d.nii.gz' % i)) im = nb.load(mapname) data = im.get_data() - mapdata.append( data.copy() ) + mapdata.append(data.copy()) nb.Nifti1Image(2.0 * (data * mskdata), im.get_affine(), - im.get_header() ).to_filename(filename) - in_files.append( filename ) + im.get_header()).to_filename(filename) + in_files.append(filename) - normalize_tpms( in_files, in_mask, out_files=out_files ) + normalize_tpms(in_files, in_mask, out_files=out_files) sumdata = np.zeros_like(mskdata) - for i,tstfname in enumerate( out_files ): - normdata = nb.load( tstfname ).get_data() - sumdata+=normdata - yield assert_equal, np.all( normdata[mskdata==0]==0 ), True - yield assert_equal, np.allclose( normdata, mapdata[i] ), True + for i, tstfname in enumerate(out_files): + normdata = nb.load(tstfname).get_data() + sumdata += normdata + yield assert_equal, np.all(normdata[mskdata == 0] == 0), True + yield assert_equal, np.allclose(normdata, mapdata[i]), True - yield assert_equal, np.allclose(sumdata[sumdata>0.0], 1.0 ), True + yield assert_equal, np.allclose(sumdata[sumdata > 0.0], 1.0), True rmtree(tempdir) diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index 4da6980a73..d3b842459d 100644 --- a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -26,6 +26,7 @@ ################################################################################ # PipeFunc object: callable interface to nipype.interface objects + class PipeFunc(object): """ Callable interface to nipype.interface objects @@ -51,7 +52,7 @@ def __init__(self, interface, base_dir, callback=None): is called. """ if not (isinstance(interface, type) - and issubclass(interface, BaseInterface)): + and issubclass(interface, BaseInterface)): raise ValueError('the interface argument should be a nipype ' 'interface class, but %s (type %s) was passed.' 
% (interface, type(interface))) @@ -61,7 +62,7 @@ def __init__(self, interface, base_dir, callback=None): raise ValueError('base_dir should be an existing directory') self.base_dir = base_dir doc = '%s\n%s' % (self.interface.__doc__, - self.interface.help(returnhelp=True)) + self.interface.help(returnhelp=True)) self.__doc__ = doc self.callback = callback @@ -93,14 +94,15 @@ def __call__(self, **kwargs): def __repr__(self): return '%s(%s.%s, base_dir=%s)' % (self.__class__.__name__, - self.interface.__module__, - self.interface.__name__, - self.base_dir) + self.interface.__module__, + self.interface.__name__, + self.base_dir) ################################################################################ # Memory manager: provide some tracking about what is computed when, to # be able to flush the disk + def read_log(filename, run_dict=None): if run_dict is None: run_dict = dict() @@ -225,7 +227,7 @@ def _log_name(self, dir_name, job_name): # immediately to avoid race conditions in parallel computing: # file appends are atomic open(os.path.join(base_dir, 'log.current'), - 'a').write('%s/%s\n' % (dir_name, job_name)) + 'a').write('%s/%s\n' % (dir_name, job_name)) t = time.localtime() year_dir = os.path.join(base_dir, 'log.%i' % t.tm_year) try: @@ -238,7 +240,7 @@ def _log_name(self, dir_name, job_name): except OSError: "Dir exists" open(os.path.join(month_dir, '%02i.log' % t.tm_mday), - 'a').write('%s/%s\n' % (dir_name, job_name)) + 'a').write('%s/%s\n' % (dir_name, job_name)) def clear_previous_runs(self, warn=True): """ Remove all the cache that where not used in the latest run of @@ -274,7 +276,7 @@ def clear_runs_since(self, day=None, month=None, year=None, warn=True): year = year if year is not None else t.tm_year base_dir = self.base_dir cut_off_file = '%s/log.%i/%02i/%02i.log' % (base_dir, - year, month, day) + year, month, day) logs_to_flush = list() recent_runs = dict() for log_name in glob.glob('%s/log.*/*/*.log' % base_dir): @@ -297,5 +299,5 @@ def _clear_all_but(self, runs, warn=True): def __repr__(self): return '%s(base_dir=%s)' % (self.__class__.__name__, - self.base_dir) + self.base_dir) diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py index ec8eaab64d..4a02532273 100644 --- a/nipype/external/cloghandler.py +++ b/nipype/external/cloghandler.py @@ -65,7 +65,6 @@ codecs = None - # Question/TODO: Should we have a fallback mode if we can't load portalocker / # we should still be better off than with the standard RotattingFileHandler # class, right? We do some rename checking... that should prevent some file @@ -156,7 +155,7 @@ def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, "Use 'supress_abs_warn=True' to hide this message.") try: BaseRotatingHandler.__init__(self, filename, mode, encoding) - except TypeError: # Due to a different logging release without encoding support (Python 2.4.1 and earlier?) + except TypeError: # Due to a different logging release without encoding support (Python 2.4.1 and earlier?) 
BaseRotatingHandler.__init__(self, filename, mode) self.encoding = encoding @@ -264,7 +263,7 @@ def doRollover(self): # Attempt to rename logfile to tempname: There is a slight race-condition here, but it seems unavoidable tmpname = None while not tmpname or os.path.exists(tmpname): - tmpname = "%s.rotate.%08d" % (self.baseFilename, randint(0,99999999)) + tmpname = "%s.rotate.%08d" % (self.baseFilename, randint(0, 99999999)) try: # Do a rename test to determine if we can successfully rename the log file os.rename(self.baseFilename, tmpname) @@ -285,7 +284,7 @@ def doRollover(self): sfn = "%s.%d" % (self.baseFilename, i) dfn = "%s.%d" % (self.baseFilename, i + 1) if os.path.exists(sfn): - #print "%s -> %s" % (sfn, dfn) + # print "%s -> %s" % (sfn, dfn) if os.path.exists(dfn): os.remove(dfn) os.rename(sfn, dfn) @@ -293,7 +292,7 @@ def doRollover(self): if os.path.exists(dfn): os.remove(dfn) os.rename(tmpname, dfn) - #print "%s -> %s" % (self.baseFilename, dfn) + # print "%s -> %s" % (self.baseFilename, dfn) self._degrade(False, "Rotation completed") finally: self._openFile(self.mode) @@ -319,7 +318,7 @@ def shouldRollover(self, record): def _shouldRollover(self): if self.maxBytes > 0: # are we rolling over? try: - self.stream.seek(0, 2) #due to non-posix-compliant Windows feature + self.stream.seek(0, 2) # due to non-posix-compliant Windows feature except IOError: return True if self.stream.tell() >= self.maxBytes: diff --git a/nipype/external/portalocker.py b/nipype/external/portalocker.py index 25860c30e1..40b12b3cf3 100644 --- a/nipype/external/portalocker.py +++ b/nipype/external/portalocker.py @@ -63,6 +63,7 @@ import os + class LockException(Exception): # Error codes: LOCK_FAILED = 1 @@ -72,7 +73,7 @@ class LockException(Exception): import win32file import pywintypes LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK - LOCK_SH = 0 # the default + LOCK_SH = 0 # the default LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY # is there any reason not to reuse the following structure? __overlapped = pywintypes.OVERLAPPED() @@ -123,7 +124,6 @@ def unlock(file): fcntl.flock(file.fileno(), fcntl.LOCK_UN) - if __name__ == '__main__': from time import time, strftime, localtime import sys @@ -133,7 +133,7 @@ def unlock(file): portalocker.lock(log, portalocker.LOCK_EX) timestamp = strftime('%m/%d/%Y %H:%M:%S\n', localtime(time())) - log.write( timestamp ) + log.write(timestamp) print('Wrote lines. 
Hit enter to release lock.') dummy = sys.stdin.readline() diff --git a/nipype/external/provcopy.py b/nipype/external/provcopy.py index cddf96c64f..2a55f3b4ce 100644 --- a/nipype/external/provcopy.py +++ b/nipype/external/provcopy.py @@ -186,6 +186,7 @@ # Datatypes attr2rdf = lambda attr: PROV[PROV_ID_ATTRIBUTES_MAP[attr].split('prov:')[1]].rdf_representation() + def _parse_xsd_dateTime(s): return dateutil.parser.parse(s) @@ -318,7 +319,6 @@ def json_representation(self): # Assuming it is a valid identifier return {'$': str(self._value), 'type': self._datatype.get_uri()} - def rdf_representation(self): if self._langtag: # a langtag can only goes with string @@ -326,6 +326,7 @@ def rdf_representation(self): else: return RDFLiteral(self._value, datatype=self._datatype.get_uri()) + class Identifier(object): def __init__(self, uri): self._uri = str(uri) # Ensure this is a unicode string @@ -354,6 +355,7 @@ def json_representation(self): def rdf_representation(self): return URIRef(self.get_uri()) + class QName(Identifier): def __init__(self, namespace, localpart): self._namespace = namespace @@ -826,8 +828,8 @@ def rdf(self, graph=None): graph = Graph() pred = PROV[PROV_N_MAP[self.get_type()]].rdf_representation() items = [] - subj=None - obj=None + subj = None + obj = None for idx, (attr, value) in enumerate(self._attributes.items()): if idx == 0: subj = value.get_identifier().rdf_representation() @@ -873,6 +875,7 @@ def rdf(self, graph=None): # ## Component 1: Entities and Activities + class ProvEntity(ProvElement): def get_type(self): return PROV_REC_ENTITY @@ -880,6 +883,7 @@ def get_type(self): def get_prov_type(self): return PROV['Entity'] + class ProvActivity(ProvElement): def get_type(self): return PROV_REC_ACTIVITY @@ -1210,13 +1214,13 @@ def add_attributes(self, attributes, extra_attributes): specificEntity = self.required_attribute(attributes, PROV_ATTR_SPECIFIC_ENTITY, (ProvEntity, ProvAgent)) generalEntity = self.required_attribute(attributes, PROV_ATTR_GENERAL_ENTITY, Identifier) bundle = self.required_attribute(attributes, PROV_ATTR_BUNDLE, Identifier) - #======================================================================= + # ======================================================================= # # This is disabled so that mentionOf can refer to bundle that is not defined in the same place # bundle = self.required_attribute(attributes, PROV_ATTR_BUNDLE, ProvBundle) # # Check if generalEntity is in the bundle # if generalEntity.get_bundle() is not bundle: # raise ProvExceptionContraint(PROV_REC_MENTION, generalEntity, bundle, 'The generalEntity must belong to the bundle') - #======================================================================= + # ======================================================================= attributes = OrderedDict() attributes[PROV_ATTR_SPECIFIC_ENTITY] = specificEntity @@ -1246,24 +1250,24 @@ def add_attributes(self, attributes, extra_attributes): # Class mappings from PROV record type PROV_REC_CLS = { - PROV_REC_ENTITY : ProvEntity, - PROV_REC_ACTIVITY : ProvActivity, - PROV_REC_GENERATION : ProvGeneration, - PROV_REC_USAGE : ProvUsage, - PROV_REC_COMMUNICATION : ProvCommunication, - PROV_REC_START : ProvStart, - PROV_REC_END : ProvEnd, - PROV_REC_INVALIDATION : ProvInvalidation, - PROV_REC_DERIVATION : ProvDerivation, - PROV_REC_AGENT : ProvAgent, - PROV_REC_ATTRIBUTION : ProvAttribution, - PROV_REC_ASSOCIATION : ProvAssociation, - PROV_REC_DELEGATION : ProvDelegation, - PROV_REC_INFLUENCE : ProvInfluence, - PROV_REC_SPECIALIZATION : 
ProvSpecialization, - PROV_REC_ALTERNATE : ProvAlternate, - PROV_REC_MENTION : ProvMention, - PROV_REC_MEMBERSHIP : ProvMembership, + PROV_REC_ENTITY: ProvEntity, + PROV_REC_ACTIVITY: ProvActivity, + PROV_REC_GENERATION: ProvGeneration, + PROV_REC_USAGE: ProvUsage, + PROV_REC_COMMUNICATION: ProvCommunication, + PROV_REC_START: ProvStart, + PROV_REC_END: ProvEnd, + PROV_REC_INVALIDATION: ProvInvalidation, + PROV_REC_DERIVATION: ProvDerivation, + PROV_REC_AGENT: ProvAgent, + PROV_REC_ATTRIBUTION: ProvAttribution, + PROV_REC_ASSOCIATION: ProvAssociation, + PROV_REC_DELEGATION: ProvDelegation, + PROV_REC_INFLUENCE: ProvInfluence, + PROV_REC_SPECIALIZATION: ProvSpecialization, + PROV_REC_ALTERNATE: ProvAlternate, + PROV_REC_MENTION: ProvMention, + PROV_REC_MEMBERSHIP: ProvMembership, } diff --git a/nipype/external/six.py b/nipype/external/six.py index ffa3fe166a..876aed3945 100644 --- a/nipype/external/six.py +++ b/nipype/external/six.py @@ -88,7 +88,7 @@ def __init__(self, name): def __get__(self, obj, tp): result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. + setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. @@ -166,6 +166,7 @@ class _SixMetaPathImporter(object): This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ + def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} @@ -595,6 +596,7 @@ def iterlists(d, **kw): if PY3: def b(s): return s.encode("latin-1") + def u(s): return s unichr = chr @@ -617,12 +619,15 @@ def int2byte(i): def b(s): return s # Workaround for standalone backslash + def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr + def byte2int(bs): return ord(bs[0]) + def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) @@ -650,7 +655,6 @@ def assertRegex(self, *args, **kwargs): if PY3: exec_ = getattr(moves.builtins, "exec") - def reraise(tp, value, tb=None): if value is None: value = tp() @@ -671,7 +675,6 @@ def exec_(_code_, _globs_=None, _locs_=None): _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") - exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) @@ -699,13 +702,14 @@ def print_(*args, **kwargs): fp = kwargs.pop("file", sys.stdout) if fp is None: return + def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): + isinstance(data, unicode) and + fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" @@ -748,6 +752,7 @@ def write(data): write(end) if sys.version_info[:2] < (3, 3): _print = print_ + def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) @@ -768,6 +773,7 @@ def wrapper(f): else: wraps = functools.wraps + def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy @@ -830,7 +836,7 @@ def python_2_unicode_compatible(klass): # the six meta path importer, since the other six instance will have # inserted an importer with different class. 
if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): + importer.name == __name__): del sys.meta_path[i] break del i, importer diff --git a/nipype/fixes/__init__.py b/nipype/fixes/__init__.py index c3cb9c3de3..5038b83ab7 100644 --- a/nipype/fixes/__init__.py +++ b/nipype/fixes/__init__.py @@ -6,6 +6,7 @@ # Cache for the actual testing functin _tester = None + def test(*args, **kwargs): """ test function for fixes subpackage diff --git a/nipype/fixes/numpy/testing/noseclasses.py b/nipype/fixes/numpy/testing/noseclasses.py index a7b4c6bad0..2a1c6900c9 100644 --- a/nipype/fixes/numpy/testing/noseclasses.py +++ b/nipype/fixes/numpy/testing/noseclasses.py @@ -22,9 +22,11 @@ # Some of the classes in this module begin with 'Numpy' to clearly distinguish # them from the plethora of very similar names from nose/unittest/doctest -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Modified version of the one in the stdlib, that fixes a python bug (doctests # not found in extension modules, http://bugs.python.org/issue3158) + + class NumpyDocTestFinder(doctest.DocTestFinder): def _from_module(self, module, object): @@ -33,34 +35,34 @@ def _from_module(self, module, object): module. """ if module is None: - #print '_fm C1' # dbg + # print '_fm C1' # dbg return True elif inspect.isfunction(object): - #print '_fm C2' # dbg + # print '_fm C2' # dbg return module.__dict__ is object.__globals__ elif inspect.isbuiltin(object): - #print '_fm C2-1' # dbg + # print '_fm C2-1' # dbg return module.__name__ == object.__module__ elif inspect.isclass(object): - #print '_fm C3' # dbg + # print '_fm C3' # dbg return module.__name__ == object.__module__ elif inspect.ismethod(object): # This one may be a bug in cython that fails to correctly set the # __module__ attribute of methods, but since the same error is easy # to make by extension code writers, having this safety in place # isn't such a bad idea - #print '_fm C3-1' # dbg + # print '_fm C3-1' # dbg return module.__name__ == object.__self__.__class__.__module__ elif inspect.getmodule(object) is not None: - #print '_fm C4' # dbg - #print 'C4 mod',module,'obj',object # dbg + # print '_fm C4' # dbg + # print 'C4 mod',module,'obj',object # dbg return module is inspect.getmodule(object) elif hasattr(object, '__module__'): - #print '_fm C5' # dbg + # print '_fm C5' # dbg return module.__name__ == object.__module__ elif isinstance(object, property): - #print '_fm C6' # dbg - return True # [XX] no way not be sure. + # print '_fm C6' # dbg + return True # [XX] no way not be sure. else: raise ValueError("object must be a class or function") @@ -70,7 +72,7 @@ def _find(self, tests, obj, name, module, source_lines, globs, seen): add them to `tests`. """ - doctest.DocTestFinder._find(self,tests, obj, name, module, + doctest.DocTestFinder._find(self, tests, obj, name, module, source_lines, globs, seen) # Below we re-run pieces of the above method with manual modifications, @@ -79,25 +81,24 @@ def _find(self, tests, obj, name, module, source_lines, globs, seen): # Local shorthands from inspect import isroutine, isclass, ismodule, isfunction, \ - ismethod + ismethod # Look for tests in a module's contained objects. 
if ismodule(obj) and self._recurse: for valname, val in list(obj.__dict__.items()): valname1 = '%s.%s' % (name, valname) - if ( (isroutine(val) or isclass(val)) - and self._from_module(module, val) ): + if ((isroutine(val) or isclass(val)) + and self._from_module(module, val)): self._find(tests, val, valname1, module, source_lines, globs, seen) - # Look for tests in a class's contained objects. if isclass(obj) and self._recurse: - #print 'RECURSE into class:',obj # dbg + # print 'RECURSE into class:',obj # dbg for valname, val in list(obj.__dict__.items()): - #valname1 = '%s.%s' % (name, valname) # dbg - #print 'N',name,'VN:',valname,'val:',str(val)[:77] # dbg + # valname1 = '%s.%s' % (name, valname) # dbg + # print 'N',name,'VN:',valname,'val:',str(val)[:77] # dbg # Special handling for staticmethod/classmethod. if isinstance(val, staticmethod): val = getattr(obj, valname) @@ -106,8 +107,8 @@ def _find(self, tests, obj, name, module, source_lines, globs, seen): # Recurse to methods, properties, and nested classes. if ((isfunction(val) or isclass(val) or - ismethod(val) or isinstance(val, property)) and - self._from_module(module, val)): + ismethod(val) or isinstance(val, property)) and + self._from_module(module, val)): valname = '%s.%s' % (name, valname) self._find(tests, val, valname, module, source_lines, globs, seen) @@ -128,16 +129,16 @@ def check_output(self, want, got, optionflags): # bigendian machines don't fail all the tests (and there are # actually some bigendian examples in the doctests). Let's try # making them all little endian - got = got.replace("'>","'<") - want= want.replace("'>","'<") + got = got.replace("'>", "'<") + want = want.replace("'>", "'<") # try to normalize out 32 and 64 bit default int sizes - for sz in [4,8]: - got = got.replace("'=%s" % NIBABEL_MIN_VERSION, - "networkx>=%s" % NETWORKX_MIN_VERSION, - "numpy>=%s" % NUMPY_MIN_VERSION, - "python-dateutil>=%s" % DATEUTIL_MIN_VERSION, - "scipy>=%s" % SCIPY_MIN_VERSION, - "traits>=%s" % TRAITS_MIN_VERSION, - "nose>=%s" % NOSE_MIN_VERSION, - "future>=%s" % FUTURE_MIN_VERSION, - "simplejson>=%s" % SIMPLEJSON_MIN_VERSION] -STATUS = 'stable' +NAME = 'nipype' +MAINTAINER = "nipype developers" +MAINTAINER_EMAIL = "neuroimaging@python.org" +DESCRIPTION = description +LONG_DESCRIPTION = long_description +URL = "http://nipy.org/nipype" +DOWNLOAD_URL = "http://github.com/nipy/nipype/archives/master" +LICENSE = "BSD license" +CLASSIFIERS = CLASSIFIERS +AUTHOR = "nipype developers" +AUTHOR_EMAIL = "neuroimaging@python.org" +PLATFORMS = "OS Independent" +MAJOR = _version_major +MINOR = _version_minor +MICRO = _version_micro +ISRELEASE = _version_extra == '' +VERSION = __version__ +PROVIDES = ['nipype'] +REQUIRES = ["nibabel>=%s" % NIBABEL_MIN_VERSION, + "networkx>=%s" % NETWORKX_MIN_VERSION, + "numpy>=%s" % NUMPY_MIN_VERSION, + "python-dateutil>=%s" % DATEUTIL_MIN_VERSION, + "scipy>=%s" % SCIPY_MIN_VERSION, + "traits>=%s" % TRAITS_MIN_VERSION, + "nose>=%s" % NOSE_MIN_VERSION, + "future>=%s" % FUTURE_MIN_VERSION, + "simplejson>=%s" % SIMPLEJSON_MIN_VERSION] +STATUS = 'stable' diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index c702c8cc78..da77dc7728 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -74,7 +74,7 @@ def outputtype(cls): ------- None """ - #warn(('AFNI has no environment variable that sets filetype ' + # warn(('AFNI has no environment variable that sets filetype ' # 'Nipype uses NIFTI_GZ as default')) return 'AFNI' @@ -100,6 +100,7 @@ class 
AFNICommandInputSpec(CommandLineInputSpec): argstr='-prefix %s', name_source=["in_file"]) + class AFNICommandOutputSpec(TraitedSpec): out_file = File(desc='output file', exists=True) @@ -110,7 +111,6 @@ class AFNICommand(CommandLine): input_spec = AFNICommandInputSpec _outputtype = None - def __init__(self, **inputs): super(AFNICommand, self).__init__(**inputs) self.inputs.on_trait_change(self._output_update, 'outputtype') @@ -156,7 +156,7 @@ def _list_outputs(self): if out_names: for name in out_names: if outputs[name]: - _,_,ext = split_filename(outputs[name]) + _, _, ext = split_filename(outputs[name]) if ext == "": outputs[name] = outputs[name] + "+orig.BRIK" return outputs diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 40a6f40bc4..07877db098 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -109,7 +109,7 @@ class TShiftInputSpec(AFNICommandInputSpec): ' default = Fourier', argstr='-%s') tpattern = traits.Str(desc='use specified slice time pattern rather than one in header', - argstr='-tpattern %s') + argstr='-tpattern %s') rlt = traits.Bool(desc='Before shifting, remove the mean and linear trend', argstr="-rlt") @@ -179,7 +179,7 @@ class RefitInputSpec(CommandLineInputSpec): space = traits.Enum('TLRC', 'MNI', 'ORIG', argstr='-space %s', desc='Associates the dataset with a specific' + - ' template type, e.g. TLRC, MNI, ORIG') + ' template type, e.g. TLRC, MNI, ORIG') class Refit(CommandLine): @@ -354,9 +354,9 @@ class AutoTcorrelateInputSpec(AFNICommandInputSpec): argstr="-mask_only_targets", xor=['mask_source']) mask_source = File(exists=True, - desc="mask for source voxels", - argstr="-mask_source %s", - xor=['mask_only_targets']) + desc="mask for source voxels", + argstr="-mask_source %s", + xor=['mask_only_targets']) out_file = File(name_template="%s_similarity_matrix.1D", desc='output image file name', argstr='-prefix %s', name_source="in_file") @@ -587,8 +587,8 @@ class VolregInputSpec(AFNICommandInputSpec): argstr='-zpad %d', position=-5) md1d_file = File(name_template='%s_md.1D', desc='max displacement output file', - argstr='-maxdisp1D %s', name_source="in_file", - keep_extension=True, position=-4) + argstr='-maxdisp1D %s', name_source="in_file", + keep_extension=True, position=-4) oned_file = File(name_template='%s.1D', desc='1D movement parameters output file', argstr='-1Dfile %s', name_source="in_file", @@ -1147,7 +1147,7 @@ def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): outputs['out_file'] = self._gen_filename(self.inputs.in_file, - suffix=self.inputs.suffix) + suffix=self.inputs.suffix) else: outputs['out_file'] = os.path.abspath(self.inputs.out_file) return outputs @@ -1369,48 +1369,47 @@ class TCorrelate(AFNICommand): class TCorr1DInputSpec(AFNICommandInputSpec): - xset = File(desc = '3d+time dataset input', - argstr = ' %s', - position = -2, - mandatory = True, - exists = True, - copyfile=False) - y_1d = File(desc = '1D time series file input', - argstr = ' %s', - position = -1, - mandatory = True, - exists = True) - out_file = File(desc = 'output filename prefix', - name_template='%s_correlation.nii.gz', - argstr = '-prefix %s', - name_source = 'xset', - keep_extension = True) + xset = File(desc='3d+time dataset input', + argstr=' %s', + position=-2, + mandatory=True, + exists=True, + copyfile=False) + y_1d = File(desc='1D time series file input', + argstr=' %s', + position=-1, + mandatory=True, + exists=True) + out_file = 
File(desc='output filename prefix', + name_template='%s_correlation.nii.gz', + argstr='-prefix %s', + name_source='xset', + keep_extension=True) pearson = traits.Bool(desc='Correlation is the normal' + - ' Pearson correlation coefficient', - argstr=' -pearson', - xor=['spearman','quadrant','ktaub'], - position=1) + ' Pearson correlation coefficient', + argstr=' -pearson', + xor=['spearman', 'quadrant', 'ktaub'], + position=1) spearman = traits.Bool(desc='Correlation is the' + - ' Spearman (rank) correlation coefficient', - argstr=' -spearman', - xor=['pearson','quadrant','ktaub'], - position=1) + ' Spearman (rank) correlation coefficient', + argstr=' -spearman', + xor=['pearson', 'quadrant', 'ktaub'], + position=1) quadrant = traits.Bool(desc='Correlation is the' + - ' quadrant correlation coefficient', - argstr=' -quadrant', - xor=['pearson','spearman','ktaub'], - position=1) + ' quadrant correlation coefficient', + argstr=' -quadrant', + xor=['pearson', 'spearman', 'ktaub'], + position=1) ktaub = traits.Bool(desc='Correlation is the' + - ' Kendall\'s tau_b correlation coefficient', - argstr=' -ktaub', - xor=['pearson','spearman','quadrant'], - position=1) - + ' Kendall\'s tau_b correlation coefficient', + argstr=' -ktaub', + xor=['pearson', 'spearman', 'quadrant'], + position=1) class TCorr1DOutputSpec(TraitedSpec): - out_file = File(desc = 'output file containing correlations', - exists = True) + out_file = File(desc='output file containing correlations', + exists=True) class TCorr1D(AFNICommand): @@ -1534,7 +1533,7 @@ class ROIStatsInputSpec(CommandLineInputSpec): class ROIStatsOutputSpec(TraitedSpec): - stats = File(desc='output tab separated values file', exists=True) + stats = File(desc='output tab separated values file', exists=True) class ROIStats(CommandLine): @@ -1794,6 +1793,7 @@ def _format_arg(self, name, trait_spec, value): else: return super(TCorrMap, self)._format_arg(name, trait_spec, value) + class AutoboxInputSpec(AFNICommandInputSpec): in_file = File(exists=True, mandatory=True, argstr='-input %s', desc='input file', copyfile=False) @@ -1860,6 +1860,7 @@ def _gen_filename(self, name): return Undefined return super(Autobox, self)._gen_filename(name) + class RetroicorInputSpec(AFNICommandInputSpec): in_file = File(desc='input file to 3dretroicor', argstr='%s', @@ -1931,15 +1932,16 @@ class Retroicor(AFNICommand): class AFNItoNIFTIInputSpec(AFNICommandInputSpec): in_file = File(desc='input file to 3dAFNItoNIFTI', - argstr='%s', - position=-1, - mandatory=True, - exists=True, - copyfile=False) + argstr='%s', + position=-1, + mandatory=True, + exists=True, + copyfile=False) out_file = File(name_template="%s.nii", desc='output image file name', argstr='-prefix %s', name_source="in_file") hash_files = False + class AFNItoNIFTI(AFNICommand): """Changes AFNI format files to NIFTI format using 3dAFNItoNIFTI @@ -1971,6 +1973,7 @@ def _overload_extension(self, value): def _gen_filename(self, name): return os.path.abspath(super(AFNItoNIFTI, self)._gen_filename(name)) + class EvalInputSpec(AFNICommandInputSpec): in_file_a = File(desc='input file to 1deval', argstr='-a %s', position=0, mandatory=True, exists=True) @@ -1981,7 +1984,7 @@ class EvalInputSpec(AFNICommandInputSpec): out_file = File(name_template="%s_calc", desc='output image file name', argstr='-prefix %s', name_source="in_file_a") out1D = traits.Bool(desc="output in 1D", - argstr='-1D') + argstr='-1D') expr = traits.Str(desc='expr', argstr='-expr "%s"', position=3, mandatory=True) start_idx = traits.Int(desc='start index 
for in_file_a', @@ -1991,6 +1994,7 @@ class EvalInputSpec(AFNICommandInputSpec): single_idx = traits.Int(desc='volume index for in_file_a') other = File(desc='other options', argstr='') + class Eval(AFNICommand): """Evaluates an expression that may include columns of data from one or more text files @@ -2032,16 +2036,17 @@ def _parse_inputs(self, skip=None): return super(Eval, self)._parse_inputs( skip=('start_idx', 'stop_idx', 'out1D', 'other')) + class MeansInputSpec(AFNICommandInputSpec): in_file_a = File(desc='input file to 3dMean', - argstr='%s', - position=0, - mandatory=True, - exists=True) + argstr='%s', + position=0, + mandatory=True, + exists=True) in_file_b = File(desc='another input file to 3dMean', - argstr='%s', - position=1, - exists=True) + argstr='%s', + position=1, + exists=True) out_file = File(name_template="%s_mean", desc='output image file name', argstr='-prefix %s', name_source="in_file_a") scale = traits.Str(desc='scaling of output', argstr='-%sscale') @@ -2053,6 +2058,7 @@ class MeansInputSpec(AFNICommandInputSpec): mask_inter = traits.Bool(desc='create intersection mask', argstr='-mask_inter') mask_union = traits.Bool(desc='create union mask', argstr='-mask_union') + class Means(AFNICommand): """Takes the voxel-by-voxel mean of all input datasets using 3dMean diff --git a/nipype/interfaces/afni/svm.py b/nipype/interfaces/afni/svm.py index 492ebc10a9..c2bb335d32 100644 --- a/nipype/interfaces/afni/svm.py +++ b/nipype/interfaces/afni/svm.py @@ -24,57 +24,60 @@ warn = warnings.warn + class SVMTrainInputSpec(AFNICommandInputSpec): - #training options + # training options ttype = traits.Str(desc='tname: classification or regression', - argstr='-type %s', - mandatory=True) + argstr='-type %s', + mandatory=True) in_file = File(desc='A 3D+t AFNI brik dataset to be used for training.', - argstr='-trainvol %s', - mandatory=True, - exists=True, - copyfile=False) + argstr='-trainvol %s', + mandatory=True, + exists=True, + copyfile=False) out_file = File(name_template="%s_vectors", - desc='output sum of weighted linear support vectors file name', - argstr='-bucket %s', - suffix='_bucket', - name_source="in_file") + desc='output sum of weighted linear support vectors file name', + argstr='-bucket %s', + suffix='_bucket', + name_source="in_file") model = File(name_template="%s_model", - desc='basename for the brik containing the SVM model', - argstr='-model %s', - suffix='_model', - name_source="in_file") + desc='basename for the brik containing the SVM model', + argstr='-model %s', + suffix='_model', + name_source="in_file") alphas = File(name_template="%s_alphas", - desc='output alphas file name', - argstr='-alpha %s', - suffix='_alphas', - name_source="in_file") + desc='output alphas file name', + argstr='-alpha %s', + suffix='_alphas', + name_source="in_file") mask = File(desc='byte-format brik file used to mask voxels in the analysis', argstr='-mask %s', position=-1, exists=True, copyfile=False) nomodelmask = traits.Bool(desc='Flag to enable the omission of a mask file', - argstr='-nomodelmask') + argstr='-nomodelmask') trainlabels = File(desc='.1D labels corresponding to the stimulus paradigm for the training data.', - argstr='-trainlabels %s', - exists=True) + argstr='-trainlabels %s', + exists=True) censor = File(desc='.1D censor file that allows the user to ignore certain samples in the training data.', - argstr='-censor %s', - exists=True) + argstr='-censor %s', + exists=True) kernel = traits.Str(desc='string specifying type of kernel function:linear, polynomial, rbf, 
sigmoid', - argstr='-kernel %s') + argstr='-kernel %s') max_iterations = traits.Int(desc='Specify the maximum number of iterations for the optimization.', - argstr='-max_iterations %d') + argstr='-max_iterations %d') w_out = traits.Bool(desc='output sum of weighted linear support vectors', - argstr='-wout') + argstr='-wout') options = traits.Str(desc='additional options for SVM-light', argstr='%s') + class SVMTrainOutputSpec(TraitedSpec): out_file = File(desc='sum of weighted linear support vectors file name') model = File(desc='brik containing the SVM model file name') alphas = File(desc='output alphas file name') + class SVMTrain(AFNICommand): """Temporally predictive modeling with the support vector machine SVM Train Only @@ -104,31 +107,33 @@ class SVMTrain(AFNICommand): def _format_arg(self, name, trait_spec, value): return super(SVMTrain, self)._format_arg(name, trait_spec, value) + class SVMTestInputSpec(AFNICommandInputSpec): - #testing options + # testing options model = traits.Str(desc='modname is the basename for the brik containing the SVM model', - argstr='-model %s', - mandatory=True) + argstr='-model %s', + mandatory=True) in_file = File(desc='A 3D or 3D+t AFNI brik dataset to be used for testing.', - argstr='-testvol %s', - exists=True, - mandatory=True) + argstr='-testvol %s', + exists=True, + mandatory=True) out_file = File(name_template="%s_predictions", - desc='filename for .1D prediction file(s).', - argstr='-predictions %s') + desc='filename for .1D prediction file(s).', + argstr='-predictions %s') testlabels = File(desc='*true* class category .1D labels for the test dataset. It is used to calculate the prediction accuracy performance', - exists=True, - argstr='-testlabels %s') + exists=True, + argstr='-testlabels %s') classout = traits.Bool(desc='Flag to specify that pname files should be integer-valued, corresponding to class category decisions.', - argstr='-classout') + argstr='-classout') nopredcensord = traits.Bool(desc='Flag to prevent writing predicted values for censored time-points', - argstr='-nopredcensord') + argstr='-nopredcensord') nodetrend = traits.Bool(desc='Flag to specify that pname files should not be linearly detrended', - argstr='-nodetrend') + argstr='-nodetrend') multiclass = traits.Bool(desc='Specifies multiclass algorithm for classification', - argstr='-multiclass %s') + argstr='-multiclass %s') options = traits.Str(desc='additional options for SVM-light', argstr='%s') + class SVMTest(AFNICommand): """Temporally predictive modeling with the support vector machine SVM Test Only diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index 8638b7574c..4b02e838af 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -4,7 +4,7 @@ # Local imports from ..base import (CommandLine, CommandLineInputSpec, traits, -isdefined) + isdefined) from ... import logging logger = logging.getLogger('interface') @@ -15,14 +15,15 @@ # the computer (when running MultiProc) by forcing everything to # single threaded. This can be a severe penalty for registration # performance. -LOCAL_DEFAULT_NUMBER_OF_THREADS=1 +LOCAL_DEFAULT_NUMBER_OF_THREADS = 1 # -Using NSLOTS has the same behavior as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS # as long as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS is not set. Otherwise # ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precidence. # This behavior states that you the user explicitly specifies # num_threads, then respect that no matter what SGE tries to limit. 
-PREFERED_ITKv4_THREAD_LIMIT_VARIABLE='NSLOTS' -ALT_ITKv4_THREAD_LIMIT_VARIABLE='ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS' +PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = 'NSLOTS' +ALT_ITKv4_THREAD_LIMIT_VARIABLE = 'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS' + class ANTSCommandInputSpec(CommandLineInputSpec): """Base Input Specification for all ANTS Commands @@ -50,23 +51,23 @@ def __init__(self, **inputs): def _num_threads_update(self): self._num_threads = self.inputs.num_threads - ## ONLY SET THE ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS if requested - ## by the end user. The default setting did not allow for - ## overwriting the default values. - ## In ITKv4 (the version used for all ANTS programs), ITK respects - ## the SGE controlled $NSLOTS environmental variable. - ## If user specifies -1, then that indicates that the system - ## default behavior should be the one specified by ITKv4 rules - ## (i.e. respect SGE $NSLOTS or environmental variables of threads, or - ## user environmental settings) - if ( self.inputs.num_threads == -1 ): - if ( ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ ): + # ONLY SET THE ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS if requested + # by the end user. The default setting did not allow for + # overwriting the default values. + # In ITKv4 (the version used for all ANTS programs), ITK respects + # the SGE controlled $NSLOTS environmental variable. + # If user specifies -1, then that indicates that the system + # default behavior should be the one specified by ITKv4 rules + # (i.e. respect SGE $NSLOTS or environmental variables of threads, or + # user environmental settings) + if (self.inputs.num_threads == -1): + if (ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ): del self.inputs.environ[ALT_ITKv4_THREAD_LIMIT_VARIABLE] - if ( PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ ): + if (PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ): del self.inputs.environ[PREFERED_ITKv4_THREAD_LIMIT_VARIABLE] else: self.inputs.environ.update({PREFERED_ITKv4_THREAD_LIMIT_VARIABLE: - '%s' % self.inputs.num_threads}) + '%s' % self.inputs.num_threads}) @staticmethod def _format_xarray(val): diff --git a/nipype/interfaces/ants/legacy.py b/nipype/interfaces/ants/legacy.py index 84cedf5f20..3482f91fe5 100644 --- a/nipype/interfaces/ants/legacy.py +++ b/nipype/interfaces/ants/legacy.py @@ -1,7 +1,7 @@ -## NOTE: This implementation has been superceeded buy the antsApplyTransform -## implmeentation that more closely follows the strucutre and capabilities -## of the antsApplyTransform program. This implementation is here -## for backwards compatibility. +# NOTE: This implementation has been superceeded buy the antsApplyTransform +# implmeentation that more closely follows the strucutre and capabilities +# of the antsApplyTransform program. This implementation is here +# for backwards compatibility. 
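(For reviewers skimming the thread-limit comments reformatted above: when num_threads is set explicitly it is exported to the child process through NSLOTS, and when it is set to -1 both NSLOTS and ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS are dropped from inputs.environ so the ITKv4/SGE defaults apply. A rough doctest-style sketch of that behaviour, not part of this diff; the Registration interface and the literal value 4 are only illustrative:

>>> from nipype.interfaces.ants import Registration
>>> reg = Registration()
>>> reg.inputs.num_threads = 4    # explicit request: exported as NSLOTS in inputs.environ
>>> reg.inputs.num_threads = -1   # defer to ITKv4/SGE rules: NSLOTS and
>>> # ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS are removed from inputs.environ
)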
"""ANTS Apply Transforms interface Change directory to provide relative paths for doctests @@ -25,7 +25,7 @@ class antsIntroductionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum(3, 2, argstr='-d %d', usedefault=True, - desc='image dimension (2 or 3)', position=1) + desc='image dimension (2 or 3)', position=1) reference_image = File(exists=True, argstr='-r %s', desc='template file to warp to', mandatory=True, copyfile=True) @@ -33,45 +33,45 @@ class antsIntroductionInputSpec(ANTSCommandInputSpec): argstr='-i %s', desc='input image to warp to template', mandatory=True, copyfile=False) force_proceed = traits.Bool(argstr='-f 1', - desc=('force script to proceed even if headers ' - 'may be incompatible')) + desc=('force script to proceed even if headers ' + 'may be incompatible')) inverse_warp_template_labels = traits.Bool(argstr='-l', - desc=('Applies inverse warp to the template labels ' - 'to estimate label positions in target space (use ' - 'for template-based segmentation)')) + desc=('Applies inverse warp to the template labels ' + 'to estimate label positions in target space (use ' + 'for template-based segmentation)')) max_iterations = traits.List(traits.Int, argstr='-m %s', sep='x', - desc=('maximum number of iterations (must be ' - 'list of integers in the form [J,K,L...]: ' - 'J = coarsest resolution iterations, K = ' - 'middle resolution interations, L = fine ' - 'resolution iterations')) + desc=('maximum number of iterations (must be ' + 'list of integers in the form [J,K,L...]: ' + 'J = coarsest resolution iterations, K = ' + 'middle resolution interations, L = fine ' + 'resolution iterations')) bias_field_correction = traits.Bool(argstr='-n 1', - desc=('Applies bias field correction to moving ' - 'image')) + desc=('Applies bias field correction to moving ' + 'image')) similarity_metric = traits.Enum('PR', 'CC', 'MI', 'MSQ', argstr='-s %s', - desc=('Type of similartiy metric used for registration ' - '(CC = cross correlation, MI = mutual information, ' - 'PR = probability mapping, MSQ = mean square difference)')) + desc=('Type of similartiy metric used for registration ' + '(CC = cross correlation, MI = mutual information, ' + 'PR = probability mapping, MSQ = mean square difference)')) transformation_model = traits.Enum('GR', 'EL', 'SY', 'S2', 'EX', 'DD', 'RI', 'RA', argstr='-t %s', usedefault=True, - desc=('Type of transofmration model used for registration ' - '(EL = elastic transformation model, SY = SyN with time, ' - 'arbitrary number of time points, S2 = SyN with time ' - 'optimized for 2 time points, GR = greedy SyN, EX = ' - 'exponential, DD = diffeomorphic demons style exponential ' - 'mapping, RI = purely rigid, RA = affine rigid')) + desc=('Type of transofmration model used for registration ' + '(EL = elastic transformation model, SY = SyN with time, ' + 'arbitrary number of time points, S2 = SyN with time ' + 'optimized for 2 time points, GR = greedy SyN, EX = ' + 'exponential, DD = diffeomorphic demons style exponential ' + 'mapping, RI = purely rigid, RA = affine rigid')) out_prefix = traits.Str('ants_', argstr='-o %s', usedefault=True, - desc=('Prefix that is prepended to all output ' - 'files (default = ants_)')) + desc=('Prefix that is prepended to all output ' + 'files (default = ants_)')) quality_check = traits.Bool(argstr='-q 1', - desc='Perform a quality check of the result') + desc='Perform a quality check of the result') class antsIntroductionOutputSpec(TraitedSpec): affine_transformation = File(exists=True, desc='affine (prefix_Affine.txt)') 
warp_field = File(exists=True, desc='warp field (prefix_Warp.nii)') inverse_warp_field = File(exists=True, - desc='inverse warp field (prefix_InverseWarp.nii)') + desc='inverse warp field (prefix_InverseWarp.nii)') input_file = File(exists=True, desc='input image (prefix_repaired.nii)') output_file = File(exists=True, desc='output image (prefix_deformed.nii)') @@ -104,11 +104,11 @@ def _list_outputs(self): # The default transformation is GR, which outputs the wrap fields if not isdefined(transmodel) or (isdefined(transmodel) and transmodel not in ['RI', 'RA']): outputs['warp_field'] = os.path.join(os.getcwd(), - self.inputs.out_prefix + - 'Warp.nii.gz') + self.inputs.out_prefix + + 'Warp.nii.gz') outputs['inverse_warp_field'] = os.path.join(os.getcwd(), - self.inputs.out_prefix + - 'InverseWarp.nii.gz') + self.inputs.out_prefix + + 'InverseWarp.nii.gz') outputs['affine_transformation'] = os.path.join(os.getcwd(), self.inputs.out_prefix + @@ -122,23 +122,26 @@ def _list_outputs(self): return outputs -## How do we make a pass through so that GenWarpFields is just an alias for antsIntroduction ? +# How do we make a pass through so that GenWarpFields is just an alias for antsIntroduction ? + + class GenWarpFields(antsIntroduction): pass + class buildtemplateparallelInputSpec(ANTSCommandInputSpec): dimension = traits.Enum(3, 2, argstr='-d %d', usedefault=True, - desc='image dimension (2 or 3)', position=1) + desc='image dimension (2 or 3)', position=1) out_prefix = traits.Str('antsTMPL_', argstr='-o %s', usedefault=True, - desc=('Prefix that is prepended to all output ' - 'files (default = antsTMPL_)')) + desc=('Prefix that is prepended to all output ' + 'files (default = antsTMPL_)')) in_files = traits.List(File(exists=True), mandatory=True, - desc='list of images to generate template from', - argstr='%s', position=-1) + desc='list of images to generate template from', + argstr='%s', position=-1) parallelization = traits.Enum(0, 1, 2, argstr='-c %d', usedefault=True, - desc=('control for parallel processing (0 = ' - 'serial, 1 = use PBS, 2 = use PEXEC, 3 = ' - 'use Apple XGrid')) + desc=('control for parallel processing (0 = ' + 'serial, 1 = use PBS, 2 = use PEXEC, 3 = ' + 'use Apple XGrid')) gradient_step_size = traits.Float(argstr='-g %f', desc=('smaller magnitude results in ' 'more cautious steps (default = ' @@ -149,29 +152,29 @@ class buildtemplateparallelInputSpec(ANTSCommandInputSpec): desc=('Requires parallelization = 2 (PEXEC). 
' 'Sets number of cpu cores to use')) max_iterations = traits.List(traits.Int, argstr='-m %s', sep='x', - desc=('maximum number of iterations (must be ' - 'list of integers in the form [J,K,L...]: ' - 'J = coarsest resolution iterations, K = ' - 'middle resolution interations, L = fine ' - 'resolution iterations')) + desc=('maximum number of iterations (must be ' + 'list of integers in the form [J,K,L...]: ' + 'J = coarsest resolution iterations, K = ' + 'middle resolution interations, L = fine ' + 'resolution iterations')) bias_field_correction = traits.Bool(argstr='-n 1', - desc=('Applies bias field correction to moving ' - 'image')) + desc=('Applies bias field correction to moving ' + 'image')) rigid_body_registration = traits.Bool(argstr='-r 1', - desc=('registers inputs before creating template ' - '(useful if no initial template available)')) + desc=('registers inputs before creating template ' + '(useful if no initial template available)')) similarity_metric = traits.Enum('PR', 'CC', 'MI', 'MSQ', argstr='-s %s', - desc=('Type of similartiy metric used for registration ' - '(CC = cross correlation, MI = mutual information, ' - 'PR = probability mapping, MSQ = mean square difference)')) + desc=('Type of similartiy metric used for registration ' + '(CC = cross correlation, MI = mutual information, ' + 'PR = probability mapping, MSQ = mean square difference)')) transformation_model = traits.Enum('GR', 'EL', 'SY', 'S2', 'EX', 'DD', argstr='-t %s', usedefault=True, - desc=('Type of transofmration model used for registration ' - '(EL = elastic transformation model, SY = SyN with time, ' - 'arbitrary number of time points, S2 = SyN with time ' - 'optimized for 2 time points, GR = greedy SyN, EX = ' - 'exponential, DD = diffeomorphic demons style exponential ' - 'mapping')) + desc=('Type of transofmration model used for registration ' + '(EL = elastic transformation model, SY = SyN with time, ' + 'arbitrary number of time points, S2 = SyN with time ' + 'optimized for 2 time points, GR = greedy SyN, EX = ' + 'exponential, DD = diffeomorphic demons style exponential ' + 'mapping')) use_first_as_target = traits.Bool(desc=('uses first volume as target of ' 'all inputs. When not used, an ' 'unbiased average image is used ' @@ -181,11 +184,11 @@ class buildtemplateparallelInputSpec(ANTSCommandInputSpec): class buildtemplateparallelOutputSpec(TraitedSpec): final_template_file = File(exists=True, desc='final ANTS template') template_files = OutputMultiPath(File(exists=True), - desc='Templates from different stages of iteration') + desc='Templates from different stages of iteration') subject_outfiles = OutputMultiPath(File(exists=True), - desc=('Outputs for each input image. Includes warp ' - 'field, inverse warp, Affine, original image ' - '(repaired) and warped image (deformed)')) + desc=('Outputs for each input image. 
Includes warp ' + 'field, inverse warp, Affine, original image ' + '(repaired) and warped image (deformed)')) class buildtemplateparallel(ANTSCommand): @@ -247,8 +250,8 @@ def _list_outputs(self): outputs['template_files'].append(os.path.realpath(file_)) outputs['final_template_file'] = \ - os.path.realpath('%stemplate.nii.gz' % - self.inputs.out_prefix) + os.path.realpath('%stemplate.nii.gz' % + self.inputs.out_prefix) outputs['subject_outfiles'] = [] for filename in self.inputs.in_files: _, base, _ = split_filename(filename) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 28a4bb5df3..c4d6d6636b 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -48,7 +48,7 @@ class ANTSInputSpec(ANTSCommandInputSpec): # # Cost = Sum_i ( metricweight[i] Metric_i ( fixedimage[i], movingimage[i]) ) metric = traits.List(traits.Enum('CC', 'MI', 'SMI', 'PR', 'SSD', - 'MSQ', 'PSE'), mandatory=True, desc='') + 'MSQ', 'PSE'), mandatory=True, desc='') metric_weight = traits.List(traits.Float(), requires=['metric'], desc='') radius = traits.List(traits.Int(), requires=['metric'], desc='') @@ -305,11 +305,11 @@ class RegistrationInputSpec(ANTSCommandInputSpec): argstr='--initialize-transforms-per-stage %d', default=False, usedefault=True, # This should be true for explicit completeness desc=('Initialize linear transforms from the previous stage. By enabling this option, ' - 'the current linear stage transform is directly intialized from the previous ' - 'stages linear transform; this allows multiple linear stages to be run where ' - 'each stage directly updates the estimated linear transform from the previous ' - 'stage. (e.g. Translation -> Rigid -> Affine). ' - )) + 'the current linear stage transform is directly intialized from the previous ' + 'stages linear transform; this allows multiple linear stages to be run where ' + 'each stage directly updates the estimated linear transform from the previous ' + 'stage. (e.g. Translation -> Rigid -> Affine). 
' + )) # NOTE: Even though only 0=False and 1=True are allowed, ants uses integer # values instead of booleans float = traits.Bool( @@ -376,6 +376,7 @@ class RegistrationOutputSpec(TraitedSpec): inverse_warped_image = File(desc="Outputs the inverse of the warped image") save_state = File(desc="The saved registration state to be restored") + class Registration(ANTSCommand): """ @@ -553,7 +554,7 @@ def _formatMetric(self, index): else: temp["moving_image"] = self.inputs.moving_image[i] - specs.append( temp ) + specs.append(temp) else: specs = [stage_inputs] @@ -605,9 +606,9 @@ def _formatRegistration(self): retval.append('--convergence %s' % self._formatConvergence(ii)) if isdefined(self.inputs.sigma_units): retval.append('--smoothing-sigmas %s%s' % - (self._antsJoinList(self.inputs.smoothing_sigmas[ + (self._antsJoinList(self.inputs.smoothing_sigmas[ ii]), - self.inputs.sigma_units[ii])) + self.inputs.sigma_units[ii])) else: retval.append('--smoothing-sigmas %s' % self._antsJoinList(self.inputs.smoothing_sigmas[ii])) @@ -669,7 +670,6 @@ def _formatWinsorizeImageIntensities(self): self._quantilesDone = True return '--winsorize-image-intensities [ %s, %s ]' % (self.inputs.winsorize_lower_quantile, self.inputs.winsorize_upper_quantile) - def _format_arg(self, opt, spec, val): if opt == 'fixed_image_mask': if isdefined(self.inputs.moving_image_mask): diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index df3469bd34..9473740a30 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -100,7 +100,7 @@ def _list_outputs(self): return outputs def _run_interface(self, runtime): - runtime = super(WarpTimeSeriesImageMultiTransform, self)._run_interface(runtime, correct_return_codes = [0,1]) + runtime = super(WarpTimeSeriesImageMultiTransform, self)._run_interface(runtime, correct_return_codes=[0, 1]) if "100 % complete" not in runtime.stdout: self.raise_exception(runtime) return runtime @@ -113,7 +113,7 @@ class WarpImageMultiTransformInputSpec(ANTSCommandInputSpec): desc=('image to apply transformation to (generally a ' 'coregistered functional)'), position=2) output_image = File(genfile=True, hash_files=False, argstr='%s', - desc=('name of the output warped image'), position = 3, xor=['out_postfix']) + desc=('name of the output warped image'), position=3, xor=['out_postfix']) out_postfix = File("_wimt", usedefault=True, hash_files=False, desc=('Postfix that is prepended to all output ' 'files (default = _wimt)'), xor=['output_image']) @@ -382,7 +382,6 @@ class ApplyTransformsToPoints(ANTSCommand): input_spec = ApplyTransformsToPointsInputSpec output_spec = ApplyTransformsToPointsOutputSpec - def _getTransformFileNames(self): retval = [] for ii in range(len(self.inputs.transforms)): diff --git a/nipype/interfaces/ants/tests/test_spec_JointFusion.py b/nipype/interfaces/ants/tests/test_spec_JointFusion.py index 1eaf601edd..ed6d283032 100644 --- a/nipype/interfaces/ants/tests/test_spec_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_spec_JointFusion.py @@ -43,7 +43,7 @@ def test_JointFusion_method(): def test_JointFusion_radius(): at = JointFusion() - set_radius = lambda attr,x,y,z: setattr(at.inputs, attr, [x, y, z]) + set_radius = lambda attr, x, y, z: setattr(at.inputs, attr, [x, y, z]) for attr in ['patch_radius', 'search_radius']: for x in range(5): set_radius(attr, x, x + 1, x**x) @@ -64,8 +64,8 @@ def test_JointFusion_cmd(): at.inputs.warped_label_images = segmentation_images T1_image = example_data('T1.nii') 
at.inputs.target_image = T1_image - at.inputs.patch_radius = [3,2,1] - at.inputs.search_radius = [1,2,3] + at.inputs.patch_radius = [3, 2, 1] + at.inputs.search_radius = [1, 2, 3] expected_command = ('jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3' ' -tg %s -g %s -g %s -l %s -l %s' ' fusion_labelimage_output.nii') % (T1_image, diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 5e83e13cc7..635c4be7f5 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -15,34 +15,36 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): - dimension=traits.Enum(3, 2, argstr= '%d', usedefault=True, - desc='image dimension (2 or 3)', mandatory=True, - position = 0) - input_image=File(argstr='%s', exists=True, - desc='Main input is a 3-D grayscale image.', mandatory=True, - position = 1) - output_image=traits.Str('rgb.nii.gz', argstr='%s', usedefault=True, - desc=('rgb output image'), position = 2) - mask_image=File('none', argstr='%s', exists=True, - desc = 'mask image', position = 3, usedefault = True) - colormap=traits.Str(argstr='%s', usedefault=True, - desc=('Possible colormaps: grey, red, green, ' - 'blue, copper, jet, hsv, spring, summer, ' - 'autumn, winter, hot, cool, overunder, custom ' - ), mandatory = True, position = 4) - custom_color_map_file=traits.Str('none', argstr='%s', usedefault=True, - desc = 'custom color map file', position = 5) + dimension = traits.Enum(3, 2, argstr='%d', usedefault=True, + desc='image dimension (2 or 3)', mandatory=True, + position=0) + input_image = File(argstr='%s', exists=True, + desc='Main input is a 3-D grayscale image.', mandatory=True, + position=1) + output_image = traits.Str('rgb.nii.gz', argstr='%s', usedefault=True, + desc=('rgb output image'), position=2) + mask_image = File('none', argstr='%s', exists=True, + desc='mask image', position=3, usedefault=True) + colormap = traits.Str(argstr='%s', usedefault=True, + desc=('Possible colormaps: grey, red, green, ' + 'blue, copper, jet, hsv, spring, summer, ' + 'autumn, winter, hot, cool, overunder, custom ' + ), mandatory=True, position=4) + custom_color_map_file = traits.Str('none', argstr='%s', usedefault=True, + desc='custom color map file', position=5) minimum_input = traits.Int(argstr='%d', desc='minimum input', - mandatory = True, position = 6) + mandatory=True, position=6) maximum_input = traits.Int(argstr='%d', desc='maximum input', - mandatory = True, position = 7) + mandatory=True, position=7) minimum_RGB_output = traits.Int(0, usedefault=True, - argstr='%d', desc = '', position = 8) + argstr='%d', desc='', position=8) maximum_RGB_output = traits.Int(255, usedefault=True, - argstr='%d', desc = '', position = 9) + argstr='%d', desc='', position=9) + class ConvertScalarImageToRGBOutputSpec(TraitedSpec): - output_image= File(exists=True, desc='converted RGB image') + output_image = File(exists=True, desc='converted RGB image') + class ConvertScalarImageToRGB(ANTSCommand): """ @@ -68,60 +70,61 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() outputs['output_image'] = os.path.join(os.getcwd(), - self.inputs.output_image) + self.inputs.output_image) return outputs class CreateTiledMosaicInputSpec(ANTSCommandInputSpec): input_image = File(argstr='-i %s', exists=True, - desc = 'Main input is a 3-D grayscale image.', - mandatory = True) - rgb_image= File(argstr='-r %s', exists = True, - desc = ('An optional Rgb image can be added as an overlay.' 
- 'It must have the same image' - 'geometry as the input grayscale image.'), - mandatory = True) - mask_image = File(argstr = '-x %s', exists = True, - desc = 'Specifies the ROI of the RGB voxels used.') - alpha_value = traits.Float(argstr = '-a %.2f', - desc = ('If an Rgb image is provided, render the overlay ' - 'using the specified alpha parameter.')) - output_image = traits.Str('output.png', argstr = '-o %s', - desc = 'The output consists of the tiled mosaic image.', - usedefault = True) - tile_geometry = traits.Str(argstr = '-t %s',desc = ( + desc='Main input is a 3-D grayscale image.', + mandatory=True) + rgb_image = File(argstr='-r %s', exists=True, + desc=('An optional Rgb image can be added as an overlay.' + 'It must have the same image' + 'geometry as the input grayscale image.'), + mandatory=True) + mask_image = File(argstr='-x %s', exists=True, + desc='Specifies the ROI of the RGB voxels used.') + alpha_value = traits.Float(argstr='-a %.2f', + desc=('If an Rgb image is provided, render the overlay ' + 'using the specified alpha parameter.')) + output_image = traits.Str('output.png', argstr='-o %s', + desc='The output consists of the tiled mosaic image.', + usedefault=True) + tile_geometry = traits.Str(argstr='-t %s', desc=( 'The tile geometry specifies the number of rows and columns' 'in the output image. For example, if the user specifies "5x10", ' 'then 5 rows by 10 columns of slices are rendered. If R < 0 and C > ' '0 (or vice versa), the negative value is selected' 'based on direction.')) - direction = traits.Int(argstr = '-d %d', desc = ('Specifies the direction of ' - 'the slices. If no direction is specified, the ' - 'direction with the coarsest spacing is chosen.')) + direction = traits.Int(argstr='-d %d', desc=('Specifies the direction of ' + 'the slices. If no direction is specified, the ' + 'direction with the coarsest spacing is chosen.')) pad_or_crop = traits.Str(argstr='-p %s', - desc = 'argument passed to -p flag:' - '[padVoxelWidth,]' - '[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1],' - 'constantValue]' - 'The user can specify whether to pad or crop a specified ' - 'voxel-width boundary of each individual slice. For this ' - 'program, cropping is simply padding with negative voxel-widths.' - 'If one pads (+), the user can also specify a constant pad ' - 'value (default = 0). If a mask is specified, the user can use ' - 'the mask to define the region, by using the keyword "mask"' - ' plus an offset, e.g. "-p mask+3".' - ) + desc='argument passed to -p flag:' + '[padVoxelWidth,]' + '[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1],' + 'constantValue]' + 'The user can specify whether to pad or crop a specified ' + 'voxel-width boundary of each individual slice. For this ' + 'program, cropping is simply padding with negative voxel-widths.' + 'If one pads (+), the user can also specify a constant pad ' + 'value (default = 0). If a mask is specified, the user can use ' + 'the mask to define the region, by using the keyword "mask"' + ' plus an offset, e.g. "-p mask+3".' 
+ ) slices = traits.Str(argstr='-s %s', - desc = ('Number of slices to increment Slice1xSlice2xSlice3' - '[numberOfSlicesToIncrement,,]')) - flip_slice = traits.Str(argstr = '-f %s', - desc = ('flipXxflipY')) - permute_axes = traits.Bool(argstr = '-g', desc = 'doPermute' - ) + desc=('Number of slices to increment Slice1xSlice2xSlice3' + '[numberOfSlicesToIncrement,,]')) + flip_slice = traits.Str(argstr='-f %s', + desc=('flipXxflipY')) + permute_axes = traits.Bool(argstr='-g', desc='doPermute' + ) class CreateTiledMosaicOutputSpec(TraitedSpec): - output_image= File(exists=True, desc='image file') + output_image = File(exists=True, desc='image file') + class CreateTiledMosaic(ANTSCommand): """The program CreateTiledMosaic in conjunction with ConvertScalarImageToRGB @@ -153,5 +156,5 @@ class CreateTiledMosaic(ANTSCommand): def _list_outputs(self): outputs = self._outputs().get() outputs['output_image'] = os.path.join(os.getcwd(), - self.inputs.output_image) + self.inputs.output_image) return outputs diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 7d43901908..fd1f3e1dd5 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -54,9 +54,11 @@ __docformat__ = 'restructuredtext' + class NipypeInterfaceError(Exception): def __init__(self, value): self.value = value + def __str__(self): return repr(self.value) @@ -70,6 +72,7 @@ def _unlock_display(ndisplay): return True + def _exists_in_path(cmd, environ): ''' Based on a code snippet from @@ -89,6 +92,7 @@ def _exists_in_path(cmd, environ): return True, filename return False, None + def load_template(name): """Load a template from the script_templates directory @@ -137,6 +141,7 @@ class Bunch(object): Items", Python Cookbook, 2nd Ed, Chapter 4.18, 2005. """ + def __init__(self, *args, **kwargs): self.__dict__.update(*args, **kwargs) @@ -352,7 +357,7 @@ def __init__(self, **kwargs): # NOTE: In python 2.6, object.__init__ no longer accepts input # arguments. HasTraits does not define an __init__ and # therefore these args were being ignored. - #super(TraitedSpec, self).__init__(*args, **kwargs) + # super(TraitedSpec, self).__init__(*args, **kwargs) super(BaseTraitedSpec, self).__init__(**kwargs) traits.push_exception_handler(reraise_exceptions=True) undefined_traits = {} @@ -563,11 +568,11 @@ def get_hashval(self, hash_method=None): and not has_metadata(trait.trait_type, "name_source")) dict_nofilename.append((name, - self._get_sorteddict(val, hash_method=hash_method, - hash_files=hash_files))) + self._get_sorteddict(val, hash_method=hash_method, + hash_files=hash_files))) dict_withhash.append((name, - self._get_sorteddict(val, True, hash_method=hash_method, - hash_files=hash_files))) + self._get_sorteddict(val, True, hash_method=hash_method, + hash_files=hash_files))) return dict_withhash, md5(str(dict_nofilename).encode()).hexdigest() def _get_sorteddict(self, object, dictwithhash=False, hash_method=None, @@ -577,9 +582,9 @@ def _get_sorteddict(self, object, dictwithhash=False, hash_method=None, for key, val in sorted(object.items()): if isdefined(val): out.append((key, - self._get_sorteddict(val, dictwithhash, - hash_method=hash_method, - hash_files=hash_files))) + self._get_sorteddict(val, dictwithhash, + hash_method=hash_method, + hash_files=hash_files))) elif isinstance(object, (list, tuple)): out = [] for val in object: @@ -619,6 +624,7 @@ class DynamicTraitedSpec(BaseTraitedSpec): This class is a workaround for add_traits and clone_traits not functioning well together. 
""" + def __deepcopy__(self, memo): """ bug in deepcopy for HasTraits results in weird cloning behavior for added traits @@ -662,15 +668,15 @@ class Interface(object): input_spec = None # A traited input specification output_spec = None # A traited output specification - _can_resume = False # defines if the interface can reuse partial results - # after interruption + # defines if the interface can reuse partial results after interruption + _can_resume = False @property def can_resume(self): return self._can_resume - _always_run = False # should the interface be always run even if the - # inputs were not changed? + # should the interface be always run even if the inputs were not changed? + _always_run = False @property def always_run(self): @@ -764,8 +770,8 @@ def help(cls, returnhelp=False): """ if cls.__doc__: - #docstring = cls.__doc__.split('\n') - #docstring = [trim(line, '') for line in docstring] + # docstring = cls.__doc__.split('\n') + # docstring = [trim(line, '') for line in docstring] docstring = trim(cls.__doc__).split('\n') + [''] else: docstring = [''] @@ -932,7 +938,7 @@ def _check_mandatory_inputs(self): if isdefined(value): self._check_requires(spec, name, value) for name, spec in list(self.inputs.traits(mandatory=None, - transient=None).items()): + transient=None).items()): self._check_requires(spec, name, getattr(self.inputs, name)) def _check_version_requirements(self, trait_object, raise_exception=True): @@ -1071,7 +1077,7 @@ def run(self, **inputs): if inputs_str != '': e.args += (inputs_str, ) - #exception raising inhibition for special cases + # exception raising inhibition for special cases import traceback runtime.traceback = traceback.format_exc() runtime.traceback_args = e.args @@ -1494,7 +1500,7 @@ def _run_interface(self, runtime, correct_return_codes=[0]): runtime = run_command(runtime, output=self.inputs.terminal_output, redirect_x=self._redirect_x) if runtime.returncode is None or \ - runtime.returncode not in correct_return_codes: + runtime.returncode not in correct_return_codes: self.raise_exception(runtime) return runtime @@ -1568,7 +1574,7 @@ def _filename_from_source(self, name, chain=None): if not isinstance(ns, string_types): raise ValueError(('name_source of \'%s\' trait sould be an ' - 'input trait name') % name) + 'input trait name') % name) if isdefined(getattr(self.inputs, ns)): name_source = ns @@ -1723,6 +1729,7 @@ class SEMLikeCommandLine(CommandLine): used but only for the reduced (by excluding those that do not have corresponding inputs list of outputs. 
""" + def _list_outputs(self): outputs = self.output_spec().get() return self._outputs_from_inputs(outputs) @@ -1732,7 +1739,7 @@ def _outputs_from_inputs(self, outputs): corresponding_input = getattr(self.inputs, name) if isdefined(corresponding_input): if (isinstance(corresponding_input, bool) and - corresponding_input): + corresponding_input): outputs[name] = \ os.path.abspath(self._outputs_filenames[name]) else: diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index 6ceb736fc1..8246a68786 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -8,7 +8,7 @@ """ from .base import (CommandLineInputSpec, traits, TraitedSpec, - File, SEMLikeCommandLine) + File, SEMLikeCommandLine) class C3dAffineToolInputSpec(CommandLineInputSpec): @@ -16,8 +16,8 @@ class C3dAffineToolInputSpec(CommandLineInputSpec): source_file = File(exists=True, argstr='-src %s', position=2) transform_file = File(exists=True, argstr='%s', position=3) itk_transform = traits.Either(traits.Bool, File(), hash_files=False, - desc="Export ITK transform.", - argstr="-oitk %s", position=5) + desc="Export ITK transform.", + argstr="-oitk %s", position=5) fsl2ras = traits.Bool(argstr='-fsl2ras', position=4) diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index 41f410faf6..a56e501e7c 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -17,12 +17,12 @@ class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): snr = traits.Float(argstr='-snr %f', units='NA', desc=('Specifies the signal-to-noise ratio of the ' - 'non-diffusion-weighted measurements to use in simulations.')) + 'non-diffusion-weighted measurements to use in simulations.')) scheme_file = File(exists=True, argstr='-schemefile %s', mandatory=True, desc='Specifies the scheme file for the diffusion MRI data') info_file = File(desc='The name to be given to the information output filename.', argstr='-infooutputfile %s', mandatory=True, genfile=True, - hash_files=False) # Genfile and hash_files? + hash_files=False) # Genfile and hash_files? trace = traits.Float(argstr='-trace %f', units='NA', desc='Trace of the diffusion tensor(s) used in the test function.') onedtfarange = traits.List(traits.Float, argstr='-onedtfarange %s', @@ -39,16 +39,16 @@ class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): 'to give all the different permutations.')) twodtfastep = traits.Float(argstr='-twodtfastep %f', units='NA', desc=('FA step size controlling how many steps there are ' - 'between the minimum and maximum FA settings ' - 'for the two tensor cases.')) + 'between the minimum and maximum FA settings ' + 'for the two tensor cases.')) twodtanglerange = traits.List(traits.Float, argstr='-twodtanglerange %s', minlen=2, maxlen=2, units='NA', desc=('Minimum and maximum crossing angles ' 'between the two fibres.')) twodtanglestep = traits.Float(argstr='-twodtanglestep %f', units='NA', - desc=('Angle step size controlling how many steps there are ' - 'between the minimum and maximum crossing angles for ' - 'the two tensor cases.')) + desc=('Angle step size controlling how many steps there are ' + 'between the minimum and maximum crossing angles for ' + 'the two tensor cases.')) twodtmixmax = traits.Float(argstr='-twodtmixmax %f', units='NA', desc=('Mixing parameter controlling the proportion of one fibre population ' 'to the other. 
The minimum mixing parameter is (1 - twodtmixmax).')) @@ -58,10 +58,12 @@ class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): seed = traits.Float(argstr='-seed %f', units='NA', desc='Specifies the random seed to use for noise generation in simulation trials.') + class SFPICOCalibDataOutputSpec(TraitedSpec): PICOCalib = File(exists=True, desc='Calibration dataset') calib_info = File(exists=True, desc='Calibration dataset') + class SFPICOCalibData(StdOutCommandLine): """ Generates Spherical Function PICo Calibration Data. @@ -115,8 +117,8 @@ class SFPICOCalibData(StdOutCommandLine): data is generated for calculating the LUT. # doctest: +SKIP """ _cmd = 'sfpicocalibdata' - input_spec=SFPICOCalibDataInputSpec - output_spec=SFPICOCalibDataOutputSpec + input_spec = SFPICOCalibDataInputSpec + output_spec = SFPICOCalibDataOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -125,11 +127,10 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.scheme_file) + _, name, _ = split_filename(self.inputs.scheme_file) return name + '_PICOCalib.Bfloat' - class SFLUTGenInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, desc='Voxel-order data of the spherical functions peaks.') @@ -168,10 +169,12 @@ class SFLUTGenInputSpec(StdOutCommandLineInputSpec): desc=('The order of the polynomial fitting the surface. Order 1 is linear. ' 'Order 2 (default) is quadratic.')) + class SFLUTGenOutputSpec(TraitedSpec): lut_one_fibre = File(exists=True, desc='PICo lut for one-fibre model') lut_two_fibres = File(exists=True, desc='PICo lut for two-fibre model') + class SFLUTGen(StdOutCommandLine): """ Generates PICo lookup tables (LUT) for multi-fibre methods such as @@ -221,8 +224,8 @@ class SFLUTGen(StdOutCommandLine): >>> lutgen.run() # doctest: +SKIP """ _cmd = 'sflutgen' - input_spec=SFLUTGenInputSpec - output_spec=SFLUTGenOutputSpec + input_spec = SFLUTGenInputSpec + output_spec = SFLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 1196717269..fde488d13f 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -37,7 +37,7 @@ class ConmatInputSpec(CommandLineInputSpec): tract_stat = traits.Enum("mean", "min", "max", "sum", "median", "var", argstr='-tractstat %s', units='NA', desc=("Tract statistic to use. See TractStats for other options."), - requires=['scalar_file'],xor=['tract_prop']) + requires=['scalar_file'], xor=['tract_prop']) tract_prop = traits.Enum("length", "endpointsep", argstr='-tractstat %s', units='NA', xor=['tract_stat'], @@ -45,13 +45,15 @@ class ConmatInputSpec(CommandLineInputSpec): 'See TractStats for details.')) output_root = File(argstr='-outputroot %s', genfile=True, - desc=('filename root prepended onto the names of the output files. ' - 'The extension will be determined from the input.')) + desc=('filename root prepended onto the names of the output files. 
' + 'The extension will be determined from the input.')) + class ConmatOutputSpec(TraitedSpec): conmat_sc = File(exists=True, desc='Connectivity matrix in CSV file.') conmat_ts = File(desc='Tract statistics in CSV file.') + class Conmat(CommandLine): """ Creates a connectivity matrix using a 3D label image (the target image) @@ -127,8 +129,8 @@ class Conmat(CommandLine): >>> conmat.run() # doctest: +SKIP """ _cmd = 'conmat' - input_spec=ConmatInputSpec - output_spec=ConmatOutputSpec + input_spec = ConmatInputSpec + output_spec = ConmatOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -148,7 +150,7 @@ def _gen_outputroot(self): def _gen_filename(self, name): if name == 'output_root': - _, filename , _ = split_filename(self.inputs.in_file) + _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index 1014bd8f27..956ef9b474 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -18,9 +18,9 @@ class Image2VoxelInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='-4dimage %s', - mandatory=True, position=1, - desc='4d image file') -#TODO convert list of files on the fly + mandatory=True, position=1, + desc='4d image file') +# TODO convert list of files on the fly # imagelist = File(exists=True, argstr='-imagelist %s', # mandatory=True, position=1, # desc='Name of a file containing a list of 3D images') @@ -31,9 +31,11 @@ class Image2VoxelInputSpec(StdOutCommandLineInputSpec): out_type = traits.Enum("float", "char", "short", "int", "long", "double", argstr='-outputdatatype %s', position=2, desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', usedefault=True) + class Image2VoxelOutputSpec(TraitedSpec): voxel_order = File(exists=True, desc='path/name of 4D volume in voxel order') + class Image2Voxel(StdOutCommandLine): """ Converts Analyze / NIFTI / MHA files to voxel order. @@ -60,37 +62,40 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) - return name + '.B'+ self.inputs.out_type + _, name, _ = split_filename(self.inputs.in_file) + return name + '.B' + self.inputs.out_type + class FSL2SchemeInputSpec(StdOutCommandLineInputSpec): bvec_file = File(exists=True, argstr='-bvecfile %s', - mandatory=True, position=1, - desc='b vector file') + mandatory=True, position=1, + desc='b vector file') bval_file = File(exists=True, argstr='-bvalfile %s', - mandatory=True, position=2, - desc='b value file') + mandatory=True, position=2, + desc='b value file') numscans = traits.Int(argstr='-numscans %d', units='NA', - desc="Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session.") + desc="Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session.") interleave = traits.Bool(argstr='-interleave', desc="Interleave repeated scans. Only used with -numscans.") bscale = traits.Float(argstr='-bscale %d', units='NA', - desc="Scaling factor to convert the b-values into different units. Default is 10^6.") + desc="Scaling factor to convert the b-values into different units. 
Default is 10^6.") - diffusiontime = traits.Float(argstr = '-diffusiontime %f', units = 'NA', - desc="Diffusion time") + diffusiontime = traits.Float(argstr='-diffusiontime %f', units='NA', + desc="Diffusion time") flipx = traits.Bool(argstr='-flipx', desc="Negate the x component of all the vectors.") flipy = traits.Bool(argstr='-flipy', desc="Negate the y component of all the vectors.") flipz = traits.Bool(argstr='-flipz', desc="Negate the z component of all the vectors.") usegradmod = traits.Bool(argstr='-usegradmod', desc="Use the gradient magnitude to scale b. This option has no effect if your gradient directions have unit magnitude.") + class FSL2SchemeOutputSpec(TraitedSpec): scheme = File(exists=True, desc='Scheme file') + class FSL2Scheme(StdOutCommandLine): """ Converts b-vectors and b-values from FSL format to a Camino scheme file. @@ -106,8 +111,8 @@ class FSL2Scheme(StdOutCommandLine): """ _cmd = 'fsl2scheme' - input_spec=FSL2SchemeInputSpec - output_spec=FSL2SchemeOutputSpec + input_spec = FSL2SchemeInputSpec + output_spec = FSL2SchemeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -115,36 +120,39 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.bvec_file) + _, name, _ = split_filename(self.inputs.bvec_file) return name + '.scheme' + class VtkStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum('raw', 'voxels', argstr='-inputmodel %s', desc='input model type (raw or voxels)', usedefault=True) in_file = File(exists=True, argstr=' < %s', - mandatory=True, position=-2, - desc='data file') + mandatory=True, position=-2, + desc='data file') - voxeldims = traits.List(traits.Int, desc = 'voxel dimensions in mm', - argstr='-voxeldims %s', minlen=3, maxlen=3, position=4, - units='mm') + voxeldims = traits.List(traits.Int, desc='voxel dimensions in mm', + argstr='-voxeldims %s', minlen=3, maxlen=3, position=4, + units='mm') seed_file = File(exists=False, argstr='-seedfile %s', position=1, - desc='image containing seed points') + desc='image containing seed points') target_file = File(exists=False, argstr='-targetfile %s', position=2, - desc='image containing integer-valued target regions') + desc='image containing integer-valued target regions') scalar_file = File(exists=False, argstr='-scalarfile %s', position=3, - desc='image that is in the same physical space as the tracts') + desc='image that is in the same physical space as the tracts') colourorient = traits.Bool(argstr='-colourorient', desc="Each point on the streamline is coloured by the local orientation.") interpolatescalars = traits.Bool(argstr='-interpolatescalars', desc="the scalar value at each point on the streamline is calculated by trilinear interpolation") interpolate = traits.Bool(argstr='-interpolate', desc="the scalar value at each point on the streamline is calculated by trilinear interpolation") + class VtkStreamlinesOutputSpec(TraitedSpec): vtk = File(exists=True, desc='Streamlines in VTK format') + class VtkStreamlines(StdOutCommandLine): """ Use vtkstreamlines to convert raw or voxel format streamlines to VTK polydata @@ -159,8 +167,8 @@ class VtkStreamlines(StdOutCommandLine): >>> vtk.run() # doctest: +SKIP """ _cmd = 'vtkstreamlines' - input_spec=VtkStreamlinesInputSpec - output_spec=VtkStreamlinesOutputSpec + input_spec = VtkStreamlinesInputSpec + output_spec = VtkStreamlinesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -168,83 +176,86 @@ def _list_outputs(self): return 
outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '.vtk' + class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum('raw', 'voxels', argstr='-inputmodel %s', desc='input model type (raw or voxels)', usedefault=True) in_file = File(exists=True, argstr='-inputfile %s', - mandatory=True, position=1, - desc='data file') - - maxtractpoints= traits.Int(argstr='-maxtractpoints %d', units='NA', - desc="maximum number of tract points") - mintractpoints= traits.Int(argstr='-mintractpoints %d', units='NA', - desc="minimum number of tract points") - maxtractlength= traits.Int(argstr='-maxtractlength %d', units='mm', - desc="maximum length of tracts") - mintractlength= traits.Int(argstr='-mintractlength %d', units='mm', - desc="minimum length of tracts") - datadims = traits.List(traits.Int, desc = 'data dimensions in voxels', - argstr='-datadims %s', minlen=3, maxlen=3, - units='voxels') - voxeldims = traits.List(traits.Int, desc = 'voxel dimensions in mm', - argstr='-voxeldims %s', minlen=3, maxlen=3, - units='mm') - seedpointmm = traits.List(traits.Int, desc = 'The coordinates of a single seed point for tractography in mm', - argstr='-seedpointmm %s', minlen=3, maxlen=3, - units='mm') - seedpointvox = traits.List(traits.Int, desc = 'The coordinates of a single seed point for tractography in voxels', - argstr='-seedpointvox %s', minlen=3, maxlen=3, - units='voxels') + mandatory=True, position=1, + desc='data file') + + maxtractpoints = traits.Int(argstr='-maxtractpoints %d', units='NA', + desc="maximum number of tract points") + mintractpoints = traits.Int(argstr='-mintractpoints %d', units='NA', + desc="minimum number of tract points") + maxtractlength = traits.Int(argstr='-maxtractlength %d', units='mm', + desc="maximum length of tracts") + mintractlength = traits.Int(argstr='-mintractlength %d', units='mm', + desc="minimum length of tracts") + datadims = traits.List(traits.Int, desc='data dimensions in voxels', + argstr='-datadims %s', minlen=3, maxlen=3, + units='voxels') + voxeldims = traits.List(traits.Int, desc='voxel dimensions in mm', + argstr='-voxeldims %s', minlen=3, maxlen=3, + units='mm') + seedpointmm = traits.List(traits.Int, desc='The coordinates of a single seed point for tractography in mm', + argstr='-seedpointmm %s', minlen=3, maxlen=3, + units='mm') + seedpointvox = traits.List(traits.Int, desc='The coordinates of a single seed point for tractography in voxels', + argstr='-seedpointvox %s', minlen=3, maxlen=3, + units='voxels') seedfile = File(exists=False, argstr='-seedfile %s', desc='Image Containing Seed Points') regionindex = traits.Int(argstr='-regionindex %d', units='mm', - desc="index of specific region to process") + desc="index of specific region to process") iterations = traits.Float(argstr='-iterations %d', units='NA', - desc="Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. The default is 1 if the output is streamlines, and 5000 if the output is connection probability images.") + desc="Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. 
The default is 1 if the output is streamlines, and 5000 if the output is connection probability images.") targetfile = File(exists=False, argstr='-targetfile %s', - desc='Image containing target volumes.') + desc='Image containing target volumes.') allowmultitargets = traits.Bool(argstr='-allowmultitargets', desc="Allows streamlines to connect to multiple target volumes.") - directional = traits.List(traits.Int, desc = 'Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).', - argstr='-directional %s', minlen=3, maxlen=3, - units='NA') + directional = traits.List(traits.Int, desc='Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).', + argstr='-directional %s', minlen=3, maxlen=3, + units='NA') waypointfile = File(exists=False, argstr='-waypointfile %s', - desc='Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.') + desc='Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.') truncateloops = traits.Bool(argstr='-truncateloops', desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint.") discardloops = traits.Bool(argstr='-discardloops', desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint.") exclusionfile = File(exists=False, argstr='-exclusionfile %s', - desc='Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.') + desc='Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.') truncateinexclusion = traits.Bool(argstr='-truncateinexclusion', desc="Retain segments of a streamline before entry to an exclusion ROI.") endpointfile = File(exists=False, argstr='-endpointfile %s', - desc='Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.') + desc='Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.') resamplestepsize = traits.Float(argstr='-resamplestepsize %d', units='NA', - desc="Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. 
This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option.") + desc="Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option.") noresample = traits.Bool(argstr='-noresample', desc="Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels.") outputtracts = traits.Bool(argstr='-outputtracts', desc="Output streamlines in raw binary format.") outputroot = File(exists=False, argstr='-outputroot %s', - desc='Prepended onto all output file names.') + desc='Prepended onto all output file names.') gzip = traits.Bool(argstr='-gzip', desc="save the output image in gzip format") outputcp = traits.Bool(argstr='-outputcp', desc="output the connection probability map (Analyze image, float)", - requires=['outputroot','seedfile']) + requires=['outputroot', 'seedfile']) outputsc = traits.Bool(argstr='-outputsc', desc="output the connection probability map (raw streamlines, int)", - requires=['outputroot','seedfile']) + requires=['outputroot', 'seedfile']) outputacm = traits.Bool(argstr='-outputacm', desc="output all tracts in a single connection probability map (Analyze image)", - requires=['outputroot','seedfile']) + requires=['outputroot', 'seedfile']) outputcbs = traits.Bool(argstr='-outputcbs', desc="outputs connectivity-based segmentation maps; requires target outputfile", - requires=['outputroot','targetfile','seedfile']) + requires=['outputroot', 'targetfile', 'seedfile']) + class ProcStreamlinesOutputSpec(TraitedSpec): proc = File(exists=True, desc='Processed Streamlines') outputroot_files = OutputMultiPath(File(exists=True)) + class ProcStreamlines(StdOutCommandLine): """ Process streamline data @@ -261,8 +272,8 @@ class ProcStreamlines(StdOutCommandLine): >>> proc.run() # doctest: +SKIP """ _cmd = 'procstreamlines' - input_spec=ProcStreamlinesInputSpec - output_spec=ProcStreamlinesOutputSpec + input_spec = ProcStreamlinesInputSpec + output_spec = ProcStreamlinesOutputSpec def _format_arg(self, name, spec, value): if name == 'outputroot': @@ -277,7 +288,7 @@ def _run_interface(self, runtime): if not os.path.exists(base): os.makedirs(base) new_runtime = super(ProcStreamlines, self)._run_interface(runtime) - self.outputroot_files = glob.glob(os.path.join(os.getcwd(),actual_outputroot+'*')) + self.outputroot_files = glob.glob(os.path.join(os.getcwd(), actual_outputroot+'*')) return new_runtime else: new_runtime = super(ProcStreamlines, self)._run_interface(runtime) @@ -294,24 +305,27 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_proc' + class TractShredderInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=-2, 
desc='tract file') offset = traits.Int(argstr='%d', units='NA', - desc='initial offset of offset tracts', position=1) + desc='initial offset of offset tracts', position=1) bunchsize = traits.Int(argstr='%d', units='NA', - desc='reads and outputs a group of bunchsize tracts', position=2) + desc='reads and outputs a group of bunchsize tracts', position=2) space = traits.Int(argstr='%d', units='NA', - desc='skips space tracts', position=3) + desc='skips space tracts', position=3) + class TractShredderOutputSpec(TraitedSpec): shredded = File(exists=True, desc='Shredded tract file') + class TractShredder(StdOutCommandLine): """ Extracts bunches of streamlines. @@ -334,8 +348,8 @@ class TractShredder(StdOutCommandLine): >>> shred.run() # doctest: +SKIP """ _cmd = 'tractshredder' - input_spec=TractShredderInputSpec - output_spec=TractShredderOutputSpec + input_spec = TractShredderInputSpec + output_spec = TractShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -343,18 +357,20 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + "_shredded" + class DT2NIfTIInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1, - desc='tract file') + desc='tract file') output_root = File(argstr='-outputroot %s', position=2, genfile=True, - desc='filename root prepended onto the names of three output files.') + desc='filename root prepended onto the names of three output files.') header_file = File(exists=True, argstr='-header %s', mandatory=True, position=3, - desc=' A Nifti .nii or .hdr file containing the header information') + desc=' A Nifti .nii or .hdr file containing the header information') + class DT2NIfTIOutputSpec(TraitedSpec): dt = File(exists=True, desc='diffusion tensors in NIfTI format') @@ -371,8 +387,8 @@ class DT2NIfTI(CommandLine): Reads Camino diffusion tensors, and converts them to NIFTI format as three .nii files. """ _cmd = 'dt2nii' - input_spec=DT2NIfTIInputSpec - output_spec=DT2NIfTIOutputSpec + input_spec = DT2NIfTIInputSpec + output_spec = DT2NIfTIOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -393,42 +409,45 @@ def _gen_outputroot(self): def _gen_filename(self, name): if name == 'output_root': - _, filename , _ = split_filename(self.inputs.in_file) + _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename + class NIfTIDT2CaminoInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1, - desc='A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be ' - 'in lower-triangular order as specified by the NIFTI standard for the storage of ' - 'symmetric matrices. This file should be either a .nii or a .hdr file.') + desc='A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be ' + 'in lower-triangular order as specified by the NIFTI standard for the storage of ' + 'symmetric matrices. 
This file should be either a .nii or a .hdr file.') s0_file = File(argstr='-s0 %s', exists=True, - desc='File containing the unweighted signal for each voxel, may be a raw binary ' - 'file (specify type with -inputdatatype) or a supported image file.') + desc='File containing the unweighted signal for each voxel, may be a raw binary ' + 'file (specify type with -inputdatatype) or a supported image file.') lns0_file = File(argstr='-lns0 %s', exists=True, - desc='File containing the log of the unweighted signal for each voxel, may be a ' - 'raw binary file (specify type with -inputdatatype) or a supported image file.') + desc='File containing the log of the unweighted signal for each voxel, may be a ' + 'raw binary file (specify type with -inputdatatype) or a supported image file.') bgmask = File(argstr='-bgmask %s', exists=True, - desc='Binary valued brain / background segmentation, may be a raw binary file ' - '(specify type with -maskdatatype) or a supported image file.') + desc='Binary valued brain / background segmentation, may be a raw binary file ' + '(specify type with -maskdatatype) or a supported image file.') scaleslope = traits.Float(argstr='-scaleslope %s', - desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' - 'applied after any scaling specified by the input image. Default is 1.0.') + desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' + 'applied after any scaling specified by the input image. Default is 1.0.') scaleinter = traits.Float(argstr='-scaleinter %s', - desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' - 'applied after any scaling specified by the input image. Default is 0.0.') + desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' + 'applied after any scaling specified by the input image. Default is 0.0.') uppertriangular = traits.Bool(argstr='-uppertriangular %s', - desc = 'Specifies input in upper-triangular (VTK style) order.') + desc='Specifies input in upper-triangular (VTK style) order.') + class NIfTIDT2CaminoOutputSpec(TraitedSpec): out_file = File(desc='diffusion tensors data in Camino format') + class NIfTIDT2Camino(CommandLine): """ Converts NIFTI-1 diffusion tensors to Camino format. The program reads the @@ -451,8 +470,8 @@ class NIfTIDT2Camino(CommandLine): """ _cmd = 'niftidt2camino' - input_spec=NIfTIDT2CaminoInputSpec - output_spec=NIfTIDT2CaminoOutputSpec + input_spec = NIfTIDT2CaminoInputSpec + output_spec = NIfTIDT2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -461,9 +480,10 @@ def _list_outputs(self): def _gen_filename(self, name): if name == 'out_file': - _, filename , _ = split_filename(self.inputs.in_file) + _, filename, _ = split_filename(self.inputs.in_file) return filename + class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=1, desc='Tensor-fitted data filename') @@ -484,8 +504,8 @@ class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): # How do we implement both file and enum (for the program) in one argument? # Is this option useful anyway? - #-printprogargs - #Prints data dimension (and type, if relevant) arguments for a specific + # -printprogargs + # Prints data dimension (and type, if relevant) arguments for a specific # Camino program, where prog is one of shredder, scanner2voxel, # vcthreshselect, pdview, track. 
printprogargs = File(exists=True, argstr='-printprogargs %s', position=3, @@ -510,11 +530,11 @@ class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): 'combination of fields in the new header by ' 'specifying subsequent options.')) - data_dims = traits.List(traits.Int, desc = 'data dimensions in voxels', + data_dims = traits.List(traits.Int, desc='data dimensions in voxels', argstr='-datadims %s', minlen=3, maxlen=3, units='voxels') - voxel_dims = traits.List(traits.Float, desc = 'voxel dimensions in mm', + voxel_dims = traits.List(traits.Float, desc='voxel dimensions in mm', argstr='-voxeldims %s', minlen=3, maxlen=3, units='mm') @@ -574,9 +594,11 @@ class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): "(big-endian). This is the default " "for new headers.")) + class AnalyzeHeaderOutputSpec(TraitedSpec): header = File(exists=True, desc='Analyze header') + class AnalyzeHeader(StdOutCommandLine): """ Create or read an Analyze 7.5 header file. @@ -602,8 +624,8 @@ class AnalyzeHeader(StdOutCommandLine): >>> hdr.run() # doctest: +SKIP """ _cmd = 'analyzeheader' - input_spec=AnalyzeHeaderInputSpec - output_spec=AnalyzeHeaderOutputSpec + input_spec = AnalyzeHeaderInputSpec + output_spec = AnalyzeHeaderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -611,24 +633,27 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + ".hdr" + class ShredderInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=-2, desc='raw binary data file') offset = traits.Int(argstr='%d', units='NA', - desc='initial offset of offset bytes', position=1) + desc='initial offset of offset bytes', position=1) chunksize = traits.Int(argstr='%d', units='NA', - desc='reads and outputs a chunk of chunksize bytes', position=2) + desc='reads and outputs a chunk of chunksize bytes', position=2) space = traits.Int(argstr='%d', units='NA', - desc='skips space bytes', position=3) + desc='skips space bytes', position=3) + class ShredderOutputSpec(TraitedSpec): shredded = File(exists=True, desc='Shredded binary data file') + class Shredder(StdOutCommandLine): """ Extracts periodic chunks from a data stream. 
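The Camino wrappers above all follow the same StdOutCommandLine pattern: the executable writes its result to standard output, nipype redirects that stream into a file, and _gen_outfilename() derives the file name from the input. A minimal sketch of that pattern with a hypothetical footool command, assuming the base classes are importable from nipype.interfaces.base:

import os
from nipype.interfaces.base import (StdOutCommandLine, StdOutCommandLineInputSpec,
                                    TraitedSpec, File)
from nipype.utils.filemanip import split_filename


class FooToolInputSpec(StdOutCommandLineInputSpec):
    in_file = File(exists=True, argstr='< %s', mandatory=True, position=-2,
                   desc='raw binary data file')


class FooToolOutputSpec(TraitedSpec):
    out_file = File(exists=True, desc='data written to stdout by footool')


class FooTool(StdOutCommandLine):
    _cmd = 'footool'  # hypothetical Camino-style executable, for illustration only
    input_spec = FooToolInputSpec
    output_spec = FooToolOutputSpec

    def _list_outputs(self):
        # stdout was redirected to the generated file name by StdOutCommandLine
        outputs = self.output_spec().get()
        outputs['out_file'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        # derive the output name from the input file, as the wrappers above do
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '_foo.Bdouble'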
@@ -653,8 +678,8 @@ class Shredder(StdOutCommandLine): >>> shred.run() # doctest: +SKIP """ _cmd = 'shredder' - input_spec=ShredderInputSpec - output_spec=ShredderOutputSpec + input_spec = ShredderInputSpec + output_spec = ShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -662,5 +687,5 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + "_shredded" diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index abd737d6fb..5043b83ff8 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -17,7 +17,7 @@ class DTIFitInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=1, - desc='voxel-order data filename') + desc='voxel-order data filename') bgmask = File(argstr='-bgmask %s', exists=True, desc=('Provides the name of a file containing a background mask computed using, ' @@ -25,14 +25,16 @@ class DTIFitInputSpec(StdOutCommandLineInputSpec): 'voxels and non-zero in foreground.')) scheme_file = File(exists=True, argstr='%s', mandatory=True, position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') non_linear = traits.Bool(argstr='-nonlinear', position=3, - desc="Use non-linear fitting instead of the default linear regression to the log measurements. ") + desc="Use non-linear fitting instead of the default linear regression to the log measurements. ") + class DTIFitOutputSpec(TraitedSpec): tensor_fitted = File(exists=True, desc='path/name of 4D volume in voxel order') + class DTIFit(StdOutCommandLine): """ Reads diffusion MRI data, acquired using the acquisition scheme detailed in the scheme file, from the data file. @@ -56,8 +58,8 @@ class DTIFit(StdOutCommandLine): >>> fit.run() # doctest: +SKIP """ _cmd = 'dtfit' - input_spec=DTIFitInputSpec - output_spec=DTIFitOutputSpec + input_spec = DTIFitInputSpec + output_spec = DTIFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -65,14 +67,15 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_DT.Bdouble' + class DTMetricInputSpec(CommandLineInputSpec): eigen_data = File(exists=True, argstr='-inputfile %s', mandatory=True, desc='voxel-order data filename') - metric = traits.Enum('fa','md','rd','l1', 'l2', 'l3', 'tr', 'ra', '2dfa','cl','cp','cs', + metric = traits.Enum('fa', 'md', 'rd', 'l1', 'l2', 'l3', 'tr', 'ra', '2dfa', 'cl', 'cp', 'cs', argstr='-stat %s', mandatory=True, desc=('Specifies the metric to compute. Possible choices are: ' '"fa", "md", "rd", "l1", "l2", "l3", "tr", "ra", "2dfa", "cl", "cp" or "cs".')) @@ -100,9 +103,11 @@ class DTMetricInputSpec(CommandLineInputSpec): desc=('Output name. 
Output will be a .nii.gz file if data_header is provided and' 'in voxel order with outputdatatype datatype (default: double) otherwise.')) + class DTMetricOutputSpec(TraitedSpec): metric_stats = File(exists=True, desc='Diffusion Tensor statistics of the chosen metric') + class DTMetric(CommandLine): """ Computes tensor metric statistics based on the eigenvalues l1 >= l2 >= l3 @@ -139,8 +144,8 @@ class DTMetric(CommandLine): >>> dtmetric.run() # doctest: +SKIP """ _cmd = 'dtshape' - input_spec=DTMetricInputSpec - output_spec=DTMetricOutputSpec + input_spec = DTMetricInputSpec + output_spec = DTMetricOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -158,17 +163,18 @@ def _gen_outputfile(self): def _gen_filename(self, name): if name == 'outputfile': - _, name , _ = split_filename(self.inputs.eigen_data) + _, name, _ = split_filename(self.inputs.eigen_data) metric = self.inputs.metric - datatype= self.inputs.outputdatatype + datatype = self.inputs.outputdatatype if isdefined(self.inputs.data_header): filename = name + '_' + metric + '.nii.gz' else: filename = name + '_' + metric + '.B' + datatype return filename + class ModelFitInputSpec(StdOutCommandLineInputSpec): - def _gen_model_options(): #@NoSelf + def _gen_model_options(): # @NoSelf """ Generate all possible permutations of < multi - tensor > < single - tensor > options """ @@ -192,7 +198,7 @@ def _gen_model_options(): #@NoSelf desc='Specifies the data type of the input file: "char", "short", "int", "long", "float" or "double". The input file must have BIG-ENDIAN ordering. By default, the input type is "float".') scheme_file = File(exists=True, argstr='-schemefile %s', mandatory=True, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') outputfile = File(argstr='-outputfile %s', desc='Filename of the output file.') @@ -216,9 +222,11 @@ def _gen_model_options(): #@NoSelf tau = traits.Float(argstr='-tau %G', desc='Sets the diffusion time separately. This overrides the diffusion time specified in a scheme file or by a scheme index for both the acquisition scheme and in the data synthesis.') + class ModelFitOutputSpec(TraitedSpec): fitted_data = File(exists=True, desc='output file of 4D volume in voxel order') + class ModelFit(StdOutCommandLine): """ Fits models of the spin-displacement density to diffusion MRI measurements. @@ -240,8 +248,8 @@ class ModelFit(StdOutCommandLine): >>> fit.run() # doctest: +SKIP """ _cmd = 'modelfit' - input_spec=ModelFitInputSpec - output_spec=ModelFitOutputSpec + input_spec = ModelFitInputSpec + output_spec = ModelFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -249,34 +257,35 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_fit.Bdouble' + class DTLUTGenInputSpec(StdOutCommandLineInputSpec): - lrange = traits.List(traits.Float, desc = 'Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3.' \ - 'The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition).' \ - 'The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, '\ - 'and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. 
The default range is 1 to 10.', \ - argstr='-lrange %s', minlen=2, maxlen=2, position=1, - units='NA') - - frange = traits.List(traits.Float, desc = 'Index to two-tensor LUTs. This is the fractional anisotropy \ + lrange = traits.List(traits.Float, desc='Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3.' \ + 'The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition).' \ + 'The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, '\ + 'and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.', \ + argstr='-lrange %s', minlen=2, maxlen=2, position=1, + units='NA') + + frange = traits.List(traits.Float, desc='Index to two-tensor LUTs. This is the fractional anisotropy \ of the two tensors. The default is 0.3 to 0.94', \ - argstr='-frange %s', minlen=2, maxlen=2, position=1, - units='NA') + argstr='-frange %s', minlen=2, maxlen=2, position=1, + units='NA') step = traits.Float(argstr='-step %f', units='NA', - desc='Distance between points in the LUT.' \ - 'For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed ' \ - 'at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3.' \ - 'For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.') + desc='Distance between points in the LUT.' \ + 'For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed ' \ + 'at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3.' \ + 'For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.') samples = traits.Int(argstr='-samples %d', units='NA', - desc='The number of synthetic measurements to generate at each point in the LUT. The default is 2000.') + desc='The number of synthetic measurements to generate at each point in the LUT. The default is 2000.') snr = traits.Float(argstr='-snr %f', units='NA', - desc='The signal to noise ratio of the unweighted (q = 0) measurements.'\ - 'This should match the SNR (in white matter) of the images that the LUTs are used with.') + desc='The signal to noise ratio of the unweighted (q = 0) measurements.'\ + 'This should match the SNR (in white matter) of the images that the LUTs are used with.') bingham = traits.Bool(argstr='-bingham', desc="Compute a LUT for the Bingham PDF. This is the default.") @@ -285,17 +294,19 @@ class DTLUTGenInputSpec(StdOutCommandLineInputSpec): watson = traits.Bool(argstr='-watson', desc="Compute a LUT for the Watson PDF.") inversion = traits.Int(argstr='-inversion %d', units='NA', - desc='Index of the inversion to use. The default is 1 (linear single tensor inversion).') + desc='Index of the inversion to use. The default is 1 (linear single tensor inversion).') trace = traits.Float(argstr='-trace %G', units='NA', - desc='Trace of the diffusion tensor(s) used in the test function in the LUT generation. The default is 2100E-12 m^2 s^-1.') + desc='Trace of the diffusion tensor(s) used in the test function in the LUT generation. The default is 2100E-12 m^2 s^-1.') scheme_file = File(argstr='-schemefile %s', mandatory=True, position=2, - desc='The scheme file of the images to be processed using this LUT.') + desc='The scheme file of the images to be processed using this LUT.') + class DTLUTGenOutputSpec(TraitedSpec): dtLUT = File(exists=True, desc='Lookup Table') + class DTLUTGen(StdOutCommandLine): """ Calibrates the PDFs for PICo probabilistic tractography. 
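The lookup table produced by DTLUTGen feeds the PICo stages defined below (PicoPDFs and TrackPICo). A hedged sketch of how these interfaces are typically chained in a nipype Workflow; the file names are placeholders, and seed regions and other Track inputs are omitted:

import nipype.pipeline.engine as pe
import nipype.interfaces.camino as camino

dtlutgen = pe.Node(camino.DTLUTGen(), name='dtlutgen')
dtlutgen.inputs.scheme_file = 'A.scheme'   # placeholder scheme file
dtlutgen.inputs.snr = 16.0

picopdfs = pe.Node(camino.PicoPDFs(), name='picopdfs')
picopdfs.inputs.in_file = 'dwi_DT.Bdouble'  # placeholder tensor-fitted data
picopdfs.inputs.inputmodel = 'dt'

trackpico = pe.Node(camino.TrackPICo(), name='trackpico')

wf = pe.Workflow(name='pico_tracking')
wf.connect([(dtlutgen, picopdfs, [('dtLUT', 'luts')]),
            (picopdfs, trackpico, [('pdfs', 'in_file')])])
# wf.run()  # would execute once the placeholder files and remaining Track inputs exist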
@@ -319,8 +330,8 @@ class DTLUTGen(StdOutCommandLine): >>> dtl.run() # doctest: +SKIP """ _cmd = 'dtlutgen' - input_spec=DTLUTGenInputSpec - output_spec=DTLUTGenOutputSpec + input_spec = DTLUTGenInputSpec + output_spec = DTLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -328,43 +339,46 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.scheme_file) + _, name, _ = split_filename(self.inputs.scheme_file) return name + '.dat' + class PicoPDFsInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=1, - desc='voxel-order data filename') + desc='voxel-order data filename') inputmodel = traits.Enum('dt', 'multitensor', 'pds', - argstr='-inputmodel %s', position=2, desc='input model type', usedefault=True) + argstr='-inputmodel %s', position=2, desc='input model type', usedefault=True) luts = InputMultiPath(File(exists=True), argstr='-luts %s', mandatory=True, - desc='Files containing the lookup tables.'\ - 'For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor).'\ - 'For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above).'\ - 'These LUTs may be generated with dtlutgen.') + desc='Files containing the lookup tables.'\ + 'For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor).'\ + 'For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above).'\ + 'These LUTs may be generated with dtlutgen.') pdf = traits.Enum('bingham', 'watson', 'acg', - argstr='-pdf %s', position=4, desc=' Specifies the PDF to use. There are three choices:'\ - 'watson - The Watson distribution. This distribution is rotationally symmetric.'\ - 'bingham - The Bingham distributionn, which allows elliptical probability density contours.'\ - 'acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours', usedefault=True) + argstr='-pdf %s', position=4, desc=' Specifies the PDF to use. There are three choices:'\ + 'watson - The Watson distribution. This distribution is rotationally symmetric.'\ + 'bingham - The Bingham distributionn, which allows elliptical probability density contours.'\ + 'acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours', usedefault=True) directmap = traits.Bool(argstr='-directmap', desc="Only applicable when using pds as the inputmodel. Use direct mapping between the eigenvalues and the distribution parameters instead of the log of the eigenvalues.") maxcomponents = traits.Int(argstr='-maxcomponents %d', units='NA', - desc='The maximum number of tensor components in a voxel (default 2) for multitensor data.'\ - 'Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.') + desc='The maximum number of tensor components in a voxel (default 2) for multitensor data.'\ + 'Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.') numpds = traits.Int(argstr='-numpds %d', units='NA', - desc='The maximum number of PDs in a voxel (default 3) for PD data.' \ - 'This option determines the size of the input and output voxels.' 
\ - 'This means that the data file may be large enough to accomodate three or more PDs,'\ - 'but does not mean that any of the voxels are classified as containing three or more PDs.') + desc='The maximum number of PDs in a voxel (default 3) for PD data.' \ + 'This option determines the size of the input and output voxels.' \ + 'This means that the data file may be large enough to accomodate three or more PDs,'\ + 'but does not mean that any of the voxels are classified as containing three or more PDs.') + class PicoPDFsOutputSpec(TraitedSpec): pdfs = File(exists=True, desc='path/name of 4D volume in voxel order') + class PicoPDFs(StdOutCommandLine): """ Constructs a spherical PDF in each voxel for probabilistic tractography. @@ -380,8 +394,8 @@ class PicoPDFs(StdOutCommandLine): >>> pdf.run() # doctest: +SKIP """ _cmd = 'picopdfs' - input_spec=PicoPDFsInputSpec - output_spec=PicoPDFsOutputSpec + input_spec = PicoPDFsInputSpec + output_spec = PicoPDFsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -389,9 +403,10 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_pdfs.Bdouble' + class TrackInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='-inputfile %s', position=1, desc='input data file') @@ -401,7 +416,7 @@ class TrackInputSpec(CommandLineInputSpec): inputmodel = traits.Enum('dt', 'multitensor', 'sfpeak', 'pico', 'repbs_dt', 'repbs_multitensor', 'ballstick', 'wildbs_dt', - 'bayesdirac', 'bayesdirac_dt','bedpostx_dyad', + 'bayesdirac', 'bayesdirac_dt', 'bedpostx_dyad', 'bedpostx', argstr='-inputmodel %s', desc='input model type', usedefault=True) @@ -517,15 +532,17 @@ class TrackInputSpec(CommandLineInputSpec): outputtracts = traits.Enum('float', 'double', 'oogl', argstr='-outputtracts %s', desc='output tract file type') - out_file = File(argstr='-outputfile %s', position= -1, genfile=True, + out_file = File(argstr='-outputfile %s', position=-1, genfile=True, desc='output data file') - output_root = File(exists=False, argstr='-outputroot %s', position= -1, + output_root = File(exists=False, argstr='-outputroot %s', position=-1, desc='root directory for output') + class TrackOutputSpec(TraitedSpec): tracked = File(exists=True, desc='output file containing reconstructed tracts') + class Track(CommandLine): """ Performs tractography using one of the following models: @@ -567,9 +584,10 @@ def _gen_outfilename(self): if not isdefined(self.inputs.in_file): name = 'bedpostx' else: - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_tracked' + class TrackDT(Track): """ Performs streamline tractography using tensor data @@ -588,11 +606,13 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "dt" return super(TrackDT, self).__init__(command, **inputs) + class TrackPICoInputSpec(TrackInputSpec): pdf = traits.Enum('bingham', 'watson', 'acg', argstr='-pdf %s', desc='Specifies the model for PICo parameters. The default is "bingham.') iterations = traits.Int(argstr='-iterations %d', units='NA', desc="Number of streamlines to generate at each seed point. 
The default is 5000.") + class TrackPICo(Track): """ Performs streamline tractography using the Probabilistic Index of Connectivity (PICo) algorithm @@ -613,15 +633,17 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "pico" return super(TrackPICo, self).__init__(command, **inputs) + class TrackBedpostxDeterInputSpec(TrackInputSpec): bedpostxdir = Directory(argstr='-bedpostxdir %s', mandatory=True, exists=True, - desc=('Directory containing bedpostx output')) + desc=('Directory containing bedpostx output')) min_vol_frac = traits.Float(argstr='-bedpostxminf %d', units='NA', desc=("Zeros out compartments in bedpostx data " "with a mean volume fraction f of less than " "min_vol_frac. The default is 0.01.")) + class TrackBedpostxDeter(Track): """ Data from FSL's bedpostx can be imported into Camino for deterministic tracking. @@ -652,9 +674,10 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx_dyad" return super(TrackBedpostxDeter, self).__init__(command, **inputs) + class TrackBedpostxProbaInputSpec(TrackInputSpec): bedpostxdir = Directory(argstr='-bedpostxdir %s', mandatory=True, exists=True, - desc=('Directory containing bedpostx output')) + desc=('Directory containing bedpostx output')) min_vol_frac = traits.Float(argstr='-bedpostxminf %d', units='NA', desc=("Zeros out compartments in bedpostx data " @@ -665,6 +688,7 @@ class TrackBedpostxProbaInputSpec(TrackInputSpec): desc=("Number of streamlines to generate at each " "seed point. The default is 1.")) + class TrackBedpostxProba(Track): """ Data from FSL's bedpostx can be imported into Camino for probabilistic tracking. @@ -700,6 +724,7 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx_dyad" return super(TrackBedpostxProba, self).__init__(command, **inputs) + class TrackBayesDiracInputSpec(TrackInputSpec): scheme_file = File(argstr='-schemefile %s', mandatory=True, exists=True, desc=('The scheme file corresponding to the data being ' @@ -723,6 +748,7 @@ class TrackBayesDiracInputSpec(TrackInputSpec): extpriordatatype = traits.Enum('float', 'double', argstr='-extpriordatatype %s', desc='Datatype of the prior image. The default is "double".') + class TrackBayesDirac(Track): """ Performs streamline tractography using a Bayesian tracking with Dirac priors @@ -744,6 +770,7 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bayesdirac" return super(TrackBayesDirac, self).__init__(command, **inputs) + class TrackBallStick(Track): """ Performs streamline tractography using ball-stick fitted data @@ -762,16 +789,18 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "ballstick" return super(TrackBallStick, self).__init__(command, **inputs) + class TrackBootstrapInputSpec(TrackInputSpec): scheme_file = File(argstr='-schemefile %s', mandatory=True, exists=True, desc='The scheme file corresponding to the data being processed.') iterations = traits.Int(argstr='-iterations %d', units='NA', desc="Number of streamlines to generate at each seed point.") - inversion = traits.Int(argstr='-inversion %s', desc = 'Tensor reconstruction algorithm for repetition bootstrapping. Default is 1 (linear reconstruction, single tensor).') + inversion = traits.Int(argstr='-inversion %s', desc='Tensor reconstruction algorithm for repetition bootstrapping. 
Default is 1 (linear reconstruction, single tensor).') bsdatafiles = traits.List(File(exists=True), mandatory=True, argstr='-bsdatafile %s', desc='Specifies files containing raw data for repetition bootstrapping. Use -inputfile for wild bootstrap data.') - bgmask = File(argstr='-bgmask %s', exists=True, desc = 'Provides the name of a file containing a background mask computed using, for example, FSL\'s bet2 program. The mask file contains zero in background voxels and non-zero in foreground.') + bgmask = File(argstr='-bgmask %s', exists=True, desc='Provides the name of a file containing a background mask computed using, for example, FSL\'s bet2 program. The mask file contains zero in background voxels and non-zero in foreground.') + class TrackBootstrap(Track): """ @@ -794,35 +823,38 @@ class TrackBootstrap(Track): def __init__(self, command=None, **inputs): return super(TrackBootstrap, self).__init__(command, **inputs) + class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc='Tensor-fitted data filename') scheme_file = File(exists=True, argstr='%s', position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') out_file = File(argstr="> %s", position=-1, genfile=True) inputmodel = traits.Enum('dt', 'twotensor', 'threetensor', - argstr='-inputmodel %s', - desc='Specifies the model that the input tensor data contains parameters for.' \ - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ - '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ - 'contains a single diffusion tensor in each voxel.') + argstr='-inputmodel %s', + desc='Specifies the model that the input tensor data contains parameters for.' \ + 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ + '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ + 'contains a single diffusion tensor in each voxel.') inputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', - argstr='-inputdatatype %s', - desc='Specifies the data type of the input file. The data type can be any of the' \ - 'following strings: "char", "short", "int", "long", "float" or "double".') + argstr='-inputdatatype %s', + desc='Specifies the data type of the input file. The data type can be any of the' \ + 'following strings: "char", "short", "int", "long", "float" or "double".') outputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', - argstr='-outputdatatype %s', - desc='Specifies the data type of the output data. The data type can be any of the' \ - 'following strings: "char", "short", "int", "long", "float" or "double".') + argstr='-outputdatatype %s', + desc='Specifies the data type of the output data. The data type can be any of the' \ + 'following strings: "char", "short", "int", "long", "float" or "double".') + class ComputeMeanDiffusivityOutputSpec(TraitedSpec): md = File(exists=True, desc='Mean Diffusivity Map') + class ComputeMeanDiffusivity(StdOutCommandLine): """ Computes the mean diffusivity (trace/3) from diffusion tensors. 
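The Track* subclasses above only preset inputmodel in their __init__, so each behaves like the base Track interface configured by hand. A small sketch of that equivalence, assuming the classes are re-exported from nipype.interfaces.camino:

import nipype.interfaces.camino as camino

t_sub = camino.TrackDT()
t_base = camino.Track(inputmodel='dt')
# both interfaces end up with the same inputmodel setting
assert t_sub.inputs.inputmodel == t_base.inputs.inputmodel == 'dt'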
@@ -837,8 +869,8 @@ class ComputeMeanDiffusivity(StdOutCommandLine): >>> md.run() # doctest: +SKIP """ _cmd = 'md' - input_spec=ComputeMeanDiffusivityInputSpec - output_spec=ComputeMeanDiffusivityOutputSpec + input_spec = ComputeMeanDiffusivityInputSpec + output_spec = ComputeMeanDiffusivityOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -846,36 +878,39 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) - return name + "_MD.img" #Need to change to self.inputs.outputdatatype + _, name, _ = split_filename(self.inputs.in_file) + return name + "_MD.img" # Need to change to self.inputs.outputdatatype + class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc='Tensor-fitted data filename') scheme_file = File(exists=True, argstr='%s', position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') inputmodel = traits.Enum('dt', 'twotensor', 'threetensor', 'multitensor', - argstr='-inputmodel %s', - desc='Specifies the model that the input tensor data contains parameters for.' \ - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ - '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ - 'contains a single diffusion tensor in each voxel.') + argstr='-inputmodel %s', + desc='Specifies the model that the input tensor data contains parameters for.' \ + 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ + '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ + 'contains a single diffusion tensor in each voxel.') inputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', - argstr='-inputdatatype %s', - desc='Specifies the data type of the input file. The data type can be any of the' \ - 'following strings: "char", "short", "int", "long", "float" or "double".') + argstr='-inputdatatype %s', + desc='Specifies the data type of the input file. The data type can be any of the' \ + 'following strings: "char", "short", "int", "long", "float" or "double".') outputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', - argstr='-outputdatatype %s', - desc='Specifies the data type of the output data. The data type can be any of the' \ - 'following strings: "char", "short", "int", "long", "float" or "double".') + argstr='-outputdatatype %s', + desc='Specifies the data type of the output data. The data type can be any of the' \ + 'following strings: "char", "short", "int", "long", "float" or "double".') + class ComputeFractionalAnisotropyOutputSpec(TraitedSpec): fa = File(exists=True, desc='Fractional Anisotropy Map') + class ComputeFractionalAnisotropy(StdOutCommandLine): """ Computes the fractional anisotropy of tensors. 
@@ -896,8 +931,8 @@ class ComputeFractionalAnisotropy(StdOutCommandLine): >>> fa.run() # doctest: +SKIP """ _cmd = 'fa' - input_spec=ComputeFractionalAnisotropyInputSpec - output_spec=ComputeFractionalAnisotropyOutputSpec + input_spec = ComputeFractionalAnisotropyInputSpec + output_spec = ComputeFractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -905,36 +940,39 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) - return name + '_FA.Bdouble' #Need to change to self.inputs.outputdatatype + _, name, _ = split_filename(self.inputs.in_file) + return name + '_FA.Bdouble' # Need to change to self.inputs.outputdatatype + class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='< %s', mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc='Tensor-fitted data filename') scheme_file = File(exists=True, argstr='%s', position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') inputmodel = traits.Enum('dt', 'twotensor', 'threetensor', 'multitensor', - argstr='-inputmodel %s', - desc='Specifies the model that the input tensor data contains parameters for.' \ - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ - '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ - 'contains a single diffusion tensor in each voxel.') + argstr='-inputmodel %s', + desc='Specifies the model that the input tensor data contains parameters for.' \ + 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ + '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ + 'contains a single diffusion tensor in each voxel.') inputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', - argstr='-inputdatatype %s', - desc='Specifies the data type of the input file. The data type can be any of the' \ - 'following strings: "char", "short", "int", "long", "float" or "double".') + argstr='-inputdatatype %s', + desc='Specifies the data type of the input file. The data type can be any of the' \ + 'following strings: "char", "short", "int", "long", "float" or "double".') outputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', - argstr='-outputdatatype %s', - desc='Specifies the data type of the output data. The data type can be any of the' \ - 'following strings: "char", "short", "int", "long", "float" or "double".') + argstr='-outputdatatype %s', + desc='Specifies the data type of the output data. The data type can be any of the' \ + 'following strings: "char", "short", "int", "long", "float" or "double".') + class ComputeTensorTraceOutputSpec(TraitedSpec): trace = File(exists=True, desc='Trace of the diffusion tensor') + class ComputeTensorTrace(StdOutCommandLine): """ Computes the trace of tensors. 
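A hedged sketch of how the tensor-fitting and scalar-map interfaces in this file are usually wired together in a nipype Workflow; the input file names are placeholders:

import nipype.pipeline.engine as pe
import nipype.interfaces.camino as camino

dtfit = pe.Node(camino.DTIFit(), name='dtfit')
dtfit.inputs.in_file = 'dwi.Bfloat'      # placeholder voxel-order data
dtfit.inputs.scheme_file = 'A.scheme'    # placeholder scheme file

fa = pe.Node(camino.ComputeFractionalAnisotropy(), name='fa')
trace = pe.Node(camino.ComputeTensorTrace(), name='trace')

wf = pe.Workflow(name='tensor_metrics')
wf.connect([(dtfit, fa, [('tensor_fitted', 'in_file')]),
            (dtfit, trace, [('tensor_fitted', 'in_file')])])
# wf.run()  # runs the fit and both scalar maps once the placeholder files exist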
@@ -957,8 +995,8 @@ class ComputeTensorTrace(StdOutCommandLine): >>> trace.run() # doctest: +SKIP """ _cmd = 'trd' - input_spec=ComputeTensorTraceInputSpec - output_spec=ComputeTensorTraceOutputSpec + input_spec = ComputeTensorTraceInputSpec + output_spec = ComputeTensorTraceOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -966,8 +1004,8 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) - return name + '_TrD.img' #Need to change to self.inputs.outputdatatype + _, name, _ = split_filename(self.inputs.in_file) + return name + '_TrD.img' # Need to change to self.inputs.outputdatatype class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): @@ -978,8 +1016,8 @@ class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): maxcomponents = traits.Int(argstr='-maxcomponents %d', desc='The maximum number of tensor components in a voxel of the input data.') inputdatatype = traits.Enum('double', 'float', 'long', 'int', 'short', 'char', - argstr='-inputdatatype %s', usedefault=True, - desc=('Specifies the data type of the input data. ' + argstr='-inputdatatype %s', usedefault=True, + desc=('Specifies the data type of the input data. ' 'The data type can be any of the following strings: ' '"char", "short", "int", "long", "float" or "double".' 'Default is double data type')) @@ -991,9 +1029,11 @@ class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): '"char", "short", "int", "long", "float" or "double".' 'Default is double data type')) + class ComputeEigensystemOutputSpec(TraitedSpec): eigen = File(exists=True, desc='Trace of the diffusion tensor') + class ComputeEigensystem(StdOutCommandLine): """ Computes the eigensystem from tensor fitted data. @@ -1015,8 +1055,8 @@ class ComputeEigensystem(StdOutCommandLine): >>> dteig.run() # doctest: +SKIP """ _cmd = 'dteig' - input_spec=ComputeEigensystemInputSpec - output_spec=ComputeEigensystemOutputSpec + input_spec = ComputeEigensystemInputSpec + output_spec = ComputeEigensystemOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -1024,6 +1064,6 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) - datatype= self.inputs.outputdatatype + _, name, _ = split_filename(self.inputs.in_file) + datatype = self.inputs.outputdatatype return name + '_eig.B' + datatype diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index e10e076775..bd39f06760 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -16,13 +16,13 @@ class QBallMXInputSpec(StdOutCommandLineInputSpec): basistype = traits.Enum('rbf', 'sh', argstr='-basistype %s', - desc=('Basis function type. "rbf" to use radial basis functions ' + desc=('Basis function type. "rbf" to use radial basis functions ' '"sh" to use spherical harmonics'), usedefault=True) scheme_file = File(exists=True, argstr='-schemefile %s', mandatory=True, desc='Specifies the scheme file for the diffusion MRI data') order = traits.Int(argstr='-order %d', units='NA', - desc=('Specific to sh. Maximum order of the spherical harmonic series. ' - 'Default is 4.')) + desc=('Specific to sh. Maximum order of the spherical harmonic series. ' + 'Default is 4.')) rbfpointset = traits.Int(argstr='-rbfpointset %d', units='NA', desc=('Specific to rbf. Sets the number of radial basis functions to use. ' 'The value specified must be present in the Pointsets directory. 
' @@ -34,9 +34,11 @@ class QBallMXInputSpec(StdOutCommandLineInputSpec): desc=('Specific to rbf. Sets the width of the smoothing basis functions. ' 'The default value is 0.1309 (7.5 degrees).')) + class QBallMXOutputSpec(TraitedSpec): qmat = File(exists=True, desc='Q-Ball reconstruction matrix') + class QBallMX(StdOutCommandLine): """ Generates a reconstruction matrix for Q-Ball. Used in LinRecon with @@ -77,8 +79,8 @@ class QBallMX(StdOutCommandLine): >>> qballcoeffs.run() # doctest: +SKIP """ _cmd = 'qballmx' - input_spec=QBallMXInputSpec - output_spec=QBallMXOutputSpec + input_spec = QBallMXInputSpec + output_spec = QBallMXOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -86,11 +88,10 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.scheme_file) + _, name, _ = split_filename(self.inputs.scheme_file) return name + '_qmat.Bdouble' - class LinReconInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=1, desc='voxel-order data filename') @@ -106,9 +107,11 @@ class LinReconInputSpec(StdOutCommandLineInputSpec): 'measurements themselves')) bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask') + class LinReconOutputSpec(TraitedSpec): recon_data = File(exists=True, desc='Transformed data') + class LinRecon(StdOutCommandLine): """ Runs a linear transformation in each voxel. @@ -153,8 +156,8 @@ class LinRecon(StdOutCommandLine): >>> qballcoeffs.run() # doctest: +SKIP """ _cmd = 'linrecon' - input_spec=LinReconInputSpec - output_spec=LinReconOutputSpec + input_spec = LinReconInputSpec + output_spec = LinReconOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -162,19 +165,20 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.scheme_file) + _, name, _ = split_filename(self.inputs.scheme_file) return name + '_recondata.Bdouble' + class MESDInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1, desc='voxel-order data filename') inverter = traits.Enum('SPIKE', 'PAS', argstr='-filter %s', position=2, mandatory=True, desc=('The inversion index specifies the type of inversion to perform on the data.' 'The currently available choices are:' - 'Inverter name | Inverter parameters' - '---------------|------------------' - 'SPIKE | bd (b-value x diffusivity along the fibre.)' - 'PAS | r')) + 'Inverter name | Inverter parameters' + '---------------|------------------' + 'SPIKE | bd (b-value x diffusivity along the fibre.)' + 'PAS | r')) inverter_param = traits.Float(argstr='%f', units='NA', position=3, mandatory=True, desc=('Parameter associated with the inverter. Cf. inverter description for' 'more information.')) @@ -190,12 +194,14 @@ class MESDInputSpec(StdOutCommandLineInputSpec): bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask') inputdatatype = traits.Enum('float', 'char', 'short', 'int', 'long', 'double', argstr='-inputdatatype %s', desc=('Specifies the data type of the input file: "char", "short", "int", "long",' - '"float" or "double". The input file must have BIG-ENDIAN ordering.' - 'By default, the input type is "float".')) + '"float" or "double". The input file must have BIG-ENDIAN ordering.' 
+ 'By default, the input type is "float".')) + class MESDOutputSpec(TraitedSpec): mesd_data = File(exists=True, desc='MESD data') + class MESD(StdOutCommandLine): """ MESD is a general program for maximum entropy spherical deconvolution. @@ -274,8 +280,8 @@ class MESD(StdOutCommandLine): >>> mesd.run() # doctest: +SKIP """ _cmd = 'mesd' - input_spec=MESDInputSpec - output_spec=MESDOutputSpec + input_spec = MESDInputSpec + output_spec = MESDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -283,9 +289,10 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.scheme_file) + _, name, _ = split_filename(self.inputs.scheme_file) return name + '_MESD.Bdouble' + class SFPeaksInputSpec(StdOutCommandLineInputSpec): in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, desc='Voxel-order data of spherical functions') @@ -310,10 +317,10 @@ class SFPeaksInputSpec(StdOutCommandLineInputSpec): 'The default value is 246.')) mepointset = traits.Int(argstr='-mepointset %d', units='NA', desc=('Use a set of directions other than those in the scheme file for the deconvolution ' - 'kernel. The number refers to the number of directions on the unit sphere. ' - 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' - 'Use this option only if you told MESD to use a custom set of directions with the same ' - 'option. Otherwise, specify the scheme file with the "schemefile" attribute.')) + 'kernel. The number refers to the number of directions on the unit sphere. ' + 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' + 'Use this option only if you told MESD to use a custom set of directions with the same ' + 'option. Otherwise, specify the scheme file with the "schemefile" attribute.')) numpds = traits.Int(argstr='-numpds %d', units='NA', desc='The largest number of peak directions to output in each voxel.') noconsistencycheck = traits.Bool(argstr='-noconsistencycheck', @@ -333,12 +340,14 @@ class SFPeaksInputSpec(StdOutCommandLineInputSpec): desc=('Base threshold on the actual peak direction strength divided by the mean of the ' 'function. The default is 1.0 (the peak must be equal or greater than the mean).')) stdsfrommean = traits.Float(argstr='-stdsfrommean %f', units='NA', - desc=('This is the number of standard deviations of the function to be added to the ' - '"pdthresh" attribute in the peak directions pruning.')) + desc=('This is the number of standard deviations of the function to be added to the ' + '"pdthresh" attribute in the peak directions pruning.')) + class SFPeaksOutputSpec(TraitedSpec): peaks = File(exists=True, desc='Peaks of the spherical functions.') + class SFPeaks(StdOutCommandLine): """ Finds the peaks of spherical functions. 
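QBallMX, LinRecon and MESD above derive their default output names from the scheme file rather than from the data file (see their _gen_outfilename methods). A small illustration of that naming using nipype's split_filename helper; 'A.scheme' is a placeholder name:

from nipype.utils.filemanip import split_filename

_, name, _ = split_filename('A.scheme')
print(name + '_qmat.Bdouble')       # QBallMX default: A_qmat.Bdouble
print(name + '_recondata.Bdouble')  # LinRecon default: A_recondata.Bdouble
print(name + '_MESD.Bdouble')       # MESD default: A_MESD.Bdouble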
@@ -418,8 +427,8 @@ class SFPeaks(StdOutCommandLine): >>> sf_peaks.run() # doctest: +SKIP """ _cmd = 'sfpeaks' - input_spec=SFPeaksInputSpec - output_spec=SFPeaksOutputSpec + input_spec = SFPeaksInputSpec + output_spec = SFPeaksOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -427,7 +436,7 @@ def _list_outputs(self): return outputs def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_peaks.Bdouble' diff --git a/nipype/interfaces/camino/utils.py b/nipype/interfaces/camino/utils.py index 33497f2bdc..19fe6ac768 100644 --- a/nipype/interfaces/camino/utils.py +++ b/nipype/interfaces/camino/utils.py @@ -9,8 +9,8 @@ import os from ..base import (traits, TraitedSpec, File, - CommandLine, CommandLineInputSpec, isdefined, - InputMultiPath) + CommandLine, CommandLineInputSpec, isdefined, + InputMultiPath) from ...utils.filemanip import split_filename @@ -31,9 +31,11 @@ class ImageStatsInputSpec(CommandLineInputSpec): desc=('Filename root prepended onto the names of the output ' ' files. The extension will be determined from the input.')) + class ImageStatsOutputSpec(TraitedSpec): out_file = File(exists=True, desc='Path of the file computed with the statistic chosen') + class ImageStats(CommandLine): """ This program computes voxelwise statistics on a series of 3D images. The images @@ -61,5 +63,5 @@ def _list_outputs(self): def _gen_outfilename(self): output_root = self.inputs.output_root first_file = self.inputs.in_files[0] - _, _ , ext = split_filename(first_file) + _, _, ext = split_filename(first_file) return output_root + ext diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index bb95f5f8d0..9075a06ee2 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -16,25 +16,25 @@ class Camino2TrackvisInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='-i %s', mandatory=True, position=1, - desc='The input .Bfloat (camino) file.') + desc='The input .Bfloat (camino) file.') out_file = File(argstr='-o %s', genfile=True, position=2, - desc='The filename to which to write the .trk (trackvis) file.') + desc='The filename to which to write the .trk (trackvis) file.') min_length = traits.Float(argstr='-l %d', position=3, - units='mm', desc='The minimum length of tracts to output') + units='mm', desc='The minimum length of tracts to output') data_dims = traits.List(traits.Int, argstr='-d %s', sep=',', - mandatory=True, position=4, minlen=3, maxlen=3, - desc='Three comma-separated integers giving the number of voxels along each dimension of the source scans.') + mandatory=True, position=4, minlen=3, maxlen=3, + desc='Three comma-separated integers giving the number of voxels along each dimension of the source scans.') voxel_dims = traits.List(traits.Float, argstr='-x %s', sep=',', - mandatory=True, position=5, minlen=3, maxlen=3, - desc='Three comma-separated numbers giving the size of each voxel in mm.') + mandatory=True, position=5, minlen=3, maxlen=3, + desc='Three comma-separated numbers giving the size of each voxel in mm.') - #Change to enum with all combinations? i.e. LAS, LPI, RAS, etc.. + # Change to enum with all combinations? i.e. LAS, LPI, RAS, etc.. 
voxel_order = File(argstr='--voxel-order %s', mandatory=True, position=6, - desc='Set the order in which various directions were stored.\ + desc='Set the order in which various directions were stored.\ Specify with three letters consisting of one each \ from the pairs LR, AP, and SI. These stand for Left-Right, \ Anterior-Posterior, and Superior-Inferior. \ @@ -43,11 +43,13 @@ class Camino2TrackvisInputSpec(CommandLineInputSpec): Read coordinate system from a NIfTI file.') nifti_file = File(argstr='--nifti %s', exists=True, - position=7, desc='Read coordinate system from a NIfTI file.') + position=7, desc='Read coordinate system from a NIfTI file.') + class Camino2TrackvisOutputSpec(TraitedSpec): trackvis = File(exists=True, desc='The filename to which to write the .trk (trackvis) file.') + class Camino2Trackvis(CommandLine): """ Wraps camino_to_trackvis from Camino-Trackvis @@ -68,8 +70,8 @@ class Camino2Trackvis(CommandLine): """ _cmd = 'camino_to_trackvis' - input_spec=Camino2TrackvisInputSpec - output_spec=Camino2TrackvisOutputSpec + input_spec = Camino2TrackvisInputSpec + output_spec = Camino2TrackvisOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -81,10 +83,12 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '.trk' + class Trackvis2CaminoInputSpec(CommandLineInputSpec): """ Wraps trackvis_to_camino from Camino-Trackvis @@ -101,22 +105,24 @@ class Trackvis2CaminoInputSpec(CommandLineInputSpec): """ in_file = File(exists=True, argstr='-i %s', - mandatory=True, position=1, - desc='The input .trk (trackvis) file.') + mandatory=True, position=1, + desc='The input .trk (trackvis) file.') out_file = File(argstr='-o %s', genfile=True, - position=2, desc='The filename to which to write the .Bfloat (camino).') + position=2, desc='The filename to which to write the .Bfloat (camino).') append_file = File(exists=True, argstr='-a %s', - position=2, desc='A file to which the append the .Bfloat data. ') + position=2, desc='A file to which the append the .Bfloat data. 
') + class Trackvis2CaminoOutputSpec(TraitedSpec): camino = File(exists=True, desc='The filename to which to write the .Bfloat (camino).') + class Trackvis2Camino(CommandLine): _cmd = 'trackvis_to_camino' - input_spec=Trackvis2CaminoInputSpec - output_spec=Trackvis2CaminoOutputSpec + input_spec = Trackvis2CaminoInputSpec + output_spec = Trackvis2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -128,6 +134,7 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '.Bfloat' diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 2e1b1e0501..0b539837eb 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -73,54 +73,58 @@ def length(xyz, along=False): return np.cumsum(dists) return np.sum(dists) + def get_rois_crossed(pointsmm, roiData, voxelSize): - n_points = len(pointsmm) - rois_crossed = [] - for j in range(0, n_points): - # store point - x = int(pointsmm[j, 0] / float(voxelSize[0])) - y = int(pointsmm[j, 1] / float(voxelSize[1])) - z = int(pointsmm[j, 2] / float(voxelSize[2])) - if not roiData[x, y, z] == 0: - rois_crossed.append(roiData[x, y, z]) - rois_crossed = list(dict.fromkeys(rois_crossed).keys()) #Removed duplicates from the list - return rois_crossed + n_points = len(pointsmm) + rois_crossed = [] + for j in range(0, n_points): + # store point + x = int(pointsmm[j, 0] / float(voxelSize[0])) + y = int(pointsmm[j, 1] / float(voxelSize[1])) + z = int(pointsmm[j, 2] / float(voxelSize[2])) + if not roiData[x, y, z] == 0: + rois_crossed.append(roiData[x, y, z]) + rois_crossed = list(dict.fromkeys(rois_crossed).keys()) # Removed duplicates from the list + return rois_crossed + def get_connectivity_matrix(n_rois, list_of_roi_crossed_lists): - connectivity_matrix = np.zeros((n_rois, n_rois), dtype=np.uint) - for rois_crossed in list_of_roi_crossed_lists: - for idx_i, roi_i in enumerate(rois_crossed): - for idx_j, roi_j in enumerate(rois_crossed): - if idx_i > idx_j: - if not roi_i == roi_j: - connectivity_matrix[roi_i - 1, roi_j - 1] += 1 - connectivity_matrix = connectivity_matrix + connectivity_matrix.T - return connectivity_matrix + connectivity_matrix = np.zeros((n_rois, n_rois), dtype=np.uint) + for rois_crossed in list_of_roi_crossed_lists: + for idx_i, roi_i in enumerate(rois_crossed): + for idx_j, roi_j in enumerate(rois_crossed): + if idx_i > idx_j: + if not roi_i == roi_j: + connectivity_matrix[roi_i - 1, roi_j - 1] += 1 + connectivity_matrix = connectivity_matrix + connectivity_matrix.T + return connectivity_matrix + def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): - """ Create the intersection arrays for each fiber - """ - n_fib = len(streamlines) - pc = -1 - # Computation for each fiber - final_fiber_ids = [] - list_of_roi_crossed_lists = [] - for i, fiber in enumerate(streamlines): - pcN = int(round(float(100 * i) / n_fib)) - if pcN > pc and pcN % 1 == 0: - pc = pcN - print('%4.0f%%' % (pc)) - rois_crossed = get_rois_crossed(fiber[0], roiData, voxelSize) - if len(rois_crossed) > 0: - list_of_roi_crossed_lists.append(list(rois_crossed)) - final_fiber_ids.append(i) - - connectivity_matrix = get_connectivity_matrix(n_rois, list_of_roi_crossed_lists) - dis = n_fib - len(final_fiber_ids) - iflogger.info("Found %i (%f percent out of %i fibers) fibers that start or terminate in a voxel which is not 
labeled. (orphans)" % (dis, dis * 100.0 / n_fib, n_fib)) - iflogger.info("Valid fibers: %i (%f percent)" % (n_fib - dis, 100 - dis * 100.0 / n_fib)) - iflogger.info('Returning the intersecting point connectivity matrix') - return connectivity_matrix, final_fiber_ids + """ Create the intersection arrays for each fiber + """ + n_fib = len(streamlines) + pc = -1 + # Computation for each fiber + final_fiber_ids = [] + list_of_roi_crossed_lists = [] + for i, fiber in enumerate(streamlines): + pcN = int(round(float(100 * i) / n_fib)) + if pcN > pc and pcN % 1 == 0: + pc = pcN + print('%4.0f%%' % (pc)) + rois_crossed = get_rois_crossed(fiber[0], roiData, voxelSize) + if len(rois_crossed) > 0: + list_of_roi_crossed_lists.append(list(rois_crossed)) + final_fiber_ids.append(i) + + connectivity_matrix = get_connectivity_matrix(n_rois, list_of_roi_crossed_lists) + dis = n_fib - len(final_fiber_ids) + iflogger.info("Found %i (%f percent out of %i fibers) fibers that start or terminate in a voxel which is not labeled. (orphans)" % (dis, dis * 100.0 / n_fib, n_fib)) + iflogger.info("Valid fibers: %i (%f percent)" % (n_fib - dis, 100 - dis * 100.0 / n_fib)) + iflogger.info('Returning the intersecting point connectivity matrix') + return connectivity_matrix, final_fiber_ids + def create_endpoints_array(fib, voxelSize): """ Create the endpoints arrays for each fiber @@ -167,6 +171,7 @@ def create_endpoints_array(fib, voxelSize): iflogger.info('Returning the endpoint matrix') return (endpoints, endpointsmm) + def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_name, endpoint_name, intersections=False): """ Create the connection matrix for each resolution using fibers and ROIs. """ @@ -217,7 +222,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ G.add_node(int(u), d) # compute a position for the node based on the mean position of the # ROI in voxel coordinates (segmentation volume ) - xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])) , axis=1)) + xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1)) G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) if intersections: @@ -228,7 +233,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ intersection_matrix = np.matrix(intersection_matrix) I = G.copy() H = nx.from_numpy_matrix(np.matrix(intersection_matrix)) - H = nx.relabel_nodes(H, lambda x: x + 1) #relabel nodes so they start at 1 + H = nx.relabel_nodes(H, lambda x: x + 1) # relabel nodes so they start at 1 I.add_weighted_edges_from(((u, v, d['weight']) for u, v, d in H.edges(data=True))) dis = 0 @@ -265,7 +270,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ fiberlabels[i, 0] = startROI fiberlabels[i, 1] = endROI - final_fiberlabels.append([ startROI, endROI ]) + final_fiberlabels.append([startROI, endROI]) final_fibers_idx.append(i) # Add edge to graph @@ -313,7 +318,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ di['fiber_length_mean'] = 0 di['fiber_length_median'] = 0 di['fiber_length_std'] = 0 - if not u == v: #Fix for self loop problem + if not u == v: # Fix for self loop problem G.add_edge(u, v, di) if 'fiblist' in d: numfib.add_edge(u, v, weight=di['number_of_fibers']) @@ -327,11 +332,11 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ numfib_mlab = nx.to_numpy_matrix(numfib, dtype=int) numfib_dict = 
{'number_of_fibers': numfib_mlab} fibmean_mlab = nx.to_numpy_matrix(fibmean, dtype=np.float64) - fibmean_dict = {'mean_fiber_length':fibmean_mlab} + fibmean_dict = {'mean_fiber_length': fibmean_mlab} fibmedian_mlab = nx.to_numpy_matrix(fibmedian, dtype=np.float64) - fibmedian_dict = {'median_fiber_length':fibmedian_mlab} + fibmedian_dict = {'median_fiber_length': fibmedian_mlab} fibdev_mlab = nx.to_numpy_matrix(fibdev, dtype=np.float64) - fibdev_dict = {'fiber_length_std':fibdev_mlab} + fibdev_dict = {'fiber_length_std': fibdev_mlab} if intersections: path, name, ext = split_filename(matrix_name) @@ -387,6 +392,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ iflogger.info("Saving matrix creation statistics as %s" % out_stats_file) sio.savemat(out_stats_file, stats) + def save_fibers(oldhdr, oldfib, fname, indices): """ Stores a new trackvis file fname using only given indices """ hdrnew = oldhdr.copy() @@ -399,6 +405,7 @@ def save_fibers(oldhdr, oldfib, fname, indices): nb.trackvis.write(fname, outstreams, hdrnew) return n_fib_out + class CreateMatrixInputSpec(TraitedSpec): roi_file = File(exists=True, mandatory=True, desc='Freesurfer aparc+aseg file') tract_file = File(exists=True, mandatory=True, desc='Trackvis tract file') @@ -412,6 +419,7 @@ class CreateMatrixInputSpec(TraitedSpec): out_intersection_matrix_mat_file = File(genfile=True, desc='Matlab connectivity matrix if all region/fiber intersections are counted.') out_endpoint_array_name = File(genfile=True, desc='Name for the generated endpoint arrays') + class CreateMatrixOutputSpec(TraitedSpec): matrix_file = File(desc='NetworkX graph describing the connectivity', exists=True) intersection_matrix_file = File(desc='NetworkX graph describing the connectivity', exists=True) @@ -432,6 +440,7 @@ class CreateMatrixOutputSpec(TraitedSpec): filtered_tractographies = OutputMultiPath(File(desc='TrackVis file containing only those fibers originate in one and terminate in another region', exists=True)) stats_file = File(desc='Saved Matlab .mat file with the number of fibers saved at each stage', exists=True) + class CreateMatrix(BaseInterface): """ Performs connectivity mapping and outputs the result as a NetworkX graph and a Matlab matrix @@ -478,13 +487,13 @@ def _run_interface(self, runtime): fiber_length_std_matrix_name = op.abspath(self._gen_outfilename('_fiber_length_std.mat')) if not isdefined(self.inputs.out_endpoint_array_name): - _, endpoint_name , _ = split_filename(self.inputs.tract_file) + _, endpoint_name, _ = split_filename(self.inputs.tract_file) endpoint_name = op.abspath(endpoint_name) else: endpoint_name = op.abspath(self.inputs.out_endpoint_array_name) cmat(self.inputs.tract_file, self.inputs.roi_file, self.inputs.resolution_network_file, - matrix_file, matrix_mat_file, endpoint_name, self.inputs.count_region_intersections) + matrix_file, matrix_mat_file, endpoint_name, self.inputs.count_region_intersections) return runtime def _list_outputs(self): @@ -535,7 +544,7 @@ def _list_outputs(self): outputs['fiber_label_file'] = op.abspath(self.inputs.out_endpoint_array_name + '_filtered_fiberslabel.npy') outputs['fiber_labels_noorphans'] = op.abspath(self.inputs.out_endpoint_array_name + '_final_fiberslabels.npy') else: - _, endpoint_name , _ = split_filename(self.inputs.tract_file) + _, endpoint_name, _ = split_filename(self.inputs.tract_file) outputs['endpoint_file'] = op.abspath(endpoint_name + '_endpoints.npy') outputs['endpoint_file_mm'] = op.abspath(endpoint_name + '_endpointsmm.npy') 
outputs['fiber_length_file'] = op.abspath(endpoint_name + '_final_fiberslength.npy') @@ -545,13 +554,13 @@ def _list_outputs(self): if self.inputs.count_region_intersections: outputs['matrix_files'] = [out_matrix_file, out_intersection_matrix_file] outputs['matlab_matrix_files'] = [outputs['matrix_mat_file'], - outputs['mean_fiber_length_matrix_mat_file'], outputs['median_fiber_length_matrix_mat_file'], - outputs['fiber_length_std_matrix_mat_file'], outputs['intersection_matrix_mat_file']] + outputs['mean_fiber_length_matrix_mat_file'], outputs['median_fiber_length_matrix_mat_file'], + outputs['fiber_length_std_matrix_mat_file'], outputs['intersection_matrix_mat_file']] else: outputs['matrix_files'] = [out_matrix_file] outputs['matlab_matrix_files'] = [outputs['matrix_mat_file'], - outputs['mean_fiber_length_matrix_mat_file'], outputs['median_fiber_length_matrix_mat_file'], - outputs['fiber_length_std_matrix_mat_file']] + outputs['mean_fiber_length_matrix_mat_file'], outputs['median_fiber_length_matrix_mat_file'], + outputs['fiber_length_std_matrix_mat_file']] outputs['filtered_tractography'] = op.abspath(endpoint_name + '_streamline_final.trk') outputs['filtered_tractography_by_intersections'] = op.abspath(endpoint_name + '_intersections_streamline_final.trk') @@ -561,13 +570,14 @@ def _list_outputs(self): def _gen_outfilename(self, ext): if ext.endswith("mat") and isdefined(self.inputs.out_matrix_mat_file): - _, name , _ = split_filename(self.inputs.out_matrix_mat_file) + _, name, _ = split_filename(self.inputs.out_matrix_mat_file) elif isdefined(self.inputs.out_matrix_file): - _, name , _ = split_filename(self.inputs.out_matrix_file) + _, name, _ = split_filename(self.inputs.out_matrix_file) else: - _, name , _ = split_filename(self.inputs.tract_file) + _, name, _ = split_filename(self.inputs.tract_file) return name + ext + class ROIGenInputSpec(BaseInterfaceInputSpec): aparc_aseg_file = File(exists=True, mandatory=True, desc='Freesurfer aparc+aseg file') LUT_file = File(exists=True, xor=['use_freesurfer_LUT'], desc='Custom lookup table (cf. 
FreeSurferColorLUT.txt)') @@ -576,10 +586,12 @@ class ROIGenInputSpec(BaseInterfaceInputSpec): out_roi_file = File(genfile=True, desc='Region of Interest file for connectivity mapping') out_dict_file = File(genfile=True, desc='Label dictionary saved in Pickle format') + class ROIGenOutputSpec(TraitedSpec): roi_file = File(desc='Region of Interest file for connectivity mapping') dict_file = File(desc='Label dictionary saved in Pickle format') + class ROIGen(BaseInterface): """ Generates a ROI file for connectivity mapping and a dictionary file containing relevant node information @@ -642,7 +654,7 @@ def _run_interface(self, runtime): if write_dict: iflogger.info('Lookup table: {name}'.format(name=op.abspath(self.LUT_file))) LUTlabelsRGBA = np.loadtxt(self.LUT_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], comments='#', - dtype={'names': ('index', 'label', 'R', 'G', 'B', 'A'), 'formats': ('int', '|S30', 'int', 'int', 'int', 'int')}) + dtype={'names': ('index', 'label', 'R', 'G', 'B', 'A'), 'formats': ('int', '|S30', 'int', 'int', 'int', 'int')}) numLUTLabels = np.size(LUTlabelsRGBA) if numLUTLabels < numDataLabels: iflogger.error('LUT file provided does not contain all of the regions in the image') @@ -659,18 +671,18 @@ def _run_interface(self, runtime): mapDict = {} MAPPING = [[1, 2012], [2, 2019], [3, 2032], [4, 2014], [5, 2020], [6, 2018], [7, 2027], [8, 2028], [9, 2003], [10, 2024], [11, 2017], [12, 2026], - [13, 2002], [14, 2023], [15, 2010], [16, 2022], [17, 2031], [18, 2029], [19, 2008], [20, 2025], [21, 2005], [22, 2021], [23, 2011], - [24, 2013], [25, 2007], [26, 2016], [27, 2006], [28, 2033], [29, 2009], [30, 2015], [31, 2001], [32, 2030], [33, 2034], [34, 2035], - [35, 49], [36, 50], [37, 51], [38, 52], [39, 58], [40, 53], [41, 54], [42, 1012], [43, 1019], [44, 1032], [45, 1014], [46, 1020], [47, 1018], - [48, 1027], [49, 1028], [50, 1003], [51, 1024], [52, 1017], [53, 1026], [54, 1002], [55, 1023], [56, 1010], [57, 1022], [58, 1031], - [59, 1029], [60, 1008], [61, 1025], [62, 1005], [63, 1021], [64, 1011], [65, 1013], [66, 1007], [67, 1016], [68, 1006], [69, 1033], - [70, 1009], [71, 1015], [72, 1001], [73, 1030], [74, 1034], [75, 1035], [76, 10], [77, 11], [78, 12], [79, 13], [80, 26], [81, 17], - [82, 18], [83, 16]] + [13, 2002], [14, 2023], [15, 2010], [16, 2022], [17, 2031], [18, 2029], [19, 2008], [20, 2025], [21, 2005], [22, 2021], [23, 2011], + [24, 2013], [25, 2007], [26, 2016], [27, 2006], [28, 2033], [29, 2009], [30, 2015], [31, 2001], [32, 2030], [33, 2034], [34, 2035], + [35, 49], [36, 50], [37, 51], [38, 52], [39, 58], [40, 53], [41, 54], [42, 1012], [43, 1019], [44, 1032], [45, 1014], [46, 1020], [47, 1018], + [48, 1027], [49, 1028], [50, 1003], [51, 1024], [52, 1017], [53, 1026], [54, 1002], [55, 1023], [56, 1010], [57, 1022], [58, 1031], + [59, 1029], [60, 1008], [61, 1025], [62, 1005], [63, 1021], [64, 1011], [65, 1013], [66, 1007], [67, 1016], [68, 1006], [69, 1033], + [70, 1009], [71, 1015], [72, 1001], [73, 1030], [74, 1034], [75, 1035], [76, 10], [77, 11], [78, 12], [79, 13], [80, 26], [81, 17], + [82, 18], [83, 16]] """ Create empty grey matter mask, Populate with only those regions defined in the mapping.""" niiGM = np.zeros(niiAPARCdata.shape, dtype=np.uint) for ma in MAPPING: - niiGM[ niiAPARCdata == ma[1]] = ma[0] + niiGM[niiAPARCdata == ma[1]] = ma[0] mapDict[ma[0]] = ma[1] iflogger.info('Grey matter mask created') greyMaskLabels = np.unique(niiGM) @@ -718,7 +730,7 @@ def _list_outputs(self): return outputs def _gen_outfilename(self, ext): - _, name , _ 
= split_filename(self.inputs.aparc_aseg_file) + _, name, _ = split_filename(self.inputs.aparc_aseg_file) if self.inputs.use_freesurfer_LUT: prefix = 'fsLUT' elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): @@ -728,51 +740,55 @@ def _gen_outfilename(self, ext): prefix = 'hardcoded' return prefix + '_' + name + '.' + ext + def create_nodes(roi_file, resolution_network_file, out_filename): - G = nx.Graph() - gp = nx.read_graphml(resolution_network_file) - roi_image = nb.load(roi_file) - roiData = roi_image.get_data() - nROIs = len(gp.nodes()) - for u, d in gp.nodes_iter(data=True): - G.add_node(int(u), d) - xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])) , axis=1)) - G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) - nx.write_gpickle(G, out_filename) - return out_filename + G = nx.Graph() + gp = nx.read_graphml(resolution_network_file) + roi_image = nb.load(roi_file) + roiData = roi_image.get_data() + nROIs = len(gp.nodes()) + for u, d in gp.nodes_iter(data=True): + G.add_node(int(u), d) + xyz = tuple(np.mean(np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1)) + G.node[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) + nx.write_gpickle(G, out_filename) + return out_filename + class CreateNodesInputSpec(BaseInterfaceInputSpec): roi_file = File(exists=True, mandatory=True, desc='Region of interest file') resolution_network_file = File(exists=True, mandatory=True, desc='Parcellation file from Connectome Mapping Toolkit') out_filename = File('nodenetwork.pck', usedefault=True, desc='Output gpickled network with the nodes defined.') + class CreateNodesOutputSpec(TraitedSpec): node_network = File(desc='Output gpickled network with the nodes defined.') + class CreateNodes(BaseInterface): - """ - Generates a NetworkX graph containing nodes at the centroid of each region in the input ROI file. - Node data is added from the resolution network file. - - Example - ------- - - >>> import nipype.interfaces.cmtk as cmtk - >>> mknode = cmtk.CreateNodes() - >>> mknode.inputs.roi_file = 'ROI_scale500.nii.gz' - >>> mknode.run() # doctest: +SKIP - """ - - input_spec = CreateNodesInputSpec - output_spec = CreateNodesOutputSpec - - def _run_interface(self, runtime): - iflogger.info('Creating nodes...') - create_nodes(self.inputs.roi_file, self.inputs.resolution_network_file, self.inputs.out_filename) - iflogger.info('Saving node network to {path}'.format(path=op.abspath(self.inputs.out_filename))) - return runtime - - def _list_outputs(self): - outputs = self._outputs().get() - outputs['node_network'] = op.abspath(self.inputs.out_filename) - return outputs + """ + Generates a NetworkX graph containing nodes at the centroid of each region in the input ROI file. + Node data is added from the resolution network file. 
+ + Example + ------- + + >>> import nipype.interfaces.cmtk as cmtk + >>> mknode = cmtk.CreateNodes() + >>> mknode.inputs.roi_file = 'ROI_scale500.nii.gz' + >>> mknode.run() # doctest: +SKIP + """ + + input_spec = CreateNodesInputSpec + output_spec = CreateNodesOutputSpec + + def _run_interface(self, runtime): + iflogger.info('Creating nodes...') + create_nodes(self.inputs.roi_file, self.inputs.resolution_network_file, self.inputs.out_filename) + iflogger.info('Saving node network to {path}'.format(path=op.abspath(self.inputs.out_filename))) + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['node_network'] = op.abspath(self.inputs.out_filename) + return outputs diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index ba980545b4..f874aa186f 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -48,14 +48,16 @@ class CFFConverterInputSpec(BaseInterfaceInputSpec): rights = traits.Str(desc='Rights') references = traits.Str(desc='References') relation = traits.Str(desc='Relation') - species = traits.Str('Homo sapiens',desc='Species',usedefault=True) + species = traits.Str('Homo sapiens', desc='Species', usedefault=True) description = traits.Str('Created with the Nipype CFF converter', desc='Description', usedefault=True) - out_file = File('connectome.cff', usedefault = True, desc='Output connectome file') + out_file = File('connectome.cff', usedefault=True, desc='Output connectome file') + class CFFConverterOutputSpec(TraitedSpec): connectome_file = File(exists=True, desc='Output connectome file') + class CFFConverter(BaseInterface): """ Creates a Connectome File Format (CFF) file from input networks, surfaces, volumes, tracts, etcetera.... @@ -86,7 +88,7 @@ def _run_interface(self, runtime): if isdefined(self.inputs.creator): a.connectome_meta.set_creator(self.inputs.creator) else: - #Probably only works on some OSes... + # Probably only works on some OSes... a.connectome_meta.set_creator(os.getenv('USER')) if isdefined(self.inputs.email): @@ -119,7 +121,7 @@ def _run_interface(self, runtime): if isdefined(self.inputs.graphml_networks): for ntwk in self.inputs.graphml_networks: # There must be a better way to deal with the unique name problem - #(i.e. tracks and networks can't use the same name, and previously we were pulling them both from the input files) + # (i.e. 
tracks and networks can't use the same name, and previously we were pulling them both from the input files) ntwk_name = 'Network {cnt}'.format(cnt=count) a.add_connectome_network_from_graphml(ntwk_name, ntwk) count += 1 @@ -129,7 +131,7 @@ def _run_interface(self, runtime): for ntwk in self.inputs.gpickled_networks: _, ntwk_name, _ = split_filename(ntwk) unpickled = nx.read_gpickle(ntwk) - cnet = cf.CNetwork(name = ntwk_name) + cnet = cf.CNetwork(name=ntwk_name) cnet.set_with_nxgraph(unpickled) a.add_connectome_network(cnet) count += 1 @@ -146,9 +148,9 @@ def _run_interface(self, runtime): if isdefined(self.inputs.gifti_surfaces): for surf in self.inputs.gifti_surfaces: _, surf_name, _ = split_filename(surf) - csurf = cf.CSurface.create_from_gifti("Surface %d - %s" % (count,surf_name), surf) - csurf.fileformat='Gifti' - csurf.dtype='Surfaceset' + csurf = cf.CSurface.create_from_gifti("Surface %d - %s" % (count, surf_name), surf) + csurf.fileformat = 'Gifti' + csurf.dtype = 'Surfaceset' a.add_connectome_surface(csurf) count += 1 @@ -156,16 +158,16 @@ def _run_interface(self, runtime): if isdefined(self.inputs.gifti_labels): for label in self.inputs.gifti_labels: _, label_name, _ = split_filename(label) - csurf = cf.CSurface.create_from_gifti("Surface Label %d - %s" % (count,label_name), label) - csurf.fileformat='Gifti' - csurf.dtype='Labels' + csurf = cf.CSurface.create_from_gifti("Surface Label %d - %s" % (count, label_name), label) + csurf.fileformat = 'Gifti' + csurf.dtype = 'Labels' a.add_connectome_surface(csurf) count += 1 if isdefined(self.inputs.nifti_volumes): for vol in self.inputs.nifti_volumes: _, vol_name, _ = split_filename(vol) - cvol = cf.CVolume.create_from_nifti(vol_name,vol) + cvol = cf.CVolume.create_from_nifti(vol_name, vol) a.add_connectome_volume(cvol) if isdefined(self.inputs.script_files): @@ -178,11 +180,11 @@ def _run_interface(self, runtime): for data in self.inputs.data_files: _, data_name, _ = split_filename(data) cda = cf.CData(name=data_name, src=data, fileformat='NumPy') - if not string.find(data_name,'lengths') == -1: + if not string.find(data_name, 'lengths') == -1: cda.dtype = 'FinalFiberLengthArray' - if not string.find(data_name,'endpoints') == -1: + if not string.find(data_name, 'endpoints') == -1: cda.dtype = 'FiberEndpoints' - if not string.find(data_name,'labels') == -1: + if not string.find(data_name, 'labels') == -1: cda.dtype = 'FinalFiberLabels' a.add_connectome_data(cda) @@ -190,7 +192,7 @@ def _run_interface(self, runtime): _, name, ext = split_filename(self.inputs.out_file) if not ext == '.cff': ext = '.cff' - cf.save_to_cff(a,op.abspath(name + ext)) + cf.save_to_cff(a, op.abspath(name + ext)) return runtime @@ -202,13 +204,16 @@ def _list_outputs(self): outputs['connectome_file'] = op.abspath(name + ext) return outputs + class MergeCNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath(File(exists=True), mandatory=True, desc='List of CFF files to extract networks from') - out_file = File('merged_network_connectome.cff', usedefault = True, desc='Output CFF file with all the networks added') + out_file = File('merged_network_connectome.cff', usedefault=True, desc='Output CFF file with all the networks added') + class MergeCNetworksOutputSpec(TraitedSpec): connectome_file = File(exists=True, desc='Output CFF file with all the networks added') + class MergeCNetworks(BaseInterface): """ Merges networks from multiple CFF files into one new CFF file. 
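A minimal usage sketch for the two CFF interfaces touched in this module (illustrative only; the network and connectome file names below are hypothetical, and the runs are skipped as in the module's own doctest examples):

>>> import nipype.interfaces.cmtk as cmtk
>>> cff = cmtk.CFFConverter()
>>> cff.inputs.gpickled_networks = ['network1.pck']
>>> cff.inputs.out_file = 'connectome.cff'
>>> cff.run() # doctest: +SKIP
>>> mrg = cmtk.MergeCNetworks()
>>> mrg.inputs.in_files = ['sub1.cff', 'sub2.cff']
>>> mrg.inputs.out_file = 'merged_network_connectome.cff'
>>> mrg.run() # doctest: +SKIP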
@@ -235,12 +240,12 @@ def _run_interface(self, runtime): # metadata information ne.load() contitle = mycon.get_connectome_meta().get_title() - ne.set_name( str(i) + ': ' + contitle + ' - ' + ne.get_name() ) + ne.set_name(str(i) + ': ' + contitle + ' - ' + ne.get_name()) ne.set_src(ne.get_name()) extracted_networks.append(ne) # Add networks to new connectome - newcon = cf.connectome(title = 'All CNetworks', connectome_network = extracted_networks) + newcon = cf.connectome(title='All CNetworks', connectome_network=extracted_networks) # Setting additional metadata metadata = newcon.get_connectome_meta() metadata.set_creator('My Name') diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index 12ff6ee0da..8a64ad95f6 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -44,7 +44,7 @@ class NetworkBasedStatisticInputSpec(BaseInterfaceInputSpec): threshold = traits.Float(3, usedefault=True, desc='T-statistic threshold') t_tail = traits.Enum('left', 'right', 'both', usedefault=True, desc='Can be one of "left", "right", or "both"') edge_key = traits.Str('number_of_fibers', usedefault=True, desc='Usually "number_of_fibers, "fiber_length_mean", "fiber_length_std" for matrices made with CMTK' \ - 'Sometimes "weight" or "value" for functional networks.') + 'Sometimes "weight" or "value" for functional networks.') out_nbs_network = File(desc='Output network with edges identified by the NBS') out_nbs_pval_network = File(desc='Output network with p-values to weight the edges identified by the NBS') diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index da9dc6695d..1716ad0c8d 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -168,7 +168,7 @@ def average_networks(in_files, ntwk_res_file, group_id): if not key == 'count': data[key] = data[key] / len(in_files) ntwk.edge[edge[0]][edge[1]] = data - avg_ntwk.add_edge(edge[0],edge[1],data) + avg_ntwk.add_edge(edge[0], edge[1], data) edge_dict['count'][edge[0]-1][edge[1]-1] = ntwk.edge[edge[0]][edge[1]]['count'] iflogger.info('After thresholding, the average network has has {n} edges'.format(n=avg_ntwk.number_of_edges())) @@ -228,7 +228,7 @@ def compute_node_measures(ntwk, calculate_cliques=False): isolate_list = nx.isolates(ntwk) binarized = np.zeros((ntwk.number_of_nodes(), 1)) for value in isolate_list: - value = value - 1 # Zero indexing + value = value - 1 # Zero indexing binarized[value] = 1 measures['isolates'] = binarized if calculate_cliques: @@ -245,12 +245,12 @@ def compute_edge_measures(ntwk): """ iflogger.info('Computing edge measures:') measures = {} - #iflogger.info('...Computing google matrix...' #Makes really large networks (500k+ edges)) - #measures['google_matrix'] = nx.google_matrix(ntwk) - #iflogger.info('...Computing hub matrix...') - #measures['hub_matrix'] = nx.hub_matrix(ntwk) - #iflogger.info('...Computing authority matrix...') - #measures['authority_matrix'] = nx.authority_matrix(ntwk) + # iflogger.info('...Computing google matrix...' 
#Makes really large networks (500k+ edges)) + # measures['google_matrix'] = nx.google_matrix(ntwk) + # iflogger.info('...Computing hub matrix...') + # measures['hub_matrix'] = nx.hub_matrix(ntwk) + # iflogger.info('...Computing authority matrix...') + # measures['authority_matrix'] = nx.authority_matrix(ntwk) return measures @@ -274,7 +274,7 @@ def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False): iflogger.info('...Computing degree assortativity (pearson number) ...') try: measures['degree_pearsonr'] = nx.degree_pearsonr(ntwk) - except AttributeError: # For NetworkX 1.6 + except AttributeError: # For NetworkX 1.6 measures['degree_pearsonr'] = nx.degree_pearson_correlation_coefficient(ntwk) iflogger.info('...Computing degree assortativity...') try: @@ -301,18 +301,18 @@ def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False): measures['average_shortest_path_length'] = nx.average_shortest_path_length(nx.connected_component_subgraphs(ntwk)[0], weighted) if calculate_cliques: iflogger.info('...Computing graph clique number...') - measures['graph_clique_number'] = nx.graph_clique_number(ntwk) #out of memory error + measures['graph_clique_number'] = nx.graph_clique_number(ntwk) # out of memory error return measures def compute_network_measures(ntwk): measures = {} - #iflogger.info('Identifying k-core') - #measures['k_core'] = nx.k_core(ntwk) - #iflogger.info('Identifying k-shell') - #measures['k_shell'] = nx.k_shell(ntwk) - #iflogger.info('Identifying k-crust') - #measures['k_crust'] = nx.k_crust(ntwk) + # iflogger.info('Identifying k-core') + # measures['k_core'] = nx.k_core(ntwk) + # iflogger.info('Identifying k-shell') + # measures['k_shell'] = nx.k_shell(ntwk) + # iflogger.info('Identifying k-crust') + # measures['k_crust'] = nx.k_crust(ntwk) return measures @@ -355,6 +355,7 @@ class NetworkXMetricsInputSpec(BaseInterfaceInputSpec): out_edge_metrics_matlab = File(genfile=True, desc='Output edge metrics in MATLAB .mat format') out_pickled_extra_measures = File('extra_measures', usedefault=True, desc='Network measures for group 1 that return dictionaries stored as a Pickle.') + class NetworkXMetricsOutputSpec(TraitedSpec): gpickled_network_files = OutputMultiPath(File(desc='Output gpickled network files')) matlab_matrix_files = OutputMultiPath(File(desc='Output network metrics in MATLAB .mat format')) @@ -370,6 +371,7 @@ class NetworkXMetricsOutputSpec(TraitedSpec): pickled_extra_measures = File(desc='Network measures for the group that return dictionaries, stored as a Pickle.') matlab_dict_measures = OutputMultiPath(File(desc='Network measures for the group that return dictionaries, stored as matlab matrices.')) + class NetworkXMetrics(BaseInterface): """ Calculates and outputs NetworkX-based measures for an input network @@ -500,19 +502,22 @@ def _list_outputs(self): def _gen_outfilename(self, name, ext): return name + '.' + ext + class AverageNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath(File(exists=True), mandatory=True, desc='Networks for a group of subjects') resolution_network_file = File(exists=True, desc='Parcellation files from Connectome Mapping Toolkit. 
This is not necessary' \ - ', but if included, the interface will output the statistical maps as networkx graphs.') + ', but if included, the interface will output the statistical maps as networkx graphs.') group_id = traits.Str('group1', usedefault=True, desc='ID for group') out_gpickled_groupavg = File(desc='Average network saved as a NetworkX .pck') out_gexf_groupavg = File(desc='Average network saved as a .gexf file') + class AverageNetworksOutputSpec(TraitedSpec): gpickled_groupavg = File(desc='Average network saved as a NetworkX .pck') gexf_groupavg = File(desc='Average network saved as a .gexf file') matlab_groupavgs = OutputMultiPath(File(desc='Average network saved as a .gexf file')) + class AverageNetworks(BaseInterface): """ Calculates and outputs the average network given a set of input NetworkX gpickle files diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index c102e0bb88..140b7273ca 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -156,7 +156,7 @@ def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name): shutil.copy(op.join( output_dir, 'regenerated_lh_60', 'lh.corpuscallosum.label'), lhco) - mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ % (rhun, lhun, rhco, lhco, op.join(op.join(subjects_dir, subject_id), 'mri', 'orig.mgz'), op.join(fs_label_dir, 'cc_unknown.nii.gz') ) + mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ % (rhun, lhun, rhco, lhco, op.join(op.join(subjects_dir, subject_id), 'mri', 'orig.mgz'), op.join(fs_label_dir, 'cc_unknown.nii.gz')) runCmd(mri_cmd, log) runCmd('mris_volmask %s' % subject_id, log) mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % (op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id)) @@ -348,34 +348,34 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): # lateral ventricles, thalamus proper and caudate # the latter two removed for better erosion, but put back afterwards idx = np.where((asegd == 4) | - (asegd == 43) | - (asegd == 11) | - (asegd == 50) | - (asegd == 31) | - (asegd == 63) | - (asegd == 10) | - (asegd == 49)) + (asegd == 43) | + (asegd == 11) | + (asegd == 50) | + (asegd == 31) | + (asegd == 63) | + (asegd == 10) | + (asegd == 49)) csfA[idx] = 1 csfA = imerode(imerode(csfA, se1), se) # thalmus proper and cuadate are put back because they are not lateral ventricles idx = np.where((asegd == 11) | - (asegd == 50) | - (asegd == 10) | - (asegd == 49)) + (asegd == 50) | + (asegd == 10) | + (asegd == 49)) csfA[idx] = 0 # REST CSF, IE 3RD AND 4TH VENTRICULE AND EXTRACEREBRAL CSF idx = np.where((asegd == 5) | - (asegd == 14) | - (asegd == 15) | - (asegd == 24) | - (asegd == 44) | - (asegd == 72) | - (asegd == 75) | - (asegd == 76) | - (asegd == 213) | - (asegd == 221)) + (asegd == 14) | + (asegd == 15) | + (asegd == 24) | + (asegd == 44) | + (asegd == 72) | + (asegd == 75) | + (asegd == 76) | + (asegd == 213) | + (asegd == 221)) # 43 ??, 4?? 213?, 221? # more to discuss. 
for i in [5, 14, 15, 24, 44, 72, 75, 76, 213, 221]: @@ -442,7 +442,7 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): nb.save(img, wm_out) -def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file,dilation): +def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file, dilation): fs_dir = op.join(subjects_dir, subject_id) cmp_config = cmp.configuration.PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" @@ -462,10 +462,10 @@ def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, ] ds.append((op.abspath('ROI_%s.nii.gz' % parcellation_name), - op.abspath('ROI_HR_th.nii.gz'))) - if(dilation==True): - ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name), - op.abspath('ROIv_HR_th.nii.gz'))) + op.abspath('ROI_HR_th.nii.gz'))) + if(dilation == True): + ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name), + op.abspath('ROIv_HR_th.nii.gz'))) orig = op.join(fs_dir, 'mri', 'orig', '001.mgz') for d in ds: iflogger.info("Processing %s:" % d[0]) @@ -531,15 +531,15 @@ class ParcellateOutputSpec(TraitedSpec): white_matter_mask_file = File(exists=True, desc='White matter mask file') cc_unknown_file = File( desc='Image file with regions labelled as unknown cortical structures', - exists=True) + exists=True) ribbon_file = File(desc='Image file detailing the cortical ribbon', - exists=True) + exists=True) aseg_file = File( desc='Automated segmentation file converted from Freesurfer "subjects" directory', - exists=True) + exists=True) roi_file_in_structural_space = File( desc='ROI image resliced to the dimensions of the original structural image', - exists=True) + exists=True) dilated_roi_file_in_structural_space = File( desc='dilated ROI image resliced to the dimensions of the original structural image') @@ -578,7 +578,7 @@ def _run_interface(self, runtime): create_annot_label(self.inputs.subject_id, self.inputs.subjects_dir, self.inputs.freesurfer_dir, self.inputs.parcellation_name) create_roi(self.inputs.subject_id, self.inputs.subjects_dir, self.inputs.freesurfer_dir, self.inputs.parcellation_name, self.inputs.dilation) create_wm_mask(self.inputs.subject_id, self.inputs.subjects_dir, self.inputs.freesurfer_dir, self.inputs.parcellation_name) - crop_and_move_datasets(self.inputs.subject_id, self.inputs.subjects_dir, self.inputs.freesurfer_dir, self.inputs.parcellation_name, self.inputs.out_roi_file,self.inputs.dilation) + crop_and_move_datasets(self.inputs.subject_id, self.inputs.subjects_dir, self.inputs.freesurfer_dir, self.inputs.parcellation_name, self.inputs.out_roi_file, self.inputs.dilation) return runtime def _list_outputs(self): @@ -588,18 +588,18 @@ def _list_outputs(self): else: outputs['roi_file'] = op.abspath( self._gen_outfilename('nii.gz', 'ROI')) - if(self.inputs.dilation==True): + if(self.inputs.dilation == True): outputs['roiv_file'] = op.abspath(self._gen_outfilename( - 'nii.gz', 'ROIv')) + 'nii.gz', 'ROIv')) outputs['white_matter_mask_file'] = op.abspath('fsmask_1mm.nii.gz') outputs['cc_unknown_file'] = op.abspath('cc_unknown.nii.gz') outputs['ribbon_file'] = op.abspath('ribbon.nii.gz') outputs['aseg_file'] = op.abspath('aseg.nii.gz') outputs['roi_file_in_structural_space'] = op.abspath( 'ROI_HR_th.nii.gz') - if(self.inputs.dilation==True): + if(self.inputs.dilation == True): outputs['dilated_roi_file_in_structural_space'] = op.abspath( - 'ROIv_HR_th.nii.gz') + 'ROIv_HR_th.nii.gz') return outputs def _gen_outfilename(self, ext, 
prefix='ROI'): diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index afaf3c1119..b3f7dc0c53 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -60,8 +60,8 @@ def _get_out_path(self, meta, idx=None): if self.inputs.out_format: out_fmt = self.inputs.out_format else: - #If no out_format is specified, use a sane default that will work - #with the provided meta data. + # If no out_format is specified, use a sane default that will work + # with the provided meta data. out_fmt = [] if not idx is None: out_fmt.append('%03d' % idx) @@ -149,7 +149,7 @@ def _run_interface(self, runtime): include_regexes) stack = dcmstack.DicomStack(meta_filter=meta_filter) for src_path in src_paths: - if not imghdr.what(src_path)=="gif": + if not imghdr.what(src_path) == "gif": src_dcm = dicom.read_file(src_path, force=self.inputs.force_read) stack.add_dcm(src_dcm) nii = stack.to_nifti(embed_meta=True) @@ -185,7 +185,7 @@ def _run_interface(self, runtime): for key, stack in stacks.items(): nw = NiftiWrapper(stack.to_nifti(embed_meta=True)) const_meta = nw.meta_ext.get_class_dict(('global', 'const')) - out_path = self._get_out_path(const_meta) + out_path = self._get_out_path(const_meta) if not self.inputs.embed_meta: nw.remove_extension() nb.save(nw.nii_img, out_path) @@ -207,9 +207,9 @@ class LookupMetaInputSpec(TraitedSpec): traits.Dict(), mandatory=True, desc=("List of meta data keys to lookup, or a " - "dict where keys specify the meta data keys to " - "lookup and the values specify the output names") - ) + "dict where keys specify the meta data keys to " + "lookup and the values specify the output names") + ) class LookupMeta(BaseInterface): @@ -299,12 +299,12 @@ def _run_interface(self, runtime): classes = [cls for cls in classes if cls in self.inputs.include_classes - ] + ] if self.inputs.exclude_classes: classes = [cls for cls in classes if not cls in self.inputs.exclude_classes - ] + ] for cls in classes: src_dict = src.meta_ext.get_class_dict(cls) @@ -315,7 +315,7 @@ def _run_interface(self, runtime): dest.meta_ext.shape = src.meta_ext.shape self.out_path = op.join(os.getcwd(), - op.basename(self.inputs.dest_file)) + op.basename(self.inputs.dest_file)) dest.to_filename(self.out_path) return runtime @@ -359,10 +359,10 @@ class MergeNifti(NiftiGeneratorBase): def _run_interface(self, runtime): niis = [nb.load(fn) for fn in self.inputs.in_files - ] + ] nws = [NiftiWrapper(nii, make_empty=True) for nii in niis - ] + ] if self.inputs.sort_order: sort_order = self.inputs.sort_order if isinstance(sort_order, string_types): diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index c330be2f0c..4191be77d8 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -18,6 +18,7 @@ import re from nipype.interfaces.base import CommandLine + class Info(object): """ Handle dtk output type and version information. 
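For orientation, a minimal usage sketch of the Diffusion Toolkit interfaces reformatted below (illustrative only; the input file names are hypothetical and the runs are skipped, following the doctest convention used elsewhere in these modules):

>>> import nipype.interfaces.diffusion_toolkit as dtk
>>> dti = dtk.DTIRecon()
>>> dti.inputs.DWI = 'dwi.nii.gz'
>>> dti.inputs.bvecs = 'bvecs'
>>> dti.inputs.bvals = 'bvals'
>>> dti.run() # doctest: +SKIP
>>> trk = dtk.DTITracker()
>>> trk.inputs.tensor_file = 'dti_tensor.nii'
>>> trk.inputs.mask1_file = 'dti_fa.nii'
>>> trk.inputs.mask1_threshold = 0.2
>>> trk.run() # doctest: +SKIP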
diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 4bdb0d509a..76e87c2c64 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -21,12 +21,12 @@ class DTIReconInputSpec(CommandLineInputSpec): - DWI = File(desc='Input diffusion volume', argstr='%s',exists=True, mandatory=True,position=1) - out_prefix = traits.Str("dti", desc='Output file prefix', argstr='%s', usedefault=True,position=2) + DWI = File(desc='Input diffusion volume', argstr='%s', exists=True, mandatory=True, position=1) + out_prefix = traits.Str("dti", desc='Output file prefix', argstr='%s', usedefault=True, position=2) output_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', argstr='-ot %s', desc='output file type', usedefault=True) - bvecs = File(exists=True, desc = 'b vectors file', - argstr='-gm %s', mandatory=True) - bvals = File(exists=True,desc = 'b values file', mandatory=True) + bvecs = File(exists=True, desc='b vectors file', + argstr='-gm %s', mandatory=True) + bvals = File(exists=True, desc='b values file', mandatory=True) n_averages = traits.Int(desc='Number of averages', argstr='-nex %s') image_orientation_vectors = traits.List(traits.Float(), minlen=6, maxlen=6, desc="""specify image orientation vectors. if just one argument given, will treat it as filename and read the orientation vectors from @@ -57,26 +57,27 @@ class DTIReconOutputSpec(TraitedSpec): V2 = File(exists=True) V3 = File(exists=True) + class DTIRecon(CommandLine): """Use dti_recon to generate tensors and other maps """ - input_spec=DTIReconInputSpec - output_spec=DTIReconOutputSpec + input_spec = DTIReconInputSpec + output_spec = DTIReconOutputSpec _cmd = 'dti_recon' def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = 'gradient_matrix.txt' - bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] + bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] bvecs_f = open(bvecs_file) - bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] bvecs_f.close() gradient_matrix_f = open(_gradient_matrix_file, 'w') for i in range(len(bvals)): - gradient_matrix_f.write("%s, %s, %s, %s\n"%(bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) + gradient_matrix_f.write("%s, %s, %s, %s\n" %(bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) gradient_matrix_f.close() return _gradient_matrix_file @@ -91,21 +92,22 @@ def _list_outputs(self): output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['ADC'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_adc.'+ output_type)) - outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.'+ output_type)) - outputs['L1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e1.'+ output_type)) - outputs['L2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e2.'+ output_type)) - outputs['L3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e3.'+ output_type)) - outputs['exp'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_exp.'+ output_type)) - 
outputs['FA'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa.'+ output_type)) - outputs['FA_color'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa_color.'+ output_type)) - outputs['tensor'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_tensor.'+ output_type)) - outputs['V1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v1.'+ output_type)) - outputs['V2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v2.'+ output_type)) - outputs['V3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v3.'+ output_type)) + outputs['ADC'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_adc.' + output_type)) + outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type)) + outputs['L1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e1.' + output_type)) + outputs['L2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e2.' + output_type)) + outputs['L3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e3.' + output_type)) + outputs['exp'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_exp.' + output_type)) + outputs['FA'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa.' + output_type)) + outputs['FA_color'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa_color.' + output_type)) + outputs['tensor'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_tensor.' + output_type)) + outputs['V1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v1.' + output_type)) + outputs['V2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v2.' + output_type)) + outputs['V3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v3.' + output_type)) return outputs + class DTITrackerInputSpec(CommandLineInputSpec): tensor_file = File(exists=True, desc="reconstructed tensor file") input_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', desc="""input and output file type. accepted values are: @@ -113,7 +115,7 @@ class DTITrackerInputSpec(CommandLineInputSpec): ni1 -> nifti format saved in seperate .hdr and .img file nii -> nifti format with one .nii file nii.gz -> nifti format with compression - default type is 'nii'""", argstr = "-it %s") + default type is 'nii'""", argstr="-it %s") tracking_method = traits.Enum('fact', 'rk2', 'tl', 'sl', desc="""fact -> use FACT method for tracking. this is the default method. rk2 -> use 2nd order runge-kutta method for tracking. tl -> use tensorline method for tracking. @@ -123,15 +125,15 @@ class DTITrackerInputSpec(CommandLineInputSpec): and 0.1 for other methods""", argstr="-l %f") angle_threshold = traits.Float(desc="set angle threshold. default value is 35 degree", argstr="-at %f") angle_threshold_weight = traits.Float(desc="set angle threshold weighting factor. weighting will be be applied \ - on top of the angle_threshold", argstr = "-atw %f") - random_seed = traits.Int(desc = "use random location in a voxel instead of the center of the voxel \ + on top of the angle_threshold", argstr="-atw %f") + random_seed = traits.Int(desc="use random location in a voxel instead of the center of the voxel \ to seed. can also define number of seed per voxel. 
default is 1", argstr="-rseed") - invert_x = traits.Bool(desc="invert x component of the vector", argstr = "-ix") - invert_y = traits.Bool(desc="invert y component of the vector", argstr = "-iy") - invert_z = traits.Bool(desc="invert z component of the vector", argstr = "-iz") - swap_xy = traits.Bool(desc="swap x & y vectors while tracking", argstr = "-sxy") - swap_yz = traits.Bool(desc="swap y & z vectors while tracking", argstr = "-syz") - swap_zx = traits.Bool(desc="swap x & z vectors while tracking", argstr = "-szx") + invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") + invert_y = traits.Bool(desc="invert y component of the vector", argstr="-iy") + invert_z = traits.Bool(desc="invert z component of the vector", argstr="-iz") + swap_xy = traits.Bool(desc="swap x & y vectors while tracking", argstr="-sxy") + swap_yz = traits.Bool(desc="swap y & z vectors while tracking", argstr="-syz") + swap_zx = traits.Bool(desc="swap x & z vectors while tracking", argstr="-szx") mask1_file = File(desc="first mask image", mandatory=True, argstr="-m %s", position=2) mask1_threshold = traits.Float(desc="threshold value for the first mask image, if not given, the program will \ try automatically find the threshold", position=3) @@ -141,15 +143,17 @@ class DTITrackerInputSpec(CommandLineInputSpec): input_data_prefix = traits.Str("dti", desc="for internal naming use only", position=0, argstr="%s", usedefault=True) output_file = File("tracks.trk", "file containing tracks", argstr="%s", position=1, usedefault=True) output_mask = File(desc="output a binary mask file in analyze format", argstr="-om %s") - primary_vector = traits.Enum('v2', 'v3', desc = "which vector to use for fibre tracking: v2 or v3. If not set use v1", argstr="-%s") + primary_vector = traits.Enum('v2', 'v3', desc="which vector to use for fibre tracking: v2 or v3. If not set use v1", argstr="-%s") + class DTITrackerOutputSpec(TraitedSpec): track_file = File(exists=True) mask_file = File(exists=True) + class DTITracker(CommandLine): - input_spec=DTITrackerInputSpec - output_spec=DTITrackerOutputSpec + input_spec = DTITrackerInputSpec + output_spec = DTITrackerOutputSpec _cmd = 'dti_tracker' @@ -165,4 +169,4 @@ def _list_outputs(self): if isdefined(self.inputs.output_mask) and self.inputs.output_mask: outputs['mask_file'] = os.path.abspath(self.inputs.output_mask) - return outputs \ No newline at end of file + return outputs diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index a91b6cb071..87495410f5 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -21,10 +21,10 @@ class HARDIMatInputSpec(CommandLineInputSpec): - bvecs = File(exists=True, desc = 'b vectors file', - argstr='%s', position=1, mandatory=True) - bvals = File(exists=True,desc = 'b values file', mandatory=True) - out_file = File("recon_mat.dat", desc = 'output matrix file', argstr='%s', usedefault=True, position=2) + bvecs = File(exists=True, desc='b vectors file', + argstr='%s', position=1, mandatory=True) + bvals = File(exists=True, desc='b values file', mandatory=True) + out_file = File("recon_mat.dat", desc='output matrix file', argstr='%s', usedefault=True, position=2) order = traits.Int(argstr='-order %s', desc="""maximum order of spherical harmonics. must be even number. default is 4""") odf_file = File(exists=True, argstr='-odf %s', desc="""filename that contains the reconstruction points on a HEMI-sphere. 
@@ -50,6 +50,7 @@ class HARDIMatInputSpec(CommandLineInputSpec): adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", argstr="-oc") + class HARDIMatOutputSpec(TraitedSpec): out_file = File(exists=True, desc='output matrix file') @@ -57,24 +58,24 @@ class HARDIMatOutputSpec(TraitedSpec): class HARDIMat(CommandLine): """Use hardi_mat to calculate a reconstruction matrix from a gradient table """ - input_spec=HARDIMatInputSpec - output_spec=HARDIMatOutputSpec + input_spec = HARDIMatInputSpec + output_spec = HARDIMatOutputSpec _cmd = 'hardi_mat' def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = 'gradient_matrix.txt' - bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] + bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] bvecs_f = open(bvecs_file) - bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] bvecs_f.close() gradient_matrix_f = open(_gradient_matrix_file, 'w') for i in range(len(bvals)): if int(bvals[i]) == 0: continue - gradient_matrix_f.write("%s %s %s\n"%(bvecs_x[i], bvecs_y[i], bvecs_z[i])) + gradient_matrix_f.write("%s %s %s\n" %(bvecs_x[i], bvecs_y[i], bvecs_z[i])) gradient_matrix_f.close() return _gradient_matrix_file @@ -89,8 +90,9 @@ def _list_outputs(self): outputs['out_file'] = os.path.abspath(self.inputs.out_file) return outputs + class ODFReconInputSpec(CommandLineInputSpec): - DWI = File(desc='Input raw data', argstr='%s',exists=True, mandatory=True,position=1) + DWI = File(desc='Input raw data', argstr='%s', exists=True, mandatory=True, position=1) n_directions = traits.Int(desc='Number of directions', argstr='%s', mandatory=True, position=2) n_output_directions = traits.Int(desc='Number of output directions', argstr='%s', mandatory=True, position=3) out_prefix = traits.Str("odf", desc='Output file prefix', argstr='%s', usedefault=True, position=4) @@ -126,12 +128,13 @@ class ODFReconOutputSpec(TraitedSpec): ODF = File(exists=True) entropy = File() + class ODFRecon(CommandLine): """Use odf_recon to generate tensors and other maps """ - input_spec=ODFReconInputSpec - output_spec=ODFReconOutputSpec + input_spec = ODFReconInputSpec + output_spec = ODFReconOutputSpec _cmd = 'odf_recon' @@ -140,26 +143,27 @@ def _list_outputs(self): output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.'+ output_type)) - outputs['DWI'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_dwi.'+ output_type)) - outputs['max'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_max.'+ output_type)) - outputs['ODF'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_odf.'+ output_type)) + outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type)) + outputs['DWI'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_dwi.' + output_type)) + outputs['max'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_max.' 
+ output_type)) + outputs['ODF'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_odf.' + output_type)) if isdefined(self.inputs.output_entropy): - outputs['entropy'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_entropy.'+ output_type)) + outputs['entropy'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_entropy.' + output_type)) return outputs + class ODFTrackerInputSpec(CommandLineInputSpec): max = File(exists=True, mandatory=True) ODF = File(exists=True, mandatory=True) input_data_prefix = traits.Str("odf", desc='recon data prefix', argstr='%s', usedefault=True, position=0) - out_file = File("tracks.trk", desc = 'output track file', argstr='%s', usedefault=True, position=1) + out_file = File("tracks.trk", desc='output track file', argstr='%s', usedefault=True, position=1) input_output_type = traits.Enum('nii', 'analyze', 'ni1', 'nii.gz', argstr='-it %s', desc='input and output file type', usedefault=True) runge_kutta2 = traits.Bool(argstr='-rk2', desc="""use 2nd order runge-kutta method for tracking. default tracking method is non-interpolate streamline""") step_length = traits.Float(argstr='-l %f', desc="""set step length, in the unit of minimum voxel size. default value is 0.1.""") - angle_threshold = traits.Float(argstr='-at %f',desc="""set angle threshold. default value is 35 degree for + angle_threshold = traits.Float(argstr='-at %f', desc="""set angle threshold. default value is 35 degree for default tracking method and 25 for rk2""") random_seed = traits.Int(argstr='-rseed %s', desc="""use random location in a voxel instead of the center of the voxel to seed. can also define number of seed per voxel. default is 1""") @@ -202,15 +206,17 @@ class ODFTrackerInputSpec(CommandLineInputSpec): in the track file and is essential for track display to map onto the right coordinates""") + class ODFTrackerOutputSpec(TraitedSpec): track_file = File(exists=True, desc='output track file') + class ODFTracker(CommandLine): """Use odf_tracker to generate track file """ - input_spec=ODFTrackerInputSpec - output_spec=ODFTrackerOutputSpec + input_spec = ODFTrackerInputSpec + output_spec = ODFTrackerOutputSpec _cmd = 'odf_tracker' diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 1e36cfa076..60d5b11115 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -22,9 +22,11 @@ class SplineFilterInputSpec(CommandLineInputSpec): step_length = traits.Float(desc="in the unit of minimum voxel size", position=1, argstr="%f", mandatory=True) output_file = File("spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", usedefault=True) + class SplineFilterOutputSpec(TraitedSpec): smoothed_track_file = File(exists=True) + class SplineFilter(CommandLine): """ Smoothes TrackVis track files with a B-Spline filter. 
@@ -43,8 +45,8 @@ class SplineFilter(CommandLine): >>> filt.inputs.step_length = 0.5 >>> filt.run() # doctest: +SKIP """ - input_spec=SplineFilterInputSpec - output_spec=SplineFilterOutputSpec + input_spec = SplineFilterInputSpec + output_spec = SplineFilterOutputSpec _cmd = "spline_filter" @@ -58,9 +60,11 @@ class TrackMergeInputSpec(CommandLineInputSpec): track_files = InputMultiPath(File(exists=True), desc="file containing tracks to be filtered", position=0, argstr="%s...", mandatory=True) output_file = File("merged_tracks.trk", desc="target file for merged tracks", position=-1, argstr="%s", usedefault=True) + class TrackMergeOutputSpec(TraitedSpec): track_file = File(exists=True) + class TrackMerge(CommandLine): """ Merges several TrackVis track files into a single track @@ -81,8 +85,8 @@ class TrackMerge(CommandLine): >>> mrg.inputs.track_files = ['track1.trk','track2.trk'] >>> mrg.run() # doctest: +SKIP """ - input_spec=TrackMergeInputSpec - output_spec=TrackMergeOutputSpec + input_spec = TrackMergeInputSpec + output_spec = TrackMergeOutputSpec _cmd = "track_merge" diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 8081f4b1f8..b0cc8f40ca 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -53,7 +53,7 @@ def tensor_fitting(data, bvals, bvecs, mask_file=None): if mask_file is not None: mask = nb.load(self.inputs.mask_file).get_data() else: - mask=None + mask = None # Load information about the gradients: gtab = grad.gradient_table(self.inputs.bvals, self.inputs.bvecs) @@ -71,7 +71,7 @@ class DTIInputSpec(TraitedSpec): bvals = File(exists=True, mandatory=True, desc='The input b-value text file') mask_file = File(exists=True, - desc='An optional white matter mask') + desc='An optional white matter mask') out_filename = File( genfile=True, desc='The output filename for the DTI parameters image') @@ -171,7 +171,7 @@ def _run_interface(self, runtime): ten_fit = tensor_fitting(self.inputs.in_file, self.inputs.bvals, self.inputs.bvecs, self.inputs.mask_file) - ## Write as a 3D Nifti image with the original affine + # Write as a 3D Nifti image with the original affine img = nb.Nifti1Image(tenfit.mode, affine) out_file = op.abspath(self._gen_outfilename()) nb.save(img, out_file) diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 95437ae377..4c6a7e99c7 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -8,9 +8,11 @@ DynamicTraitedSpec, traits, Undefined, File, isdefined) + class SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): module = traits.Str(desc="name of the Slicer command line module you want to use") + class SlicerCommandLine(CommandLine): """Experimental Slicer wrapper. Work in progress. 
@@ -19,21 +21,19 @@ class SlicerCommandLine(CommandLine): input_spec = SlicerCommandLineInputSpec output_spec = DynamicTraitedSpec - - def _grab_xml(self, module): - cmd = CommandLine(command = "Slicer3", args="--launch %s --xml"%module) + cmd = CommandLine(command="Slicer3", args="--launch %s --xml" %module) ret = cmd.run() if ret.runtime.returncode == 0: return xml.dom.minidom.parseString(ret.runtime.stdout) else: - raise Exception(cmd.cmdline + " failed:\n%s"%ret.runtime.stderr) + raise Exception(cmd.cmdline + " failed:\n%s" %ret.runtime.stderr) def _outputs(self): base = super(SlicerCommandLine, self)._outputs() undefined_output_traits = {} for key in [node.getElementsByTagName('name')[0].firstChild.nodeValue for node in self._outputs_nodes]: - base.add_trait(key, File(exists = True)) + base.add_trait(key, File(exists=True)) undefined_output_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_output_traits) @@ -41,8 +41,8 @@ def _outputs(self): def __init__(self, module, **inputs): warnings.warn('slicer is Not fully implemented', - RuntimeWarning) - super(SlicerCommandLine, self).__init__(command= "Slicer3 --launch %s "%module, name= module, **inputs) + RuntimeWarning) + super(SlicerCommandLine, self).__init__(command="Slicer3 --launch %s " %module, name=module, **inputs) dom = self._grab_xml(module) self._outputs_filenames = {} @@ -64,7 +64,6 @@ def __init__(self, module, **inputs): else: traitsParams["argstr"] = "--" + name + " " - argsDict = {'file': '%s', 'integer': "%d", 'double': "%f", 'float': "%f", 'image': "%s", 'transform': "%s", 'boolean': '', 'string-enumeration': '%s', 'string': "%s"} if param.nodeName.endswith('-vector'): @@ -82,7 +81,7 @@ def __init__(self, module, **inputs): name = param.getElementsByTagName('name')[0].firstChild.nodeValue - typesDict = {'integer': traits.Int, 'double': traits.Float, 'float': traits.Float, 'image': File, 'transform': File, 'boolean': traits.Bool, 'string': traits.Str, 'file':File} + typesDict = {'integer': traits.Int, 'double': traits.Float, 'float': traits.Float, 'image': File, 'transform': File, 'boolean': traits.Bool, 'string': traits.Str, 'file': File} if param.nodeName == 'string-enumeration': type = traits.Enum @@ -99,10 +98,10 @@ def __init__(self, module, **inputs): self.inputs.add_trait(name, traits.Either(traits.Bool, File, **traitsParams)) undefined_traits[name] = Undefined - #traitsParams["exists"] = True + # traitsParams["exists"] = True self._outputs_filenames[name] = self._gen_filename_from_param(param) - #undefined_output_traits[name] = Undefined - #self._outputs().add_trait(name, File(*values, **traitsParams)) + # undefined_output_traits[name] = Undefined + # self._outputs().add_trait(name, File(*values, **traitsParams)) self._outputs_nodes.append(param) else: if param.nodeName in ['file', 'directory', 'image', 'transform']: @@ -113,15 +112,14 @@ def __init__(self, module, **inputs): self.inputs.trait_set(trait_change_notify=False, **undefined_traits) for name in list(undefined_traits.keys()): _ = getattr(self.inputs, name) - #self._outputs().trait_set(trait_change_notify=False, **undefined_output_traits) - + # self._outputs().trait_set(trait_change_notify=False, **undefined_output_traits) def _gen_filename(self, name): if name in self._outputs_filenames: return os.path.join(os.getcwd(), self._outputs_filenames[name]) return None - def _gen_filename_from_param(self,param): + def _gen_filename_from_param(self, param): base = param.getElementsByTagName('name')[0].firstChild.nodeValue 
fileExtensions = param.getAttribute("fileExtensions") if fileExtensions: diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index 430d1efcfc..b72123c321 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -23,9 +23,9 @@ class RegistrationInputSpec(ElastixBaseInputSpec): fixed_image = File(exists=True, mandatory=True, argstr='-f %s', - desc='fixed image') + desc='fixed image') moving_image = File(exists=True, mandatory=True, argstr='-m %s', - desc='moving image') + desc='moving image') parameters = InputMultiPath(File(exists=True), mandatory=True, argstr='-p %s...', desc='parameter file, elastix handles 1 or more -p') fixed_mask = File(exists=True, argstr='-fMask %s', desc='mask for fixed image') @@ -40,7 +40,7 @@ class RegistrationOutputSpec(TraitedSpec): warped_files = InputMultiPath(File(exists=False), desc=('input moving image warped to fixed image at each level')) warped_files_flags = traits.List(traits.Bool(False), - desc='flag indicating if warped image was generated') + desc='flag indicating if warped image was generated') class Registration(CommandLine): @@ -70,14 +70,14 @@ def _list_outputs(self): out_dir = op.abspath(self.inputs.output_path) - opts = [ 'WriteResultImage', 'ResultImageFormat' ] + opts = ['WriteResultImage', 'ResultImageFormat'] regex = re.compile(r'^\((\w+)\s(.+)\)$') outputs['transform'] = [] outputs['warped_files'] = [] outputs['warped_files_flags'] = [] - for i,params in enumerate(self.inputs.parameters): + for i, params in enumerate(self.inputs.parameters): config = {} with open(params, 'r') as f: @@ -90,12 +90,12 @@ def _list_outputs(self): config[m.group(1).strip()] = value outputs['transform'].append(op.join(out_dir, - 'TransformParameters.%01d.txt' % i )) + 'TransformParameters.%01d.txt' % i)) warped_file = None if config['WriteResultImage']: warped_file = op.join(out_dir, - 'result.%01d.%s' %(i,config['ResultImageFormat'])) + 'result.%01d.%s' % (i, config['ResultImageFormat'])) outputs['warped_files'].append(warped_file) outputs['warped_files_flags'].append(config['WriteResultImage']) @@ -105,8 +105,7 @@ def _list_outputs(self): return outputs - - def _cast(self,val): + def _cast(self, val): if val.startswith('"') and val.endswith('"'): if val == '"true"': return True @@ -123,6 +122,7 @@ def _cast(self,val): except ValueError: return val + class ApplyWarpInputSpec(ElastixBaseInputSpec): transform_file = File(exists=True, mandatory=True, argstr='-tp %s', desc='transform-parameter file, only 1') @@ -131,10 +131,10 @@ class ApplyWarpInputSpec(ElastixBaseInputSpec): desc='input image to deform') - class ApplyWarpOutputSpec(TraitedSpec): warped_file = File(desc='input moving image warped to fixed image') + class ApplyWarp(CommandLine): """ Use ``transformix`` to apply a transform on an input image. 
@@ -160,7 +160,7 @@ class ApplyWarp(CommandLine): def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) - outputs['warped_file'] = op.join(out_dir,'result.nii.gz') + outputs['warped_file'] = op.join(out_dir, 'result.nii.gz') return outputs @@ -174,6 +174,7 @@ class AnalyzeWarpOutputSpec(TraitedSpec): jacdet_map = File(desc='det(Jacobian) map') jacmat_map = File(desc='Jacobian matrix map') + class AnalyzeWarp(CommandLine): """ Use transformix to get details from the input transform (generate @@ -199,9 +200,9 @@ class AnalyzeWarp(CommandLine): def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) - outputs['disp_field'] = op.join(out_dir,'deformationField.nii.gz') - outputs['jacdet_map'] = op.join(out_dir,'spatialJacobian.nii.gz') - outputs['jacmat_map'] = op.join(out_dir,'fullSpatialJacobian.nii.gz') + outputs['disp_field'] = op.join(out_dir, 'deformationField.nii.gz') + outputs['jacdet_map'] = op.join(out_dir, 'spatialJacobian.nii.gz') + outputs['jacmat_map'] = op.join(out_dir, 'fullSpatialJacobian.nii.gz') return outputs @@ -212,10 +213,10 @@ class PointsWarpInputSpec(ElastixBaseInputSpec): desc='transform-parameter file, only 1') - class PointsWarpOutputSpec(TraitedSpec): warped_file = File(desc='input points displaced in fixed image domain') + class PointsWarp(CommandLine): """Use ``transformix`` to apply a transform on an input point set. The transform is specified in the transform-parameter file. @@ -243,5 +244,5 @@ def _list_outputs(self): fname, ext = op.splitext(op.basename(self.inputs.points_file)) - outputs['warped_file'] = op.join(out_dir,'outputpoints%s' % ext) + outputs['warped_file'] = op.join(out_dir, 'outputpoints%s' % ext) return outputs diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index cd885b5cc2..2c857136e5 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -11,7 +11,7 @@ import re from ..base import (BaseInterface, BaseInterfaceInputSpec, isdefined, - TraitedSpec, File, traits, InputMultiPath) + TraitedSpec, File, traits, InputMultiPath) from ... 
import logging logger = logging.getLogger('interface') @@ -22,12 +22,12 @@ class EditTransformInputSpec(BaseInterfaceInputSpec): reference_image = File(exists=True, desc=('set a new reference image to change the ' 'target coordinate system.')) - interpolation = traits.Enum('cubic','linear','nearest', usedefault=True, + interpolation = traits.Enum('cubic', 'linear', 'nearest', usedefault=True, argstr='FinalBSplineInterpolationOrder', desc='set a new interpolator for transformation') - output_type = traits.Enum('float', 'unsigned char', 'unsigned short','short', - 'unsigned long','long','double', + output_type = traits.Enum('float', 'unsigned char', 'unsigned short', 'short', + 'unsigned long', 'long', 'double', argstr='ResultImagePixelType', desc='set a new output pixel type for resampled images') output_format = traits.Enum('nii.gz', 'nii', 'mhd', 'hdr', 'vtk', @@ -61,7 +61,7 @@ class EditTransform(BaseInterface): _out_file = '' _pattern = '\((?P%s\s\"?)([-\.\s\w]+)(\"?\))' - _interp = { 'nearest': 0, 'linear': 1, 'cubic': 3 } + _interp = {'nearest': 0, 'linear': 1, 'cubic': 3} def _run_interface(self, runtime): import re @@ -94,12 +94,12 @@ def _run_interface(self, runtime): if len(im.get_header().get_zooms()) == 4: im = nb.func.four_to_three(im)[0] - size = ' '.join(["%01d" % s for s in im.get_shape() ]) + size = ' '.join(["%01d" % s for s in im.get_shape()]) p = re.compile((self._pattern % 'Size').decode('string-escape')) rep = '(\g%s\g<3>' % size contents = p.sub(rep, contents) - index = ' '.join(["0" for s in im.get_shape() ]) + index = ' '.join(["0" for s in im.get_shape()]) p = re.compile((self._pattern % 'Index').decode('string-escape')) rep = '(\g%s\g<3>' % index contents = p.sub(rep, contents) @@ -110,22 +110,21 @@ def _run_interface(self, runtime): contents = p.sub(rep, contents) itkmat = np.eye(4) - itkmat[0,0] = -1 - itkmat[1,1] = -1 + itkmat[0, 0] = -1 + itkmat[1, 1] = -1 - affine = np.dot( itkmat, im.get_affine() ) - dirs = ' '.join(['%0.4f' % f for f in affine[0:3,0:3].reshape(-1)]) - orig = ' '.join(['%0.4f' % f for f in affine[0:3,3].reshape(-1)]) + affine = np.dot(itkmat, im.get_affine()) + dirs = ' '.join(['%0.4f' % f for f in affine[0:3, 0:3].reshape(-1)]) + orig = ' '.join(['%0.4f' % f for f in affine[0:3, 3].reshape(-1)]) - #p = re.compile((self._pattern % 'Direction').decode('string-escape')) - #rep = '(\g%s\g<3>' % dirs - #contents = p.sub(rep, contents) + # p = re.compile((self._pattern % 'Direction').decode('string-escape')) + # rep = '(\g%s\g<3>' % dirs + # contents = p.sub(rep, contents) p = re.compile((self._pattern % 'Origin').decode('string-escape')) rep = '(\g%s\g<3>' % orig contents = p.sub(rep, contents) - with open(self._get_outfile(), 'w') as of: of.write(contents) @@ -137,12 +136,12 @@ def _list_outputs(self): return outputs def _get_outfile(self): - val = getattr(self,'_out_file') - if not val is None and not val=='': + val = getattr(self, '_out_file') + if not val is None and not val == '': return val if isdefined(self.inputs.output_file): - setattr(self,'_out_file',self.inputs.output_file) + setattr(self, '_out_file', self.inputs.output_file) return self.inputs.output_file out_file = op.abspath(op.basename(self.inputs.transform_file)) diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py index 16e5f69d15..07ff3e2588 100644 --- a/nipype/interfaces/freesurfer/__init__.py +++ b/nipype/interfaces/freesurfer/__init__.py @@ -4,12 +4,12 @@ from .base import Info, FSCommand from .preprocess import (ParseDICOMDir, 
UnpackSDICOMDir, MRIConvert, Resample, - ReconAll, BBRegister, ApplyVolTransform,Smooth, + ReconAll, BBRegister, ApplyVolTransform, Smooth, DICOMConvert, RobustRegister, FitMSParams, SynthesizeFLASH) from .model import (MRISPreproc, GLMFit, OneSampleTTest, Binarize, Concatenate, SegStats, Label2Vol, MS_LDA) from .utils import (SampleToSurface, SurfaceSmooth, SurfaceTransform, Surface2VolTransform, - SurfaceSnapshots,ApplyMask, MRIsConvert, MRITessellate, MRIPretess, + SurfaceSnapshots, ApplyMask, MRIsConvert, MRITessellate, MRIPretess, MRIMarchingCubes, SmoothTessellation, MakeAverageSubject, ExtractMainComponent, Tkregister2) diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index 1b10f9deff..54d1bb2c41 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -24,7 +24,6 @@ from ...utils.filemanip import fname_presuffix - class Info(object): """ Freesurfer subject directory and version information. @@ -111,7 +110,7 @@ def __init__(self, **inputs): def _subjects_dir_update(self): if self.inputs.subjects_dir: self.inputs.environ.update({'SUBJECTS_DIR': - self.inputs.subjects_dir}) + self.inputs.subjects_dir}) @classmethod def set_default_subjects_dir(cls, subjects_dir): diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 702978ad3a..ed40041249 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -22,53 +22,53 @@ class MRISPreprocInputSpec(FSTraitedSpec): out_file = File(argstr='--out %s', genfile=True, - desc='output filename') + desc='output filename') target = traits.Str(argstr='--target %s', mandatory=True, - desc='target subject name') + desc='target subject name') hemi = traits.Enum('lh', 'rh', argstr='--hemi %s', mandatory=True, desc='hemisphere for source and target') surf_measure = traits.Str(argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='Use subject/surf/hemi.surf_measure as input') + xor=('surf_measure', 'surf_measure_file', 'surf_area'), + desc='Use subject/surf/hemi.surf_measure as input') surf_area = traits.Str(argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='Extract vertex area from subject/surf/hemi.surfname to use as input.') + xor=('surf_measure', 'surf_measure_file', 'surf_area'), + desc='Extract vertex area from subject/surf/hemi.surfname to use as input.') subjects = traits.List(argstr='--s %s...', xor=('subjects', 'fsgd_file', 'subject_file'), - desc='subjects from who measures are calculated') + desc='subjects from who measures are calculated') fsgd_file = File(exists=True, argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='specify subjects using fsgd file') + xor=('subjects', 'fsgd_file', 'subject_file'), + desc='specify subjects using fsgd file') subject_file = File(exists=True, argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='file specifying subjects separated by white space') + xor=('subjects', 'fsgd_file', 'subject_file'), + desc='file specifying subjects separated by white space') surf_measure_file = InputMultiPath(File(exists=True), argstr='--is %s...', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='file alternative to surfmeas, still requires list of subjects') + xor=('surf_measure', 'surf_measure_file', 'surf_area'), + desc='file alternative to surfmeas, still requires list of subjects') source_format = traits.Str(argstr='--srcfmt %s', desc='source 
format') surf_dir = traits.Str(argstr='--surfdir %s', - desc='alternative directory (instead of surf)') + desc='alternative directory (instead of surf)') vol_measure_file = InputMultiPath(traits.Tuple(File(exists=True), - File(exists=True)), + File(exists=True)), argstr='--iv %s %s...', - desc='list of volume measure and reg file tuples') + desc='list of volume measure and reg file tuples') proj_frac = traits.Float(argstr='--projfrac %s', - desc='projection fraction for vol2surf') + desc='projection fraction for vol2surf') fwhm = traits.Float(argstr='--fwhm %f', xor=['num_iters'], desc='smooth by fwhm mm on the target surface') num_iters = traits.Int(argstr='--niters %d', - xor=['fwhm'], - desc='niters : smooth by niters on the target surface') + xor=['fwhm'], + desc='niters : smooth by niters on the target surface') fwhm_source = traits.Float(argstr='--fwhm-src %f', - xor=['num_iters_source'], - desc='smooth by fwhm mm on the source surface') + xor=['num_iters_source'], + desc='smooth by fwhm mm on the source surface') num_iters_source = traits.Int(argstr='--niterssrc %d', - xor=['fwhm_source'], - desc='niters : smooth by niters on the source surface') + xor=['fwhm_source'], + desc='niters : smooth by niters on the source surface') smooth_cortex_only = traits.Bool(argstr='--smooth-cortex-only', - desc='only smooth cortex (ie, exclude medial wall)') + desc='only smooth cortex (ie, exclude medial wall)') class MRISPreprocOutputSpec(TraitedSpec): @@ -103,8 +103,8 @@ def _list_outputs(self): outputs['out_file'] = outfile if not isdefined(outfile): outputs['out_file'] = os.path.join(os.getcwd(), - 'concat_%s_%s.mgz' % (self.inputs.hemi, - self.inputs.target)) + 'concat_%s_%s.mgz' % (self.inputs.hemi, + self.inputs.target)) return outputs def _gen_filename(self, name): @@ -117,7 +117,7 @@ class GLMFitInputSpec(FSTraitedSpec): glm_dir = traits.Str(argstr='--glmdir %s', desc='save outputs to dir', genfile=True) in_file = File(desc='input 4D file', argstr='--y %s', mandatory=True, - copyfile=False) + copyfile=False) _design_xor = ('fsgd', 'design', 'one_sample') fsgd = traits.Tuple(File(exists=True), traits.Enum('doss', 'dods'), argstr='--fsgd %s %s', xor=_design_xor, @@ -128,26 +128,26 @@ class GLMFitInputSpec(FSTraitedSpec): desc='contrast file') one_sample = traits.Bool(argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), - desc='construct X and C as a one-sample group mean') + xor=('one_sample', 'fsgd', 'design', 'contrast'), + desc='construct X and C as a one-sample group mean') no_contrast_sok = traits.Bool(argstr='--no-contrasts-ok', - desc='do not fail if no contrasts specified') + desc='do not fail if no contrasts specified') per_voxel_reg = InputMultiPath(File(exists=True), argstr='--pvr %s...', - desc='per-voxel regressors') + desc='per-voxel regressors') self_reg = traits.Tuple(traits.Int, traits.Int, traits.Int, - argstr='--selfreg %d %d %d', - desc='self-regressor from index col row slice') + argstr='--selfreg %d %d %d', + desc='self-regressor from index col row slice') weighted_ls = File(exists=True, argstr='--wls %s', xor=('weight_file', 'weight_inv', 'weight_sqrt'), desc='weighted least squares') fixed_fx_var = File(exists=True, argstr='--yffxvar %s', - desc='for fixed effects analysis') + desc='for fixed effects analysis') fixed_fx_dof = traits.Int(argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], - desc='dof for fixed effects analysis') + xor=['fixed_fx_dof_file'], + desc='dof for fixed effects analysis') fixed_fx_dof_file = File(argstr='--ffxdofdat %d', - 
xor=['fixed_fx_dof'], - desc='text file with dof for fixed effects analysis') + xor=['fixed_fx_dof'], + desc='text file with dof for fixed effects analysis') weight_file = File(exists=True, xor=['weighted_ls'], desc='weight for each input at each voxel') weight_inv = traits.Bool(argstr='--w-inv', desc='invert weights', @@ -164,15 +164,15 @@ class GLMFitInputSpec(FSTraitedSpec): desc='turn off FWHM output estimation') mask_file = File(exists=True, argstr='--mask %s', desc='binary mask') label_file = File(exists=True, argstr='--label %s', - xor=['cortex'], - desc='use label as mask, surfaces only') + xor=['cortex'], + desc='use label as mask, surfaces only') cortex = traits.Bool(argstr='--cortex', xor=['label_file'], desc='use subjects ?h.cortex.label as label') invert_mask = traits.Bool(argstr='--mask-inv', - desc='invert mask') + desc='invert mask') prune = traits.Bool(argstr='--prune', - desc='remove voxels that do not have a non-zero value at each frame (def)') + desc='remove voxels that do not have a non-zero value at each frame (def)') no_prune = traits.Bool(argstr='--no-prune', xor=['prunethresh'], desc='do not prune') @@ -180,13 +180,13 @@ class GLMFitInputSpec(FSTraitedSpec): xor=['noprune'], desc='prune threshold. Default is FLT_MIN') compute_log_y = traits.Bool(argstr='--logy', - desc='compute natural log of y prior to analysis') + desc='compute natural log of y prior to analysis') save_estimate = traits.Bool(argstr='--yhat-save', - desc='save signal estimate (yhat)') + desc='save signal estimate (yhat)') save_residual = traits.Bool(argstr='--eres-save', - desc='save residual error (eres)') + desc='save residual error (eres)') save_res_corr_mtx = traits.Bool(argstr='--eres-scm', - desc='save residual error spatial correlation matrix (eres.scm). Big!') + desc='save residual error spatial correlation matrix (eres.scm). 
Big!') surf = traits.Bool(argstr="--surf %s %s %s", requires=["subject_id", "hemi"], desc="analysis is on a surface mesh") @@ -199,37 +199,37 @@ class GLMFitInputSpec(FSTraitedSpec): argstr='--sim %s %d %f %s', desc='nulltype nsim thresh csdbasename') sim_sign = traits.Enum('abs', 'pos', 'neg', argstr='--sim-sign %s', - desc='abs, pos, or neg') + desc='abs, pos, or neg') uniform = traits.Tuple(traits.Float, traits.Float, argstr='--uniform %f %f', - desc='use uniform distribution instead of gaussian') + desc='use uniform distribution instead of gaussian') pca = traits.Bool(argstr='--pca', desc='perform pca/svd analysis on residual') calc_AR1 = traits.Bool(argstr='--tar1', - desc='compute and save temporal AR1 of residual') + desc='compute and save temporal AR1 of residual') save_cond = traits.Bool(argstr='--save-cond', - desc='flag to save design matrix condition at each voxel') + desc='flag to save design matrix condition at each voxel') vox_dump = traits.Tuple(traits.Int, traits.Int, traits.Int, - argstr='--voxdump %d %d %d', - desc='dump voxel GLM and exit') + argstr='--voxdump %d %d %d', + desc='dump voxel GLM and exit') seed = traits.Int(argstr='--seed %d', desc='used for synthesizing noise') synth = traits.Bool(argstr='--synth', desc='replace input with gaussian') resynth_test = traits.Int(argstr='--resynthtest %d', desc='test GLM by resynthsis') profile = traits.Int(argstr='--profile %d', desc='niters : test speed') force_perm = traits.Bool(argstr='--perm-force', - desc='force perumtation test, even when design matrix is not orthog') + desc='force perumtation test, even when design matrix is not orthog') diag = traits.Int('--diag %d', desc='Gdiag_no : set diagnositc level') diag_cluster = traits.Bool(argstr='--diag-cluster', - desc='save sig volume and exit from first sim loop') + desc='save sig volume and exit from first sim loop') debug = traits.Bool(argstr='--debug', desc='turn on debugging') check_opts = traits.Bool(argstr='--checkopts', - desc="don't run anything, just check options and exit") + desc="don't run anything, just check options and exit") allow_repeated_subjects = traits.Bool(argstr='--allowsubjrep', - desc='allow subject names to repeat in the fsgd file (must appear before --fsgd') + desc='allow subject names to repeat in the fsgd file (must appear before --fsgd') allow_ill_cond = traits.Bool(argstr='--illcond', - desc='allow ill-conditioned design matrices') + desc='allow ill-conditioned design matrices') sim_done_file = File(argstr='--sim-done %s', - desc='create file when simulation finished') + desc='create file when simulation finished') class GLMFitOutputSpec(TraitedSpec): @@ -341,7 +341,7 @@ def __init__(self, **kwargs): class BinarizeInputSpec(FSTraitedSpec): in_file = File(exists=True, argstr='--i %s', mandatory=True, - copyfile=False, desc='input volume') + copyfile=False, desc='input volume') min = traits.Float(argstr='--min %f', xor=['wm_ven_csf'], desc='min thresh') max = traits.Float(argstr='--max %f', xor=['wm_ven_csf'], @@ -353,40 +353,40 @@ class BinarizeInputSpec(FSTraitedSpec): match = traits.List(traits.Int, argstr='--match %d...', desc='match instead of threshold') wm = traits.Bool(argstr='--wm', - desc='set match vals to 2 and 41 (aseg for cerebral WM)') + desc='set match vals to 2 and 41 (aseg for cerebral WM)') ventricles = traits.Bool(argstr='--ventricles', - desc='set match vals those for aseg ventricles+choroid (not 4th)') + desc='set match vals those for aseg ventricles+choroid (not 4th)') wm_ven_csf = traits.Bool(argstr='--wm+vcsf', 
xor=['min', 'max'], - desc='WM and ventricular CSF, including choroid (not 4th)') + desc='WM and ventricular CSF, including choroid (not 4th)') binary_file = File(argstr='--o %s', genfile=True, - desc='binary output volume') + desc='binary output volume') out_type = traits.Enum('nii', 'nii.gz', 'mgz', argstr='', desc='output file type') count_file = traits.Either(traits.Bool, File, - argstr='--count %s', - desc='save number of hits in ascii file (hits, ntotvox, pct)') + argstr='--count %s', + desc='save number of hits in ascii file (hits, ntotvox, pct)') bin_val = traits.Int(argstr='--binval %d', - desc='set vox within thresh to val (default is 1)') + desc='set vox within thresh to val (default is 1)') bin_val_not = traits.Int(argstr='--binvalnot %d', - desc='set vox outside range to val (default is 0)') + desc='set vox outside range to val (default is 0)') invert = traits.Bool(argstr='--inv', desc='set binval=0, binvalnot=1') frame_no = traits.Int(argstr='--frame %s', - desc='use 0-based frame of input (default is 0)') + desc='use 0-based frame of input (default is 0)') merge_file = File(exists=True, argstr='--merge %s', - desc='merge with mergevol') + desc='merge with mergevol') mask_file = File(exists=True, argstr='--mask maskvol', - desc='must be within mask') + desc='must be within mask') mask_thresh = traits.Float(argstr='--mask-thresh %f', - desc='set thresh for mask') + desc='set thresh for mask') abs = traits.Bool(argstr='--abs', desc='take abs of invol first (ie, make unsigned)') bin_col_num = traits.Bool(argstr='--bincol', - desc='set binarized voxel value to its column number') + desc='set binarized voxel value to its column number') zero_edges = traits.Bool(argstr='--zero-edges', - desc='zero the edge voxels') + desc='zero the edge voxels') zero_slice_edge = traits.Bool(argstr='--zero-slice-edges', - desc='zero the edge slice voxels') + desc='zero the edge slice voxels') dilate = traits.Int(argstr='--dilate %d', desc='niters: dilate binarization in 3D') erode = traits.Int(argstr='--erode %d', @@ -424,7 +424,7 @@ def _list_outputs(self): outfile = fname_presuffix(self.inputs.in_file, newpath=os.getcwd(), suffix='.'.join(('_thresh', - self.inputs.out_type)), + self.inputs.out_type)), use_ext=False) else: outfile = fname_presuffix(self.inputs.in_file, @@ -462,45 +462,45 @@ def _gen_filename(self, name): class ConcatenateInputSpec(FSTraitedSpec): in_files = InputMultiPath(File(exists=True), - desc='Individual volumes to be concatenated', - argstr='--i %s...', mandatory=True) + desc='Individual volumes to be concatenated', + argstr='--i %s...', mandatory=True) concatenated_file = File(desc='Output volume', argstr='--o %s', genfile=True) sign = traits.Enum('abs', 'pos', 'neg', argstr='--%s', - desc='Take only pos or neg voxles from input, or take abs') + desc='Take only pos or neg voxles from input, or take abs') stats = traits.Enum('sum', 'var', 'std', 'max', 'min', 'mean', argstr='--%s', - desc='Compute the sum, var, std, max, min or mean of the input volumes') + desc='Compute the sum, var, std, max, min or mean of the input volumes') paired_stats = traits.Enum('sum', 'avg', 'diff', 'diff-norm', 'diff-norm1', - 'diff-norm2', argstr='--paired-%s', - desc='Compute paired sum, avg, or diff') + 'diff-norm2', argstr='--paired-%s', + desc='Compute paired sum, avg, or diff') gmean = traits.Int(argstr='--gmean %d', desc='create matrix to average Ng groups, Nper=Ntot/Ng') mean_div_n = traits.Bool(argstr='--mean-div-n', - desc='compute mean/nframes (good for var)') + desc='compute 
mean/nframes (good for var)') multiply_by = traits.Float(argstr='--mul %f', - desc='Multiply input volume by some amount') + desc='Multiply input volume by some amount') add_val = traits.Float(argstr='--add %f', - desc='Add some amount to the input volume') + desc='Add some amount to the input volume') multiply_matrix_file = File(exists=True, argstr='--mtx %s', - desc='Multiply input by an ascii matrix in file') + desc='Multiply input by an ascii matrix in file') combine = traits.Bool(argstr='--combine', - desc='Combine non-zero values into single frame volume') + desc='Combine non-zero values into single frame volume') keep_dtype = traits.Bool(argstr='--keep-datatype', - desc='Keep voxelwise precision type (default is float') + desc='Keep voxelwise precision type (default is float') max_bonfcor = traits.Bool(argstr='--max-bonfcor', - desc='Compute max and bonferroni correct (assumes -log10(ps))') + desc='Compute max and bonferroni correct (assumes -log10(ps))') max_index = traits.Bool(argstr='--max-index', - desc='Compute the index of max voxel in concatenated volumes') + desc='Compute the index of max voxel in concatenated volumes') mask_file = File(exists=True, argstr='--mask %s', desc='Mask input with a volume') vote = traits.Bool(argstr='--vote', - desc='Most frequent value at each voxel and fraction of occurances') + desc='Most frequent value at each voxel and fraction of occurances') sort = traits.Bool(argstr='--sort', - desc='Sort each voxel by ascending frame value') + desc='Sort each voxel by ascending frame value') class ConcatenateOutputSpec(TraitedSpec): concatenated_file = File(exists=True, - desc='Path/name of the output volume') + desc='Path/name of the output volume') class Concatenate(FSCommand): @@ -543,65 +543,65 @@ def _gen_filename(self, name): class SegStatsInputSpec(FSTraitedSpec): _xor_inputs = ('segmentation_file', 'annot', 'surf_label') segmentation_file = File(exists=True, argstr='--seg %s', xor=_xor_inputs, - mandatory=True, desc='segmentation volume path') + mandatory=True, desc='segmentation volume path') annot = traits.Tuple(traits.Str, traits.Enum('lh', 'rh'), traits.Str, argstr='--annot %s %s %s', xor=_xor_inputs, mandatory=True, desc='subject hemi parc : use surface parcellation') surf_label = traits.Tuple(traits.Str, traits.Enum('lh', 'rh'), traits.Str, - argstr='--slabel %s %s %s', xor=_xor_inputs, - mandatory=True, - desc='subject hemi label : use surface label') + argstr='--slabel %s %s %s', xor=_xor_inputs, + mandatory=True, + desc='subject hemi label : use surface label') summary_file = File(argstr='--sum %s', genfile=True, - desc='Segmentation stats summary table file') + desc='Segmentation stats summary table file') partial_volume_file = File(exists=True, argstr='--pv %f', - desc='Compensate for partial voluming') + desc='Compensate for partial voluming') in_file = File(exists=True, argstr='--i %s', - desc='Use the segmentation to report stats on this volume') + desc='Use the segmentation to report stats on this volume') frame = traits.Int(argstr='--frame %d', desc='Report stats on nth frame of input volume') multiply = traits.Float(argstr='--mul %f', desc='multiply input by val') calc_snr = traits.Bool(argstr='--snr', desc='save mean/std as extra column in output table') calc_power = traits.Enum('sqr', 'sqrt', argstr='--%s', - desc='Compute either the sqr or the sqrt of the input') + desc='Compute either the sqr or the sqrt of the input') _ctab_inputs = ('color_table_file', 'default_color_table', 'gca_color_table') color_table_file = 
File(exists=True, argstr='--ctab %s', xor=_ctab_inputs, - desc='color table file with seg id names') + desc='color table file with seg id names') default_color_table = traits.Bool(argstr='--ctab-default', xor=_ctab_inputs, - desc='use $FREESURFER_HOME/FreeSurferColorLUT.txt') + desc='use $FREESURFER_HOME/FreeSurferColorLUT.txt') gca_color_table = File(exists=True, argstr='--ctab-gca %s', xor=_ctab_inputs, - desc='get color table from GCA (CMA)') + desc='get color table from GCA (CMA)') segment_id = traits.List(argstr='--id %s...', desc='Manually specify segmentation ids') exclude_id = traits.Int(argstr='--excludeid %d', desc='Exclude seg id from report') exclude_ctx_gm_wm = traits.Bool(argstr='--excl-ctxgmwm', - desc='exclude cortical gray and white matter') + desc='exclude cortical gray and white matter') wm_vol_from_surf = traits.Bool(argstr='--surf-wm-vol', desc='Compute wm volume from surf') cortex_vol_from_surf = traits.Bool(argstr='--surf-ctx-vol', desc='Compute cortex volume from surf') non_empty_only = traits.Bool(argstr='--nonempty', desc='Only report nonempty segmentations') mask_file = File(exists=True, argstr='--mask %s', - desc='Mask volume (same size as seg') + desc='Mask volume (same size as seg') mask_thresh = traits.Float(argstr='--maskthresh %f', - desc='binarize mask with this threshold <0.5>') + desc='binarize mask with this threshold <0.5>') mask_sign = traits.Enum('abs', 'pos', 'neg', '--masksign %s', - desc='Sign for mask threshold: pos, neg, or abs') + desc='Sign for mask threshold: pos, neg, or abs') mask_frame = traits.Int('--maskframe %d', requires=['mask_file'], desc='Mask with this (0 based) frame of the mask volume') mask_invert = traits.Bool(argstr='--maskinvert', desc='Invert binarized mask volume') mask_erode = traits.Int(argstr='--maskerode %d', desc='Erode mask by some amount') brain_vol = traits.Enum('brain-vol-from-seg', 'brainmask', '--%s', - desc='Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``') + desc='Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``') etiv = traits.Bool(argstr='--etiv', desc='Compute ICV from talairach transform') etiv_only = traits.Enum('etiv', 'old-etiv', '--%s-only', - desc='Compute etiv and exit. Use ``etiv`` or ``old-etiv``') + desc='Compute etiv and exit. 
Use ``etiv`` or ``old-etiv``') avgwf_txt_file = traits.Either(traits.Bool, File, argstr='--avgwf %s', - desc='Save average waveform into file (bool or filename)') + desc='Save average waveform into file (bool or filename)') avgwf_file = traits.Either(traits.Bool, File, argstr='--avgwfvol %s', - desc='Save as binary volume (bool or filename)') + desc='Save as binary volume (bool or filename)') sf_avg_file = traits.Either(traits.Bool, File, argstr='--sfavg %s', - desc='Save mean across space and time') + desc='Save mean across space and time') vox = traits.List(traits.Int, argstr='--vox %s', - desc='Replace seg with all 0s except at C R S (three int inputs)') + desc='Replace seg with all 0s except at C R S (three int inputs)') class SegStatsOutputSpec(TraitedSpec): @@ -636,7 +636,7 @@ class SegStats(FSCommand): def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.summary_file): - outputs['summary_file'] = os.path.abspath(self.inputs.summary_file) + outputs['summary_file'] = os.path.abspath(self.inputs.summary_file) else: outputs['summary_file'] = os.path.join(os.getcwd(), 'summary.stats') suffices = dict(avgwf_txt_file='_avgwf.txt', avgwf_file='_avgwf.nii.gz', @@ -675,27 +675,27 @@ def _gen_filename(self, name): class Label2VolInputSpec(FSTraitedSpec): label_file = InputMultiPath(File(exists=True), argstr='--label %s...', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), - copyfile=False, - mandatory=True, - desc='list of label files') + xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + copyfile=False, + mandatory=True, + desc='list of label files') annot_file = File(exists=True, argstr='--annot %s', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), - requires=('subject_id', 'hemi'), - mandatory=True, - copyfile=False, - desc='surface annotation file') + xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + requires=('subject_id', 'hemi'), + mandatory=True, + copyfile=False, + desc='surface annotation file') seg_file = File(exists=True, argstr='--seg %s', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), - mandatory=True, - copyfile=False, - desc='segmentation file') + xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + mandatory=True, + copyfile=False, + desc='segmentation file') aparc_aseg = traits.Bool(argstr='--aparc+aseg', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), - mandatory=True, - desc='use aparc+aseg.mgz in subjectdir as seg') + xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + mandatory=True, + desc='use aparc+aseg.mgz in subjectdir as seg') template_file = File(exists=True, argstr='--temp %s', mandatory=True, - desc='output template volume') + desc='output template volume') reg_file = File(exists=True, argstr='--reg %s', xor=('reg_file', 'reg_header', 'identity'), desc='tkregister style matrix VolXYZ = R*LabelXYZ') @@ -706,18 +706,18 @@ class Label2VolInputSpec(FSTraitedSpec): xor=('reg_file', 'reg_header', 'identity'), desc='set R=I') invert_mtx = traits.Bool(argstr='--invertmtx', - desc='Invert the registration matrix') + desc='Invert the registration matrix') fill_thresh = traits.Range(0., 1., argstr='--fillthresh %.f', - desc='thresh : between 0 and 1') + desc='thresh : between 0 and 1') label_voxel_volume = traits.Float(argstr='--labvoxvol %f', - desc='volume of each label point (def 1mm3)') + desc='volume of each label point (def 1mm3)') proj = traits.Tuple(traits.Enum('abs', 'frac'), traits.Float, traits.Float, traits.Float, argstr='--proj %s %f 
%f %f', requires=('subject_id', 'hemi'), desc='project along surface normal') subject_id = traits.Str(argstr='--subject %s', - desc='subject id') + desc='subject id') hemi = traits.Enum('lh', 'rh', argstr='--hemi %s', desc='hemisphere to use lh or rh') surface = traits.Str(argstr='--surf %s', @@ -725,11 +725,11 @@ class Label2VolInputSpec(FSTraitedSpec): vol_label_file = File(argstr='--o %s', genfile=True, desc='output volume') label_hit_file = File(argstr='--hits %s', - desc='file with each frame is nhits for a label') + desc='file with each frame is nhits for a label') map_label_stat = File(argstr='--label-stat %s', - desc='map the label stats field into the vol') + desc='map the label stats field into the vol') native_vox2ras = traits.Bool(argstr='--native-vox2ras', - desc='use native vox2ras xform instead of tkregister-style') + desc='use native vox2ras xform instead of tkregister-style') class Label2VolOutputSpec(TraitedSpec): @@ -757,9 +757,9 @@ def _list_outputs(self): outfile = self.inputs.vol_label_file if not isdefined(outfile): for key in ['label_file', 'annot_file', 'seg_file']: - if isdefined(getattr(self.inputs,key)): + if isdefined(getattr(self.inputs, key)): path = getattr(self.inputs, key) - if isinstance(path,list): + if isinstance(path, list): path = path[0] _, src = os.path.split(path) if isdefined(self.inputs.aparc_aseg): @@ -781,7 +781,7 @@ class MS_LDAInputSpec(FSTraitedSpec): minlen=2, maxlen=2, sep=' ', desc='pair of class labels to optimize') weight_file = traits.File(argstr='-weight %s', mandatory=True, - desc='filename for the LDA weights (input or output)') + desc='filename for the LDA weights (input or output)') vol_synth_file = traits.File(exists=False, argstr='-synth %s', mandatory=True, desc=('filename for the synthesized output ' @@ -791,7 +791,7 @@ class MS_LDAInputSpec(FSTraitedSpec): mask_file = traits.File(exists=True, argstr='-mask %s', desc='filename of the brain mask volume') shift = traits.Int(argstr='-shift %d', - desc='shift all values equal to the given value to zero') + desc='shift all values equal to the given value to zero') conform = traits.Bool(argstr='-conform', desc=('Conform the input volumes (brain mask ' 'typically already conformed)')) diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 7f30a89758..c2d634291b 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -33,7 +33,7 @@ class ParseDICOMDirInputSpec(FSTraitedSpec): dicom_dir = Directory(exists=True, argstr='--d %s', mandatory=True, - desc='path to siemens dicom directory') + desc='path to siemens dicom directory') dicom_info_file = File('dicominfo.txt', argstr='--o %s', usedefault=True, desc='file to which results are written') sortbyrun = traits.Bool(argstr='--sortbyrun', desc='assign run numbers') @@ -80,30 +80,30 @@ class UnpackSDICOMDirInputSpec(FSTraitedSpec): output_dir = Directory(argstr='-targ %s', desc='top directory into which the files will be unpacked') run_info = traits.Tuple(traits.Int, traits.Str, traits.Str, traits.Str, - mandatory=True, - argstr='-run %d %s %s %s', - xor=('run_info', 'config', 'seq_config'), - desc='runno subdir format name : spec unpacking rules on cmdline') + mandatory=True, + argstr='-run %d %s %s %s', + xor=('run_info', 'config', 'seq_config'), + desc='runno subdir format name : spec unpacking rules on cmdline') config = File(exists=True, argstr='-cfg %s', mandatory=True, xor=('run_info', 'config', 'seq_config'), desc='specify 
unpacking rules in file') seq_config = File(exists=True, argstr='-seqcfg %s', - mandatory=True, - xor=('run_info', 'config', 'seq_config'), - desc='specify unpacking rules based on sequence') + mandatory=True, + xor=('run_info', 'config', 'seq_config'), + desc='specify unpacking rules based on sequence') dir_structure = traits.Enum('fsfast', 'generic', argstr='-%s', desc='unpack to specified directory structures') no_info_dump = traits.Bool(argstr='-noinfodump', - desc='do not create infodump file') + desc='do not create infodump file') scan_only = File(exists=True, argstr='-scanonly %s', - desc='only scan the directory and put result in file') + desc='only scan the directory and put result in file') log_file = File(exists=True, argstr='-log %s', - desc='explicilty set log file') + desc='explicilty set log file') spm_zeropad = traits.Int(argstr='-nspmzeropad %d', - desc='set frame number zero padding width for SPM') + desc='set frame number zero padding width for SPM') no_unpack_err = traits.Bool(argstr='-no-unpackerr', - desc='do not try to unpack runs with errors') + desc='do not try to unpack runs with errors') class UnpackSDICOMDir(FSCommand): @@ -134,17 +134,17 @@ class MRIConvertInputSpec(FSTraitedSpec): no_write = traits.Bool(argstr='--no_write', desc='do not write output') in_info = traits.Bool(argstr='--in_info', - desc='display input info') + desc='display input info') out_info = traits.Bool(argstr='--out_info', - desc='display output info') + desc='display output info') in_stats = traits.Bool(argstr='--in_stats', - desc='display input stats') + desc='display input stats') out_stats = traits.Bool(argstr='--out_stats', - desc='display output stats') + desc='display output stats') in_matrix = traits.Bool(argstr='--in_matrix', - desc='display input matrix') + desc='display input matrix') out_matrix = traits.Bool(argstr='--out_matrix', - desc='display output matrix') + desc='display output matrix') in_i_size = traits.Int(argstr='--in_i_size %d', desc='input i size') in_j_size = traits.Int(argstr='--in_j_size %d', @@ -152,24 +152,24 @@ class MRIConvertInputSpec(FSTraitedSpec): in_k_size = traits.Int(argstr='--in_k_size %d', desc='input k size') force_ras = traits.Bool(argstr='--force_ras_good', - desc='use default when orientation info absent') + desc='use default when orientation info absent') in_i_dir = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='--in_i_direction %f %f %f', - desc=' ') + argstr='--in_i_direction %f %f %f', + desc=' ') in_j_dir = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='--in_j_direction %f %f %f', - desc=' ') + argstr='--in_j_direction %f %f %f', + desc=' ') in_k_dir = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='--in_k_direction %f %f %f', - desc=' ') + argstr='--in_k_direction %f %f %f', + desc=' ') _orientations = ['LAI', 'LIA', 'ALI', 'AIL', 'ILA', 'IAL', 'LAS', 'LSA', 'ALS', 'ASL', 'SLA', 'SAL', 'LPI', 'LIP', 'PLI', 'PIL', 'ILP', 'IPL', 'LPS', 'LSP', 'PLS', 'PSL', 'SLP', 'SPL', 'RAI', 'RIA', 'ARI', 'AIR', 'IRA', 'IAR', 'RAS', 'RSA', 'ARS', 'ASR', 'SRA', 'SAR', 'RPI', 'RIP', 'PRI', 'PIR', 'IRP', 'IPR', 'RPS', 'RSP', 'PRS', 'PSR', 'SRP', 'SPR'] - #_orientations = [comb for comb in itertools.chain(*[[''.join(c) for c in itertools.permutations(s)] for s in [a+b+c for a in 'LR' for b in 'AP' for c in 'IS']])] + # _orientations = [comb for comb in itertools.chain(*[[''.join(c) for c in itertools.permutations(s)] for s in [a+b+c for a in 'LR' for b in 'AP' for c in 'IS']])] in_orientation = 
traits.Enum(_orientations, - argstr='--in_orientation %s', - desc='specify the input orientation') + argstr='--in_orientation %s', + desc='specify the input orientation') in_center = traits.List(traits.Float, maxlen=3, - argstr='--in_center %s', - desc=' ') + argstr='--in_center %s', + desc=' ') sphinx = traits.Bool(argstr='--sphinx', desc='change orientation info to sphinx') out_i_count = traits.Int(argstr='--out_i_count %d', @@ -179,8 +179,8 @@ class MRIConvertInputSpec(FSTraitedSpec): out_k_count = traits.Int(argstr='--out_k_count %d', desc='some count ?? in k direction') vox_size = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='-voxsize %f %f %f', - desc=' specify the size (mm) - useful for upsampling or downsampling') + argstr='-voxsize %f %f %f', + desc=' specify the size (mm) - useful for upsampling or downsampling') out_i_size = traits.Int(argstr='--out_i_size %d', desc='output i size') out_j_size = traits.Int(argstr='--out_j_size %d', @@ -188,30 +188,30 @@ class MRIConvertInputSpec(FSTraitedSpec): out_k_size = traits.Int(argstr='--out_k_size %d', desc='output k size') out_i_dir = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='--out_i_direction %f %f %f', - desc=' ') + argstr='--out_i_direction %f %f %f', + desc=' ') out_j_dir = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='--out_j_direction %f %f %f', - desc=' ') + argstr='--out_j_direction %f %f %f', + desc=' ') out_k_dir = traits.Tuple(traits.Float, traits.Float, traits.Float, argstr='--out_k_direction %f %f %f', desc=' ') out_orientation = traits.Enum(_orientations, - argstr='--out_orientation %s', - desc='specify the output orientation') + argstr='--out_orientation %s', + desc='specify the output orientation') out_center = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='--out_center %f %f %f', - desc=' ') + argstr='--out_center %f %f %f', + desc=' ') out_datatype = traits.Enum('uchar', 'short', 'int', 'float', - argstr='--out_data_type %s', - desc='output data type ') + argstr='--out_data_type %s', + desc='output data type ') resample_type = traits.Enum('interpolate', 'weighted', 'nearest', 'sinc', 'cubic', - argstr='--resample_type %s', - desc=' (default is interpolate)') + argstr='--resample_type %s', + desc=' (default is interpolate)') no_scale = traits.Bool(argstr='--no_scale 1', - desc='dont rescale values for COR') + desc='dont rescale values for COR') no_change = traits.Bool(argstr='--nochange', - desc="don't change type of input to that of template") + desc="don't change type of input to that of template") tr = traits.Int(argstr='-tr %d', desc='TR in msec') te = traits.Int(argstr='-te %d', @@ -219,30 +219,30 @@ class MRIConvertInputSpec(FSTraitedSpec): ti = traits.Int(argstr='-ti %d', desc='TI in msec (note upper case flag)') autoalign_matrix = File(exists=True, argstr='--autoalign %s', - desc='text file with autoalign matrix') + desc='text file with autoalign matrix') unwarp_gradient = traits.Bool(argstr='--unwarp_gradient_nonlinearity', - desc='unwarp gradient nonlinearity') + desc='unwarp gradient nonlinearity') apply_transform = File(exists=True, argstr='--apply_transform %s', - desc='apply xfm file') + desc='apply xfm file') apply_inv_transform = File(exists=True, argstr='--apply_inverse_transform %s', - desc='apply inverse transformation xfm file') + desc='apply inverse transformation xfm file') devolve_transform = traits.Str(argstr='--devolvexfm %s', - desc='subject id') + desc='subject id') crop_center = traits.Tuple(traits.Int, traits.Int, 
traits.Int, - argstr='--crop %d %d %d', - desc=' crop to 256 around center (x, y, z)') + argstr='--crop %d %d %d', + desc=' crop to 256 around center (x, y, z)') crop_size = traits.Tuple(traits.Int, traits.Int, traits.Int, - argstr='--cropsize %d %d %d', - desc=' crop to size ') + argstr='--cropsize %d %d %d', + desc=' crop to size ') cut_ends = traits.Int(argstr='--cutends %d', - desc='remove ncut slices from the ends') + desc='remove ncut slices from the ends') slice_crop = traits.Tuple(traits.Int, traits.Int, - argstr='--slice-crop %d %d', - desc='s_start s_end : keep slices s_start to s_end') + argstr='--slice-crop %d %d', + desc='s_start s_end : keep slices s_start to s_end') slice_reverse = traits.Bool(argstr='--slice-reverse', - desc='reverse order of slices, update vox2ras') + desc='reverse order of slices, update vox2ras') slice_bias = traits.Float(argstr='--slice-bias %f', - desc='apply half-cosine bias field') + desc='apply half-cosine bias field') fwhm = traits.Float(argstr='--fwhm %f', desc='smooth input volume by fwhm mm') _filetypes = ['cor', 'mgh', 'mgz', 'minc', 'analyze', @@ -251,38 +251,38 @@ class MRIConvertInputSpec(FSTraitedSpec): 'nifti1', 'nii', 'niigz'] _infiletypes = ['ge', 'gelx', 'lx', 'ximg', 'siemens', 'dicom', 'siemens_dicom'] in_type = traits.Enum(_filetypes + _infiletypes, argstr='--in_type %s', - desc='input file type') + desc='input file type') out_type = traits.Enum(_filetypes, argstr='--out_type %s', - desc='output file type') + desc='output file type') ascii = traits.Bool(argstr='--ascii', desc='save output as ascii col>row>slice>frame') reorder = traits.Tuple(traits.Int, traits.Int, traits.Int, argstr='--reorder %d %d %d', desc='olddim1 olddim2 olddim3') invert_contrast = traits.Float(argstr='--invert_contrast %f', - desc='threshold for inversting contrast') + desc='threshold for inversting contrast') in_file = File(exists=True, mandatory=True, - position=-2, - argstr='--input_volume %s', - desc='File to read/convert') + position=-2, + argstr='--input_volume %s', + desc='File to read/convert') out_file = File(argstr='--output_volume %s', - position=-1, genfile=True, - desc='output filename or True to generate one') + position=-1, genfile=True, + desc='output filename or True to generate one') conform = traits.Bool(argstr='--conform', desc='conform to 256^3') conform_min = traits.Bool(argstr='--conform_min', - desc='conform to smallest size') + desc='conform to smallest size') conform_size = traits.Float(argstr='--conform_size %s', - desc='conform to size_in_mm') + desc='conform to size_in_mm') parse_only = traits.Bool(argstr='--parse_only', desc='parse input only') subject_name = traits.Str(argstr='--subject_name %s', desc='subject name ???') reslice_like = File(exists=True, argstr='--reslice_like %s', - desc='reslice output to match file') + desc='reslice output to match file') template_type = traits.Enum(_filetypes + _infiletypes, - argstr='--template_type %s', - desc='template file type') + argstr='--template_type %s', + desc='template file type') split = traits.Bool(argstr='--split', desc='split output frames into separate output files.') frame = traits.Int(argstr='--frame %d', @@ -290,38 +290,38 @@ class MRIConvertInputSpec(FSTraitedSpec): midframe = traits.Bool(argstr='--mid-frame', desc='keep only the middle frame') skip_n = traits.Int(argstr='--nskip %d', - desc='skip the first n frames') + desc='skip the first n frames') drop_n = traits.Int(argstr='--ndrop %d', - desc='drop the last n frames') + desc='drop the last n frames') frame_subsample 
= traits.Tuple(traits.Int, traits.Int, traits.Int, - argstr='--fsubsample %d %d %d', - desc='start delta end : frame subsampling (end = -1 for end)') + argstr='--fsubsample %d %d %d', + desc='start delta end : frame subsampling (end = -1 for end)') in_scale = traits.Float(argstr='--scale %f', - desc='input intensity scale factor') + desc='input intensity scale factor') out_scale = traits.Float(argstr='--out-scale %d', - desc='output intensity scale factor') + desc='output intensity scale factor') in_like = File(exists=True, argstr='--in_like %s', - desc='input looks like') + desc='input looks like') fill_parcellation = traits.Bool(argstr='--fill_parcellation', - desc='fill parcellation') + desc='fill parcellation') smooth_parcellation = traits.Bool(argstr='--smooth_parcellation', - desc='smooth parcellation') + desc='smooth parcellation') zero_outlines = traits.Bool(argstr='--zero_outlines', - desc='zero outlines') + desc='zero outlines') color_file = File(exists=True, argstr='--color_file %s', - desc='color file') + desc='color file') no_translate = traits.Bool(argstr='--no_translate', - desc='???') + desc='???') status_file = File(argstr='--status %s', - desc='status file for DICOM conversion') + desc='status file for DICOM conversion') sdcm_list = File(exists=True, argstr='--sdcmlist %s', - desc='list of DICOM files for conversion') + desc='list of DICOM files for conversion') template_info = traits.Bool('--template_info', - desc='dump info about template') + desc='dump info about template') crop_gdf = traits.Bool(argstr='--crop_gdf', desc='apply GDF cropping') zero_ge_z_offset = traits.Bool(argstr='--zero_ge_z_offset', - desc='zero ge z offset ???') + desc='zero ge z offset ???') class MRIConvertOutputSpec(TraitedSpec): @@ -422,24 +422,24 @@ def _gen_filename(self, name): class DICOMConvertInputSpec(FSTraitedSpec): dicom_dir = Directory(exists=True, mandatory=True, - desc='dicom directory from which to convert dicom files') + desc='dicom directory from which to convert dicom files') base_output_dir = Directory(mandatory=True, - desc='directory in which subject directories are created') + desc='directory in which subject directories are created') subject_dir_template = traits.Str('S.%04d', usedefault=True, - desc='template for subject directory name') + desc='template for subject directory name') subject_id = traits.Any(desc='subject identifier to insert into template') file_mapping = traits.List(traits.Tuple(traits.Str, traits.Str), - desc='defines the output fields of interface') + desc='defines the output fields of interface') out_type = traits.Enum('niigz', MRIConvertInputSpec._filetypes, usedefault=True, - desc='defines the type of output file produced') + desc='defines the type of output file produced') dicom_info = File(exists=True, - desc='File containing summary information from mri_parse_sdcmdir') + desc='File containing summary information from mri_parse_sdcmdir') seq_list = traits.List(traits.Str, requires=['dicom_info'], - desc='list of pulse sequence names to be converted.') + desc='list of pulse sequence names to be converted.') ignore_single_slice = traits.Bool(requires=['dicom_info'], - desc='ignore volumes containing a single slice') + desc='ignore volumes containing a single slice') class DICOMConvert(FSCommand): @@ -509,7 +509,7 @@ def _get_filelist(self, outdir): out_type = MRIConvert.filemap[self.inputs.out_type] outfile = os.path.join(outdir, '.'.join(('%s-%02d' % (fileparts[0], runno), - out_type))) + out_type))) filemap[runno] = (f, outfile) if 
self.inputs.dicom_info: files = [filemap[r] for r in self._get_runs()] @@ -538,22 +538,22 @@ def cmdline(self): single_cmd = '%s %s %s' % (self.cmd, infile, os.path.join(outdir, outfile)) cmd.extend([single_cmd]) - return '; '.join(cmd) + return '; '.join(cmd) class ResampleInputSpec(FSTraitedSpec): in_file = File(exists=True, argstr='-i %s', mandatory=True, - desc='file to resample', position=-2) + desc='file to resample', position=-2) resampled_file = File(argstr='-o %s', desc='output filename', genfile=True, position=-1) voxel_size = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr='-vs %.2f %.2f %.2f', desc='triplet of output voxel sizes', + argstr='-vs %.2f %.2f %.2f', desc='triplet of output voxel sizes', mandatory=True) class ResampleOutputSpec(TraitedSpec): resampled_file = File(exists=True, - desc='output filename') + desc='output filename') class Resample(FSCommand): @@ -646,9 +646,8 @@ class ReconAll(CommandLine): output_spec = ReconAllIOutputSpec _can_resume = True - _steps = [ - #autorecon1 + # autorecon1 ('motioncor', ['mri/rawavg.mgz', 'mri/orig.mgz']), ('talairach', ['mri/transforms/talairach.auto.xfm', 'mri/transforms/talairach.xfm']), @@ -657,7 +656,7 @@ class ReconAll(CommandLine): ('skullstrip', ['mri/brainmask.auto.mgz', 'mri/brainmask.mgz']), - #autorecon2 + # autorecon2 ('gcareg', ['mri/transforms/talairach.lta']), ('canorm', ['mri/norm.mgz']), ('careg', ['mri/transforms/talairach.m3z']), @@ -696,7 +695,7 @@ class ReconAll(CommandLine): 'surf/rh.inflated.H', 'surf/lh.inflated.K', 'surf/rh.inflated.K']), - #autorecon3 + # autorecon3 ('sphere', ['surf/lh.sphere', 'surf/rh.sphere']), ('surfreg', ['surf/lh.sphere.reg', 'surf/rh.sphere.reg']), ('jacobian_white', ['surf/lh.jacobian_white', @@ -781,22 +780,22 @@ def cmdline(self): subjects_dir = self.inputs.subjects_dir if not isdefined(subjects_dir): subjects_dir = self._gen_subjects_dir() - #cmd = cmd.replace(' -all ', ' -make all ') + # cmd = cmd.replace(' -all ', ' -make all ') iflogger.info('Overriding recon-all directive') flags = [] directive = 'all' for idx, step in enumerate(self._steps): step, outfiles = step if all([os.path.exists(os.path.join(subjects_dir, - self.inputs.subject_id,f)) for + self.inputs.subject_id, f)) for f in outfiles]): - flags.append('-no%s'%step) + flags.append('-no%s' %step) if idx > 4: directive = 'autorecon2' elif idx > 23: directive = 'autorecon3' else: - flags.append('-%s'%step) + flags.append('-%s' %step) cmd = cmd.replace(' -%s ' % self.inputs.directive, ' -%s ' % directive) cmd += ' ' + ' '.join(flags) iflogger.info('resume recon-all : %s' % cmd) @@ -833,9 +832,9 @@ class BBRegisterInputSpec(FSTraitedSpec): epi_mask = traits.Bool(argstr="--epi-mask", desc="mask out B0 regions in stages 1 and 2") out_fsl_file = traits.Either(traits.Bool, File, argstr="--fslmat %s", - desc="write the transformation matrix in FSL FLIRT format") + desc="write the transformation matrix in FSL FLIRT format") registered_file = traits.Either(traits.Bool, File, argstr='--o %s', - desc='output warped sourcefile either True or filename') + desc='output warped sourcefile either True or filename') class BBRegisterOutputSpec(TraitedSpec): @@ -889,7 +888,7 @@ def _list_outputs(self): if isdefined(_in.out_fsl_file): if isinstance(_in.out_fsl_file, bool): - suffix='_bbreg_%s.mat' % _in.subject_id + suffix = '_bbreg_%s.mat' % _in.subject_id out_fsl_file = fname_presuffix(_in.source_file, suffix=suffix, use_ext=False) @@ -919,35 +918,35 @@ def _gen_filename(self, name): class 
ApplyVolTransformInputSpec(FSTraitedSpec): source_file = File(exists=True, argstr='--mov %s', - copyfile=False, mandatory=True, - desc='Input volume you wish to transform') + copyfile=False, mandatory=True, + desc='Input volume you wish to transform') transformed_file = File(desc='Output volume', argstr='--o %s', genfile=True) _targ_xor = ('target_file', 'tal', 'fs_target') target_file = File(exists=True, argstr='--targ %s', xor=_targ_xor, - desc='Output template volume', mandatory=True) + desc='Output template volume', mandatory=True) tal = traits.Bool(argstr='--tal', xor=_targ_xor, mandatory=True, desc='map to a sub FOV of MNI305 (with --reg only)') tal_resolution = traits.Float(argstr="--talres %.10f", desc="Resolution to sample when using tal") fs_target = traits.Bool(argstr='--fstarg', xor=_targ_xor, mandatory=True, - requires=['reg_file'], - desc='use orig.mgz from subject in regfile as target') + requires=['reg_file'], + desc='use orig.mgz from subject in regfile as target') _reg_xor = ('reg_file', 'fsl_reg_file', 'xfm_reg_file', 'reg_header', 'subject') reg_file = File(exists=True, xor=_reg_xor, argstr='--reg %s', mandatory=True, desc='tkRAS-to-tkRAS matrix (tkregister2 format)') fsl_reg_file = File(exists=True, xor=_reg_xor, argstr='--fsl %s', - mandatory=True, - desc='fslRAS-to-fslRAS matrix (FSL format)') + mandatory=True, + desc='fslRAS-to-fslRAS matrix (FSL format)') xfm_reg_file = File(exists=True, xor=_reg_xor, argstr='--xfm %s', - mandatory=True, - desc='ScannerRAS-to-ScannerRAS matrix (MNI format)') + mandatory=True, + desc='ScannerRAS-to-ScannerRAS matrix (MNI format)') reg_header = traits.Bool(xor=_reg_xor, argstr='--regheader', - mandatory=True, - desc='ScannerRAS-to-ScannerRAS matrix = identity') + mandatory=True, + desc='ScannerRAS-to-ScannerRAS matrix = identity') subject = traits.Str(xor=_reg_xor, argstr='--s %s', - mandatory=True, - desc='set matrix = identity and use subject for any templates') + mandatory=True, + desc='set matrix = identity and use subject for any templates') inverse = traits.Bool(desc='sample from target to source', argstr='--inv') interp = traits.Enum('trilin', 'nearest', 'cubic', argstr='--interp %s', @@ -975,6 +974,7 @@ class ApplyVolTransformInputSpec(FSTraitedSpec): 'non-linear morph to resample the input ' 'volume. 
To be used by --m3z.')) + class ApplyVolTransformOutputSpec(TraitedSpec): transformed_file = File(exists=True, desc='Path to output file if used normally') @@ -1025,17 +1025,18 @@ def _gen_filename(self, name): return self._get_outfile() return None + class SmoothInputSpec(FSTraitedSpec): in_file = File(exists=True, desc='source volume', - argstr='--i %s', mandatory=True) + argstr='--i %s', mandatory=True) reg_file = File(desc='registers volume to surface anatomical ', - argstr='--reg %s', mandatory=True, - exists=True) + argstr='--reg %s', mandatory=True, + exists=True) smoothed_file = File(desc='output volume', argstr='--o %s', genfile=True) proj_frac_avg = traits.Tuple(traits.Float, traits.Float, traits.Float, - xor=['proj_frac'], - desc='average a long normal min max delta', - argstr='--projfrac-avg %.2f %.2f %.2f') + xor=['proj_frac'], + desc='average a long normal min max delta', + argstr='--projfrac-avg %.2f %.2f %.2f') proj_frac = traits.Float(desc='project frac of thickness a long surface normal', xor=['proj_frac_avg'], argstr='--projfrac %s') @@ -1043,8 +1044,8 @@ class SmoothInputSpec(FSTraitedSpec): mandatory=True, xor=['num_iters'], desc='surface FWHM in mm', argstr='--fwhm %f') num_iters = traits.Range(low=1, xor=['surface_fwhm'], - mandatory=True, argstr='--niters %d', - desc='number of iterations instead of fwhm') + mandatory=True, argstr='--niters %d', + desc='number of iterations instead of fwhm') vol_fwhm = traits.Range(low=0.0, argstr='--vol-fwhm %f', desc='volume smoothing outside of surface') @@ -1103,11 +1104,11 @@ class RobustRegisterInputSpec(FSTraitedSpec): out_reg_file = File(genfile=True, argstr='--lta %s', desc='registration file to write') registered_file = traits.Either(traits.Bool, File, argstr='--warp %s', - desc='registered image; either True or filename') + desc='registered image; either True or filename') weights_file = traits.Either(traits.Bool, File, argstr='--weights %s', - desc='weights image to write; either True or filename') + desc='weights image to write; either True or filename') est_int_scale = traits.Bool(argstr='--iscale', - desc='estimate intensity scale (recommended for unnormalized images)') + desc='estimate intensity scale (recommended for unnormalized images)') trans_only = traits.Bool(argstr='--transonly', desc='find 3 parameter translation only') in_xfm_file = File(exists=True, argstr='--transform', @@ -1130,7 +1131,7 @@ class RobustRegisterInputSpec(FSTraitedSpec): desc='use least squares instead of robust estimator') no_init = traits.Bool(argstr='--noinit', desc='skip transform init') init_orient = traits.Bool(argstr='--initorient', - desc='use moments for initial orient (recommended for stripped brains)') + desc='use moments for initial orient (recommended for stripped brains)') max_iterations = traits.Int(argstr='--maxit %d', desc='maximum # of times on each resolution') high_iterations = traits.Int(argstr='--highit %d', @@ -1138,7 +1139,7 @@ class RobustRegisterInputSpec(FSTraitedSpec): iteration_thresh = traits.Float(argstr='--epsit %.3f', desc='stop iterations when below threshold') subsample_thresh = traits.Int(argstr='--subsample %d', - desc='subsample if dimension is above threshold size') + desc='subsample if dimension is above threshold size') outlier_limit = traits.Float(argstr='--wlimit %.3f', desc='set maximal outlier limit in satit') write_vo2vox = traits.Bool(argstr='--vox2vox', @@ -1205,7 +1206,7 @@ def _list_outputs(self): outputs['out_reg_file'] = self.inputs.out_reg_file if not isdefined(self.inputs.out_reg_file) 
and self.inputs.source_file: outputs['out_reg_file'] = fname_presuffix(self.inputs.source_file, - suffix='_robustreg.lta', use_ext=False) + suffix='_robustreg.lta', use_ext=False) prefices = dict(src=self.inputs.source_file, trg=self.inputs.target_file) suffices = dict(registered_file=("src", "_robustreg", True), weights_file=("src", "_robustweights", True), @@ -1242,7 +1243,7 @@ class FitMSParamsInputSpec(FSTraitedSpec): xfm_list = traits.List(File(exists=True), desc="list of transform files to apply to each FLASH image") out_dir = Directory(argstr="%s", position=-1, genfile=True, - desc="directory to store output in") + desc="directory to store output in") class FitMSParamsOutputSpec(TraitedSpec): @@ -1305,7 +1306,7 @@ def _gen_filename(self, name): class SynthesizeFLASHInputSpec(FSTraitedSpec): fixed_weighting = traits.Bool(position=1, argstr="-w", - desc="use a fixed weighting to generate optimal gray/white contrast") + desc="use a fixed weighting to generate optimal gray/white contrast") tr = traits.Float(mandatory=True, position=2, argstr="%.2f", desc="repetition time (in msec)") flip_angle = traits.Float(mandatory=True, position=3, argstr="%.2f", @@ -1348,7 +1349,7 @@ def _list_outputs(self): outputs["out_file"] = self.inputs.out_file else: outputs["out_file"] = self._gen_fname("synth-flash_%02d.mgz" % self.inputs.flip_angle, - suffix="") + suffix="") return outputs def _gen_filename(self, name): diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index e0d6ac9eb8..509964ddf5 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -9,31 +9,35 @@ assert_raises, skipif) import nipype.interfaces.freesurfer as freesurfer + def no_freesurfer(): if freesurfer.Info().version is None: return True else: return False + def create_files_in_directory(): outdir = os.path.realpath(mkdtemp()) cwd = os.getcwd() os.chdir(outdir) - filelist = ['a.nii','b.nii'] + filelist = ['a.nii', 'b.nii'] for f in filelist: hdr = nif.Nifti1Header() - shape = (3,3,3,4) + shape = (3, 3, 3, 4) hdr.set_data_shape(shape) img = np.random.random(shape) - nif.save(nif.Nifti1Image(img,np.eye(4),hdr), - os.path.join(outdir,f)) + nif.save(nif.Nifti1Image(img, np.eye(4), hdr), + os.path.join(outdir, f)) return filelist, outdir, cwd + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) os.chdir(old_wd) + @skipif(no_freesurfer) def test_robustregister(): filelist, outdir, cwd = create_files_in_directory() @@ -51,16 +55,17 @@ def test_robustregister(): reg.inputs.target_file = filelist[1] reg.inputs.auto_sens = True yield assert_equal, reg.cmdline, ('mri_robust_register ' - '--satit --lta %s_robustreg.lta --mov %s --dst %s'%(filelist[0][:-4],filelist[0],filelist[1])) + '--satit --lta %s_robustreg.lta --mov %s --dst %s' %(filelist[0][:-4], filelist[0], filelist[1])) # constructor based parameter setting - reg2 = freesurfer.RobustRegister(source_file=filelist[0],target_file=filelist[1],outlier_sens=3.0, + reg2 = freesurfer.RobustRegister(source_file=filelist[0], target_file=filelist[1], outlier_sens=3.0, out_reg_file='foo.lta', half_targ=True) yield assert_equal, reg2.cmdline, ('mri_robust_register --halfdst %s_halfway.nii --lta foo.lta ' '--sat 3.0000 --mov %s --dst %s' - %(os.path.join(outdir,filelist[1][:-4]),filelist[0],filelist[1])) + % (os.path.join(outdir, filelist[1][:-4]), filelist[0], filelist[1])) clean_directory(outdir, cwd) + @skipif(no_freesurfer) 
def test_fitmsparams(): filelist, outdir, cwd = create_files_in_directory() @@ -76,15 +81,16 @@ def test_fitmsparams(): # .inputs based parameters setting fit.inputs.in_files = filelist fit.inputs.out_dir = outdir - yield assert_equal, fit.cmdline, 'mri_ms_fitparms %s %s %s'%(filelist[0],filelist[1],outdir) + yield assert_equal, fit.cmdline, 'mri_ms_fitparms %s %s %s' %(filelist[0], filelist[1], outdir) # constructor based parameter setting - fit2 = freesurfer.FitMSParams(in_files=filelist,te_list=[1.5,3.5],flip_list=[20,30],out_dir=outdir) + fit2 = freesurfer.FitMSParams(in_files=filelist, te_list=[1.5, 3.5], flip_list=[20, 30], out_dir=outdir) yield assert_equal, fit2.cmdline, ('mri_ms_fitparms -te %.3f -fa %.1f %s -te %.3f -fa %.1f %s %s' - %(1.500,20.0,filelist[0],3.500,30.0,filelist[1],outdir)) + % (1.500, 20.0, filelist[0], 3.500, 30.0, filelist[1], outdir)) clean_directory(outdir, cwd) + @skipif(no_freesurfer) def test_synthesizeflash(): filelist, outdir, cwd = create_files_in_directory() @@ -105,10 +111,10 @@ def test_synthesizeflash(): syn.inputs.tr = 20 yield assert_equal, syn.cmdline, ('mri_synthesize 20.00 30.00 4.500 %s %s %s' - %(filelist[0],filelist[1],os.path.join(outdir,'synth-flash_30.mgz'))) + % (filelist[0], filelist[1], os.path.join(outdir, 'synth-flash_30.mgz'))) # constructor based parameters setting - syn2 = freesurfer.SynthesizeFLASH(t1_image=filelist[0],pd_image=filelist[1],flip_angle=20,te=5,tr=25) + syn2 = freesurfer.SynthesizeFLASH(t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25) yield assert_equal, syn2.cmdline, ('mri_synthesize 25.00 20.00 5.000 %s %s %s' - %(filelist[0],filelist[1],os.path.join(outdir,'synth-flash_20.mgz'))) + % (filelist[0], filelist[1], os.path.join(outdir, 'synth-flash_20.mgz'))) diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index 0a1616e7c8..a1ce930dc2 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -14,47 +14,52 @@ import nipype.interfaces.freesurfer as fs + def no_freesurfer(): if fs.Info().version is None: return True else: return False + def create_files_in_directory(): outdir = os.path.realpath(mkdtemp()) cwd = os.getcwd() os.chdir(outdir) - filelist = ['a.nii','b.nii'] + filelist = ['a.nii', 'b.nii'] for f in filelist: hdr = nif.Nifti1Header() - shape = (3,3,3,4) + shape = (3, 3, 3, 4) hdr.set_data_shape(shape) img = np.random.random(shape) - nif.save(nif.Nifti1Image(img,np.eye(4),hdr), - os.path.join(outdir,f)) + nif.save(nif.Nifti1Image(img, np.eye(4), hdr), + os.path.join(outdir, f)) with open(os.path.join(outdir, 'reg.dat'), 'wt') as fp: fp.write('dummy file') filelist.append('reg.dat') return filelist, outdir, cwd + def create_surf_file(): outdir = os.path.realpath(mkdtemp()) cwd = os.getcwd() os.chdir(outdir) surf = 'lh.a.nii' hdr = nif.Nifti1Header() - shape = (1,100,1) + shape = (1, 100, 1) hdr.set_data_shape(shape) img = np.random.random(shape) - nif.save(nif.Nifti1Image(img,np.eye(4),hdr), - os.path.join(outdir,surf)) + nif.save(nif.Nifti1Image(img, np.eye(4), hdr), + os.path.join(outdir, surf)) return surf, outdir, cwd + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) os.chdir(old_wd) + @skipif(no_freesurfer) def test_sample2surf(): @@ -79,11 +84,11 @@ def test_sample2surf(): # Test a basic command line yield assert_equal, s2s.cmdline, ("mri_vol2surf " - "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" - 
%(os.path.join(cwd, "lh.a.mgz"),files[1],files[0])) + "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" + % (os.path.join(cwd, "lh.a.mgz"), files[1], files[0])) # Test identity - s2sish = fs.SampleToSurface(source_file = files[1], reference_file = files[0],hemi="rh") + s2sish = fs.SampleToSurface(source_file=files[1], reference_file=files[0], hemi="rh") yield assert_not_equal, s2s, s2sish # Test hits file name creation @@ -98,6 +103,7 @@ def set_illegal_range(): # Clean up our mess clean_directory(cwd, oldwd) + @skipif(no_freesurfer) def test_surfsmooth(): @@ -121,8 +127,8 @@ def test_surfsmooth(): # Test the command line yield assert_equal, smooth.cmdline, \ - ("mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage"% - (surf, cwd, fwhm)) + ("mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" % + (surf, cwd, fwhm)) # Test identity shmooth = fs.SurfaceSmooth( @@ -132,6 +138,7 @@ def test_surfsmooth(): # Clean up clean_directory(cwd, oldwd) + @skipif(no_freesurfer) def test_surfxfm(): @@ -154,8 +161,8 @@ def test_surfxfm(): # Test the command line yield assert_equal, xfm.cmdline, \ - ("mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage"% - (cwd, surf)) + ("mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage" % + (cwd, surf)) # Test identity xfmish = fs.SurfaceTransform( @@ -165,6 +172,7 @@ def test_surfxfm(): # Clean up clean_directory(cwd, oldwd) + @skipif(no_freesurfer) def test_applymask(): masker = fs.ApplyMask() @@ -177,7 +185,7 @@ def test_applymask(): # Test exception with mandatory args absent yield assert_raises, ValueError, masker.run for input in ["in_file", "mask_file"]: - indict = {input:filelist[0]} + indict = {input: filelist[0]} willbreak = fs.ApplyMask(**indict) yield assert_raises, ValueError, willbreak.run @@ -185,16 +193,17 @@ def test_applymask(): masker.inputs.in_file = filelist[0] masker.inputs.mask_file = filelist[1] outfile = os.path.join(testdir, "a_masked.nii") - yield assert_equal, masker.cmdline, "mri_mask a.nii b.nii %s"%outfile + yield assert_equal, masker.cmdline, "mri_mask a.nii b.nii %s" %outfile # Now test that optional inputs get formatted properly masker.inputs.mask_thresh = 2 - yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 a.nii b.nii %s"%outfile + yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 a.nii b.nii %s" %outfile masker.inputs.use_abs = True - yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 -abs a.nii b.nii %s"%outfile + yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 -abs a.nii b.nii %s" %outfile # Now clean up clean_directory(testdir, origdir) + @skipif(no_freesurfer) def test_surfshots(): @@ -218,7 +227,7 @@ def test_surfshots(): yield assert_equal, fotos.cmdline, "tksurfer fsaverage lh pial -tcl snapshots.tcl" # Test identity - schmotos = fs.SurfaceSnapshots(subject_id="mysubject",hemi="rh",surface="white") + schmotos = fs.SurfaceSnapshots(subject_id="mysubject", hemi="rh", surface="white") yield assert_not_equal, fotos, schmotos # Test that the tcl script gets written diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 3879e1e2b4..7a79386c7e 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -34,7 +34,7 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): source_file = File(exists=True, 
mandatory=True, argstr="--mov %s", - desc="volume to sample values from") + desc="volume to sample values from") reference_file = File(exists=True, argstr="--ref %s", desc="reference volume (default is orig.mgz)") @@ -56,21 +56,21 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): argstr="--rot %.3f %.3f %.3f", desc="rotation angles (in degrees) to apply to reg matrix") apply_trans = traits.Tuple(traits.Float, traits.Float, traits.Float, - argstr="--trans %.3f %.3f %.3f", - desc="translation (in mm) to apply to reg matrix") + argstr="--trans %.3f %.3f %.3f", + desc="translation (in mm) to apply to reg matrix") override_reg_subj = traits.Bool(argstr="--srcsubject %s", requires=["subject_id"], - desc="override the subject in the reg file header") + desc="override the subject in the reg file header") sampling_method = traits.Enum("point", "max", "average", mandatory=True, argstr="%s", xor=["projection_stem"], requires=["sampling_range", "sampling_units"], - desc="how to sample -- at a point or at the max or average over a range") + desc="how to sample -- at a point or at the max or average over a range") sampling_range = traits.Either(traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), desc="sampling range - a point or a tuple of (min, max, step)") sampling_units = traits.Enum("mm", "frac", desc="sampling range type -- either 'mm' or 'frac'") projection_stem = traits.String(mandatory=True, xor=["sampling_method"], - desc="stem for precomputed linear estimates and volume fractions") + desc="stem for precomputed linear estimates and volume fractions") smooth_vol = traits.Float(argstr="--fwhm %.3f", desc="smooth input volume (mm fwhm)") smooth_surf = traits.Float(argstr="--surf-fwhm %.3f", desc="smooth output surface (mm fwhm)") @@ -84,12 +84,12 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): desc="label file to mask output with") float2int_method = traits.Enum("round", "tkregister", argstr="--float2int %s", - desc="method to convert reg matrix values (default is round)") + desc="method to convert reg matrix values (default is round)") fix_tk_reg = traits.Bool(argstr="--fixtkreg", desc="make reg matrix round-compatible") subject_id = traits.String(desc="subject id") target_subject = traits.String(argstr="--trgsubject %s", - desc="sample to surface of different subject than source") + desc="sample to surface of different subject than source") surf_reg = traits.Bool(argstr="--surfreg", requires=["target_subject"], desc="use surface registration to target subject") ico_order = traits.Int(argstr="--icoorder %d", requires=["target_subject"], @@ -110,7 +110,7 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): desc="save image with number of hits at each voxel") hits_type = traits.Enum(filetypes, argstr="--srchit_type", desc="hits file type") vox_file = traits.Either(traits.Bool, File, argstr="--nvox %s", - desc="text file with the number of voxels intersecting the surface") + desc="text file with the number of voxels intersecting the surface") class SampleToSurfaceOutputSpec(TraitedSpec): @@ -386,8 +386,8 @@ def _gen_filename(self, name): class Surface2VolTransformInputSpec(FSTraitedSpec): source_file = File(exists=True, argstr='--surfval %s', - copyfile=False, mandatory=True, - desc='This is the source of the surface values') + copyfile=False, mandatory=True, + desc='This is the source of the surface values') hemi = traits.Str(argstr='--hemi %s', mandatory=True, desc='hemisphere of data') transformed_file = File(name_template="%s_asVol.nii", desc='Output volume', @@ -398,7 +398,7 @@ 
class Surface2VolTransformInputSpec(FSTraitedSpec): desc='tkRAS-to-tkRAS matrix (tkregister2 format)', xor=['subject_id']) template_file = File(exists=True, argstr='--template %s', - desc='Output template volume') + desc='Output template volume') mkmask = traits.Bool(desc='make a mask instead of loading surface values', argstr='--mkmask') vertexvol_file = File(name_template="%s_asVol_vertex.nii", @@ -414,7 +414,7 @@ class Surface2VolTransformInputSpec(FSTraitedSpec): subjects_dir = traits.Str(argstr='--sd %s', desc=('freesurfer subjects directory defaults to ' '$SUBJECTS_DIR')) - subject_id = traits.Str(argstr='--identity %s',desc='subject id', + subject_id = traits.Str(argstr='--identity %s', desc='subject id', xor=['reg_file']) @@ -518,9 +518,9 @@ class SurfaceSnapshotsInputSpec(FSTraitedSpec): overlay_reg = traits.File(exists=True, argstr="-overlay-reg %s", xor=reg_xors, desc="registration matrix file to register overlay to surface") identity_reg = traits.Bool(argstr="-overlay-reg-identity", xor=reg_xors, - desc="use the identity matrix to register the overlay to the surface") + desc="use the identity matrix to register the overlay to the surface") mni152_reg = traits.Bool(argstr="-mni152reg", xor=reg_xors, - desc="use to display a volume in MNI152 space on the average subject") + desc="use to display a volume in MNI152 space on the average subject") overlay_range = traits.Either(traits.Float, traits.Tuple(traits.Float, traits.Float), @@ -528,7 +528,7 @@ class SurfaceSnapshotsInputSpec(FSTraitedSpec): desc="overlay range--either min, (min, max) or (min, mid, max)", argstr="%s") overlay_range_offset = traits.Float(argstr="-foffset %.3f", - desc="overlay range will be symettric around offset value") + desc="overlay range will be symettric around offset value") truncate_overlay = traits.Bool(argstr="-truncphaseflag 1", desc="truncate the overlay display") @@ -541,12 +541,12 @@ class SurfaceSnapshotsInputSpec(FSTraitedSpec): annot_file = File(exists=True, argstr="-annotation %s", xor=["annot_name"], desc="path to annotation file to display") annot_name = traits.String(argstr="-annotation %s", xor=["annot_file"], - desc="name of annotation to display (must be in $subject/label directory") + desc="name of annotation to display (must be in $subject/label directory") label_file = File(exists=True, argstr="-label %s", xor=["label_name"], desc="path to label file to display") label_name = traits.String(argstr="-label %s", xor=["label_file"], - desc="name of label to display (must be in $subject/label directory") + desc="name of label to display (must be in $subject/label directory") colortable = File(exists=True, argstr="-colortable %s", desc="load colortable file") label_under = traits.Bool(argstr="-labels-under", desc="draw label/annotation under overlay") @@ -565,15 +565,15 @@ class SurfaceSnapshotsInputSpec(FSTraitedSpec): six_images = traits.Bool(desc="also take anterior and posterior snapshots") screenshot_stem = traits.String(desc="stem to use for screenshot file names") stem_template_args = traits.List(traits.String, requires=["screenshot_stem"], - desc="input names to use as arguments for a string-formated stem template") + desc="input names to use as arguments for a string-formated stem template") tcl_script = File(exists=True, argstr="%s", genfile=True, - desc="override default screenshot script") + desc="override default screenshot script") class SurfaceSnapshotsOutputSpec(TraitedSpec): snapshots = OutputMultiPath(File(exists=True), - desc="tiff images of the surface from different 
perspectives") + desc="tiff images of the surface from different perspectives") class SurfaceSnapshots(FSCommand): @@ -780,22 +780,22 @@ class MRIsConvertInputSpec(FSTraitedSpec): Uses Freesurfer's mris_convert to convert surface files to various formats """ annot_file = File(exists=True, argstr="--annot %s", - desc="input is annotation or gifti label data") + desc="input is annotation or gifti label data") parcstats_file = File(exists=True, argstr="--parcstats %s", - desc="infile is name of text file containing label/val pairs") + desc="infile is name of text file containing label/val pairs") label_file = File(exists=True, argstr="--label %s", - desc="infile is .label file, label is name of this label") + desc="infile is .label file, label is name of this label") scalarcurv_file = File(exists=True, argstr="-c %s", - desc="input is scalar curv overlay file (must still specify surface)") + desc="input is scalar curv overlay file (must still specify surface)") functional_file = File(exists=True, argstr="-f %s", - desc="input is functional time-series or other multi-frame data (must specify surface)") + desc="input is functional time-series or other multi-frame data (must specify surface)") labelstats_outfile = File(exists=False, argstr="--labelstats %s", - desc="outfile is name of gifti file to which label stats will be written") + desc="outfile is name of gifti file to which label stats will be written") patch = traits.Bool(argstr="-p", desc="input is a patch, not a full surface") rescale = traits.Bool(argstr="-r", desc="rescale vertex xyz so total area is same as group average") @@ -811,11 +811,11 @@ class MRIsConvertInputSpec(FSTraitedSpec): in_file = File(exists=True, mandatory=True, position=-2, argstr='%s', desc='File to read/convert') out_file = File(argstr='./%s', position=-1, genfile=True, desc='output filename or True to generate one') - #Not really sure why the ./ is necessary but the module fails without it + # Not really sure why the ./ is necessary but the module fails without it out_datatype = traits.Enum("ico", "tri", "stl", "vtk", "gii", "mgh", "mgz", mandatory=True, - desc="These file formats are supported: ASCII: .asc" \ - "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz") + desc="These file formats are supported: ASCII: .asc" \ + "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz") class MRIsConvertOutputSpec(TraitedSpec): @@ -869,6 +869,7 @@ def _gen_outfilename(self): return name + ext + "_converted." + self.inputs.out_datatype + class MRITessellateInputSpec(FSTraitedSpec): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume @@ -876,11 +877,12 @@ class MRITessellateInputSpec(FSTraitedSpec): in_file = File(exists=True, mandatory=True, position=-3, argstr='%s', desc='Input volume to tesselate voxels from.') label_value = traits.Int(position=-2, argstr='%d', mandatory=True, - desc='Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)') + desc='Label value which to tesselate from the input volume. 
(integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)') out_file = File(argstr='./%s', position=-1, genfile=True, desc='output filename or True to generate one') tesselate_all_voxels = traits.Bool(argstr='-a', desc='Tessellate the surface of all voxels with different labels') use_real_RAS_coordinates = traits.Bool(argstr='-n', desc='Saves surface with real RAS coordinates where c_(r,a,s) != 0') + class MRITessellateOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume @@ -937,13 +939,13 @@ class MRIPretessInputSpec(FSTraitedSpec): out_file = File(position=-1, argstr='%s', genfile=True, desc=('the output file after mri_pretess.')) - nocorners = traits.Bool(False, argstr='-nocorners', desc=('do not remove corner configurations' - ' in addition to edge ones.')) + ' in addition to edge ones.')) keep = traits.Bool(False, argstr='-keep', desc=('keep WM edits')) test = traits.Bool(False, argstr='-test', desc=('adds a voxel that should be removed by ' - 'mri_pretess. The value of the voxel is set to that of an ON-edited WM, ' - 'so it should be kept with -keep. The output will NOT be saved.')) + 'mri_pretess. The value of the voxel is set to that of an ON-edited WM, ' + 'so it should be kept with -keep. The output will NOT be saved.')) + class MRIPretessOutputSpec(TraitedSpec): out_file = File(exists=True, desc='output file after mri_pretess') @@ -1002,11 +1004,12 @@ class MRIMarchingCubesInputSpec(FSTraitedSpec): in_file = File(exists=True, mandatory=True, position=1, argstr='%s', desc='Input volume to tesselate voxels from.') label_value = traits.Int(position=2, argstr='%d', mandatory=True, - desc='Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)') + desc='Label value which to tesselate from the input volume. 
(integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)') connectivity_value = traits.Int(1, position=-1, argstr='%d', usedefault=True, - desc='Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)') + desc='Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)') out_file = File(argstr='./%s', position=-2, genfile=True, desc='output filename or True to generate one') + class MRIMarchingCubesOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_mc to create surfaces by tessellating a given input volume @@ -1050,6 +1053,7 @@ def _gen_outfilename(self): _, name, ext = split_filename(self.inputs.in_file) return os.path.abspath(name + ext + '_' + str(self.inputs.label_value)) + class SmoothTessellationInputSpec(FSTraitedSpec): """ This program smooths the tessellation of a surface using 'mris_smooth' @@ -1073,6 +1077,7 @@ class SmoothTessellationInputSpec(FSTraitedSpec): out_curvature_file = File(argstr='-c %s', desc='Write curvature to ?h.curvname (default "curv")') out_area_file = File(argstr='-b %s', desc='Write area to ?h.areaname (default "area")') + class SmoothTessellationOutputSpec(TraitedSpec): """ This program smooths the tessellation of a surface using 'mris_smooth' @@ -1163,6 +1168,7 @@ def _list_outputs(self): outputs['average_subject_name'] = self.inputs.out_name return outputs + class ExtractMainComponentInputSpec(CommandLineInputSpec): in_file = File(exists=True, mandatory=True, argstr='%s', position=1, desc='input surface file') @@ -1170,9 +1176,11 @@ class ExtractMainComponentInputSpec(CommandLineInputSpec): argstr='%s', position=2, desc='surface containing main component') + class ExtractMainComponentOutputSpec(TraitedSpec): out_file = File(exists=True, desc='surface containing main component') + class ExtractMainComponent(CommandLine): """Extract the main component of a tesselated surface @@ -1186,9 +1194,9 @@ class ExtractMainComponent(CommandLine): """ - _cmd='mris_extract_main_component' - input_spec=ExtractMainComponentInputSpec - output_spec=ExtractMainComponentOutputSpec + _cmd = 'mris_extract_main_component' + input_spec = ExtractMainComponentInputSpec + output_spec = ExtractMainComponentOutputSpec class Tkregister2InputSpec(FSTraitedSpec): @@ -1277,4 +1285,4 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath(name + '_smoothed' + ext) \ No newline at end of file + return os.path.abspath(name + '_smoothed' + ext) diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index 86a53d8efe..3e13ddccbe 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -35,9 +35,9 @@ class DTIFitInputSpec(FSLCommandInputSpec): mask = File(exists=True, desc='bet binary mask file', argstr='-m %s', position=2, mandatory=True) bvecs = File(exists=True, desc='b vectors file', - argstr='-r %s', position=3, mandatory=True) + argstr='-r %s', position=3, mandatory=True) bvals = File(exists=True, desc='b values file', - argstr='-b %s', position=4, mandatory=True) + argstr='-b %s', position=4, mandatory=True) min_z = traits.Int(argstr='-z %d', desc='min z') max_z = traits.Int(argstr='-Z %d', desc='max z') min_y = traits.Int(argstr='-y %d', desc='min y') @@ -45,7 +45,7 @@ class DTIFitInputSpec(FSLCommandInputSpec): min_x = traits.Int(argstr='-x %d', desc='min x') max_x = traits.Int(argstr='-X %d', desc='max x') save_tensor = traits.Bool(desc='save the elements of the tensor', - argstr='--save_tensor') + 
argstr='--save_tensor') sse = traits.Bool(desc='output sum of squared errors', argstr='--sse') cni = File(exists=True, desc='input counfound regressors', argstr='--cni=%s') little_bit = traits.Bool(desc='only process small area of brain', @@ -53,6 +53,7 @@ class DTIFitInputSpec(FSLCommandInputSpec): gradnonlin = File(exists=True, argstr='--gradnonlin=%s', desc='gradient non linearities') + class DTIFitOutputSpec(TraitedSpec): V1 = File(exists=True, desc='path/name of file with the 1st eigenvector') V2 = File(exists=True, desc='path/name of file with the 2nd eigenvector') @@ -96,7 +97,7 @@ def _list_outputs(self): for k in list(outputs.keys()): if k not in ('outputtype', 'environ', 'args'): if k != 'tensor' or (isdefined(self.inputs.save_tensor) - and self.inputs.save_tensor): + and self.inputs.save_tensor): outputs[k] = self._gen_fname(self.inputs.base_name, suffix='_' + k) return outputs @@ -169,16 +170,16 @@ class FSLXCommandInputSpec(FSLCommandInputSpec): class FSLXCommandOutputSpec(TraitedSpec): dyads = OutputMultiPath(File(exists=True), desc=('Mean of PDD distribution' - ' in vector form.')) + ' in vector form.')) fsamples = OutputMultiPath(File(exists=True), desc=('Samples from the ' - 'distribution on f anisotropy')) + 'distribution on f anisotropy')) mean_dsamples = File(exists=True, desc='Mean of distribution on diffusivity d') mean_fsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' - 'distribution on f anisotropy')) - mean_S0samples = File(exists=True,desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) - mean_tausamples = File(exists=True,desc=('Mean of distribution on ' - 'tau samples (only with rician noise)')) + 'distribution on f anisotropy')) + mean_S0samples = File(exists=True, desc=('Mean of distribution on T2w' + 'baseline signal intensity S0')) + mean_tausamples = File(exists=True, desc=('Mean of distribution on ' + 'tau samples (only with rician noise)')) phsamples = OutputMultiPath(File(exists=True), desc=('phi samples, per fiber')) thsamples = OutputMultiPath(File(exists=True), desc=('theta samples, per fiber')) @@ -222,17 +223,17 @@ def _list_outputs(self, out_dir=None): for i in range(1, n_fibres + 1): outputs['fsamples'].append(self._gen_fname('f%dsamples' % i, - cwd=out_dir)) + cwd=out_dir)) outputs['mean_fsamples'].append(self._gen_fname(('mean_f%d' - 'samples') % i, cwd=out_dir)) + 'samples') % i, cwd=out_dir)) for i in range(1, n_fibres + 1): outputs['dyads'].append(self._gen_fname('dyads%d' % i, - cwd=out_dir)) + cwd=out_dir)) outputs['phsamples'].append(self._gen_fname('ph%dsamples' % i, - cwd=out_dir)) + cwd=out_dir)) outputs['thsamples'].append(self._gen_fname('th%dsamples' % i, - cwd=out_dir)) + cwd=out_dir)) return outputs @@ -263,29 +264,29 @@ class BEDPOSTX5InputSpec(FSLXCommandInputSpec): out_dir = Directory('bedpostx', mandatory=True, desc='output directory', usedefault=True, position=1, argstr='%s') gradnonlin = traits.Bool(False, argstr='-g', desc=('consider gradient ' - 'nonlinearities, default off')) + 'nonlinearities, default off')) use_gpu = traits.Bool(False, desc='Use the GPU version of bedpostx') class BEDPOSTX5OutputSpec(TraitedSpec): mean_dsamples = File(exists=True, desc='Mean of distribution on diffusivity d') mean_fsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' - 'distribution on f anisotropy')) - mean_S0samples = File(exists=True,desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + 'distribution on f anisotropy')) + mean_S0samples = File(exists=True, desc=('Mean of 
distribution on T2w' + 'baseline signal intensity S0')) mean_phsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' - 'distribution on phi')) + 'distribution on phi')) mean_thsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' - 'distribution on theta')) + 'distribution on theta')) merged_thsamples = OutputMultiPath(File(exists=True), desc=('Samples from ' - 'the distribution on theta')) + 'the distribution on theta')) merged_phsamples = OutputMultiPath(File(exists=True), desc=('Samples from ' - 'the distribution on phi')) + 'the distribution on phi')) merged_fsamples = OutputMultiPath(File(exists=True), desc=('Samples from the distribution on ' 'anisotropic volume fraction')) dyads = OutputMultiPath(File(exists=True), desc=('Mean of PDD distribution' - ' in vector form.')) + ' in vector form.')) dyads_dispersion = OutputMultiPath(File(exists=True), desc=('Dispersion')) @@ -367,28 +368,27 @@ def _list_outputs(self): for k in single_out: outputs[k] = self._gen_fname(k, cwd=self._out_dir) - for k in multi_out: outputs[k] = [] for i in range(1, n_fibres + 1): outputs['merged_thsamples'].append(self._gen_fname('merged_th%dsamples' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['merged_fsamples'].append(self._gen_fname('merged_f%dsamples' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['merged_phsamples'].append(self._gen_fname('merged_ph%dsamples' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['mean_thsamples'].append(self._gen_fname('mean_th%dsamples' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['mean_phsamples'].append(self._gen_fname('mean_ph%dsamples' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['mean_fsamples'].append(self._gen_fname('mean_f%dsamples' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['dyads'].append(self._gen_fname('dyads%d' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) outputs['dyads_dispersion'].append(self._gen_fname('dyads%d_dispersion' % i, - cwd=self._out_dir)) + cwd=self._out_dir)) return outputs @@ -417,17 +417,17 @@ class XFibres4InputSpec(FSLCommandInputSpec): n_fibres = traits.Range(low=1, argstr="--nfibres=%d", desc="Maximum nukmber of fibres to fit in each voxel") fudge = traits.Int(argstr="--fudge=%d", - desc="ARD fudge factor") + desc="ARD fudge factor") n_jumps = traits.Range(low=1, argstr="--njumps=%d", desc="Num of jumps to be made by MCMC") burn_in = traits.Range(low=0, argstr="--burnin=%d", desc="Total num of jumps at start of MCMC to be discarded") burn_in_no_ard = traits.Range(low=0, argstr="--burninnoard=%d", - desc="num of burnin jumps before the ard is imposed") + desc="num of burnin jumps before the ard is imposed") sample_every = traits.Range(low=0, argstr="--sampleevery=%d", - desc="Num of jumps for each sample (MCMC)") + desc="Num of jumps for each sample (MCMC)") update_proposal_every = traits.Range(low=1, argstr="--updateproposalevery=%d", - desc="Num of jumps for each update to the proposal density std (MCMC)") + desc="Num of jumps for each update to the proposal density std (MCMC)") seed = traits.Int(argstr="--seed=%d", desc="seed for pseudo random number generator") model = traits.Int(argstr="--model=%d", desc="Which model to use. \ 1=mono-exponential (default and required for single shell). 
2=continous \ @@ -441,9 +441,9 @@ class XFibres4InputSpec(FSLCommandInputSpec): no_spat = traits.Bool(argstr="--nospat", desc="Initialise with tensor, not spatially", xor=_xor_inputs2) non_linear = traits.Bool(argstr="--nonlinear", desc="Initialise with nonlinear fitting", xor=_xor_inputs2) force_dir = traits.Bool(True, - desc='use the actual directory name given - i.e. ' + - 'do not add + to make a new directory', - argstr='--forcedir', usedefault=True) + desc='use the actual directory name given - i.e. ' + + 'do not add + to make a new directory', + argstr='--forcedir', usedefault=True) class XFibres4OutputSpec(TraitedSpec): @@ -522,7 +522,7 @@ class BEDPOSTX4InputSpec(XFibres4InputSpec): desc=('model choice: monoexponential (1) or ' 'multiexponential (2). ')) nlgradient = traits.Bool(False, argstr='-g', desc=('consider gradient' - 'nonlinearities, default off')) + 'nonlinearities, default off')) no_cuda = traits.Bool(False, argstr='-c', desc=('do not use CUDA capable hardware/queue ' '(if found)')) @@ -533,7 +533,7 @@ class BEDPOSTX4OutputSpec(TraitedSpec): desc='path/name of directory with all ' + 'bedpostx output files for this subject') xfms_directory = Directory(exists=True, - desc='path/name of directory with the ' + + desc='path/name of directory with the ' + 'tranformation matrices') merged_thsamples = traits.List(File(exists=True), desc='a list of path/name of 4D volume ' + @@ -543,15 +543,15 @@ class BEDPOSTX4OutputSpec(TraitedSpec): desc='a list of path/name of file with ' 'samples from the distribution on phi') merged_fsamples = traits.List(File(exists=True), - desc='a list of path/name of 4D volume ' + - 'with samples from the distribution ' + - 'on anisotropic volume fraction') + desc='a list of path/name of 4D volume ' + + 'with samples from the distribution ' + + 'on anisotropic volume fraction') mean_thsamples = traits.List(File(exists=True), desc='a list of path/name of 3D volume with mean of distribution on theta') mean_phsamples = traits.List(File(exists=True), desc='a list of path/name of 3D volume with mean of distribution on phi') mean_fsamples = traits.List(File(exists=True), - desc='a list of path/name of 3D volume with mean of distribution on f anisotropy') + desc='a list of path/name of 3D volume with mean of distribution on f anisotropy') dyads = traits.List(File(exists=True), desc='a list of path/name of mean of PDD distribution in vector form') @@ -587,7 +587,7 @@ def _get_bedpostx_dir(self): def _run_interface(self, runtime, correct_return_codes=[0]): - #create the subject specific bpx_directory + # create the subject specific bpx_directory bpx_directory = self._get_bedpostx_dir() if not os.path.exists(bpx_directory): os.makedirs(bpx_directory) @@ -625,7 +625,7 @@ def _list_outputs(self): for n in range(self.inputs.fibres): outputs['merged_thsamples'].append(self._gen_fname('merged_th' + repr(n + 1) + 'samples', suffix='', - cwd=outputs['bpx_out_directory'])) + cwd=outputs['bpx_out_directory'])) outputs['merged_phsamples'].append(self._gen_fname('merged_ph' + repr(n + 1) + 'samples', suffix='', cwd=outputs['bpx_out_directory'])) @@ -633,8 +633,8 @@ def _list_outputs(self): + 'samples', suffix='', cwd=outputs['bpx_out_directory'])) outputs['mean_thsamples'].append(self._gen_fname('mean_th' + repr(n + 1) - + 'samples', suffix='', - cwd=outputs['bpx_out_directory'])) + + 'samples', suffix='', + cwd=outputs['bpx_out_directory'])) outputs['mean_phsamples'].append(self._gen_fname('mean_ph' + repr(n + 1) + 'samples', suffix='', cwd=outputs['bpx_out_directory'])) 
@@ -642,13 +642,12 @@ def _list_outputs(self): + 'samples', suffix='', cwd=outputs['bpx_out_directory'])) outputs['dyads'].append(self._gen_fname('dyads' + repr(n + 1), - suffix='', - cwd=outputs['bpx_out_directory'])) + suffix='', + cwd=outputs['bpx_out_directory'])) return outputs -if (Info.version() and - LooseVersion(Info.version()) >= LooseVersion('5.0.0')): +if (Info.version() and LooseVersion(Info.version()) >= LooseVersion('5.0.0')): CurrentXFibres = XFibres5 CurrentBEDPOST = BEDPOSTX5 else: @@ -671,24 +670,24 @@ class ProbTrackXBaseInputSpec(FSLCommandInputSpec): samples_base_name = traits.Str("merged", desc='the rootname/base_name for samples files', argstr='--samples=%s', usedefault=True) mask = File(exists=True, desc='bet binary mask file in diffusion space', - argstr='-m %s', mandatory=True) + argstr='-m %s', mandatory=True) seed = traits.Either(File(exists=True), traits.List(File(exists=True)), traits.List(traits.List(traits.Int(), minlen=3, maxlen=3)), desc='seed volume(s), or voxel(s)' + 'or freesurfer label file', argstr='--seed=%s', mandatory=True) target_masks = InputMultiPath(File(exits=True), desc='list of target masks - ' + - 'required for seeds_to_targets classification', argstr='--targetmasks=%s') + 'required for seeds_to_targets classification', argstr='--targetmasks=%s') waypoints = File(exists=True, desc='waypoint mask or ascii list of waypoint masks - ' + 'only keep paths going through ALL the masks', argstr='--waypoints=%s') network = traits.Bool(desc='activate network mode - only keep paths going through ' + 'at least one seed mask (required if multiple seed masks)', argstr='--network') seed_ref = File(exists=True, desc='reference vol to define seed space in ' + - 'simple mode - diffusion space assumed if absent', - argstr='--seedref=%s') + 'simple mode - diffusion space assumed if absent', + argstr='--seedref=%s') out_dir = Directory(exists=True, argstr='--dir=%s', - desc='directory to put the final volumes in', genfile=True) + desc='directory to put the final volumes in', genfile=True) force_dir = traits.Bool(True, desc='use the actual directory name given - i.e. 
' + 'do not add + to make a new directory', argstr='--forcedir', usedefault=True) @@ -696,33 +695,33 @@ class ProbTrackXBaseInputSpec(FSLCommandInputSpec): correct_path_distribution = traits.Bool(desc='correct path distribution for the length of the pathways', argstr='--pd') os2t = traits.Bool(desc='Outputs seeds to targets', argstr='--os2t') - #paths_file = File('nipype_fdtpaths', usedefault=True, argstr='--out=%s', + # paths_file = File('nipype_fdtpaths', usedefault=True, argstr='--out=%s', # desc='produces an output file (default is fdt_paths)') avoid_mp = File(exists=True, desc='reject pathways passing through locations given by this mask', argstr='--avoid=%s') stop_mask = File(exists=True, argstr='--stop=%s', - desc='stop tracking at locations given by this mask file') + desc='stop tracking at locations given by this mask file') xfm = File(exists=True, argstr='--xfm=%s', desc='transformation matrix taking seed space to DTI space ' + - '(either FLIRT matrix or FNIRT warp_field) - default is identity') + '(either FLIRT matrix or FNIRT warp_field) - default is identity') inv_xfm = File(argstr='--invxfm=%s', desc='transformation matrix taking DTI space to seed' + - ' space (compulsory when using a warp_field for seeds_to_dti)') + ' space (compulsory when using a warp_field for seeds_to_dti)') n_samples = traits.Int(5000, argstr='--nsamples=%d', desc='number of samples - default=5000', usedefault=True) n_steps = traits.Int(argstr='--nsteps=%d', desc='number of steps per sample - default=2000') dist_thresh = traits.Float(argstr='--distthresh=%.3f', desc='discards samples shorter than ' + - 'this threshold (in mm - default=0)') + 'this threshold (in mm - default=0)') c_thresh = traits.Float(argstr='--cthr=%.3f', desc='curvature threshold - default=0.2') sample_random_points = traits.Bool(argstr='--sampvox', desc='sample random points within seed voxels') step_length = traits.Float(argstr='--steplength=%.3f', desc='step_length in mm - default=0.5') loop_check = traits.Bool(argstr='--loopcheck', desc='perform loop_checks on paths -' + - ' slower, but allows lower curvature threshold') + ' slower, but allows lower curvature threshold') use_anisotropy = traits.Bool(argstr='--usef', desc='use anisotropy to constrain tracking') rand_fib = traits.Enum(0, 1, 2, 3, argstr='--randfib=%d', desc='options: 0 - default, 1 - to randomly sample' + - ' initial fibres (with f > fibthresh), 2 - to sample in ' + - 'proportion fibres (with f>fibthresh) to f, 3 - to sample ALL ' + - 'populations at random (even if f fibthresh), 2 - to sample in ' + + 'proportion fibres (with f>fibthresh) to f, 3 - to sample ALL ' + + 'populations at random (even if f-1: + if line.find("set fmri(outputdir)") > -1: try: - outputdir_spec=line.split('"')[-2] + outputdir_spec = line.split('"')[-2] if os.path.exists(outputdir_spec): - outputs['feat_dir']=outputdir_spec + outputs['feat_dir'] = outputdir_spec except: pass @@ -406,7 +406,7 @@ def _list_outputs(self): outputs['feat_dir'] = glob(os.path.join(os.getcwd(), '*ica'))[0] else: outputs['feat_dir'] = glob(os.path.join(os.getcwd(), '*feat'))[0] - print('Outputs from FEATmodel:',outputs) + print('Outputs from FEATmodel:', outputs) return outputs @@ -491,7 +491,7 @@ class FILMGLSInputSpec(FSLCommandInputSpec): mask_size = traits.Int(argstr='-ms %d', desc="susan mask size") brightness_threshold = traits.Range(low=0, argstr='-epith %d', - desc='susan brightness threshold, otherwise it is estimated') + desc='susan brightness threshold, otherwise it is estimated') full_data = 
traits.Bool(argstr='-v', desc='output full data') _estimate_xor = ['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'] @@ -513,6 +513,7 @@ class FILMGLSInputSpec(FSLCommandInputSpec): results_dir = Directory('results', argstr='-rn %s', usedefault=True, desc='directory to store results in') + class FILMGLSInputSpec505(FSLCommandInputSpec): in_file = File(exists=True, mandatory=True, position=-3, argstr='--in=%s', desc='input data file') @@ -524,8 +525,8 @@ class FILMGLSInputSpec505(FSLCommandInputSpec): desc='Smooth auto corr estimates') mask_size = traits.Int(argstr='--ms=%d', desc="susan mask size") brightness_threshold = traits.Range(low=0, argstr='--epith=%d', - desc=('susan brightness threshold, ' - 'otherwise it is estimated')) + desc=('susan brightness threshold, ' + 'otherwise it is estimated')) full_data = traits.Bool(argstr='-v', desc='output full data') _estimate_xor = ['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'] @@ -550,6 +551,7 @@ class FILMGLSInputSpec505(FSLCommandInputSpec): results_dir = Directory('results', argstr='--rn=%s', usedefault=True, desc='directory to store results in') + class FILMGLSInputSpec507(FILMGLSInputSpec505): threshold = traits.Float(default=-1000., argstr='--thr=%f', position=-1, usedefault=True, @@ -564,6 +566,7 @@ class FILMGLSInputSpec507(FILMGLSInputSpec505): desc=("input surface for autocorr smoothing in " "surface-based analyses")) + class FILMGLSOutputSpec(TraitedSpec): param_estimates = OutputMultiPath(File(exists=True), desc='Parameter estimates for each column of the design matrix') @@ -983,7 +986,7 @@ class ContrastMgrInputSpec(FSLCommandInputSpec): copyfile=False, mandatory=True, desc='summary of residuals, See Woolrich, et. al., 2001') contrast_num = traits.Range(low=1, argstr='-cope', - desc='contrast number to start labeling copes from') + desc='contrast number to start labeling copes from') suffix = traits.Str(argstr='-suffix %s', desc='suffix to put on the end of the cope filename before the contrast number, default is nothing') diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index 6b92acbf8d..65540646e1 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -240,11 +240,11 @@ class FASTInputSpec(FSLCommandInputSpec): argstr='-N') use_priors = traits.Bool(desc='use priors throughout', argstr='-P') # must also set -a!, - # mutually inclusive?? - # No, conditional - # mandatory... need to - # figure out how to - # handle with traits. + # mutually inclusive?? + # No, conditional + # mandatory... need to + # figure out how to + # handle with traits. 
segment_iters = traits.Range(low=1, high=50, desc='number of segmentation-initialisation' ' iterations', @@ -1023,12 +1023,12 @@ def _gen_filename(self, name): class SliceTimerInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr='--in=%s', - mandatory=True, position=0, - desc='filename of input timeseries') + mandatory=True, position=0, + desc='filename of input timeseries') out_file = File(argstr='--out=%s', genfile=True, - desc='filename of output timeseries', hash_files=False) + desc='filename of output timeseries', hash_files=False) index_dir = traits.Bool(argstr='--down', - desc='slice indexing from top to bottom') + desc='slice indexing from top to bottom') time_repetition = traits.Float(argstr='--repeat=%f', desc='Specify TR of data - default is 3s') slice_direction = traits.Enum(1, 2, 3, argstr='--direction=%d', @@ -1071,7 +1071,7 @@ def _list_outputs(self): out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_fname(self.inputs.in_file, - suffix='_st') + suffix='_st') outputs['slice_time_corrected_file'] = os.path.abspath(out_file) return outputs @@ -1087,23 +1087,23 @@ class SUSANInputSpec(FSLCommandInputSpec): desc='filename of input timeseries') brightness_threshold = traits.Float(argstr='%.10f', position=2, mandatory=True, - desc='brightness threshold and should be greater than ' - 'noise level and less than contrast of edges to ' - 'be preserved.') + desc='brightness threshold and should be greater than ' + 'noise level and less than contrast of edges to ' + 'be preserved.') fwhm = traits.Float(argstr='%.10f', position=3, mandatory=True, desc='fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))') dimension = traits.Enum(3, 2, argstr='%d', position=4, usedefault=True, desc='within-plane (2) or fully 3D (3)') use_median = traits.Enum(1, 0, argstr='%d', position=5, usedefault=True, - desc='whether to use a local median filter in the cases where single-point noise is detected') + desc='whether to use a local median filter in the cases where single-point noise is detected') usans = traits.List( traits.Tuple(File(exists=True), traits.Float), maxlen=2, - argstr='', position=6, default=[], usedefault=True, - desc='determines whether the smoothing area (USAN) is to be ' - 'found from secondary images (0, 1 or 2). A negative ' - 'value for any brightness threshold will auto-set the ' - 'threshold at 10% of the robust range') + argstr='', position=6, default=[], usedefault=True, + desc='determines whether the smoothing area (USAN) is to be ' + 'found from secondary images (0, 1 or 2). 
A negative ' + 'value for any brightness threshold will auto-set the ' + 'threshold at 10% of the robust range') out_file = File(argstr='%s', position=-1, genfile=True, desc='output file name', hash_files=False) @@ -1150,7 +1150,7 @@ def _list_outputs(self): out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_fname(self.inputs.in_file, - suffix='_smooth') + suffix='_smooth') outputs['smoothed_file'] = os.path.abspath(out_file) return outputs @@ -1185,7 +1185,7 @@ class FUGUEInputSpec(FSLCommandInputSpec): asym_se_time = traits.Float(argstr='--asym=%.10f', desc='set the fieldmap asymmetric spin echo time (sec)') median_2dfilter = traits.Bool(argstr='--median', - desc='apply 2D median filtering') + desc='apply 2D median filtering') despike_2dfilter = traits.Bool(argstr='--despike', desc='apply a 2D de-spiking filter') no_gap_fill = traits.Bool(argstr='--nofill', @@ -1203,7 +1203,7 @@ class FUGUEInputSpec(FSLCommandInputSpec): pava = traits.Bool(argstr='--pava', desc='apply monotonic enforcement via PAVA') despike_threshold = traits.Float(argstr='--despikethreshold=%s', - desc='specify the threshold for de-spiking (default=3.0)') + desc='specify the threshold for de-spiking (default=3.0)') unwarp_direction = traits.Enum('x', 'y', 'z', 'x-', 'y-', 'z-', argstr='--unwarpdir=%s', desc='specifies direction of warping (default y)') @@ -1231,8 +1231,6 @@ class FUGUEInputSpec(FSLCommandInputSpec): desc='saves the unmasked fieldmap when using --savefmap') - - class FUGUEOutputSpec(TraitedSpec): unwarped_file = File(desc='unwarped file') warped_file = File(desc='forward warped file') @@ -1351,7 +1349,7 @@ def _parse_inputs(self, skip=None): trait_spec.name_source = 'shift_in_file' else: raise RuntimeError(('Either phasemap_in_file, shift_in_file or ' - 'fmap_in_file must be set.')) + 'fmap_in_file must be set.')) if vsm_save_unmasked: trait_spec.name_template = '%s_vsm_unmasked' @@ -1364,7 +1362,7 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.fmap_out_file): fmap_save_masked = (isdefined(self.inputs.save_fmap) and self.inputs.save_fmap) fmap_save_unmasked = (isdefined(self.inputs.save_unmasked_fmap) and - self.inputs.save_unmasked_fmap) + self.inputs.save_unmasked_fmap) if (fmap_save_masked or fmap_save_unmasked): trait_spec = self.inputs.trait('fmap_out_file') @@ -1378,7 +1376,7 @@ def _parse_inputs(self, skip=None): trait_spec.name_source = 'fmap_in_file' else: raise RuntimeError(('Either phasemap_in_file, shift_in_file or ' - 'fmap_in_file must be set.')) + 'fmap_in_file must be set.')) if fmap_save_unmasked: trait_spec.name_template = '%s_fieldmap_unmasked' @@ -1601,4 +1599,4 @@ def _gen_mesh_names(self, name, structures): bvar = prefix + '-' + struct + '_first.bvars' bvars.append(op.abspath(bvar)) return bvars - return None \ No newline at end of file + return None diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py index 22caa313fd..8e55381660 100644 --- a/nipype/interfaces/fsl/tests/test_FILMGLS.py +++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py @@ -1,47 +1,48 @@ from nipype.testing import assert_equal from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec + def test_filmgls(): - input_map = dict(args = dict(argstr='%s',), - autocorr_estimate_only = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-ac',), - autocorr_noestimate = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 
'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-noest',), - brightness_threshold = dict(argstr='-epith %d',), - design_file = dict(argstr='%s',), - environ = dict(usedefault=True,), - fit_armodel = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-ar',), - full_data = dict(argstr='-v',), - ignore_exception = dict(usedefault=True,), - in_file = dict(mandatory=True,argstr='%s',), - mask_size = dict(argstr='-ms %d',), - multitaper_product = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-mt %d',), - output_pwdata = dict(argstr='-output_pwdata',), - output_type = dict(), - results_dir = dict(usedefault=True,argstr='-rn %s',), - smooth_autocorr = dict(argstr='-sa',), - threshold = dict(argstr='%f',), - tukey_window = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-tukey %d',), - use_pava = dict(argstr='-pava',), - ) - input_map2 = dict(args = dict(argstr='%s',), - autocorr_estimate_only = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--ac',), - autocorr_noestimate = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--noest',), - brightness_threshold = dict(argstr='--epith=%d',), - design_file = dict(argstr='--pd=%s',), - environ = dict(usedefault=True,), - fit_armodel = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--ar',), - full_data = dict(argstr='-v',), - ignore_exception = dict(usedefault=True,), - in_file = dict(mandatory=True,argstr='--in=%s',), - mask_size = dict(argstr='--ms=%d',), - multitaper_product = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--mt=%d',), - output_pwdata = dict(argstr='--outputPWdata',), - output_type = dict(), - results_dir = dict(argstr='--rn=%s',usedefault=True,), - smooth_autocorr = dict(argstr='--sa',), - terminal_output = dict(), - threshold = dict(usedefault=True,argstr='--thr=%f',), - tukey_window = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--tukey=%d',), - use_pava = dict(argstr='--pava',), + input_map = dict(args=dict(argstr='%s',), + autocorr_estimate_only=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='-ac',), + autocorr_noestimate=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='-noest',), + brightness_threshold=dict(argstr='-epith %d',), + design_file=dict(argstr='%s',), + environ=dict(usedefault=True,), + fit_armodel=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='-ar',), + full_data=dict(argstr='-v',), + ignore_exception=dict(usedefault=True,), + in_file=dict(mandatory=True, argstr='%s',), + mask_size=dict(argstr='-ms %d',), + multitaper_product=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='-mt %d',), + 
output_pwdata=dict(argstr='-output_pwdata',), + output_type=dict(), + results_dir=dict(usedefault=True, argstr='-rn %s',), + smooth_autocorr=dict(argstr='-sa',), + threshold=dict(argstr='%f',), + tukey_window=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='-tukey %d',), + use_pava=dict(argstr='-pava',), + ) + input_map2 = dict(args=dict(argstr='%s',), + autocorr_estimate_only=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='--ac',), + autocorr_noestimate=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='--noest',), + brightness_threshold=dict(argstr='--epith=%d',), + design_file=dict(argstr='--pd=%s',), + environ=dict(usedefault=True,), + fit_armodel=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='--ar',), + full_data=dict(argstr='-v',), + ignore_exception=dict(usedefault=True,), + in_file=dict(mandatory=True, argstr='--in=%s',), + mask_size=dict(argstr='--ms=%d',), + multitaper_product=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='--mt=%d',), + output_pwdata=dict(argstr='--outputPWdata',), + output_type=dict(), + results_dir=dict(argstr='--rn=%s', usedefault=True,), + smooth_autocorr=dict(argstr='--sa',), + terminal_output=dict(), + threshold=dict(usedefault=True, argstr='--thr=%f',), + tukey_window=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='--tukey=%d',), + use_pava=dict(argstr='--pava',), ) instance = FILMGLS() if isinstance(instance.inputs, FILMGLSInputSpec): diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py index 3aa0f5aae1..a8ec012905 100644 --- a/nipype/interfaces/fsl/tests/test_base.py +++ b/nipype/interfaces/fsl/tests/test_base.py @@ -9,7 +9,7 @@ from nipype.interfaces.fsl import check_fsl, no_fsl -@skipif(no_fsl)#skip if fsl not installed) +@skipif(no_fsl) # skip if fsl not installed) def test_fslversion(): ver = fsl.Info.version() if ver: @@ -17,7 +17,8 @@ def test_fslversion(): ver = ver.split('.') yield assert_true, ver[0] in ['4', '5'] -@skipif(no_fsl)#skip if fsl not installed) + +@skipif(no_fsl) # skip if fsl not installed) def test_fsloutputtype(): types = list(fsl.Info.ftypes.keys()) orig_out_type = fsl.Info.output_type() @@ -31,7 +32,8 @@ def test_outputtype_to_ext(): yield assert_raises, KeyError, fsl.Info.output_type_to_ext, 'JUNK' -@skipif(no_fsl)#skip if fsl not installed) + +@skipif(no_fsl) # skip if fsl not installed) def test_FSLCommand(): # Most methods in FSLCommand are tested in the subclasses. Only # testing the one item that is not. 
@@ -39,7 +41,8 @@ def test_FSLCommand(): res = cmd.run() yield assert_equal, type(res), InterfaceResult -@skipif(no_fsl)#skip if fsl not installed) + +@skipif(no_fsl) # skip if fsl not installed) def test_FSLCommand2(): # Check default output type and environ cmd = fsl.FSLCommand(command='junk') @@ -56,25 +59,26 @@ def test_FSLCommand2(): # Setting class outputtype should not effect existing instances yield assert_not_equal, cmdinst.inputs.output_type, out_type -@skipif(no_fsl)#skip if fsl not installed) + +@skipif(no_fsl) # skip if fsl not installed) def test_gen_fname(): # Test _gen_fname method of FSLCommand - cmd = fsl.FSLCommand(command = 'junk',output_type = 'NIFTI_GZ') + cmd = fsl.FSLCommand(command='junk', output_type='NIFTI_GZ') pth = os.getcwd() # just the filename - fname = cmd._gen_fname('foo.nii.gz',suffix='_fsl') + fname = cmd._gen_fname('foo.nii.gz', suffix='_fsl') desired = os.path.join(pth, 'foo_fsl.nii.gz') yield assert_equal, fname, desired # filename with suffix - fname = cmd._gen_fname('foo.nii.gz', suffix = '_brain') + fname = cmd._gen_fname('foo.nii.gz', suffix='_brain') desired = os.path.join(pth, 'foo_brain.nii.gz') yield assert_equal, fname, desired # filename with suffix and working directory - fname = cmd._gen_fname('foo.nii.gz', suffix = '_brain', cwd = '/data') + fname = cmd._gen_fname('foo.nii.gz', suffix='_brain', cwd='/data') desired = os.path.join('/data', 'foo_brain.nii.gz') yield assert_equal, fname, desired # filename with suffix and no file extension change - fname = cmd._gen_fname('foo.nii.gz', suffix = '_brain.mat', - change_ext = False) + fname = cmd._gen_fname('foo.nii.gz', suffix='_brain.mat', + change_ext=False) desired = os.path.join(pth, 'foo_brain.mat') yield assert_equal, fname, desired diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index 5d114e4d8d..0b7f27c165 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -98,7 +98,7 @@ def test_dtifit2(): dti.inputs.max_z = 50 yield assert_equal, dti.cmdline, \ - 'dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10'%(filelist[0], + 'dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10' %(filelist[0], filelist[1], filelist[0], filelist[1]) @@ -169,7 +169,7 @@ def test_randomise2(): results = rand3.run(input_4D='infile3', output_rootname='outfile3') yield assert_equal, results.runtime.cmdline, \ - 'randomise -i infile3 -o outfile3' + 'randomise -i infile3 -o outfile3' # test arguments for opt_map opt_map = {'demean_data': ('-D', True), @@ -252,7 +252,7 @@ def test_Randomise_parallel(): results = rand3.run(input_4D='infile3', output_rootname='outfile3') yield assert_equal, results.runtime.cmdline, \ - 'randomise_parallel -i infile3 -o outfile3' + 'randomise_parallel -i infile3 -o outfile3' # test arguments for opt_map opt_map = {'demean_data': ('-D', True), @@ -286,11 +286,11 @@ def test_Randomise_parallel(): 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), 'list_num_voxel_EVs_pos': ('--vxl=' \ - + repr([1, 2, 3, 4]), + + repr([1, 2, 3, 4]), repr([1, 2, 3, 4])), 'list_img_voxel_EVs': ('--vxf=' \ - + repr([6, 7, 8, 9, 3]), - repr([6, 7, 8, 9, 3]))} + + repr([6, 7, 8, 9, 3]), + repr([6, 7, 8, 9, 3]))} for name, settings in list(opt_map.items()): rand4 = fsl.Randomise_parallel(input_4D='infile', @@ -366,12 +366,12 @@ def test_Vec_reg(): vrg3 = fsl.VecReg() results = vrg3.run(infile='infile3', - outfile='outfile3', - refVolName='MNI152', - 
affineTmat='tmat3.mat',) + outfile='outfile3', + refVolName='MNI152', + affineTmat='tmat3.mat',) yield assert_equal, results.runtime.cmdline, \ - 'vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat' + 'vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat' yield assert_not_equal, results.runtime.returncode, 0 yield assert_equal, results.interface.inputs.infile, 'infile3' yield assert_equal, results.interface.inputs.outfile, 'outfile3' @@ -379,13 +379,13 @@ def test_Vec_reg(): yield assert_equal, results.interface.inputs.affineTmat, 'tmat3.mat' # test arguments for opt_map - opt_map = { 'verbose': ('-v', True), - 'helpDoc': ('-h', True), - 'tensor': ('--tensor', True), - 'affineTmat': ('-t Tmat', 'Tmat'), - 'warpFile': ('-w wrpFile', 'wrpFile'), - 'interpolation': ('--interp=sinc', 'sinc'), - 'brainMask': ('-m mask', 'mask')} + opt_map = {'verbose': ('-v', True), + 'helpDoc': ('-h', True), + 'tensor': ('--tensor', True), + 'affineTmat': ('-t Tmat', 'Tmat'), + 'warpFile': ('-w wrpFile', 'wrpFile'), + 'interpolation': ('--interp=sinc', 'sinc'), + 'brainMask': ('-m mask', 'mask')} for name, settings in list(opt_map.items()): vrg4 = fsl.VecReg(infile='infile', outfile='outfile', @@ -440,7 +440,7 @@ def test_tbss_skeleton(): # First by implicit argument skeletor.inputs.skeleton_file = True yield assert_equal, skeletor.cmdline, \ - "tbss_skeleton -i a.nii -o %s"%os.path.join(newdir, "a_skeleton.nii") + "tbss_skeleton -i a.nii -o %s" %os.path.join(newdir, "a_skeleton.nii") # Now with a specific name skeletor.inputs.skeleton_file = "old_boney.nii" @@ -455,22 +455,23 @@ def test_tbss_skeleton(): # But we can set what we need bones.inputs.threshold = 0.2 bones.inputs.distance_map = "b.nii" - bones.inputs.data_file = "b.nii" # Even though that's silly + bones.inputs.data_file = "b.nii" # Even though that's silly # Now we get a command line yield assert_equal, bones.cmdline, \ - "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s"%(Info.standard_image("LowerCingulum_1mm.nii.gz"), - os.path.join(newdir, "b_skeletonised.nii")) + "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" %(Info.standard_image("LowerCingulum_1mm.nii.gz"), + os.path.join(newdir, "b_skeletonised.nii")) # Can we specify a mask? 
bones.inputs.use_cingulum_mask = Undefined bones.inputs.search_mask_file = "a.nii" yield assert_equal, bones.cmdline, \ - "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s"%os.path.join(newdir, "b_skeletonised.nii") + "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" %os.path.join(newdir, "b_skeletonised.nii") # Looks good; clean up clean_directory(newdir, olddir) + @skipif(no_fsl) def test_distancemap(): mapper = fsl.DistanceMap() @@ -487,18 +488,18 @@ def test_distancemap(): mapper.inputs.in_file = "a.nii" # It should - yield assert_equal, mapper.cmdline, "distancemap --out=%s --in=a.nii"%os.path.join(newdir, "a_dstmap.nii") + yield assert_equal, mapper.cmdline, "distancemap --out=%s --in=a.nii" %os.path.join(newdir, "a_dstmap.nii") # And we should be able to write out a maxima map mapper.inputs.local_max_file = True yield assert_equal, mapper.cmdline, \ - "distancemap --out=%s --in=a.nii --localmax=%s"%(os.path.join(newdir, "a_dstmap.nii"), + "distancemap --out=%s --in=a.nii --localmax=%s" %(os.path.join(newdir, "a_dstmap.nii"), os.path.join(newdir, "a_lclmax.nii")) # And call it whatever we want mapper.inputs.local_max_file = "max.nii" yield assert_equal, mapper.cmdline, \ - "distancemap --out=%s --in=a.nii --localmax=max.nii"%os.path.join(newdir, "a_dstmap.nii") + "distancemap --out=%s --in=a.nii --localmax=max.nii" %os.path.join(newdir, "a_dstmap.nii") # Not much else to do here clean_directory(newdir, olddir) diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index e0cf810a2d..ec209808c2 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -9,25 +9,27 @@ import nibabel as nb -from nipype.testing import ( assert_equal, assert_not_equal, +from nipype.testing import (assert_equal, assert_not_equal, assert_raises, skipif) import nipype.interfaces.fsl.epi as fsl from nipype.interfaces.fsl import no_fsl + def create_files_in_directory(): outdir = os.path.realpath(mkdtemp()) cwd = os.getcwd() os.chdir(outdir) - filelist = ['a.nii','b.nii'] + filelist = ['a.nii', 'b.nii'] for f in filelist: hdr = nb.Nifti1Header() - shape = (3,3,3,4) + shape = (3, 3, 3, 4) hdr.set_data_shape(shape) img = np.random.random(shape) - nb.save(nb.Nifti1Image(img,np.eye(4),hdr), - os.path.join(outdir,f)) + nb.save(nb.Nifti1Image(img, np.eye(4), hdr), + os.path.join(outdir, f)) return filelist, outdir, cwd + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) @@ -50,11 +52,11 @@ def test_eddy_correct2(): eddy.inputs.in_file = filelist[0] eddy.inputs.out_file = 'foo_eddc.nii' eddy.inputs.ref_num = 100 - yield assert_equal, eddy.cmdline, 'eddy_correct %s foo_eddc.nii 100'%filelist[0] + yield assert_equal, eddy.cmdline, 'eddy_correct %s foo_eddc.nii 100' %filelist[0] # .run based parameter setting eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file='foo_ec.nii', ref_num=20) - yield assert_equal, eddy2.cmdline, 'eddy_correct %s foo_ec.nii 20'%filelist[0] + yield assert_equal, eddy2.cmdline, 'eddy_correct %s foo_ec.nii 20' %filelist[0] # test arguments for opt_map # eddy_correct class doesn't have opt_map{} diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index 299c5157c4..592b1f90f4 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -28,23 +28,25 @@ def set_output_type(fsl_output_type): return prev_output_type + def create_files_in_directory(): testdir = os.path.realpath(mkdtemp()) 
origdir = os.getcwd() os.chdir(testdir) - filelist = ['a.nii','b.nii'] + filelist = ['a.nii', 'b.nii'] for f in filelist: hdr = nb.Nifti1Header() - shape = (3,3,3,4) + shape = (3, 3, 3, 4) hdr.set_data_shape(shape) img = np.random.random(shape) - nb.save(nb.Nifti1Image(img,np.eye(4),hdr), - os.path.join(testdir,f)) + nb.save(nb.Nifti1Image(img, np.eye(4), hdr), + os.path.join(testdir, f)) out_ext = Info.output_type_to_ext(Info.output_type()) return filelist, testdir, origdir, out_ext + def clean_directory(testdir, origdir): if os.path.exists(testdir): rmtree(testdir) @@ -70,20 +72,20 @@ def test_maths_base(fsl_output_type=None): out_file = "a_maths%s" % out_ext # Now test the most basic command line - yield assert_equal, maths.cmdline, "fslmaths a.nii %s"%os.path.join(testdir, out_file) + yield assert_equal, maths.cmdline, "fslmaths a.nii %s" %os.path.join(testdir, out_file) # Now test that we can set the various data types - dtypes = ["float","char","int","short","double","input"] - int_cmdline = "fslmaths -dt %s a.nii " + os.path.join(testdir, out_file) - out_cmdline = "fslmaths a.nii " + os.path.join(testdir, out_file) + " -odt %s" - duo_cmdline = "fslmaths -dt %s a.nii " + os.path.join(testdir, out_file) + " -odt %s" + dtypes = ["float", "char", "int", "short", "double", "input"] + int_cmdline = "fslmaths -dt %s a.nii " + os.path.join(testdir, out_file) + out_cmdline = "fslmaths a.nii " + os.path.join(testdir, out_file) + " -odt %s" + duo_cmdline = "fslmaths -dt %s a.nii " + os.path.join(testdir, out_file) + " -odt %s" for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype) - yield assert_equal, foo.cmdline, int_cmdline%dtype + yield assert_equal, foo.cmdline, int_cmdline %dtype bar = fsl.MathsCommand(in_file="a.nii", output_datatype=dtype) yield assert_equal, bar.cmdline, out_cmdline % dtype foobar = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype, output_datatype=dtype) - yield assert_equal, foobar.cmdline, duo_cmdline%(dtype, dtype) + yield assert_equal, foobar.cmdline, duo_cmdline %(dtype, dtype) # Test that we can ask for an outfile name maths.inputs.out_file = "b.nii" @@ -93,6 +95,7 @@ def test_maths_base(fsl_output_type=None): clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_changedt(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) @@ -115,23 +118,24 @@ def test_changedt(fsl_output_type=None): yield assert_raises, ValueError, cdt.run # Now test that we can set the various data types - dtypes = ["float","char","int","short","double","input"] - cmdline = "fslmaths a.nii b.nii -odt %s" + dtypes = ["float", "char", "int", "short", "double", "input"] + cmdline = "fslmaths a.nii b.nii -odt %s" for dtype in dtypes: - foo = fsl.MathsCommand(in_file="a.nii",out_file="b.nii",output_datatype=dtype) - yield assert_equal, foo.cmdline, cmdline%dtype + foo = fsl.MathsCommand(in_file="a.nii", out_file="b.nii", output_datatype=dtype) + yield assert_equal, foo.cmdline, cmdline %dtype # Clean up our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_threshold(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - thresh = fsl.Threshold(in_file="a.nii",out_file="b.nii") + thresh = fsl.Threshold(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, thresh.cmd, "fslmaths" @@ -143,19 +147,19 @@ def test_threshold(fsl_output_type=None): 
cmdline = "fslmaths a.nii %s b.nii" for val in [0, 0., -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: thresh.inputs.thresh = val - yield assert_equal, thresh.cmdline, cmdline%"-thr %.10f"%val + yield assert_equal, thresh.cmdline, cmdline %"-thr %.10f" %val - val = "%.10f"%42 - thresh = fsl.Threshold(in_file="a.nii",out_file="b.nii",thresh=42,use_robust_range=True) - yield assert_equal, thresh.cmdline, cmdline%("-thrp "+val) + val = "%.10f" %42 + thresh = fsl.Threshold(in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True) + yield assert_equal, thresh.cmdline, cmdline %("-thrp "+val) thresh.inputs.use_nonzero_voxels = True - yield assert_equal, thresh.cmdline, cmdline%("-thrP "+val) - thresh = fsl.Threshold(in_file="a.nii",out_file="b.nii",thresh=42,direction="above") - yield assert_equal, thresh.cmdline, cmdline%("-uthr "+val) - thresh.inputs.use_robust_range=True - yield assert_equal, thresh.cmdline, cmdline%("-uthrp "+val) + yield assert_equal, thresh.cmdline, cmdline %("-thrP "+val) + thresh = fsl.Threshold(in_file="a.nii", out_file="b.nii", thresh=42, direction="above") + yield assert_equal, thresh.cmdline, cmdline %("-uthr "+val) + thresh.inputs.use_robust_range = True + yield assert_equal, thresh.cmdline, cmdline %("-uthrp "+val) thresh.inputs.use_nonzero_voxels = True - yield assert_equal, thresh.cmdline, cmdline%("-uthrP "+val) + yield assert_equal, thresh.cmdline, cmdline %("-uthrP "+val) # Clean up our mess clean_directory(testdir, origdir) @@ -168,7 +172,7 @@ def test_meanimage(fsl_output_type=None): files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - meaner = fsl.MeanImage(in_file="a.nii",out_file="b.nii") + meaner = fsl.MeanImage(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, meaner.cmd, "fslmaths" @@ -178,25 +182,26 @@ def test_meanimage(fsl_output_type=None): # Test the other dimensions cmdline = "fslmaths a.nii -%smean b.nii" - for dim in ["X","Y","Z","T"]: - meaner.inputs.dimension=dim - yield assert_equal, meaner.cmdline, cmdline%dim + for dim in ["X", "Y", "Z", "T"]: + meaner.inputs.dimension = dim + yield assert_equal, meaner.cmdline, cmdline %dim # Test the auto naming meaner = fsl.MeanImage(in_file="a.nii") - yield assert_equal, meaner.cmdline, "fslmaths a.nii -Tmean %s"%os.path.join(testdir, "a_mean%s" % out_ext) + yield assert_equal, meaner.cmdline, "fslmaths a.nii -Tmean %s" %os.path.join(testdir, "a_mean%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_maximage(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - maxer = fsl.MaxImage(in_file="a.nii",out_file="b.nii") + maxer = fsl.MaxImage(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, maxer.cmd, "fslmaths" @@ -206,25 +211,26 @@ def test_maximage(fsl_output_type=None): # Test the other dimensions cmdline = "fslmaths a.nii -%smax b.nii" - for dim in ["X","Y","Z","T"]: - maxer.inputs.dimension=dim - yield assert_equal, maxer.cmdline, cmdline%dim + for dim in ["X", "Y", "Z", "T"]: + maxer.inputs.dimension = dim + yield assert_equal, maxer.cmdline, cmdline %dim # Test the auto naming maxer = fsl.MaxImage(in_file="a.nii") - yield assert_equal, maxer.cmdline, "fslmaths a.nii -Tmax %s"%os.path.join(testdir, "a_max%s" % out_ext) + yield assert_equal, maxer.cmdline, "fslmaths a.nii -Tmax %s" %os.path.join(testdir, "a_max%s" % out_ext) # Clean up 
our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_smooth(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - smoother = fsl.IsotropicSmooth(in_file="a.nii",out_file="b.nii") + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, smoother.cmd, "fslmaths" @@ -235,27 +241,28 @@ def test_smooth(fsl_output_type=None): # Test smoothing kernels cmdline = "fslmaths a.nii -s %.5f b.nii" for val in [0, 1., 1, 25, 0.5, 8 / 3.]: - smoother = fsl.IsotropicSmooth(in_file="a.nii",out_file="b.nii",sigma=val) - yield assert_equal, smoother.cmdline, cmdline%val - smoother = fsl.IsotropicSmooth(in_file="a.nii",out_file="b.nii",fwhm=val) + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", sigma=val) + yield assert_equal, smoother.cmdline, cmdline %val + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", fwhm=val) val = float(val) / np.sqrt(8 * np.log(2)) - yield assert_equal, smoother.cmdline, cmdline%val + yield assert_equal, smoother.cmdline, cmdline %val # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) - yield assert_equal, smoother.cmdline, "fslmaths a.nii -s %.5f %s"%(5, os.path.join(testdir, "a_smooth%s" % out_ext)) + yield assert_equal, smoother.cmdline, "fslmaths a.nii -s %.5f %s" %(5, os.path.join(testdir, "a_smooth%s" % out_ext)) # Clean up our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_mask(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - masker = fsl.ApplyMask(in_file="a.nii",out_file="c.nii") + masker = fsl.ApplyMask(in_file="a.nii", out_file="c.nii") # Test the underlying command yield assert_equal, masker.cmd, "fslmaths" @@ -268,7 +275,7 @@ def test_mask(fsl_output_type=None): yield assert_equal, masker.cmdline, "fslmaths a.nii -mas b.nii c.nii" # Test auto name generation - masker = fsl.ApplyMask(in_file="a.nii",mask_file="b.nii") + masker = fsl.ApplyMask(in_file="a.nii", mask_file="b.nii") yield assert_equal, masker.cmdline, "fslmaths a.nii -mas b.nii "+os.path.join(testdir, "a_masked%s" % out_ext) # Clean up our mess @@ -282,7 +289,7 @@ def test_dilation(fsl_output_type=None): files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - diller = fsl.DilateImage(in_file="a.nii",out_file="b.nii") + diller = fsl.DilateImage(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, diller.cmd, "fslmaths" @@ -294,18 +301,18 @@ def test_dilation(fsl_output_type=None): for op in ["mean", "modal", "max"]: cv = dict(mean="M", modal="D", max="F") diller.inputs.operation = op - yield assert_equal, diller.cmdline, "fslmaths a.nii -dil%s b.nii"%cv[op] + yield assert_equal, diller.cmdline, "fslmaths a.nii -dil%s b.nii" %cv[op] # Now test the different kernel options for k in ["3D", "2D", "box", "boxv", "gauss", "sphere"]: for size in [1, 1.5, 5]: diller.inputs.kernel_shape = k diller.inputs.kernel_size = size - yield assert_equal, diller.cmdline, "fslmaths a.nii -kernel %s %.4f -dilF b.nii"%(k, size) + yield assert_equal, diller.cmdline, "fslmaths a.nii -kernel %s %.4f -dilF b.nii" %(k, size) # Test that we can use a file kernel - f = open("kernel.txt","w").close() - del f # Shut pyflakes up + f = open("kernel.txt", "w").close() + del f # 
Shut pyflakes up diller.inputs.kernel_shape = "file" diller.inputs.kernel_size = Undefined diller.inputs.kernel_file = "kernel.txt" @@ -313,19 +320,20 @@ def test_dilation(fsl_output_type=None): # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") - yield assert_equal, dil.cmdline, "fslmaths a.nii -dilF %s"%os.path.join(testdir, "a_dil%s" % out_ext) + yield assert_equal, dil.cmdline, "fslmaths a.nii -dilF %s" %os.path.join(testdir, "a_dil%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_erosion(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - erode = fsl.ErodeImage(in_file="a.nii",out_file="b.nii") + erode = fsl.ErodeImage(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, erode.cmd, "fslmaths" @@ -339,19 +347,20 @@ def test_erosion(fsl_output_type=None): # Test that we don't need to request an out name erode = fsl.ErodeImage(in_file="a.nii") - yield assert_equal, erode.cmdline, "fslmaths a.nii -ero %s"%os.path.join(testdir, "a_ero%s" % out_ext) + yield assert_equal, erode.cmdline, "fslmaths a.nii -ero %s" %os.path.join(testdir, "a_ero%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_spatial_filter(fsl_output_type=None): prev_type = set_output_type(fsl_output_type) files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - filter = fsl.SpatialFilter(in_file="a.nii",out_file="b.nii") + filter = fsl.SpatialFilter(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, filter.cmd, "fslmaths" @@ -362,11 +371,11 @@ def test_spatial_filter(fsl_output_type=None): # Test the different operations for op in ["mean", "meanu", "median"]: filter.inputs.operation = op - yield assert_equal, filter.cmdline, "fslmaths a.nii -f%s b.nii"%op + yield assert_equal, filter.cmdline, "fslmaths a.nii -f%s b.nii" %op # Test that we don't need to ask for an out name filter = fsl.SpatialFilter(in_file="a.nii", operation="mean") - yield assert_equal, filter.cmdline, "fslmaths a.nii -fmean %s"%os.path.join(testdir, "a_filt%s" % out_ext) + yield assert_equal, filter.cmdline, "fslmaths a.nii -fmean %s" %os.path.join(testdir, "a_filt%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -379,7 +388,7 @@ def test_unarymaths(fsl_output_type=None): files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - maths = fsl.UnaryMaths(in_file="a.nii",out_file="b.nii") + maths = fsl.UnaryMaths(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, maths.cmd, "fslmaths" @@ -391,12 +400,12 @@ def test_unarymaths(fsl_output_type=None): ops = ["exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index"] for op in ops: maths.inputs.operation = op - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii"%op + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii" %op # Test that we don't need to ask for an out file for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %s"%(op, os.path.join(testdir, "a_%s%s"%(op, out_ext))) + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %s" %(op, os.path.join(testdir, "a_%s%s" %(op, out_ext))) # Clean up our mess clean_directory(testdir, origdir) @@ 
-409,7 +418,7 @@ def test_binarymaths(fsl_output_type=None): files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - maths = fsl.BinaryMaths(in_file="a.nii",out_file="c.nii") + maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii") # Test the underlying command yield assert_equal, maths.cmd, "fslmaths" @@ -422,19 +431,18 @@ def test_binarymaths(fsl_output_type=None): operands = ["b.nii", -2, -0.5, 0, .123456, np.pi, 500] for op in ops: for ent in operands: - maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation = op) + maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation=op) if ent == "b.nii": maths.inputs.operand_file = ent - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii c.nii"%op + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii c.nii" %op else: maths.inputs.operand_value = ent - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %.8f c.nii"%(op, ent) - + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %.8f c.nii" %(op, ent) # Test that we don't need to ask for an out file for op in ops: maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii %s"%(op,os.path.join(testdir, "a_maths%s" % out_ext)) + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii %s" %(op, os.path.join(testdir, "a_maths%s" % out_ext)) # Clean up our mess clean_directory(testdir, origdir) @@ -447,7 +455,7 @@ def test_multimaths(fsl_output_type=None): files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - maths = fsl.MultiImageMaths(in_file="a.nii",out_file="c.nii") + maths = fsl.MultiImageMaths(in_file="a.nii", out_file="c.nii") # Test the underlying command yield assert_equal, maths.cmd, "fslmaths" @@ -462,12 +470,12 @@ def test_multimaths(fsl_output_type=None): "-mas %s -add %s"] for ostr in opstrings: maths.inputs.op_string = ostr - yield assert_equal, maths.cmdline, "fslmaths a.nii %s c.nii"%ostr%("a.nii", "b.nii") + yield assert_equal, maths.cmdline, "fslmaths a.nii %s c.nii" %ostr %("a.nii", "b.nii") # Test that we don't need to ask for an out file maths = fsl.MultiImageMaths(in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"]) yield assert_equal, maths.cmdline, \ - "fslmaths a.nii -add b.nii -mul 5 %s"%os.path.join(testdir, "a_maths%s" % out_ext) + "fslmaths a.nii -add b.nii -mul 5 %s" %os.path.join(testdir, "a_maths%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -480,7 +488,7 @@ def test_tempfilt(fsl_output_type=None): files, testdir, origdir, out_ext = create_files_in_directory() # Get the command - filt = fsl.TemporalFilter(in_file="a.nii",out_file="b.nii") + filt = fsl.TemporalFilter(in_file="a.nii", out_file="b.nii") # Test the underlying command yield assert_equal, filt.cmd, "fslmaths" @@ -493,17 +501,18 @@ def test_tempfilt(fsl_output_type=None): for win in windows: filt.inputs.highpass_sigma = win[0] filt.inputs.lowpass_sigma = win[1] - yield assert_equal, filt.cmdline, "fslmaths a.nii -bptf %.6f %.6f b.nii"%win + yield assert_equal, filt.cmdline, "fslmaths a.nii -bptf %.6f %.6f b.nii" %win # Test that we don't need to ask for an out file - filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma = 64) + filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) yield assert_equal, filt.cmdline, \ - "fslmaths a.nii -bptf 64.000000 -1.000000 %s"%os.path.join(testdir,"a_filt%s" % out_ext) + "fslmaths a.nii -bptf 64.000000 -1.000000 %s" 
%os.path.join(testdir, "a_filt%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) set_output_type(prev_type) + @skipif(no_fsl) def test_all_again(): # Rerun tests with all output file types diff --git a/nipype/interfaces/fsl/tests/test_model.py b/nipype/interfaces/fsl/tests/test_model.py index 655c2101e6..c40814139c 100644 --- a/nipype/interfaces/fsl/tests/test_model.py +++ b/nipype/interfaces/fsl/tests/test_model.py @@ -16,6 +16,7 @@ tmp_dir = None cwd = None + @skipif(no_fsl) def setup_infile(): global tmp_infile, tmp_dir, cwd @@ -27,23 +28,25 @@ def setup_infile(): os.chdir(tmp_dir) return tmp_infile, tmp_dir + def teardown_infile(tmp_dir): os.chdir(cwd) shutil.rmtree(tmp_dir) + @skipif(no_fsl) def test_MultipleRegressDesign(): _, tp_dir = setup_infile() foo = fsl.MultipleRegressDesign() - foo.inputs.regressors = dict(voice_stenght=[1,1,1],age=[0.2,0.4,0.5],BMI=[1,-1,2]) - con1 = ['voice_and_age','T',['age','voice_stenght'],[0.5,0.5]] - con2 = ['just_BMI','T',['BMI'],[1]] - foo.inputs.contrasts = [con1,con2,['con3','F',[con1,con2]]] + foo.inputs.regressors = dict(voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], BMI=[1, -1, 2]) + con1 = ['voice_and_age', 'T', ['age', 'voice_stenght'], [0.5, 0.5]] + con2 = ['just_BMI', 'T', ['BMI'], [1]] + foo.inputs.contrasts = [con1, con2, ['con3', 'F', [con1, con2]]] res = foo.run() - yield assert_equal, res.outputs.design_mat, os.path.join(os.getcwd(),'design.mat') - yield assert_equal, res.outputs.design_con, os.path.join(os.getcwd(),'design.con') - yield assert_equal, res.outputs.design_fts, os.path.join(os.getcwd(),'design.fts') - yield assert_equal, res.outputs.design_grp, os.path.join(os.getcwd(),'design.grp') + yield assert_equal, res.outputs.design_mat, os.path.join(os.getcwd(), 'design.mat') + yield assert_equal, res.outputs.design_con, os.path.join(os.getcwd(), 'design.con') + yield assert_equal, res.outputs.design_fts, os.path.join(os.getcwd(), 'design.fts') + yield assert_equal, res.outputs.design_grp, os.path.join(os.getcwd(), 'design.grp') design_mat_expected_content = """/NumWaves 3 /NumPoints 3 diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 8abf68fa3e..8ae16e6c28 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -41,7 +41,7 @@ def teardown_infile(tmp_dir): shutil.rmtree(tmp_dir) # test BET -#@with_setup(setup_infile, teardown_infile) +# @with_setup(setup_infile, teardown_infile) # broken in nose with generators @@ -85,8 +85,8 @@ def func(): 'threshold': ('-t', True), 'mesh': ('-e', True), 'surfaces': ('-A', True) - #'verbose': ('-v', True), - #'flags': ('--i-made-this-up', '--i-made-this-up'), + # 'verbose': ('-v', True), + # 'flags': ('--i-made-this-up', '--i-made-this-up'), } # Currently we don't test -R, -S, -B, -Z, -F, -A or -A2 diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index 1e74fc3f7e..b55099c1a9 100644 --- a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -89,6 +89,8 @@ def test_fslmerge(): # Fslmerge class doesn't have a filled opt_map{} # test fslmath + + @skipif(no_fsl) def test_fslmaths(): filelist, outdir, cwd, _ = create_files_in_directory() diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 7324d58b8e..214f308e03 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -44,9 +44,11 @@ class 
CopyGeomInputSpec(FSLCommandInputSpec): ignore_dims = traits.Bool(desc=('Do not copy image dimensions'), argstr='-d', position="-1") + class CopyGeomOutputSpec(TraitedSpec): out_file = File(exists=True, desc="image with new geometry header") + class CopyGeom(FSLCommand): """Use fslcpgeom to copy the header geometry information to another image. Copy certain parts of the header information (image dimensions, voxel dimensions, @@ -1257,13 +1259,16 @@ class SigLossInputSpec(FSLCommandInputSpec): desc='brain mask file') echo_time = traits.Float(argstr='--te=%f', desc='echo time in seconds') - slice_direction = traits.Enum('x','y','z', + slice_direction = traits.Enum('x', 'y', 'z', argstr='-d %s', desc='slicing direction') + + class SigLossOuputSpec(TraitedSpec): out_file = File(exists=True, desc='signal loss estimate file') + class SigLoss(FSLCommand): """Estimates signal loss from a field map (in rad/s) @@ -1286,12 +1291,12 @@ def _list_outputs(self): outputs['out_file'] = self.inputs.out_file if not isdefined(outputs['out_file']) and \ isdefined(self.inputs.in_file): - outputs['out_file']=self._gen_fname(self.inputs.in_file, + outputs['out_file'] = self._gen_fname(self.inputs.in_file, suffix='_sigloss') return outputs def _gen_filename(self, name): - if name=='out_file': + if name == 'out_file': return self._list_outputs()['out_file'] return None @@ -1338,7 +1343,6 @@ def _list_outputs(self): return outputs - class InvWarpInputSpec(FSLCommandInputSpec): warp = File(exists=True, argstr='--warp=%s', mandatory=True, desc=('Name of file containing warp-coefficients/fields. This ' @@ -1419,6 +1423,7 @@ class InvWarp(FSLCommand): _cmd = 'invwarp' + class ComplexInputSpec(FSLCommandInputSpec): complex_in_file = File(exists=True, argstr="%s", position=2) complex_in_file2 = File(exists=True, argstr="%s", position=3) @@ -1429,11 +1434,11 @@ class ComplexInputSpec(FSLCommandInputSpec): phase_in_file = File(exists=True, argstr='%s', position=3) _ofs = ['complex_out_file', - 'magnitude_out_file','phase_out_file', - 'real_out_file','imaginary_out_file'] - _conversion = ['real_polar','real_cartesian', - 'complex_cartesian','complex_polar', - 'complex_split','complex_merge',] + 'magnitude_out_file', 'phase_out_file', + 'real_out_file', 'imaginary_out_file'] + _conversion = ['real_polar', 'real_cartesian', + 'complex_cartesian', 'complex_polar', + 'complex_split', 'complex_merge', ] complex_out_file = File(genfile=True, argstr="%s", position=-3, xor=_ofs+_conversion[:2]) @@ -1450,26 +1455,27 @@ class ComplexInputSpec(FSLCommandInputSpec): end_vol = traits.Int(position=-1, argstr='%d') real_polar = traits.Bool( - argstr = '-realpolar', xor = _conversion, position=1,) + argstr='-realpolar', xor=_conversion, position=1,) # requires=['complex_in_file','magnitude_out_file','phase_out_file']) real_cartesian = traits.Bool( - argstr = '-realcartesian', xor = _conversion, position=1,) + argstr='-realcartesian', xor=_conversion, position=1,) # requires=['complex_in_file','real_out_file','imaginary_out_file']) complex_cartesian = traits.Bool( - argstr = '-complex', xor = _conversion, position=1,) + argstr='-complex', xor=_conversion, position=1,) # requires=['real_in_file','imaginary_in_file','complex_out_file']) complex_polar = traits.Bool( - argstr = '-complexpolar', xor = _conversion, position=1,) + argstr='-complexpolar', xor=_conversion, position=1,) # requires=['magnitude_in_file','phase_in_file', # 'magnitude_out_file','phase_out_file']) complex_split = traits.Bool( - argstr = '-complexsplit', xor = _conversion, 
position=1,) + argstr='-complexsplit', xor=_conversion, position=1,) # requires=['complex_in_file','complex_out_file']) complex_merge = traits.Bool( - argstr = '-complexmerge', xor = _conversion + ['start_vol','end_vol'], + argstr='-complexmerge', xor=_conversion + ['start_vol', 'end_vol'], position=1,) # requires=['complex_in_file','complex_in_file2','complex_out_file']) + class ComplexOuputSpec(TraitedSpec): magnitude_out_file = File() phase_out_file = File() @@ -1504,7 +1510,7 @@ def _parse_inputs(self, skip=None): skip += self.inputs._ofs[:1]+self.inputs._ofs[3:] else: skip += self.inputs._ofs[1:] - return super(Complex,self)._parse_inputs(skip) + return super(Complex, self)._parse_inputs(skip) def _gen_filename(self, name): if name == 'complex_out_file': @@ -1517,18 +1523,18 @@ def _gen_filename(self, name): else: return None return self._gen_fname(in_file, suffix="_cplx") - elif name =='magnitude_out_file': + elif name == 'magnitude_out_file': return self._gen_fname(self.inputs.complex_in_file, suffix="_mag") - elif name =='phase_out_file': - return self._gen_fname(self.inputs.complex_in_file,suffix="_phase") - elif name =='real_out_file': + elif name == 'phase_out_file': + return self._gen_fname(self.inputs.complex_in_file, suffix="_phase") + elif name == 'real_out_file': return self._gen_fname(self.inputs.complex_in_file, suffix="_real") - elif name =='imaginary_out_file': + elif name == 'imaginary_out_file': return self._gen_fname(self.inputs.complex_in_file, suffix="_imag") return None - def _get_output(self,name): - output = getattr(self.inputs,name) + def _get_output(self, name): + output = getattr(self.inputs, name) if not isdefined(output): output = self._gen_filename(name) return os.path.abspath(output) @@ -1547,7 +1553,6 @@ def _list_outputs(self): return outputs - class WarpUtilsInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr='--in=%s', mandatory=True, desc=('Name of file containing warp-coefficients/fields. This ' @@ -1563,8 +1568,8 @@ class WarpUtilsInputSpec(FSLCommandInputSpec): out_format = traits.Enum('spline', 'field', argstr='--outformat=%s', desc=('Specifies the output format. If set to field (default) ' - 'the output will be a (4D) field-file. If set to spline ' - 'the format will be a (4D) file of spline coefficients.')) + 'the output will be a (4D) field-file. If set to spline ' + 'the format will be a (4D) file of spline coefficients.')) warp_resolution = traits.Tuple(traits.Float, traits.Float, traits.Float, argstr='--warpres=%0.4f,%0.4f,%0.4f', @@ -1583,7 +1588,7 @@ class WarpUtilsInputSpec(FSLCommandInputSpec): desc=('Alternative (to --warpres) specification of the resolution of ' 'the output spline-field.')) - out_file = File(argstr='--out=%s', position=-1, name_source = ['in_file'], output_name='out_file', + out_file = File(argstr='--out=%s', position=-1, name_source=['in_file'], output_name='out_file', desc=('Name of output file. The format of the output depends on what other ' 'parameters are set. The default format is a (4D) field-file. If the ' '--outformat is set to spline the format will be a (4D) file of spline ' @@ -1599,10 +1604,11 @@ class WarpUtilsInputSpec(FSLCommandInputSpec): with_affine = traits.Bool(False, argstr='--withaff', desc=('Specifies that the affine transform (i.e. that which was ' 'specified for the --aff parameter in fnirt) should be ' - 'included as displacements in the --out file. 
That can be ' - 'useful for interfacing with software that cannot decode ' - 'FSL/fnirt coefficient-files (where the affine transform is ' - 'stored separately from the displacements).')) + 'included as displacements in the --out file. That can be ' + 'useful for interfacing with software that cannot decode ' + 'FSL/fnirt coefficient-files (where the affine transform is ' + 'stored separately from the displacements).')) + class WarpUtilsOutputSpec(TraitedSpec): out_file = File(desc=('Name of output file, containing the warp as field or coefficients.')) @@ -1642,7 +1648,7 @@ def _parse_inputs(self, skip=None): skip = [] suffix = 'field' - if isdefined(self.inputs.out_format) and self.inputs.out_format=='spline': + if isdefined(self.inputs.out_format) and self.inputs.out_format == 'spline': suffix = 'coeffs' trait_spec = self.inputs.trait('out_file') @@ -1655,9 +1661,9 @@ def _parse_inputs(self, skip=None): jac_spec.name_template = '%s_jac' jac_spec.output_name = 'out_jacobian' else: - skip+=['out_jacobian'] + skip += ['out_jacobian'] - skip+=['write_jacobian'] + skip += ['write_jacobian'] return super(WarpUtils, self)._parse_inputs(skip=skip) @@ -1679,7 +1685,7 @@ class ConvertWarpInputSpec(FSLCommandInputSpec): 'fnirt-transform from a subjects structural scan to an average of a group ' 'of subjects.')) - midmat=File(exists=True, argstr="--midmat=%s", + midmat = File(exists=True, argstr="--midmat=%s", desc="Name of file containing mid-warp-affine transform") warp2 = File(exists=True, argstr='--warp2=%s', @@ -1699,7 +1705,7 @@ class ConvertWarpInputSpec(FSLCommandInputSpec): 'subjects functional (EPI) data onto an undistorted space (i.e. a space ' 'that corresponds to his/her true anatomy).')) - shift_direction = traits.Enum('y-','y','x','x-','z','z-', + shift_direction = traits.Enum('y-', 'y', 'x', 'x-', 'z', 'z-', argstr="--shiftdir=%s", requires=['shift_in_file'], desc=('Indicates the direction that the distortions from ' '--shiftmap goes. It depends on the direction and ' @@ -1729,12 +1735,12 @@ class ConvertWarpInputSpec(FSLCommandInputSpec): 'coordinates in the next space.')) out_abswarp = traits.Bool(argstr='--absout', xor=['out_relwarp'], - desc=('If set it indicates that the warps in --out should be absolute, i.e. ' - 'the values in --out are displacements from the coordinates in --ref.')) + desc=('If set it indicates that the warps in --out should be absolute, i.e. ' + 'the values in --out are displacements from the coordinates in --ref.')) out_relwarp = traits.Bool(argstr='--relout', xor=['out_abswarp'], - desc=('If set it indicates that the warps in --out should be relative, i.e. ' - 'the values in --out are displacements from the coordinates in --ref.')) + desc=('If set it indicates that the warps in --out should be relative, i.e. 
' + 'the values in --out are displacements from the coordinates in --ref.')) class ConvertWarpOutputSpec(TraitedSpec): @@ -1784,6 +1790,7 @@ class WarpPointsBaseInputSpec(CommandLineInputSpec): name_template='%s_warped', output_name='out_file', desc='output file name') + class WarpPointsInputSpec(WarpPointsBaseInputSpec): src_file = File(exists=True, argstr='-src %s', mandatory=True, desc=('filename of source image')) @@ -1832,7 +1839,6 @@ def __init__(self, command=None, **inputs): super(WarpPoints, self).__init__(command=command, **inputs) - def _format_arg(self, name, trait_spec, value): if name == 'out_file': return '' @@ -1855,7 +1861,7 @@ def _parse_inputs(self, skip=None): delete=False).name second_args = self._tmpfile - return first_args + [ second_args ] + return first_args + [second_args] def _vtk_to_coords(self, in_file, out_file=None): import os.path as op @@ -1869,7 +1875,7 @@ def _vtk_to_coords(self, in_file, out_file=None): points = reader.output.points if out_file is None: - out_file, _ = op.splitext(in_file) + '.txt' + out_file, _ = op.splitext(in_file) + '.txt' np.savetxt(out_file, points) return out_file @@ -1940,7 +1946,7 @@ def _run_interface(self, runtime): elif outformat == 'trk': self._coords_to_trk(newpoints, out_file) else: - np.savetxt(out_file, newpoints.reshape(-1,3)) + np.savetxt(out_file, newpoints.reshape(-1, 3)) return runtime @@ -2002,7 +2008,8 @@ class MotionOutliersInputSpec(FSLCommandInputSpec): out_metric_values = File(argstr="-s %s", name_source='in_file', name_template='%s_metrics.txt', keep_extension=True, desc='output metric values (DVARS etc.) file name', hash_files=False) out_metric_plot = File(argstr="-p %s", name_source='in_file', name_template='%s_metrics.png', - keep_extension=True, desc='output metric values plot (DVARS etc.) file name', hash_files=False) + keep_extension=True, desc='output metric values plot (DVARS etc.) file name', hash_files=False) + class MotionOutliersOutputSpec(TraitedSpec): out_file = File(exists=True) diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 31eb48ca61..03123c39ae 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -320,7 +320,7 @@ def _list_outputs(self): else: raise(inst) use_hardlink = str2bool(config.get('execution', - 'try_hard_link_datasink') ) + 'try_hard_link_datasink')) for key, files in list(self.inputs._outputs.items()): if not isdefined(files): continue @@ -382,15 +382,15 @@ def _list_outputs(self): class S3DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): testing = traits.Bool(False, usedefault=True, - desc='Flag for using local fakes3 server.' - ' (for testing purposes only)') + desc='Flag for using local fakes3 server.' 
+ ' (for testing purposes only)') anon = traits.Bool(False, usedefault=True, - desc='Use anonymous connection to s3') + desc='Use anonymous connection to s3') bucket = traits.Str(mandatory=True, desc='Amazon S3 bucket where your data is stored') bucket_path = traits.Str('', usedefault=True, desc='Location within your bucket to store ' - 'data.') + 'data.') base_directory = Directory( desc='Path to the base directory for storing data.') container = traits.Str( @@ -475,16 +475,16 @@ def localtos3(self, paths): class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): anon = traits.Bool(False, usedefault=True, - desc='Use anonymous connection to s3') + desc='Use anonymous connection to s3') region = traits.Str('us-east-1', usedefault=True, - desc='Region of s3 bucket') + desc='Region of s3 bucket') bucket = traits.Str(mandatory=True, desc='Amazon S3 bucket where your data is stored') bucket_path = traits.Str('', usedefault=True, desc='Location within your bucket for subject data.') local_directory = Directory(exists=True, desc='Path to the local directory for subject data to be downloaded ' - 'and accessed. Should be on HDFS for Spark jobs.') + 'and accessed. Should be on HDFS for Spark jobs.') raise_on_empty = traits.Bool(True, usedefault=True, desc='Generate exception if list is empty for a given field') sort_filelist = traits.Bool(mandatory=True, @@ -587,7 +587,7 @@ def _list_outputs(self): if hasattr(self.inputs, 'field_template') and \ isdefined(self.inputs.field_template) and \ key in self.inputs.field_template: - template = self.inputs.field_template[key] # template override for multiple outfields + template = self.inputs.field_template[key] # template override for multiple outfields if isdefined(self.inputs.bucket_path): template = os.path.join(self.inputs.bucket_path, template) if not args: @@ -892,17 +892,17 @@ def _list_outputs(self): class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): base_directory = Directory(exists=True, - desc="Root path common to templates.") + desc="Root path common to templates.") sort_filelist = traits.Bool(True, usedefault=True, - desc="When matching mutliple files, return them in sorted order.") + desc="When matching mutliple files, return them in sorted order.") raise_on_empty = traits.Bool(True, usedefault=True, - desc="Raise an exception if a template pattern matches no files.") + desc="Raise an exception if a template pattern matches no files.") force_lists = traits.Either(traits.Bool(), traits.List(traits.Str()), - default=False, usedefault=True, - desc=("Whether to return outputs as a list even when only one file " - "matches the template. Either a boolean that applies to all " - "output fields or a list of output field names to coerce to " - " a list")) + default=False, usedefault=True, + desc=("Whether to return outputs as a list even when only one file " + "matches the template. 
Either a boolean that applies to all " + "output fields or a list of output field names to coerce to " + " a list")) class SelectFiles(IOBase): @@ -1040,10 +1040,10 @@ class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): match_regex = traits.Str('(.+)', usedefault=True, desc=("Regular expression for matching " - "paths.")) + "paths.")) ignore_regexes = traits.List(desc=("List of regular expressions, " - "if any match the path it will be " - "ignored.") + "if any match the path it will be " + "ignored.") ) max_depth = traits.Int(desc="The maximum depth to search beneath " "the root_paths") @@ -1094,11 +1094,11 @@ class DataFinder(IOBase): _always_run = True def _match_path(self, target_path): - #Check if we should ignore the path + # Check if we should ignore the path for ignore_re in self.ignore_regexes: if ignore_re.search(target_path): return - #Check if we can match the path + # Check if we can match the path match = self.match_regex.search(target_path) if not match is None: match_dict = match.groupdict() @@ -1111,7 +1111,7 @@ def _match_path(self, target_path): self.result[key].append(val) def _run_interface(self, runtime): - #Prepare some of the inputs + # Prepare some of the inputs if isinstance(self.inputs.root_paths, string_types): self.inputs.root_paths = [self.inputs.root_paths] self.match_regex = re.compile(self.inputs.match_regex) @@ -1131,24 +1131,24 @@ def _run_interface(self, runtime): for regex in self.inputs.ignore_regexes] self.result = None for root_path in self.inputs.root_paths: - #Handle tilda/env variables and remove extra seperators + # Handle tilda/env variables and remove extra seperators root_path = os.path.normpath(os.path.expandvars(os.path.expanduser(root_path))) - #Check if the root_path is a file + # Check if the root_path is a file if os.path.isfile(root_path): if min_depth == 0: self._match_path(root_path) continue - #Walk through directory structure checking paths + # Walk through directory structure checking paths for curr_dir, sub_dirs, files in os.walk(root_path): - #Determine the current depth from the root_path + # Determine the current depth from the root_path curr_depth = (curr_dir.count(os.sep) - root_path.count(os.sep)) - #If the max path depth has been reached, clear sub_dirs - #and files + # If the max path depth has been reached, clear sub_dirs + # and files if max_depth is not None and curr_depth >= max_depth: sub_dirs[:] = [] files = [] - #Test the path for the curr_dir and all files + # Test the path for the curr_dir and all files if curr_depth >= min_depth: self._match_path(curr_dir) if curr_depth >= (min_depth - 1): @@ -1156,17 +1156,17 @@ def _run_interface(self, runtime): full_path = os.path.join(curr_dir, infile) self._match_path(full_path) if (self.inputs.unpack_single and - len(self.result['out_paths']) == 1 - ): + len(self.result['out_paths']) == 1 + ): for key, vals in self.result.items(): self.result[key] = vals[0] else: - #sort all keys acording to out_paths + # sort all keys acording to out_paths for key in list(self.result.keys()): if key == "out_paths": continue sort_tuples = human_order_sorted(list(zip(self.result["out_paths"], - self.result[key]))) + self.result[key]))) self.result[key] = [x for (_, x) in sort_tuples] self.result["out_paths"] = human_order_sorted(self.result["out_paths"]) @@ -1419,7 +1419,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): desc="arguments that fit into query_template") ) undefined_traits['field_template'] = Undefined - 
#self.inputs.remove_trait('query_template_args') + # self.inputs.remove_trait('query_template_args') outdict = {} for key in outfields: outdict[key] = [] @@ -1455,7 +1455,7 @@ def _list_outputs(self): if not isdefined(value): msg = ("%s requires a value for input '%s' " "because it was listed in 'infields'" % - (self.__class__.__name__, key) + (self.__class__.__name__, key) ) raise ValueError(msg) @@ -1575,11 +1575,11 @@ class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): ) share = traits.Bool(False, - desc=('Option to share the subjects from the original project' - 'instead of creating new ones when possible - the created ' - 'experiments are then shared back to the original project' + desc=('Option to share the subjects from the original project' + 'instead of creating new ones when possible - the created ' + 'experiments are then shared back to the original project' ), - usedefault=True) + usedefault=True) def __setattr__(self, key, value): if key not in self.copyable_trait_names(): @@ -1873,18 +1873,19 @@ def _list_outputs(self): c.close() return None + class SSHDataGrabberInputSpec(DataGrabberInputSpec): hostname = traits.Str(mandatory=True, desc='Server hostname.') username = traits.Str(desc='Server username.') password = traits.Password(desc='Server password.') download_files = traits.Bool(True, usedefault=True, - desc='If false it will return the file names without downloading them') + desc='If false it will return the file names without downloading them') base_directory = traits.Str(mandatory=True, - desc='Path to the base directory consisting of subject data.') + desc='Path to the base directory consisting of subject data.') template_expression = traits.Enum(['fnmatch', 'regexp'], usedefault=True, - desc='Use either fnmatch or regexp to express templates') + desc='Use either fnmatch or regexp to express templates') ssh_log_to_file = traits.Str('', usedefault=True, - desc='If set SSH commands will be logged to the given file') + desc='If set SSH commands will be logged to the given file') class SSHDataGrabber(DataGrabber): @@ -1992,7 +1993,6 @@ def __init__(self, infields=None, outfields=None, **kwargs): ): self.inputs.template += '$' - def _list_outputs(self): try: paramiko @@ -2138,7 +2138,7 @@ def _get_ssh_client(self): class JSONFileGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): in_file = File(exists=True, desc='JSON source file') defaults = traits.Dict(desc=('JSON dictionary that sets default output' - 'values, overridden by values found in in_file')) + 'values, overridden by values found in in_file')) class JSONFileGrabber(IOBase): diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index 8690507ded..e3e9081433 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -27,10 +27,11 @@ def get_matlab_command(): no_matlab = get_matlab_command() is None + class MatlabInputSpec(CommandLineInputSpec): """ Basic expected inputs to Matlab interface """ - script = traits.Str(argstr='-r \"%s;exit\"', desc='m-code to run', + script = traits.Str(argstr='-r \"%s;exit\"', desc='m-code to run', mandatory=True, position=-1) uses_mcr = traits.Bool(desc='use MCR interface', xor=['nodesktop', 'nosplash', @@ -46,21 +47,22 @@ class MatlabInputSpec(CommandLineInputSpec): logfile = File(argstr='-logfile %s', desc='Save matlab output to log') single_comp_thread = traits.Bool(argstr="-singleCompThread", - desc="force single threaded operation", - nohash=True) + desc="force single threaded operation", + nohash=True) # 
non-commandline options - mfile = traits.Bool(True, desc='Run m-code using m-file', + mfile = traits.Bool(True, desc='Run m-code using m-file', usedefault=True) script_file = File('pyscript.m', usedefault=True, - desc='Name of file to write m-code to') - paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath') - prescript = traits.List(["ver,","try,"], usedefault=True, + desc='Name of file to write m-code to') + paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath') + prescript = traits.List(["ver,", "try,"], usedefault=True, desc='prescript to be added before code') postscript = traits.List(["\n,catch ME,", "fprintf(2,'MATLAB code threw an exception:\\n');", "fprintf(2,'%s\\n',ME.message);", "if length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\nLine:%d\\n',ME.stack.file,ME.stack.name,ME.stack.line);, end;", - "end;"], desc='script added after code', usedefault = True) + "end;"], desc='script added after code', usedefault=True) + class MatlabCommand(CommandLine): """Interface that runs matlab code @@ -77,11 +79,11 @@ class MatlabCommand(CommandLine): _default_paths = None input_spec = MatlabInputSpec - def __init__(self, matlab_cmd = None, **inputs): + def __init__(self, matlab_cmd=None, **inputs): """initializes interface to matlab (default 'matlab -nodesktop -nosplash') """ - super(MatlabCommand,self).__init__(**inputs) + super(MatlabCommand, self).__init__(**inputs) if matlab_cmd and isdefined(matlab_cmd): self._cmd = matlab_cmd elif self._default_matlab_cmd: @@ -95,7 +97,7 @@ def __init__(self, matlab_cmd = None, **inputs): if not isdefined(self.inputs.single_comp_thread) and \ not isdefined(self.inputs.uses_mcr): - if config.getboolean('execution','single_thread_matlab'): + if config.getboolean('execution', 'single_thread_matlab'): self.inputs.single_comp_thread = True # For matlab commands force all output to be returned since matlab # does not have a clean way of notifying an error @@ -134,7 +136,7 @@ def set_default_paths(cls, paths): """ cls._default_paths = paths - def _run_interface(self,runtime): + def _run_interface(self, runtime): self.inputs.terminal_output = 'allatonce' runtime = super(MatlabCommand, self)._run_interface(runtime) try: @@ -151,7 +153,7 @@ def _format_arg(self, name, trait_spec, value): if name in ['script']: argstr = trait_spec.argstr if self.inputs.uses_mcr: - argstr='%s' + argstr = '%s' return self._gen_matlab_command(argstr, value) return super(MatlabCommand, self)._format_arg(name, trait_spec, value) @@ -166,24 +168,24 @@ def _gen_matlab_command(self, argstr, script_lines): prescript = self.inputs.prescript postscript = self.inputs.postscript - #postcript takes different default value depending on the mfile argument + # postcript takes different default value depending on the mfile argument if mfile: - prescript.insert(0,"fprintf(1,'Executing %s at %s:\\n',mfilename,datestr(now));") + prescript.insert(0, "fprintf(1,'Executing %s at %s:\\n',mfilename,datestr(now));") else: - prescript.insert(0,"fprintf(1,'Executing code at %s:\\n',datestr(now));") + prescript.insert(0, "fprintf(1,'Executing code at %s:\\n',datestr(now));") for path in paths: prescript.append("addpath('%s');\n" % path) if not mfile: - #clean up the code of comments and replace newlines with commas + # clean up the code of comments and replace newlines with commas script_lines = ','.join([line for line in script_lines.split("\n") if not line.strip().startswith("%")]) script_lines = '\n'.join(prescript)+script_lines+'\n'.join(postscript) if mfile: - with 
open(os.path.join(cwd,self.inputs.script_file), 'wt') as mfile: + with open(os.path.join(cwd, self.inputs.script_file), 'wt') as mfile: mfile.write(script_lines) if self.inputs.uses_mcr: - script = '%s' % (os.path.join(cwd,self.inputs.script_file)) + script = '%s' % (os.path.join(cwd, self.inputs.script_file)) else: script = "addpath('%s');%s" % (cwd, self.inputs.script_file.split('.')[0]) else: diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index b7224740f5..36a25a2606 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -16,6 +16,7 @@ import os.path as op from ..utils.filemanip import split_filename + class MeshFixInputSpec(CommandLineInputSpec): number_of_biggest_shells = traits.Int(argstr='--shells %d', desc="Only the N biggest shells are kept") @@ -29,16 +30,16 @@ class MeshFixInputSpec(CommandLineInputSpec): dont_clean = traits.Bool(argstr='--no-clean', desc="Don't Clean") - save_as_stl = traits.Bool(xor= ['save_as_vmrl', 'save_as_freesurfer_mesh'], argstr='--stl', desc="Result is saved in stereolithographic format (.stl)") - save_as_vmrl = traits.Bool(argstr='--wrl', xor= ['save_as_stl', 'save_as_freesurfer_mesh'], desc="Result is saved in VRML1.0 format (.wrl)") - save_as_freesurfer_mesh = traits.Bool(argstr='--fsmesh', xor= ['save_as_vrml', 'save_as_stl'], desc="Result is saved in freesurfer mesh format") + save_as_stl = traits.Bool(xor=['save_as_vmrl', 'save_as_freesurfer_mesh'], argstr='--stl', desc="Result is saved in stereolithographic format (.stl)") + save_as_vmrl = traits.Bool(argstr='--wrl', xor=['save_as_stl', 'save_as_freesurfer_mesh'], desc="Result is saved in VRML1.0 format (.wrl)") + save_as_freesurfer_mesh = traits.Bool(argstr='--fsmesh', xor=['save_as_vrml', 'save_as_stl'], desc="Result is saved in freesurfer mesh format") remove_handles = traits.Bool(argstr='--remove-handles', desc="Remove handles") uniform_remeshing_steps = traits.Int(argstr='-u %d', requires=['uniform_remeshing_vertices'], desc="Number of steps for uniform remeshing of the whole mesh") uniform_remeshing_vertices = traits.Int(argstr='--vertices %d', requires=['uniform_remeshing_steps'], desc="Constrains the number of vertices." \ - "Must be used with uniform_remeshing_steps") + "Must be used with uniform_remeshing_steps") laplacian_smoothing_steps = traits.Int(argstr='--smooth %d', desc="The number of laplacian smoothing steps to apply") @@ -48,43 +49,45 @@ class MeshFixInputSpec(CommandLineInputSpec): cut_outer = traits.Int(argstr='--cut-outer %d', desc="Remove triangles of 1st that are outside of the 2nd shell.") cut_inner = traits.Int(argstr='--cut-inner %d', desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards.") decouple_inin = traits.Int(argstr='--decouple-inin %d', desc="Treat 1st file as inner, 2nd file as outer component." \ - "Resolve overlaps by moving inners triangles inwards. Constrain the min distance between the components > d.") + "Resolve overlaps by moving inners triangles inwards. Constrain the min distance between the components > d.") decouple_outin = traits.Int(argstr='--decouple-outin %d', desc="Treat 1st file as outer, 2nd file as inner component." \ - "Resolve overlaps by moving outers triangles inwards. Constrain the min distance between the components > d.") + "Resolve overlaps by moving outers triangles inwards. 
Constrain the min distance between the components > d.") decouple_outout = traits.Int(argstr='--decouple-outout %d', desc="Treat 1st file as outer, 2nd file as inner component." \ - "Resolve overlaps by moving outers triangles outwards. Constrain the min distance between the components > d.") + "Resolve overlaps by moving outers triangles outwards. Constrain the min distance between the components > d.") finetuning_inwards = traits.Bool(argstr='--fineTuneIn ', requires=['finetuning_distance', 'finetuning_substeps']) finetuning_outwards = traits.Bool(argstr='--fineTuneIn ', requires=['finetuning_distance', 'finetuning_substeps'], xor=['finetuning_inwards'], - desc = 'Similar to finetuning_inwards, but ensures minimal distance in the other direction') + desc='Similar to finetuning_inwards, but ensures minimal distance in the other direction') finetuning_distance = traits.Float(argstr='%f', requires=['finetuning_substeps'], desc="Used to fine-tune the minimal distance between surfaces." \ - "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)") + "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)") finetuning_substeps = traits.Int(argstr='%d', requires=['finetuning_distance'], desc="Used to fine-tune the minimal distance between surfaces." \ - "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)") + "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)") dilation = traits.Int(argstr='--dilate %d', desc="Dilate the surface by d. d < 0 means shrinking.") set_intersections_to_one = traits.Bool(argstr='--intersect', desc="If the mesh contains intersections, return value = 1." \ - "If saved in gmsh format, intersections will be highlighted.") + "If saved in gmsh format, intersections will be highlighted.") in_file1 = File(exists=True, argstr="%s", position=1, mandatory=True) in_file2 = File(exists=True, argstr="%s", position=2) output_type = traits.Enum('off', ['stl', 'msh', 'wrl', 'vrml', 'fs', 'off'], usedefault=True, desc='The output type to save the file as.') out_filename = File(genfile=True, argstr="-o %s", desc='The output filename for the fixed mesh file') + class MeshFixOutputSpec(TraitedSpec): mesh_file = File(exists=True, desc='The output mesh file') + class MeshFix(CommandLine): """ MeshFix v1.2-alpha - by Marco Attene, Mirko Windhoff, Axel Thielscher. .. seealso:: - http://jmeshlib.sourceforge.net - Sourceforge page + http://jmeshlib.sourceforge.net + Sourceforge page - http://simnibs.de/installation/meshfixandgetfem - Ubuntu installation instructions + http://simnibs.de/installation/meshfixandgetfem + Ubuntu installation instructions If MeshFix is used for research purposes, please cite the following paper: M. Attene - A lightweight approach to repairing digitized polygon meshes. 
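The decoupling and fine-tuning options above are typically used together; the following is a minimal, illustrative sketch only (input mesh names are hypothetical, and MeshFix is assumed to be on the path):

from nipype.interfaces.meshfix import MeshFix

# Decouple an outer and an inner surface, pushing the outer triangles
# outwards and fine-tuning the minimal distance between the two shells.
fix = MeshFix()
fix.inputs.in_file1 = 'outer_skull.stl'        # hypothetical input meshes
fix.inputs.in_file2 = 'inner_skull.stl'
fix.inputs.decouple_outout = 1                 # minimal separation d between the shells
fix.inputs.finetuning_outwards = True
fix.inputs.finetuning_distance = 0.5
fix.inputs.finetuning_substeps = 10
fix.inputs.out_filename = 'outer_skull_decoupled.off'
fix.run()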
@@ -105,8 +108,8 @@ class MeshFix(CommandLine): 'meshfix lh-pial.stl rh-pial.stl -o lh-pial_fixed.off' """ _cmd = 'meshfix' - input_spec=MeshFixInputSpec - output_spec=MeshFixOutputSpec + input_spec = MeshFixInputSpec + output_spec = MeshFixOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -130,7 +133,7 @@ def _gen_filename(self, name): return None def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file1) + _, name, _ = split_filename(self.inputs.in_file1) if self.inputs.save_as_freesurfer_mesh or self.inputs.output_type == 'fs': self.inputs.output_type = 'fs' self.inputs.save_as_freesurfer_mesh = True diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index da6aaa6c31..c0762332cb 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -53,7 +53,7 @@ class JistLaminarVolumetricLayering(SEMLikeCommandLine): input_spec = JistLaminarVolumetricLayeringInputSpec output_spec = JistLaminarVolumetricLayeringOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering " - _outputs_filenames = {'outContinuous':'outContinuous.nii','outLayer':'outLayer.nii','outDiscrete':'outDiscrete.nii'} + _outputs_filenames = {'outContinuous': 'outContinuous.nii', 'outLayer': 'outLayer.nii', 'outDiscrete': 'outDiscrete.nii'} _redirect_x = True @@ -105,7 +105,7 @@ class JistBrainMgdmSegmentation(SEMLikeCommandLine): input_spec = JistBrainMgdmSegmentationInputSpec output_spec = JistBrainMgdmSegmentationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation " - _outputs_filenames = {'outSegmented':'outSegmented.nii','outPosterior2':'outPosterior2.nii','outPosterior3':'outPosterior3.nii','outLevelset':'outLevelset.nii'} + _outputs_filenames = {'outSegmented': 'outSegmented.nii', 'outPosterior2': 'outPosterior2.nii', 'outPosterior3': 'outPosterior3.nii', 'outLevelset': 'outLevelset.nii'} _redirect_x = True @@ -140,7 +140,7 @@ class JistLaminarProfileGeometry(SEMLikeCommandLine): input_spec = JistLaminarProfileGeometryInputSpec output_spec = JistLaminarProfileGeometryOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry " - _outputs_filenames = {'outResult':'outResult.nii'} + _outputs_filenames = {'outResult': 'outResult.nii'} _redirect_x = True @@ -173,7 +173,7 @@ class JistLaminarProfileCalculator(SEMLikeCommandLine): input_spec = JistLaminarProfileCalculatorInputSpec output_spec = JistLaminarProfileCalculatorOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator " - _outputs_filenames = {'outResult':'outResult.nii'} + _outputs_filenames = {'outResult': 'outResult.nii'} _redirect_x = True @@ -214,7 +214,7 @@ class MedicAlgorithmN3(SEMLikeCommandLine): input_spec = MedicAlgorithmN3InputSpec output_spec = MedicAlgorithmN3OutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3 " - _outputs_filenames = {'outInhomogeneity2':'outInhomogeneity2.nii','outInhomogeneity':'outInhomogeneity.nii'} + _outputs_filenames = {'outInhomogeneity2': 'outInhomogeneity2.nii', 'outInhomogeneity': 'outInhomogeneity.nii'} _redirect_x = True @@ -248,7 +248,7 @@ class JistLaminarROIAveraging(SEMLikeCommandLine): input_spec = JistLaminarROIAveragingInputSpec output_spec = JistLaminarROIAveragingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run 
de.mpg.cbs.jist.laminar.JistLaminarROIAveraging " - _outputs_filenames = {'outROI3':'outROI3'} + _outputs_filenames = {'outROI3': 'outROI3'} _redirect_x = True @@ -318,7 +318,7 @@ class MedicAlgorithmLesionToads(SEMLikeCommandLine): input_spec = MedicAlgorithmLesionToadsInputSpec output_spec = MedicAlgorithmLesionToadsOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads " - _outputs_filenames = {'outWM':'outWM.nii','outHard':'outHard.nii','outFilled':'outFilled.nii','outMembership':'outMembership.nii','outInhomogeneity':'outInhomogeneity.nii','outCortical':'outCortical.nii','outHard2':'outHard2.nii','outLesion':'outLesion.nii','outSulcal':'outSulcal.nii'} + _outputs_filenames = {'outWM': 'outWM.nii', 'outHard': 'outHard.nii', 'outFilled': 'outFilled.nii', 'outMembership': 'outMembership.nii', 'outInhomogeneity': 'outInhomogeneity.nii', 'outCortical': 'outCortical.nii', 'outHard2': 'outHard2.nii', 'outLesion': 'outLesion.nii', 'outSulcal': 'outSulcal.nii'} _redirect_x = True @@ -359,7 +359,7 @@ class JistBrainMp2rageSkullStripping(SEMLikeCommandLine): input_spec = JistBrainMp2rageSkullStrippingInputSpec output_spec = JistBrainMp2rageSkullStrippingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping " - _outputs_filenames = {'outBrain':'outBrain.nii','outMasked3':'outMasked3.nii','outMasked2':'outMasked2.nii','outMasked':'outMasked.nii'} + _outputs_filenames = {'outBrain': 'outBrain.nii', 'outMasked3': 'outMasked3.nii', 'outMasked2': 'outMasked2.nii', 'outMasked': 'outMasked.nii'} _redirect_x = True @@ -401,7 +401,7 @@ class JistCortexSurfaceMeshInflation(SEMLikeCommandLine): input_spec = JistCortexSurfaceMeshInflationInputSpec output_spec = JistCortexSurfaceMeshInflationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation " - _outputs_filenames = {'outOriginal':'outOriginal','outInflated':'outInflated'} + _outputs_filenames = {'outOriginal': 'outOriginal', 'outInflated': 'outInflated'} _redirect_x = True @@ -442,7 +442,7 @@ class RandomVol(SEMLikeCommandLine): input_spec = RandomVolInputSpec output_spec = RandomVolOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.bme.smile.demo.RandomVol " - _outputs_filenames = {'outRand1':'outRand1.nii'} + _outputs_filenames = {'outRand1': 'outRand1.nii'} _redirect_x = True @@ -477,7 +477,7 @@ class MedicAlgorithmImageCalculator(SEMLikeCommandLine): input_spec = MedicAlgorithmImageCalculatorInputSpec output_spec = MedicAlgorithmImageCalculatorOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator " - _outputs_filenames = {'outResult':'outResult.nii'} + _outputs_filenames = {'outResult': 'outResult.nii'} _redirect_x = True @@ -511,7 +511,7 @@ class JistBrainMp2rageDuraEstimation(SEMLikeCommandLine): input_spec = JistBrainMp2rageDuraEstimationInputSpec output_spec = JistBrainMp2rageDuraEstimationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation " - _outputs_filenames = {'outDura':'outDura.nii'} + _outputs_filenames = {'outDura': 'outDura.nii'} _redirect_x = True @@ -546,7 +546,7 @@ class JistLaminarProfileSampling(SEMLikeCommandLine): input_spec = JistLaminarProfileSamplingInputSpec output_spec = JistLaminarProfileSamplingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileSampling " - _outputs_filenames = 
{'outProfile2':'outProfile2.nii','outProfilemapped':'outProfilemapped.nii'} + _outputs_filenames = {'outProfile2': 'outProfile2.nii', 'outProfilemapped': 'outProfilemapped.nii'} _redirect_x = True @@ -676,7 +676,7 @@ class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine): input_spec = MedicAlgorithmSPECTRE2010InputSpec output_spec = MedicAlgorithmSPECTRE2010OutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010 " - _outputs_filenames = {'outd0':'outd0.nii','outOriginal':'outOriginal.nii','outMask':'outMask.nii','outSplitHalves':'outSplitHalves.nii','outMidsagittal':'outMidsagittal.nii','outPrior':'outPrior.nii','outFANTASM':'outFANTASM.nii','outSegmentation':'outSegmentation.nii','outStripped':'outStripped.nii'} + _outputs_filenames = {'outd0': 'outd0.nii', 'outOriginal': 'outOriginal.nii', 'outMask': 'outMask.nii', 'outSplitHalves': 'outSplitHalves.nii', 'outMidsagittal': 'outMidsagittal.nii', 'outPrior': 'outPrior.nii', 'outFANTASM': 'outFANTASM.nii', 'outSegmentation': 'outSegmentation.nii', 'outStripped': 'outStripped.nii'} _redirect_x = True @@ -709,7 +709,7 @@ class JistBrainPartialVolumeFilter(SEMLikeCommandLine): input_spec = JistBrainPartialVolumeFilterInputSpec output_spec = JistBrainPartialVolumeFilterOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter " - _outputs_filenames = {'outPartial':'outPartial.nii'} + _outputs_filenames = {'outPartial': 'outPartial.nii'} _redirect_x = True @@ -751,7 +751,7 @@ class JistIntensityMp2rageMasking(SEMLikeCommandLine): input_spec = JistIntensityMp2rageMaskingInputSpec output_spec = JistIntensityMp2rageMaskingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking " - _outputs_filenames = {'outSignal2':'outSignal2.nii','outSignal':'outSignal.nii','outMasked2':'outMasked2.nii','outMasked':'outMasked.nii'} + _outputs_filenames = {'outSignal2': 'outSignal2.nii', 'outSignal': 'outSignal.nii', 'outMasked2': 'outMasked2.nii', 'outMasked': 'outMasked.nii'} _redirect_x = True diff --git a/nipype/interfaces/mipav/generate_classes.py b/nipype/interfaces/mipav/generate_classes.py index 9b6c2d719f..d4c8f8d9f9 100644 --- a/nipype/interfaces/mipav/generate_classes.py +++ b/nipype/interfaces/mipav/generate_classes.py @@ -2,28 +2,28 @@ if __name__ == "__main__": from nipype.interfaces.slicer.generate_classes import generate_all_classes - ## NOTE: For now either the launcher needs to be found on the default path, or - ## every tool in the modules list must be found on the default path - ## AND calling the module with --xml must be supported and compliant. + # NOTE: For now either the launcher needs to be found on the default path, or + # every tool in the modules list must be found on the default path + # AND calling the module with --xml must be supported and compliant. 
modules_list = ['edu.jhu.bme.smile.demo.RandomVol', 'de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileSampling', - 'de.mpg.cbs.jist.laminar.JistLaminarROIAveraging', - 'de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry', - 'de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping', - 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation'] + 'de.mpg.cbs.jist.laminar.JistLaminarProfileSampling', + 'de.mpg.cbs.jist.laminar.JistLaminarROIAveraging', + 'de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering', + 'de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry', + 'de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation', + 'de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping', + 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', + 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation'] modules_from_chris = ['edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010', - 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient', - 'edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation', - 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', - 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask', - #'de.mpg.cbs.jist.cortex.JistCortexFullCRUISE', # waiting for http://www.nitrc.org/tracker/index.php?func=detail&aid=7236&group_id=228&atid=942 to be fixed - 'de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation'] + 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient', + 'edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator', + 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation', + 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', + 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask', + # 'de.mpg.cbs.jist.cortex.JistCortexFullCRUISE', # waiting for http://www.nitrc.org/tracker/index.php?func=detail&aid=7236&group_id=228&atid=942 to be fixed + 'de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation'] modules_from_julia = ['de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking', 'edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010'] @@ -34,4 +34,4 @@ modules_list = list(set(modules_list).union(modules_from_chris).union(modules_from_leonie).union(modules_from_julia).union(modules_from_yasinyazici).union(modules_list)) - generate_all_classes(modules_list=modules_list,launcher=["java edu.jhu.ece.iacl.jist.cli.run" ], redirect_x=True, mipav_hacks=True) + generate_all_classes(modules_list=modules_list, launcher=["java edu.jhu.ece.iacl.jist.cli.run"], redirect_x=True, mipav_hacks=True) diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py index 59938d1af9..96d238f8ef 100644 --- a/nipype/interfaces/mne/base.py +++ b/nipype/interfaces/mne/base.py @@ -13,32 +13,32 @@ class WatershedBEMInputSpec(FSTraitedSpec): subject_id = traits.Str(argstr='--subject %s', mandatory=True, - desc='Subject ID (must have a complete Freesurfer directory)') + desc='Subject ID (must have a complete Freesurfer directory)') subjects_dir = Directory(exists=True, mandatory=True, usedefault=True, - desc='Path to Freesurfer subjects directory') + desc='Path to Freesurfer subjects directory') volume = traits.Enum('T1', 'aparc+aseg', 'aseg', 'brain', 'orig', 'brainmask', 'ribbon', - 
argstr='--volume %s', usedefault=True, - desc='The volume from the "mri" directory to use (defaults to T1)') + argstr='--volume %s', usedefault=True, + desc='The volume from the "mri" directory to use (defaults to T1)') overwrite = traits.Bool(True, usedefault=True, argstr='--overwrite', desc='Overwrites the existing files') atlas_mode = traits.Bool(argstr='--atlas', - desc='Use atlas mode for registration (default: no rigid alignment)') + desc='Use atlas mode for registration (default: no rigid alignment)') class WatershedBEMOutputSpec(TraitedSpec): mesh_files = OutputMultiPath(File(exists=True), - desc=('Paths to the output meshes (brain, inner ' - 'skull, outer skull, outer skin)')) + desc=('Paths to the output meshes (brain, inner ' + 'skull, outer skull, outer skin)')) brain_surface = File(exists=True, loc='bem/watershed', - desc='Brain surface (in Freesurfer format)') + desc='Brain surface (in Freesurfer format)') inner_skull_surface = File(exists=True, loc='bem/watershed', - desc='Inner skull surface (in Freesurfer format)') + desc='Inner skull surface (in Freesurfer format)') outer_skull_surface = File(exists=True, loc='bem/watershed', - desc='Outer skull surface (in Freesurfer format)') + desc='Outer skull surface (in Freesurfer format)') outer_skin_surface = File(exists=True, loc='bem/watershed', - desc='Outer skin surface (in Freesurfer format)') + desc='Outer skin surface (in Freesurfer format)') fif_file = File(exists=True, loc='bem', altkey='fif', - desc='"fif" format file for EEG processing in MNE') + desc='"fif" format file for EEG processing in MNE') cor_files = OutputMultiPath(File(exists=True), loc='bem/watershed/ws', altkey='COR', desc='"COR" format files') diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index b6e7d7df69..b7369524ae 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -38,21 +38,24 @@ from ... import logging iflogger = logging.getLogger('interface') + def transform_to_affine(streams, header, affine): rotation, scale = np.linalg.qr(affine) streams = move_streamlines(streams, rotation) - scale[0:3,0:3] = np.dot(scale[0:3,0:3], np.diag(1. / header['voxel_size'])) - scale[0:3,3] = abs(scale[0:3,3]) + scale[0:3, 0:3] = np.dot(scale[0:3, 0:3], np.diag(1. 
/ header['voxel_size'])) + scale[0:3, 3] = abs(scale[0:3, 3]) streams = move_streamlines(streams, scale) return streams + def read_mrtrix_tracks(in_file, as_generator=True): header = read_mrtrix_header(in_file) streamlines = read_mrtrix_streamlines(in_file, header, as_generator) return header, streamlines + def read_mrtrix_header(in_file): - fileobj = open(in_file,'r') + fileobj = open(in_file, 'r') header = {} iflogger.info('Reading header data...') for line in fileobj: @@ -60,26 +63,28 @@ def read_mrtrix_header(in_file): iflogger.info('Reached the end of the header!') break elif ': ' in line: - line = line.replace('\n','') - line = line.replace("'","") - key = line.split(': ')[0] + line = line.replace('\n', '') + line = line.replace("'", "") + key = line.split(': ')[0] value = line.split(': ')[1] header[key] = value - iflogger.info('...adding "{v}" to header for key "{k}"'.format(v=value,k=key)) + iflogger.info('...adding "{v}" to header for key "{k}"'.format(v=value, k=key)) fileobj.close() - header['count'] = int(header['count'].replace('\n','')) - header['offset'] = int(header['file'].replace('.','')) + header['count'] = int(header['count'].replace('\n', '')) + header['offset'] = int(header['file'].replace('.', '')) return header + def read_mrtrix_streamlines(in_file, header, as_generator=True): offset = header['offset'] stream_count = header['count'] - fileobj = open(in_file,'r') + fileobj = open(in_file, 'r') fileobj.seek(offset) endianness = native_code f4dt = np.dtype(endianness + 'f4') pt_cols = 3 bytesize = pt_cols*4 + def points_per_track(offset): n_streams = 0 n_points = 0 @@ -87,9 +92,9 @@ def points_per_track(offset): iflogger.info('Identifying the number of points per tract...') all_str = fileobj.read() num_triplets = int(len(all_str) / bytesize) - pts = np.ndarray(shape=(num_triplets,pt_cols), dtype='f4',buffer=all_str) - nonfinite_list = np.where(np.isfinite(pts[:,2]) == False) - nonfinite_list = list(nonfinite_list[0])[0:-1] # Converts numpy array to list, removes the last value + pts = np.ndarray(shape=(num_triplets, pt_cols), dtype='f4', buffer=all_str) + nonfinite_list = np.where(np.isfinite(pts[:, 2]) == False) + nonfinite_list = list(nonfinite_list[0])[0:-1] # Converts numpy array to list, removes the last value nonfinite_list_bytes = [offset+x*bytesize for x in nonfinite_list] for idx, value in enumerate(nonfinite_list): if idx == 0: @@ -117,17 +122,17 @@ def track_gen(track_points): stream_count, n_streams)) break pts = np.ndarray( - shape = (n_pts, pt_cols), - dtype = f4dt, - buffer = pts_str) + shape=(n_pts, pt_cols), + dtype=f4dt, + buffer=pts_str) nan_pt = np.ndarray( - shape = (1, pt_cols), - dtype = f4dt, - buffer = nan_str) + shape=(1, pt_cols), + dtype=f4dt, + buffer=nan_str) if np.isfinite(nan_pt[0][0]): raise ValueError break - xyz = pts[:,:3] + xyz = pts[:, :3] yield xyz n_streams += 1 if n_streams == stream_count: @@ -143,17 +148,20 @@ def track_gen(track_points): streamlines = list(streamlines) return streamlines + class MRTrix2TrackVisInputSpec(TraitedSpec): in_file = File(exists=True, mandatory=True, - desc='The input file for the tracks in MRTrix (.tck) format') + desc='The input file for the tracks in MRTrix (.tck) format') image_file = File(exists=True, desc='The image the tracks were generated from') matrix_file = File(exists=True, desc='A transformation matrix to apply to the tracts after they have been generated (from FLIRT - affine transformation from image_file to registration_image_file)') registration_image_file = File(exists=True, 
desc='The final image the tracks should be registered to.') out_filename = File('converted.trk', genfile=True, usedefault=True, desc='The output filename for the tracks in TrackVis (.trk) format') + class MRTrix2TrackVisOutputSpec(TraitedSpec): out_file = File(exists=True) + class MRTrix2TrackVis(BaseInterface): """ Converts MRtrix (.tck) tract files into TrackVis (.trk) format @@ -178,14 +186,14 @@ def _run_interface(self, runtime): affine = image_file.get_affine() out_filename = op.abspath(self.inputs.out_filename) - #Reads MRTrix tracks + # Reads MRTrix tracks header, streamlines = read_mrtrix_tracks(self.inputs.in_file, as_generator=True) iflogger.info('MRTrix Header:') iflogger.info(header) # Writes to Trackvis trk_header = nb.trackvis.empty_header() - trk_header['dim'] = [dx,dy,dz] - trk_header['voxel_size'] = [vx,vy,vz] + trk_header['dim'] = [dx, dy, dz] + trk_header['voxel_size'] = [vx, vy, vz] trk_header['n_count'] = header['count'] if isdefined(self.inputs.matrix_file) and isdefined(self.inputs.registration_image_file): @@ -199,20 +207,20 @@ def _run_interface(self, runtime): iflogger.info('Using affine from registration image file {r}'.format(r=self.inputs.registration_image_file)) iflogger.info(reg_affine) trk_header['vox_to_ras'] = reg_affine - trk_header['dim'] = [r_dx,r_dy,r_dz] - trk_header['voxel_size'] = [r_vx,r_vy,r_vz] + trk_header['dim'] = [r_dx, r_dy, r_dz] + trk_header['voxel_size'] = [r_vx, r_vy, r_vz] - affine = np.dot(affine,np.diag(1. / np.array([vx, vy, vz, 1]))) + affine = np.dot(affine, np.diag(1. / np.array([vx, vy, vz, 1]))) transformed_streamlines = transform_to_affine(streamlines, trk_header, affine) - aff = affine_from_fsl_mat_file(xfm, [vx,vy,vz], [r_vx,r_vy,r_vz]) + aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz], [r_vx, r_vy, r_vz]) iflogger.info(aff) axcode = aff2axcodes(reg_affine) trk_header['voxel_order'] = axcode[0]+axcode[1]+axcode[2] final_streamlines = move_streamlines(transformed_streamlines, aff) - trk_tracks = ((ii,None,None) for ii in final_streamlines) + trk_tracks = ((ii, None, None) for ii in final_streamlines) trk.write(out_filename, trk_tracks, trk_header) iflogger.info('Saving transformed Trackvis file as {out}'.format(out=out_filename)) iflogger.info('New TrackVis Header:') @@ -223,7 +231,7 @@ def _run_interface(self, runtime): trk_header['voxel_order'] = axcode[0]+axcode[1]+axcode[2] trk_header['vox_to_ras'] = affine transformed_streamlines = transform_to_affine(streamlines, trk_header, affine) - trk_tracks = ((ii,None,None) for ii in transformed_streamlines) + trk_tracks = ((ii, None, None) for ii in transformed_streamlines) trk.write(out_filename, trk_tracks, trk_header) iflogger.info('Saving Trackvis file as {out}'.format(out=out_filename)) iflogger.info('TrackVis Header:') @@ -240,6 +248,7 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '.trk' diff --git a/nipype/interfaces/mrtrix/preprocess.py b/nipype/interfaces/mrtrix/preprocess.py index 68f16c237f..67242e9705 100644 --- a/nipype/interfaces/mrtrix/preprocess.py +++ b/nipype/interfaces/mrtrix/preprocess.py @@ -17,31 +17,33 @@ class MRConvertInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='voxel-order data filename') + desc='voxel-order data filename') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output filename') - 
extract_at_axis = traits.Enum(1,2,3, argstr='-coord %s', position=1, - desc='"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.') + extract_at_axis = traits.Enum(1, 2, 3, argstr='-coord %s', position=1, + desc='"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.') extract_at_coordinate = traits.List(traits.Float, argstr='%s', sep=',', position=2, minlen=1, maxlen=3, - desc='"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.') + desc='"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.') voxel_dims = traits.List(traits.Float, argstr='-vox %s', sep=',', - position=3, minlen=3, maxlen=3, - desc='Three comma-separated numbers giving the size of each voxel in mm.') + position=3, minlen=3, maxlen=3, + desc='Three comma-separated numbers giving the size of each voxel in mm.') output_datatype = traits.Enum("nii", "float", "char", "short", "int", "long", "double", argstr='-output %s', position=2, - desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"') #, usedefault=True) - extension = traits.Enum("mif","nii", "float", "char", "short", "int", "long", "double", position=2, - desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', usedefault=True) + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"') # , usedefault=True) + extension = traits.Enum("mif", "nii", "float", "char", "short", "int", "long", "double", position=2, + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', usedefault=True) layout = traits.Enum("nii", "float", "char", "short", "int", "long", "double", argstr='-output %s', position=2, - desc='specify the layout of the data in memory. The actual layout produced will depend on whether the output image format can support it.') + desc='specify the layout of the data in memory. The actual layout produced will depend on whether the output image format can support it.') resample = traits.Float(argstr='-scale %d', position=3, - units='mm', desc='Apply scaling to the intensity values.') + units='mm', desc='Apply scaling to the intensity values.') offset_bias = traits.Float(argstr='-scale %d', position=3, - units='mm', desc='Apply offset to the intensity values.') + units='mm', desc='Apply offset to the intensity values.') replace_NaN_with_zero = traits.Bool(argstr='-zero', position=3, desc="Replace all NaN values with zero.") prs = traits.Bool(argstr='-prs', position=3, desc="Assume that the DW gradients are specified in the PRS frame (Siemens DICOM only).") + class MRConvertOutputSpec(TraitedSpec): converted = File(exists=True, desc='path/name of 4D volume in voxel order') + class MRConvert(CommandLine): """ Perform conversion between different file types and optionally extract a subset of the input image. 
@@ -62,8 +64,8 @@ class MRConvert(CommandLine): """ _cmd = 'mrconvert' - input_spec=MRConvertInputSpec - output_spec=MRConvertOutputSpec + input_spec = MRConvertInputSpec + output_spec = MRConvertOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -79,14 +81,16 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) if isdefined(self.inputs.out_filename): outname = self.inputs.out_filename else: outname = name + '_mrconvert.' + self.inputs.extension return outname + class DWI2TensorInputSpec(CommandLineInputSpec): in_file = InputMultiPath(File(exists=True), argstr='%s', mandatory=True, position=-2, desc='Diffusion-weighted images') @@ -143,19 +147,22 @@ class DWI2Tensor(CommandLine): """ _cmd = 'dwi2tensor' - input_spec=DWI2TensorInputSpec - output_spec=DWI2TensorOutputSpec + input_spec = DWI2TensorInputSpec + output_spec = DWI2TensorOutputSpec + class Tensor2VectorInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='Diffusion tensor image') + desc='Diffusion tensor image') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output vector filename') quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class Tensor2VectorOutputSpec(TraitedSpec): vector = File(exists=True, desc='the output image of the major eigenvectors of the diffusion tensor image.') + class Tensor2Vector(CommandLine): """ Generates a map of the major eigenvectors of the tensors in each voxel. @@ -170,8 +177,8 @@ class Tensor2Vector(CommandLine): """ _cmd = 'tensor2vector' - input_spec=Tensor2VectorInputSpec - output_spec=Tensor2VectorOutputSpec + input_spec = Tensor2VectorInputSpec + output_spec = Tensor2VectorOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -187,20 +194,24 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_vector.mif' + class Tensor2FractionalAnisotropyInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='Diffusion tensor image') + desc='Diffusion tensor image') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output Fractional Anisotropy filename') quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class Tensor2FractionalAnisotropyOutputSpec(TraitedSpec): FA = File(exists=True, desc='the output image of the major eigenvectors of the diffusion tensor image.') + class Tensor2FractionalAnisotropy(CommandLine): """ Generates a map of the fractional anisotropy in each voxel. 
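A minimal sketch of how these preprocessing interfaces chain into a tensor/FA pipeline; file names are hypothetical, gradient-table handling is omitted, and the DWI2Tensor output field name is taken from its output spec:

import nipype.interfaces.mrtrix as mrt

# Convert the raw DWI to MRtrix format, fit tensors, then map FA.
convert = mrt.MRConvert(in_file='dwi.nii', extension='mif')   # hypothetical input
converted = convert.run().outputs.converted
tensor = mrt.DWI2Tensor(in_file=[converted])                  # 'tensor' output per DWI2TensorOutputSpec
tensor_img = tensor.run().outputs.tensor
fa = mrt.Tensor2FractionalAnisotropy(in_file=tensor_img)
fa.run()                                                      # writes <in_file>_FA.mif by default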
@@ -215,8 +226,8 @@ class Tensor2FractionalAnisotropy(CommandLine): """ _cmd = 'tensor2FA' - input_spec=Tensor2FractionalAnisotropyInputSpec - output_spec=Tensor2FractionalAnisotropyOutputSpec + input_spec = Tensor2FractionalAnisotropyInputSpec + output_spec = Tensor2FractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -232,20 +243,24 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_FA.mif' + class Tensor2ApparentDiffusionInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='Diffusion tensor image') + desc='Diffusion tensor image') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output Fractional Anisotropy filename') quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class Tensor2ApparentDiffusionOutputSpec(TraitedSpec): ADC = File(exists=True, desc='the output image of the major eigenvectors of the diffusion tensor image.') + class Tensor2ApparentDiffusion(CommandLine): """ Generates a map of the apparent diffusion coefficient (ADC) in each voxel @@ -260,8 +275,8 @@ class Tensor2ApparentDiffusion(CommandLine): """ _cmd = 'tensor2ADC' - input_spec=Tensor2ApparentDiffusionInputSpec - output_spec=Tensor2ApparentDiffusionOutputSpec + input_spec = Tensor2ApparentDiffusionInputSpec + output_spec = Tensor2ApparentDiffusionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -277,10 +292,12 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_ADC.mif' + class MRMultiplyInputSpec(CommandLineInputSpec): in_files = InputMultiPath(File(exists=True), argstr='%s', mandatory=True, position=-2, @@ -289,9 +306,11 @@ class MRMultiplyInputSpec(CommandLineInputSpec): quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class MRMultiplyOutputSpec(TraitedSpec): out_file = File(exists=True, desc='the output image of the multiplication') + class MRMultiply(CommandLine): """ Multiplies two images. 
@@ -306,8 +325,8 @@ class MRMultiply(CommandLine): """ _cmd = 'mrmult' - input_spec=MRMultiplyInputSpec - output_spec=MRMultiplyOutputSpec + input_spec = MRMultiplyInputSpec + output_spec = MRMultiplyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -323,10 +342,12 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_files[0]) + _, name, _ = split_filename(self.inputs.in_files[0]) return name + '_MRMult.mif' + class MRTrixViewerInputSpec(CommandLineInputSpec): in_files = InputMultiPath(File(exists=True), argstr='%s', mandatory=True, position=-2, @@ -334,9 +355,11 @@ class MRTrixViewerInputSpec(CommandLineInputSpec): quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class MRTrixViewerOutputSpec(TraitedSpec): pass + class MRTrixViewer(CommandLine): """ Loads the input images in the MRTrix Viewer. @@ -351,19 +374,22 @@ class MRTrixViewer(CommandLine): """ _cmd = 'mrview' - input_spec=MRTrixViewerInputSpec - output_spec=MRTrixViewerOutputSpec + input_spec = MRTrixViewerInputSpec + output_spec = MRTrixViewerOutputSpec def _list_outputs(self): return + class MRTrixInfoInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='Input images to be read') + desc='Input images to be read') + class MRTrixInfoOutputSpec(TraitedSpec): pass + class MRTrixInfo(CommandLine): """ Prints out relevant header information found in the image specified. @@ -378,23 +404,26 @@ class MRTrixInfo(CommandLine): """ _cmd = 'mrinfo' - input_spec=MRTrixInfoInputSpec - output_spec=MRTrixInfoOutputSpec + input_spec = MRTrixInfoInputSpec + output_spec = MRTrixInfoOutputSpec def _list_outputs(self): return + class GenerateWhiteMatterMaskInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-3, desc='Diffusion-weighted images') - binary_mask = File(exists=True, argstr='%s', mandatory=True, position = -2, desc='Binary brain mask') - out_WMProb_filename = File(genfile=True, argstr='%s', position = -1, desc='Output WM probability image filename') + binary_mask = File(exists=True, argstr='%s', mandatory=True, position=-2, desc='Binary brain mask') + out_WMProb_filename = File(genfile=True, argstr='%s', position=-1, desc='Output WM probability image filename') encoding_file = File(exists=True, argstr='-grad %s', mandatory=True, position=1, - desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') + desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') noise_level_margin = traits.Float(argstr='-margin %s', desc='Specify the width of the margin on either side of the image to be used to estimate the noise level (default = 10)') + class GenerateWhiteMatterMaskOutputSpec(TraitedSpec): WMprobabilitymap = File(exists=True, desc='WMprobabilitymap') + class GenerateWhiteMatterMask(CommandLine): """ Generates a white matter probability mask from the DW images. 
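A minimal sketch combining GenerateWhiteMatterMask with Threshold (defined below) to obtain a binary white-matter mask; file names are hypothetical and the encoding file is assumed to be in the 4xN [ X Y Z b ] format described above:

import nipype.interfaces.mrtrix as mrt

# Estimate a white-matter probability map, then binarise it at 0.5.
wmmask = mrt.GenerateWhiteMatterMask()
wmmask.inputs.in_file = 'dwi.mif'               # hypothetical inputs
wmmask.inputs.binary_mask = 'brain_mask.mif'
wmmask.inputs.encoding_file = 'encoding.txt'    # 4xN [ X Y Z b ] gradient table
wm_prob = wmmask.run().outputs.WMprobabilitymap

thresh = mrt.Threshold()
thresh.inputs.in_file = wm_prob
thresh.inputs.absolute_threshold_value = 0.5
thresh.run()                                    # writes <in_file>_thresh.mif by default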
@@ -410,8 +439,8 @@ class GenerateWhiteMatterMask(CommandLine): """ _cmd = 'gen_WM_mask' - input_spec=GenerateWhiteMatterMaskInputSpec - output_spec=GenerateWhiteMatterMaskOutputSpec + input_spec = GenerateWhiteMatterMaskInputSpec + output_spec = GenerateWhiteMatterMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -423,22 +452,26 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_WMProb.mif' + class ErodeInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='Input mask image to be eroded') + desc='Input mask image to be eroded') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output image filename') number_of_passes = traits.Int(argstr='-npass %s', desc='the number of passes (default: 1)') dilate = traits.Bool(argstr='-dilate', position=1, desc="Perform dilation rather than erosion") quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class ErodeOutputSpec(TraitedSpec): out_file = File(exists=True, desc='the output image') + class Erode(CommandLine): """ Erode (or dilates) a mask (i.e. binary) image @@ -452,8 +485,8 @@ class Erode(CommandLine): >>> erode.run() # doctest: +SKIP """ _cmd = 'erode' - input_spec=ErodeInputSpec - output_spec=ErodeOutputSpec + input_spec = ErodeInputSpec + output_spec = ErodeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -469,13 +502,15 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_erode.mif' + class ThresholdInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='The input image to be thresholded') + desc='The input image to be thresholded') out_filename = File(genfile=True, argstr='%s', position=-1, desc='The output binary image mask.') absolute_threshold_value = traits.Float(argstr='-abs %s', desc='Specify threshold value as absolute intensity.') percentage_threshold_value = traits.Float(argstr='-percent %s', desc='Specify threshold value as a percentage of the peak intensity in the input image.') @@ -484,9 +519,11 @@ class ThresholdInputSpec(CommandLineInputSpec): quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class ThresholdOutputSpec(TraitedSpec): out_file = File(exists=True, desc='The output binary image mask.') + class Threshold(CommandLine): """ Create bitwise image by thresholding image intensity. 
@@ -506,8 +543,8 @@ class Threshold(CommandLine): """ _cmd = 'threshold' - input_spec=ThresholdInputSpec - output_spec=ThresholdOutputSpec + input_spec = ThresholdInputSpec + output_spec = ThresholdOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -523,20 +560,24 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_thresh.mif' + class MedianFilter3DInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='Input images to be smoothed') + desc='Input images to be smoothed') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output image filename') quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class MedianFilter3DOutputSpec(TraitedSpec): out_file = File(exists=True, desc='the output image') + class MedianFilter3D(CommandLine): """ Smooth images using a 3x3x3 median filter. @@ -551,8 +592,8 @@ class MedianFilter3D(CommandLine): """ _cmd = 'median3D' - input_spec=MedianFilter3DInputSpec - output_spec=MedianFilter3DOutputSpec + input_spec = MedianFilter3DInputSpec + output_spec = MedianFilter3DOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -568,10 +609,12 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_median3D.mif' + class MRTransformInputSpec(CommandLineInputSpec): in_files = InputMultiPath(File(exists=True), argstr='%s', mandatory=True, position=-2, @@ -580,18 +623,20 @@ class MRTransformInputSpec(CommandLineInputSpec): invert = traits.Bool(argstr='-inverse', position=1, desc="Invert the specified transform before using it") replace_transform = traits.Bool(argstr='-replace', position=1, desc="replace the current transform by that specified, rather than applying it to the current transform") transformation_file = File(exists=True, argstr='-transform %s', position=1, - desc='The transform to apply, in the form of a 4x4 ascii file.') + desc='The transform to apply, in the form of a 4x4 ascii file.') template_image = File(exists=True, argstr='-template %s', position=1, - desc='Reslice the input image to match the specified template image.') + desc='Reslice the input image to match the specified template image.') reference_image = File(exists=True, argstr='-reference %s', position=1, - desc='in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.') + desc='in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.') flip_x = traits.Bool(argstr='-flipx', position=1, desc="assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. 
This is only used in conjunction with the -reference option.") quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class MRTransformOutputSpec(TraitedSpec): out_file = File(exists=True, desc='the output image of the transformation') + class MRTransform(CommandLine): """ Apply spatial transformations or reslice images @@ -605,8 +650,8 @@ class MRTransform(CommandLine): """ _cmd = 'mrtransform' - input_spec=MRTransformInputSpec - output_spec=MRTransformOutputSpec + input_spec = MRTransformInputSpec + output_spec = MRTransformOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -622,6 +667,7 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_files[0]) + _, name, _ = split_filename(self.inputs.in_files[0]) return name + '_MRTransform.mif' diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 9307cde264..5577f0d564 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -24,13 +24,15 @@ class DWI2SphericalHarmonicsImageInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, desc='Diffusion-weighted images') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output filename') encoding_file = File(exists=True, argstr='-grad %s', mandatory=True, position=1, - desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') + desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') maximum_harmonic_order = traits.Float(argstr='-lmax %s', desc='set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.') normalise = traits.Bool(argstr='-normalise', position=3, desc="normalise the DW signal to the b=0 image") + class DWI2SphericalHarmonicsImageOutputSpec(TraitedSpec): spherical_harmonics_image = File(exists=True, desc='Spherical harmonics image') + class DWI2SphericalHarmonicsImage(CommandLine): """ Convert base diffusion-weighted images to their spherical harmonic representation. 
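A minimal sketch of producing the gradient encoding file with FSL2MRTrix (defined further down in this module) and feeding it to DWI2SphericalHarmonicsImage; file names are hypothetical:

import nipype.interfaces.mrtrix as mrt

# Build the MRtrix gradient table from FSL bvecs/bvals, then compute the
# spherical harmonic representation of the DW signal.
fsl2mrtrix = mrt.FSL2MRTrix(bvec_file='bvecs', bval_file='bvals')   # hypothetical files
encoding = fsl2mrtrix.run().outputs.encoding_file

dwi2sh = mrt.DWI2SphericalHarmonicsImage()
dwi2sh.inputs.in_file = 'dwi.mif'
dwi2sh.inputs.encoding_file = encoding
dwi2sh.inputs.maximum_harmonic_order = 8
dwi2sh.run()                                                        # writes <in_file>_SH.mif by default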
@@ -70,8 +72,8 @@ class DWI2SphericalHarmonicsImage(CommandLine): >>> dwi2SH.run() # doctest: +SKIP """ _cmd = 'dwi2SH' - input_spec=DWI2SphericalHarmonicsImageInputSpec - output_spec=DWI2SphericalHarmonicsImageOutputSpec + input_spec = DWI2SphericalHarmonicsImageInputSpec + output_spec = DWI2SphericalHarmonicsImageOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -87,21 +89,23 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_SH.mif' + class ConstrainedSphericalDeconvolutionInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-3, desc='diffusion-weighted image') response_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='the diffusion-weighted signal response function for a single fibre population (see EstimateResponse)') + desc='the diffusion-weighted signal response function for a single fibre population (see EstimateResponse)') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output filename') mask_image = File(exists=True, argstr='-mask %s', position=2, desc='only perform computation within the specified binary brain mask image') encoding_file = File(exists=True, argstr='-grad %s', position=1, - desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') + desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') filter_file = File(exists=True, argstr='-filter %s', position=-2, - desc='a text file containing the filtering coefficients for each even harmonic order.' \ - 'the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).') + desc='a text file containing the filtering coefficients for each even harmonic order.' \ + 'the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).') lambda_value = traits.Float(argstr='-lambda %s', desc='the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).') maximum_harmonic_order = traits.Int(argstr='-lmax %s', desc='set the maximum harmonic order for the output series. 
By default, the program will use the highest possible lmax given the number of diffusion-weighted images.') @@ -109,13 +113,15 @@ class ConstrainedSphericalDeconvolutionInputSpec(CommandLineInputSpec): iterations = traits.Int(argstr='-niter %s', desc='the maximum number of iterations to perform for each voxel (default = 50)') debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') directions_file = File(exists=True, argstr='-directions %s', position=-2, - desc='a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)') + desc='a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)') normalise = traits.Bool(argstr='-normalise', position=3, desc="normalise the DW signal to the b=0 image") + class ConstrainedSphericalDeconvolutionOutputSpec(TraitedSpec): spherical_harmonics_image = File(exists=True, desc='Spherical harmonics image') + class ConstrainedSphericalDeconvolution(CommandLine): """ Perform non-negativity constrained spherical deconvolution. @@ -149,8 +155,8 @@ class ConstrainedSphericalDeconvolution(CommandLine): """ _cmd = 'csdeconv' - input_spec=ConstrainedSphericalDeconvolutionInputSpec - output_spec=ConstrainedSphericalDeconvolutionOutputSpec + input_spec = ConstrainedSphericalDeconvolutionInputSpec + output_spec = ConstrainedSphericalDeconvolutionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -166,24 +172,28 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_CSD.mif' + class EstimateResponseForSHInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-3, desc='Diffusion-weighted images') mask_image = File(exists=True, mandatory=True, argstr='%s', position=-2, desc='only perform computation within the specified binary brain mask image') out_filename = File(genfile=True, argstr='%s', position=-1, desc='Output filename') encoding_file = File(exists=True, argstr='-grad %s', mandatory=True, position=1, - desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') + desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') maximum_harmonic_order = traits.Int(argstr='-lmax %s', desc='set the maximum harmonic order for the output series. 
By default, the program will use the highest possible lmax given the number of diffusion-weighted images.') normalise = traits.Bool(argstr='-normalise', desc='normalise the DW signal to the b=0 image') quiet = traits.Bool(argstr='-quiet', desc='Do not display information messages or progress status.') debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') + class EstimateResponseForSHOutputSpec(TraitedSpec): response = File(exists=True, desc='Spherical harmonics image') + class EstimateResponseForSH(CommandLine): """ Estimates the fibre response function for use in spherical deconvolution. @@ -199,8 +209,8 @@ class EstimateResponseForSH(CommandLine): >>> estresp.run() # doctest: +SKIP """ _cmd = 'estimate_response' - input_spec=EstimateResponseForSHInputSpec - output_spec=EstimateResponseForSHOutputSpec + input_spec = EstimateResponseForSHInputSpec + output_spec = EstimateResponseForSHOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -216,10 +226,12 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_ER.txt' + def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z): bvecs = np.loadtxt(bvec_file) bvals = np.loadtxt(bval_file) @@ -228,23 +240,24 @@ def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z): flip = True bvecs = np.transpose(bvecs) if invert_x: - bvecs[0,:] = -bvecs[0,:] + bvecs[0, :] = -bvecs[0, :] iflogger.info('Inverting b-vectors in the x direction') if invert_y: - bvecs[1,:] = -bvecs[1,:] + bvecs[1, :] = -bvecs[1, :] iflogger.info('Inverting b-vectors in the y direction') if invert_z: - bvecs[2,:] = -bvecs[2,:] + bvecs[2, :] = -bvecs[2, :] iflogger.info('Inverting b-vectors in the z direction') iflogger.info(np.shape(bvecs)) iflogger.info(np.shape(bvals)) - encoding = np.transpose(np.vstack((bvecs,bvals))) - _, bvec , _ = split_filename(bvec_file) - _, bval , _ = split_filename(bval_file) + encoding = np.transpose(np.vstack((bvecs, bvals))) + _, bvec, _ = split_filename(bvec_file) + _, bval, _ = split_filename(bval_file) out_encoding_file = bvec + '_' + bval + '.txt' np.savetxt(out_encoding_file, encoding) return out_encoding_file + class FSL2MRTrixInputSpec(TraitedSpec): bvec_file = File(exists=True, mandatory=True, desc='FSL b-vectors file (3xN text file)') bval_file = File(exists=True, mandatory=True, desc='FSL b-values file (1xN text file)') @@ -253,9 +266,11 @@ class FSL2MRTrixInputSpec(TraitedSpec): invert_z = traits.Bool(False, usedefault=True, desc='Inverts the b-vectors along the z-axis') out_encoding_file = File(genfile=True, desc='Output encoding filename') + class FSL2MRTrixOutputSpec(TraitedSpec): encoding_file = File(desc='The gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient' \ - 'and b gives the b-value in units (1000 s/mm^2).') + 'and b gives the b-value in units (1000 s/mm^2).') + class FSL2MRTrix(BaseInterface): """ @@ -293,13 +308,13 @@ def _gen_filename(self, name): return None def _gen_outfilename(self): - _, bvec , _ = split_filename(self.inputs.bvec_file) - _, bval , _ = split_filename(self.inputs.bval_file) + _, bvec, _ = split_filename(self.inputs.bvec_file) + _, bval, _ = split_filename(self.inputs.bval_file) return bvec + '_' + bval + '.txt' class GenerateDirectionsInputSpec(CommandLineInputSpec): - 
num_dirs = traits.Int(mandatory=True, argstr='%s', position=-2 , desc='the number of directions to generate.') + num_dirs = traits.Int(mandatory=True, argstr='%s', position=-2, desc='the number of directions to generate.') power = traits.Float(argstr='-power %s', desc='specify exponent to use for repulsion power law.') niter = traits.Int(argstr='-niter %s', desc='specify the maximum number of iterations to perform.') @@ -307,11 +322,13 @@ class GenerateDirectionsInputSpec(CommandLineInputSpec): quiet_display = traits.Bool(argstr='-quiet', desc='do not display information messages or progress status.') display_debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') out_file = File(name_source=['num_dirs'], name_template='directions_%d.txt', argstr='%s', hash_files=False, - position= -1, desc='the text file to write the directions to, as [ az el ] pairs.') + position=-1, desc='the text file to write the directions to, as [ az el ] pairs.') + class GenerateDirectionsOutputSpec(TraitedSpec): out_file = File(exists=True, desc='directions file') + class GenerateDirections(CommandLine): """ generate a set of directions evenly distributed over a hemisphere. @@ -326,8 +343,8 @@ class GenerateDirections(CommandLine): """ _cmd = 'gendir' - input_spec=GenerateDirectionsInputSpec - output_spec=GenerateDirectionsOutputSpec + input_spec = GenerateDirectionsInputSpec + output_spec = GenerateDirectionsOutputSpec class FindShPeaksInputSpec(CommandLineInputSpec): @@ -342,12 +359,14 @@ class FindShPeaksInputSpec(CommandLineInputSpec): display_info = traits.Bool(argstr='-info', desc='Display information messages.') quiet_display = traits.Bool(argstr='-quiet', desc='do not display information messages or progress status.') display_debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') - out_file = File(name_template="%s_peak_dirs.mif", keep_extension=False, argstr='%s', hash_files=False, position= -1, + out_file = File(name_template="%s_peak_dirs.mif", keep_extension=False, argstr='%s', hash_files=False, position=-1, desc='the output image. 
Each volume corresponds to the x, y & z component of each peak direction vector in turn', name_source=["in_file"]) + class FindShPeaksOutputSpec(TraitedSpec): out_file = File(exists=True, desc='Peak directions image') + class FindShPeaks(CommandLine): """ identify the orientations of the N largest peaks of a SH profile @@ -364,9 +383,8 @@ class FindShPeaks(CommandLine): """ _cmd = 'find_SH_peaks' - input_spec=FindShPeaksInputSpec - output_spec=FindShPeaksOutputSpec - + input_spec = FindShPeaksInputSpec + output_spec = FindShPeaksOutputSpec class Directions2AmplitudeInputSpec(CommandLineInputSpec): @@ -379,12 +397,14 @@ class Directions2AmplitudeInputSpec(CommandLineInputSpec): display_info = traits.Bool(argstr='-info', desc='Display information messages.') quiet_display = traits.Bool(argstr='-quiet', desc='do not display information messages or progress status.') display_debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') - out_file = File(name_template="%s_amplitudes.mif", keep_extension=False, argstr='%s', hash_files=False, position= -1, + out_file = File(name_template="%s_amplitudes.mif", keep_extension=False, argstr='%s', hash_files=False, position=-1, desc='the output amplitudes image', name_source=["in_file"]) + class Directions2AmplitudeOutputSpec(TraitedSpec): out_file = File(exists=True, desc='amplitudes image') + class Directions2Amplitude(CommandLine): """ convert directions image to amplitudes @@ -399,6 +419,6 @@ class Directions2Amplitude(CommandLine): """ _cmd = 'dir2amp' - input_spec=Directions2AmplitudeInputSpec - output_spec=Directions2AmplitudeOutputSpec + input_spec = Directions2AmplitudeInputSpec + output_spec = Directions2AmplitudeOutputSpec diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index c07fd166ca..cce2b38c6e 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -17,35 +17,35 @@ class FilterTracksInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='input tracks to be filtered') + desc='input tracks to be filtered') include_xor = ['include_file', 'include_spec'] - include_file = File(exists=True, argstr='-include %s', desc='inclusion file', xor = include_xor) + include_file = File(exists=True, argstr='-include %s', desc='inclusion file', xor=include_xor) include_spec = traits.List(traits.Float, desc='inclusion specification in mm and radius (x y z r)', position=2, - argstr='-include %s', minlen=4, maxlen=4, sep=',', units='mm', xor = include_xor) + argstr='-include %s', minlen=4, maxlen=4, sep=',', units='mm', xor=include_xor) exclude_xor = ['exclude_file', 'exclude_spec'] - exclude_file = File(exists=True, argstr='-exclude %s', desc='exclusion file', xor = exclude_xor) + exclude_file = File(exists=True, argstr='-exclude %s', desc='exclusion file', xor=exclude_xor) exclude_spec = traits.List(traits.Float, desc='exclusion specification in mm and radius (x y z r)', position=2, - argstr='-exclude %s', minlen=4, maxlen=4, sep=',', units='mm', xor = exclude_xor) + argstr='-exclude %s', minlen=4, maxlen=4, sep=',', units='mm', xor=exclude_xor) minimum_tract_length = traits.Float(argstr='-minlength %s', units='mm', - desc="Sets the minimum length of any track in millimeters (default is 10 mm).") - + desc="Sets the minimum length of any track in millimeters (default is 10 mm).") out_file = File(argstr='%s', position=-1, desc='Output filtered track filename', - name_source=['in_file'], hash_files=False, 
name_template='%s_filt') + name_source=['in_file'], hash_files=False, name_template='%s_filt') no_mask_interpolation = traits.Bool(argstr='-nomaskinterp', desc="Turns off trilinear interpolation of mask images.") invert = traits.Bool(argstr='-invert', desc="invert the matching process, so that tracks that would" \ - "otherwise have been included are now excluded and vice-versa.") - + "otherwise have been included are now excluded and vice-versa.") quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") debug = traits.Bool(argstr='-debug', position=1, desc="Display debugging messages.") + class FilterTracksOutputSpec(TraitedSpec): out_file = File(exists=True, desc='the output filtered tracks') + class FilterTracks(CommandLine): """ Use regions-of-interest to select a subset of tracks @@ -61,28 +61,30 @@ class FilterTracks(CommandLine): """ _cmd = 'filter_tracks' - input_spec=FilterTracksInputSpec - output_spec=FilterTracksOutputSpec + input_spec = FilterTracksInputSpec + output_spec = FilterTracksOutputSpec class Tracks2ProbInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, - desc='tract file') + desc='tract file') template_file = File(exists=True, argstr='-template %s', position=1, - desc='an image file to be used as a template for the output (the output image wil have the same transform and field of view)') + desc='an image file to be used as a template for the output (the output image wil have the same transform and field of view)') voxel_dims = traits.List(traits.Float, argstr='-vox %s', sep=',', position=2, minlen=3, maxlen=3, - desc='Three comma-separated numbers giving the size of each voxel in mm.') + desc='Three comma-separated numbers giving the size of each voxel in mm.') colour = traits.Bool(argstr='-colour', position=3, desc="add colour to the output image according to the direction of the tracks.") fraction = traits.Bool(argstr='-fraction', position=3, desc="produce an image of the fraction of fibres through each voxel (as a proportion of the total number in the file), rather than the count.") - output_datatype = traits.Enum("Bit","Int8", "UInt8","Int16", "UInt16","Int32", "UInt32", "float32", "float64", argstr='-datatype %s', position=2, - desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"') #, usedefault=True) + output_datatype = traits.Enum("Bit", "Int8", "UInt8", "Int16", "UInt16", "Int32", "UInt32", "float32", "float64", argstr='-datatype %s', position=2, + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"') # , usedefault=True) resample = traits.Float(argstr='-resample %d', position=3, - units='mm', desc='resample the tracks at regular intervals using Hermite interpolation. If omitted, the program will select an appropriate interpolation factor automatically.') - out_filename = File(genfile=True, argstr='%s', position= -1, desc='output data file') + units='mm', desc='resample the tracks at regular intervals using Hermite interpolation. 
If omitted, the program will select an appropriate interpolation factor automatically.') + out_filename = File(genfile=True, argstr='%s', position=-1, desc='output data file') + class Tracks2ProbOutputSpec(TraitedSpec): tract_image = File(exists=True, desc='Output tract count or track density image') + class Tracks2Prob(CommandLine): """ Convert a tract file into a map of the fraction of tracks to enter @@ -101,8 +103,8 @@ class Tracks2Prob(CommandLine): """ _cmd = 'tracks2prob' - input_spec=Tracks2ProbInputSpec - output_spec=Tracks2ProbOutputSpec + input_spec = Tracks2ProbInputSpec + output_spec = Tracks2ProbOutputSpec def _list_outputs(self): outputs = self.output_spec().get() @@ -118,37 +120,39 @@ def _gen_filename(self, name): return self._gen_outfilename() else: return None + def _gen_outfilename(self): - _, name , _ = split_filename(self.inputs.in_file) + _, name, _ = split_filename(self.inputs.in_file) return name + '_TDI.mif' + class StreamlineTrackInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, desc='the image containing the source data.' \ - 'The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, ' \ - 'the base DWI are needed. For SD methods, the SH harmonic coefficients of the FOD are needed.') + 'The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, ' \ + 'the base DWI are needed. For SD methods, the SH harmonic coefficients of the FOD are needed.') seed_xor = ['seed_file', 'seed_spec'] - seed_file = File(exists=True, argstr='-seed %s', desc='seed file', xor = seed_xor) + seed_file = File(exists=True, argstr='-seed %s', desc='seed file', xor=seed_xor) seed_spec = traits.List(traits.Float, desc='seed specification in mm and radius (x y z r)', position=2, - argstr='-seed %s', minlen=4, maxlen=4, sep=',', units='mm', xor = seed_xor) + argstr='-seed %s', minlen=4, maxlen=4, sep=',', units='mm', xor=seed_xor) include_xor = ['include_file', 'include_spec'] - include_file = File(exists=True, argstr='-include %s', desc='inclusion file', xor = include_xor) + include_file = File(exists=True, argstr='-include %s', desc='inclusion file', xor=include_xor) include_spec = traits.List(traits.Float, desc='inclusion specification in mm and radius (x y z r)', position=2, - argstr='-include %s', minlen=4, maxlen=4, sep=',', units='mm', xor = include_xor) + argstr='-include %s', minlen=4, maxlen=4, sep=',', units='mm', xor=include_xor) exclude_xor = ['exclude_file', 'exclude_spec'] - exclude_file = File(exists=True, argstr='-exclude %s', desc='exclusion file', xor = exclude_xor) + exclude_file = File(exists=True, argstr='-exclude %s', desc='exclusion file', xor=exclude_xor) exclude_spec = traits.List(traits.Float, desc='exclusion specification in mm and radius (x y z r)', position=2, - argstr='-exclude %s', minlen=4, maxlen=4, sep=',', units='mm', xor = exclude_xor) + argstr='-exclude %s', minlen=4, maxlen=4, sep=',', units='mm', xor=exclude_xor) mask_xor = ['mask_file', 'mask_spec'] - mask_file = File(exists=True, argstr='-mask %s', desc='mask file. Only tracks within mask.', xor = mask_xor) + mask_file = File(exists=True, argstr='-mask %s', desc='mask file. Only tracks within mask.', xor=mask_xor) mask_spec = traits.List(traits.Float, desc='Mask specification in mm and radius (x y z r). 
Tracks will be terminated when they leave the ROI.', position=2, - argstr='-mask %s', minlen=4, maxlen=4, sep=',', units='mm', xor = mask_xor) + argstr='-mask %s', minlen=4, maxlen=4, sep=',', units='mm', xor=mask_xor) inputmodel = traits.Enum('DT_STREAM', 'SD_PROB', 'SD_STREAM', - argstr='%s', desc='input model type', usedefault=True, position=-3) + argstr='%s', desc='input model type', usedefault=True, position=-3) stop = traits.Bool(argstr='-stop', desc="stop track as soon as it enters any of the include regions.") do_not_precompute = traits.Bool(argstr='-noprecomputed', desc="Turns off precomputation of the legendre polynomial values. Warning: this will slow down the algorithm by a factor of approximately 4.") @@ -156,34 +160,36 @@ class StreamlineTrackInputSpec(CommandLineInputSpec): no_mask_interpolation = traits.Bool(argstr='-nomaskinterp', desc="Turns off trilinear interpolation of mask images.") step_size = traits.Float(argstr='-step %s', units='mm', - desc="Set the step size of the algorithm in mm (default is 0.2).") + desc="Set the step size of the algorithm in mm (default is 0.2).") minimum_radius_of_curvature = traits.Float(argstr='-curvature %s', units='mm', - desc="Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)") + desc="Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)") desired_number_of_tracks = traits.Int(argstr='-number %d', desc='Sets the desired number of tracks.' \ - 'The program will continue to generate tracks until this number of tracks have been selected and written to the output file' \ - '(default is 100 for *_STREAM methods, 1000 for *_PROB methods).') + 'The program will continue to generate tracks until this number of tracks have been selected and written to the output file' \ + '(default is 100 for *_STREAM methods, 1000 for *_PROB methods).') maximum_number_of_tracks = traits.Int(argstr='-maxnum %d', desc='Sets the maximum number of tracks to generate.' 
\ - "The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" \ - '(default is 100 x number).') + "The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" \ + '(default is 100 x number).') minimum_tract_length = traits.Float(argstr='-minlength %s', units='mm', - desc="Sets the minimum length of any track in millimeters (default is 10 mm).") + desc="Sets the minimum length of any track in millimeters (default is 10 mm).") maximum_tract_length = traits.Float(argstr='-length %s', units='mm', - desc="Sets the maximum length of any track in millimeters (default is 200 mm).") + desc="Sets the maximum length of any track in millimeters (default is 200 mm).") cutoff_value = traits.Float(argstr='-cutoff %s', units='NA', - desc="Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1).") + desc="Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1).") initial_cutoff_value = traits.Float(argstr='-initcutoff %s', units='NA', - desc="Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff).") + desc="Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff).") initial_direction = traits.List(traits.Int, desc='Specify the initial tracking direction as a vector', - argstr='-initdirection %s', minlen=2, maxlen=2, units='voxels') - out_file = File(argstr='%s', position= -1, name_source = ['in_file'], name_template='%s_tracked.tck', + argstr='-initdirection %s', minlen=2, maxlen=2, units='voxels') + out_file = File(argstr='%s', position=-1, name_source=['in_file'], name_template='%s_tracked.tck', output_name='tracked', desc='output data file') + class StreamlineTrackOutputSpec(TraitedSpec): tracked = File(exists=True, desc='output file containing reconstructed tracts') + class StreamlineTrack(CommandLine): """ Performs tractography using one of the following models: @@ -211,7 +217,8 @@ class StreamlineTrack(CommandLine): class DiffusionTensorStreamlineTrackInputSpec(StreamlineTrackInputSpec): gradient_encoding_file = File(exists=True, argstr='-grad %s', mandatory=True, position=-2, - desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') + desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix') + class DiffusionTensorStreamlineTrack(StreamlineTrack): """ @@ -235,9 +242,11 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "DT_STREAM" return super(DiffusionTensorStreamlineTrack, self).__init__(command, **inputs) + class ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec(StreamlineTrackInputSpec): maximum_number_of_trials = traits.Int(argstr='-trials %s', - desc="Set the maximum number of sampling trials at each point (only used for probabilistic tracking).") + desc="Set the maximum number of sampling trials at each point (only used for probabilistic tracking).") + class ProbabilisticSphericallyDeconvolutedStreamlineTrack(StreamlineTrack): """ @@ -262,6 +271,7 @@ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_PROB" return super(ProbabilisticSphericallyDeconvolutedStreamlineTrack, self).__init__(command, **inputs) + class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): """ Performs streamline tracking using spherically deconvolved data diff --git a/nipype/interfaces/mrtrix3/__init__.py b/nipype/interfaces/mrtrix3/__init__.py index d5ef942b12..3ff5c8e2e7 100644 --- a/nipype/interfaces/mrtrix3/__init__.py +++ b/nipype/interfaces/mrtrix3/__init__.py @@ -4,7 +4,7 @@ # -*- coding: utf-8 -*- from .utils import (Mesh2PVE, Generate5tt, BrainMask, TensorMetrics, - ComputeTDI, TCK2VTK) + ComputeTDI, TCK2VTK) from .preprocess import ResponseSD, ACTPrepareFSL, ReplaceFSwithFIRST from .tracking import Tractography from .reconst import FitTensor, EstimateFOD diff --git a/nipype/interfaces/mrtrix3/utils.py b/nipype/interfaces/mrtrix3/utils.py index 0667353a9c..6a5b68f521 100644 --- a/nipype/interfaces/mrtrix3/utils.py +++ b/nipype/interfaces/mrtrix3/utils.py @@ -18,7 +18,7 @@ from .base import MRTrix3BaseInputSpec, MRTrix3Base from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File,InputMultiPath) + File, InputMultiPath) from ..traits_extension import isdefined from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py index cf45ae0c6d..54fc6c26ca 100644 --- a/nipype/interfaces/nipy/model.py +++ b/nipype/interfaces/nipy/model.py @@ -26,6 +26,7 @@ from ..base import (BaseInterface, TraitedSpec, traits, File, OutputMultiPath, BaseInterfaceInputSpec, isdefined) + class FitGLMInputSpec(BaseInterfaceInputSpec): session_info = traits.List(minlen=1, maxlen=1, mandatory=True, desc=('Session specific information generated by' @@ -37,7 +38,7 @@ class FitGLMInputSpec(BaseInterfaceInputSpec): "function it can be 'Canonical', 'Canonical " "With Derivative' or 'FIR'"), usedefault=True) drift_model = traits.Enum("Cosine", "Polynomial", "Blank", - desc = ("string that specifies the desired drift " + desc=("string that specifies the desired drift " "model, to be chosen among 'Polynomial', " "'Cosine', 'Blank'"), usedefault=True) TR = traits.Float(mandatory=True) @@ -58,6 +59,7 @@ class FitGLMInputSpec(BaseInterfaceInputSpec): save_residuals = traits.Bool(False, usedefault=True) plot_design_matrix = traits.Bool(False, usedefault=True) + class FitGLMOutputSpec(TraitedSpec): beta = File(exists=True) nvbeta = traits.Any() @@ -69,6 +71,7 @@ class FitGLMOutputSpec(TraitedSpec): residuals = traits.File() a = File(exists=True) + class FitGLM(BaseInterface): ''' Fit GLM model based on the specified design. Supports only single or concatenated runs. 
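The tracking hunks above only reshuffle whitespace, but for context, the DT_STREAM variant they touch is driven roughly as follows. This is an illustrative sketch, not part of the patch: all file names are hypothetical, and the encoding file is the 4xN text file that FSL2MRTrix (earlier in this diff) produces.

from nipype.interfaces.mrtrix import DiffusionTensorStreamlineTrack

track = DiffusionTensorStreamlineTrack()              # __init__ above forces inputmodel='DT_STREAM'
track.inputs.in_file = 'dwi.mif'                      # base DWI, as required for DT methods
track.inputs.gradient_encoding_file = 'encoding.txt'  # output of FSL2MRTrix
track.inputs.seed_file = 'seed_mask.nii'
track.inputs.mask_file = 'brain_mask.nii'             # tracks are kept only inside this mask
track.inputs.desired_number_of_tracks = 10000
result = track.run()                                  # 'tracked' output defaults to 'dwi_tracked.tck'

Only traits visible in the StreamlineTrack input spec above are used; the default output name follows the '%s_tracked.tck' name_template shown there.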
@@ -86,13 +89,12 @@ def _run_interface(self, runtime): nii = nb.load(functional_runs[0]) data = nii.get_data() - if isdefined(self.inputs.mask): mask = nb.load(self.inputs.mask).get_data() > 0 else: mask = np.ones(nii.shape[:3]) == 1 - timeseries = data.copy()[mask,:] + timeseries = data.copy()[mask, :] del data for functional_run in functional_runs[1:]: @@ -100,34 +102,33 @@ def _run_interface(self, runtime): data = nii.get_data() npdata = data.copy() del data - timeseries = np.concatenate((timeseries,npdata[mask,:]), axis=1) + timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1) del npdata nscans = timeseries.shape[1] if 'hpf' in list(session_info[0].keys()): hpf = session_info[0]['hpf'] - drift_model=self.inputs.drift_model + drift_model = self.inputs.drift_model else: - hpf=0 + hpf = 0 drift_model = "Blank" reg_names = [] for reg in session_info[0]['regress']: reg_names.append(reg['name']) - reg_vals = np.zeros((nscans,len(reg_names))) + reg_vals = np.zeros((nscans, len(reg_names))) for i in range(len(reg_names)): - reg_vals[:,i] = np.array(session_info[0]['regress'][i]['val']).reshape(1,-1) - + reg_vals[:, i] = np.array(session_info[0]['regress'][i]['val']).reshape(1, -1) - frametimes= np.linspace(0, (nscans-1)*self.inputs.TR, nscans) + frametimes = np.linspace(0, (nscans-1)*self.inputs.TR, nscans) conditions = [] onsets = [] duration = [] - for i,cond in enumerate(session_info[0]['cond']): + for i, cond in enumerate(session_info[0]['cond']): onsets += cond['onset'] conditions += [cond['name']]*len(cond['onset']) if len(cond['duration']) == 1: @@ -135,21 +136,20 @@ def _run_interface(self, runtime): else: duration += cond['duration'] - if conditions: - paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration) + paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration) else: paradigm = None design_matrix, self._reg_names = dm.dmtx_light(frametimes, paradigm, drift_model=drift_model, hfcut=hpf, - hrf_model=self.inputs.hrf_model, - add_regs=reg_vals, - add_reg_names=reg_names - ) + hrf_model=self.inputs.hrf_model, + add_regs=reg_vals, + add_reg_names=reg_names + ) if self.inputs.normalize_design_matrix: for i in range(len(self._reg_names)-1): - design_matrix[:,i] = ((design_matrix[:,i] - - design_matrix[:,i].mean()) / - design_matrix[:,i].std()) + design_matrix[:, i] = ((design_matrix[:, i] - + design_matrix[:, i].mean()) / + design_matrix[:, i].std()) if self.inputs.plot_design_matrix: import pylab @@ -161,10 +161,9 @@ def _run_interface(self, runtime): glm = GLM.glm() glm.fit(timeseries.T, design_matrix, method=self.inputs.method, model=self.inputs.model) - self._beta_file = os.path.abspath("beta.nii") beta = np.zeros(mask.shape + (glm.beta.shape[0],)) - beta[mask,:] = glm.beta.T + beta[mask, :] = glm.beta.T nb.save(nb.Nifti1Image(beta, nii.get_affine()), self._beta_file) self._s2_file = os.path.abspath("s2.nii") @@ -173,9 +172,9 @@ def _run_interface(self, runtime): nb.save(nb.Nifti1Image(s2, nii.get_affine()), self._s2_file) if self.inputs.save_residuals: - explained = np.dot(design_matrix,glm.beta) + explained = np.dot(design_matrix, glm.beta) residuals = np.zeros(mask.shape + (nscans,)) - residuals[mask,:] = timeseries - explained.T + residuals[mask, :] = timeseries - explained.T self._residuals_file = os.path.abspath("residuals.nii") nb.save(nb.Nifti1Image(residuals, nii.get_affine()), self._residuals_file) @@ -208,6 +207,7 @@ def _list_outputs(self): outputs["residuals"] = self._residuals_file return outputs + class 
EstimateContrastInputSpec(BaseInterfaceInputSpec): contrasts = traits.List( traits.Either(traits.Tuple(traits.Str, @@ -235,20 +235,22 @@ class EstimateContrastInputSpec(BaseInterfaceInputSpec): session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", mandatory=True) - beta = File(exists=True, desc="beta coefficients of the fitted model",mandatory=True) + beta = File(exists=True, desc="beta coefficients of the fitted model", mandatory=True) nvbeta = traits.Any(mandatory=True) - s2 = File(exists=True, desc="squared variance of the residuals",mandatory=True) + s2 = File(exists=True, desc="squared variance of the residuals", mandatory=True) dof = traits.Any(desc="degrees of freedom", mandatory=True) constants = traits.Any(mandatory=True) axis = traits.Any(mandatory=True) reg_names = traits.List(mandatory=True) mask = traits.File(exists=True) + class EstimateContrastOutputSpec(TraitedSpec): stat_maps = OutputMultiPath(File(exists=True)) z_maps = OutputMultiPath(File(exists=True)) p_maps = OutputMultiPath(File(exists=True)) + class EstimateContrast(BaseInterface): ''' Estimate contrast of a fitted model. @@ -264,10 +266,9 @@ def _run_interface(self, runtime): else: mask = np.ones(beta_nii.shape[:3]) == 1 - glm = GLM.glm() nii = nb.load(self.inputs.beta) - glm.beta = beta_nii.get_data().copy()[mask,:].T + glm.beta = beta_nii.get_data().copy()[mask, :].T glm.nvbeta = self.inputs.nvbeta glm.s2 = nb.load(self.inputs.s2).get_data().copy()[mask] glm.dof = self.inputs.dof diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index 8efdce940d..6016181ee0 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -66,7 +66,7 @@ def _run_interface(self, runtime): _, name, ext = split_filename(self.inputs.mean_volume) self._brain_mask_path = os.path.abspath("%s_mask.%s" % (name, ext)) nb.save(nb.Nifti1Image(brain_mask.astype(np.uint8), - nii.get_affine()), self._brain_mask_path) + nii.get_affine()), self._brain_mask_path) return runtime @@ -84,11 +84,11 @@ class FmriRealign4dInputSpec(BaseInterfaceInputSpec): tr = traits.Float(desc="TR in seconds", mandatory=True) slice_order = traits.List(traits.Int(), - desc=('0 based slice order. This would be equivalent to entering' - 'np.argsort(spm_slice_order) for this field. This effects' - 'interleaved acquisition. This field will be deprecated in' - 'future Nipy releases and be replaced by actual slice' - 'acquisition times.'), + desc=('0 based slice order. This would be equivalent to entering' + 'np.argsort(spm_slice_order) for this field. This effects' + 'interleaved acquisition. This field will be deprecated in' + 'future Nipy releases and be replaced by actual slice' + 'acquisition times.'), requires=["time_interp"]) tr_slices = traits.Float(desc="TR slices", requires=['time_interp']) start = traits.Float(0.0, usedefault=True, @@ -171,11 +171,11 @@ def _run_interface(self, runtime): for j, corr in enumerate(corr_run): self._out_file_path.append(os.path.abspath('corr_%s.nii.gz' % - (split_filename(self.inputs.in_file[j])[1]))) + (split_filename(self.inputs.in_file[j])[1]))) save_image(corr, self._out_file_path[j]) self._par_file_path.append(os.path.abspath('%s.par' % - (os.path.split(self.inputs.in_file[j])[1]))) + (os.path.split(self.inputs.in_file[j])[1]))) mfile = open(self._par_file_path[j], 'w') motion = R._transforms[j] # nipy does not encode euler angles. 
return in original form of @@ -302,11 +302,11 @@ def _run_interface(self, runtime): for j, corr in enumerate(corr_run): self._out_file_path.append(os.path.abspath('corr_%s.nii.gz' % - (split_filename(self.inputs.in_file[j])[1]))) + (split_filename(self.inputs.in_file[j])[1]))) save_image(corr, self._out_file_path[j]) self._par_file_path.append(os.path.abspath('%s.par' % - (os.path.split(self.inputs.in_file[j])[1]))) + (os.path.split(self.inputs.in_file[j])[1]))) mfile = open(self._par_file_path[j], 'w') motion = R._transforms[j] # nipy does not encode euler angles. return in original form of diff --git a/nipype/interfaces/nipy/utils.py b/nipype/interfaces/nipy/utils.py index 82e80c32c3..0e78111c0e 100644 --- a/nipype/interfaces/nipy/utils.py +++ b/nipype/interfaces/nipy/utils.py @@ -31,8 +31,8 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either(traits.Enum('cc', 'cr', 'crl1', 'mi', 'nmi', 'slr'), - traits.Callable(), - desc="""str or callable + traits.Callable(), + desc="""str or callable Cost-function for assessing image similarity. If a string, one of 'cc': correlation coefficient, 'cr': correlation ratio, 'crl1': L1-norm based correlation ratio, 'mi': mutual @@ -71,9 +71,9 @@ class Similarity(BaseInterface): def __init__(self, **inputs): warnings.warn(("This interface is deprecated since 0.10.0." - " Please use nipype.algorithms.metrics.Similarity"), + " Please use nipype.algorithms.metrics.Similarity"), DeprecationWarning) - super(Similarity,self).__init__(**inputs) + super(Similarity, self).__init__(**inputs) def _run_interface(self, runtime): @@ -90,11 +90,11 @@ def _run_interface(self, runtime): else: mask2 = None - histreg = HistogramRegistration(from_img = vol1_nii, - to_img = vol2_nii, + histreg = HistogramRegistration(from_img=vol1_nii, + to_img=vol2_nii, similarity=self.inputs.metric, - from_mask = mask1, - to_mask = mask2) + from_mask=mask1, + to_mask=mask2) self._similarity = histreg.eval(Affine()) return runtime diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 740f56cde7..da76a5882f 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -36,15 +36,15 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): - #Input either csv file, or time-series object and use _xor_inputs to - #discriminate + # Input either csv file, or time-series object and use _xor_inputs to + # discriminate _xor_inputs = ('in_file', 'in_TS') in_file = File(desc=('csv file with ROIs on the columns and ' - 'time-points on the rows. ROI names at the top row'), + 'time-points on the rows. ROI names at the top row'), exists=True, requires=('TR',)) - #If you gave just a file name, you need to specify the sampling_rate: + # If you gave just a file name, you need to specify the sampling_rate: TR = traits.Float(desc=('The TR used to collect the data' 'in your csv file ')) @@ -52,12 +52,12 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): NFFT = traits.Range(low=32, value=64, usedefault=True, desc=('This is the size of the window used for ' - 'the spectral estimation. Use values between ' - '32 and the number of samples in your time-series.' - '(Defaults to 64.)')) + 'the spectral estimation. Use values between ' + '32 and the number of samples in your time-series.' 
+ '(Defaults to 64.)')) n_overlap = traits.Range(low=0, value=0, usedefault=True, desc=('The number of samples which overlap' - 'between subsequent windows.(Defaults to 0)')) + 'between subsequent windows.(Defaults to 0)')) frequency_range = traits.List(value=[0.02, 0.15], usedefault=True, minlen=2, @@ -85,10 +85,10 @@ class CoherenceAnalyzerOutputSpec(TraitedSpec): 'ROIs (in seconds)')) coherence_csv = File(desc=('A csv file containing the pairwise ' - 'coherence values')) + 'coherence values')) timedelay_csv = File(desc=('A csv file containing the pairwise ' - 'time delay values')) + 'time delay values')) coherence_fig = File(desc=('Figure representing coherence values')) timedelay_fig = File(desc=('Figure representing coherence values')) @@ -110,13 +110,13 @@ def _read_csv(self): (TRs) will becomes the second (and last) dimension of the array """ - #Check that input conforms to expectations: + # Check that input conforms to expectations: first_row = open(self.inputs.in_file).readline() if not first_row[1].isalpha(): raise ValueError("First row of in_file should contain ROI names as strings of characters") roi_names = open(self.inputs.in_file).readline().replace('\"', '').strip('\n').split(',') - #Transpose, so that the time is the last dimension: + # Transpose, so that the time is the last dimension: data = np.loadtxt(self.inputs.in_file, skiprows=1, delimiter=',').T return data, roi_names @@ -133,7 +133,7 @@ def _csv2ts(self): return TS - #Rewrite _run_interface, but not run + # Rewrite _run_interface, but not run def _run_interface(self, runtime): lb, ub = self.inputs.frequency_range @@ -159,27 +159,27 @@ def _run_interface(self, runtime): freq_idx = np.where((A.frequencies > self.inputs.frequency_range[0]) * (A.frequencies < self.inputs.frequency_range[1]))[0] - #Get the coherence matrix from the analyzer, averaging on the last - #(frequency) dimension: (roi X roi array) + # Get the coherence matrix from the analyzer, averaging on the last + # (frequency) dimension: (roi X roi array) self.coherence = np.mean(A.coherence[:, :, freq_idx], -1) # Get the time delay from analyzer, (roi X roi array) self.delay = np.mean(A.delay[:, :, freq_idx], -1) return runtime - #Rewrite _list_outputs (look at BET) + # Rewrite _list_outputs (look at BET) def _list_outputs(self): outputs = self.output_spec().get() - #if isdefined(self.inputs.output_csv_file): + # if isdefined(self.inputs.output_csv_file): - #write to a csv file and assign a value to self.coherence_file (a - #file name + path) + # write to a csv file and assign a value to self.coherence_file (a + # file name + path) - #Always defined (the arrays): + # Always defined (the arrays): outputs['coherence_array'] = self.coherence outputs['timedelay_array'] = self.delay - #Conditional + # Conditional if isdefined(self.inputs.output_csv_file) and hasattr(self, 'coherence'): # we need to make a function that we call here that writes the # coherence values to this file "coherence_csv" and makes the @@ -222,30 +222,30 @@ def _make_output_figures(self): """ if self.inputs.figure_type == 'matrix': fig_coh = viz.drawmatrix_channels(self.coherence, - channel_names=self.ROIs, - color_anchor=0) + channel_names=self.ROIs, + color_anchor=0) fig_coh.savefig(fname_presuffix(self.inputs.output_figure_file, - suffix='_coherence')) + suffix='_coherence')) fig_dt = viz.drawmatrix_channels(self.delay, - channel_names=self.ROIs, - color_anchor=0) + channel_names=self.ROIs, + color_anchor=0) fig_dt.savefig(fname_presuffix(self.inputs.output_figure_file, - 
suffix='_delay')) + suffix='_delay')) else: fig_coh = viz.drawgraph_channels(self.coherence, - channel_names=self.ROIs) + channel_names=self.ROIs) fig_coh.savefig(fname_presuffix(self.inputs.output_figure_file, - suffix='_coherence')) + suffix='_coherence')) fig_dt = viz.drawgraph_channels(self.delay, - channel_names=self.ROIs) + channel_names=self.ROIs) fig_dt.savefig(fname_presuffix(self.inputs.output_figure_file, - suffix='_delay')) + suffix='_delay')) class GetTimeSeriesInputSpec(object): diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index eef23f7ca7..a270a5c0ab 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -12,19 +12,20 @@ no_nitime = not nitime.analysis.have_nitime display_available = 'DISPLAY' in os.environ and os.environ['DISPLAY'] + @skipif(no_nitime) def test_read_csv(): """Test that reading the data from csv file gives you back a reasonable time-series object """ CA = nitime.CoherenceAnalyzer() - CA.inputs.TR = 1.89 # bogus value just to pass traits test + CA.inputs.TR = 1.89 # bogus value just to pass traits test CA.inputs.in_file = example_data('fmri_timeseries_nolabels.csv') - yield assert_raises,ValueError,CA._read_csv + yield assert_raises, ValueError, CA._read_csv CA.inputs.in_file = example_data('fmri_timeseries.csv') - data,roi_names = CA._read_csv() - yield assert_equal, data[0][0],10125.9 - yield assert_equal, roi_names[0],'WM' + data, roi_names = CA._read_csv() + yield assert_equal, data[0][0], 10125.9 + yield assert_equal, roi_names[0], 'WM' @skipif(no_nitime) @@ -33,7 +34,7 @@ def test_coherence_analysis(): import nitime.analysis as nta import nitime.timeseries as ts - #This is the nipype interface analysis: + # This is the nipype interface analysis: CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 CA.inputs.in_file = example_data('fmri_timeseries.csv') @@ -44,33 +45,33 @@ def test_coherence_analysis(): CA.inputs.output_csv_file = tmp_csv o = CA.run() - yield assert_equal,o.outputs.coherence_array.shape,(31,31) + yield assert_equal, o.outputs.coherence_array.shape, (31, 31) - #This is the nitime analysis: - TR=1.89 + # This is the nitime analysis: + TR = 1.89 data_rec = np.recfromcsv(example_data('fmri_timeseries.csv')) - roi_names= np.array(data_rec.dtype.names) + roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] - data = np.zeros((len(roi_names),n_samples)) + data = np.zeros((len(roi_names), n_samples)) for n_idx, roi in enumerate(roi_names): - data[n_idx] = data_rec[roi] + data[n_idx] = data_rec[roi] - T = ts.TimeSeries(data,sampling_interval=TR) + T = ts.TimeSeries(data, sampling_interval=TR) - yield assert_equal,CA._csv2ts().data,T.data + yield assert_equal, CA._csv2ts().data, T.data T.metadata['roi'] = roi_names - C = nta.CoherenceAnalyzer(T,method=dict(this_method='welch', + C = nta.CoherenceAnalyzer(T, method=dict(this_method='welch', NFFT=CA.inputs.NFFT, n_overlap=CA.inputs.n_overlap)) - freq_idx = np.where((C.frequencies>CA.inputs.frequency_range[0]) * - (C.frequencies CA.inputs.frequency_range[0]) * + (C.frequencies < CA.inputs.frequency_range[1]))[0] - #Extract the coherence and average across these frequency bands: - coh = np.mean(C.coherence[:,:,freq_idx],-1) #Averaging on the last dimension + # Extract the coherence and average across these frequency bands: + coh = np.mean(C.coherence[:, :, freq_idx], -1) # Averaging on the last dimension - yield assert_equal,o.outputs.coherence_array,coh + yield 
assert_equal, o.outputs.coherence_array, coh diff --git a/nipype/interfaces/slicer/base.py b/nipype/interfaces/slicer/base.py index 899f96bb9e..de00883265 100644 --- a/nipype/interfaces/slicer/base.py +++ b/nipype/interfaces/slicer/base.py @@ -1,4 +1,5 @@ from ..base import SEMLikeCommandLine + class SlicerCommandLine(SEMLikeCommandLine): pass diff --git a/nipype/interfaces/slicer/converters.py b/nipype/interfaces/slicer/converters.py index 50ab468c47..c55656fd4b 100644 --- a/nipype/interfaces/slicer/converters.py +++ b/nipype/interfaces/slicer/converters.py @@ -42,7 +42,7 @@ class DicomToNrrdConverter(SEMLikeCommandLine): input_spec = DicomToNrrdConverterInputSpec output_spec = DicomToNrrdConverterOutputSpec _cmd = "DicomToNrrdConverter " - _outputs_filenames = {'outputDirectory':'outputDirectory'} + _outputs_filenames = {'outputDirectory': 'outputDirectory'} class OrientScalarVolumeInputSpec(CommandLineInputSpec): @@ -75,4 +75,4 @@ class OrientScalarVolume(SEMLikeCommandLine): input_spec = OrientScalarVolumeInputSpec output_spec = OrientScalarVolumeOutputSpec _cmd = "OrientScalarVolume " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/diffusion/diffusion.py b/nipype/interfaces/slicer/diffusion/diffusion.py index c935c71a8e..cb87deb4f5 100644 --- a/nipype/interfaces/slicer/diffusion/diffusion.py +++ b/nipype/interfaces/slicer/diffusion/diffusion.py @@ -59,7 +59,7 @@ class ResampleDTIVolume(SEMLikeCommandLine): input_spec = ResampleDTIVolumeInputSpec output_spec = ResampleDTIVolumeOutputSpec _cmd = "ResampleDTIVolume " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class DWIRicianLMMSEFilterInputSpec(CommandLineInputSpec): @@ -104,7 +104,7 @@ class DWIRicianLMMSEFilter(SEMLikeCommandLine): input_spec = DWIRicianLMMSEFilterInputSpec output_spec = DWIRicianLMMSEFilterOutputSpec _cmd = "DWIRicianLMMSEFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class TractographyLabelMapSeedingInputSpec(CommandLineInputSpec): @@ -154,7 +154,7 @@ class TractographyLabelMapSeeding(SEMLikeCommandLine): input_spec = TractographyLabelMapSeedingInputSpec output_spec = TractographyLabelMapSeedingOutputSpec _cmd = "TractographyLabelMapSeeding " - _outputs_filenames = {'OutputFibers':'OutputFibers.vtk','outputdirectory':'outputdirectory'} + _outputs_filenames = {'OutputFibers': 'OutputFibers.vtk', 'outputdirectory': 'outputdirectory'} class DWIJointRicianLMMSEFilterInputSpec(CommandLineInputSpec): @@ -193,7 +193,7 @@ class DWIJointRicianLMMSEFilter(SEMLikeCommandLine): input_spec = DWIJointRicianLMMSEFilterInputSpec output_spec = DWIJointRicianLMMSEFilterOutputSpec _cmd = "DWIJointRicianLMMSEFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class DiffusionWeightedVolumeMaskingInputSpec(CommandLineInputSpec): @@ -229,7 +229,7 @@ class DiffusionWeightedVolumeMasking(SEMLikeCommandLine): input_spec = DiffusionWeightedVolumeMaskingInputSpec output_spec = DiffusionWeightedVolumeMaskingOutputSpec _cmd = "DiffusionWeightedVolumeMasking " - _outputs_filenames = {'outputBaseline':'outputBaseline.nii','thresholdMask':'thresholdMask.nii'} + _outputs_filenames = {'outputBaseline': 'outputBaseline.nii', 'thresholdMask': 'thresholdMask.nii'} class DTIimportInputSpec(CommandLineInputSpec): @@ 
-262,7 +262,7 @@ class DTIimport(SEMLikeCommandLine): input_spec = DTIimportInputSpec output_spec = DTIimportOutputSpec _cmd = "DTIimport " - _outputs_filenames = {'outputTensor':'outputTensor.nii'} + _outputs_filenames = {'outputTensor': 'outputTensor.nii'} class DWIToDTIEstimationInputSpec(CommandLineInputSpec): @@ -303,7 +303,7 @@ class DWIToDTIEstimation(SEMLikeCommandLine): input_spec = DWIToDTIEstimationInputSpec output_spec = DWIToDTIEstimationOutputSpec _cmd = "DWIToDTIEstimation " - _outputs_filenames = {'outputTensor':'outputTensor.nii','outputBaseline':'outputBaseline.nii'} + _outputs_filenames = {'outputTensor': 'outputTensor.nii', 'outputBaseline': 'outputBaseline.nii'} class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec): @@ -336,7 +336,7 @@ class DiffusionTensorScalarMeasurements(SEMLikeCommandLine): input_spec = DiffusionTensorScalarMeasurementsInputSpec output_spec = DiffusionTensorScalarMeasurementsOutputSpec _cmd = "DiffusionTensorScalarMeasurements " - _outputs_filenames = {'outputScalar':'outputScalar.nii'} + _outputs_filenames = {'outputScalar': 'outputScalar.nii'} class DTIexportInputSpec(CommandLineInputSpec): @@ -368,4 +368,4 @@ class DTIexport(SEMLikeCommandLine): input_spec = DTIexportInputSpec output_spec = DTIexportOutputSpec _cmd = "DTIexport " - _outputs_filenames = {'outputFile':'outputFile'} + _outputs_filenames = {'outputFile': 'outputFile'} diff --git a/nipype/interfaces/slicer/filtering/arithmetic.py b/nipype/interfaces/slicer/filtering/arithmetic.py index 4454efcddf..cfde7f5d02 100644 --- a/nipype/interfaces/slicer/filtering/arithmetic.py +++ b/nipype/interfaces/slicer/filtering/arithmetic.py @@ -37,7 +37,7 @@ class MultiplyScalarVolumes(SEMLikeCommandLine): input_spec = MultiplyScalarVolumesInputSpec output_spec = MultiplyScalarVolumesOutputSpec _cmd = "MultiplyScalarVolumes " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class MaskScalarVolumeInputSpec(CommandLineInputSpec): @@ -72,7 +72,7 @@ class MaskScalarVolume(SEMLikeCommandLine): input_spec = MaskScalarVolumeInputSpec output_spec = MaskScalarVolumeOutputSpec _cmd = "MaskScalarVolume " - _outputs_filenames = {'OutputVolume':'OutputVolume.nii'} + _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} class SubtractScalarVolumesInputSpec(CommandLineInputSpec): @@ -106,7 +106,7 @@ class SubtractScalarVolumes(SEMLikeCommandLine): input_spec = SubtractScalarVolumesInputSpec output_spec = SubtractScalarVolumesOutputSpec _cmd = "SubtractScalarVolumes " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class AddScalarVolumesInputSpec(CommandLineInputSpec): @@ -140,7 +140,7 @@ class AddScalarVolumes(SEMLikeCommandLine): input_spec = AddScalarVolumesInputSpec output_spec = AddScalarVolumesOutputSpec _cmd = "AddScalarVolumes " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class CastScalarVolumeInputSpec(CommandLineInputSpec): @@ -175,4 +175,4 @@ class CastScalarVolume(SEMLikeCommandLine): input_spec = CastScalarVolumeInputSpec output_spec = CastScalarVolumeOutputSpec _cmd = "CastScalarVolume " - _outputs_filenames = {'OutputVolume':'OutputVolume.nii'} + _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/checkerboardfilter.py b/nipype/interfaces/slicer/filtering/checkerboardfilter.py index 91cf2441a1..894777bbdf 100644 --- 
a/nipype/interfaces/slicer/filtering/checkerboardfilter.py +++ b/nipype/interfaces/slicer/filtering/checkerboardfilter.py @@ -37,4 +37,4 @@ class CheckerBoardFilter(SEMLikeCommandLine): input_spec = CheckerBoardFilterInputSpec output_spec = CheckerBoardFilterOutputSpec _cmd = "CheckerBoardFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/denoising.py b/nipype/interfaces/slicer/filtering/denoising.py index 4006fc80b8..7c506d4839 100644 --- a/nipype/interfaces/slicer/filtering/denoising.py +++ b/nipype/interfaces/slicer/filtering/denoising.py @@ -40,7 +40,7 @@ class GradientAnisotropicDiffusion(SEMLikeCommandLine): input_spec = GradientAnisotropicDiffusionInputSpec output_spec = GradientAnisotropicDiffusionOutputSpec _cmd = "GradientAnisotropicDiffusion " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class CurvatureAnisotropicDiffusionInputSpec(CommandLineInputSpec): @@ -79,7 +79,7 @@ class CurvatureAnisotropicDiffusion(SEMLikeCommandLine): input_spec = CurvatureAnisotropicDiffusionInputSpec output_spec = CurvatureAnisotropicDiffusionOutputSpec _cmd = "CurvatureAnisotropicDiffusion " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class GaussianBlurImageFilterInputSpec(CommandLineInputSpec): @@ -112,7 +112,7 @@ class GaussianBlurImageFilter(SEMLikeCommandLine): input_spec = GaussianBlurImageFilterInputSpec output_spec = GaussianBlurImageFilterOutputSpec _cmd = "GaussianBlurImageFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class MedianImageFilterInputSpec(CommandLineInputSpec): @@ -145,4 +145,4 @@ class MedianImageFilter(SEMLikeCommandLine): input_spec = MedianImageFilterInputSpec output_spec = MedianImageFilterOutputSpec _cmd = "MedianImageFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/extractskeleton.py b/nipype/interfaces/slicer/filtering/extractskeleton.py index 389bf8324e..0c516850db 100644 --- a/nipype/interfaces/slicer/filtering/extractskeleton.py +++ b/nipype/interfaces/slicer/filtering/extractskeleton.py @@ -39,4 +39,4 @@ class ExtractSkeleton(SEMLikeCommandLine): input_spec = ExtractSkeletonInputSpec output_spec = ExtractSkeletonOutputSpec _cmd = "ExtractSkeleton " - _outputs_filenames = {'OutputImageFileName':'OutputImageFileName.nii'} + _outputs_filenames = {'OutputImageFileName': 'OutputImageFileName.nii'} diff --git a/nipype/interfaces/slicer/filtering/histogrammatching.py b/nipype/interfaces/slicer/filtering/histogrammatching.py index fbac35af31..beaeb044bc 100644 --- a/nipype/interfaces/slicer/filtering/histogrammatching.py +++ b/nipype/interfaces/slicer/filtering/histogrammatching.py @@ -45,4 +45,4 @@ class HistogramMatching(SEMLikeCommandLine): input_spec = HistogramMatchingInputSpec output_spec = HistogramMatchingOutputSpec _cmd = "HistogramMatching " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/imagelabelcombine.py b/nipype/interfaces/slicer/filtering/imagelabelcombine.py index d6e0ddb24f..36611de864 100644 --- a/nipype/interfaces/slicer/filtering/imagelabelcombine.py +++ 
b/nipype/interfaces/slicer/filtering/imagelabelcombine.py @@ -35,4 +35,4 @@ class ImageLabelCombine(SEMLikeCommandLine): input_spec = ImageLabelCombineInputSpec output_spec = ImageLabelCombineOutputSpec _cmd = "ImageLabelCombine " - _outputs_filenames = {'OutputLabelMap':'OutputLabelMap.nii'} + _outputs_filenames = {'OutputLabelMap': 'OutputLabelMap.nii'} diff --git a/nipype/interfaces/slicer/filtering/morphology.py b/nipype/interfaces/slicer/filtering/morphology.py index 5d55ef5f3d..7214828d95 100644 --- a/nipype/interfaces/slicer/filtering/morphology.py +++ b/nipype/interfaces/slicer/filtering/morphology.py @@ -45,7 +45,7 @@ class GrayscaleGrindPeakImageFilter(SEMLikeCommandLine): input_spec = GrayscaleGrindPeakImageFilterInputSpec output_spec = GrayscaleGrindPeakImageFilterOutputSpec _cmd = "GrayscaleGrindPeakImageFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class GrayscaleFillHoleImageFilterInputSpec(CommandLineInputSpec): @@ -85,4 +85,4 @@ class GrayscaleFillHoleImageFilter(SEMLikeCommandLine): input_spec = GrayscaleFillHoleImageFilterInputSpec output_spec = GrayscaleFillHoleImageFilterOutputSpec _cmd = "GrayscaleFillHoleImageFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py index ad1c914083..b56d4d3af2 100644 --- a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py +++ b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py @@ -46,4 +46,4 @@ class N4ITKBiasFieldCorrection(SEMLikeCommandLine): input_spec = N4ITKBiasFieldCorrectionInputSpec output_spec = N4ITKBiasFieldCorrectionOutputSpec _cmd = "N4ITKBiasFieldCorrection " - _outputs_filenames = {'outputimage':'outputimage.nii','outputbiasfield':'outputbiasfield.nii'} + _outputs_filenames = {'outputimage': 'outputimage.nii', 'outputbiasfield': 'outputbiasfield.nii'} diff --git a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py index 2963eb483e..b90694118f 100644 --- a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py +++ b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py @@ -61,4 +61,4 @@ class ResampleScalarVectorDWIVolume(SEMLikeCommandLine): input_spec = ResampleScalarVectorDWIVolumeInputSpec output_spec = ResampleScalarVectorDWIVolumeOutputSpec _cmd = "ResampleScalarVectorDWIVolume " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py index d73a6af5f6..63e7e5a7b7 100644 --- a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py +++ b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py @@ -40,4 +40,4 @@ class ThresholdScalarVolume(SEMLikeCommandLine): input_spec = ThresholdScalarVolumeInputSpec output_spec = ThresholdScalarVolumeOutputSpec _cmd = "ThresholdScalarVolume " - _outputs_filenames = {'OutputVolume':'OutputVolume.nii'} + _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} diff --git a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py index db9b86a454..5dd42d9437 
100644 --- a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py +++ b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py @@ -39,4 +39,4 @@ class VotingBinaryHoleFillingImageFilter(SEMLikeCommandLine): input_spec = VotingBinaryHoleFillingImageFilterInputSpec output_spec = VotingBinaryHoleFillingImageFilterOutputSpec _cmd = "VotingBinaryHoleFillingImageFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index d784e6f5fe..73ce553ce8 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ b/nipype/interfaces/slicer/generate_classes.py @@ -114,7 +114,7 @@ def generate_all_classes(modules_list=[], launcher=[], redirect_x=False, mipav_h print("=" * 80) print("Generating Definition for module {0}".format(module)) print("^" * 80) - package, code, module = generate_class(module, launcher, redirect_x = redirect_x, mipav_hacks=mipav_hacks) + package, code, module = generate_class(module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks) cur_package = all_code module_name = package.strip().split(" ")[0].split(".")[-1] for package in package.strip().split(" ")[0].split(".")[:-1]: @@ -129,7 +129,7 @@ def generate_all_classes(modules_list=[], launcher=[], redirect_x=False, mipav_h crawl_code_struct(all_code, os.getcwd()) -def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x = False, mipav_hacks=False): +def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x=False, mipav_hacks=False): dom = grab_xml(module, launcher, mipav_hacks=mipav_hacks) if strip_module_name_prefix: module_name = module.split(".")[-1] @@ -139,7 +139,7 @@ def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x = outputTraits = [] outputs_filenames = {} - #self._outputs_nodes = [] + # self._outputs_nodes = [] class_string = "\"\"\"" @@ -166,13 +166,13 @@ def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x = longFlagNode = param.getElementsByTagName('longflag') if longFlagNode: - ## Prefer to use longFlag as name if it is given, rather than the parameter name + # Prefer to use longFlag as name if it is given, rather than the parameter name longFlagName = longFlagNode[0].firstChild.nodeValue - ## SEM automatically strips prefixed "--" or "-" from from xml before processing - ## we need to replicate that behavior here The following - ## two nodes in xml have the same behavior in the program - ## --test - ## test + # SEM automatically strips prefixed "--" or "-" from from xml before processing + # we need to replicate that behavior here The following + # two nodes in xml have the same behavior in the program + # --test + # test longFlagName = longFlagName.lstrip(" -").rstrip(" ") name = longFlagName name = force_to_valid_python_variable_name(name) @@ -254,7 +254,7 @@ def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x = "%s = traits.Either(traits.Bool, %s(%s), %s)" % (name, type, parse_values( - values).replace("exists=True", ""), + values).replace("exists=True", ""), parse_params(traitsParams))) traitsParams["exists"] = True traitsParams.pop("argstr") @@ -281,7 +281,6 @@ def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x = 'xMaxProcess = traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)'] 
inputTraits += compulsory_inputs - input_spec_code = "class " + module_name + "InputSpec(CommandLineInputSpec):\n" for trait in inputTraits: input_spec_code += " " + trait + "\n" @@ -310,22 +309,21 @@ def generate_class(module, launcher, strip_module_name_prefix=True, redirect_x = %output_filenames_code%\n""" template += " _redirect_x = {0}\n".format(str(redirect_x)) - main_class = template.replace('%class_str%', class_string).replace("%module_name%", module_name).replace("%name%", module).replace("%output_filenames_code%", output_filenames_code).replace("%launcher%", " ".join(launcher)) return category, input_spec_code + output_spec_code + main_class, module_name def grab_xml(module, launcher, mipav_hacks=False): -# cmd = CommandLine(command = "Slicer3", args="--launch %s --xml"%module) -# ret = cmd.run() + # cmd = CommandLine(command = "Slicer3", args="--launch %s --xml"%module) + # ret = cmd.run() command_list = launcher[:] # force copy to preserve original command_list.extend([module, "--xml"]) final_command = " ".join(command_list) xmlReturnValue = subprocess.Popen( final_command, stdout=subprocess.PIPE, shell=True).communicate()[0] if mipav_hacks: - #workaround for a jist bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7234&group_id=228&atid=942 + # workaround for a jist bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7234&group_id=228&atid=942 new_xml = "" replace_closing_tag = False for line in xmlReturnValue.splitlines(): @@ -340,7 +338,7 @@ def grab_xml(module, launcher, mipav_hacks=False): xmlReturnValue = new_xml - #workaround for a JIST bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7233&group_id=228&atid=942 + # workaround for a JIST bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7233&group_id=228&atid=942 if xmlReturnValue.strip().endswith("XML"): xmlReturnValue = xmlReturnValue.strip()[:-3] if xmlReturnValue.strip().startswith("Error: Unable to set default atlas"): @@ -380,8 +378,8 @@ def parse_values(values): def gen_filename_from_param(param, base): fileExtensions = param.getAttribute("fileExtensions") if fileExtensions: - ## It is possible that multiple file extensions can be specified in a - ## comma separated list, This will extract just the first extension + # It is possible that multiple file extensions can be specified in a + # comma separated list, This will extract just the first extension firstFileExtension = fileExtensions.split(',')[0] ext = firstFileExtension else: @@ -390,14 +388,14 @@ def gen_filename_from_param(param, base): return base + ext if __name__ == "__main__": - ## NOTE: For now either the launcher needs to be found on the default path, or - ## every tool in the modules list must be found on the default path - ## AND calling the module with --xml must be supported and compliant. + # NOTE: For now either the launcher needs to be found on the default path, or + # every tool in the modules list must be found on the default path + # AND calling the module with --xml must be supported and compliant. 
modules_list = ['MedianImageFilter', 'CheckerBoardFilter', 'EMSegmentCommandLine', 'GrayscaleFillHoleImageFilter', - #'CreateDICOMSeries', #missing channel + # 'CreateDICOMSeries', #missing channel 'TractographyLabelMapSeeding', 'IntensityDifferenceMetric', 'DWIToDTIEstimation', @@ -456,9 +454,9 @@ def gen_filename_from_param(param, base): 'EMSegmentTransformToNewFormat', 'BSplineToDeformationField'] - ## SlicerExecutionModel compliant tools that are usually statically built, and don't need the Slicer3 --launcher - generate_all_classes(modules_list=modules_list,launcher=[]) - ## Tools compliant with SlicerExecutionModel called from the Slicer environment (for shared lib compatibility) - #launcher = ['/home/raid3/gorgolewski/software/slicer/Slicer', '--launch'] - #generate_all_classes(modules_list=modules_list, launcher=launcher) - #generate_all_classes(modules_list=['BRAINSABC'], launcher=[] ) + # SlicerExecutionModel compliant tools that are usually statically built, and don't need the Slicer3 --launcher + generate_all_classes(modules_list=modules_list, launcher=[]) + # Tools compliant with SlicerExecutionModel called from the Slicer environment (for shared lib compatibility) + # launcher = ['/home/raid3/gorgolewski/software/slicer/Slicer', '--launch'] + # generate_all_classes(modules_list=modules_list, launcher=launcher) + # generate_all_classes(modules_list=['BRAINSABC'], launcher=[] ) diff --git a/nipype/interfaces/slicer/legacy/converters.py b/nipype/interfaces/slicer/legacy/converters.py index 3920cd23ea..fd1817c06f 100644 --- a/nipype/interfaces/slicer/legacy/converters.py +++ b/nipype/interfaces/slicer/legacy/converters.py @@ -36,4 +36,4 @@ class BSplineToDeformationField(SEMLikeCommandLine): input_spec = BSplineToDeformationFieldInputSpec output_spec = BSplineToDeformationFieldOutputSpec _cmd = "BSplineToDeformationField " - _outputs_filenames = {'defImage':'defImage.nii'} + _outputs_filenames = {'defImage': 'defImage.nii'} diff --git a/nipype/interfaces/slicer/legacy/diffusion/denoising.py b/nipype/interfaces/slicer/legacy/diffusion/denoising.py index ec0e9e6252..bbec5f7d9b 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/denoising.py +++ b/nipype/interfaces/slicer/legacy/diffusion/denoising.py @@ -44,4 +44,4 @@ class DWIUnbiasedNonLocalMeansFilter(SEMLikeCommandLine): input_spec = DWIUnbiasedNonLocalMeansFilterInputSpec output_spec = DWIUnbiasedNonLocalMeansFilterOutputSpec _cmd = "DWIUnbiasedNonLocalMeansFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/legacy/filtering.py b/nipype/interfaces/slicer/legacy/filtering.py index 3197527904..b6684d2d64 100644 --- a/nipype/interfaces/slicer/legacy/filtering.py +++ b/nipype/interfaces/slicer/legacy/filtering.py @@ -42,7 +42,7 @@ class OtsuThresholdImageFilter(SEMLikeCommandLine): input_spec = OtsuThresholdImageFilterInputSpec output_spec = OtsuThresholdImageFilterOutputSpec _cmd = "OtsuThresholdImageFilter " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class ResampleScalarVolumeInputSpec(CommandLineInputSpec): @@ -76,4 +76,4 @@ class ResampleScalarVolume(SEMLikeCommandLine): input_spec = ResampleScalarVolumeInputSpec output_spec = ResampleScalarVolumeOutputSpec _cmd = "ResampleScalarVolume " - _outputs_filenames = {'OutputVolume':'OutputVolume.nii'} + _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} diff --git 
a/nipype/interfaces/slicer/legacy/registration.py b/nipype/interfaces/slicer/legacy/registration.py index dc41db16d4..5ba5baf3f5 100644 --- a/nipype/interfaces/slicer/legacy/registration.py +++ b/nipype/interfaces/slicer/legacy/registration.py @@ -48,7 +48,7 @@ class BSplineDeformableRegistration(SEMLikeCommandLine): input_spec = BSplineDeformableRegistrationInputSpec output_spec = BSplineDeformableRegistrationOutputSpec _cmd = "BSplineDeformableRegistration " - _outputs_filenames = {'resampledmovingfilename':'resampledmovingfilename.nii','outputtransform':'outputtransform.txt','outputwarp':'outputwarp.nrrd'} + _outputs_filenames = {'resampledmovingfilename': 'resampledmovingfilename.nii', 'outputtransform': 'outputtransform.txt', 'outputwarp': 'outputwarp.nrrd'} class AffineRegistrationInputSpec(CommandLineInputSpec): @@ -96,7 +96,7 @@ class AffineRegistration(SEMLikeCommandLine): input_spec = AffineRegistrationInputSpec output_spec = AffineRegistrationOutputSpec _cmd = "AffineRegistration " - _outputs_filenames = {'resampledmovingfilename':'resampledmovingfilename.nii','outputtransform':'outputtransform.txt'} + _outputs_filenames = {'resampledmovingfilename': 'resampledmovingfilename.nii', 'outputtransform': 'outputtransform.txt'} class MultiResolutionAffineRegistrationInputSpec(CommandLineInputSpec): @@ -138,7 +138,7 @@ class MultiResolutionAffineRegistration(SEMLikeCommandLine): input_spec = MultiResolutionAffineRegistrationInputSpec output_spec = MultiResolutionAffineRegistrationOutputSpec _cmd = "MultiResolutionAffineRegistration " - _outputs_filenames = {'resampledImage':'resampledImage.nii','saveTransform':'saveTransform.txt'} + _outputs_filenames = {'resampledImage': 'resampledImage.nii', 'saveTransform': 'saveTransform.txt'} class RigidRegistrationInputSpec(CommandLineInputSpec): @@ -192,7 +192,7 @@ class RigidRegistration(SEMLikeCommandLine): input_spec = RigidRegistrationInputSpec output_spec = RigidRegistrationOutputSpec _cmd = "RigidRegistration " - _outputs_filenames = {'resampledmovingfilename':'resampledmovingfilename.nii','outputtransform':'outputtransform.txt'} + _outputs_filenames = {'resampledmovingfilename': 'resampledmovingfilename.nii', 'outputtransform': 'outputtransform.txt'} class LinearRegistrationInputSpec(CommandLineInputSpec): @@ -235,7 +235,7 @@ class LinearRegistration(SEMLikeCommandLine): input_spec = LinearRegistrationInputSpec output_spec = LinearRegistrationOutputSpec _cmd = "LinearRegistration " - _outputs_filenames = {'resampledmovingfilename':'resampledmovingfilename.nii','outputtransform':'outputtransform.txt'} + _outputs_filenames = {'resampledmovingfilename': 'resampledmovingfilename.nii', 'outputtransform': 'outputtransform.txt'} class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec): @@ -294,4 +294,4 @@ class ExpertAutomatedRegistration(SEMLikeCommandLine): input_spec = ExpertAutomatedRegistrationInputSpec output_spec = ExpertAutomatedRegistrationOutputSpec _cmd = "ExpertAutomatedRegistration " - _outputs_filenames = {'resampledImage':'resampledImage.nii','saveTransform':'saveTransform.txt'} + _outputs_filenames = {'resampledImage': 'resampledImage.nii', 'saveTransform': 'saveTransform.txt'} diff --git a/nipype/interfaces/slicer/legacy/segmentation.py b/nipype/interfaces/slicer/legacy/segmentation.py index 914ea74e60..af724c9f96 100644 --- a/nipype/interfaces/slicer/legacy/segmentation.py +++ b/nipype/interfaces/slicer/legacy/segmentation.py @@ -39,4 +39,4 @@ class OtsuThresholdSegmentation(SEMLikeCommandLine): input_spec = 
OtsuThresholdSegmentationInputSpec output_spec = OtsuThresholdSegmentationOutputSpec _cmd = "OtsuThresholdSegmentation " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/quantification/changequantification.py b/nipype/interfaces/slicer/quantification/changequantification.py index 949be220ea..f5225065e2 100644 --- a/nipype/interfaces/slicer/quantification/changequantification.py +++ b/nipype/interfaces/slicer/quantification/changequantification.py @@ -46,4 +46,4 @@ class IntensityDifferenceMetric(SEMLikeCommandLine): input_spec = IntensityDifferenceMetricInputSpec output_spec = IntensityDifferenceMetricOutputSpec _cmd = "IntensityDifferenceMetric " - _outputs_filenames = {'outputVolume':'outputVolume.nii','reportFileName':'reportFileName'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii', 'reportFileName': 'reportFileName'} diff --git a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py index 32c3b7e833..8dfe67b546 100644 --- a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py +++ b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py @@ -43,4 +43,4 @@ class PETStandardUptakeValueComputation(SEMLikeCommandLine): input_spec = PETStandardUptakeValueComputationInputSpec output_spec = PETStandardUptakeValueComputationOutputSpec _cmd = "PETStandardUptakeValueComputation " - _outputs_filenames = {'csvFile':'csvFile.csv'} + _outputs_filenames = {'csvFile': 'csvFile.csv'} diff --git a/nipype/interfaces/slicer/registration/brainsfit.py b/nipype/interfaces/slicer/registration/brainsfit.py index e3ca021f9f..b093235b84 100644 --- a/nipype/interfaces/slicer/registration/brainsfit.py +++ b/nipype/interfaces/slicer/registration/brainsfit.py @@ -102,4 +102,4 @@ class BRAINSFit(SEMLikeCommandLine): input_spec = BRAINSFitInputSpec output_spec = BRAINSFitOutputSpec _cmd = "BRAINSFit " - _outputs_filenames = {'outputVolume':'outputVolume.nii','bsplineTransform':'bsplineTransform.mat','outputTransform':'outputTransform.mat','outputFixedVolumeROI':'outputFixedVolumeROI.nii','strippedOutputTransform':'strippedOutputTransform.mat','outputMovingVolumeROI':'outputMovingVolumeROI.nii','linearTransform':'linearTransform.mat'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii', 'bsplineTransform': 'bsplineTransform.mat', 'outputTransform': 'outputTransform.mat', 'outputFixedVolumeROI': 'outputFixedVolumeROI.nii', 'strippedOutputTransform': 'strippedOutputTransform.mat', 'outputMovingVolumeROI': 'outputMovingVolumeROI.nii', 'linearTransform': 'linearTransform.mat'} diff --git a/nipype/interfaces/slicer/registration/brainsresample.py b/nipype/interfaces/slicer/registration/brainsresample.py index 707ae913d2..c7f2ba63a2 100644 --- a/nipype/interfaces/slicer/registration/brainsresample.py +++ b/nipype/interfaces/slicer/registration/brainsresample.py @@ -48,4 +48,4 @@ class BRAINSResample(SEMLikeCommandLine): input_spec = BRAINSResampleInputSpec output_spec = BRAINSResampleOutputSpec _cmd = "BRAINSResample " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/registration/specialized.py b/nipype/interfaces/slicer/registration/specialized.py index a8e60c4858..3123cd2e63 100644 --- a/nipype/interfaces/slicer/registration/specialized.py +++ 
b/nipype/interfaces/slicer/registration/specialized.py @@ -39,7 +39,7 @@ class ACPCTransform(SEMLikeCommandLine): input_spec = ACPCTransformInputSpec output_spec = ACPCTransformOutputSpec _cmd = "ACPCTransform " - _outputs_filenames = {'outputTransform':'outputTransform.mat'} + _outputs_filenames = {'outputTransform': 'outputTransform.mat'} class FiducialRegistrationInputSpec(CommandLineInputSpec): @@ -75,7 +75,7 @@ class FiducialRegistration(SEMLikeCommandLine): input_spec = FiducialRegistrationInputSpec output_spec = FiducialRegistrationOutputSpec _cmd = "FiducialRegistration " - _outputs_filenames = {'saveTransform':'saveTransform.txt'} + _outputs_filenames = {'saveTransform': 'saveTransform.txt'} class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): @@ -153,7 +153,7 @@ class VBRAINSDemonWarp(SEMLikeCommandLine): input_spec = VBRAINSDemonWarpInputSpec output_spec = VBRAINSDemonWarpOutputSpec _cmd = "VBRAINSDemonWarp " - _outputs_filenames = {'outputVolume':'outputVolume.nii','outputCheckerboardVolume':'outputCheckerboardVolume.nii','outputDisplacementFieldVolume':'outputDisplacementFieldVolume.nrrd'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii', 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd'} class BRAINSDemonWarpInputSpec(CommandLineInputSpec): @@ -230,4 +230,4 @@ class BRAINSDemonWarp(SEMLikeCommandLine): input_spec = BRAINSDemonWarpInputSpec output_spec = BRAINSDemonWarpOutputSpec _cmd = "BRAINSDemonWarp " - _outputs_filenames = {'outputVolume':'outputVolume.nii','outputCheckerboardVolume':'outputCheckerboardVolume.nii','outputDisplacementFieldVolume':'outputDisplacementFieldVolume.nrrd'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii', 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd'} diff --git a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py index b6444f64c5..ee2c9e11ea 100644 --- a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py +++ b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py @@ -42,4 +42,4 @@ class SimpleRegionGrowingSegmentation(SEMLikeCommandLine): input_spec = SimpleRegionGrowingSegmentationInputSpec output_spec = SimpleRegionGrowingSegmentationOutputSpec _cmd = "SimpleRegionGrowingSegmentation " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} diff --git a/nipype/interfaces/slicer/segmentation/specialized.py b/nipype/interfaces/slicer/segmentation/specialized.py index f39a08fa74..fef3cb7df3 100644 --- a/nipype/interfaces/slicer/segmentation/specialized.py +++ b/nipype/interfaces/slicer/segmentation/specialized.py @@ -41,7 +41,7 @@ class RobustStatisticsSegmenter(SEMLikeCommandLine): input_spec = RobustStatisticsSegmenterInputSpec output_spec = RobustStatisticsSegmenterOutputSpec _cmd = "RobustStatisticsSegmenter " - _outputs_filenames = {'segmentedImageFileName':'segmentedImageFileName.nii'} + _outputs_filenames = {'segmentedImageFileName': 'segmentedImageFileName.nii'} class EMSegmentCommandLineInputSpec(CommandLineInputSpec): @@ -99,7 +99,7 @@ class EMSegmentCommandLine(SEMLikeCommandLine): input_spec = EMSegmentCommandLineInputSpec output_spec = EMSegmentCommandLineOutputSpec _cmd = "EMSegmentCommandLine " - _outputs_filenames = 
{'generateEmptyMRMLSceneAndQuit':'generateEmptyMRMLSceneAndQuit','resultMRMLSceneFileName':'resultMRMLSceneFileName','resultVolumeFileName':'resultVolumeFileName.mhd'} + _outputs_filenames = {'generateEmptyMRMLSceneAndQuit': 'generateEmptyMRMLSceneAndQuit', 'resultMRMLSceneFileName': 'resultMRMLSceneFileName', 'resultVolumeFileName': 'resultVolumeFileName.mhd'} class BRAINSROIAutoInputSpec(CommandLineInputSpec): @@ -140,4 +140,4 @@ class BRAINSROIAuto(SEMLikeCommandLine): input_spec = BRAINSROIAutoInputSpec output_spec = BRAINSROIAutoOutputSpec _cmd = "BRAINSROIAuto " - _outputs_filenames = {'outputROIMaskVolume':'outputROIMaskVolume.nii','outputClippedVolumeROI':'outputClippedVolumeROI.nii'} + _outputs_filenames = {'outputROIMaskVolume': 'outputROIMaskVolume.nii', 'outputClippedVolumeROI': 'outputClippedVolumeROI.nii'} diff --git a/nipype/interfaces/slicer/surface.py b/nipype/interfaces/slicer/surface.py index 6e84a7875d..cd8edcf2cf 100644 --- a/nipype/interfaces/slicer/surface.py +++ b/nipype/interfaces/slicer/surface.py @@ -36,7 +36,7 @@ class MergeModels(SEMLikeCommandLine): input_spec = MergeModelsInputSpec output_spec = MergeModelsOutputSpec _cmd = "MergeModels " - _outputs_filenames = {'ModelOutput':'ModelOutput.vtk'} + _outputs_filenames = {'ModelOutput': 'ModelOutput.vtk'} class ModelToLabelMapInputSpec(CommandLineInputSpec): @@ -70,7 +70,7 @@ class ModelToLabelMap(SEMLikeCommandLine): input_spec = ModelToLabelMapInputSpec output_spec = ModelToLabelMapOutputSpec _cmd = "ModelToLabelMap " - _outputs_filenames = {'OutputVolume':'OutputVolume.nii'} + _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} class GrayscaleModelMakerInputSpec(CommandLineInputSpec): @@ -110,7 +110,7 @@ class GrayscaleModelMaker(SEMLikeCommandLine): input_spec = GrayscaleModelMakerInputSpec output_spec = GrayscaleModelMakerOutputSpec _cmd = "GrayscaleModelMaker " - _outputs_filenames = {'OutputGeometry':'OutputGeometry.vtk'} + _outputs_filenames = {'OutputGeometry': 'OutputGeometry.vtk'} class ProbeVolumeWithModelInputSpec(CommandLineInputSpec): @@ -143,7 +143,7 @@ class ProbeVolumeWithModel(SEMLikeCommandLine): input_spec = ProbeVolumeWithModelInputSpec output_spec = ProbeVolumeWithModelOutputSpec _cmd = "ProbeVolumeWithModel " - _outputs_filenames = {'OutputModel':'OutputModel.vtk'} + _outputs_filenames = {'OutputModel': 'OutputModel.vtk'} class LabelMapSmoothingInputSpec(CommandLineInputSpec): @@ -179,7 +179,7 @@ class LabelMapSmoothing(SEMLikeCommandLine): input_spec = LabelMapSmoothingInputSpec output_spec = LabelMapSmoothingOutputSpec _cmd = "LabelMapSmoothing " - _outputs_filenames = {'outputVolume':'outputVolume.nii'} + _outputs_filenames = {'outputVolume': 'outputVolume.nii'} class ModelMakerInputSpec(CommandLineInputSpec): @@ -229,4 +229,4 @@ class ModelMaker(SEMLikeCommandLine): input_spec = ModelMakerInputSpec output_spec = ModelMakerOutputSpec _cmd = "ModelMaker " - _outputs_filenames = {'modelSceneFile':'modelSceneFile.mrml'} + _outputs_filenames = {'modelSceneFile': 'modelSceneFile.mrml'} diff --git a/nipype/interfaces/slicer/utilities.py b/nipype/interfaces/slicer/utilities.py index 5bc3173048..bdc675e55a 100644 --- a/nipype/interfaces/slicer/utilities.py +++ b/nipype/interfaces/slicer/utilities.py @@ -34,4 +34,4 @@ class EMSegmentTransformToNewFormat(SEMLikeCommandLine): input_spec = EMSegmentTransformToNewFormatInputSpec output_spec = EMSegmentTransformToNewFormatOutputSpec _cmd = "EMSegmentTransformToNewFormat " - _outputs_filenames = {'outputMRMLFileName':'outputMRMLFileName.mrml'} + 
_outputs_filenames = {'outputMRMLFileName': 'outputMRMLFileName.mrml'} diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py index cc31a7ad58..44fc7ca7ac 100644 --- a/nipype/interfaces/spm/base.py +++ b/nipype/interfaces/spm/base.py @@ -482,14 +482,14 @@ def _make_matlab_command(self, contents, postscript=None): contents[0]) else: if self.jobname in ['st', 'smooth', 'preproc', 'preproc8', - 'fmri_spec', 'fmri_est', 'factorial_design', - 'defs']: + 'fmri_spec', 'fmri_est', 'factorial_design', + 'defs']: # parentheses mscript += self._generate_job('jobs{1}.%s{1}.%s(1)' % (self.jobtype, self.jobname), contents[0]) else: - #curly brackets + # curly brackets mscript += self._generate_job('jobs{1}.%s{1}.%s{1}' % (self.jobtype, self.jobname), contents[0]) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 71fc268565..c942161c8e 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -23,12 +23,12 @@ # Local imports from .base import (SPMCommand, SPMCommandInputSpec, - scans_for_fnames) + scans_for_fnames) from ..base import (Bunch, traits, TraitedSpec, File, Directory, - OutputMultiPath, InputMultiPath, isdefined) + OutputMultiPath, InputMultiPath, isdefined) from ...external.six import string_types from ...utils.filemanip import (filename_to_list, list_to_filename, - split_filename) + split_filename) from ... import logging logger = logging.getLogger('interface') @@ -42,16 +42,16 @@ class Level1DesignInputSpec(SPMCommandInputSpec): desc='Interscan interval in secs', mandatory=True) microtime_resolution = traits.Int(field='timing.fmri_t', - desc='Number of time-bins per scan in secs (opt)') + desc='Number of time-bins per scan in secs (opt)') microtime_onset = traits.Float(field='timing.fmri_t0', - desc='The onset/time-bin in seconds for alignment (opt)') + desc='The onset/time-bin in seconds for alignment (opt)') session_info = traits.Any(field='sess', desc='Session specific information generated by ``modelgen.SpecifyModel``', mandatory=True) factor_info = traits.List(traits.Dict(traits.Enum('name', 'levels')), field='fact', desc='Factor specific information file (opt)') bases = traits.Dict(traits.Enum('hrf', 'fourier', 'fourier_han', - 'gamma', 'fir'), field='bases', desc=""" + 'gamma', 'fir'), field='bases', desc=""" dict {'name':{'basesparam1':val,...}} name : string Name of basis function (hrf, fourier, fourier_han, @@ -69,14 +69,14 @@ class Level1DesignInputSpec(SPMCommandInputSpec): Number of basis functions """, mandatory=True) volterra_expansion_order = traits.Enum(1, 2, field='volt', - desc='Model interactions - yes:1, no:2') + desc='Model interactions - yes:1, no:2') global_intensity_normalization = traits.Enum('none', 'scaling', field='global', - desc='Global intensity normalization - scaling or none') + desc='Global intensity normalization - scaling or none') mask_image = File(exists=True, field='mask', desc='Image for explicitly masking the analysis') mask_threshold = traits.Either(traits.Enum('-Inf'), traits.Float(), - desc="Thresholding for the mask", - default='-Inf', usedefault=True) + desc="Thresholding for the mask", + default='-Inf', usedefault=True) model_serial_correlations = traits.Enum('AR(1)', 'FAST', 'none', field='cvi', desc=('Model serial correlations ' @@ -116,7 +116,7 @@ def _format_arg(self, opt, spec, val): """ if opt in ['spm_mat_dir', 'mask_image']: return np.array([str(val)], dtype=object) - if opt in ['session_info']: #, 'factor_info']: + if opt in ['session_info']: # , 
'factor_info']: if isinstance(val, dict): return [val] else: @@ -275,7 +275,7 @@ class EstimateContrastInputSpec(SPMCommandInputSpec): session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", - mandatory=True) + mandatory=True) beta_images = InputMultiPath(File(exists=True), desc='Parameter estimates of the design matrix', copyfile=False, @@ -326,10 +326,10 @@ def _make_matlab_command(self, _): for i, cont in enumerate(self.inputs.contrasts): cname.insert(i, cont[0]) contrasts.insert(i, Bunch(name=cont[0], - stat=cont[1], - conditions=cont[2], - weights=None, - sessions=None)) + stat=cont[1], + conditions=cont[2], + weights=None, + sessions=None)) if len(cont) >= 4: contrasts[i].weights = cont[3] if len(cont) >= 5: @@ -377,7 +377,7 @@ def _make_matlab_command(self, _): except: Exception("Contrast Estimate: could not get index of" \ " T contrast. probably not defined prior " \ - "to the F contrasts") + "to the F contrasts") script += "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;\n" % (i + 1, cl0 + 1, tidx + 1) script += "jobs{1}.stats{1}.con.consess = consess;\n" script += "if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');jobs=spm_jobman('spm5tospm8',{jobs});end\n" @@ -694,7 +694,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): cur_output = "" continue if len(line.split()) != 0 and line.split()[0] in ["clusterwise_P_FDR", "clusterwise_P_RF", "voxelwise_P_Bonf", "voxelwise_P_FDR", - "voxelwise_P_RF", "voxelwise_P_uncor"]: + "voxelwise_P_RF", "voxelwise_P_uncor"]: cur_output = line.split()[0] continue @@ -712,14 +712,14 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): xor=['threshold_mask_absolute', 'threshold_mask_relative'], desc='do not use threshold masking') threshold_mask_absolute = traits.Float(field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], - desc='use an absolute threshold') + xor=['threshold_mask_none', 'threshold_mask_relative'], + desc='use an absolute threshold') threshold_mask_relative = traits.Float(field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], - desc='threshold using a proportion of the global value') + xor=['threshold_mask_absolute', 'threshold_mask_none'], + desc='threshold using a proportion of the global value') use_implicit_threshold = traits.Bool(field='masking.im', desc='use implicit mask NaNs or zeros to threshold') - explicit_mask_file = File(field='masking.em', #requires cell + explicit_mask_file = File(field='masking.em', # requires cell desc='use an implicit mask file to threshold') global_calc_omit = traits.Bool(field='globalc.g_omit', xor=['global_calc_mean', 'global_calc_values'], @@ -728,12 +728,12 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): xor=['global_calc_omit', 'global_calc_values'], desc='use mean for global calculation') global_calc_values = traits.List(traits.Float, field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], - desc='omit global calculation') + xor=['global_calc_mean', 'global_calc_omit'], + desc='omit global calculation') no_grand_mean_scaling = traits.Bool(field='globalm.gmsca.gmsca_no', - desc='do not perform grand mean scaling') + desc='do not perform grand mean scaling') global_normalization = traits.Enum(1, 2, 3, field='globalm.glonorm', - desc='global normalization None-1, Proportional-2, ANCOVA-3') + desc='global normalization None-1, Proportional-2, ANCOVA-3') class 
FactorialDesignOutputSpec(TraitedSpec): @@ -855,9 +855,9 @@ class PairedTTestDesignInputSpec(FactorialDesignInputSpec): mandatory=True, minlen=2, desc='List of paired files') grand_mean_scaling = traits.Bool(field='des.pt.gmsca', - desc='Perform grand mean scaling') + desc='Perform grand mean scaling') ancova = traits.Bool(field='des.pt.ancova', - desc='Specify ancova-by-factor regressors') + desc='Specify ancova-by-factor regressors') class PairedTTestDesign(FactorialDesign): @@ -892,8 +892,8 @@ class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): user_covariates = InputMultiPath(traits.Dict(key_trait=traits.Enum('vector', 'name', 'centering')), - field='des.mreg.mcov', - desc='covariate dictionary {vector, name, centering}') + field='des.mreg.mcov', + desc='covariate dictionary {vector, name, centering}') class MultipleRegressionDesign(FactorialDesign): diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 77f460e2c1..caec4af720 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -138,10 +138,10 @@ class RealignInputSpec(SPMCommandInputSpec): minlen=2, maxlen=2, usedefault=True, desc='determines which images to reslice') write_interp = traits.Range(low=0, high=7, field='roptions.interp', - desc='degree of b-spline used for interpolation') + desc='degree of b-spline used for interpolation') write_wrap = traits.List(traits.Int(), minlen=3, maxlen=3, field='roptions.wrap', - desc='Check if interpolation should wrap in [x,y,z]') + desc='Check if interpolation should wrap in [x,y,z]') write_mask = traits.Bool(field='roptions.mask', desc='True/False mask output image') out_prefix = traits.String('r', field='roptions.prefix', usedefault=True, @@ -151,7 +151,7 @@ class RealignInputSpec(SPMCommandInputSpec): class RealignOutputSpec(TraitedSpec): mean_image = File(exists=True, desc='Mean image file from the realignment') modified_in_files = OutputMultiPath(traits.Either(traits.List(File(exists=True)), - File(exists=True)), + File(exists=True)), desc='Copies of all files passed to in_files.\ Headers will have been modified to align all\ images with the first, or optionally to first\ @@ -163,7 +163,7 @@ class RealignOutputSpec(TraitedSpec): resliced files. 
Otherwise, they will be copies of\ in_files that have had their headers rewritten.') realignment_parameters = OutputMultiPath(File(exists=True), - desc='Estimated translation and rotation parameters') + desc='Estimated translation and rotation parameters') class Realign(SPMCommand): @@ -269,7 +269,7 @@ class CoregisterInputSpec(SPMCommandInputSpec): copyfile=True) cost_function = traits.Enum('mi', 'nmi', 'ecc', 'ncc', field='eoptions.cost_fun', - desc="""cost function, one of: 'mi' - Mutual Information, + desc="""cost function, one of: 'mi' - Mutual Information, 'nmi' - Normalised Mutual Information, 'ecc' - Entropy Correlation Coefficient, 'ncc' - Normalised Cross Correlation""") @@ -281,10 +281,10 @@ class CoregisterInputSpec(SPMCommandInputSpec): tolerance = traits.List(traits.Float(), field='eoptions.tol', desc='acceptable tolerance for each of 12 params') write_interp = traits.Range(low=0, high=7, field='roptions.interp', - desc='degree of b-spline used for interpolation') + desc='degree of b-spline used for interpolation') write_wrap = traits.List(traits.Int(), minlen=3, maxlen=3, field='roptions.wrap', - desc='Check if interpolation should wrap in [x,y,z]') + desc='Check if interpolation should wrap in [x,y,z]') write_mask = traits.Bool(field='roptions.mask', desc='True/False mask output image') out_prefix = traits.String('r', field='roptions.prefix', usedefault=True, @@ -397,11 +397,11 @@ class NormalizeInputSpec(SPMCommandInputSpec): DCT_period_cutoff = traits.Float(field='eoptions.cutoff', desc='Cutoff of for DCT bases') nonlinear_iterations = traits.Int(field='eoptions.nits', - desc='Number of iterations of nonlinear warping') + desc='Number of iterations of nonlinear warping') nonlinear_regularization = traits.Float(field='eoptions.reg', - desc='the amount of the regularization for the nonlinear part of the normalization') + desc='the amount of the regularization for the nonlinear part of the normalization') write_preserve = traits.Bool(field='roptions.preserve', - desc='True/False warped images are modulated') + desc='True/False warped images are modulated') write_bounding_box = traits.List(traits.List(traits.Float(), minlen=3, maxlen=3), field='roptions.bb', minlen=2, maxlen=2, @@ -410,21 +410,21 @@ class NormalizeInputSpec(SPMCommandInputSpec): minlen=3, maxlen=3, desc='3-element list') write_interp = traits.Range(low=0, high=7, field='roptions.interp', - desc='degree of b-spline used for interpolation') + desc='degree of b-spline used for interpolation') write_wrap = traits.List(traits.Int(), field='roptions.wrap', - desc=('Check if interpolation should wrap in [x,y,z]' - '- list of bools')) + desc=('Check if interpolation should wrap in [x,y,z]' + '- list of bools')) out_prefix = traits.String('w', field='roptions.prefix', usedefault=True, desc='normalized output prefix') class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath(File(exists=True), - desc='MAT files containing the normalization parameters') + desc='MAT files containing the normalization parameters') normalized_source = OutputMultiPath(File(exists=True), desc='Normalized source files') normalized_files = OutputMultiPath(File(exists=True), - desc='Normalized other files') + desc='Normalized other files') class Normalize(SPMCommand): @@ -487,8 +487,8 @@ def _list_outputs(self): outputs['normalization_parameters'] = [] for imgf in filename_to_list(self.inputs.source): outputs['normalization_parameters'].append(fname_presuffix(imgf, - suffix='_sn.mat', - use_ext=False)) + 
suffix='_sn.mat', + use_ext=False)) outputs['normalization_parameters'] = list_to_filename(outputs['normalization_parameters']) if self.inputs.jobtype == "estimate": @@ -513,7 +513,7 @@ def _list_outputs(self): outputs['normalized_source'] = [] for imgf in filename_to_list(self.inputs.source): outputs['normalized_source'].append(fname_presuffix(imgf, - prefix=prefixNorm)) + prefix=prefixNorm)) return outputs @@ -524,7 +524,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): xor=['deformation_file'], mandatory=True, copyfile=True) apply_to_files = InputMultiPath(traits.Either(File(exists=True), - traits.List(File(exists=True))), + traits.List(File(exists=True))), field='subj.resample', desc='files to apply transformation to', copyfile=True) @@ -708,25 +708,25 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""") save_bias_corrected = traits.Bool(field='output.biascor', - desc='True/False produce a bias corrected image') + desc='True/False produce a bias corrected image') clean_masks = traits.Enum('no', 'light', 'thorough', field='output.cleanup', - desc="clean using estimated brain mask ('no','light','thorough')") + desc="clean using estimated brain mask ('no','light','thorough')") tissue_prob_maps = traits.List(File(exists=True), field='opts.tpm', - desc='list of gray, white & csf prob. (opt,)') + desc='list of gray, white & csf prob. (opt,)') gaussians_per_class = traits.List(traits.Int(), field='opts.ngaus', - desc='num Gaussians capture intensity distribution') + desc='num Gaussians capture intensity distribution') affine_regularization = traits.Enum('mni', 'eastern', 'subj', 'none', '', field='opts.regtype', - desc='Possible options: "mni", "eastern", "subj", "none" (no reguralisation), "" (no affine registration)') + desc='Possible options: "mni", "eastern", "subj", "none" (no reguralisation), "" (no affine registration)') warping_regularization = traits.Float(field='opts.warpreg', - desc='Controls balance between parameters and data') + desc='Controls balance between parameters and data') warp_frequency_cutoff = traits.Float(field='opts.warpco', desc='Cutoff of DCT bases') bias_regularization = traits.Enum(0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, field='opts.biasreg', - desc='no(0) - extremely heavy (10)') + desc='no(0) - extremely heavy (10)') bias_fwhm = traits.Enum(30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 'Inf', field='opts.biasfwhm', desc='FWHM of Gaussian smoothness of bias') sampling_distance = traits.Float(field='opts.samp', - desc='Sampling distance on data for parameter estimation') + desc='Sampling distance on data for parameter estimation') mask_image = File(exists=True, field='opts.msk', desc='Binary image to restrict parameter estimation ') @@ -810,8 +810,8 @@ def _list_outputs(self): if getattr(self.inputs, outtype)[idx]: outfield = '%s_%s_image' % (image, tissue) outputs[outfield] = fname_presuffix(f, - prefix='%sc%d' % (prefix, - tidx+1)) + prefix='%sc%d' % (prefix, + tidx+1)) if isdefined(self.inputs.save_bias_corrected) and \ self.inputs.save_bias_corrected: outputs['bias_corrected_image'] = fname_presuffix(f, prefix='m') @@ -824,29 +824,29 @@ def _list_outputs(self): class NewSegmentInputSpec(SPMCommandInputSpec): channel_files = InputMultiPath(File(exists=True), - desc="A list of files to be segmented", - field='channel', copyfile=False, mandatory=True) + desc="A list of files to be segmented", + field='channel', copyfile=False, mandatory=True) 
channel_info = traits.Tuple(traits.Float(), traits.Float(), traits.Tuple(traits.Bool, traits.Bool), desc="""A tuple with the following fields: - bias reguralisation (0-10) - FWHM of Gaussian smoothness of bias - which maps to save (Corrected, Field) - a tuple of two boolean values""", - field='channel') + field='channel') tissues = traits.List(traits.Tuple(traits.Tuple(File(exists=True), traits.Int()), traits.Int(), traits.Tuple(traits.Bool, traits.Bool), traits.Tuple(traits.Bool, traits.Bool)), - desc="""A list of tuples (one per tissue) with the following fields: + desc="""A list of tuples (one per tissue) with the following fields: - tissue probability map (4D), 1-based index to frame - number of gaussians - which maps to save [Native, DARTEL] - a tuple of two boolean values - which maps to save [Unmodulated, Modulated] - a tuple of two boolean values""", - field='tissue') + field='tissue') affine_regularization = traits.Enum('mni', 'eastern', 'subj', 'none', field='warp.affreg', - desc='mni, eastern, subj, none ') + desc='mni, eastern, subj, none ') warping_regularization = traits.Float(field='warp.reg', - desc='Aproximate distance between sampling points.') + desc='Aproximate distance between sampling points.') sampling_distance = traits.Float(field='warp.samp', - desc='Sampling distance on data for parameter estimation') + desc='Sampling distance on data for parameter estimation') write_deformation_fields = traits.List(traits.Bool(), minlen=2, maxlen=2, field='warp.write', desc="Which deformation fields to write:[Inverse, Forward]") @@ -1166,9 +1166,9 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): field='mni_norm.data.subjs.flowfields', mandatory=True) apply_to_files = InputMultiPath(File(exists=True), - desc="Files to apply the transform to", - field='mni_norm.data.subjs.images', - mandatory=True, copyfile=False) + desc="Files to apply the transform to", + field='mni_norm.data.subjs.images', + mandatory=True, copyfile=False) voxel_size = traits.Tuple(traits.Float, traits.Float, traits.Float, desc="Voxel sizes for output file", field='mni_norm.vox') @@ -1252,9 +1252,9 @@ def _list_outputs(self): class CreateWarpedInputSpec(SPMCommandInputSpec): image_files = InputMultiPath(File(exists=True), - desc="A list of files to be warped", - field='crt_warped.images', copyfile=False, - mandatory=True) + desc="A list of files to be warped", + field='crt_warped.images', copyfile=False, + mandatory=True) flowfield_files = InputMultiPath(File(exists=True), desc="DARTEL flow fields u_rc1*", field='crt_warped.flowfields', @@ -1558,7 +1558,7 @@ def _list_outputs(self): os.path.join(pth, "%s_seg8.mat" % base)) for i, tis in enumerate(['gm', 'wm', 'csf']): - # native space + # native space if getattr(self.inputs, '%s_native' % tis): outputs['native_class_images'][i].append( diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index 98ac4f6a37..4a7ce2c852 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -33,7 +33,7 @@ def create_files_in_directory(): hdr.set_data_shape(shape) img = np.random.random(shape) nb.save(nb.Nifti1Image(img, np.eye(4), hdr), - os.path.join(outdir, f)) + os.path.join(outdir, f)) return filelist, outdir, cwd @@ -70,6 +70,7 @@ class TestClass(spm.SPMCommand): def test_find_mlab_cmd_defaults(): saved_env = dict(os.environ) + class TestClass(spm.SPMCommand): pass # test without FORCE_SPMMCR, SPMMCRCMD set @@ -151,6 +152,7 @@ class TestClass(spm.SPMCommand): out = 
dc._generate_job(prefix='test', contents=contents) yield assert_equal, out, 'test.onsets = {...\n[1, 2, 3, 4];...\n};\n' + def test_bool(): class TestClassInputSpec(SPMCommandInputSpec): test_in = include_intercept = traits.Bool(field='testfield') @@ -167,6 +169,7 @@ class TestClass(spm.SPMCommand): out = dc._make_matlab_command(dc._parse_inputs()) yield assert_equal, out.find('jobs{1}.jobtype{1}.jobname{1}.testfield = 1;') > 0, 1 + def test_make_matlab_command(): class TestClass(spm.SPMCommand): _jobtype = 'jobtype' diff --git a/nipype/interfaces/spm/tests/test_model.py b/nipype/interfaces/spm/tests/test_model.py index 309312dff1..a8f8bf0256 100644 --- a/nipype/interfaces/spm/tests/test_model.py +++ b/nipype/interfaces/spm/tests/test_model.py @@ -25,45 +25,53 @@ def create_files_in_directory(): outdir = mkdtemp() cwd = os.getcwd() os.chdir(outdir) - filelist = ['a.nii','b.nii'] + filelist = ['a.nii', 'b.nii'] for f in filelist: hdr = nb.Nifti1Header() - shape = (3,3,3,4) + shape = (3, 3, 3, 4) hdr.set_data_shape(shape) img = np.random.random(shape) - nb.save(nb.Nifti1Image(img,np.eye(4),hdr), - os.path.join(outdir,f)) + nb.save(nb.Nifti1Image(img, np.eye(4), hdr), + os.path.join(outdir, f)) return filelist, outdir, cwd + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) os.chdir(old_wd) + def test_level1design(): yield assert_equal, spm.Level1Design._jobtype, 'stats' yield assert_equal, spm.Level1Design._jobname, 'fmri_spec' + def test_estimatemodel(): yield assert_equal, spm.EstimateModel._jobtype, 'stats' yield assert_equal, spm.EstimateModel._jobname, 'fmri_est' + def test_estimatecontrast(): yield assert_equal, spm.EstimateContrast._jobtype, 'stats' yield assert_equal, spm.EstimateContrast._jobname, 'con' + def test_threshold(): yield assert_equal, spm.Threshold._jobtype, 'basetype' yield assert_equal, spm.Threshold._jobname, 'basename' + def test_factorialdesign(): yield assert_equal, spm.FactorialDesign._jobtype, 'stats' yield assert_equal, spm.FactorialDesign._jobname, 'factorial_design' + def test_onesamplettestdesign(): yield assert_equal, spm.OneSampleTTestDesign._jobtype, 'stats' yield assert_equal, spm.OneSampleTTestDesign._jobname, 'factorial_design' + def test_twosamplettestdesign(): yield assert_equal, spm.TwoSampleTTestDesign._jobtype, 'stats' yield assert_equal, spm.TwoSampleTTestDesign._jobname, 'factorial_design' diff --git a/nipype/interfaces/spm/tests/test_preprocess.py b/nipype/interfaces/spm/tests/test_preprocess.py index 47c2e16154..406ddf8e54 100644 --- a/nipype/interfaces/spm/tests/test_preprocess.py +++ b/nipype/interfaces/spm/tests/test_preprocess.py @@ -25,36 +25,41 @@ def create_files_in_directory(): outdir = mkdtemp() cwd = os.getcwd() os.chdir(outdir) - filelist = ['a.nii','b.nii'] + filelist = ['a.nii', 'b.nii'] for f in filelist: hdr = nb.Nifti1Header() - shape = (3,3,3,4) + shape = (3, 3, 3, 4) hdr.set_data_shape(shape) img = np.random.random(shape) - nb.save(nb.Nifti1Image(img,np.eye(4),hdr), - os.path.join(outdir,f)) + nb.save(nb.Nifti1Image(img, np.eye(4), hdr), + os.path.join(outdir, f)) return filelist, outdir, cwd + def clean_directory(outdir, old_wd): if os.path.exists(outdir): rmtree(outdir) os.chdir(old_wd) + def test_slicetiming(): yield assert_equal, spm.SliceTiming._jobtype, 'temporal' yield assert_equal, spm.SliceTiming._jobname, 'st' + def test_slicetiming_list_outputs(): filelist, outdir, cwd = create_files_in_directory() st = spm.SliceTiming(in_files=filelist[0]) yield assert_equal, 
st._list_outputs()['timecorrected_files'][0][0], 'a' clean_directory(outdir, cwd) + def test_realign(): yield assert_equal, spm.Realign._jobtype, 'spatial' yield assert_equal, spm.Realign._jobname, 'realign' yield assert_equal, spm.Realign().inputs.jobtype, 'estwrite' + def test_realign_list_outputs(): filelist, outdir, cwd = create_files_in_directory() rlgn = spm.Realign(in_files=filelist[0]) @@ -63,24 +68,28 @@ def test_realign_list_outputs(): yield assert_true, rlgn._list_outputs()['mean_image'].startswith('mean') clean_directory(outdir, cwd) + def test_coregister(): yield assert_equal, spm.Coregister._jobtype, 'spatial' yield assert_equal, spm.Coregister._jobname, 'coreg' yield assert_equal, spm.Coregister().inputs.jobtype, 'estwrite' + def test_coregister_list_outputs(): filelist, outdir, cwd = create_files_in_directory() coreg = spm.Coregister(source=filelist[0]) yield assert_true, coreg._list_outputs()['coregistered_source'][0].startswith('r') - coreg = spm.Coregister(source=filelist[0],apply_to_files=filelist[1]) + coreg = spm.Coregister(source=filelist[0], apply_to_files=filelist[1]) yield assert_true, coreg._list_outputs()['coregistered_files'][0].startswith('r') clean_directory(outdir, cwd) + def test_normalize(): yield assert_equal, spm.Normalize._jobtype, 'spatial' yield assert_equal, spm.Normalize._jobname, 'normalise' yield assert_equal, spm.Normalize().inputs.jobtype, 'estwrite' + def test_normalize_list_outputs(): filelist, outdir, cwd = create_files_in_directory() norm = spm.Normalize(source=filelist[0]) @@ -89,11 +98,13 @@ def test_normalize_list_outputs(): yield assert_true, norm._list_outputs()['normalized_files'][0].startswith('w') clean_directory(outdir, cwd) + def test_normalize12(): yield assert_equal, spm.Normalize12._jobtype, 'spatial' yield assert_equal, spm.Normalize12._jobname, 'normalise' yield assert_equal, spm.Normalize12().inputs.jobtype, 'estwrite' + def test_normalize12_list_outputs(): filelist, outdir, cwd = create_files_in_directory() norm12 = spm.Normalize12(image_to_align=filelist[0]) @@ -103,6 +114,7 @@ def test_normalize12_list_outputs(): yield assert_true, norm12._list_outputs()['normalized_files'][0].startswith('w') clean_directory(outdir, cwd) + @skipif(no_spm) def test_segment(): if spm.Info.version()['name'] == "SPM12": @@ -112,6 +124,7 @@ def test_segment(): yield assert_equal, spm.Segment()._jobtype, 'spatial' yield assert_equal, spm.Segment()._jobname, 'preproc' + @skipif(no_spm) def test_newsegment(): if spm.Info.version()['name'] == "SPM12": @@ -126,10 +139,12 @@ def test_smooth(): yield assert_equal, spm.Smooth._jobtype, 'spatial' yield assert_equal, spm.Smooth._jobname, 'smooth' + def test_dartel(): yield assert_equal, spm.DARTEL._jobtype, 'tools' yield assert_equal, spm.DARTEL._jobname, 'dartel' + def test_dartelnorm2mni(): yield assert_equal, spm.DARTELNorm2MNI._jobtype, 'tools' yield assert_equal, spm.DARTELNorm2MNI._jobname, 'dartel' diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py index 6a272d45b9..cd800949fb 100644 --- a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -1,75 +1,78 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os -from nipype.testing import (assert_equal, assert_false,assert_raises, - assert_true, skipif, example_data) +from nipype.testing import (assert_equal, assert_false, assert_raises, + assert_true, skipif, example_data) from 
nipype.interfaces.spm import no_spm import nipype.interfaces.spm.utils as spmu from nipype.interfaces.base import isdefined from nipype.utils.filemanip import split_filename, fname_presuffix from nipype.interfaces.base import TraitError + def test_coreg(): - moving = example_data(infile = 'functional.nii') - target = example_data(infile = 'T1.nii') - mat = example_data(infile = 'trans.mat') - coreg = spmu.CalcCoregAffine(matlab_cmd = 'mymatlab') + moving = example_data(infile='functional.nii') + target = example_data(infile='T1.nii') + mat = example_data(infile='trans.mat') + coreg = spmu.CalcCoregAffine(matlab_cmd='mymatlab') coreg.inputs.target = target assert_equal(coreg.inputs.matlab_cmd, 'mymatlab') coreg.inputs.moving = moving - assert_equal( isdefined(coreg.inputs.mat),False) + assert_equal(isdefined(coreg.inputs.mat), False) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) - mat = os.path.join(pth, '%s_to_%s.mat'%(mov,tgt)) - invmat = fname_presuffix(mat, prefix = 'inverse_') + mat = os.path.join(pth, '%s_to_%s.mat' %(mov, tgt)) + invmat = fname_presuffix(mat, prefix='inverse_') scrpt = coreg._make_matlab_command(None) assert_equal(coreg.inputs.mat, mat) - assert_equal( coreg.inputs.invmat, invmat) + assert_equal(coreg.inputs.invmat, invmat) def test_apply_transform(): - moving = example_data(infile = 'functional.nii') - mat = example_data(infile = 'trans.mat') - applymat = spmu.ApplyTransform(matlab_cmd = 'mymatlab') - assert_equal( applymat.inputs.matlab_cmd, 'mymatlab' ) + moving = example_data(infile='functional.nii') + mat = example_data(infile='trans.mat') + applymat = spmu.ApplyTransform(matlab_cmd='mymatlab') + assert_equal(applymat.inputs.matlab_cmd, 'mymatlab') applymat.inputs.in_file = moving applymat.inputs.mat = mat scrpt = applymat._make_matlab_command(None) expected = '[p n e v] = spm_fileparts(V.fname);' - assert_equal( expected in scrpt, True) + assert_equal(expected in scrpt, True) expected = 'V.mat = transform.M * V.mat;' assert_equal(expected in scrpt, True) + def test_reslice(): - moving = example_data(infile = 'functional.nii') - space_defining = example_data(infile = 'T1.nii') - reslice = spmu.Reslice(matlab_cmd = 'mymatlab_version') - assert_equal( reslice.inputs.matlab_cmd, 'mymatlab_version') + moving = example_data(infile='functional.nii') + space_defining = example_data(infile='T1.nii') + reslice = spmu.Reslice(matlab_cmd='mymatlab_version') + assert_equal(reslice.inputs.matlab_cmd, 'mymatlab_version') reslice.inputs.in_file = moving reslice.inputs.space_defining = space_defining - assert_equal( reslice.inputs.interp, 0) - assert_raises(TraitError,reslice.inputs.trait_set,interp = 'nearest') - assert_raises(TraitError, reslice.inputs.trait_set, interp = 10) + assert_equal(reslice.inputs.interp, 0) + assert_raises(TraitError, reslice.inputs.trait_set, interp='nearest') + assert_raises(TraitError, reslice.inputs.trait_set, interp=10) reslice.inputs.interp = 1 script = reslice._make_matlab_command(None) outfile = fname_presuffix(moving, prefix='r') assert_equal(reslice.inputs.out_file, outfile) expected = '\nflags.mean=0;\nflags.which=1;\nflags.mask=0;' - assert_equal(expected in script.replace(' ',''), True) + assert_equal(expected in script.replace(' ', ''), True) expected_interp = 'flags.interp = 1;\n' assert_equal(expected_interp in script, True) assert_equal('spm_reslice(invols, flags);' in script, True) + def test_dicom_import(): - dicom = example_data(infile = 'dicomdir/123456-1-1.dcm') - di = spmu.DicomImport(matlab_cmd = 
'mymatlab') + dicom = example_data(infile='dicomdir/123456-1-1.dcm') + di = spmu.DicomImport(matlab_cmd='mymatlab') assert_equal(di.inputs.matlab_cmd, 'mymatlab') assert_equal(di.inputs.output_dir_struct, 'flat') assert_equal(di.inputs.output_dir, './converted_dicom') assert_equal(di.inputs.format, 'nii') assert_equal(di.inputs.icedims, False) - assert_raises(TraitError,di.inputs.trait_set,output_dir_struct = 'wrong') - assert_raises(TraitError,di.inputs.trait_set,format = 'FAT') - assert_raises(TraitError,di.inputs.trait_set,in_files = ['does_sfd_not_32fn_exist.dcm']) + assert_raises(TraitError, di.inputs.trait_set, output_dir_struct='wrong') + assert_raises(TraitError, di.inputs.trait_set, format='FAT') + assert_raises(TraitError, di.inputs.trait_set, in_files=['does_sfd_not_32fn_exist.dcm']) di.inputs.in_files = [dicom] assert_equal(di.inputs.in_files, [dicom]) diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index 2cab617a74..9b7aa84dd3 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -6,27 +6,30 @@ BaseInterfaceInputSpec, isdefined, OutputMultiPath, InputMultiPath) from nipype.interfaces.base import File, traits -from nipype.utils.filemanip import split_filename, fname_presuffix, filename_to_list,list_to_filename +from nipype.utils.filemanip import split_filename, fname_presuffix, filename_to_list, list_to_filename import os import numpy as np + class Analyze2niiInputSpec(SPMCommandInputSpec): analyze_file = File(exists=True, mandatory=True) + class Analyze2niiOutputSpec(SPMCommandInputSpec): nifti_file = File(exists=True) + class Analyze2nii(SPMCommand): input_spec = Analyze2niiInputSpec output_spec = Analyze2niiOutputSpec def _make_matlab_command(self, _): - script = "V = spm_vol('%s');\n"%self.inputs.analyze_file - _, name,_ = split_filename(self.inputs.analyze_file) + script = "V = spm_vol('%s');\n" %self.inputs.analyze_file + _, name, _ = split_filename(self.inputs.analyze_file) self.output_name = os.path.join(os.getcwd(), name + ".nii") script += "[Y, XYZ] = spm_read_vols(V);\n" - script += "V.fname = '%s';\n"%self.output_name + script += "V.fname = '%s';\n" %self.output_name script += "spm_write_vol(V, Y);\n" return script @@ -36,18 +39,19 @@ def _list_outputs(self): outputs['nifti_file'] = self.output_name return outputs + class CalcCoregAffineInputSpec(SPMCommandInputSpec): - target = File( exists = True, mandatory = True, - desc = 'target for generating affine transform') - moving = File( exists = True, mandatory = True, copyfile=False, - desc = 'volume transform can be applied to register with target') - mat = File( desc = 'Filename used to store affine matrix') - invmat = File( desc = 'Filename used to store inverse affine matrix') + target = File(exists=True, mandatory=True, + desc='target for generating affine transform') + moving = File(exists=True, mandatory=True, copyfile=False, + desc='volume transform can be applied to register with target') + mat = File(desc='Filename used to store affine matrix') + invmat = File(desc='Filename used to store inverse affine matrix') class CalcCoregAffineOutputSpec(TraitedSpec): - mat = File(exists = True, desc = 'Matlab file holding transform') - invmat = File( desc = 'Matlab file holding inverse transform') + mat = File(exists=True, desc='Matlab file holding transform') + invmat = File(desc='Matlab file holding inverse transform') class CalcCoregAffine(SPMCommand): @@ -78,14 +82,14 @@ class CalcCoregAffine(SPMCommand): def _make_inv_file(self): """ makes filename to 
hold inverse transform if not specified""" - invmat = fname_presuffix(self.inputs.mat, prefix = 'inverse_') + invmat = fname_presuffix(self.inputs.mat, prefix='inverse_') return invmat def _make_mat_file(self): """ makes name for matfile if doesn exist""" - pth, mv, _ = split_filename(self.inputs.moving) + pth, mv, _ = split_filename(self.inputs.moving) _, tgt, _ = split_filename(self.inputs.target) - mat = os.path.join(pth, '%s_to_%s.mat'%(mv,tgt)) + mat = os.path.join(pth, '%s_to_%s.mat' %(mv, tgt)) return mat def _make_matlab_command(self, _): @@ -104,7 +108,7 @@ def _make_matlab_command(self, _): save('%s' , 'M' ); M = inv(M); save('%s','M') - """%(self.inputs.target, + """ %(self.inputs.target, self.inputs.moving, self.inputs.mat, self.inputs.invmat) @@ -116,16 +120,18 @@ def _list_outputs(self): outputs['invmat'] = os.path.abspath(self.inputs.invmat) return outputs + class ApplyTransformInputSpec(SPMCommandInputSpec): - in_file = File( exists = True, mandatory = True, copyfile=True, - desc='file to apply transform to, (only updates header)') - mat = File( exists = True, mandatory = True, + in_file = File(exists=True, mandatory=True, copyfile=True, + desc='file to apply transform to, (only updates header)') + mat = File(exists=True, mandatory=True, desc='file holding transform to apply') out_file = File(desc="output file name for transformed data", genfile=True) + class ApplyTransformOutputSpec(TraitedSpec): - out_file = File(exists = True, desc = 'Transformed image file') + out_file = File(exists=True, desc='Transformed image file') class ApplyTransform(SPMCommand): @@ -160,11 +166,11 @@ def _make_matlab_command(self, _): V.fname = fullfile(outfile); spm_write_vol(V,X); - """%(self.inputs.in_file, + """ %(self.inputs.in_file, self.inputs.out_file, self.inputs.mat) - #img_space = spm_get_space(infile); - #spm_get_space(infile, transform.M * img_space); + # img_space = spm_get_space(infile); + # spm_get_space(infile, transform.M * img_space); return script def _list_outputs(self): @@ -179,21 +185,23 @@ def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + '_trans.nii' + class ResliceInputSpec(SPMCommandInputSpec): - in_file = File( exists = True, mandatory=True, + in_file = File(exists=True, mandatory=True, desc='file to apply transform to, (only updates header)') - space_defining = File ( exists = True, mandatory = True, - desc = 'Volume defining space to slice in_file into') + space_defining = File(exists=True, mandatory=True, + desc='Volume defining space to slice in_file into') - interp = traits.Range(low = 0, high = 7, usedefault = True, + interp = traits.Range(low=0, high=7, usedefault=True, desc='degree of b-spline used for interpolation'\ - '0 is nearest neighbor (default)') + '0 is nearest neighbor (default)') + out_file = File(desc='Optional file to save resliced volume') - out_file = File(desc = 'Optional file to save resliced volume') class ResliceOutputSpec(TraitedSpec): - out_file = File( exists = True, desc = 'resliced volume') + out_file = File(exists=True, desc='resliced volume') + class Reslice(SPMCommand): """ uses spm_reslice to resample in_file into space of space_defining""" @@ -205,7 +213,7 @@ def _make_matlab_command(self, _): """ generates script""" if not isdefined(self.inputs.out_file): self.inputs.out_file = fname_presuffix(self.inputs.in_file, - prefix = 'r') + prefix='r') script = """ flags.mean = 0; flags.which = 1; @@ -214,7 +222,7 @@ def _make_matlab_command(self, _): infiles = strvcat(\'%s\', \'%s\'); invols = 
spm_vol(infiles); spm_reslice(invols, flags); - """%(self.inputs.interp, + """ %(self.inputs.interp, self.inputs.space_defining, self.inputs.in_file) return script @@ -224,6 +232,7 @@ def _list_outputs(self): outputs['out_file'] = os.path.abspath(self.inputs.out_file) return outputs + class ApplyInverseDeformationInput(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, field='fnames', @@ -375,6 +384,7 @@ def _list_outputs(self): outputs['out_files'].append(os.path.realpath('w%s' % fname)) return outputs + class DicomImportInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), @@ -387,26 +397,28 @@ class DicomImportInputSpec(SPMCommandInputSpec): usedefault=True, desc='directory structure for the output.') output_dir = traits.Str('./converted_dicom', - field='outdir', - usedefault=True, - desc='output directory.') + field='outdir', + usedefault=True, + desc='output directory.') format = traits.Enum( 'nii', 'img', field='convopts.format', usedefault=True, desc='output format.') icedims = traits.Bool(False, - field='convopts.icedims', - usedefault=True, - desc='If image sorting fails, one can try using the additional\ + field='convopts.icedims', + usedefault=True, + desc='If image sorting fails, one can try using the additional\ SIEMENS ICEDims information to create unique filenames.\ Use this only if there would be multiple volumes with\ exactly the same file names.') + class DicomImportOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=True), desc='converted files') + class DicomImport(SPMCommand): """ Uses spm to convert DICOM files to nii or img+hdr. @@ -453,11 +465,11 @@ def _list_outputs(self): ext = self.inputs.format if self.inputs.output_dir_struct == "flat": - outputs['out_files'] = glob(os.path.join(od, '*.%s'%ext)) + outputs['out_files'] = glob(os.path.join(od, '*.%s' %ext)) elif self.inputs.output_dir_struct == 'series': - outputs['out_files'] = glob(os.path.join(od, os.path.join('*','*.%s'%ext))) + outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*.%s' %ext))) elif self.inputs.output_dir_struct in ['patid', 'date_time', 'patname']: - outputs['out_files'] = glob(os.path.join(od, os.path.join('*','*','*.%s'%ext))) + outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*', '*.%s' %ext))) elif self.inputs.output_dir_struct == 'patid_date': - outputs['out_files'] = glob(os.path.join(od, os.path.join('*','*','*','*.%s'%ext))) + outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*', '*', '*.%s' %ext))) return outputs diff --git a/nipype/interfaces/tests/test_base.py b/nipype/interfaces/tests/test_base.py index edc51035ae..cbfd95f092 100644 --- a/nipype/interfaces/tests/test_base.py +++ b/nipype/interfaces/tests/test_base.py @@ -10,8 +10,8 @@ import warnings from nipype.testing import (assert_equal, assert_not_equal, assert_raises, - assert_true, assert_false, with_setup, package_check, - skipif) + assert_true, assert_false, with_setup, package_check, + skipif) import nipype.interfaces.base as nib from nipype.utils.filemanip import split_filename from nipype.interfaces.base import Undefined, config @@ -21,20 +21,23 @@ def test_bunch(): b = nib.Bunch() - yield assert_equal, b.__dict__,{} - b = nib.Bunch(a=1,b=[2,3]) - yield assert_equal, b.__dict__,{'a': 1, 'b': [2,3]} + yield assert_equal, b.__dict__, {} + b = nib.Bunch(a=1, b=[2, 3]) + yield assert_equal, b.__dict__, {'a': 1, 'b': [2, 3]} + def test_bunch_attribute(): - b = nib.Bunch(a=1,b=[2,3],c=None) - yield assert_equal, b.a 
,1 - yield assert_equal, b.b, [2,3] + b = nib.Bunch(a=1, b=[2, 3], c=None) + yield assert_equal, b.a, 1 + yield assert_equal, b.b, [2, 3] yield assert_equal, b.c, None + def test_bunch_repr(): - b = nib.Bunch(b=2,c=3,a=dict(n=1,m=2)) + b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2)) yield assert_equal, repr(b), "Bunch(a={'m': 2, 'n': 1}, b=2, c=3)" + def test_bunch_methods(): b = nib.Bunch(a=2) b.update(a=3) @@ -46,14 +49,15 @@ def test_bunch_methods(): yield assert_equal, type(dict()), type(newb) yield assert_equal, newb['a'], 3 + def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. pth = os.path.split(os.path.abspath(__file__))[0] json_pth = os.path.join(pth, 'realign_json.json') - b = nib.Bunch(infile = json_pth, - otherthing = 'blue', - yat = True) + b = nib.Bunch(infile=json_pth, + otherthing='blue', + yat=True) newbdict, bhash = b._get_bunch_hash() yield assert_equal, bhash, 'ddcc7b4ec5675df8cf317a48bd1857fa' # Make sure the hash stored in the json file for `infile` is correct. @@ -65,17 +69,18 @@ def test_bunch_hash(): # create a temp file -#global tmp_infile, tmp_dir -#tmp_infile = None -#tmp_dir = None +# global tmp_infile, tmp_dir +# tmp_infile = None +# tmp_dir = None def setup_file(): - #global tmp_infile, tmp_dir + # global tmp_infile, tmp_dir tmp_dir = tempfile.mkdtemp() tmp_infile = os.path.join(tmp_dir, 'foo.txt') with open(tmp_infile, 'w') as fp: fp.writelines(['123456789']) return tmp_infile + def teardown_file(tmp_dir): shutil.rmtree(tmp_dir) @@ -88,43 +93,46 @@ class spec(nib.TraitedSpec): foo = nib.traits.Int goo = nib.traits.Float(usedefault=True) - yield assert_equal, spec().foo, Undefined yield assert_equal, spec().goo, 0.0 - specfunc = lambda x : spec(hoo=x) + specfunc = lambda x: spec(hoo=x) yield assert_raises, nib.traits.TraitError, specfunc, 1 infields = spec(foo=1) hashval = ([('foo', 1), ('goo', '0.0000000000')], 'e89433b8c9141aa0fda2f8f4d662c047') yield assert_equal, infields.get_hashval(), hashval - #yield assert_equal, infields.hashval[1], hashval[1] + # yield assert_equal, infields.hashval[1], hashval[1] yield assert_equal, infields.__repr__(), '\nfoo = 1\ngoo = 0.0\n' + @skip def test_TraitedSpec_dynamic(): from pickle import dumps, loads a = nib.BaseTraitedSpec() a.add_trait('foo', nib.traits.Int) a.foo = 1 - assign_a = lambda : setattr(a, 'foo', 'a') + assign_a = lambda: setattr(a, 'foo', 'a') yield assert_raises, Exception, assign_a pkld_a = dumps(a) unpkld_a = loads(pkld_a) - assign_a_again = lambda : setattr(unpkld_a, 'foo', 'a') + assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a') yield assert_raises, Exception, assign_a_again + def test_TraitedSpec_logic(): class spec3(nib.TraitedSpec): _xor_inputs = ('foo', 'bar') - foo = nib.traits.Int(xor = _xor_inputs, - desc = 'foo or bar, not both') - bar = nib.traits.Int(xor = _xor_inputs, - desc = 'bar or foo, not both') - kung = nib.traits.Float(requires = ('foo',), - position = 0, - desc = 'kung foo') + foo = nib.traits.Int(xor=_xor_inputs, + desc='foo or bar, not both') + bar = nib.traits.Int(xor=_xor_inputs, + desc='bar or foo, not both') + kung = nib.traits.Float(requires=('foo',), + position=0, + desc='kung foo') + class out3(nib.TraitedSpec): output = nib.traits.Int + class MyInterface(nib.BaseInterface): input_spec = spec3 output_spec = out3 @@ -133,12 +141,13 @@ class MyInterface(nib.BaseInterface): yield assert_raises, TypeError, setattr(myif.inputs, 'kung', 10.0) myif.inputs.foo = 1 yield assert_equal, myif.inputs.foo, 
1 - set_bar = lambda : setattr(myif.inputs, 'bar', 1) + set_bar = lambda: setattr(myif.inputs, 'bar', 1) yield assert_raises, IOError, set_bar yield assert_equal, myif.inputs.foo, 1 myif.inputs.kung = 2 yield assert_equal, myif.inputs.kung, 2.0 + def test_deprecation(): with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) @@ -146,7 +155,7 @@ def test_deprecation(): class DeprecationSpec1(nib.TraitedSpec): foo = nib.traits.Int(deprecated='0.1') spec_instance = DeprecationSpec1() - set_foo = lambda : setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, 'foo', 1) yield assert_raises, nib.TraitError, set_foo yield assert_equal, len(w), 0, 'no warnings, just errors' @@ -156,7 +165,7 @@ class DeprecationSpec1(nib.TraitedSpec): class DeprecationSpec1numeric(nib.TraitedSpec): foo = nib.traits.Int(deprecated='0.1') spec_instance = DeprecationSpec1numeric() - set_foo = lambda : setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, 'foo', 1) yield assert_raises, nib.TraitError, set_foo yield assert_equal, len(w), 0, 'no warnings, just errors' @@ -166,7 +175,7 @@ class DeprecationSpec1numeric(nib.TraitedSpec): class DeprecationSpec2(nib.TraitedSpec): foo = nib.traits.Int(deprecated='100', new_name='bar') spec_instance = DeprecationSpec2() - set_foo = lambda : setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, 'foo', 1) yield assert_raises, nib.TraitError, set_foo yield assert_equal, len(w), 0, 'no warnings, just errors' @@ -214,7 +223,7 @@ class spec2(nib.CommandLineInputSpec): position=2) doo = nib.File(exists=True, argstr="%s", position=1) goo = traits.Int(argstr="%d", position=4) - poo = nib.File(name_source=['goo'], hash_files=False, argstr="%s",position=3) + poo = nib.File(name_source=['goo'], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" @@ -287,13 +296,13 @@ class TestCycle(nib.CommandLine): os.chdir(pwd) teardown_file(tmpd) + def test_cycle_namesource2(): tmp_infile = setup_file() tmpd, nme, ext = split_filename(tmp_infile) pwd = os.getcwd() os.chdir(tmpd) - class spec3(nib.CommandLineInputSpec): moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s", position=1, name_template='%s_mootpl') @@ -334,11 +343,13 @@ def checknose(): else: return 1 + @skipif(checknose) def test_TraitedSpec_withFile(): tmp_infile = setup_file() tmpd, nme = os.path.split(tmp_infile) yield assert_true, os.path.exists(tmp_infile) + class spec2(nib.TraitedSpec): moo = nib.File(exists=True) doo = nib.traits.List(nib.File(exists=True)) @@ -347,6 +358,7 @@ class spec2(nib.TraitedSpec): yield assert_equal, hashval[1], 'a00e9ee24f5bfa9545a515b7a759886b' teardown_file(tmpd) + @skipif(checknose) def test_TraitedSpec_withNoFileHashing(): tmp_infile = setup_file() @@ -354,6 +366,7 @@ def test_TraitedSpec_withNoFileHashing(): pwd = os.getcwd() os.chdir(tmpd) yield assert_true, os.path.exists(tmp_infile) + class spec2(nib.TraitedSpec): moo = nib.File(exists=True, hash_files=False) doo = nib.traits.List(nib.File(exists=True)) @@ -377,6 +390,7 @@ class spec4(nib.TraitedSpec): os.chdir(pwd) teardown_file(tmpd) + def test_Interface(): yield assert_equal, nib.Interface.input_spec, None yield assert_equal, nib.Interface.output_spec, None @@ -396,6 +410,7 @@ def __init__(self): yield assert_raises, NotImplementedError, nif._list_outputs yield assert_raises, NotImplementedError, nif._get_filecopy_info + def test_BaseInterface(): yield assert_equal, 
nib.BaseInterface.help(), None yield assert_equal, nib.BaseInterface._get_filecopy_info(), [] @@ -407,8 +422,10 @@ class InputSpec(nib.TraitedSpec): hoo = nib.traits.Int(desc='a random int', usedefault=True) zoo = nib.File(desc='a file', copyfile=False) woo = nib.File(desc='a file', copyfile=True) + class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int') + class DerivedInterface(nib.BaseInterface): input_spec = InputSpec @@ -427,6 +444,7 @@ class DerivedInterface(nib.BaseInterface): class DerivedInterface2(DerivedInterface): output_spec = OutputSpec + def _run_interface(self, runtime): return runtime @@ -437,13 +455,16 @@ def _run_interface(self, runtime): nib.BaseInterface.input_spec = None yield assert_raises, Exception, nib.BaseInterface + def assert_not_raises(fn, *args, **kwargs): fn(*args, **kwargs) return True + def test_input_version(): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.9') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec obj = DerivedInterface1() @@ -453,8 +474,10 @@ class DerivedInterface1(nib.BaseInterface): yield assert_raises, Exception, obj._check_version_requirements, obj.inputs config.set_default_config() + class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.9') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec _version = '0.8' @@ -464,6 +487,7 @@ class DerivedInterface1(nib.BaseInterface): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.9') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec _version = '0.10' @@ -472,6 +496,7 @@ class DerivedInterface1(nib.BaseInterface): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.9') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec _version = '0.9' @@ -482,6 +507,7 @@ class DerivedInterface1(nib.BaseInterface): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', max_ver='0.7') + class DerivedInterface2(nib.BaseInterface): input_spec = InputSpec _version = '0.8' @@ -491,6 +517,7 @@ class DerivedInterface2(nib.BaseInterface): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', max_ver='0.9') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec _version = '0.9' @@ -499,11 +526,14 @@ class DerivedInterface1(nib.BaseInterface): not_raised = True yield assert_not_raises, obj._check_version_requirements, obj.inputs + def test_output_version(): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.9') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -513,29 +543,37 @@ class DerivedInterface1(nib.BaseInterface): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.11') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec _version = '0.10' obj = DerivedInterface1() yield assert_equal, obj._check_version_requirements(obj._outputs()), ['foo'] + class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc='a random int', min_ver='0.11') + class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec _version = 
'0.10' + def _run_interface(self, runtime): return runtime + def _list_outputs(self): return {'foo': 1} obj = DerivedInterface1() yield assert_raises, KeyError, obj.run + def test_Commandline(): yield assert_raises, Exception, nib.CommandLine ci = nib.CommandLine(command='which') @@ -544,7 +582,7 @@ def test_Commandline(): ci2 = nib.CommandLine(command='which', args='ls') yield assert_equal, ci2.cmdline, 'which ls' ci3 = nib.CommandLine(command='echo') - ci3.inputs.environ = {'MYENV' : 'foo'} + ci3.inputs.environ = {'MYENV': 'foo'} res = ci3.run() yield assert_equal, res.runtime.environ['MYENV'], 'foo' yield assert_equal, res.outputs, None @@ -584,6 +622,7 @@ class CommandLineInputSpec2(nib.CommandLineInputSpec): class DerivedClass(nib.CommandLine): input_spec = CommandLineInputSpec2 + def _gen_filename(self, name): return 'filename' @@ -602,10 +641,11 @@ def test_Commandline_environ(): res = ci3.run() yield assert_false, 'DISPLAY' in ci3.inputs.environ yield assert_equal, res.runtime.environ['DISPLAY'], ':3' - ci3.inputs.environ = {'DISPLAY' : ':2'} + ci3.inputs.environ = {'DISPLAY': ':2'} res = ci3.run() yield assert_equal, res.runtime.environ['DISPLAY'], ':2' + def test_CommandLine_output(): tmp_infile = setup_file() tmpd, name = os.path.split(tmp_infile) @@ -632,6 +672,7 @@ def test_CommandLine_output(): os.chdir(pwd) teardown_file(tmpd) + def test_global_CommandLine_output(): tmp_infile = setup_file() tmpd, name = os.path.split(tmp_infile) diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index 598621b5d8..a46b6c14b4 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -32,6 +32,7 @@ def test_datagrabber(): yield assert_equal, dg.inputs.base_directory, Undefined yield assert_equal, dg.inputs.template_args, {'outfiles': []} + @skipif(noboto) def test_s3datagrabber(): dg = nio.S3DataGrabber() @@ -92,6 +93,7 @@ def test_selectfiles_valueerror(): force_lists=force_lists) yield assert_raises, ValueError, sf.run + @skipif(noboto) def test_s3datagrabber_communication(): dg = nio.S3DataGrabber(infields=['subj_id', 'run_num'], outfields=['func', 'struct']) @@ -124,6 +126,7 @@ def test_s3datagrabber_communication(): shutil.rmtree(tempdir) + def test_datagrabber_order(): tempdir = mkdtemp() file1 = mkstemp(prefix='sub002_L1_R1.q', dir=tempdir) @@ -149,6 +152,7 @@ def test_datagrabber_order(): yield assert_true, 'sub002_L3_R10' in outfiles[2][1] shutil.rmtree(tempdir) + def test_datasink(): ds = nio.DataSink() yield assert_true, ds.inputs.parameterization @@ -160,6 +164,7 @@ def test_datasink(): ds = nio.DataSink(infields=['test']) yield assert_true, 'test' in ds.inputs.copyable_trait_names() + @skipif(noboto) def test_s3datasink(): ds = nio.S3DataSink() @@ -195,12 +200,13 @@ def test_datasink_substitutions(): setattr(ds.inputs, '@outdir', files) ds.run() yield assert_equal, \ - sorted([os.path.basename(x) for - x in glob.glob(os.path.join(outdir, '*'))]), \ - ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns + sorted([os.path.basename(x) for + x in glob.glob(os.path.join(outdir, '*'))]), \ + ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns shutil.rmtree(indir) shutil.rmtree(outdir) + @skipif(noboto) def test_s3datasink_substitutions(): indir = mkdtemp(prefix='-Tmp-nipype_ds_subs_in') @@ -221,8 +227,8 @@ def test_s3datasink_substitutions(): raise ose conn = S3Connection(anon=True, is_secure=False, port=4567, - host='localhost', - calling_format=OrdinaryCallingFormat()) + host='localhost', 
+ calling_format=OrdinaryCallingFormat()) conn.create_bucket('test') ds = nio.S3DataSink( @@ -243,9 +249,9 @@ def test_s3datasink_substitutions(): setattr(ds.inputs, '@outdir', files) ds.run() yield assert_equal, \ - sorted([os.path.basename(x) for - x in glob.glob(os.path.join(outdir, '*'))]), \ - ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns + sorted([os.path.basename(x) for + x in glob.glob(os.path.join(outdir, '*'))]), \ + ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns bkt = conn.get_bucket(ds.inputs.bucket) bkt_files = list(k for k in bkt.list()) @@ -277,6 +283,7 @@ def test_s3datasink_substitutions(): shutil.rmtree(indir) shutil.rmtree(outdir) + def _temp_analyze_files(): """Generate temporary analyze file pair.""" fd, orig_img = mkstemp(suffix='.img', dir=mkdtemp()) diff --git a/nipype/interfaces/tests/test_matlab.py b/nipype/interfaces/tests/test_matlab.py index 36facb8038..874e0bddd8 100644 --- a/nipype/interfaces/tests/test_matlab.py +++ b/nipype/interfaces/tests/test_matlab.py @@ -21,6 +21,7 @@ def clean_workspace_and_get_default_script_file(): os.remove(default_script_file) # raise Exception('Default script file needed for tests; please remove %s!' % default_script_file) return default_script_file + @skipif(no_matlab) def test_cmdline(): default_script_file = clean_workspace_and_get_default_script_file() diff --git a/nipype/interfaces/traits_extension.py b/nipype/interfaces/traits_extension.py index 994488284c..084eb5ea7f 100644 --- a/nipype/interfaces/traits_extension.py +++ b/nipype/interfaces/traits_extension.py @@ -26,15 +26,16 @@ from traits.trait_errors import TraitError from traits.trait_base import _Undefined -class BaseFile ( traits.BaseStr ): + +class BaseFile (traits.BaseStr): """ Defines a trait whose value must be the name of a file. """ # A description of the type of value this trait accepts: info_text = 'a file name' - def __init__ ( self, value = '', filter = None, auto_set = False, - entries = 0, exists = False, **metadata ): + def __init__(self, value='', filter=None, auto_set=False, + entries=0, exists=False, **metadata): """ Creates a File trait. Parameters @@ -63,29 +64,29 @@ def __init__ ( self, value = '', filter = None, auto_set = False, if exists: self.info_text = 'an existing file name' - super( BaseFile, self ).__init__( value, **metadata ) + super(BaseFile, self).__init__(value, **metadata) - def validate ( self, object, name, value ): + def validate(self, object, name, value): """ Validates that a specified value is valid for this trait. Note: The 'fast validator' version performs this check in C. """ - validated_value = super( BaseFile, self ).validate( object, name, value ) + validated_value = super(BaseFile, self).validate(object, name, value) if not self.exists: return validated_value - elif os.path.isfile( value ): + elif os.path.isfile(value): return validated_value - self.error( object, name, value ) + self.error(object, name, value) -class File ( BaseFile ): +class File (BaseFile): """ Defines a trait whose value must be the name of a file using a C-level fast validator. """ - def __init__ ( self, value = '', filter = None, auto_set = False, - entries = 0, exists = False, **metadata ): + def __init__(self, value='', filter=None, auto_set=False, + entries=0, exists=False, **metadata): """ Creates a File trait. 
Parameters @@ -108,24 +109,25 @@ def __init__ ( self, value = '', filter = None, auto_set = False, """ if not exists: # Define the C-level fast validator to use: - fast_validate = ( 11, str ) + fast_validate = (11, str) - super( File, self ).__init__( value, filter, auto_set, entries, exists, - **metadata ) + super(File, self).__init__(value, filter, auto_set, entries, exists, + **metadata) -#------------------------------------------------------------------------------- +# ------------------------------------------------------------------------------- # 'BaseDirectory' and 'Directory' traits: -#------------------------------------------------------------------------------- +# ------------------------------------------------------------------------------- + -class BaseDirectory ( traits.BaseStr ): +class BaseDirectory (traits.BaseStr): """ Defines a trait whose value must be the name of a directory. """ # A description of the type of value this trait accepts: info_text = 'a directory name' - def __init__ ( self, value = '', auto_set = False, entries = 0, - exists = False, **metadata ): + def __init__(self, value='', auto_set=False, entries=0, + exists=False, **metadata): """ Creates a BaseDirectory trait. Parameters @@ -150,30 +152,30 @@ def __init__ ( self, value = '', auto_set = False, entries = 0, if exists: self.info_text = 'an existing directory name' - super( BaseDirectory, self ).__init__( value, **metadata ) + super(BaseDirectory, self).__init__(value, **metadata) - def validate ( self, object, name, value ): + def validate(self, object, name, value): """ Validates that a specified value is valid for this trait. Note: The 'fast validator' version performs this check in C. """ - validated_value = super( BaseDirectory, self ).validate( object, name, value ) + validated_value = super(BaseDirectory, self).validate(object, name, value) if not self.exists: return validated_value - if os.path.isdir( value ): + if os.path.isdir(value): return validated_value - self.error( object, name, value ) + self.error(object, name, value) -class Directory ( BaseDirectory ): +class Directory (BaseDirectory): """ Defines a trait whose value must be the name of a directory using a C-level fast validator. """ - def __init__ ( self, value = '', auto_set = False, entries = 0, - exists = False, **metadata ): + def __init__(self, value='', auto_set=False, entries=0, + exists=False, **metadata): """ Creates a Directory trait. Parameters @@ -194,10 +196,10 @@ def __init__ ( self, value = '', auto_set = False, entries = 0, # Define the C-level fast validator to use if the directory existence # test is not required: if not exists: - self.fast_validate = ( 11, str ) + self.fast_validate = (11, str) - super( Directory, self ).__init__( value, auto_set, entries, exists, - **metadata ) + super(Directory, self).__init__(value, auto_set, entries, exists, + **metadata) """ @@ -216,6 +218,7 @@ def __init__ ( self, value = '', auto_set = False, entries = 0, monkey patched. 
""" + def length(self): return 0 @@ -226,15 +229,17 @@ def length(self): Undefined = _Undefined() + def isdefined(object): return not isinstance(object, _Undefined) + def has_metadata(trait, metadata, value=None, recursive=True): ''' Checks if a given trait has a metadata (and optionally if it is set to particular value) ''' count = 0 - if hasattr(trait, "_metadata") and metadata in list(trait._metadata.keys()) and (trait._metadata[metadata] == value or value==None): + if hasattr(trait, "_metadata") and metadata in list(trait._metadata.keys()) and (trait._metadata[metadata] == value or value == None): count += 1 if recursive: if hasattr(trait, 'inner_traits'): diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index fb1cb628c3..0cf6aa6e64 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -84,14 +84,14 @@ def _add_output_traits(self, base): return base def _list_outputs(self): - #manual mandatory inputs check + # manual mandatory inputs check if self._fields and self._mandatory_inputs: for key in self._fields: value = getattr(self.inputs, key) if not isdefined(value): msg = "%s requires a value for input '%s' because it was listed in 'fields'. \ You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." % \ - (self.__class__.__name__, key) + (self.__class__.__name__, key) raise ValueError(msg) outputs = self._outputs().get() @@ -104,9 +104,10 @@ def _list_outputs(self): class MergeInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): axis = traits.Enum('vstack', 'hstack', usedefault=True, - desc='direction in which to merge, hstack requires same number of elements in each input') + desc='direction in which to merge, hstack requires same number of elements in each input') no_flatten = traits.Bool(False, usedefault=True, desc='append to outlist instead of extending in vstack mode') + class MergeOutputSpec(TraitedSpec): out = traits.List(desc='Merged output') @@ -264,9 +265,9 @@ def _list_outputs(self): class SplitInputSpec(BaseInterfaceInputSpec): inlist = traits.List(traits.Any, mandatory=True, - desc='list of values to split') + desc='list of values to split') splits = traits.List(traits.Int, mandatory=True, - desc='Number of outputs in each split - should add to number of inputs') + desc='Number of outputs in each split - should add to number of inputs') squeeze = traits.Bool(False, usedefault=True, desc='unfold one-element splits removing the list') @@ -316,9 +317,9 @@ def _list_outputs(self): class SelectInputSpec(BaseInterfaceInputSpec): inlist = InputMultiPath(traits.Any, mandatory=True, - desc='list of values to choose from') + desc='list of values to choose from') index = InputMultiPath(traits.Int, mandatory=True, - desc='0-based indices of values to choose') + desc='0-based indices of values to choose') class SelectOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index 246106aadd..9f6286ade3 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -14,15 +14,18 @@ import os, os.path as op from nipype.interfaces.traits_extension import isdefined + class Vnifti2ImageInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='-in %s', mandatory=True, position=1, desc='in file') attributes = File(exists=True, argstr='-attr %s', position=2, desc='attribute file') out_file = File(name_template="%s.v", keep_extension=False, argstr='-out %s', hash_files=False, - position= -1, desc='output data file', 
name_source=["in_file"]) + position=-1, desc='output data file', name_source=["in_file"]) + class Vnifti2ImageOutputSpec(TraitedSpec): out_file = File(exists=True, desc='Output vista file') + class Vnifti2Image(CommandLine): """ Convert a nifti file into a vista file. @@ -38,18 +41,20 @@ class Vnifti2Image(CommandLine): """ _cmd = 'vnifti2image' - input_spec=Vnifti2ImageInputSpec - output_spec=Vnifti2ImageOutputSpec + input_spec = Vnifti2ImageInputSpec + output_spec = Vnifti2ImageOutputSpec class VtoMatInputSpec(CommandLineInputSpec): in_file = File(exists=True, argstr='-in %s', mandatory=True, position=1, desc='in file') out_file = File(name_template="%s.mat", keep_extension=False, argstr='-out %s', hash_files=False, - position= -1, desc='output mat file', name_source=["in_file"]) + position=-1, desc='output mat file', name_source=["in_file"]) + class VtoMatOutputSpec(TraitedSpec): out_file = File(exists=True, desc='Output mat file') + class VtoMat(CommandLine): """ Convert a nifti file into a vista file. @@ -65,6 +70,6 @@ class VtoMat(CommandLine): """ _cmd = 'vtomat' - input_spec=VtoMatInputSpec - output_spec=VtoMatOutputSpec + input_spec = VtoMatInputSpec + output_spec = VtoMatOutputSpec diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index c4d621a5e0..151441c7b2 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -110,7 +110,7 @@ def format_node(node, format='python', include_config=False): if args: filled_args = [] for arg in args: - if hasattr(node._interface, '_%s' % arg): + if hasattr(node._interface, '_%s' % arg): filled_args.append('%s=%s' % (arg, getattr(node._interface, '_%s' % arg))) args = ', '.join(filled_args) @@ -410,7 +410,7 @@ def connect(self, *args, **kwargs): destnode, edge_data)]) else: - #pass + # pass logger.debug('Removing connection: %s->%s' % (srcnode, destnode)) self._graph.remove_edges_from([(srcnode, destnode)]) @@ -706,7 +706,7 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): datestr = datetime.utcnow().strftime('%Y%m%dT%H%M%S') if str2bool(self.config['execution']['write_provenance']): prov_base = op.join(self.base_dir, - 'workflow_provenance_%s' % datestr) + 'workflow_provenance_%s' % datestr) logger.info('Provenance file prefix: %s' % prov_base) write_workflow_prov(execgraph, prov_base, format='all') return execgraph @@ -720,10 +720,10 @@ def _write_report_info(self, workingdir, name, graph): if not op.exists(report_dir): os.makedirs(report_dir) shutil.copyfile(op.join(op.dirname(__file__), - 'report_template.html'), + 'report_template.html'), op.join(report_dir, 'index.html')) shutil.copyfile(op.join(op.dirname(__file__), - '..', 'external', 'd3.js'), + '..', 'external', 'd3.js'), op.join(report_dir, 'd3.js')) nodes, groups = topological_sort(graph, depth_first=True) graph_file = op.join(report_dir, 'graph1.json') @@ -755,9 +755,10 @@ def _write_report_info(self, workingdir, name, graph): save_json(graph_file, json_dict) graph_file = op.join(report_dir, 'graph.json') template = '%%0%dd_' % np.ceil(np.log10(len(nodes))).astype(int) + def getname(u, i): name_parts = u.fullname.split('.') - #return '.'.join(name_parts[:-1] + [template % i + name_parts[-1]]) + # return '.'.join(name_parts[:-1] + [template % i + name_parts[-1]]) return template % i + name_parts[-1] json_dict = [] for i, node in enumerate(nodes): @@ -796,7 +797,7 @@ def _configure_exec_nodes(self, graph): for sourceinfo, field in sorted(data['connect']): node.input_source[field] = \ (op.join(edge[0].output_dir(), - 'result_%s.pklz' % 
edge[0].name), + 'result_%s.pklz' % edge[0].name), sourceinfo) def _check_nodes(self, nodes): @@ -1005,7 +1006,7 @@ def _generate_flatgraph(self): self.disconnect(node, cd[0], v, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # expand the workflow node - #logger.debug('expanding workflow: %s', node) + # logger.debug('expanding workflow: %s', node) node._generate_flatgraph() for innernode in node._graph.nodes(): innernode._hierarchy = '.'.join((self.name, @@ -1024,8 +1025,8 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, prefix = ' ' if hierarchy is None: hierarchy = [] - colorset = ['#FFFFC8','#0000FF','#B4B4FF','#E6E6FF','#FF0000', - '#FFB4B4','#FFE6E6','#00A300','#B4FFB4','#E6FFE6'] + colorset = ['#FFFFC8', '#0000FF', '#B4B4FF', '#E6E6FF', '#FF0000', + '#FFB4B4', '#FFE6E6', '#00A300', '#B4FFB4', '#E6FFE6'] dotlist = ['%slabel="%s";' % (prefix, self.name)] for node in nx.topological_sort(self._graph): @@ -1039,16 +1040,16 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, dotlist.append(('%s[label="%s", shape=box3d,' 'style=filled, color=black, colorscheme' '=greys7 fillcolor=2];') % (nodename, - node_class_name)) + node_class_name)) else: if colored: dotlist.append(('%s[label="%s", style=filled,' ' fillcolor="%s"];') - % (nodename,node_class_name, + % (nodename, node_class_name, colorset[level])) else: dotlist.append(('%s[label="%s"];') - % (nodename,node_class_name)) + % (nodename, node_class_name)) for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): @@ -1064,7 +1065,7 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, colored=colored, simple_form=simple_form, level=level+3)) dotlist.append('}') - if level==6:level=2 + if level == 6: level = 2 else: for subnode in self._graph.successors_iter(node): if node._hierarchy != subnode._hierarchy: @@ -1262,7 +1263,7 @@ def output_dir(self): else: outputdir = op.join(outputdir, *self.parameterization) return op.abspath(op.join(outputdir, - self.name)) + self.name)) def set_input(self, parameter, val): """ Set interface input value""" @@ -1380,13 +1381,13 @@ def run(self, updatehash=False): cannot_rerun = (str2bool( self.config['execution']['stop_on_first_rerun']) and not (self.overwrite is None - and self._interface.always_run)) + and self._interface.always_run)) if cannot_rerun: raise Exception(("Cannot rerun when 'stop_on_first_rerun' " "is set to True")) hashfile_unfinished = op.join(outdir, - '_0x%s_unfinished.json' % - hashvalue) + '_0x%s_unfinished.json' % + hashvalue) if op.exists(hashfile): os.remove(hashfile) rm_outdir = (op.exists(outdir) @@ -1401,12 +1402,12 @@ def run(self, updatehash=False): outdircont = os.listdir(outdir) if ((ex.errno == errno.ENOTEMPTY) and (len(outdircont) == 0)): logger.warn(('An exception was raised trying to remove old %s, ' - 'but the path seems empty. Is it an NFS mount?. ' - 'Passing the exception.') % outdir) + 'but the path seems empty. Is it an NFS mount?. 
' + 'Passing the exception.') % outdir) pass elif ((ex.errno == errno.ENOTEMPTY) and (len(outdircont) != 0)): logger.debug(('Folder contents (%d items): ' - '%s') % (len(outdircont), outdircont)) + '%s') % (len(outdircont), outdircont)) raise ex else: raise ex @@ -1822,7 +1823,7 @@ class JoinNode(Node): """ def __init__(self, interface, name, joinsource, joinfield=None, - unique=False, **kwargs): + unique=False, **kwargs): """ Parameters @@ -2015,8 +2016,9 @@ def _slot_value(self, field, index): return getattr(self._inputs, slot_field) except AttributeError as e: raise AttributeError("The join node %s does not have a slot field %s" - " to hold the %s value at index %d: %s" - % (self, slot_field, field, index, e)) + " to hold the %s value at index %d: %s" + % (self, slot_field, field, index, e)) + class MapNode(Node): """Wraps interface objects that need to be iterated on a list of inputs. @@ -2056,7 +2058,6 @@ def __init__(self, interface, iterfield, name, serial=False, nested=False, **kwa See Node docstring for additional keyword arguments. """ - super(MapNode, self).__init__(interface, name, **kwargs) if isinstance(iterfield, string_types): iterfield = [iterfield] @@ -2257,10 +2258,10 @@ def write_report(self, report_type=None, cwd=None): nodename = '_' + self.name + str(i) subnode_report_files.insert(i, 'subnode %d' % i + ' : ' + op.join(cwd, - 'mapflow', - nodename, - '_report', - 'report.rst')) + 'mapflow', + nodename, + '_report', + 'report.rst')) fp.writelines(write_rst_list(subnode_report_files)) fp.close() @@ -2277,7 +2278,7 @@ def num_subnodes(self): self._get_inputs() self._got_inputs = True self._check_iterfield() - if self._serial : + if self._serial: return 1 else: if self.nested: @@ -2325,7 +2326,7 @@ def _run_interface(self, execute=True, updatehash=False): if execute: if self.nested: nitems = len(filename_to_list(flatten(getattr(self.inputs, - self.iterfield[0])))) + self.iterfield[0])))) else: nitems = len(filename_to_list(getattr(self.inputs, self.iterfield[0]))) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 1413ba2c75..e4aea237c8 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -57,8 +57,8 @@ def report_crash(node, traceback=None, hostname=None): timeofcrash = strftime('%Y%m%d-%H%M%S') login_name = getpass.getuser() crashfile = 'crash-%s-%s-%s.pklz' % (timeofcrash, - login_name, - name) + login_name, + name) crashdir = node.config['execution']['crashdump_dir'] if crashdir is None: crashdir = os.getcwd() @@ -68,7 +68,7 @@ def report_crash(node, traceback=None, hostname=None): logger.info('Saving crash info to %s' % crashfile) logger.info(''.join(traceback)) savepkl(crashfile, dict(node=node, traceback=traceback)) - #np.savez(crashfile, node=node, traceback=traceback) + # np.savez(crashfile, node=node, traceback=traceback) return crashfile @@ -233,7 +233,7 @@ def run(self, graph, config, updatehash=False): # setup polling - TODO: change to threaded model notrun = [] while np.any(self.proc_done == False) | \ - np.any(self.proc_pending == True): + np.any(self.proc_pending == True): toappend = [] # trigger callbacks for any pending results while self.pending_tasks: @@ -311,7 +311,7 @@ def _submit_mapnode(self, jobid): self.procs.extend(mapnodesubids) self.depidx = ssp.vstack((self.depidx, ssp.lil_matrix(np.zeros( - (numnodes, self.depidx.shape[1])))), + (numnodes, self.depidx.shape[1])))), 'lil') self.depidx = ssp.hstack((self.depidx, ssp.lil_matrix( @@ -360,23 +360,23 @@ def 
_send_procs_to_workers(self, updatehash=False, graph=None): self.proc_pending[jobid] = True # Send job to task manager and add to pending tasks logger.info('Executing: %s ID: %d' % - (self.procs[jobid]._id, jobid)) + (self.procs[jobid]._id, jobid)) if self._status_callback: self._status_callback(self.procs[jobid], 'start') continue_with_submission = True if str2bool(self.procs[jobid].config['execution'] - ['local_hash_check']): + ['local_hash_check']): logger.debug('checking hash locally') try: hash_exists, _, _, _ = self.procs[ jobid].hash_exists() logger.debug('Hash exists %s' % str(hash_exists)) if (hash_exists and - (self.procs[jobid].overwrite == False or - (self.procs[jobid].overwrite == None and - not self.procs[jobid]._interface.always_run) - ) - ): + (self.procs[jobid].overwrite == False or + (self.procs[jobid].overwrite == None and + not self.procs[jobid]._interface.always_run) + ) + ): continue_with_submission = False self._task_finished_cb(jobid) self._remove_node_dirs() @@ -530,7 +530,7 @@ def _get_result(self, taskid): 'seconds. Batch dir contains crashdump file ' 'if node raised an exception.\n' 'Node working directory: ({2}) '.format( - taskid,timeout,node_dir) ) + taskid, timeout, node_dir)) raise IOError(error_message) except IOError as e: result_data['traceback'] = format_exc() @@ -607,18 +607,18 @@ def _get_args(self, node, keywords): value = open(value).read() if (hasattr(node, "plugin_args") and isinstance(node.plugin_args, dict) and - keyword in node.plugin_args): - if (keyword == "template" and - os.path.isfile(node.plugin_args[keyword])): - tmp_value = open(node.plugin_args[keyword]).read() - else: - tmp_value = node.plugin_args[keyword] - - if ('overwrite' in node.plugin_args and - node.plugin_args['overwrite']): - value = tmp_value - else: - value += tmp_value + keyword in node.plugin_args): + if (keyword == "template" and + os.path.isfile(node.plugin_args[keyword])): + tmp_value = open(node.plugin_args[keyword]).read() + else: + tmp_value = node.plugin_args[keyword] + + if ('overwrite' in node.plugin_args and + node.plugin_args['overwrite']): + value = tmp_value + else: + value += tmp_value values += (value, ) return values @@ -629,8 +629,6 @@ def _submit_graph(self, pyfiles, dependencies, nodes): """ raise NotImplementedError - - def _get_result(self, taskid): if taskid not in self._pending: raise Exception('Task %d not found' % taskid) @@ -638,7 +636,6 @@ def _get_result(self, taskid): return None node_dir = self._pending[taskid] - logger.debug(os.listdir(os.path.realpath(os.path.join(node_dir, '..')))) logger.debug(os.listdir(node_dir)) diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index 19f2b68ba1..4f571f4b27 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -40,7 +40,7 @@ def __init__(self, **kwargs): if 'plugin_args' in kwargs and kwargs['plugin_args']: if 'retry_timeout' in kwargs['plugin_args']: self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: + if 'max_tries' in kwargs['plugin_args']: self._max_tries = kwargs['plugin_args']['max_tries'] super(CondorPlugin, self).__init__(template, **kwargs) diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index c7443a4985..a41fff6988 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -49,6 +49,7 @@ class CondorDAGManPlugin(GraphPluginBase): log = %(basename)s.log getenv = True """ + def _get_str_or_file(self, arg): if 
os.path.isfile(arg): content = open(arg).read() @@ -75,8 +76,8 @@ def __init__(self, **kwargs): ('_block', 'block', False), ('_dagman_args', 'dagman_args', '')): if 'plugin_args' in kwargs \ - and not kwargs['plugin_args'] is None \ - and id_ in kwargs['plugin_args']: + and not kwargs['plugin_args'] is None \ + and id_ in kwargs['plugin_args']: if id_ == 'wrapper_cmd': val = os.path.abspath(kwargs['plugin_args'][id_]) elif id_ == 'block': @@ -86,7 +87,7 @@ def __init__(self, **kwargs): setattr(self, var, val) # TODO remove after some time if 'plugin_args' in kwargs \ - and not kwargs['plugin_args'] is None: + and not kwargs['plugin_args'] is None: plugin_args = kwargs['plugin_args'] if 'template' in plugin_args: warn("the 'template' argument is deprecated, use 'initial_specs' instead") @@ -129,9 +130,9 @@ def _submit_graph(self, pyfiles, dependencies, nodes): if not wrapper_cmd is None: specs['executable'] = wrapper_cmd specs['nodescript'] = \ - '%s %s %s' % (wrapper_args % specs, # give access to variables - sys.executable, - pyscript) + '%s %s %s' % (wrapper_args % specs, # give access to variables + sys.executable, + pyscript) submitspec = template % specs # write submit spec for this job submitfile = os.path.join(batch_dir, diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 29bb27c37f..3afa3f80fa 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -6,6 +6,7 @@ from .base import (PluginBase, logger) from ..utils import (nx) + class DebugPlugin(PluginBase): """Execute workflow in series """ @@ -13,12 +14,11 @@ class DebugPlugin(PluginBase): def __init__(self, plugin_args=None): super(DebugPlugin, self).__init__(plugin_args=plugin_args) if plugin_args and "callable" in plugin_args and \ - hasattr(plugin_args['callable'], '__call__'): + hasattr(plugin_args['callable'], '__call__'): self._callable = plugin_args['callable'] else: raise ValueError('plugin_args must contain a callable function') - def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline in a serial order. 
diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index 10576d8e5c..2599f3f76c 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -19,12 +19,13 @@ from .base import (DistributedPluginBase, logger, report_crash) + def execute_task(pckld_task, node_config, updatehash): from socket import gethostname from traceback import format_exc from nipype import config, logging - traceback=None - result=None + traceback = None + result = None import os cwd = os.getcwd() try: @@ -39,6 +40,7 @@ def execute_task(pckld_task, node_config, updatehash): os.chdir(cwd) return result, traceback, gethostname() + class IPythonPlugin(DistributedPluginBase): """Execute workflow with ipython """ @@ -110,6 +112,6 @@ def _report_crash(self, node, result=None): def _clear_task(self, taskid): if IPyversion >= '0.11': - logger.debug("Clearing id: %d"%taskid) + logger.debug("Clearing id: %d" %taskid) self.taskclient.purge_results(self.taskmap[taskid]) del self.taskmap[taskid] diff --git a/nipype/pipeline/plugins/ipythonx.py b/nipype/pipeline/plugins/ipythonx.py index 38066589a2..47887e3a93 100644 --- a/nipype/pipeline/plugins/ipythonx.py +++ b/nipype/pipeline/plugins/ipythonx.py @@ -15,6 +15,7 @@ from .base import (DistributedPluginBase, logger, report_crash) + class IPythonXPlugin(DistributedPluginBase): """Execute workflow with ipython """ @@ -63,10 +64,10 @@ def _submit_job(self, node, updatehash=False): result = task.result """ task = self.ipyclient.StringTask(cmdstr, - push = dict(task=node, + push=dict(task=node, updatehash=updatehash), - pull = ['result','traceback']) - return self.taskclient.run(task, block = False) + pull=['result', 'traceback']) + return self.taskclient.run(task, block=False) def _report_crash(self, node, result=None): if result and result['traceback']: @@ -79,5 +80,5 @@ def _report_crash(self, node, result=None): def _clear_task(self, taskid): if IPyversion >= '0.10.1': - logger.debug("Clearing id: %d"%taskid) + logger.debug("Clearing id: %d" %taskid) self.taskclient.clear(taskid) diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 5892015c7b..fd6648c4ac 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -8,6 +8,7 @@ str2bool) from ..utils import (nx, dfs_preorder, topological_sort) + class LinearPlugin(PluginBase): """Execute workflow in series """ @@ -28,7 +29,7 @@ def run(self, graph, config, updatehash=False): old_wd = os.getcwd() notrun = [] donotrun = [] - nodes, _ = topological_sort(graph) + nodes, _ = topological_sort(graph) for node in nodes: try: if node in donotrun: @@ -47,9 +48,9 @@ def run(self, graph, config, updatehash=False): crashfile = report_crash(node) # remove dependencies from queue subnodes = [s for s in dfs_preorder(graph, node)] - notrun.append(dict(node = node, - dependents = subnodes, - crashfile = crashfile)) + notrun.append(dict(node=node, + dependents=subnodes, + crashfile=crashfile)) donotrun.extend(subnodes) if self._status_callback: self._status_callback(node, 'exception') diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index 5b03ab6a73..2b75398a9e 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -34,7 +34,7 @@ def __init__(self, **kwargs): if 'plugin_args' in kwargs and kwargs['plugin_args']: if 'retry_timeout' in kwargs['plugin_args']: self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: + if 
'max_tries' in kwargs['plugin_args']: self._max_tries = kwargs['plugin_args']['max_tries'] if 'bsub_args' in kwargs['plugin_args']: self._bsub_args = kwargs['plugin_args']['bsub_args'] diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 0f6b11c30a..062c8e5d04 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -12,16 +12,18 @@ from .base import (DistributedPluginBase, report_crash) + def run_node(node, updatehash): result = dict(result=None, traceback=None) try: result['result'] = node.run(updatehash=updatehash) except: etype, eval, etr = sys.exc_info() - result['traceback'] = format_exception(etype,eval,etr) + result['traceback'] = format_exception(etype, eval, etr) result['result'] = node.result return result + class NonDaemonProcess(Process): """A non-daemon process to support internal multiprocessing. """ @@ -33,11 +35,13 @@ def _set_daemon(self, value): daemon = property(_get_daemon, _set_daemon) + class NonDaemonPool(pool.Pool): """A process pool with non-daemon processes. """ Process = NonDaemonProcess + class MultiProcPlugin(DistributedPluginBase): """Execute workflow with multiprocessing @@ -68,7 +72,7 @@ def __init__(self, plugin_args=None): def _get_result(self, taskid): if taskid not in self._taskresult: - raise RuntimeError('Multiproc task %d not found'%taskid) + raise RuntimeError('Multiproc task %d not found' %taskid) if not self._taskresult[taskid].ready(): return None return self._taskresult[taskid].get() diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index 089a8288e8..eba7c5c599 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -36,9 +36,9 @@ def __init__(self, **kwargs): if 'plugin_args' in kwargs and kwargs['plugin_args']: if 'retry_timeout' in kwargs['plugin_args']: self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: + if 'max_tries' in kwargs['plugin_args']: self._max_tries = kwargs['plugin_args']['max_tries'] - if 'max_jobname_len' in kwargs['plugin_args']: + if 'max_jobname_len' in kwargs['plugin_args']: self._max_jobname_len = kwargs['plugin_args']['max_jobname_len'] super(PBSPlugin, self).__init__(template, **kwargs) @@ -48,8 +48,8 @@ def _is_pending(self, taskid): stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, e = proc.communicate() - errmsg = 'Unknown Job Id' # %s' % taskid - return errmsg not in e + errmsg = 'Unknown Job Id' # %s' % taskid + return errmsg not in e def _submit_batchtask(self, scriptfile, node): cmd = CommandLine('qsub', environ=os.environ.data, diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index 042ebaf3bd..60c8c4600b 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -9,23 +9,23 @@ from ...interfaces.base import CommandLine -def node_completed_status( checknode): +def node_completed_status(checknode): """ A function to determine if a node has previously completed it's work :param checknode: The node to check the run status :return: boolean value True indicates that the node does not need to be run. 
""" """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = ( checknode.overwrite == False or - (checknode.overwrite == None and - not checknode._interface.always_run ) + node_state_does_not_require_overwrite = (checknode.overwrite == False or + (checknode.overwrite == None and + not checknode._interface.always_run) ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False - return (hash_exists and node_state_does_not_require_overwrite ) + return (hash_exists and node_state_does_not_require_overwrite) class SGEGraphPlugin(GraphPluginBase): @@ -67,21 +67,21 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SGE """ - job_name='j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) + job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) # Condition job_name to be a valid bash identifier (i.e. - is invalid) - job_name=job_name.replace('-','_').replace('.','_').replace(':','_') + job_name = job_name.replace('-', '_').replace('.', '_').replace(':', '_') return job_name batch_dir, _ = os.path.split(pyfiles[0]) submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') cache_doneness_per_node = dict() - if self._dont_resubmit_completed_jobs: ## A future parameter for controlling this behavior could be added here + if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) - #if the node itself claims done, then check to ensure all - #dependancies are also done + # if the node itself claims done, then check to ensure all + # dependancies are also done if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: @@ -97,7 +97,7 @@ def make_job_name(jobnumber, nodeslist): fp.writelines('# Condense format attempted\n') for idx, pyscript in enumerate(pyfiles): node = nodes[idx] - if cache_doneness_per_node.get(idx,False): + if cache_doneness_per_node.get(idx, False): continue else: template, qsub_args = self._get_args( @@ -120,10 +120,10 @@ def make_job_name(jobnumber, nodeslist): if idx in dependencies: values = ' ' for jobid in dependencies[idx]: - ## Avoid dependancies of done jobs + # Avoid dependancies of done jobs if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False: values += "${{{0}}},".format(make_job_name(jobid, nodes)) - if values != ' ': # i.e. if some jobs were added to dependency list + if values != ' ': # i.e. 
if some jobs were added to dependency list values = values.rstrip(',') deps = '-hold_jid%s' % values jobname = make_job_name(idx, nodes) diff --git a/nipype/pipeline/plugins/slurm.py b/nipype/pipeline/plugins/slurm.py index 04218589c3..aab7c38317 100644 --- a/nipype/pipeline/plugins/slurm.py +++ b/nipype/pipeline/plugins/slurm.py @@ -16,8 +16,6 @@ from nipype.interfaces.base import CommandLine - - class SLURMPlugin(SGELikeBatchManagerBase): ''' Execute using SLURM @@ -32,10 +30,9 @@ class SLURMPlugin(SGELikeBatchManagerBase): ''' - def __init__(self, **kwargs): - template="#!/bin/bash" + template = "#!/bin/bash" self._retry_timeout = 2 self._max_tries = 2 @@ -45,7 +42,7 @@ def __init__(self, **kwargs): if 'plugin_args' in kwargs and kwargs['plugin_args']: if 'retry_timeout' in kwargs['plugin_args']: self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: + if 'max_tries' in kwargs['plugin_args']: self._max_tries = kwargs['plugin_args']['max_tries'] if 'template' in kwargs['plugin_args']: self._template = kwargs['plugin_args']['template'] diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index bd03f1a3e1..08abe4b394 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -9,23 +9,23 @@ from ...interfaces.base import CommandLine -def node_completed_status( checknode): +def node_completed_status(checknode): """ A function to determine if a node has previously completed it's work :param checknode: The node to check the run status :return: boolean value True indicates that the node does not need to be run. """ """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = ( checknode.overwrite == False or - (checknode.overwrite == None and - not checknode._interface.always_run ) + node_state_does_not_require_overwrite = (checknode.overwrite == False or + (checknode.overwrite == None and + not checknode._interface.always_run) ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False - return (hash_exists and node_state_does_not_require_overwrite ) + return (hash_exists and node_state_does_not_require_overwrite) class SLURMGraphPlugin(GraphPluginBase): @@ -39,13 +39,13 @@ class SLURMGraphPlugin(GraphPluginBase): qsub call """ - _template="#!/bin/bash" + _template = "#!/bin/bash" def __init__(self, **kwargs): if 'plugin_args' in kwargs and kwargs['plugin_args']: if 'retry_timeout' in kwargs['plugin_args']: self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: + if 'max_tries' in kwargs['plugin_args']: self._max_tries = kwargs['plugin_args']['max_tries'] if 'template' in kwargs['plugin_args']: self._template = kwargs['plugin_args']['template'] @@ -66,21 +66,21 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SLURM """ - job_name='j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) + job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) - job_name=job_name.replace('-','_').replace('.','_').replace(':','_') + job_name = job_name.replace('-', '_').replace('.', '_').replace(':', '_') return job_name batch_dir, _ = os.path.split(pyfiles[0]) submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') cache_doneness_per_node = dict() - if self._dont_resubmit_completed_jobs: ## A future parameter for controlling this behavior could be added here + if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) - #if the node itself claims done, then check to ensure all - #dependancies are also done + # if the node itself claims done, then check to ensure all + # dependancies are also done if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: @@ -96,7 +96,7 @@ def make_job_name(jobnumber, nodeslist): fp.writelines('# Condense format attempted\n') for idx, pyscript in enumerate(pyfiles): node = nodes[idx] - if cache_doneness_per_node.get(idx,False): + if cache_doneness_per_node.get(idx, False): continue else: template, sbatch_args = self._get_args( @@ -119,10 +119,10 @@ def make_job_name(jobnumber, nodeslist): if idx in dependencies: values = '' for jobid in dependencies[idx]: - ## Avoid dependancies of done jobs + # Avoid dependancies of done jobs if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False: values += "${{{0}}}:".format(make_job_name(jobid, nodes)) - if values != '': # i.e. if some jobs were added to dependency list + if values != '': # i.e. if some jobs were added to dependency list values = values.rstrip(':') deps = '--dependency=afterok:%s' % values jobname = make_job_name(idx, nodes) diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py index 915b0e373e..db7768703e 100644 --- a/nipype/pipeline/plugins/somaflow.py +++ b/nipype/pipeline/plugins/somaflow.py @@ -14,6 +14,7 @@ from .base import (GraphPluginBase, logger) + class SomaFlowPlugin(GraphPluginBase): """Execute using Soma workflow """ diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py index ca38e80fcd..243ae195c2 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -9,11 +9,12 @@ assert_false, skipif) import nipype.pipeline.plugins.base as pb + def test_scipy_sparse(): foo = ssp.lil_matrix(np.eye(3, k=1)) goo = foo.getrowview(0) goo[goo.nonzero()] = 0 - yield assert_equal, foo[0,1], 0 + yield assert_equal, foo[0, 1], 0 ''' Can use the following code to test that a mapnode crash continues successfully @@ -38,4 +39,4 @@ def func(arg1): wf.base_dir = '/tmp' wf.run(plugin='MultiProc') -''' \ No newline at end of file +''' diff --git a/nipype/pipeline/plugins/tests/test_debug.py b/nipype/pipeline/plugins/tests/test_debug.py index 3216e8a21e..f15fc62939 100644 --- a/nipype/pipeline/plugins/tests/test_debug.py +++ b/nipype/pipeline/plugins/tests/test_debug.py @@ -6,13 +6,16 @@ from nipype.testing import assert_raises, assert_false import nipype.pipeline.engine as pe + class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc='a random int') input2 = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ 
-26,20 +29,22 @@ def _list_outputs(self): outputs['output1'] = [1, self.inputs.input1] return outputs + def callme(node, graph): pass + def test_debug(): cur_dir = os.getcwd() temp_dir = mkdtemp(prefix='test_engine_') os.chdir(temp_dir) pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod2 = pe.MapNode(interface=TestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1,mod2,[('output1','input1')])]) + pipe.connect([(mod1, mod2, [('output1', 'input1')])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 run_wf = lambda: pipe.run(plugin="Debug") @@ -51,4 +56,4 @@ def test_debug(): exception_raised = True yield assert_false, exception_raised os.chdir(cur_dir) - rmtree(temp_dir) \ No newline at end of file + rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py index 9ae20ea2b5..a59c7c1981 100644 --- a/nipype/pipeline/plugins/tests/test_linear.py +++ b/nipype/pipeline/plugins/tests/test_linear.py @@ -6,13 +6,16 @@ from nipype.testing import assert_equal import nipype.pipeline.engine as pe + class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc='a random int') input2 = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -26,23 +29,24 @@ def _list_outputs(self): outputs['output1'] = [1, self.inputs.input1] return outputs + def test_run_in_series(): cur_dir = os.getcwd() temp_dir = mkdtemp(prefix='test_engine_') os.chdir(temp_dir) pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod2 = pe.MapNode(interface=TestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1,mod2,[('output1','input1')])]) + pipe.connect([(mod1, mod2, [('output1', 'input1')])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="Linear") - names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()] + names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = execgraph.nodes()[names.index('pipe.mod1')] result = node.get_output('output1') yield assert_equal, result, [1, 1] os.chdir(cur_dir) - rmtree(temp_dir) \ No newline at end of file + rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 66f755da9a..efa9ec4161 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -6,13 +6,16 @@ from nipype.testing import assert_equal import nipype.pipeline.engine as pe + class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc='a random int') input2 = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -26,24 +29,25 @@ def _list_outputs(self): outputs['output1'] = [1, self.inputs.input1] return outputs + def test_run_multiproc(): cur_dir = os.getcwd() temp_dir = mkdtemp(prefix='test_engine_') os.chdir(temp_dir) pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod2 = 
pe.MapNode(interface=TestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1,mod2,[('output1','input1')])]) + pipe.connect([(mod1, mod2, [('output1', 'input1')])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 pipe.config['execution']['poll_sleep_duration'] = 2 execgraph = pipe.run(plugin="MultiProc") - names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()] + names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = execgraph.nodes()[names.index('pipe.mod1')] result = node.get_output('output1') yield assert_equal, result, [1, 1] os.chdir(cur_dir) - rmtree(temp_dir) \ No newline at end of file + rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py index 0b9dc3af0b..5682965de1 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py @@ -36,49 +36,48 @@ def dummyFunction(filename): ''' j = 0 for i in range(0, 10): - j += i + j += i # j is now 45 (0+1+2+3+4+5+6+7+8+9) with open(filename, 'w') as f: - f.write(str(j)) + f.write(str(j)) for n in range(numberOfThreads): - # mark thread as alive - a[n] = True - - # create a temp file to use as the data exchange container - tmpFile = tempfile.mkstemp('.txt', 'test_engine_')[1] - f[n] = tmpFile # keep track of the temp file - t[n] = multiprocessing.Process(target=dummyFunction, - args=(tmpFile,)) - # fire up the job - t[n].start() + # mark thread as alive + a[n] = True + # create a temp file to use as the data exchange container + tmpFile = tempfile.mkstemp('.txt', 'test_engine_')[1] + f[n] = tmpFile # keep track of the temp file + t[n] = multiprocessing.Process(target=dummyFunction, + args=(tmpFile,)) + # fire up the job + t[n].start() # block until all processes are done allDone = False while not allDone: - time.sleep(1) + time.sleep(1) - for n in range(numberOfThreads): + for n in range(numberOfThreads): - a[n] = t[n].is_alive() + a[n] = t[n].is_alive() - if not any(a): - # if no thread is alive - allDone = True + if not any(a): + # if no thread is alive + allDone = True # here, all processes are done # read in all temp files and sum them up total = insum for file in f: - with open(file) as fd: - total += int(fd.read()) - os.remove(file) + with open(file) as fd: + total += int(fd.read()) + os.remove(file) return total @@ -116,7 +115,7 @@ def run_multiproc_nondaemon_with_flag(nondaemon_flag): plugin_args={'n_procs': 2, 'non_daemon': nondaemon_flag}) - names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()] + names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = execgraph.nodes()[names.index('pipe.f2')] result = node.get_output('sum_out') os.chdir(cur_dir) @@ -134,14 +133,15 @@ def test_run_multiproc_nondaemon_false(): ''' shouldHaveFailed = False try: - # with nondaemon_flag = False, the execution should fail + # with nondaemon_flag = False, the execution should fail run_multiproc_nondaemon_with_flag(False) except: shouldHaveFailed = True yield assert_true, shouldHaveFailed + def test_run_multiproc_nondaemon_true(): # with nondaemon_flag = True, the execution should succeed result = run_multiproc_nondaemon_with_flag(True) - yield assert_equal, result, 180 # n_procs (2) * numberOfThreads (2) * 45 == 180 + yield assert_equal, result, 180 # n_procs (2) * numberOfThreads (2) * 45 == 180 diff --git a/nipype/pipeline/plugins/tests/test_pbs.py 
b/nipype/pipeline/plugins/tests/test_pbs.py index 51b0ed20e2..8aa52e1163 100644 --- a/nipype/pipeline/plugins/tests/test_pbs.py +++ b/nipype/pipeline/plugins/tests/test_pbs.py @@ -7,13 +7,16 @@ from nipype.testing import assert_equal, skipif import nipype.pipeline.engine as pe + class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc='a random int') input2 = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -27,6 +30,7 @@ def _list_outputs(self): outputs['output1'] = [1, self.inputs.input1] return outputs + @skipif(True) def test_run_pbsgraph(): cur_dir = os.getcwd() @@ -34,17 +38,17 @@ def test_run_pbsgraph(): os.chdir(temp_dir) pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod2 = pe.MapNode(interface=TestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1,mod2,[('output1','input1')])]) + pipe.connect([(mod1, mod2, [('output1', 'input1')])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="PBSGraph") - names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()] + names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = execgraph.nodes()[names.index('pipe.mod1')] result = node.get_output('output1') yield assert_equal, result, [1, 1] os.chdir(cur_dir) - rmtree(temp_dir) \ No newline at end of file + rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py b/nipype/pipeline/plugins/tests/test_somaflow.py index b83cc5218e..27b2e30a83 100644 --- a/nipype/pipeline/plugins/tests/test_somaflow.py +++ b/nipype/pipeline/plugins/tests/test_somaflow.py @@ -9,13 +9,16 @@ from nipype.pipeline.plugins.somaflow import soma_not_loaded + class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc='a random int') input2 = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -29,6 +32,7 @@ def _list_outputs(self): outputs['output1'] = [1, self.inputs.input1] return outputs + @skipif(soma_not_loaded) def test_run_somaflow(): cur_dir = os.getcwd() @@ -36,17 +40,17 @@ def test_run_somaflow(): os.chdir(temp_dir) pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod2 = pe.MapNode(interface=TestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1,mod2,[('output1','input1')])]) + pipe.connect([(mod1, mod2, [('output1', 'input1')])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="SomaFlow") - names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()] + names = ['.'.join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = execgraph.nodes()[names.index('pipe.mod1')] result = node.get_output('output1') yield assert_equal, result, [1, 1] os.chdir(cur_dir) - rmtree(temp_dir) \ No newline at end of file + rmtree(temp_dir) diff --git a/nipype/pipeline/tests/test_engine.py b/nipype/pipeline/tests/test_engine.py index 020476fbda..08c25aced6 100644 --- a/nipype/pipeline/tests/test_engine.py +++ b/nipype/pipeline/tests/test_engine.py @@ -21,9 +21,11 @@ class InputSpec(nib.TraitedSpec): 
input1 = nib.traits.Int(desc='a random int') input2 = nib.traits.Int(desc='a random int') + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -37,26 +39,29 @@ def _list_outputs(self): outputs['output1'] = [1, self.inputs.input1] return outputs + def test_init(): yield assert_raises, Exception, pe.Workflow pipe = pe.Workflow(name='pipe') yield assert_equal, type(pipe._graph), nx.DiGraph + def test_connect(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - pipe.connect([(mod1,mod2,[('output1','input1')])]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + pipe.connect([(mod1, mod2, [('output1', 'input1')])]) yield assert_true, mod1 in pipe._graph.nodes() yield assert_true, mod2 in pipe._graph.nodes() - yield assert_equal, pipe._graph.get_edge_data(mod1,mod2), {'connect':[('output1','input1')]} + yield assert_equal, pipe._graph.get_edge_data(mod1, mod2), {'connect': [('output1', 'input1')]} + def test_add_nodes(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - pipe.add_nodes([mod1,mod2]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + pipe.add_nodes([mod1, mod2]) yield assert_true, mod1 in pipe._graph.nodes() yield assert_true, mod2 in pipe._graph.nodes() @@ -66,128 +71,137 @@ def test_add_nodes(): # XXX - SG I'll create a graphical version of these tests and actually # ensure that all connections are tested later + def test1(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') pipe.add_nodes([mod1]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 1 yield assert_equal, len(pipe._execgraph.edges()), 0 + def test2(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod1.iterables = dict(input1=lambda:[1,2],input2=lambda:[1,2]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod1.iterables = dict(input1=lambda: [1, 2], input2=lambda: [1, 2]) pipe.add_nodes([mod1]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 4 yield assert_equal, len(pipe._execgraph.edges()), 0 + def test3(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod1.iterables = {} - mod2 = pe.Node(interface=TestInterface(),name='mod2') - mod2.iterables = dict(input1=lambda:[1,2]) - pipe.connect([(mod1,mod2,[('output1','input2')])]) + mod2 = pe.Node(interface=TestInterface(), name='mod2') + mod2.iterables = dict(input1=lambda: [1, 2]) + pipe.connect([(mod1, mod2, [('output1', 'input2')])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 3 yield assert_equal, len(pipe._execgraph.edges()), 2 + def test4(): pipe = pe.Workflow(name='pipe') - mod1 = 
pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - mod1.iterables = dict(input1=lambda:[1,2]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + mod1.iterables = dict(input1=lambda: [1, 2]) mod2.iterables = {} - pipe.connect([(mod1,mod2,[('output1','input2')])]) + pipe.connect([(mod1, mod2, [('output1', 'input2')])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 4 yield assert_equal, len(pipe._execgraph.edges()), 2 + def test5(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - mod1.iterables = dict(input1=lambda:[1,2]) - mod2.iterables = dict(input1=lambda:[1,2]) - pipe.connect([(mod1,mod2,[('output1','input2')])]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + mod1.iterables = dict(input1=lambda: [1, 2]) + mod2.iterables = dict(input1=lambda: [1, 2]) + pipe.connect([(mod1, mod2, [('output1', 'input2')])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 6 yield assert_equal, len(pipe._execgraph.edges()), 4 + def test6(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - mod3 = pe.Node(interface=TestInterface(),name='mod3') + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + mod3 = pe.Node(interface=TestInterface(), name='mod3') mod1.iterables = {} - mod2.iterables = dict(input1=lambda:[1,2]) + mod2.iterables = dict(input1=lambda: [1, 2]) mod3.iterables = {} - pipe.connect([(mod1,mod2,[('output1','input2')]), - (mod2,mod3,[('output1','input2')])]) + pipe.connect([(mod1, mod2, [('output1', 'input2')]), + (mod2, mod3, [('output1', 'input2')])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 5 yield assert_equal, len(pipe._execgraph.edges()), 4 + def test7(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - mod3 = pe.Node(interface=TestInterface(),name='mod3') - mod1.iterables = dict(input1=lambda:[1,2]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + mod3 = pe.Node(interface=TestInterface(), name='mod3') + mod1.iterables = dict(input1=lambda: [1, 2]) mod2.iterables = {} mod3.iterables = {} - pipe.connect([(mod1,mod3,[('output1','input1')]), - (mod2,mod3,[('output1','input2')])]) + pipe.connect([(mod1, mod3, [('output1', 'input1')]), + (mod2, mod3, [('output1', 'input2')])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 5 yield assert_equal, len(pipe._execgraph.edges()), 4 + def test8(): pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - mod3 = pe.Node(interface=TestInterface(),name='mod3') - mod1.iterables = 
dict(input1=lambda:[1,2]) - mod2.iterables = dict(input1=lambda:[1,2]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + mod3 = pe.Node(interface=TestInterface(), name='mod3') + mod1.iterables = dict(input1=lambda: [1, 2]) + mod2.iterables = dict(input1=lambda: [1, 2]) mod3.iterables = {} - pipe.connect([(mod1,mod3,[('output1','input1')]), - (mod2,mod3,[('output1','input2')])]) + pipe.connect([(mod1, mod3, [('output1', 'input1')]), + (mod2, mod3, [('output1', 'input2')])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 8 yield assert_equal, len(pipe._execgraph.edges()), 8 edgenum = sorted([(len(pipe._execgraph.in_edges(node)) + \ - len(pipe._execgraph.out_edges(node))) \ - for node in pipe._execgraph.nodes()]) - yield assert_true, edgenum[0]>0 + len(pipe._execgraph.out_edges(node))) \ + for node in pipe._execgraph.nodes()]) + yield assert_true, edgenum[0] > 0 + def test_expansion(): pipe1 = pe.Workflow(name='pipe1') - mod1 = pe.Node(interface=TestInterface(),name='mod1') - mod2 = pe.Node(interface=TestInterface(),name='mod2') - pipe1.connect([(mod1,mod2,[('output1','input2')])]) + mod1 = pe.Node(interface=TestInterface(), name='mod1') + mod2 = pe.Node(interface=TestInterface(), name='mod2') + pipe1.connect([(mod1, mod2, [('output1', 'input2')])]) pipe2 = pe.Workflow(name='pipe2') - mod3 = pe.Node(interface=TestInterface(),name='mod3') - mod4 = pe.Node(interface=TestInterface(),name='mod4') - pipe2.connect([(mod3,mod4,[('output1','input2')])]) + mod3 = pe.Node(interface=TestInterface(), name='mod3') + mod4 = pe.Node(interface=TestInterface(), name='mod4') + pipe2.connect([(mod3, mod4, [('output1', 'input2')])]) pipe3 = pe.Workflow(name="pipe3") - pipe3.connect([(pipe1, pipe2, [('mod2.output1','mod4.input1')])]) + pipe3.connect([(pipe1, pipe2, [('mod2.output1', 'mod4.input1')])]) pipe4 = pe.Workflow(name="pipe4") - mod5 = pe.Node(interface=TestInterface(),name='mod5') + mod5 = pe.Node(interface=TestInterface(), name='mod5') pipe4.add_nodes([mod5]) pipe5 = pe.Workflow(name="pipe5") pipe5.add_nodes([pipe4]) pipe6 = pe.Workflow(name="pipe6") - pipe6.connect([(pipe5, pipe3, [('pipe4.mod5.output1','pipe2.mod3.input1')])]) + pipe6.connect([(pipe5, pipe3, [('pipe4.mod5.output1', 'pipe2.mod3.input1')])]) error_raised = False try: pipe6._flatgraph = pipe6._create_flat_graph() @@ -195,30 +209,32 @@ def test_expansion(): error_raised = True yield assert_false, error_raised + def test_iterable_expansion(): import nipype.pipeline.engine as pe wf1 = pe.Workflow(name='test') - node1 = pe.Node(TestInterface(),name='node1') - node2 = pe.Node(TestInterface(),name='node2') - node1.iterables = ('input1',[1,2]) - wf1.connect(node1,'output1', node2, 'input2') + node1 = pe.Node(TestInterface(), name='node1') + node2 = pe.Node(TestInterface(), name='node2') + node1.iterables = ('input1', [1, 2]) + wf1.connect(node1, 'output1', node2, 'input2') wf3 = pe.Workflow(name='group') - for i in [0,1,2]: - wf3.add_nodes([wf1.clone(name='test%d'%i)]) + for i in [0, 1, 2]: + wf3.add_nodes([wf1.clone(name='test%d' %i)]) wf3._flatgraph = wf3._create_flat_graph() - yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()),12 + yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 12 + def test_synchronize_expansion(): import nipype.pipeline.engine as pe wf1 = pe.Workflow(name='test') - node1 = 
pe.Node(TestInterface(),name='node1') - node1.iterables = [('input1',[1,2]),('input2',[3,4,5])] + node1 = pe.Node(TestInterface(), name='node1') + node1.iterables = [('input1', [1, 2]), ('input2', [3, 4, 5])] node1.synchronize = True - node2 = pe.Node(TestInterface(),name='node2') - wf1.connect(node1,'output1', node2, 'input2') + node2 = pe.Node(TestInterface(), name='node2') + wf1.connect(node1, 'output1', node2, 'input2') wf3 = pe.Workflow(name='group') - for i in [0,1,2]: - wf3.add_nodes([wf1.clone(name='test%d'%i)]) + for i in [0, 1, 2]: + wf3.add_nodes([wf1.clone(name='test%d' %i)]) wf3._flatgraph = wf3._create_flat_graph() # Each expanded graph clone has: # 3 node1 expansion nodes and @@ -227,6 +243,7 @@ def test_synchronize_expansion(): # => 18 nodes in the group yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 18 + def test_synchronize_tuples_expansion(): import nipype.pipeline.engine as pe wf1 = pe.Workflow(name='test') @@ -241,12 +258,13 @@ def test_synchronize_tuples_expansion(): wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d'%i)]) + wf3.add_nodes([wf1.clone(name='test%d' %i)]) wf3._flatgraph = wf3._create_flat_graph() # Identical to test_synchronize_expansion yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 18 + def test_itersource_expansion(): import nipype.pipeline.engine as pe @@ -268,7 +286,7 @@ def test_itersource_expansion(): wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d'%i)]) + wf3.add_nodes([wf1.clone(name='test%d' %i)]) wf3._flatgraph = wf3._create_flat_graph() @@ -282,24 +300,25 @@ def test_itersource_expansion(): # => 3 * 14 = 42 nodes in the group yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 42 + def test_itersource_synchronize1_expansion(): import nipype.pipeline.engine as pe wf1 = pe.Workflow(name='test') - node1 = pe.Node(TestInterface(),name='node1') + node1 = pe.Node(TestInterface(), name='node1') node1.iterables = [('input1', [1, 2]), ('input2', [3, 4])] node1.synchronize = True - node2 = pe.Node(TestInterface(),name='node2') + node2 = pe.Node(TestInterface(), name='node2') wf1.connect(node1, 'output1', node2, 'input1') - node3 = pe.Node(TestInterface(),name='node3') + node3 = pe.Node(TestInterface(), name='node3') node3.itersource = ('node1', ['input1', 'input2']) node3.iterables = [('input1', {(1, 3): [5, 6]}), ('input2', {(1, 3): [7, 8], (2, 4): [9]})] - wf1.connect(node2,'output1', node3, 'input1') - node4 = pe.Node(TestInterface(),name='node4') - wf1.connect(node3,'output1', node4, 'input1') + wf1.connect(node2, 'output1', node3, 'input1') + node4 = pe.Node(TestInterface(), name='node4') + wf1.connect(node3, 'output1', node4, 'input1') wf3 = pe.Workflow(name='group') - for i in [0,1,2]: - wf3.add_nodes([wf1.clone(name='test%d'%i)]) + for i in [0, 1, 2]: + wf3.add_nodes([wf1.clone(name='test%d' %i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: @@ -312,26 +331,27 @@ def test_itersource_synchronize1_expansion(): # => 3 * 14 = 42 nodes in the group yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 42 + def test_itersource_synchronize2_expansion(): import nipype.pipeline.engine as pe wf1 = pe.Workflow(name='test') - node1 = pe.Node(TestInterface(),name='node1') - node1.iterables = [('input1',[1,2]), ('input2',[3,4])] + node1 = pe.Node(TestInterface(), name='node1') + node1.iterables = [('input1', [1, 2]), ('input2', [3, 4])] 
node1.synchronize = True - node2 = pe.Node(TestInterface(),name='node2') - wf1.connect(node1,'output1', node2, 'input1') - node3 = pe.Node(TestInterface(),name='node3') + node2 = pe.Node(TestInterface(), name='node2') + wf1.connect(node1, 'output1', node2, 'input1') + node3 = pe.Node(TestInterface(), name='node3') node3.itersource = ('node1', ['input1', 'input2']) node3.synchronize = True node3.iterables = [('input1', 'input2'), - {(1,3):[(5,7), (6,8)], (2,4):[(None,9)]}] - wf1.connect(node2,'output1', node3, 'input1') - node4 = pe.Node(TestInterface(),name='node4') - wf1.connect(node3,'output1', node4, 'input1') + {(1, 3): [(5, 7), (6, 8)], (2, 4):[(None, 9)]}] + wf1.connect(node2, 'output1', node3, 'input1') + node4 = pe.Node(TestInterface(), name='node4') + wf1.connect(node3, 'output1', node4, 'input1') wf3 = pe.Workflow(name='group') - for i in [0,1,2]: - wf3.add_nodes([wf1.clone(name='test%d'%i)]) + for i in [0, 1, 2]: + wf3.add_nodes([wf1.clone(name='test%d' %i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: @@ -344,28 +364,30 @@ def test_itersource_synchronize2_expansion(): # => 3 * 10 = 30 nodes in the group yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 30 + def test_disconnect(): import nipype.pipeline.engine as pe from nipype.interfaces.utility import IdentityInterface - a = pe.Node(IdentityInterface(fields=['a','b']),name='a') - b = pe.Node(IdentityInterface(fields=['a','b']),name='b') + a = pe.Node(IdentityInterface(fields=['a', 'b']), name='a') + b = pe.Node(IdentityInterface(fields=['a', 'b']), name='b') flow1 = pe.Workflow(name='test') - flow1.connect(a,'a',b,'a') - flow1.disconnect(a,'a',b,'a') + flow1.connect(a, 'a', b, 'a') + flow1.disconnect(a, 'a', b, 'a') yield assert_equal, flow1._graph.edges(), [] + def test_doubleconnect(): import nipype.pipeline.engine as pe from nipype.interfaces.utility import IdentityInterface - a = pe.Node(IdentityInterface(fields=['a','b']),name='a') - b = pe.Node(IdentityInterface(fields=['a','b']),name='b') + a = pe.Node(IdentityInterface(fields=['a', 'b']), name='a') + b = pe.Node(IdentityInterface(fields=['a', 'b']), name='b') flow1 = pe.Workflow(name='test') - flow1.connect(a,'a',b,'a') - x = lambda: flow1.connect(a,'b',b,'a') + flow1.connect(a, 'a', b, 'a') + x = lambda: flow1.connect(a, 'b', b, 'a') yield assert_raises, Exception, x - c = pe.Node(IdentityInterface(fields=['a','b']),name='c') + c = pe.Node(IdentityInterface(fields=['a', 'b']), name='c') flow1 = pe.Workflow(name='test2') - x = lambda : flow1.connect([(a, c, [('b', 'b')]), (b, c, [('a', 'b')])]) + x = lambda: flow1.connect([(a, c, [('b', 'b')]), (b, c, [('a', 'b')])]) yield assert_raises, Exception, x @@ -440,6 +462,8 @@ def test_doubleconnect(): ''' # Node + + def test_node_init(): yield assert_raises, Exception, pe.Node try: @@ -450,21 +474,22 @@ def test_node_init(): exception = False yield assert_true, exception + def test_workflow_add(): from nipype.interfaces.utility import IdentityInterface as ii - n1 = pe.Node(ii(fields=['a','b']),name='n1') - n2 = pe.Node(ii(fields=['c','d']),name='n2') - n3 = pe.Node(ii(fields=['c','d']),name='n1') + n1 = pe.Node(ii(fields=['a', 'b']), name='n1') + n2 = pe.Node(ii(fields=['c', 'd']), name='n2') + n3 = pe.Node(ii(fields=['c', 'd']), name='n1') w1 = pe.Workflow(name='test') - w1.connect(n1,'a',n2,'c') + w1.connect(n1, 'a', n2, 'c') yield assert_raises, IOError, w1.add_nodes, [n1] yield assert_raises, IOError, w1.add_nodes, [n2] yield assert_raises, IOError, w1.add_nodes, 
[n3] - yield assert_raises, IOError, w1.connect, [(w1,n2,[('n1.a','d')])] + yield assert_raises, IOError, w1.connect, [(w1, n2, [('n1.a', 'd')])] def test_node_get_output(): - mod1 = pe.Node(interface=TestInterface(),name='mod1') + mod1 = pe.Node(interface=TestInterface(), name='mod1') mod1.inputs.input1 = 1 mod1.run() yield assert_equal, mod1.get_output('output1'), [1, 1] @@ -480,7 +505,7 @@ def test_mapnode_iterfield_check(): mod1 = pe.MapNode(TestInterface(), iterfield=['input1', 'input2'], name='mod1') - mod1.inputs.input1 = [1,2] + mod1.inputs.input1 = [1, 2] mod1.inputs.input2 = 3 yield assert_raises, ValueError, mod1._check_iterfield @@ -490,6 +515,7 @@ def test_mapnode_nested(): wd = mkdtemp() os.chdir(wd) from nipype import MapNode, Function + def func1(in1): return in1 + 1 n1 = MapNode(Function(input_names=['in1'], @@ -498,10 +524,10 @@ def func1(in1): iterfield=['in1'], nested=True, name='n1') - n1.inputs.in1 = [[1,[2]],3,[4,5]] + n1.inputs.in1 = [[1, [2]], 3, [4, 5]] n1.run() print(n1.get_output('out')) - yield assert_equal, n1.get_output('out'), [[2,[3]],4,[5,6]] + yield assert_equal, n1.get_output('out'), [[2, [3]], 4, [5, 6]] n2 = MapNode(Function(input_names=['in1'], output_names=['out'], @@ -509,7 +535,7 @@ def func1(in1): iterfield=['in1'], nested=False, name='n1') - n2.inputs.in1 = [[1,[2]],3,[4,5]] + n2.inputs.in1 = [[1, [2]], 3, [4, 5]] error_raised = False try: n2.run() @@ -518,13 +544,16 @@ def func1(in1): error_raised = True yield assert_true, error_raised + def test_node_hash(): cwd = os.getcwd() wd = mkdtemp() os.chdir(wd) from nipype.interfaces.utility import Function + def func1(): return 1 + def func2(a): return a+1 n1 = pe.Node(Function(input_names=[], @@ -538,7 +567,7 @@ def func2(a): w1 = pe.Workflow(name='test') modify = lambda x: x+1 n1.inputs.a = 1 - w1.connect(n1, ('a', modify), n2,'a') + w1.connect(n1, ('a', modify), n2, 'a') w1.base_dir = wd # generate outputs w1.run(plugin='Linear') @@ -549,6 +578,7 @@ def func2(a): error_raised = False # create dummy distributed plugin class from nipype.pipeline.plugins.base import DistributedPluginBase + class RaiseError(DistributedPluginBase): def _submit_job(self, node, updatehash=False): raise Exception('Submit called') @@ -558,7 +588,7 @@ def _submit_job(self, node, updatehash=False): pe.logger.info('Exception: %s' % str(e)) error_raised = True yield assert_true, error_raised - #yield assert_true, 'Submit called' in e + # yield assert_true, 'Submit called' in e # rerun to ensure we have outputs w1.run(plugin='Linear') # set local check @@ -575,13 +605,16 @@ def _submit_job(self, node, updatehash=False): os.chdir(cwd) rmtree(wd) + def test_old_config(): cwd = os.getcwd() wd = mkdtemp() os.chdir(wd) from nipype.interfaces.utility import Function + def func1(): return 1 + def func2(a): return a+1 n1 = pe.Node(Function(input_names=[], @@ -595,7 +628,7 @@ def func2(a): w1 = pe.Workflow(name='test') modify = lambda x: x+1 n1.inputs.a = 1 - w1.connect(n1, ('a', modify), n2,'a') + w1.connect(n1, ('a', modify), n2, 'a') w1.base_dir = wd w1.config['execution']['crashdump_dir'] = wd @@ -618,6 +651,7 @@ def test_mapnode_json(): wd = mkdtemp() os.chdir(wd) from nipype import MapNode, Function, Workflow + def func1(in1): return in1 + 1 n1 = MapNode(Function(input_names=['in1'], @@ -654,11 +688,13 @@ def func1(in1): os.chdir(cwd) rmtree(wd) + def test_serial_input(): cwd = os.getcwd() wd = mkdtemp() os.chdir(wd) from nipype import MapNode, Function, Workflow + def func1(in1): return in1 n1 = 
MapNode(Function(input_names=['in1'], @@ -666,8 +702,7 @@ def func1(in1): function=func1), iterfield=['in1'], name='n1') - n1.inputs.in1 = [1,2,3] - + n1.inputs.in1 = [1, 2, 3] w1 = Workflow(name='test') w1.base_dir = wd @@ -691,7 +726,7 @@ def func1(in1): yield assert_false, error_raised # test output of num_subnodes method when serial is True - n1._serial=True + n1._serial = True yield assert_equal, n1.num_subnodes(), 1 # test running the workflow on serial conditions diff --git a/nipype/pipeline/tests/test_join.py b/nipype/pipeline/tests/test_join.py index ab58fbacb5..4c5119ff46 100644 --- a/nipype/pipeline/tests/test_join.py +++ b/nipype/pipeline/tests/test_join.py @@ -13,13 +13,16 @@ from nipype.interfaces.utility import IdentityInterface from nipype.interfaces.base import traits, File + class PickFirstSpec(nib.TraitedSpec): in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) + class PickFirstOutSpec(nib.TraitedSpec): output1 = File(exists=True) + class PickFirst(nib.BaseInterface): input_spec = PickFirstSpec output_spec = PickFirstOutSpec @@ -38,9 +41,11 @@ class IncrementInputSpec(nib.TraitedSpec): input1 = nib.traits.Int(mandatory=True, desc='input') inc = nib.traits.Int(usedefault=True, default_value=1, desc='increment') + class IncrementOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(desc='ouput') + class IncrementInterface(nib.BaseInterface): input_spec = IncrementInputSpec output_spec = IncrementOutputSpec @@ -58,13 +63,16 @@ def _list_outputs(self): _sum_operands = [] + class SumInputSpec(nib.TraitedSpec): input1 = nib.traits.List(nib.traits.Int, mandatory=True, desc='input') + class SumOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(desc='ouput') operands = nib.traits.List(nib.traits.Int, desc='operands') + class SumInterface(nib.BaseInterface): input_spec = SumInputSpec output_spec = SumOutputSpec @@ -87,12 +95,15 @@ def _list_outputs(self): _set_len = None """The Set interface execution result.""" + class SetInputSpec(nib.TraitedSpec): input1 = nib.traits.Set(nib.traits.Int, mandatory=True, desc='input') + class SetOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(desc='ouput') + class SetInterface(nib.BaseInterface): input_spec = SetInputSpec output_spec = SetOutputSpec @@ -111,13 +122,16 @@ def _list_outputs(self): _products = [] """The Products interface execution results.""" + class ProductInputSpec(nib.TraitedSpec): input1 = nib.traits.Int(mandatory=True, desc='input1') input2 = nib.traits.Int(mandatory=True, desc='input2') + class ProductOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(mandatory=True, desc='output') + class ProductInterface(nib.BaseInterface): input_spec = ProductInputSpec output_spec = ProductOutputSpec @@ -133,6 +147,7 @@ def _list_outputs(self): _products.append(outputs['output1']) return outputs + def test_join_expansion(): cwd = os.getcwd() wd = mkdtemp() @@ -151,7 +166,7 @@ def test_join_expansion(): wf.connect(pre_join1, 'output1', pre_join2, 'input1') # the join node join = pe.JoinNode(SumInterface(), joinsource='inputspec', - joinfield='input1', name='join') + joinfield='input1', name='join') wf.connect(pre_join2, 'output1', join, 'input1') # an uniterated post-join node post_join1 = pe.Node(IncrementInterface(), name='post_join1') @@ -176,7 +191,7 @@ def test_join_expansion(): assert_equal(_sums[0], 7, "The join Sum output value is incorrect: %s." % _sums[0]) # the join input preserves the iterables input order assert_equal(_sum_operands[0], [3, 4], - "The join Sum input is incorrect: %s." 
% _sum_operands[0]) + "The join Sum input is incorrect: %s." % _sum_operands[0]) # there are two iterations of the post-join node in the iterable path assert_equal(len(_products), 2, "The number of iterated post-join outputs is incorrect") @@ -184,6 +199,7 @@ def test_join_expansion(): os.chdir(cwd) rmtree(wd) + def test_node_joinsource(): """Test setting the joinsource to a Node.""" cwd = os.getcwd() @@ -197,15 +213,16 @@ def test_node_joinsource(): inputspec.iterables = [('n', [1, 2])] # the join node join = pe.JoinNode(SetInterface(), joinsource=inputspec, - joinfield='input1', name='join') + joinfield='input1', name='join') # the joinsource is the inputspec name assert_equal(join.joinsource, inputspec.name, - "The joinsource is not set to the node name.") + "The joinsource is not set to the node name.") os.chdir(cwd) rmtree(wd) + def test_set_join_node(): """Test collecting join inputs to a set.""" cwd = os.getcwd() @@ -222,18 +239,19 @@ def test_set_join_node(): wf.connect(inputspec, 'n', pre_join1, 'input1') # the set join node join = pe.JoinNode(SetInterface(), joinsource='inputspec', - joinfield='input1', name='join') + joinfield='input1', name='join') wf.connect(pre_join1, 'output1', join, 'input1') wf.run() # the join length is the number of unique inputs assert_equal(_set_len, 3, - "The join Set output value is incorrect: %s." % _set_len) + "The join Set output value is incorrect: %s." % _set_len) os.chdir(cwd) rmtree(wd) + def test_unique_join_node(): """Test join with the ``unique`` flag set to True.""" global _sum_operands @@ -252,17 +270,18 @@ def test_unique_join_node(): wf.connect(inputspec, 'n', pre_join1, 'input1') # the set join node join = pe.JoinNode(SumInterface(), joinsource='inputspec', - joinfield='input1', unique=True, name='join') + joinfield='input1', unique=True, name='join') wf.connect(pre_join1, 'output1', join, 'input1') wf.run() assert_equal(_sum_operands[0], [4, 2, 3], - "The unique join output value is incorrect: %s." % _sum_operands[0]) + "The unique join output value is incorrect: %s." % _sum_operands[0]) os.chdir(cwd) rmtree(wd) + def test_multiple_join_nodes(): """Test two join nodes, one downstream of the other.""" global _products @@ -320,6 +339,7 @@ def test_multiple_join_nodes(): os.chdir(cwd) rmtree(wd) + def test_identity_join_node(): """Test an IdentityInterface join.""" global _sum_operands @@ -352,13 +372,14 @@ def test_identity_join_node(): # node and 1 post-join node. Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. assert_equal(len(result.nodes()), 5, - "The number of expanded nodes is incorrect.") + "The number of expanded nodes is incorrect.") assert_equal(_sum_operands[0], [2, 3, 4], - "The join Sum input is incorrect: %s." %_sum_operands[0]) + "The join Sum input is incorrect: %s." 
% _sum_operands[0]) os.chdir(cwd) rmtree(wd) + def test_multifield_join_node(): """Test join on several fields.""" global _products @@ -385,7 +406,7 @@ def test_multifield_join_node(): wf.connect(inc2, 'output1', join, 'vector2') # a post-join node prod = pe.MapNode(ProductInterface(), name='prod', - iterfield=['input1', 'input2']) + iterfield=['input1', 'input2']) wf.connect(join, 'vector1', prod, 'input1') wf.connect(join, 'vector2', prod, 'input2') @@ -403,6 +424,7 @@ def test_multifield_join_node(): os.chdir(cwd) rmtree(wd) + def test_synchronize_join_node(): """Test join on an input node which has the ``synchronize`` flag set to True.""" global _products @@ -424,7 +446,7 @@ def test_synchronize_join_node(): wf.connect(inputspec, 'n', inc2, 'input1') # the join node join = pe.JoinNode(IdentityInterface(fields=['vector1', 'vector2']), - joinsource='inputspec', name='join') + joinsource='inputspec', name='join') wf.connect(inc1, 'output1', join, 'vector1') wf.connect(inc2, 'output1', join, 'vector2') # a post-join node @@ -446,6 +468,7 @@ def test_synchronize_join_node(): os.chdir(cwd) rmtree(wd) + def test_itersource_join_source_node(): """Test join on an input node which has an ``itersource``.""" cwd = os.getcwd() @@ -470,7 +493,7 @@ def test_itersource_join_source_node(): wf.connect(pre_join2, 'output1', pre_join3, 'input1') # the join node join = pe.JoinNode(IdentityInterface(fields=['vector']), - joinsource='pre_join2', joinfield='vector', name='join') + joinsource='pre_join2', joinfield='vector', name='join') wf.connect(pre_join3, 'output1', join, 'vector') # a join successor node post_join1 = pe.Node(SumInterface(), name='post_join1') @@ -496,13 +519,14 @@ def test_itersource_join_source_node(): # the post-join nodes execution order is indeterminate; # therefore, compare the lists item-wise. assert_true([16, 19] in _sum_operands, - "The join Sum input is incorrect: %s." % _sum_operands) + "The join Sum input is incorrect: %s." % _sum_operands) assert_true([7, 9] in _sum_operands, - "The join Sum input is incorrect: %s." % _sum_operands) + "The join Sum input is incorrect: %s." 
% _sum_operands) os.chdir(cwd) rmtree(wd) + def test_itersource_two_join_nodes(): """Test join with a midstream ``itersource`` and an upstream iterable.""" @@ -528,14 +552,14 @@ def test_itersource_two_join_nodes(): wf.connect(pre_join2, 'output1', pre_join3, 'input1') # the first join node join1 = pe.JoinNode(IdentityInterface(fields=['vector']), - joinsource='pre_join2', joinfield='vector', name='join1') + joinsource='pre_join2', joinfield='vector', name='join1') wf.connect(pre_join3, 'output1', join1, 'vector') # a join successor node post_join1 = pe.Node(SumInterface(), name='post_join1') wf.connect(join1, 'vector', post_join1, 'input1') # a summary join node join2 = pe.JoinNode(IdentityInterface(fields=['vector']), - joinsource='inputspec', joinfield='vector', name='join2') + joinsource='inputspec', joinfield='vector', name='join2') wf.connect(post_join1, 'output1', join2, 'vector') result = wf.run() @@ -548,6 +572,7 @@ def test_itersource_two_join_nodes(): os.chdir(cwd) rmtree(wd) + def test_set_join_node_file_input(): """Test collecting join inputs to a set.""" cwd = os.getcwd() @@ -566,7 +591,7 @@ def test_set_join_node_file_input(): wf.connect(inputspec, 'n', pre_join1, 'n') # the set join node join = pe.JoinNode(PickFirst(), joinsource='inputspec', - joinfield='in_files', name='join') + joinfield='in_files', name='join') wf.connect(pre_join1, 'n', join, 'in_files') wf.run() diff --git a/nipype/pipeline/tests/test_utils.py b/nipype/pipeline/tests/test_utils.py index 6c7f19d160..3e559acb53 100644 --- a/nipype/pipeline/tests/test_utils.py +++ b/nipype/pipeline/tests/test_utils.py @@ -16,6 +16,7 @@ from ... import config from ..utils import merge_dict, clean_working_directory + def test_identitynode_removal(): def test_function(arg1, arg2, arg3): @@ -50,6 +51,7 @@ def test_clean_working_directory(): class OutputSpec(nib.TraitedSpec): files = nib.traits.List(nib.File) others = nib.File() + class InputSpec(nib.TraitedSpec): infile = nib.File() outputs = OutputSpec() @@ -88,6 +90,7 @@ class InputSpec(nib.TraitedSpec): config.set_default_config() rmtree(wd) + def test_outputs_removal(): def test_function(arg1): @@ -121,8 +124,8 @@ def test_function(arg1): n1.needed_outputs = ['file2'] n1.run() yield assert_false, os.path.exists(os.path.join(out_dir, - n1.name, - 'file1.txt')) + n1.name, + 'file1.txt')) yield assert_true, os.path.exists(os.path.join(out_dir, n1.name, 'file2.txt')) @@ -132,9 +135,11 @@ def test_function(arg1): class InputSpec(nib.TraitedSpec): in_file = nib.File(exists=True, copyfile=True) + class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc='outputs') + class TestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec @@ -148,6 +153,7 @@ def _list_outputs(self): outputs['output1'] = [1] return outputs + def test_inputs_removal(): out_dir = mkdtemp() file1 = os.path.join(out_dir, 'file1.txt') @@ -170,8 +176,8 @@ def test_inputs_removal(): n1.overwrite = True n1.run() yield assert_false, os.path.exists(os.path.join(out_dir, - n1.name, - 'file1.txt')) + n1.name, + 'file1.txt')) rmtree(out_dir) @@ -188,7 +194,7 @@ def test_function(arg1): for filename in files: with open(filename, 'wt') as fp: fp.write('%d' % arg1) - return file1, file2, os.path.join(os.getcwd(),"subdir") + return file1, file2, os.path.join(os.getcwd(), "subdir") def test_function2(in_file, arg): import os @@ -208,23 +214,23 @@ def test_function3(arg): out_dir = mkdtemp() - for plugin in ('Linear',):#, 'MultiProc'): + for plugin in ('Linear',): # , 
'MultiProc'): n1 = pe.Node(niu.Function(input_names=['arg1'], - output_names=['out_file1', 'out_file2', 'dir'], - function=test_function), - name='n1') + output_names=['out_file1', 'out_file2', 'dir'], + function=test_function), + name='n1') n1.inputs.arg1 = 1 n2 = pe.Node(niu.Function(input_names=['in_file', 'arg'], - output_names=['out_file1', 'out_file2', 'n'], - function=test_function2), - name='n2') + output_names=['out_file1', 'out_file2', 'n'], + function=test_function2), + name='n2') n2.inputs.arg = 2 n3 = pe.Node(niu.Function(input_names=['arg'], - output_names=['n'], - function=test_function3), - name='n3') + output_names=['n'], + function=test_function3), + name='n3') wf = pe.Workflow(name="node_rem_test" + plugin, base_dir=out_dir) wf.connect(n1, "out_file1", n2, "in_file") @@ -238,14 +244,14 @@ def test_function3(arg): wf.run(plugin=plugin) yield assert_true, os.path.exists(os.path.join(wf.base_dir, - wf.name, - n1.name, - 'file2.txt')) != remove_unnecessary_outputs + wf.name, + n1.name, + 'file2.txt')) != remove_unnecessary_outputs yield assert_true, os.path.exists(os.path.join(wf.base_dir, - wf.name, - n1.name, - "subdir", - 'file1.txt')) != remove_unnecessary_outputs + wf.name, + n1.name, + "subdir", + 'file1.txt')) != remove_unnecessary_outputs yield assert_true, os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, @@ -281,38 +287,41 @@ def pick_first(l): rmtree(os.path.join(wf.base_dir, wf.name)) wf.run(plugin=plugin) yield assert_true, os.path.exists(os.path.join(wf.base_dir, - wf.name, - n2.name, - 'file1.txt')) + wf.name, + n2.name, + 'file1.txt')) yield assert_true, os.path.exists(os.path.join(wf.base_dir, - wf.name, - n2.name, - 'file2.txt')) != remove_unnecessary_outputs + wf.name, + n2.name, + 'file2.txt')) != remove_unnecessary_outputs yield assert_true, os.path.exists(os.path.join(wf.base_dir, - wf.name, - n4.name, - 'file1.txt')) == keep_inputs + wf.name, + n4.name, + 'file1.txt')) == keep_inputs rmtree(out_dir) + def fwhm(fwhm): return fwhm + def create_wf(name): pipe = pe.Workflow(name=name) process = pe.Node(niu.Function(input_names=['fwhm'], - output_names=['fwhm'], - function=fwhm), - name='proc') - process.iterables = ('fwhm', [0]) - process2 = pe.Node(niu.Function(input_names=['fwhm'], output_names=['fwhm'], function=fwhm), - name='proc2') + name='proc') + process.iterables = ('fwhm', [0]) + process2 = pe.Node(niu.Function(input_names=['fwhm'], + output_names=['fwhm'], + function=fwhm), + name='proc2') process2.iterables = ('fwhm', [0]) pipe.connect(process, 'fwhm', process2, 'fwhm') return pipe + def test_multi_disconnected_iterable(): out_dir = mkdtemp() metawf = pe.Workflow(name='meta') diff --git a/nipype/pipeline/utils.py b/nipype/pipeline/utils.py index d6bb996140..25931f1310 100644 --- a/nipype/pipeline/utils.py +++ b/nipype/pipeline/utils.py @@ -262,12 +262,14 @@ def _get_valid_pathstr(pathstr): pathstr = pathstr.replace(',', '.') return pathstr + def expand_iterables(iterables, synchronize=False): if synchronize: return synchronize_iterables(iterables) else: return list(walk(list(iterables.items()))) + def count_iterables(iterables, synchronize=False): """Return the number of iterable expansion nodes. @@ -279,9 +281,10 @@ def count_iterables(iterables, synchronize=False): if synchronize: op = max else: - op = lambda x,y: x*y + op = lambda x, y: x*y return reduce(op, [len(func()) for _, func in iterables.items()]) + def walk(children, level=0, path=None, usename=True): """Generate all the full paths in a tree, as a dict. 
@@ -314,6 +317,7 @@ def walk(children, level=0, path=None, usename=True): for child_paths in walk(tail, level + 1, path, usename): yield child_paths + def synchronize_iterables(iterables): """Synchronize the given iterables in item-wise order. @@ -348,6 +352,7 @@ def synchronize_iterables(iterables): return out_list + def evaluate_connect_function(function_source, args, first_arg): func = create_function_from_source(function_source) try: @@ -415,12 +420,12 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables, for n in subgraph.nodes(): nidx = ids.index(n._hierarchy + n._id) for edge in supergraph.in_edges_iter(supernodes[nidx]): - #make sure edge is not part of subgraph + # make sure edge is not part of subgraph if edge[0] not in subgraph.nodes(): if n._hierarchy + n._id not in edgeinfo.keys(): edgeinfo[n._hierarchy + n._id] = [] edgeinfo[n._hierarchy + n._id].append((edge[0], - supergraph.get_edge_data(*edge))) + supergraph.get_edge_data(*edge))) supergraph.remove_nodes_from(nodes) # Add copies of the subgraph depending on the number of iterables iterable_params = expand_iterables(iterables, synchronize) @@ -479,6 +484,7 @@ def _connect_nodes(graph, srcnode, destnode, connection_info): else: data['connect'].extend(connection_info) + def _remove_nonjoin_identity_nodes(graph, keep_iterables=False): """Remove non-join identity nodes from the given graph @@ -492,6 +498,7 @@ def _remove_nonjoin_identity_nodes(graph, keep_iterables=False): _remove_identity_node(graph, node) return graph + def _identity_nodes(graph, include_iterables): """Return the IdentityInterface nodes in the graph @@ -500,8 +507,9 @@ def _identity_nodes(graph, include_iterables): to True. """ return [node for node in nx.topological_sort(graph) - if isinstance(node._interface, IdentityInterface) and - (include_iterables or getattr(node, 'iterables') is None)] + if isinstance(node._interface, IdentityInterface) and + (include_iterables or getattr(node, 'iterables') is None)] + def _remove_identity_node(graph, node): """Remove identity nodes from an execution graph @@ -510,12 +518,13 @@ def _remove_identity_node(graph, node): for field, connections in list(portoutputs.items()): if portinputs: _propagate_internal_output(graph, node, field, connections, - portinputs) + portinputs) else: _propagate_root_output(graph, node, field, connections) graph.remove_nodes_from([node]) logger.debug("Removed the identity node %s from the graph." 
% node) + def _node_ports(graph, node): """Return the given node's input and output ports @@ -531,7 +540,7 @@ def _node_ports(graph, node): for u, _, d in graph.in_edges_iter(node, data=True): for src, dest in d['connect']: portinputs[dest] = (u, src) - for _, v, d in graph.out_edges_iter(node, data=True): + for _, v, d in graph.out_edges_iter(node, data=True): for src, dest in d['connect']: if isinstance(src, tuple): srcport = src[0] @@ -542,6 +551,7 @@ def _node_ports(graph, node): portoutputs[srcport].append((v, dest, src)) return (portinputs, portoutputs) + def _propagate_root_output(graph, node, field, connections): """Propagates the given graph root node output port field connections to the out-edge destination nodes.""" @@ -552,6 +562,7 @@ def _propagate_root_output(graph, node, field, connections): value) destnode.set_input(inport, value) + def _propagate_internal_output(graph, node, field, connections, portinputs): """Propagates the given graph internal node output port field connections to the out-edge source node and in-edge @@ -563,8 +574,8 @@ def _propagate_internal_output(graph, node, field, connections, portinputs): raise ValueError(("Does not support two inline functions " "in series (\'%s\' and \'%s\'). " "Please use a Function node") % - (srcport[1].split("\\n")[0][6:-1], - src[1].split("\\n")[0][6:-1])) + (srcport[1].split("\\n")[0][6:-1], + src[1].split("\\n")[0][6:-1])) connect = graph.get_edge_data(srcnode, destnode, default={'connect': []}) if isinstance(src, tuple): @@ -581,6 +592,7 @@ def _propagate_internal_output(graph, node, field, connections, portinputs): value = evaluate_connect_function(src[1], src[2], value) destnode.set_input(inport, value) + def generate_expanded_graph(graph_in): """Generates an expanded graph based on node parameterization @@ -608,9 +620,9 @@ def generate_expanded_graph(graph_in): # the join successor nodes of the current iterable node jnodes = [node for node in graph_in.nodes_iter() - if hasattr(node, 'joinsource') - and inode.name == node.joinsource - and nx.has_path(graph_in, inode, node)] + if hasattr(node, 'joinsource') + and inode.name == node.joinsource + and nx.has_path(graph_in, inode, node)] # excise the join in-edges. save the excised edges in a # {jnode: {source name: (destination name, edge data)}} @@ -662,6 +674,7 @@ def generate_expanded_graph(graph_in): iter_dict = dict([(field, lookup[key]) for field, lookup in inode.iterables if key in lookup]) # convert the iterables to the standard {field: function} format + def make_field_func(*pair): return pair[0], lambda: pair[1] @@ -683,7 +696,7 @@ def make_field_func(*pair): if prior_prefix[-1] == 'z': raise ValueError('Too many iterables in the workflow') iterable_prefix =\ - allprefixes[allprefixes.index(prior_prefix[-1]) + 1] + allprefixes[allprefixes.index(prior_prefix[-1]) + 1] logger.debug(('subnodes:', subnodes)) # append a suffix to the iterable node id @@ -708,7 +721,7 @@ def make_field_func(*pair): expansions[src_id].append(node) for in_id, in_nodes in list(expansions.items()): logger.debug("The join node %s input %s was expanded" - " to %d nodes." %(jnode, in_id, len(in_nodes))) + " to %d nodes." 
% (jnode, in_id, len(in_nodes))) # preserve the node iteration order by sorting on the node id for in_nodes in list(expansions.values()): in_nodes.sort(key=lambda node: node._id) @@ -736,7 +749,7 @@ def make_field_func(*pair): connects = newdata['connect'] # the join fields connected to the source join_fields = [field for _, field in connects - if field in jnode.joinfield] + if field in jnode.joinfield] # the {field: slot fields} maps assigned to the input # node, e.g. {'image': 'imageJ3', 'mask': 'maskJ3'} # for the third join source expansion replicate of a @@ -755,7 +768,7 @@ def make_field_func(*pair): logger.debug("Connected the join node %s subgraph to the" " expanded join point %s" % (jnode, in_node)) - #nx.write_dot(graph_in, '%s_post.dot' % node) + # nx.write_dot(graph_in, '%s_post.dot' % node) # the remaining iterable nodes inodes = _iterable_nodes(graph_in) @@ -767,6 +780,7 @@ def make_field_func(*pair): return _remove_nonjoin_identity_nodes(graph_in) + def _iterable_nodes(graph_in): """Returns the iterable nodes in the given graph and their join dependencies. @@ -797,6 +811,7 @@ def _iterable_nodes(graph_in): inodes_no_src.reverse() return inodes_no_src + inodes_src + def _standardize_iterables(node): """Converts the given iterables to a {field: function} dictionary, if necessary, where the function returns a list.""" @@ -835,6 +850,7 @@ def make_field_func(*pair): iterables = dict(iter_items) node.iterables = iterables + def _validate_iterables(node, iterables, fields): """ Raise TypeError if an iterables member is not iterable. @@ -862,6 +878,7 @@ def _validate_iterables(node, iterables, fields): raise ValueError("The %s iterables field is unrecognized: %s" % (node.name, field)) + def _transpose_iterables(fields, values): """ Converts the given fields and tuple values into a standardized @@ -882,7 +899,8 @@ def _transpose_iterables(fields, values): return list(transposed.items()) else: return list(zip(fields, [[v for v in list(transpose) if v != None] - for transpose in zip(*values)])) + for transpose in zip(*values)])) + def export_graph(graph_in, base_dir=None, show=False, use_execgraph=False, show_connectinfo=False, dotfilename='graph.dot', format='png', @@ -1117,6 +1135,7 @@ def merge_bundles(g1, g2): g1._add_record(rec) return g1 + def write_workflow_prov(graph, filename=None, format='turtle'): """Write W3C PROV Model JSON file """ @@ -1163,7 +1182,7 @@ def write_workflow_prov(graph, filename=None, format='turtle'): # Process->Process for idx, edgeinfo in enumerate(graph.in_edges_iter()): ps.g.wasStartedBy(processes[nodes.index(edgeinfo[1])], - starter=processes[nodes.index(edgeinfo[0])]) + starter=processes[nodes.index(edgeinfo[0])]) # write provenance try: @@ -1180,6 +1199,7 @@ def write_workflow_prov(graph, filename=None, format='turtle'): pm.json.dump(ps.g, fp, cls=pm.ProvBundle.JSONEncoder) return ps.g + def topological_sort(graph, depth_first=False): """Returns a depth first sorted order if depth_first is True """ @@ -1187,9 +1207,9 @@ def topological_sort(graph, depth_first=False): if not depth_first: return nodesort, None logger.debug("Performing depth first search") - nodes=[] - groups=[] - group=0 + nodes = [] + groups = [] + group = 0 G = nx.Graph() G.add_nodes_from(graph.nodes()) G.add_edges_from(graph.edges()) diff --git a/nipype/pkg_info.py b/nipype/pkg_info.py index b3982bb4cb..04ea874f7d 100644 --- a/nipype/pkg_info.py +++ b/nipype/pkg_info.py @@ -50,7 +50,7 @@ def pkg_commit_hash(pkg_path): cfg_parser = ConfigParser() cfg_parser.read(pth) archive_subst = 
cfg_parser.get('commit hash', 'archive_subst_hash') - if not archive_subst.startswith('$Format'): # it has been substituted + if not archive_subst.startswith('$Format'): # it has been substituted return 'archive substitution', archive_subst install_subst = cfg_parser.get('commit hash', 'install_hash') if install_subst != '': diff --git a/nipype/testing/__init__.py b/nipype/testing/__init__.py index 727adfd7a7..9a4848fb50 100644 --- a/nipype/testing/__init__.py +++ b/nipype/testing/__init__.py @@ -33,6 +33,7 @@ skipif = dec.skipif + def example_data(infile='functional.nii'): """returns path to empty example data files for doc tests it will raise an exception if filename is not in the directory""" diff --git a/nipype/testing/decorators.py b/nipype/testing/decorators.py index d225f5d1e9..7a76d26958 100644 --- a/nipype/testing/decorators.py +++ b/nipype/testing/decorators.py @@ -44,19 +44,20 @@ def make_label_dec(label, ds=None): >>> f.hard True """ - if isinstance(label,str): + if isinstance(label, str): labels = [label] else: labels = label # Validate that the given label(s) are OK for use in setattr() by doing a # dry run on a dummy function. - tmp = lambda : None + tmp = lambda: None for label in labels: - setattr(tmp,label,True) + setattr(tmp, label, True) # This is the actual decorator we'll return + def decor(f): for label in labels: - setattr(f,label,True) + setattr(f, label, True) return f # Apply the user's docstring if ds is None: @@ -65,6 +66,8 @@ def decor(f): return decor # For tests that need further review + + def needs_review(msg): """ Skip a test that needs further review. @@ -88,4 +91,4 @@ def if_datasource(ds, msg): ds.get_filename() except DataError: return skipif(True, msg) - return lambda f : f + return lambda f: f diff --git a/nipype/utils/config.py b/nipype/utils/config.py index c07136ecf7..4cae18d2a7 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -86,7 +86,7 @@ def __init__(self, *args, **kwargs): # To be deprecated in two releases if os.path.exists(old_config_file): if os.path.exists(new_config_file): - msg=("Detected presence of both old (%s, used by versions " + msg = ("Detected presence of both old (%s, used by versions " "< 0.5.2) and new (%s) config files. This version will " "proceed with the new one. We advise to merge settings " "and remove old config file if you are not planning to " diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index e5cf459ce0..4875ee6b95 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -42,7 +42,7 @@ def grab_doc(cmd, trap_error=True): stdout, stderr = proc.communicate() if trap_error and proc.returncode: - msg = 'Attempting to run %s. Returned Error: %s'%(cmd,stderr) + msg = 'Attempting to run %s. Returned Error: %s' %(cmd, stderr) raise IOError(msg) if stderr: @@ -52,6 +52,7 @@ def grab_doc(cmd, trap_error=True): return stderr return stdout + def reverse_opt_map(opt_map): """Reverse the key/value pairs of the option map in the interface classes. 
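
Reviewer note: the inversion described in this docstring is exercised by test_docparse.py later in this patch. A minimal sketch of the behaviour (hypothetical helper name, not the shipped implementation) is:

    def reverse_opt_map_sketch(opt_map):
        revmap = {}
        for attr, flag in opt_map.items():
            # entries such as 'flags': '%s' carry no literal flag and are skipped
            if flag and flag.strip().startswith('-'):
                revmap[flag.split()[0]] = attr
        return revmap

    # {'outline': '-o', 'fun': '-f %.2f', 'flags': '%s'} -> {'-o': 'outline', '-f': 'fun'}
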
@@ -123,6 +124,7 @@ def format_params(paramlist, otherlist=None): doc = ''.join([doc, otherparams]) return doc + def insert_doc(doc, new_items): """Insert ``new_items`` into the beginning of the ``doc`` @@ -206,13 +208,13 @@ def build_doc(doc, opts): # Probably an empty line continue # For lines we care about, the first item is the flag - if ',' in linelist[0]: #sometimes flags are only seperated by comma + if ',' in linelist[0]: # sometimes flags are only seperated by comma flag = linelist[0].split(',')[0] else: flag = linelist[0] attr = opts.get(flag) if attr is not None: - #newline = line.replace(flag, attr) + # newline = line.replace(flag, attr) # Replace the flag with our attribute name linelist[0] = '%s :' % str(attr) # Add some line formatting @@ -229,6 +231,7 @@ def build_doc(doc, opts): flags_doc.append(line) return format_params(newdoc, flags_doc) + def get_doc(cmd, opt_map, help_flag=None, trap_error=True): """Get the docstring from our command and options map. @@ -253,13 +256,14 @@ def get_doc(cmd, opt_map, help_flag=None, trap_error=True): terminal_output='allatonce').run() cmd_path = res.runtime.stdout.strip() if cmd_path == '': - raise Exception('Command %s not found'%cmd.split(' ')[0]) + raise Exception('Command %s not found' %cmd.split(' ')[0]) if help_flag: - cmd = ' '.join((cmd,help_flag)) - doc = grab_doc(cmd,trap_error) + cmd = ' '.join((cmd, help_flag)) + doc = grab_doc(cmd, trap_error) opts = reverse_opt_map(opt_map) return build_doc(doc, opts) + def _parse_doc(doc, style=['--']): """Parses a help doc for inputs @@ -283,25 +287,26 @@ def _parse_doc(doc, style=['--']): style = [style] for line in doclist: linelist = line.split() - flag =[item for i,item in enumerate(linelist) if i<2 and \ - any([item.startswith(s) for s in style]) and \ - len(item)>1] + flag = [item for i, item in enumerate(linelist) if i < 2 and \ + any([item.startswith(s) for s in style]) and \ + len(item) > 1] if flag: - if len(flag)==1: + if len(flag) == 1: style_idx = [flag[0].startswith(s) for s in style].index(True) flag = flag[0] else: style_idx = [] for f in flag: - for i,s in enumerate(style): + for i, s in enumerate(style): if f.startswith(s): style_idx.append(i) break flag = flag[style_idx.index(min(style_idx))] style_idx = min(style_idx) - optmap[flag.split(style[style_idx])[1]] = '%s %%s'%flag + optmap[flag.split(style[style_idx])[1]] = '%s %%s' %flag return optmap + def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): """Auto-generate option map from command line help @@ -327,11 +332,12 @@ def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): terminal_output='allatonce').run() cmd_path = res.runtime.stdout.strip() if cmd_path == '': - raise Exception('Command %s not found'%cmd.split(' ')[0]) + raise Exception('Command %s not found' %cmd.split(' ')[0]) if help_flag: - cmd = ' '.join((cmd,help_flag)) - doc = grab_doc(cmd,trap_error) - return _parse_doc(doc,style) + cmd = ' '.join((cmd, help_flag)) + doc = grab_doc(cmd, trap_error) + return _parse_doc(doc, style) + def replace_opts(rep_doc, opts): """Replace flags with parameter names. 
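
Reviewer note: the _parse_doc helper reformatted above builds an option map by scanning help text for flag-like tokens. A simplified, self-contained illustration of the idea (not the patched code; it only handles a single '--' style) is:

    def parse_help_sketch(help_text, prefix='--'):
        optmap = {}
        for line in help_text.splitlines():
            for token in line.split()[:2]:
                if token.startswith(prefix) and len(token) > len(prefix):
                    optmap[token[len(prefix):]] = '%s %%s' % token
                    break
        return optmap

    # parse_help_sketch("--input  path to input\n--verbose  more output")
    # -> {'input': '--input %s', 'verbose': '--verbose %s'}
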
diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index 9249dc294b..ba364b153b 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -242,8 +242,8 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, newhash = hash_infile(newfile) fmlogger.debug("File: %s already exists,%s, copy:%d" % (newfile, newhash, copy)) - #the following seems unnecessary - #if os.name is 'posix' and copy: + # the following seems unnecessary + # if os.name is 'posix' and copy: # if os.path.lexists(newfile) and os.path.islink(newfile): # os.unlink(newfile) # newhash = None @@ -400,10 +400,11 @@ def load_json(filename): """ - with open (filename, 'r') as fp: + with open(filename, 'r') as fp: data = simplejson.load(fp) return data + def loadcrash(infile, *args): if '.pkl' in infile: return loadpkl(infile) @@ -420,6 +421,7 @@ def loadcrash(infile, *args): else: raise ValueError('Only pickled crashfiles are supported') + def loadpkl(infile): """Load a zipped or plain cPickled file """ diff --git a/nipype/utils/logger.py b/nipype/utils/logger.py index 4088ec117d..4c69af5822 100644 --- a/nipype/utils/logger.py +++ b/nipype/utils/logger.py @@ -7,7 +7,7 @@ import sys try: from ..external.cloghandler import ConcurrentRotatingFileHandler as \ - RFHandler + RFHandler except ImportError: # Next 2 lines are optional: issue a warning to the user from warnings import warn @@ -16,17 +16,19 @@ from .misc import str2bool from .config import NipypeConfig + class Logging(object): """Nipype logging class """ fmt = ('%(asctime)s,%(msecs)d %(name)-2s ' '%(levelname)-2s:\n\t %(message)s') datefmt = '%y%m%d-%H:%M:%S' + def __init__(self, config): self._config = config logging.basicConfig(format=self.fmt, datefmt=self.datefmt, stream=sys.stdout) - #logging.basicConfig(stream=sys.stdout) + # logging.basicConfig(stream=sys.stdout) self._logger = logging.getLogger('workflow') self._fmlogger = logging.getLogger('filemanip') self._iflogger = logging.getLogger('interface') @@ -91,10 +93,10 @@ def logdebug_dict_differences(self, dold, dnew, prefix=""): old_keys = set(dold.keys()) if len(new_keys - old_keys): self._logger.debug("%s not previously seen: %s" - % (prefix, new_keys - old_keys)) + % (prefix, new_keys - old_keys)) if len(old_keys - new_keys): self._logger.debug("%s not presently seen: %s" - % (prefix, old_keys - new_keys)) + % (prefix, old_keys - new_keys)) # Values in common keys would differ quite often, # so we need to join the messages together @@ -116,4 +118,4 @@ def logdebug_dict_differences(self, dold, dnew, prefix=""): % (k, dnew[k], dold[k])] if len(msgs): self._logger.debug("%s values differ in fields: %s" % (prefix, - ", ".join(msgs))) + ", ".join(msgs))) diff --git a/nipype/utils/matlabtools.py b/nipype/utils/matlabtools.py index ad1daced57..e272288b75 100644 --- a/nipype/utils/matlabtools.py +++ b/nipype/utils/matlabtools.py @@ -10,10 +10,13 @@ import tempfile # Functions, classes and other top-level code + + def fltcols(vals): ''' Trivial little function to make 1xN float vector ''' return np.atleast_2d(np.array(vals, dtype=float)) + def mlab_tempfile(dir=None): """Returns a temporary file-like object with valid matlab name. @@ -53,10 +56,10 @@ def mlab_tempfile(dir=None): # directly, we just keep trying until we get a valid name. To avoid an # infinite loop for some strange reason, we only try 100 times. 
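
Reviewer note: the retry idiom described in the comment above, reduced to a standalone sketch (the valid_name pattern here is an assumption, not copied from matlabtools):

    import os
    import re
    import tempfile

    valid_name = re.compile(r'[a-zA-Z]\w*$')  # assumed MATLAB-safe name check

    def matlab_tempfile_sketch(dir=None):
        for _ in range(100):
            f = tempfile.NamedTemporaryFile(suffix='.m', prefix='tmp_matlab_', dir=dir)
            base = os.path.splitext(os.path.basename(f.name))[0]
            if valid_name.match(base):
                return f          # caller is responsible for closing it
            f.close()             # name not usable from MATLAB; discard and retry
        raise ValueError('could not generate a MATLAB-safe temporary file name')
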
for n in range(100): - f = tempfile.NamedTemporaryFile(suffix='.m',prefix='tmp_matlab_', + f = tempfile.NamedTemporaryFile(suffix='.m', prefix='tmp_matlab_', dir=dir) # Check the file name for matlab compilance - fname = os.path.splitext(os.path.basename(f.name))[0] + fname = os.path.splitext(os.path.basename(f.name))[0] if valid_name.match(fname): break # Close the temp file we just made if its name is not valid; the diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index cccb0cc86d..de19f5843c 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -27,10 +27,11 @@ def atoi(text): def natural_keys(text): if isinstance(text, tuple): text = text[0] - return [ atoi(c) for c in re.split('(\d+)', text) ] + return [atoi(c) for c in re.split('(\d+)', text)] return sorted(l, key=natural_keys) + def trim(docstring, marker=None): if not docstring: return '' @@ -50,7 +51,7 @@ def trim(docstring, marker=None): # replace existing REST marker with doc level marker stripped = line.lstrip().strip().rstrip() if marker is not None and stripped and \ - all([s==stripped[0] for s in stripped]) and \ + all([s == stripped[0] for s in stripped]) and \ stripped[0] not in [':']: line = line.replace(stripped[0], marker) trimmed.append(line[indent:].rstrip()) @@ -62,11 +63,13 @@ def trim(docstring, marker=None): # Return a single string: return '\n'.join(trimmed) + def getsource(function): """Returns the source code of a function""" src = dedent(inspect.getsource(function)) return src + def create_function_from_source(function_source, imports=None): """Return a function object from a function source @@ -88,12 +91,12 @@ def create_function_from_source(function_source, imports=None): exec(function_source, ns) except Exception as msg: - msg = str(msg) + '\nError executing function:\n %s\n'%function_source + msg = str(msg) + '\nError executing function:\n %s\n' %function_source msg += '\n'.join(["Functions in connection strings have to be standalone.", "They cannot be declared either interactively or inside", "another function or inline in the connect string. Any", "imports should be done inside the function" - ]) + ]) raise RuntimeError(msg) ns_funcs = list(set(ns) - set(import_keys + ['__builtins__'])) assert len(ns_funcs) == 1, "Function or inputs are ill-defined" @@ -101,56 +104,59 @@ def create_function_from_source(function_source, imports=None): func = ns[funcname] return func + def find_indices(condition): - "Return the indices where ravel(condition) is true" - res, = np.nonzero(np.ravel(condition)) - return res + "Return the indices where ravel(condition) is true" + res, = np.nonzero(np.ravel(condition)) + return res + def is_container(item): - """Checks if item is a container (list, tuple, dict, set) - - Parameters - ---------- - item : object - object to check for .__iter__ - - Returns - ------- - output : Boolean - True if container - False if not (eg string) - """ - if isinstance(item, string_types): - return False - elif hasattr(item, '__iter__'): - return True - else: - return False + """Checks if item is a container (list, tuple, dict, set) + + Parameters + ---------- + item : object + object to check for .__iter__ + + Returns + ------- + output : Boolean + True if container + False if not (eg string) + """ + if isinstance(item, string_types): + return False + elif hasattr(item, '__iter__'): + return True + else: + return False + def container_to_string(cont): - """Convert a container to a command line string. + """Convert a container to a command line string. 
- Elements of the container are joined with a space between them, - suitable for a command line parameter. + Elements of the container are joined with a space between them, + suitable for a command line parameter. - If the container `cont` is only a sequence, like a string and not a - container, it is returned unmodified. + If the container `cont` is only a sequence, like a string and not a + container, it is returned unmodified. - Parameters - ---------- - cont : container - A container object like a list, tuple, dict, or a set. + Parameters + ---------- + cont : container + A container object like a list, tuple, dict, or a set. - Returns - ------- - cont_str : string - Container elements joined into a string. + Returns + ------- + cont_str : string + Container elements joined into a string. - """ - if hasattr(cont, '__iter__') and not isinstance(cont, string_types): - return str(' '.join(cont)) + """ + if hasattr(cont, '__iter__') and not isinstance(cont, string_types): + return str(' '.join(cont)) - return str(cont) + return str(cont) # Dependency checks. Copied this from Nipy, with some modificiations @@ -190,7 +196,7 @@ def package_check(pkg_name, version=None, app=None, checker=LooseVersion, else: msg = 'Nipype requires %s' % pkg_name if version: - msg += ' with version >= %s' % (version,) + msg += ' with version >= %s' % (version,) try: mod = __import__(pkg_name) except ImportError: @@ -204,6 +210,7 @@ def package_check(pkg_name, version=None, app=None, checker=LooseVersion, if checker(have_version) < checker(version): raise exc_failed_check(msg) + def str2bool(v): if isinstance(v, bool): return v @@ -215,6 +222,7 @@ def str2bool(v): else: raise ValueError("%s cannot be converted to bool" % v) + def flatten(S): if S == []: return S @@ -222,6 +230,7 @@ def flatten(S): return flatten(S[0]) + flatten(S[1:]) return S[:1] + flatten(S[1:]) + def unflatten(in_list, prev_structure): if not isinstance(in_list, Iterator): in_list = iter(in_list) diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py index 6b41e7a5ab..bb7c515b34 100644 --- a/nipype/utils/nipype2boutiques.py +++ b/nipype/utils/nipype2boutiques.py @@ -20,259 +20,267 @@ from nipype.interfaces.base import Interface + def main(argv): - # Parses arguments - parser = argparse.ArgumentParser(description='Nipype Boutiques exporter. See Boutiques specification at https://github.com/boutiques/schema.', prog=argv[0]) - parser.add_argument("-i" , "--interface" , type=str, help="Name of the Nipype interface to export." , required=True) - parser.add_argument("-m" , "--module" , type=str, help="Module where the interface is defined." , required=True) - parser.add_argument("-o" , "--output" , type=str, help="JSON file name where the Boutiques descriptor will be written.", required=True) - parser.add_argument("-t" , "--ignored-template-inputs" , type=str, help="Interface inputs ignored in path template creations.", nargs='+') - parser.add_argument("-d" , "--docker-image" , type=str, help="Name of the Docker image where the Nipype interface is available.") - parser.add_argument("-r" , "--docker-index" , type=str, help="Docker index where the Docker image is stored (e.g. 
http://index.docker.io).") - parser.add_argument("-n" , "--ignore-template-numbers" , action='store_true', default=False, help="Ignore all numbers in path template creations.") - parser.add_argument("-v" , "--verbose" , action='store_true', default=False, help="Enable verbose output.") - - parsed = parser.parse_args() - - # Generates JSON string - json_string = generate_boutiques_descriptor(parsed.module, - parsed.interface, - parsed.ignored_template_inputs, - parsed.docker_image,parsed.docker_index, - parsed.verbose, - parsed.ignore_template_numbers) - - # Writes JSON string to file - f = open(parsed.output,'w') - f.write(json_string) - f.close() - -def generate_boutiques_descriptor(module, interface_name, ignored_template_inputs,docker_image,docker_index,verbose,ignore_template_numbers): - ''' - Returns a JSON string containing a JSON Boutiques description of a Nipype interface. - Arguments: - * module: module where the Nipype interface is declared. - * interface: Nipype interface. - * ignored_template_inputs: a list of input names that should be ignored in the generation of output path templates. - * ignore_template_numbers: True if numbers must be ignored in output path creations. - ''' - - if not module: - raise Exception("Undefined module.") - - # Retrieves Nipype interface - __import__(module) - interface = getattr(sys.modules[module],interface_name)() - inputs = interface.input_spec() - outputs = interface.output_spec() - - # Tool description - tool_desc = {} - tool_desc['name'] = interface_name - tool_desc['command-line'] = "nipype_cmd "+str(module)+" "+interface_name+" " - tool_desc['description'] = interface_name+", as implemented in Nipype (module: "+str(module)+", interface: "+interface_name+")." - tool_desc['inputs'] = [] - tool_desc['outputs'] = [] - tool_desc['tool-version'] = interface.version - tool_desc['schema-version'] = '0.2-snapshot' - if docker_image: - tool_desc['docker-image'] = docker_image - if docker_index: - tool_desc['docker-index'] = docker_index - - # Generates tool inputs - for name, spec in sorted(interface.inputs.traits(transient=None).items()): - input = get_boutiques_input(inputs, interface, name, spec,ignored_template_inputs,verbose,ignore_template_numbers) - tool_desc['inputs'].append(input) - tool_desc['command-line']+= input['command-line-key']+" " - if verbose: - print("-> Adding input "+input['name']) - - # Generates tool outputs - for name,spec in sorted(outputs.traits(transient=None).items()): - output = get_boutiques_output(name,interface,tool_desc['inputs'],verbose) - if output['path-template'] != "": - tool_desc['outputs'].append(output) - if verbose: - print("-> Adding output "+output['name']) - elif verbose: - print("xx Skipping output "+output['name']+" with no path template.") - if tool_desc['outputs'] == []: - raise Exception("Tool has no output.") - - # Removes all temporary values from inputs (otherwise they will - # appear in the JSON output) - for input in tool_desc['inputs']: - del input['tempvalue'] - - return simplejson.dumps(tool_desc, indent=4, separators=(',', ': ')) - -def get_boutiques_input(inputs,interface,input_name,spec,ignored_template_inputs,verbose,ignore_template_numbers): - """ - Returns a dictionary containing the Boutiques input corresponding to a Nipype intput. - - Args: - * inputs: inputs of the Nipype interface. + # Parses arguments + parser = argparse.ArgumentParser(description='Nipype Boutiques exporter. 
See Boutiques specification at https://github.com/boutiques/schema.', prog=argv[0]) + parser.add_argument("-i", "--interface", type=str, help="Name of the Nipype interface to export.", required=True) + parser.add_argument("-m", "--module", type=str, help="Module where the interface is defined.", required=True) + parser.add_argument("-o", "--output", type=str, help="JSON file name where the Boutiques descriptor will be written.", required=True) + parser.add_argument("-t", "--ignored-template-inputs", type=str, help="Interface inputs ignored in path template creations.", nargs='+') + parser.add_argument("-d", "--docker-image", type=str, help="Name of the Docker image where the Nipype interface is available.") + parser.add_argument("-r", "--docker-index", type=str, help="Docker index where the Docker image is stored (e.g. http://index.docker.io).") + parser.add_argument("-n", "--ignore-template-numbers", action='store_true', default=False, help="Ignore all numbers in path template creations.") + parser.add_argument("-v", "--verbose", action='store_true', default=False, help="Enable verbose output.") + + parsed = parser.parse_args() + + # Generates JSON string + json_string = generate_boutiques_descriptor(parsed.module, + parsed.interface, + parsed.ignored_template_inputs, + parsed.docker_image, parsed.docker_index, + parsed.verbose, + parsed.ignore_template_numbers) + + # Writes JSON string to file + f = open(parsed.output, 'w') + f.write(json_string) + f.close() + + +def generate_boutiques_descriptor(module, interface_name, ignored_template_inputs, docker_image, docker_index, verbose, ignore_template_numbers): + ''' + Returns a JSON string containing a JSON Boutiques description of a Nipype interface. + Arguments: + * module: module where the Nipype interface is declared. * interface: Nipype interface. - * input_name: name of the Nipype input. - * spec: Nipype input spec. - * ignored_template_inputs: input names for which no temporary value must be generated. + * ignored_template_inputs: a list of input names that should be ignored in the generation of output path templates. * ignore_template_numbers: True if numbers must be ignored in output path creations. + ''' + + if not module: + raise Exception("Undefined module.") + + # Retrieves Nipype interface + __import__(module) + interface = getattr(sys.modules[module], interface_name)() + inputs = interface.input_spec() + outputs = interface.output_spec() + + # Tool description + tool_desc = {} + tool_desc['name'] = interface_name + tool_desc['command-line'] = "nipype_cmd "+str(module)+" "+interface_name+" " + tool_desc['description'] = interface_name+", as implemented in Nipype (module: "+str(module)+", interface: "+interface_name+")." 
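
Reviewer note: the descriptor assembled in this function ends up shaped roughly as follows. The field names come from the code in this hunk; the concrete interface and values are invented for illustration only:

    example_descriptor = {
        "name": "BET",
        "command-line": "nipype_cmd nipype.interfaces.fsl BET [IN_FILE] ",
        "tool-version": "5.0.9",
        "schema-version": "0.2-snapshot",
        "inputs": [
            {"id": "in_file", "name": "In file", "type": "File", "list": False,
             "command-line-key": "[IN_FILE]", "command-line-flag": "--in_file",
             "optional": False},
        ],
        "outputs": [
            {"id": "out_file", "name": "Out file", "type": "File",
             "path-template": "[IN_FILE]_brain.nii.gz", "optional": True},
        ],
    }
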
+ tool_desc['inputs'] = [] + tool_desc['outputs'] = [] + tool_desc['tool-version'] = interface.version + tool_desc['schema-version'] = '0.2-snapshot' + if docker_image: + tool_desc['docker-image'] = docker_image + if docker_index: + tool_desc['docker-index'] = docker_index + + # Generates tool inputs + for name, spec in sorted(interface.inputs.traits(transient=None).items()): + input = get_boutiques_input(inputs, interface, name, spec, ignored_template_inputs, verbose, ignore_template_numbers) + tool_desc['inputs'].append(input) + tool_desc['command-line'] += input['command-line-key']+" " + if verbose: + print("-> Adding input "+input['name']) + + # Generates tool outputs + for name, spec in sorted(outputs.traits(transient=None).items()): + output = get_boutiques_output(name, interface, tool_desc['inputs'], verbose) + if output['path-template'] != "": + tool_desc['outputs'].append(output) + if verbose: + print("-> Adding output "+output['name']) + elif verbose: + print("xx Skipping output "+output['name']+" with no path template.") + if tool_desc['outputs'] == []: + raise Exception("Tool has no output.") + + # Removes all temporary values from inputs (otherwise they will + # appear in the JSON output) + for input in tool_desc['inputs']: + del input['tempvalue'] + + return simplejson.dumps(tool_desc, indent=4, separators=(',', ': ')) + + +def get_boutiques_input(inputs, interface, input_name, spec, ignored_template_inputs, verbose, ignore_template_numbers): + """ + Returns a dictionary containing the Boutiques input corresponding to a Nipype intput. + + Args: + * inputs: inputs of the Nipype interface. + * interface: Nipype interface. + * input_name: name of the Nipype input. + * spec: Nipype input spec. + * ignored_template_inputs: input names for which no temporary value must be generated. + * ignore_template_numbers: True if numbers must be ignored in output path creations. + + Assumes that: + * Input names are unique. + """ + if not spec.desc: + spec.desc = "No description provided." + spec_info = spec.full_info(inputs, input_name, None) + + input = {} + input['id'] = input_name + input['name'] = input_name.replace('_', ' ').capitalize() + input['type'] = get_type_from_spec_info(spec_info) + input['list'] = is_list(spec_info) + input['command-line-key'] = "["+input_name.upper()+"]" # assumes that input names are unique + input['command-line-flag'] = ("--%s" %input_name+" ").strip() + input['tempvalue'] = None + input['description'] = spec_info.capitalize()+". "+spec.desc.capitalize() + if not input['description'].endswith('.'): + input['description'] += '.' + if not (hasattr(spec, "mandatory") and spec.mandatory): + input['optional'] = True + else: + input['optional'] = False + if spec.usedefault: + input['default-value'] = spec.default_value()[1] + + # Create unique, temporary value. 
+ temp_value = must_generate_value(input_name, input['type'], ignored_template_inputs, spec_info, spec, ignore_template_numbers) + if temp_value: + tempvalue = get_unique_value(input['type'], input_name) + setattr(interface.inputs, input_name, tempvalue) + input['tempvalue'] = tempvalue + if verbose: + print("oo Path-template creation using "+input['id']+"="+str(tempvalue)) + + # Now that temp values have been generated, set Boolean types to + # Number (there is no Boolean type in Boutiques) + if input['type'] == "Boolean": + input['type'] = "Number" + + return input + + +def get_boutiques_output(name, interface, tool_inputs, verbose=False): + """ + Returns a dictionary containing the Boutiques output corresponding to a Nipype output. + + Args: + * name: name of the Nipype output. + * interface: Nipype interface. + * tool_inputs: list of tool inputs (as produced by method get_boutiques_input). + + Assumes that: + * Output names are unique. + * Input values involved in the path template are defined. + * Output files are written in the current directory. + * There is a single output value (output lists are not supported). + """ + output = {} + output['name'] = name.replace('_', ' ').capitalize() + output['id'] = name + output['type'] = "File" + output['path-template'] = "" + output['optional'] = True # no real way to determine if an output is always produced, regardless of the input values. + + # Path template creation. + + output_value = interface._list_outputs()[name] + if output_value != "" and isinstance(output_value, str): # FIXME: this crashes when there are multiple output values. + # Go find from which input value it was built + for input in tool_inputs: + if not input['tempvalue']: + continue + input_value = input['tempvalue'] + if input['type'] == "File": + # Take the base name + input_value = os.path.splitext(os.path.basename(input_value))[0] + if str(input_value) in output_value: + output_value = os.path.basename(output_value.replace(input_value, input['command-line-key'])) # FIXME: this only works if output is written in the current directory + output['path-template'] = os.path.basename(output_value) + return output - Assumes that: - * Input names are unique. - """ - if not spec.desc: - spec.desc = "No description provided." - spec_info = spec.full_info(inputs, input_name, None) - - input = {} - input['id'] = input_name - input['name'] = input_name.replace('_',' ').capitalize() - input['type'] = get_type_from_spec_info(spec_info) - input['list'] = is_list(spec_info) - input['command-line-key'] = "["+input_name.upper()+"]" # assumes that input names are unique - input['command-line-flag'] = ("--%s"%input_name+" ").strip() - input['tempvalue'] = None - input['description'] = spec_info.capitalize()+". "+spec.desc.capitalize() - if not input['description'].endswith('.'): - input['description'] += '.' - if not ( hasattr(spec, "mandatory") and spec.mandatory ): - input['optional'] = True - else: - input['optional'] = False - if spec.usedefault: - input['default-value'] = spec.default_value()[1] - - - # Create unique, temporary value. 
- temp_value = must_generate_value(input_name,input['type'],ignored_template_inputs,spec_info,spec,ignore_template_numbers) - if temp_value: - tempvalue = get_unique_value(input['type'],input_name) - setattr(interface.inputs,input_name,tempvalue) - input['tempvalue'] = tempvalue - if verbose: - print("oo Path-template creation using "+input['id']+"="+str(tempvalue)) - - # Now that temp values have been generated, set Boolean types to - # Number (there is no Boolean type in Boutiques) - if input['type'] == "Boolean": - input['type'] = "Number" - - return input - -def get_boutiques_output(name,interface,tool_inputs,verbose=False): - """ - Returns a dictionary containing the Boutiques output corresponding to a Nipype output. - - Args: - * name: name of the Nipype output. - * interface: Nipype interface. - * tool_inputs: list of tool inputs (as produced by method get_boutiques_input). - - Assumes that: - * Output names are unique. - * Input values involved in the path template are defined. - * Output files are written in the current directory. - * There is a single output value (output lists are not supported). - """ - output = {} - output['name'] = name.replace('_',' ').capitalize() - output['id'] = name - output['type'] = "File" - output['path-template'] = "" - output['optional'] = True # no real way to determine if an output is always produced, regardless of the input values. - - # Path template creation. - - output_value = interface._list_outputs()[name] - if output_value != "" and isinstance(output_value,str): # FIXME: this crashes when there are multiple output values. - # Go find from which input value it was built - for input in tool_inputs: - if not input['tempvalue']: - continue - input_value = input['tempvalue'] - if input['type'] == "File": - # Take the base name - input_value = os.path.splitext(os.path.basename(input_value))[0] - if str(input_value) in output_value: - output_value = os.path.basename(output_value.replace(input_value,input['command-line-key'])) # FIXME: this only works if output is written in the current directory - output['path-template'] = os.path.basename(output_value) - return output def get_type_from_spec_info(spec_info): - ''' - Returns an input type from the spec info. There must be a better - way to get an input type in Nipype than to parse the spec info. - ''' - if ("an existing file name" in spec_info) or ("input volumes" in spec_info): - return "File" - elif ("an integer" in spec_info or "a float" in spec_info) : - return "Number" - elif "a boolean" in spec_info: - return "Boolean" - return "String" + ''' + Returns an input type from the spec info. There must be a better + way to get an input type in Nipype than to parse the spec info. + ''' + if ("an existing file name" in spec_info) or ("input volumes" in spec_info): + return "File" + elif ("an integer" in spec_info or "a float" in spec_info): + return "Number" + elif "a boolean" in spec_info: + return "Boolean" + return "String" + def is_list(spec_info): - ''' - Returns True if the spec info looks like it describes a list - parameter. There must be a better way in Nipype to check if an input - is a list. - ''' - if "a list" in spec_info: - return True - return False - -def get_unique_value(type,id): - ''' - Returns a unique value of type 'type', for input with id 'id', - assuming id is unique. - ''' - return { - "File": os.path.abspath(create_tempfile()), - "Boolean": True, - "Number": abs(hash(id)), # abs in case input param must be positive... 
- "String": id - }[type] + ''' + Returns True if the spec info looks like it describes a list + parameter. There must be a better way in Nipype to check if an input + is a list. + ''' + if "a list" in spec_info: + return True + return False + + +def get_unique_value(type, id): + ''' + Returns a unique value of type 'type', for input with id 'id', + assuming id is unique. + ''' + return { + "File": os.path.abspath(create_tempfile()), + "Boolean": True, + "Number": abs(hash(id)), # abs in case input param must be positive... + "String": id + }[type] + def create_tempfile(): - ''' - Creates a temp file and returns its name. - ''' - fileTemp = tempfile.NamedTemporaryFile(delete = False) - fileTemp.write("hello") - fileTemp.close() - return fileTemp.name - -def must_generate_value(name,type,ignored_template_inputs,spec_info,spec,ignore_template_numbers): - ''' - Return True if a temporary value must be generated for this input. - Arguments: - * name: input name. - * type: input_type. - * ignored_template_inputs: a list of inputs names for which no value must be generated. - * spec_info: spec info of the Nipype input -. * ignore_template_numbers: True if numbers must be ignored. - ''' - # Return false when type is number and numbers must be ignored. - if ignore_template_numbers and type == "Number": - return False - # Only generate value for the first element of mutually exclusive inputs. - if spec.xor and spec.xor[0]!=name: - return False - # Directory types are not supported - if "an existing directory name" in spec_info: - return False - # Don't know how to generate a list. - if "a list" in spec_info or "a tuple" in spec_info: - return False - # Don't know how to generate a dictionary. - if "a dictionary" in spec_info: - return False - # Best guess to detect string restrictions... - if "' or '" in spec_info: - return False - if not ignored_template_inputs: - return True - return not (name in ignored_template_inputs) + ''' + Creates a temp file and returns its name. + ''' + fileTemp = tempfile.NamedTemporaryFile(delete=False) + fileTemp.write("hello") + fileTemp.close() + return fileTemp.name + + +def must_generate_value(name, type, ignored_template_inputs, spec_info, spec, ignore_template_numbers): + ''' + Return True if a temporary value must be generated for this input. + Arguments: + * name: input name. + * type: input_type. + * ignored_template_inputs: a list of inputs names for which no value must be generated. + * spec_info: spec info of the Nipype input + * ignore_template_numbers: True if numbers must be ignored. + ''' + # Return false when type is number and numbers must be ignored. + if ignore_template_numbers and type == "Number": + return False + # Only generate value for the first element of mutually exclusive inputs. + if spec.xor and spec.xor[0] != name: + return False + # Directory types are not supported + if "an existing directory name" in spec_info: + return False + # Don't know how to generate a list. + if "a list" in spec_info or "a tuple" in spec_info: + return False + # Don't know how to generate a dictionary. + if "a dictionary" in spec_info: + return False + # Best guess to detect string restrictions... 
+ if "' or '" in spec_info: + return False + if not ignored_template_inputs: + return True + return not (name in ignored_template_inputs) diff --git a/nipype/utils/nipype_cmd.py b/nipype/utils/nipype_cmd.py index 66fb12f199..d32b33da7d 100644 --- a/nipype/utils/nipype_cmd.py +++ b/nipype/utils/nipype_cmd.py @@ -6,6 +6,7 @@ from nipype.interfaces.base import Interface, InputMultiPath, traits from nipype.utils.misc import str2bool + def listClasses(module=None): if module: __import__(module) @@ -13,7 +14,8 @@ def listClasses(module=None): print("Available Interfaces:") for k, v in sorted(list(pkg.__dict__.items())): if inspect.isclass(v) and issubclass(v, Interface): - print("\t%s"%k) + print("\t%s" %k) + def add_options(parser=None, module=None, function=None): interface = None @@ -36,10 +38,11 @@ def add_options(parser=None, module=None, function=None): else: if spec.is_trait_type(InputMultiPath): args["nargs"] = "*" - parser.add_argument("--%s"%name, dest=name, + parser.add_argument("--%s" %name, dest=name, help=desc, **args) return parser, interface + def run_instance(interface, options): if interface: print("setting function inputs") @@ -48,12 +51,12 @@ def run_instance(interface, options): if getattr(options, input_name) != None: value = getattr(options, input_name) if not isinstance(value, bool): - #traits cannot cast from string to float or int + # traits cannot cast from string to float or int try: value = float(value) except: pass - #try to cast string input to boolean + # try to cast string input to boolean try: value = str2bool(value) except: @@ -62,7 +65,7 @@ def run_instance(interface, options): setattr(interface.inputs, input_name, value) except ValueError as e: - print("Error when setting the value of %s: '%s'"%(input_name, str(e))) + print("Error when setting the value of %s: '%s'" %(input_name, str(e))) print(interface.inputs) res = interface.run() @@ -81,7 +84,7 @@ def main(argv): parsed = parser.parse_args(args=argv[1:3]) _, prog = os.path.split(argv[0]) - interface_parser = argparse.ArgumentParser(description="Run %s"%parsed.interface, prog=" ".join([prog] + argv[1:3])) - interface_parser, interface = add_options(interface_parser, parsed.module, parsed.interface) + interface_parser = argparse.ArgumentParser(description="Run %s" %parsed.interface, prog=" ".join([prog] + argv[1:3])) + interface_parser, interface = add_options(interface_parser, parsed.module, parsed.interface) args = interface_parser.parse_args(args=argv[3:]) run_instance(interface, args) diff --git a/nipype/utils/onetime.py b/nipype/utils/onetime.py index f40560dedb..44ddd233c8 100644 --- a/nipype/utils/onetime.py +++ b/nipype/utils/onetime.py @@ -19,42 +19,43 @@ from builtins import object + class OneTimeProperty(object): - """A descriptor to make special properties that become normal attributes. - """ - def __init__(self, func): - """Create a OneTimeProperty instance. + """A descriptor to make special properties that become normal attributes. + """ + def __init__(self, func): + """Create a OneTimeProperty instance. - Parameters - ---------- - func : method + Parameters + ---------- + func : method - The method that will be called the first time to compute a value. - Afterwards, the method's name will be a standard attribute holding - the value of this computation. - """ - self.getter = func - self.name = func.__name__ + The method that will be called the first time to compute a value. + Afterwards, the method's name will be a standard attribute holding + the value of this computation. 
+ """ + self.getter = func + self.name = func.__name__ - def __get__(self, obj, type=None): - """ Called on attribute access on the class or instance. """ - if obj is None: - # Being called on the class, return the original function. This way, - # introspection works on the class. - return self.getter + def __get__(self, obj, type=None): + """ Called on attribute access on the class or instance. """ + if obj is None: + # Being called on the class, return the original function. This way, + # introspection works on the class. + return self.getter - val = self.getter(obj) - #print "** setattr_on_read - loading '%s'" % self.name # dbg - setattr(obj, self.name, val) - return val + val = self.getter(obj) + # print "** setattr_on_read - loading '%s'" % self.name # dbg + setattr(obj, self.name, val) + return val def setattr_on_read(func): -# XXX - beetter names for this? -# - cor_property (copy on read property) -# - sor_property (set on read property) -# - prop2attr_on_read -#... ? + # XXX - beetter names for this? + # - cor_property (copy on read property) + # - sor_property (set on read property) + # - prop2attr_on_read + # ... ? """Decorator to create OneTimeProperty attributes. diff --git a/nipype/utils/provenance.py b/nipype/utils/provenance.py index 16dd645cfe..f6c6e1e6a5 100644 --- a/nipype/utils/provenance.py +++ b/nipype/utils/provenance.py @@ -15,9 +15,9 @@ except ImportError: from ordereddict import OrderedDict -#try: +# try: # import prov.model as pm -#except ImportError: +# except ImportError: from ..external import provcopy as pm from ..external.six import string_types @@ -35,12 +35,14 @@ "cryptographicHashFunctions/")) get_id = lambda: niiri[uuid1().hex] + def get_attr_id(attr, skip=None): dictwithhash, hashval = get_hashval(attr, skip=skip) return niiri[hashval] max_text_len = 1024000 + def get_hashval(inputdict, skip=None): """Return a dictionary of our items with hashes for each file. 
@@ -85,6 +87,7 @@ def get_hashval(inputdict, skip=None): dict_withhash[outname] = _get_sorteddict(val, True) return (dict_withhash, md5(str(dict_nofilename)).hexdigest()) + def _get_sorteddict(object, dictwithhash=False): if isinstance(object, dict): out = OrderedDict() @@ -283,18 +286,18 @@ def add_results(self, results): try: a0_attrs.update({nipype_ns['command']: safe_encode(runtime.cmdline)}) a0_attrs.update({nipype_ns['commandPath']: - safe_encode(runtime.command_path)}) + safe_encode(runtime.command_path)}) a0_attrs.update({nipype_ns['dependencies']: - safe_encode(runtime.dependencies)}) + safe_encode(runtime.dependencies)}) except AttributeError: pass a0 = self.g.activity(get_id(), runtime.startTime, runtime.endTime, - a0_attrs) + a0_attrs) # environment id = get_id() env_collection = self.g.collection(id) env_collection.add_extra_attributes({pm.PROV['type']: - nipype_ns['Environment'], + nipype_ns['Environment'], pm.PROV['label']: "Environment"}) self.g.used(a0, id) # write environment entities @@ -317,7 +320,7 @@ def add_results(self, results): id = get_id() input_collection = self.g.collection(id) input_collection.add_extra_attributes({pm.PROV['type']: - nipype_ns['Inputs'], + nipype_ns['Inputs'], pm.PROV['label']: "Inputs"}) # write input entities for idx, (key, val) in enumerate(sorted(inputs.items())): @@ -334,7 +337,7 @@ def add_results(self, results): if not isinstance(outputs, dict): outputs = outputs.get_traitsfree() output_collection.add_extra_attributes({pm.PROV['type']: - nipype_ns['Outputs'], + nipype_ns['Outputs'], pm.PROV['label']: "Outputs"}) self.g.wasGeneratedBy(output_collection, a0) @@ -350,7 +353,7 @@ def add_results(self, results): id = get_id() runtime_collection = self.g.collection(id) runtime_collection.add_extra_attributes({pm.PROV['type']: - nipype_ns['Runtime'], + nipype_ns['Runtime'], pm.PROV['label']: "RuntimeInfo"}) self.g.wasGeneratedBy(runtime_collection, a0) @@ -377,7 +380,7 @@ def add_results(self, results): agent_attr.update({nipype_ns[key]: safe_encode(value)}) software_agent = self.g.agent(get_attr_id(agent_attr), agent_attr) self.g.wasAssociatedWith(a0, user_agent, None, None, - {pm.PROV["hadRole"]: nipype_ns["LoggedInUser"]}) + {pm.PROV["hadRole"]: nipype_ns["LoggedInUser"]}) self.g.wasAssociatedWith(a0, software_agent) return self.g diff --git a/nipype/utils/spm_docs.py b/nipype/utils/spm_docs.py index 2f81542425..dd720e024a 100644 --- a/nipype/utils/spm_docs.py +++ b/nipype/utils/spm_docs.py @@ -6,6 +6,7 @@ from nipype.interfaces import matlab + def grab_doc(task_name): """Grab the SPM documentation for the given SPM task named `task_name` @@ -41,9 +42,9 @@ def _strip_header(doc): """ hdr = 'NIPYPE' - cruft = '\x1b' # There's some weird cruft at the end of the - # docstring, almost looks like the hex for the - # escape character 0x1b. + # There's some weird cruft at the end of the docstring, almost looks like + # the hex for the escape character 0x1b. 
+ cruft = '\x1b' try: index = doc.index(hdr) index += len(hdr) diff --git a/nipype/utils/tests/test_cmd.py b/nipype/utils/tests/test_cmd.py index 11af7b2ccc..91e565e6c3 100644 --- a/nipype/utils/tests/test_cmd.py +++ b/nipype/utils/tests/test_cmd.py @@ -9,6 +9,7 @@ from nipype.external.six import PY2, PY3, StringIO from nipype.utils import nipype_cmd + @contextmanager def capture_sys_output(): caputure_out, capture_err = StringIO(), StringIO() @@ -32,12 +33,12 @@ def test_main_returns_2_on_empty(self): if PY2: self.assertEqual(stderr.getvalue(), -"""usage: nipype_cmd [-h] module interface + """usage: nipype_cmd [-h] module interface nipype_cmd: error: too few arguments """) elif PY3: self.assertEqual(stderr.getvalue(), -"""usage: nipype_cmd [-h] module interface + """usage: nipype_cmd [-h] module interface nipype_cmd: error: the following arguments are required: module, interface """) @@ -53,7 +54,7 @@ def test_main_returns_0_on_help(self): self.assertEqual(stderr.getvalue(), '') self.assertEqual(stdout.getvalue(), -"""usage: nipype_cmd [-h] module interface + """usage: nipype_cmd [-h] module interface Nipype interface runner @@ -79,7 +80,7 @@ def test_list_nipy_interfacesp(self): self.assertEqual(stderr.getvalue(), '') self.assertEqual(stdout.getvalue(), -"""Available Interfaces: + """Available Interfaces: ComputeMask EstimateContrast FitGLM diff --git a/nipype/utils/tests/test_docparse.py b/nipype/utils/tests/test_docparse.py index 0138f92c9d..9e3871d301 100644 --- a/nipype/utils/tests/test_docparse.py +++ b/nipype/utils/tests/test_docparse.py @@ -5,6 +5,7 @@ from nipype.utils.docparse import reverse_opt_map, build_doc, insert_doc + class Foo(object): opt_map = {'outline': '-o', 'fun': '-f %.2f', 'flags': '%s'} @@ -31,11 +32,13 @@ class Foo(object): ----------------- -v verbose""" + def test_rev_opt_map(): map = {'-f': 'fun', '-o': 'outline'} rev_map = reverse_opt_map(Foo.opt_map) assert_equal(rev_map, map) + def test_build_doc(): opts = reverse_opt_map(Foo.opt_map) doc = build_doc(foo_doc, opts) @@ -56,6 +59,7 @@ def test_build_doc(): ----------------- -v verbose""" + def test_insert_doc(): new_items = ['infile : str', ' The name of the input file'] new_items.extend(['outfile : str', ' The name of the output file']) diff --git a/nipype/utils/tests/test_filemanip.py b/nipype/utils/tests/test_filemanip.py index 0c958f3da6..2c0aaa44b1 100644 --- a/nipype/utils/tests/test_filemanip.py +++ b/nipype/utils/tests/test_filemanip.py @@ -15,6 +15,7 @@ import numpy as np + def test_split_filename(): res = split_filename('foo.nii') yield assert_equal, res, ('', 'foo', '.nii') @@ -29,6 +30,7 @@ def test_split_filename(): res = split_filename('/usr/local/') yield assert_equal, res, ('/usr/local', '', '') + def test_fname_presuffix(): fname = 'foo.nii' pth = fname_presuffix(fname, 'pre_', '_post', '/tmp') @@ -39,17 +41,20 @@ def test_fname_presuffix(): pth = fname_presuffix(fname, 'pre_', '_post', '/tmp', use_ext=False) yield assert_equal, pth, '/tmp/pre_foo_post' + def test_fnames_presuffix(): fnames = ['foo.nii', 'bar.nii'] pths = fnames_presuffix(fnames, 'pre_', '_post', '/tmp') yield assert_equal, pths, ['/tmp/pre_foo_post.nii', '/tmp/pre_bar_post.nii'] + def test_hash_rename(): new_name = hash_rename('foobar.nii', 'abc123') yield assert_equal, new_name, 'foobar_0xabc123.nii' new_name = hash_rename('foobar.nii.gz', 'abc123') yield assert_equal, new_name, 'foobar_0xabc123.nii.gz' + def test_check_forhash(): fname = 'foobar' orig_hash = '_0x4323dbcefdc51906decd8edcb3327943' @@ -61,14 +66,16 @@ def 
test_check_forhash(): yield assert_false, result yield assert_equal, hash, None + def _temp_analyze_files(): """Generate temporary analyze file pair.""" - fd, orig_img = mkstemp(suffix = '.img') + fd, orig_img = mkstemp(suffix='.img') orig_hdr = orig_img[:-4] + '.hdr' fp = open(orig_hdr, 'w+') fp.close() return orig_img, orig_hdr + def test_copyfile(): orig_img, orig_hdr = _temp_analyze_files() pth, fname = os.path.split(orig_img) @@ -83,6 +90,7 @@ def test_copyfile(): os.unlink(orig_img) os.unlink(orig_hdr) + def test_copyfile_true(): orig_img, orig_hdr = _temp_analyze_files() pth, fname = os.path.split(orig_img) @@ -98,6 +106,7 @@ def test_copyfile_true(): os.unlink(orig_img) os.unlink(orig_hdr) + def test_copyfiles(): orig_img1, orig_hdr1 = _temp_analyze_files() orig_img2, orig_hdr2 = _temp_analyze_files() @@ -122,6 +131,7 @@ def test_copyfiles(): os.unlink(new_img2) os.unlink(new_hdr2) + def test_filename_to_list(): x = filename_to_list('foo.nii') yield assert_equal, x, ['foo.nii'] @@ -132,21 +142,24 @@ def test_filename_to_list(): x = filename_to_list(12.34) yield assert_equal, x, None + def test_list_to_filename(): x = list_to_filename(['foo.nii']) yield assert_equal, x, 'foo.nii' x = list_to_filename(['foo', 'bar']) yield assert_equal, x, ['foo', 'bar'] + def test_json(): # Simple roundtrip test of json files, just a sanity check. adict = dict(a='one', c='three', b='two') fd, name = mkstemp(suffix='.json') - save_json(name, adict) # save_json closes the file + save_json(name, adict) # save_json closes the file new_dict = load_json(name) os.unlink(name) yield assert_equal, sorted(adict.items()), sorted(new_dict.items()) + def test_related_files(): file1 = '/path/test.img' file2 = '/path/test.hdr' diff --git a/nipype/utils/tests/test_misc.py b/nipype/utils/tests/test_misc.py index 5a0f6bd382..861ee4ec5e 100644 --- a/nipype/utils/tests/test_misc.py +++ b/nipype/utils/tests/test_misc.py @@ -33,9 +33,11 @@ def test_cont_to_str(): # no reason why they shouldn't work. 
yield assert_true, (container_to_string(123) == '123') + def _func1(x): return x**3 + def test_func_to_str(): def func1(x): @@ -47,6 +49,7 @@ def func1(x): f_recreated = create_function_from_source(f_src) yield assert_equal, f(2.3), f_recreated(2.3) + def test_str2bool(): yield assert_true, str2bool("yes") yield assert_true, str2bool("true") @@ -58,18 +61,19 @@ def test_str2bool(): yield assert_false, str2bool("f") yield assert_false, str2bool("0") + def test_flatten(): - in_list = [[1,2,3],[4],[[5,6],7],8] + in_list = [[1, 2, 3], [4], [[5, 6], 7], 8] flat = flatten(in_list) - yield assert_equal, flat, [1,2,3,4,5,6,7,8] + yield assert_equal, flat, [1, 2, 3, 4, 5, 6, 7, 8] back = unflatten(flat, in_list) yield assert_equal, in_list, back - new_list = [2,3,4,5,6,7,8,9] + new_list = [2, 3, 4, 5, 6, 7, 8, 9] back = unflatten(new_list, in_list) - yield assert_equal, back, [[2,3,4],[5],[[6,7],8],9] + yield assert_equal, back, [[2, 3, 4], [5], [[6, 7], 8], 9] flat = flatten([]) yield assert_equal, flat, [] diff --git a/nipype/workflows/dmri/camino/connectivity_mapping.py b/nipype/workflows/dmri/camino/connectivity_mapping.py index 838aa2371d..7cbb1ba996 100644 --- a/nipype/workflows/dmri/camino/connectivity_mapping.py +++ b/nipype/workflows/dmri/camino/connectivity_mapping.py @@ -1,7 +1,7 @@ import inspect import os.path as op -from ....interfaces import io as nio # Data i/o +from ....interfaces import io as nio # Data i/o from ....interfaces import utility as util # utility from ....pipeline import engine as pe # pypeline engine from ....interfaces import camino as camino @@ -54,13 +54,13 @@ def create_connectivity_pipeline(name="connectivity"): """ inputnode_within = pe.Node(interface=util.IdentityInterface(fields=["subject_id", - "dwi", - "bvecs", - "bvals", - "subjects_dir", - "resolution_network_file", - ]), - name="inputnode_within") + "dwi", + "bvecs", + "bvals", + "subjects_dir", + "resolution_network_file", + ]), + name="inputnode_within") FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource') @@ -82,7 +82,7 @@ def create_connectivity_pipeline(name="connectivity"): FSL's Brain Extraction tool is used to create a mask from the b0 image """ - b0Strip = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0') + b0Strip = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') """ FSL's FLIRT function is used to coregister the b0 mask and the structural image. @@ -90,16 +90,16 @@ def create_connectivity_pipeline(name="connectivity"): FLIRT is used once again to apply the inverse transformation to the parcellated brain image. """ - coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister') + coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister') coregister.inputs.cost = ('normmi') - convertxfm = pe.Node(interface=fsl.ConvertXFM(), name = 'convertxfm') + convertxfm = pe.Node(interface=fsl.ConvertXFM(), name='convertxfm') convertxfm.inputs.invert_xfm = True - inverse = pe.Node(interface=fsl.FLIRT(), name = 'inverse') + inverse = pe.Node(interface=fsl.FLIRT(), name='inverse') inverse.inputs.interp = ('nearestneighbour') - inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name = 'inverse_AparcAseg') + inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name='inverse_AparcAseg') inverse_AparcAseg.inputs.interp = ('nearestneighbour') """ @@ -143,7 +143,7 @@ def create_connectivity_pipeline(name="connectivity"): If desired, these tensors can be converted to a Nifti tensor image using the DT2NIfTI interface. 
""" - dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit') + dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit') """ Next, a lookup table is generated from the schemefile and the @@ -197,16 +197,16 @@ def create_connectivity_pipeline(name="connectivity"): into a single .nii file. """ - fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa') - trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace') + fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa') + trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace') dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') - analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_fa') + analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(), name='analyzeheader_fa') analyzeheader_fa.inputs.datatype = 'double' - analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_trace') + analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(), name='analyzeheader_trace') analyzeheader_trace.inputs.datatype = 'double' - fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii') + fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii') trace2nii = fa2nii.clone("trace2nii") """ @@ -260,46 +260,44 @@ def create_connectivity_pipeline(name="connectivity"): FreeSurfer input nodes: """ + mapping.connect([(inputnode_within, FreeSurferSource, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, FreeSurferSource, [("subject_id", "subject_id")])]) - mapping.connect([(inputnode_within, FreeSurferSource,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSource,[("subject_id","subject_id")])]) + mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subject_id", "subject_id")])]) - mapping.connect([(inputnode_within, FreeSurferSourceLH,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceLH,[("subject_id","subject_id")])]) - - mapping.connect([(inputnode_within, FreeSurferSourceRH,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceRH,[("subject_id","subject_id")])]) + mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subject_id", "subject_id")])]) """ Required conversions for processing in Camino: """ mapping.connect([(inputnode_within, image2voxel, [("dwi", "in_file")]), - (inputnode_within, fsl2scheme, [("bvecs", "bvec_file"), - ("bvals", "bval_file")]), - (image2voxel, dtifit,[['voxel_order','in_file']]), - (fsl2scheme, dtifit,[['scheme','scheme_file']]) - ]) + (inputnode_within, fsl2scheme, [("bvecs", "bvec_file"), + ("bvals", "bval_file")]), + (image2voxel, dtifit, [['voxel_order', 'in_file']]), + (fsl2scheme, dtifit, [['scheme', 'scheme_file']]) + ]) """ Nifti conversions for the subject's stripped brain image from Freesurfer: """ - - mapping.connect([(FreeSurferSource, mri_convert_Brain,[('brain','in_file')])]) + mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', 'in_file')])]) """ Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) """ - mapping.connect([(FreeSurferSourceLH, mris_convertLH,[('pial','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRH,[('pial','in_file')])]) - mapping.connect([(FreeSurferSourceLH, 
mris_convertLHwhite,[('white','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite,[('white','in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated,[('inflated','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated,[('inflated','in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere,[('sphere','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere,[('sphere','in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, [('inflated', 'in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, [('sphere', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, [('sphere', 'in_file')])]) """ The annotation files are converted using the pial surface as a map via the MRIsConvert interface. @@ -307,8 +305,8 @@ def create_connectivity_pipeline(name="connectivity"): specifically (rather than i.e. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. """ - mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,[('pial','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,[('pial','in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [('pial', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [('pial', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) @@ -318,25 +316,25 @@ def create_connectivity_pipeline(name="connectivity"): code that have presented some users with errors. """ - mapping.connect([(inputnode_within, b0Strip,[('dwi','in_file')])]) - mapping.connect([(inputnode_within, b0Strip,[('dwi','t2_guided')])]) # Added to improve damaged brain extraction - mapping.connect([(b0Strip, coregister,[('out_file','in_file')])]) - mapping.connect([(mri_convert_Brain, coregister,[('out_file','reference')])]) - mapping.connect([(coregister, convertxfm,[('out_matrix_file','in_file')])]) - mapping.connect([(b0Strip, inverse,[('out_file','reference')])]) - mapping.connect([(convertxfm, inverse,[('out_file','in_matrix_file')])]) - mapping.connect([(mri_convert_Brain, inverse,[('out_file','in_file')])]) + mapping.connect([(inputnode_within, b0Strip, [('dwi', 'in_file')])]) + mapping.connect([(inputnode_within, b0Strip, [('dwi', 't2_guided')])]) # Added to improve damaged brain extraction + mapping.connect([(b0Strip, coregister, [('out_file', 'in_file')])]) + mapping.connect([(mri_convert_Brain, coregister, [('out_file', 'reference')])]) + mapping.connect([(coregister, convertxfm, [('out_matrix_file', 'in_file')])]) + mapping.connect([(b0Strip, inverse, [('out_file', 'reference')])]) + mapping.connect([(convertxfm, inverse, [('out_file', 'in_matrix_file')])]) + mapping.connect([(mri_convert_Brain, inverse, [('out_file', 'in_file')])]) """ The tractography pipeline consists of the following nodes. 
Further information about the tractography can be found in nipype/examples/dmri_camino_dti.py. """ - mapping.connect([(b0Strip, track,[("mask_file","seed_file")])]) - mapping.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])]) - mapping.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])]) - mapping.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])]) - mapping.connect([(picopdfs, track,[("pdfs","in_file")])]) + mapping.connect([(b0Strip, track, [("mask_file", "seed_file")])]) + mapping.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])]) + mapping.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])]) + mapping.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])]) + mapping.connect([(picopdfs, track, [("pdfs", "in_file")])]) """ Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the @@ -345,36 +343,35 @@ def create_connectivity_pipeline(name="connectivity"): files will be correct and readable. """ - mapping.connect([(dtifit, fa,[("tensor_fitted","in_file")])]) - mapping.connect([(fa, analyzeheader_fa,[("fa","in_file")])]) - mapping.connect([(inputnode_within, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - mapping.connect([(fa, fa2nii,[('fa','data_file')])]) - mapping.connect([(inputnode_within, fa2nii,[(('dwi', get_affine), 'affine')])]) - mapping.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])]) + mapping.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) + mapping.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) + mapping.connect([(inputnode_within, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) + mapping.connect([(fa, fa2nii, [('fa', 'data_file')])]) + mapping.connect([(inputnode_within, fa2nii, [(('dwi', get_affine), 'affine')])]) + mapping.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) + mapping.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) + mapping.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) + mapping.connect([(inputnode_within, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) + mapping.connect([(trace, trace2nii, [('trace', 'data_file')])]) + mapping.connect([(inputnode_within, trace2nii, [(('dwi', get_affine), 'affine')])]) + mapping.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])]) - mapping.connect([(dtifit, trace,[("tensor_fitted","in_file")])]) - mapping.connect([(trace, analyzeheader_trace,[("trace","in_file")])]) - mapping.connect([(inputnode_within, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - mapping.connect([(trace, trace2nii,[('trace','data_file')])]) - mapping.connect([(inputnode_within, trace2nii,[(('dwi', get_affine), 'affine')])]) - mapping.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])]) - - mapping.connect([(dtifit, dteig,[("tensor_fitted","in_file")])]) + mapping.connect([(dtifit, dteig, [("tensor_fitted", "in_file")])]) """ The output tracts are converted to Trackvis format (and back). Here we also use the voxel- and data-grabbing functions defined at the beginning of the pipeline. 
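    (Editorial note, not part of the patch.) The tuples used here follow Nipype's connection-with-callable form: writing the source as ``(('dwi', get_vox_dims), 'voxel_dims')`` applies the helper to the upstream value before it reaches the destination input. A minimal sketch of such a helper and of one of the connections below, assuming nibabel is available and that ``get_vox_dims`` only approximates the function defined earlier in this module, would be::

        def get_vox_dims(volume):
            # read voxel sizes from the image header (first volume if a list is passed)
            import nibabel as nb
            if isinstance(volume, list):
                volume = volume[0]
            hdr = nb.load(volume).get_header()
            return [float(z) for z in hdr.get_zooms()[:3]]

        mapping.connect([(inputnode_within, camino2trackvis,
                          [(('dwi', get_vox_dims), 'voxel_dims')])])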
""" - mapping.connect([(track, camino2trackvis, [('tracked','in_file')]), - (track, vtkstreamlines,[['tracked','in_file']]), - (camino2trackvis, trk2camino,[['trackvis','in_file']]) - ]) - mapping.connect([(inputnode_within, camino2trackvis,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + mapping.connect([(track, camino2trackvis, [('tracked', 'in_file')]), + (track, vtkstreamlines, [['tracked', 'in_file']]), + (camino2trackvis, trk2camino, [['trackvis', 'in_file']]) + ]) + mapping.connect([(inputnode_within, camino2trackvis, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) """ Here the CMTK connectivity mapping nodes are connected. @@ -389,43 +386,43 @@ def create_connectivity_pipeline(name="connectivity"): creatematrix, 'resolution_network_file') mapping.connect([(FreeSurferSource, mri_convert_AparcAseg, [(('aparc_aseg', select_aparc), 'in_file')])]) - mapping.connect([(b0Strip, inverse_AparcAseg,[('out_file','reference')])]) - mapping.connect([(convertxfm, inverse_AparcAseg,[('out_file','in_matrix_file')])]) - mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg,[('out_file','in_file')])]) - mapping.connect([(mri_convert_AparcAseg, roigen_structspace,[('out_file','aparc_aseg_file')])]) - mapping.connect([(roigen_structspace, createnodes,[("roi_file","roi_file")])]) + mapping.connect([(b0Strip, inverse_AparcAseg, [('out_file', 'reference')])]) + mapping.connect([(convertxfm, inverse_AparcAseg, [('out_file', 'in_matrix_file')])]) + mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg, [('out_file', 'in_file')])]) + mapping.connect([(mri_convert_AparcAseg, roigen_structspace, [('out_file', 'aparc_aseg_file')])]) + mapping.connect([(roigen_structspace, createnodes, [("roi_file", "roi_file")])]) - mapping.connect([(inverse_AparcAseg, roigen,[("out_file","aparc_aseg_file")])]) - mapping.connect([(roigen, creatematrix,[("roi_file","roi_file")])]) - mapping.connect([(camino2trackvis, creatematrix,[("trackvis","tract_file")])]) - mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_file")])]) - mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_mat_file")])]) + mapping.connect([(inverse_AparcAseg, roigen, [("out_file", "aparc_aseg_file")])]) + mapping.connect([(roigen, creatematrix, [("roi_file", "roi_file")])]) + mapping.connect([(camino2trackvis, creatematrix, [("trackvis", "tract_file")])]) + mapping.connect([(inputnode_within, creatematrix, [("subject_id", "out_matrix_file")])]) + mapping.connect([(inputnode_within, creatematrix, [("subject_id", "out_matrix_mat_file")])]) """ The merge nodes defined earlier are used here to create lists of the files which are destined for the CFFConverter. 
""" - mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])]) - mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])]) - mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])]) - mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])]) - mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])]) - mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])]) - mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])]) - mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])]) + mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) + mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) + mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", "in3")])]) + mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", "in4")])]) + mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", "in5")])]) + mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", "in6")])]) + mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", "in7")])]) + mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", "in8")])]) - mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])]) - mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])]) + mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", "in1")])]) + mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", "in2")])]) - mapping.connect([(roigen, niftiVolumes,[("roi_file","in1")])]) - mapping.connect([(inputnode_within, niftiVolumes,[("dwi","in2")])]) - mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])]) + mapping.connect([(roigen, niftiVolumes, [("roi_file", "in1")])]) + mapping.connect([(inputnode_within, niftiVolumes, [("dwi", "in2")])]) + mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", "in1")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", "in2")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", "in3")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", "in4")])]) """ This block actually connects the merged lists to the CFF converter. 
We pass the surfaces @@ -436,14 +433,14 @@ def create_connectivity_pipeline(name="connectivity"): """ CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe())) - mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])]) - mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])]) - mapping.connect([(creatematrix, CFFConverter,[("matrix_files","gpickled_networks")])]) + mapping.connect([(giftiSurfaces, CFFConverter, [("out", "gifti_surfaces")])]) + mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) + mapping.connect([(creatematrix, CFFConverter, [("matrix_files", "gpickled_networks")])]) - mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])]) - mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])]) - mapping.connect([(camino2trackvis, CFFConverter,[("trackvis","tract_files")])]) - mapping.connect([(inputnode_within, CFFConverter,[("subject_id","title")])]) + mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) + mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) + mapping.connect([(camino2trackvis, CFFConverter, [("trackvis", "tract_files")])]) + mapping.connect([(inputnode_within, CFFConverter, [("subject_id", "title")])]) """ Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes @@ -453,41 +450,41 @@ def create_connectivity_pipeline(name="connectivity"): inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir", "resolution_network_file"]), name="inputnode") - outputnode = pe.Node(interface = util.IdentityInterface(fields=["fa", - "struct", - "trace", - "tracts", - "connectome", - "cmatrix", - "networks", - "rois", - "mean_fiber_length", - "fiber_length_std", - "tensors"]), - name="outputnode") + outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa", + "struct", + "trace", + "tracts", + "connectome", + "cmatrix", + "networks", + "rois", + "mean_fiber_length", + "fiber_length_std", + "tensors"]), + name="outputnode") connectivity = pe.Workflow(name="connectivity") - connectivity.base_output_dir=name + connectivity.base_output_dir = name connectivity.connect([(inputnode, mapping, [("dwi", "inputnode_within.dwi"), - ("bvals", "inputnode_within.bvals"), - ("bvecs", "inputnode_within.bvecs"), - ("subject_id", "inputnode_within.subject_id"), - ("subjects_dir", "inputnode_within.subjects_dir"), - ("resolution_network_file", "inputnode_within.resolution_network_file")]) - ]) + ("bvals", "inputnode_within.bvals"), + ("bvecs", "inputnode_within.bvecs"), + ("subject_id", "inputnode_within.subject_id"), + ("subjects_dir", "inputnode_within.subjects_dir"), + ("resolution_network_file", "inputnode_within.resolution_network_file")]) + ]) connectivity.connect([(mapping, outputnode, [("camino2trackvis.trackvis", "tracts"), - ("CFFConverter.connectome_file", "connectome"), - ("CreateMatrix.matrix_mat_file", "cmatrix"), - ("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"), - ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"), - ("fa2nii.nifti_file", "fa"), - ("CreateMatrix.matrix_files", "networks"), - ("ROIGen.roi_file", "rois"), - ("mri_convert_Brain.out_file", "struct"), - ("trace2nii.nifti_file", "trace"), - ("dtifit.tensor_fitted", "tensors")]) + ("CFFConverter.connectome_file", "connectome"), + ("CreateMatrix.matrix_mat_file", "cmatrix"), + 
("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"), + ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"), + ("fa2nii.nifti_file", "fa"), + ("CreateMatrix.matrix_files", "networks"), + ("ROIGen.roi_file", "rois"), + ("mri_convert_Brain.out_file", "struct"), + ("trace2nii.nifti_file", "trace"), + ("dtifit.tensor_fitted", "tensors")]) ]) return connectivity diff --git a/nipype/workflows/dmri/camino/diffusion.py b/nipype/workflows/dmri/camino/diffusion.py index cf0a5ffd51..3e176491f0 100644 --- a/nipype/workflows/dmri/camino/diffusion.py +++ b/nipype/workflows/dmri/camino/diffusion.py @@ -41,7 +41,6 @@ def create_camino_dti_pipeline(name="dtiproc"): inputnode1 = pe.Node(interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), name="inputnode1") - """ Setup for Diffusion Tensor Computation -------------------------------------- @@ -57,7 +56,7 @@ def create_camino_dti_pipeline(name="dtiproc"): Second, diffusion tensors are fit to the voxel-order data. """ - dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit') + dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit') """ Next, a lookup table is generated from the schemefile and the @@ -115,29 +114,28 @@ def create_camino_dti_pipeline(name="dtiproc"): Tracts can also be converted to VTK and OOGL formats, for use in programs such as GeomView and Paraview, using the following two nodes. """ - #vtkstreamlines = pe.Node(interface=camino.VtkStreamlines(), name="vtkstreamlines") - #procstreamlines = pe.Node(interface=camino.ProcStreamlines(), name="procstreamlines") - #procstreamlines.inputs.outputtracts = 'oogl' - + # vtkstreamlines = pe.Node(interface=camino.VtkStreamlines(), name="vtkstreamlines") + # procstreamlines = pe.Node(interface=camino.ProcStreamlines(), name="procstreamlines") + # procstreamlines.inputs.outputtracts = 'oogl' """ We can also produce a variety of scalar values from our fitted tensors. The following nodes generate the fractional anisotropy and diffusivity trace maps and their associated headers. 
""" - fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa') - #md = pe.Node(interface=camino.MD(),name='md') - trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace') + fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa') + # md = pe.Node(interface=camino.MD(),name='md') + trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace') dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') - analyzeheader_fa = pe.Node(interface= camino.AnalyzeHeader(), name = "analyzeheader_fa") + analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(), name="analyzeheader_fa") analyzeheader_fa.inputs.datatype = "double" analyzeheader_trace = analyzeheader_fa.clone('analyzeheader_trace') - #analyzeheader_md = pe.Node(interface= camino.AnalyzeHeader(), name = "analyzeheader_md") - #analyzeheader_md.inputs.datatype = "double" - #analyzeheader_trace = analyzeheader_md.clone('analyzeheader_trace') + # analyzeheader_md = pe.Node(interface= camino.AnalyzeHeader(), name = "analyzeheader_md") + # analyzeheader_md.inputs.datatype = "double" + # analyzeheader_trace = analyzeheader_md.clone('analyzeheader_trace') - fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii') + fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii') trace2nii = fa2nii.clone("trace2nii") """ @@ -146,51 +144,49 @@ def create_camino_dti_pipeline(name="dtiproc"): tractography = pe.Workflow(name='tractography') - tractography.connect([(inputnode1, bet,[("dwi","in_file")])]) + tractography.connect([(inputnode1, bet, [("dwi", "in_file")])]) """ File format conversion """ tractography.connect([(inputnode1, image2voxel, [("dwi", "in_file")]), - (inputnode1, fsl2scheme, [("bvecs", "bvec_file"), - ("bvals", "bval_file")]) + (inputnode1, fsl2scheme, [("bvecs", "bvec_file"), + ("bvals", "bval_file")]) ]) """ Tensor fitting """ - tractography.connect([(image2voxel, dtifit,[['voxel_order','in_file']]), - (fsl2scheme, dtifit,[['scheme','scheme_file']]) + tractography.connect([(image2voxel, dtifit, [['voxel_order', 'in_file']]), + (fsl2scheme, dtifit, [['scheme', 'scheme_file']]) ]) """ Workflow for applying DT streamline tractogpahy """ - tractography.connect([(bet, trackdt,[("mask_file","seed_file")])]) - tractography.connect([(dtifit, trackdt,[("tensor_fitted","in_file")])]) + tractography.connect([(bet, trackdt, [("mask_file", "seed_file")])]) + tractography.connect([(dtifit, trackdt, [("tensor_fitted", "in_file")])]) """ Workflow for applying PICo """ - tractography.connect([(bet, trackpico,[("mask_file","seed_file")])]) - tractography.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])]) - tractography.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])]) - tractography.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])]) - tractography.connect([(picopdfs, trackpico,[("pdfs","in_file")])]) - + tractography.connect([(bet, trackpico, [("mask_file", "seed_file")])]) + tractography.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])]) + tractography.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])]) + tractography.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])]) + tractography.connect([(picopdfs, trackpico, [("pdfs", "in_file")])]) # Mean diffusivity still appears broken - #tractography.connect([(dtifit, md,[("tensor_fitted","in_file")])]) - #tractography.connect([(md, analyzeheader_md,[("md","in_file")])]) - #tractography.connect([(inputnode, analyzeheader_md,[(('dwi', get_vox_dims), 'voxel_dims'), - #(('dwi', get_data_dims), 
'data_dims')])]) - #This line is commented out because the ProcStreamlines node keeps throwing memory errors - #tractography.connect([(track, procstreamlines,[("tracked","in_file")])]) - + # tractography.connect([(dtifit, md,[("tensor_fitted","in_file")])]) + # tractography.connect([(md, analyzeheader_md,[("md","in_file")])]) + # tractography.connect([(inputnode, analyzeheader_md,[(('dwi', get_vox_dims), 'voxel_dims'), + # (('dwi', get_data_dims), 'data_dims')])]) + # This line is commented out because the ProcStreamlines node keeps throwing memory errors + # tractography.connect([(track, procstreamlines,[("tracked","in_file")])]) """ Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the @@ -199,55 +195,53 @@ def create_camino_dti_pipeline(name="dtiproc"): This is also where our voxel- and data-grabbing functions come in. We pass these functions, along with the original DWI image from the input node, to the header-generating nodes. This ensures that the files will be correct and readable. """ - tractography.connect([(dtifit, fa,[("tensor_fitted","in_file")])]) - tractography.connect([(fa, analyzeheader_fa,[("fa","in_file")])]) - tractography.connect([(inputnode1, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - tractography.connect([(fa, fa2nii,[('fa','data_file')])]) - tractography.connect([(inputnode1, fa2nii,[(('dwi', get_affine), 'affine')])]) - tractography.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])]) - - - tractography.connect([(dtifit, trace,[("tensor_fitted","in_file")])]) - tractography.connect([(trace, analyzeheader_trace,[("trace","in_file")])]) - tractography.connect([(inputnode1, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - tractography.connect([(trace, trace2nii,[('trace','data_file')])]) - tractography.connect([(inputnode1, trace2nii,[(('dwi', get_affine), 'affine')])]) - tractography.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])]) + tractography.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) + tractography.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) + tractography.connect([(inputnode1, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) + tractography.connect([(fa, fa2nii, [('fa', 'data_file')])]) + tractography.connect([(inputnode1, fa2nii, [(('dwi', get_affine), 'affine')])]) + tractography.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) - tractography.connect([(dtifit, dteig,[("tensor_fitted","in_file")])]) + tractography.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) + tractography.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) + tractography.connect([(inputnode1, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) + tractography.connect([(trace, trace2nii, [('trace', 'data_file')])]) + tractography.connect([(inputnode1, trace2nii, [(('dwi', get_affine), 'affine')])]) + tractography.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])]) - tractography.connect([(trackpico, cam2trk_pico, [('tracked','in_file')])]) - tractography.connect([(trackdt, cam2trk_dt, [('tracked','in_file')])]) - tractography.connect([(inputnode1, cam2trk_pico,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + tractography.connect([(dtifit, dteig, [("tensor_fitted", 
"in_file")])]) - tractography.connect([(inputnode1, cam2trk_dt,[(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + tractography.connect([(trackpico, cam2trk_pico, [('tracked', 'in_file')])]) + tractography.connect([(trackdt, cam2trk_dt, [('tracked', 'in_file')])]) + tractography.connect([(inputnode1, cam2trk_pico, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) + tractography.connect([(inputnode1, cam2trk_dt, [(('dwi', get_vox_dims), 'voxel_dims'), + (('dwi', get_data_dims), 'data_dims')])]) - inputnode= pe.Node(interface = util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), name="inputnode") + inputnode = pe.Node(interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), name="inputnode") - outputnode = pe.Node(interface = util.IdentityInterface(fields=["fa", - "trace", - "tracts_pico", - "tracts_dt", - "tensors"]), - name="outputnode") + outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa", + "trace", + "tracts_pico", + "tracts_dt", + "tensors"]), + name="outputnode") workflow = pe.Workflow(name=name) - workflow.base_output_dir=name + workflow.base_output_dir = name workflow.connect([(inputnode, tractography, [("dwi", "inputnode1.dwi"), - ("bvals", "inputnode1.bvals"), - ("bvecs", "inputnode1.bvecs")])]) + ("bvals", "inputnode1.bvals"), + ("bvecs", "inputnode1.bvecs")])]) workflow.connect([(tractography, outputnode, [("cam2trk_dt.trackvis", "tracts_dt"), - ("cam2trk_pico.trackvis", "tracts_pico"), - ("fa2nii.nifti_file", "fa"), - ("trace2nii.nifti_file", "trace"), - ("dtifit.tensor_fitted", "tensors")]) + ("cam2trk_pico.trackvis", "tracts_pico"), + ("fa2nii.nifti_file", "fa"), + ("trace2nii.nifti_file", "trace"), + ("dtifit.tensor_fitted", "tensors")]) ]) return workflow diff --git a/nipype/workflows/dmri/camino/group_connectivity.py b/nipype/workflows/dmri/camino/group_connectivity.py index 92b2eeb21b..f2a03c1140 100644 --- a/nipype/workflows/dmri/camino/group_connectivity.py +++ b/nipype/workflows/dmri/camino/group_connectivity.py @@ -49,14 +49,14 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ if template_args_dict == 0: info = dict(dwi=[['subject_id', 'dwi']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) else: info = template_args_dict datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" datasource.inputs.base_directory = data_dir @@ -78,22 +78,22 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ l1pipeline = pe.Workflow(name="l1pipeline_"+group_id) l1pipeline.base_dir = output_dir l1pipeline.base_output_dir = group_id - l1pipeline.connect([(subj_infosource, datasource,[('subject_id', 'subject_id')])]) - l1pipeline.connect([(subj_infosource, conmapper,[('subject_id', 'inputnode.subject_id')])]) + l1pipeline.connect([(subj_infosource, datasource, [('subject_id', 'subject_id')])]) + l1pipeline.connect([(subj_infosource, conmapper, [('subject_id', 'inputnode.subject_id')])]) l1pipeline.connect([(datasource, conmapper, [("dwi", "inputnode.dwi"), - ("bvals", "inputnode.bvals"), - ("bvecs", "inputnode.bvecs"), + ("bvals", "inputnode.bvals"), + ("bvecs", "inputnode.bvecs"), ])]) l1pipeline.connect([(conmapper, datasink, [("outputnode.connectome", "@l1output.cff"), - ("outputnode.fa", "@l1output.fa"), - 
("outputnode.tracts", "@l1output.tracts"), - ("outputnode.trace", "@l1output.trace"), - ("outputnode.cmatrix", "@l1output.cmatrix"), - ("outputnode.rois", "@l1output.rois"), - ("outputnode.struct", "@l1output.struct"), - ("outputnode.networks", "@l1output.networks"), - ("outputnode.mean_fiber_length", "@l1output.mean_fiber_length"), - ("outputnode.fiber_length_std", "@l1output.fiber_length_std"), - ])]) - l1pipeline.connect([(group_infosource, datasink,[('group_id','@group_id')])]) + ("outputnode.fa", "@l1output.fa"), + ("outputnode.tracts", "@l1output.tracts"), + ("outputnode.trace", "@l1output.trace"), + ("outputnode.cmatrix", "@l1output.cmatrix"), + ("outputnode.rois", "@l1output.rois"), + ("outputnode.struct", "@l1output.struct"), + ("outputnode.networks", "@l1output.networks"), + ("outputnode.mean_fiber_length", "@l1output.mean_fiber_length"), + ("outputnode.fiber_length_std", "@l1output.fiber_length_std"), + ])]) + l1pipeline.connect([(group_infosource, datasink, [('group_id', '@group_id')])]) return l1pipeline diff --git a/nipype/workflows/dmri/connectivity/__init__.py b/nipype/workflows/dmri/connectivity/__init__.py index 78434ae883..8b9ffea19a 100644 --- a/nipype/workflows/dmri/connectivity/__init__.py +++ b/nipype/workflows/dmri/connectivity/__init__.py @@ -1,7 +1,7 @@ from __future__ import absolute_import from .nx import (create_networkx_pipeline, create_cmats_to_csv_pipeline) from .group_connectivity import (create_merge_networks_by_group_workflow, - create_merge_network_results_by_group_workflow, - create_merge_group_networks_workflow, - create_merge_group_network_results_workflow, - create_average_networks_by_group_workflow) + create_merge_network_results_by_group_workflow, + create_merge_group_networks_workflow, + create_merge_group_network_results_workflow, + create_average_networks_by_group_workflow) diff --git a/nipype/workflows/dmri/connectivity/group_connectivity.py b/nipype/workflows/dmri/connectivity/group_connectivity.py index 5afe2983d3..2f8438bfe8 100644 --- a/nipype/workflows/dmri/connectivity/group_connectivity.py +++ b/nipype/workflows/dmri/connectivity/group_connectivity.py @@ -176,27 +176,26 @@ def create_merge_network_results_by_group_workflow(group_list, group_id, data_di group_infosource.inputs.group_id = group_id l2infosource = pe.Node(interface=util.IdentityInterface(fields=['group_id', - 'merged', - ]), name='l2infosource') + 'merged']), name='l2infosource') l2source = pe.Node( nio.DataGrabber( infields=['group_id'], outfields=['CFFfiles', 'CSVmatrices', - 'CSVfibers', 'CSVnodal', 'CSVglobal']), name='l2source') + 'CSVfibers', 'CSVnodal', 'CSVglobal']), name='l2source') l2source.inputs.template_args = dict( CFFfiles=[['group_id']], CSVmatrices=[['group_id']], - CSVnodal=[['group_id']], CSVglobal=[['group_id']], CSVfibers=[['group_id']]) + CSVnodal=[['group_id']], CSVglobal=[['group_id']], CSVfibers=[['group_id']]) l2source.inputs.base_directory = data_dir l2source.inputs.template = '%s/%s' l2source.inputs.field_template = dict( CFFfiles=op.join(output_dir, '%s/cff/*/connectome.cff'), - CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'), CSVnodal=op.join(output_dir, '%s/nxcsv/*/*nodal*.csv'), - CSVglobal=op.join(output_dir, '%s/nxcsv/*/*global*.csv'), CSVfibers=op.join(output_dir, '%s/fiber_csv/*/*fibers*.csv')) + CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'), CSVnodal=op.join(output_dir, '%s/nxcsv/*/*nodal*.csv'), + CSVglobal=op.join(output_dir, '%s/nxcsv/*/*global*.csv'), CSVfibers=op.join(output_dir, 
'%s/fiber_csv/*/*fibers*.csv')) l2source.inputs.sort_filelist = True l2inputnode = pe.Node(interface=util.IdentityInterface(fields=['CFFfiles', - 'CSVfibers', 'CSVmatrices', 'CSVnodal', 'CSVglobal', 'network_file']), name='l2inputnode') + 'CSVfibers', 'CSVmatrices', 'CSVnodal', 'CSVglobal', 'network_file']), name='l2inputnode') MergeCNetworks = pe.Node( interface=cmtk.MergeCNetworks(), name="MergeCNetworks") @@ -238,7 +237,7 @@ def create_merge_network_results_by_group_workflow(group_list, group_id, data_di concat_csv_interface = Function( input_names=["in_files"], output_names=["out_name"], - function=concatcsv) + function=concatcsv) concat_node_csvs = pe.Node( interface=concat_csv_interface, name='concat_node_csvs') @@ -267,7 +266,7 @@ def create_merge_network_results_by_group_workflow(group_list, group_id, data_di l2pipeline.connect([(group_infosource, AddCSVColumn_global, [( 'group_id', 'extra_field')])]) l2pipeline.connect([(AddCSVColumn_global, l2datasink, [('csv_file', - '@l2output.global_csv')])]) + '@l2output.global_csv')])]) l2pipeline.connect( [(l2inputnode, concat_matrix_csvs, [('CSVmatrices', 'in_files')])]) @@ -285,7 +284,7 @@ def create_merge_network_results_by_group_workflow(group_list, group_id, data_di l2pipeline.connect([(group_infosource, AddCSVColumn_fibers, [( 'group_id', 'extra_field')])]) l2pipeline.connect([(AddCSVColumn_fibers, l2datasink, [('csv_file', - '@l2output.fibers_csv')])]) + '@l2output.fibers_csv')])]) return l2pipeline @@ -388,7 +387,7 @@ def create_merge_group_network_results_workflow(group_list, data_dir, subjects_d l3source.inputs.field_template = dict( CFFfiles=op.join(output_dir, '%s/*.cff'), CSVnodemetrics=op.join(output_dir, '%s/node_csv/*.csv'), - CSVglobalmetrics=op.join(output_dir, '%s/global_csv/*.csv'), CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv')) + CSVglobalmetrics=op.join(output_dir, '%s/global_csv/*.csv'), CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv')) l3inputnode = pe.Node(interface=util.IdentityInterface(fields=['Group_CFFs', 'Group_CSVnodemetrics', 'Group_CSVglobalmetrics', 'Group_CSVmatrices']), name='l3inputnode') @@ -412,7 +411,7 @@ def create_merge_group_network_results_workflow(group_list, data_dir, subjects_d l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file', '@l3output')])]) concat_csv_interface = Function(input_names=["in_files"], output_names=["out_name"], - function=concatcsv) + function=concatcsv) concat_node_csvs = pe.Node(interface=concat_csv_interface, name='concat_node_csvs') concat_global_csvs = pe.Node(interface=concat_csv_interface, name='concat_global_csvs') @@ -465,12 +464,12 @@ def create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir raise Exception l4info = dict(networks=[['group_id', '']], CMatrices=[['group_id', '']], fibmean=[['group_id', 'mean_fiber_length']], - fibdev=[['group_id', 'fiber_length_std']]) + fibdev=[['group_id', 'fiber_length_std']]) l4source_grp1 = pe.Node(nio.DataGrabber(infields=['group_id'], outfields=list(l4info.keys())), name='l4source_grp1') l4source_grp1.inputs.template = '%s/%s' l4source_grp1.inputs.field_template = dict(networks=op.join(output_dir, '%s/networks/*/*%s*intersections*.pck'), CMatrices=op.join(output_dir, '%s/cmatrix/*/*%s*.mat'), - fibmean=op.join(output_dir, '%s/mean_fiber_length/*/*%s*.mat'), fibdev=op.join(output_dir, '%s/fiber_length_std/*/*%s*.mat')) + fibmean=op.join(output_dir, '%s/mean_fiber_length/*/*%s*.mat'), fibdev=op.join(output_dir, '%s/fiber_length_std/*/*%s*.mat')) 
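    # Editorial sketch, not part of this patch: how the DataGrabber configured above
    # resolves files. For every outfield, the entries in ``template_args`` are substituted
    # into the matching ``field_template`` pattern (falling back to the generic
    # ``template``); with networks=[['group_id', '']] the pattern
    # '%s/networks/*/*%s*intersections*.pck' globs '<group_id>/networks/*/**intersections*.pck'.
    # The node name below is hypothetical.
    example_grabber = pe.Node(nio.DataGrabber(infields=['group_id'],
                                              outfields=['networks']),
                              name='example_grabber')
    example_grabber.inputs.base_directory = output_dir
    example_grabber.inputs.template = '%s/%s'
    example_grabber.inputs.field_template = dict(
        networks=op.join(output_dir, '%s/networks/*/*%s*intersections*.pck'))
    example_grabber.inputs.template_args = dict(networks=[['group_id', '']])
    example_grabber.inputs.sort_filelist = True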
l4source_grp1.inputs.base_directory = output_dir l4source_grp1.inputs.template_args = l4info l4source_grp1.inputs.sort_filelist = True @@ -478,7 +477,7 @@ def create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir l4source_grp2 = l4source_grp1.clone(name='l4source_grp2') l4inputnode = pe.Node(interface=util.IdentityInterface(fields=['networks_grp1', 'networks_grp2', 'CMatrices_grp1', 'CMatrices_grp2', - 'fibmean_grp1', 'fibmean_grp2', 'fibdev_grp1', 'fibdev_grp2']), name='l4inputnode') + 'fibmean_grp1', 'fibmean_grp2', 'fibdev_grp1', 'fibdev_grp2']), name='l4inputnode') average_networks_grp1 = pe.Node(interface=cmtk.AverageNetworks(), name='average_networks_grp1') average_networks_grp2 = average_networks_grp1.clone('average_networks_grp2') diff --git a/nipype/workflows/dmri/connectivity/nx.py b/nipype/workflows/dmri/connectivity/nx.py index 29b23e42f3..804f77ec84 100644 --- a/nipype/workflows/dmri/connectivity/nx.py +++ b/nipype/workflows/dmri/connectivity/nx.py @@ -5,16 +5,19 @@ from ....algorithms.misc import remove_identical_paths from .group_connectivity import pullnodeIDs + def add_global_to_filename(in_file): from nipype.utils.filemanip import split_filename path, name, ext = split_filename(in_file) return name + '_global' + ext + def add_nodal_to_filename(in_file): from nipype.utils.filemanip import split_filename path, name, ext = split_filename(in_file) return name + '_nodal' + ext + def create_networkx_pipeline(name="networkx", extra_column_heading="subject"): """Creates a workflow to calculate various graph measures (via NetworkX) on an input network. The output measures are then converted to comma-separated value @@ -42,7 +45,7 @@ def create_networkx_pipeline(name="networkx", extra_column_heading="subject"): outputnode.matlab_files """ - inputnode = pe.Node(interface = util.IdentityInterface(fields=["extra_field", "network_file"]), + inputnode = pe.Node(interface=util.IdentityInterface(fields=["extra_field", "network_file"]), name="inputnode") pipeline = pe.Workflow(name=name) @@ -56,33 +59,32 @@ def create_networkx_pipeline(name="networkx", extra_column_heading="subject"): MergeCSVFiles_global = MergeCSVFiles_node.clone(name="MergeCSVFiles_global") MergeCSVFiles_global.inputs.extra_column_heading = extra_column_heading - mergeNetworks = pe.Node(interface=util.Merge(2), name="mergeNetworks") mergeCSVs = mergeNetworks.clone("mergeCSVs") - pipeline.connect([(inputnode, ntwkMetrics,[("network_file","in_file")])]) - pipeline.connect([(ntwkMetrics, Matlab2CSV_node,[("node_measures_matlab","in_file")])]) - pipeline.connect([(ntwkMetrics, Matlab2CSV_global,[("global_measures_matlab","in_file")])]) + pipeline.connect([(inputnode, ntwkMetrics, [("network_file", "in_file")])]) + pipeline.connect([(ntwkMetrics, Matlab2CSV_node, [("node_measures_matlab", "in_file")])]) + pipeline.connect([(ntwkMetrics, Matlab2CSV_global, [("global_measures_matlab", "in_file")])]) - pipeline.connect([(Matlab2CSV_node, MergeCSVFiles_node,[("csv_files","in_files")])]) + pipeline.connect([(Matlab2CSV_node, MergeCSVFiles_node, [("csv_files", "in_files")])]) pipeline.connect([(inputnode, MergeCSVFiles_node, [(("extra_field", add_nodal_to_filename), "out_file")])]) - pipeline.connect([(inputnode, MergeCSVFiles_node,[("extra_field","extra_field")])]) + pipeline.connect([(inputnode, MergeCSVFiles_node, [("extra_field", "extra_field")])]) pipeline.connect([(inputnode, MergeCSVFiles_node, [(("network_file", pullnodeIDs), "row_headings")])]) - pipeline.connect([(Matlab2CSV_global, 
MergeCSVFiles_global,[("csv_files","in_files")])]) + pipeline.connect([(Matlab2CSV_global, MergeCSVFiles_global, [("csv_files", "in_files")])]) pipeline.connect([(Matlab2CSV_global, MergeCSVFiles_global, [(("csv_files", remove_identical_paths), "column_headings")])]) - #MergeCSVFiles_global.inputs.row_heading_title = 'metric' - #MergeCSVFiles_global.inputs.column_headings = ['average'] + # MergeCSVFiles_global.inputs.row_heading_title = 'metric' + # MergeCSVFiles_global.inputs.column_headings = ['average'] pipeline.connect([(inputnode, MergeCSVFiles_global, [(("extra_field", add_global_to_filename), "out_file")])]) - pipeline.connect([(inputnode, MergeCSVFiles_global,[("extra_field","extra_field")])]) + pipeline.connect([(inputnode, MergeCSVFiles_global, [("extra_field", "extra_field")])]) - pipeline.connect([(inputnode, mergeNetworks,[("network_file","in1")])]) - pipeline.connect([(ntwkMetrics, mergeNetworks,[("gpickled_network_files","in2")])]) + pipeline.connect([(inputnode, mergeNetworks, [("network_file", "in1")])]) + pipeline.connect([(ntwkMetrics, mergeNetworks, [("gpickled_network_files", "in2")])]) - outputnode = pe.Node(interface = util.IdentityInterface(fields=["network_files", - "csv_files", "matlab_files", "node_csv", "global_csv"]), - name="outputnode") + outputnode = pe.Node(interface=util.IdentityInterface(fields=["network_files", + "csv_files", "matlab_files", "node_csv", "global_csv"]), + name="outputnode") pipeline.connect([(MergeCSVFiles_node, outputnode, [("csv_file", "node_csv")])]) pipeline.connect([(MergeCSVFiles_global, outputnode, [("csv_file", "global_csv")])]) @@ -91,9 +93,10 @@ def create_networkx_pipeline(name="networkx", extra_column_heading="subject"): pipeline.connect([(MergeCSVFiles_global, mergeCSVs, [("csv_file", "in2")])]) pipeline.connect([(mergeNetworks, outputnode, [("out", "network_files")])]) pipeline.connect([(mergeCSVs, outputnode, [("out", "csv_files")])]) - pipeline.connect([(ntwkMetrics, outputnode,[("matlab_matrix_files", "matlab_files")])]) + pipeline.connect([(ntwkMetrics, outputnode, [("matlab_matrix_files", "matlab_files")])]) return pipeline + def create_cmats_to_csv_pipeline(name="cmats_to_csv", extra_column_heading="subject"): """Creates a workflow to convert the outputs from CreateMatrix into a single comma-separated value text file. 
An extra column / field is also added to the @@ -118,7 +121,7 @@ def create_cmats_to_csv_pipeline(name="cmats_to_csv", extra_column_heading="subj outputnode.csv_file """ - inputnode = pe.Node(interface = util.IdentityInterface(fields=["extra_field", "matlab_matrix_files"]), + inputnode = pe.Node(interface=util.IdentityInterface(fields=["extra_field", "matlab_matrix_files"]), name="inputnode") pipeline = pe.Workflow(name=name) @@ -127,12 +130,12 @@ def create_cmats_to_csv_pipeline(name="cmats_to_csv", extra_column_heading="subj MergeCSVFiles = pe.Node(interface=misc.MergeCSVFiles(), name="MergeCSVFiles") MergeCSVFiles.inputs.extra_column_heading = extra_column_heading - pipeline.connect([(inputnode, Matlab2CSV,[("matlab_matrix_files","in_file")])]) - pipeline.connect([(Matlab2CSV, MergeCSVFiles,[("csv_files","in_files")])]) - pipeline.connect([(inputnode, MergeCSVFiles,[("extra_field","extra_field")])]) + pipeline.connect([(inputnode, Matlab2CSV, [("matlab_matrix_files", "in_file")])]) + pipeline.connect([(Matlab2CSV, MergeCSVFiles, [("csv_files", "in_files")])]) + pipeline.connect([(inputnode, MergeCSVFiles, [("extra_field", "extra_field")])]) - outputnode = pe.Node(interface = util.IdentityInterface(fields=["csv_file"]), - name="outputnode") + outputnode = pe.Node(interface=util.IdentityInterface(fields=["csv_file"]), + name="outputnode") pipeline.connect([(MergeCSVFiles, outputnode, [("csv_file", "csv_file")])]) return pipeline diff --git a/nipype/workflows/dmri/dipy/denoise.py b/nipype/workflows/dmri/dipy/denoise.py index 08269d015d..6519584783 100644 --- a/nipype/workflows/dmri/dipy/denoise.py +++ b/nipype/workflows/dmri/dipy/denoise.py @@ -37,11 +37,11 @@ def nlmeans_pipeline(name='Denoise', wf = pe.Workflow(name=name) wf.connect([ (inputnode, nmask, [('in_file', 'in_file'), - ('in_mask', 'in_mask')]) - ,(inputnode, nlmeans, [('in_file', 'in_file'), - ('in_mask', 'in_mask')]) - ,(nmask, nlmeans, [('out_file', 'noise_mask')]) - ,(nlmeans, outputnode, [('out_file', 'out_file')]) + ('in_mask', 'in_mask')]), + (inputnode, nlmeans, [('in_file', 'in_file'), + ('in_mask', 'in_mask')]), + (nmask, nlmeans, [('out_file', 'noise_mask')]), + (nlmeans, outputnode, [('out_file', 'out_file')]) ]) return wf @@ -57,9 +57,9 @@ def csf_mask(in_file, in_mask, out_file=None): import os.path as op if out_file is None: - fname,ext = op.splitext(op.basename(in_file)) + fname, ext = op.splitext(op.basename(in_file)) if ext == ".gz": - fname,ext2 = op.splitext(fname) + fname, ext2 = op.splitext(fname) ext = ext2 + ext out_file = op.abspath("%s_csfmask%s" % (fname, ext)) @@ -100,9 +100,9 @@ def bg_mask(in_file, in_mask, out_file=None): import os.path as op if out_file is None: - fname,ext = op.splitext(op.basename(in_file)) + fname, ext = op.splitext(op.basename(in_file)) if ext == ".gz": - fname,ext2 = op.splitext(fname) + fname, ext2 = op.splitext(fname) ext = ext2 + ext out_file = op.abspath("%s_bgmask%s" % (fname, ext)) diff --git a/nipype/workflows/dmri/fsl/__init__.py b/nipype/workflows/dmri/fsl/__init__.py index b477b29a76..9d76d17ba1 100644 --- a/nipype/workflows/dmri/fsl/__init__.py +++ b/nipype/workflows/dmri/fsl/__init__.py @@ -2,13 +2,13 @@ from .dti import create_bedpostx_pipeline, bedpostx_parallel from .artifacts import (all_fmb_pipeline, all_peb_pipeline, all_fsl_pipeline, - hmc_pipeline, ecc_pipeline, sdc_fmb, sdc_peb, - remove_bias) + hmc_pipeline, ecc_pipeline, sdc_fmb, sdc_peb, + remove_bias) from .epi import (fieldmap_correction, topup_correction, - create_eddy_correct_pipeline, - 
create_epidewarp_pipeline, create_dmri_preprocessing) + create_eddy_correct_pipeline, + create_epidewarp_pipeline, create_dmri_preprocessing) from .tbss import (create_tbss_1_preproc, create_tbss_2_reg, - create_tbss_3_postreg, create_tbss_4_prestats, - create_tbss_all, create_tbss_non_FA) + create_tbss_3_postreg, create_tbss_4_prestats, + create_tbss_all, create_tbss_non_FA) diff --git a/nipype/workflows/dmri/fsl/artifacts.py b/nipype/workflows/dmri/fsl/artifacts.py index 9579d55d02..03936c5695 100644 --- a/nipype/workflows/dmri/fsl/artifacts.py +++ b/nipype/workflows/dmri/fsl/artifacts.py @@ -15,6 +15,7 @@ cleanup_edge_pipeline, add_empty_vol, vsm2warp, compute_readout,) + def all_fmb_pipeline(name='hmc_sdc_ecc', fugue_params=dict(smooth3d=2.0)): """ Builds a pipeline including three artifact corrections: head-motion diff --git a/nipype/workflows/dmri/fsl/dti.py b/nipype/workflows/dmri/fsl/dti.py index a0a5d2db94..b3427645b8 100644 --- a/nipype/workflows/dmri/fsl/dti.py +++ b/nipype/workflows/dmri/fsl/dti.py @@ -7,7 +7,7 @@ from ....interfaces import fsl from ....algorithms import misc -#backwards compatibility +# backwards compatibility from .epi import create_eddy_correct_pipeline @@ -17,9 +17,9 @@ def transpose(samples_over_fibres): return np.squeeze(a.T).tolist() -def create_bedpostx_pipeline(name='bedpostx', params={'n_fibres':2, 'fudge':1, 'burn_in':1000, - 'n_jumps':1250, 'sample_every':25, 'model':2, - 'cnlinear':True}): +def create_bedpostx_pipeline(name='bedpostx', params={'n_fibres': 2, 'fudge': 1, 'burn_in': 1000, + 'n_jumps': 1250, 'sample_every': 25, 'model': 2, + 'cnlinear': True}): """ Creates a pipeline that does the same as bedpostx script from FSL - calculates diffusion model parameters (distributions not MLE) voxelwise for @@ -100,7 +100,7 @@ def create_bedpostx_pipeline(name='bedpostx', params={'n_fibres':2, 'fudge':1, ' wf.connect([ (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]), (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]), - #(xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]), + # (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]), (make_dyads, outputnode, [('dyads', 'dyads'), ('dispersion', 'dyads_disp')]) ]) @@ -129,9 +129,9 @@ def merge_and_mean(name='mm'): def bedpostx_parallel(name='bedpostx_parallel', compute_all_outputs=True, - params={'n_fibres':2, 'fudge':1, 'burn_in':1000, - 'n_jumps':1250, 'sample_every':25, 'model':1, - 'cnlinear':True}): + params={'n_fibres': 2, 'fudge': 1, 'burn_in': 1000, + 'n_jumps': 1250, 'sample_every': 25, 'model': 1, + 'cnlinear': True}): """ Does the same as :func:`.create_bedpostx_pipeline` by splitting the input dMRI in small ROIs that are better suited for parallel @@ -228,7 +228,7 @@ def bedpostx_parallel(name='bedpostx_parallel', wf.connect([ (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]), (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]), - #(xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]), + # (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]), (make_dyads, outputnode, [('dispersion', 'dyads_disp')]) ]) diff --git a/nipype/workflows/dmri/fsl/epi.py b/nipype/workflows/dmri/fsl/epi.py index 9f267f8a7e..2fa7b012a0 100644 --- a/nipype/workflows/dmri/fsl/epi.py +++ b/nipype/workflows/dmri/fsl/epi.py @@ -9,7 +9,6 @@ from ....interfaces import fsl as fsl - def create_dmri_preprocessing(name='dMRI_preprocessing', use_fieldmap=True, fieldmap_registration=False): """ Creates a workflow that chains the necessary pipelines to @@ 
-71,7 +70,7 @@ def create_dmri_preprocessing(name='dMRI_preprocessing', use_fieldmap=True, fiel """ warnings.warn(('This workflow is deprecated from v.1.0.0, use of available ' - 'nipype.workflows.dmri.preprocess.epi.all_*'), DeprecationWarning) + 'nipype.workflows.dmri.preprocess.epi.all_*'), DeprecationWarning) pipeline = pe.Workflow(name=name) @@ -88,7 +87,7 @@ def create_dmri_preprocessing(name='dMRI_preprocessing', use_fieldmap=True, fiel motion = create_motion_correct_pipeline() eddy = create_eddy_correct_pipeline() - if use_fieldmap: # we have a fieldmap, so lets use it (yay!) + if use_fieldmap: # we have a fieldmap, so lets use it (yay!) susceptibility = create_epidewarp_pipeline( fieldmap_registration=fieldmap_registration) @@ -96,21 +95,21 @@ def create_dmri_preprocessing(name='dMRI_preprocessing', use_fieldmap=True, fiel (inputnode, motion, [('in_file', 'inputnode.in_file'), ('in_bvec', 'inputnode.in_bvec'), ('ref_num', 'inputnode.ref_num')]), - (inputnode, eddy, [('ref_num', 'inputnode.ref_num')]), - (motion, eddy, [('outputnode.motion_corrected', 'inputnode.in_file')]), - (eddy, susceptibility, [('outputnode.eddy_corrected', 'inputnode.in_file')]), - (inputnode, susceptibility, [('ref_num', 'inputnode.ref_num'), - ('fieldmap_mag', 'inputnode.fieldmap_mag'), - ('fieldmap_pha', 'inputnode.fieldmap_pha'), - ('te_diff', 'inputnode.te_diff'), - ('epi_echospacing', 'inputnode.epi_echospacing'), - ('epi_rev_encoding', 'inputnode.epi_rev_encoding'), - ('pi_accel_factor', 'inputnode.pi_accel_factor'), - ('vsm_sigma', 'inputnode.vsm_sigma')]), - (motion, outputnode, [('outputnode.out_bvec', 'bvec_rotated')]), - (susceptibility, outputnode, [('outputnode.epi_corrected', 'dmri_corrected')]) + (inputnode, eddy, [('ref_num', 'inputnode.ref_num')]), + (motion, eddy, [('outputnode.motion_corrected', 'inputnode.in_file')]), + (eddy, susceptibility, [('outputnode.eddy_corrected', 'inputnode.in_file')]), + (inputnode, susceptibility, [('ref_num', 'inputnode.ref_num'), + ('fieldmap_mag', 'inputnode.fieldmap_mag'), + ('fieldmap_pha', 'inputnode.fieldmap_pha'), + ('te_diff', 'inputnode.te_diff'), + ('epi_echospacing', 'inputnode.epi_echospacing'), + ('epi_rev_encoding', 'inputnode.epi_rev_encoding'), + ('pi_accel_factor', 'inputnode.pi_accel_factor'), + ('vsm_sigma', 'inputnode.vsm_sigma')]), + (motion, outputnode, [('outputnode.out_bvec', 'bvec_rotated')]), + (susceptibility, outputnode, [('outputnode.epi_corrected', 'dmri_corrected')]) ]) - else: # we don't have a fieldmap, so we just carry on without it :( + else: # we don't have a fieldmap, so we just carry on without it :( pipeline.connect([ (inputnode, motion, [('in_file', 'inputnode.in_file'), ('in_bvec', 'inputnode.in_bvec'), @@ -164,40 +163,39 @@ def create_motion_correct_pipeline(name='motion_correct'): """ warnings.warn(('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.hmc_pipeline instead'), + 'nipype.workflows.dmri.preprocess.epi.hmc_pipeline instead'), DeprecationWarning) inputnode = pe.Node( niu.IdentityInterface( fields=['in_file', 'ref_num', 'in_bvec']), - name='inputnode') + name='inputnode') pipeline = pe.Workflow(name=name) split = pe.Node(fsl.Split(dimension='t'), name='split') pick_ref = pe.Node(niu.Select(), name='pick_ref') coregistration = pe.MapNode(fsl.FLIRT(no_search=True, interp='spline', - padding_size=1, dof=6), name='coregistration', iterfield=['in_file']) + padding_size=1, dof=6), name='coregistration', iterfield=['in_file']) rotate_bvecs = 
pe.Node(niu.Function(input_names=['in_bvec', 'in_matrix'], output_names=[ 'out_file'], function=_rotate_bvecs), name='rotate_b_matrix') merge = pe.Node(fsl.Merge(dimension='t'), name='merge') outputnode = pe.Node( niu.IdentityInterface( fields=['motion_corrected', 'out_bvec']), - name='outputnode') + name='outputnode') - pipeline.connect([ - (inputnode, split, [('in_file', 'in_file')]) - ,(split, pick_ref, [('out_files', 'inlist')]) - ,(inputnode, pick_ref, [('ref_num', 'index')]) - ,(split, coregistration, [('out_files', 'in_file')]) - ,(inputnode, rotate_bvecs, [('in_bvec', 'in_bvec')]) - ,(coregistration, rotate_bvecs, [('out_matrix_file', 'in_matrix')]) - ,(pick_ref, coregistration, [('out', 'reference')]) - ,(coregistration, merge, [('out_file', 'in_files')]) - ,(merge, outputnode, [('merged_file', 'motion_corrected')]) - ,(rotate_bvecs, outputnode, [('out_file', 'out_bvec')]) - ]) + pipeline.connect([(inputnode, split, [('in_file', 'in_file')]), + (split, pick_ref, [('out_files', 'inlist')]), + (inputnode, pick_ref, [('ref_num', 'index')]), + (split, coregistration, [('out_files', 'in_file')]), + (inputnode, rotate_bvecs, [('in_bvec', 'in_bvec')]), + (coregistration, rotate_bvecs, [('out_matrix_file', 'in_matrix')]), + (pick_ref, coregistration, [('out', 'reference')]), + (coregistration, merge, [('out_file', 'in_files')]), + (merge, outputnode, [('merged_file', 'motion_corrected')]), + (rotate_bvecs, outputnode, [('out_file', 'out_bvec')]) + ]) return pipeline @@ -233,38 +231,36 @@ def create_eddy_correct_pipeline(name='eddy_correct'): """ warnings.warn(('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.ecc_pipeline instead'), + 'nipype.workflows.dmri.preprocess.epi.ecc_pipeline instead'), DeprecationWarning) inputnode = pe.Node( niu.IdentityInterface(fields=['in_file', 'ref_num']), - name='inputnode') + name='inputnode') pipeline = pe.Workflow(name=name) split = pe.Node(fsl.Split(dimension='t'), name='split') pick_ref = pe.Node(niu.Select(), name='pick_ref') coregistration = pe.MapNode(fsl.FLIRT(no_search=True, padding_size=1, - interp='trilinear'), name='coregistration', iterfield=['in_file']) + interp='trilinear'), name='coregistration', iterfield=['in_file']) merge = pe.Node(fsl.Merge(dimension='t'), name='merge') outputnode = pe.Node( niu.IdentityInterface(fields=['eddy_corrected']), - name='outputnode') + name='outputnode') pipeline.connect([ - (inputnode, split, [('in_file', 'in_file')]) - ,(split, pick_ref, [('out_files', 'inlist')]) - ,(inputnode, pick_ref, [('ref_num', 'index')]) - ,(split, coregistration, [('out_files', 'in_file')]) - ,(pick_ref, coregistration, [('out', 'reference')]) - ,(coregistration, merge, [('out_file', 'in_files')]) - ,(merge, outputnode, [('merged_file', 'eddy_corrected')]) - ]) + (inputnode, split, [('in_file', 'in_file')]), + (split, pick_ref, [('out_files', 'inlist')]), + (inputnode, pick_ref, [('ref_num', 'index')]), + (split, coregistration, [('out_files', 'in_file')]), + (pick_ref, coregistration, [('out', 'reference')]), + (coregistration, merge, [('out_file', 'in_files')]), + (merge, outputnode, [('merged_file', 'eddy_corrected')]) + ]) return pipeline - - def fieldmap_correction(name='fieldmap_correction', nocheck=False): """ @@ -317,20 +313,20 @@ def fieldmap_correction(name='fieldmap_correction', nocheck=False): """ warnings.warn(('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.sdc_fmb instead'), + 'nipype.workflows.dmri.preprocess.epi.sdc_fmb instead'), 
DeprecationWarning) inputnode = pe.Node(niu.IdentityInterface( fields=['in_file', - 'in_mask', - 'fieldmap_pha', - 'fieldmap_mag', - 'te_diff', - 'epi_echospacing', - 'vsm_sigma', - 'encoding_direction' - ]), name='inputnode' - ) + 'in_mask', + 'fieldmap_pha', + 'fieldmap_mag', + 'te_diff', + 'epi_echospacing', + 'vsm_sigma', + 'encoding_direction' + ]), name='inputnode' + ) pipeline = pe.Workflow(name=name) @@ -339,10 +335,10 @@ def fieldmap_correction(name='fieldmap_correction', nocheck=False): t_size=1, t_min=0), name='select_magnitude') # Mask magnitude (it is required by PreparedFieldMap) - mask_mag = pe.Node( fsl.maths.ApplyMask(), name='mask_magnitude' ) + mask_mag = pe.Node(fsl.maths.ApplyMask(), name='mask_magnitude') # Run fsl_prepare_fieldmap - fslprep = pe.Node( fsl.PrepareFieldmap(), name='prepare_fieldmap' ) + fslprep = pe.Node(fsl.PrepareFieldmap(), name='prepare_fieldmap') if nocheck: fslprep.inputs.nocheck = True @@ -351,7 +347,7 @@ def fieldmap_correction(name='fieldmap_correction', nocheck=False): vsm = pe.Node(fsl.FUGUE(save_shift=True), name='generate_vsm') # VSM demean is not anymore present in the epi_reg script - #vsm_mean = pe.Node(niu.Function(input_names=['in_file', 'mask_file', 'in_unwarped'], output_names=[ + # vsm_mean = pe.Node(niu.Function(input_names=['in_file', 'mask_file', 'in_unwarped'], output_names=[ # 'out_file'], function=_vsm_remove_mean), name='vsm_mean_shift') # fugue_epi @@ -366,37 +362,36 @@ def fieldmap_correction(name='fieldmap_correction', nocheck=False): dimension='t'), name='dwi_merge') outputnode = pe.Node( - niu.IdentityInterface(fields=['epi_corrected','out_vsm']), - name='outputnode') + niu.IdentityInterface(fields=['epi_corrected', 'out_vsm']), + name='outputnode') pipeline.connect([ - (inputnode, select_mag, [('fieldmap_mag', 'in_file')]) - ,(inputnode, fslprep, [('fieldmap_pha', 'in_phase'),('te_diff', 'delta_TE') ]) - ,(inputnode, mask_mag, [('in_mask', 'mask_file' )]) - ,(select_mag, mask_mag, [('roi_file', 'in_file')]) - ,(mask_mag, fslprep, [('out_file', 'in_magnitude')]) - ,(fslprep, vsm, [('out_fieldmap', 'phasemap_in_file')]) - ,(inputnode, vsm, [('fieldmap_mag', 'in_file'), - ('encoding_direction','unwarp_direction'), - (('te_diff', _ms2sec), 'asym_se_time'), - ('vsm_sigma', 'smooth2d'), - (('epi_echospacing', _ms2sec), 'dwell_time')]) - ,(mask_mag, vsm, [('out_file', 'mask_file')]) - ,(inputnode, dwi_split, [('in_file', 'in_file')]) - ,(dwi_split, dwi_applyxfm, [('out_files', 'in_file')]) - ,(mask_mag, dwi_applyxfm, [('out_file', 'mask_file')]) - ,(vsm, dwi_applyxfm, [('shift_out_file', 'shift_in_file')]) - ,(inputnode, dwi_applyxfm, [('encoding_direction','unwarp_direction')]) - ,(dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]) - ,(dwi_merge, outputnode, [('merged_file', 'epi_corrected')]) - ,(vsm, outputnode, [('shift_out_file','out_vsm') ]) - ]) - + (inputnode, select_mag, [('fieldmap_mag', 'in_file')]), + (inputnode, fslprep, [('fieldmap_pha', 'in_phase'), ('te_diff', 'delta_TE')]), + (inputnode, mask_mag, [('in_mask', 'mask_file')]), + (select_mag, mask_mag, [('roi_file', 'in_file')]), + (mask_mag, fslprep, [('out_file', 'in_magnitude')]), + (fslprep, vsm, [('out_fieldmap', 'phasemap_in_file')]), + (inputnode, vsm, [('fieldmap_mag', 'in_file'), + ('encoding_direction', 'unwarp_direction'), + (('te_diff', _ms2sec), 'asym_se_time'), + ('vsm_sigma', 'smooth2d'), + (('epi_echospacing', _ms2sec), 'dwell_time')]), + (mask_mag, vsm, [('out_file', 'mask_file')]), + (inputnode, dwi_split, [('in_file', 'in_file')]), + 
(dwi_split, dwi_applyxfm, [('out_files', 'in_file')]), + (mask_mag, dwi_applyxfm, [('out_file', 'mask_file')]), + (vsm, dwi_applyxfm, [('shift_out_file', 'shift_in_file')]), + (inputnode, dwi_applyxfm, [('encoding_direction', 'unwarp_direction')]), + (dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]), + (dwi_merge, outputnode, [('merged_file', 'epi_corrected')]), + (vsm, outputnode, [('shift_out_file', 'out_vsm')]) + ]) return pipeline -def topup_correction( name='topup_correction' ): +def topup_correction(name='topup_correction'): """ .. deprecated:: 0.9.3 @@ -434,7 +429,7 @@ def topup_correction( name='topup_correction' ): """ warnings.warn(('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.sdc_peb instead'), + 'nipype.workflows.dmri.preprocess.epi.sdc_peb instead'), DeprecationWarning) pipeline = pe.Workflow(name=name) @@ -445,41 +440,41 @@ def topup_correction( name='topup_correction' ): 'encoding_direction', 'readout_times', 'ref_num' - ]), name='inputnode' - ) + ]), name='inputnode' + ) - outputnode = pe.Node( niu.IdentityInterface( + outputnode = pe.Node(niu.IdentityInterface( fields=['out_fieldcoef', 'out_movpar', 'out_enc_file', 'epi_corrected' - ]), name='outputnode' - ) + ]), name='outputnode' + ) - b0_dir = pe.Node( fsl.ExtractROI( t_size=1 ), name='b0_1' ) - b0_rev = pe.Node( fsl.ExtractROI( t_size=1 ), name='b0_2' ) - combin = pe.Node( niu.Merge(2), name='merge' ) - combin2 = pe.Node( niu.Merge(2), name='merge2' ) - merged = pe.Node( fsl.Merge( dimension='t' ), name='b0_comb' ) + b0_dir = pe.Node(fsl.ExtractROI(t_size=1), name='b0_1') + b0_rev = pe.Node(fsl.ExtractROI(t_size=1), name='b0_2') + combin = pe.Node(niu.Merge(2), name='merge') + combin2 = pe.Node(niu.Merge(2), name='merge2') + merged = pe.Node(fsl.Merge(dimension='t'), name='b0_comb') - topup = pe.Node( fsl.TOPUP(), name='topup' ) - applytopup = pe.Node( fsl.ApplyTOPUP(in_index=[1,2] ), name='applytopup' ) + topup = pe.Node(fsl.TOPUP(), name='topup') + applytopup = pe.Node(fsl.ApplyTOPUP(in_index=[1, 2]), name='applytopup') pipeline.connect([ - (inputnode, b0_dir, [('in_file_dir','in_file'),('ref_num','t_min')] ) - ,(inputnode, b0_rev, [('in_file_rev','in_file'),('ref_num','t_min')] ) - ,(inputnode, combin2, [('in_file_dir','in1'),('in_file_rev','in2') ] ) - ,(b0_dir, combin, [('roi_file','in1')] ) - ,(b0_rev, combin, [('roi_file','in2')] ) - ,(combin, merged, [('out', 'in_files')] ) - ,(merged, topup, [('merged_file','in_file')]) - ,(inputnode, topup, [('encoding_direction','encoding_direction'),('readout_times','readout_times') ]) - ,(topup, applytopup, [('out_fieldcoef','in_topup_fieldcoef'),('out_movpar','in_topup_movpar'), - ('out_enc_file','encoding_file')]) - ,(combin2, applytopup, [('out','in_files')] ) - ,(topup, outputnode, [('out_fieldcoef','out_fieldcoef'),('out_movpar','out_movpar'), - ('out_enc_file','out_enc_file') ]) - ,(applytopup,outputnode, [('out_corrected','epi_corrected')]) + (inputnode, b0_dir, [('in_file_dir', 'in_file'), ('ref_num', 't_min')]), + (inputnode, b0_rev, [('in_file_rev', 'in_file'), ('ref_num', 't_min')]), + (inputnode, combin2, [('in_file_dir', 'in1'), ('in_file_rev', 'in2')]), + (b0_dir, combin, [('roi_file', 'in1')]), + (b0_rev, combin, [('roi_file', 'in2')]), + (combin, merged, [('out', 'in_files')]), + (merged, topup, [('merged_file', 'in_file')]), + (inputnode, topup, [('encoding_direction', 'encoding_direction'), ('readout_times', 'readout_times')]), + (topup, applytopup, [('out_fieldcoef', 'in_topup_fieldcoef'), 
('out_movpar', 'in_topup_movpar'), + ('out_enc_file', 'encoding_file')]), + (combin2, applytopup, [('out', 'in_files')]), + (topup, outputnode, [('out_fieldcoef', 'out_fieldcoef'), ('out_movpar', 'out_movpar'), + ('out_enc_file', 'out_enc_file')]), + (applytopup, outputnode, [('out_corrected', 'epi_corrected')]) ]) return pipeline @@ -541,17 +536,17 @@ def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): DeprecationWarning) inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', - 'fieldmap_mag', - 'fieldmap_pha', - 'te_diff', - 'epi_echospacing', - 'epi_ph_encoding_dir', - 'epi_rev_encoding', - 'pi_accel_factor', - 'vsm_sigma', - 'ref_num', - 'unwarp_direction' - ]), name='inputnode') + 'fieldmap_mag', + 'fieldmap_pha', + 'te_diff', + 'epi_echospacing', + 'epi_ph_encoding_dir', + 'epi_rev_encoding', + 'pi_accel_factor', + 'vsm_sigma', + 'ref_num', + 'unwarp_direction' + ]), name='inputnode') pipeline = pe.Workflow(name=name) @@ -598,30 +593,30 @@ def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): outputnode = pe.Node( niu.IdentityInterface(fields=['epi_corrected']), - name='outputnode') + name='outputnode') pipeline.connect([ - (inputnode, dwell_time, [('epi_echospacing', 'dwell_time'), ('pi_accel_factor', 'pi_factor'), ('epi_rev_encoding', 'is_reverse_encoding')]) - ,(inputnode, select_mag, [('fieldmap_mag', 'in_file')]) - ,(inputnode, norm_pha, [('fieldmap_pha', 'in_file')]) - ,(select_mag, mask_mag, [('roi_file', 'in_file')]) - ,(mask_mag, mask_mag_dil, [('mask_file', 'in_file')]) - ,(select_mag, prelude, [('roi_file', 'magnitude_file')]) - ,(norm_pha, prelude, [('out_file', 'phase_file')]) - ,(mask_mag_dil, prelude, [('out_file', 'mask_file')]) - ,(prelude, fill_phase, [('unwrapped_phase_file', 'in_file')]) - ,(inputnode, vsm, [('fieldmap_mag', 'in_file')]) - ,(fill_phase, vsm, [('out_file', 'phasemap_in_file')]) - ,(inputnode, vsm, [(('te_diff', _ms2sec), 'asym_se_time'), ('vsm_sigma', 'smooth2d')]) - ,(dwell_time, vsm, [(('dwell_time', _ms2sec), 'dwell_time')]) - ,(mask_mag_dil, vsm, [('out_file', 'mask_file')]) - ,(mask_mag_dil, vsm_mean, [('out_file', 'mask_file')]) - ,(vsm, vsm_mean, [('unwarped_file', 'in_unwarped'), ('shift_out_file', 'in_file')]) - ,(inputnode, dwi_split, [('in_file', 'in_file')]) - ,(dwi_split, dwi_applyxfm, [('out_files', 'in_file')]) - ,(dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]) - ,(dwi_merge, outputnode, [('merged_file', 'epi_corrected')]) - ]) + (inputnode, dwell_time, [('epi_echospacing', 'dwell_time'), ('pi_accel_factor', 'pi_factor'), ('epi_rev_encoding', 'is_reverse_encoding')]), + (inputnode, select_mag, [('fieldmap_mag', 'in_file')]), + (inputnode, norm_pha, [('fieldmap_pha', 'in_file')]), + (select_mag, mask_mag, [('roi_file', 'in_file')]), + (mask_mag, mask_mag_dil, [('mask_file', 'in_file')]), + (select_mag, prelude, [('roi_file', 'magnitude_file')]), + (norm_pha, prelude, [('out_file', 'phase_file')]), + (mask_mag_dil, prelude, [('out_file', 'mask_file')]), + (prelude, fill_phase, [('unwrapped_phase_file', 'in_file')]), + (inputnode, vsm, [('fieldmap_mag', 'in_file')]), + (fill_phase, vsm, [('out_file', 'phasemap_in_file')]), + (inputnode, vsm, [(('te_diff', _ms2sec), 'asym_se_time'), ('vsm_sigma', 'smooth2d')]), + (dwell_time, vsm, [(('dwell_time', _ms2sec), 'dwell_time')]), + (mask_mag_dil, vsm, [('out_file', 'mask_file')]), + (mask_mag_dil, vsm_mean, [('out_file', 'mask_file')]), + (vsm, vsm_mean, [('unwarped_file', 'in_unwarped'), ('shift_out_file', 'in_file')]), + 
(inputnode, dwi_split, [('in_file', 'in_file')]), + (dwi_split, dwi_applyxfm, [('out_files', 'in_file')]), + (dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]), + (dwi_merge, outputnode, [('merged_file', 'epi_corrected')]) + ]) if fieldmap_registration: """ Register magfw to example epi. There are some parameters here that may need to be tweaked. Should probably strip the mag @@ -646,25 +641,25 @@ def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): interp='nearestneighbour'), name='msk_apply_xfm') pipeline.connect([ - (inputnode, select_epi, [('in_file', 'in_file'), ('ref_num', 't_min')]) - ,(select_epi, vsm_reg, [('roi_file', 'reference')]) - ,(vsm, vsm_fwd, [('shift_out_file', 'shift_in_file')]) - ,(mask_mag_dil, vsm_fwd, [('out_file', 'mask_file')]) - ,(inputnode, vsm_fwd, [('fieldmap_mag', 'in_file')]) - ,(vsm_fwd, vsm_reg, [('warped_file', 'in_file')]) - ,(vsm_reg, msk_applyxfm, [('out_matrix_file', 'in_matrix_file')]) - ,(select_epi, msk_applyxfm, [('roi_file', 'reference')]) - ,(mask_mag_dil, msk_applyxfm, [('out_file', 'in_file')]) - ,(vsm_reg, vsm_applyxfm, [('out_matrix_file', 'in_matrix_file')]) - ,(select_epi, vsm_applyxfm, [('roi_file', 'reference')]) - ,(vsm_mean, vsm_applyxfm, [('out_file', 'in_file')]) - ,(msk_applyxfm, dwi_applyxfm, [('out_file', 'mask_file')]) - ,(vsm_applyxfm, dwi_applyxfm, [('out_file', 'shift_in_file')]) + (inputnode, select_epi, [('in_file', 'in_file'), ('ref_num', 't_min')]), + (select_epi, vsm_reg, [('roi_file', 'reference')]), + (vsm, vsm_fwd, [('shift_out_file', 'shift_in_file')]), + (mask_mag_dil, vsm_fwd, [('out_file', 'mask_file')]), + (inputnode, vsm_fwd, [('fieldmap_mag', 'in_file')]), + (vsm_fwd, vsm_reg, [('warped_file', 'in_file')]), + (vsm_reg, msk_applyxfm, [('out_matrix_file', 'in_matrix_file')]), + (select_epi, msk_applyxfm, [('roi_file', 'reference')]), + (mask_mag_dil, msk_applyxfm, [('out_file', 'in_file')]), + (vsm_reg, vsm_applyxfm, [('out_matrix_file', 'in_matrix_file')]), + (select_epi, vsm_applyxfm, [('roi_file', 'reference')]), + (vsm_mean, vsm_applyxfm, [('out_file', 'in_file')]), + (msk_applyxfm, dwi_applyxfm, [('out_file', 'mask_file')]), + (vsm_applyxfm, dwi_applyxfm, [('out_file', 'shift_in_file')]) ]) else: pipeline.connect([ - (mask_mag_dil, dwi_applyxfm, [('out_file', 'mask_file')]) - ,( vsm_mean, dwi_applyxfm, [('out_file', 'shift_in_file')]) + (mask_mag_dil, dwi_applyxfm, [('out_file', 'mask_file')]), + (vsm_mean, dwi_applyxfm, [('out_file', 'shift_in_file')]) ]) return pipeline @@ -679,12 +674,12 @@ def _rotate_bvecs(in_bvec, in_matrix): name, _ = os.path.splitext(name) out_file = os.path.abspath('./%s_rotated.bvec' % name) bvecs = np.loadtxt(in_bvec) - new_bvecs = np.zeros(shape=bvecs.T.shape) #pre-initialise array, 3 col format + new_bvecs = np.zeros(shape=bvecs.T.shape) # pre-initialise array, 3 col format - for i, vol_matrix in enumerate(in_matrix[0::]): #start index at 0 + for i, vol_matrix in enumerate(in_matrix[0::]): # start index at 0 bvec = np.matrix(bvecs[:, i]) rot = np.matrix(np.loadtxt(vol_matrix)[0:3, 0:3]) - new_bvecs[i] = (np.array(rot * bvec.T).T)[0] #fill each volume with x,y,z as we go along + new_bvecs[i] = (np.array(rot * bvec.T).T)[0] # fill each volume with x,y,z as we go along np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') return out_file @@ -715,7 +710,8 @@ def _compute_dwelltime(dwell_time=0.68, pi_factor=1.0, is_reverse_encoding=False return dwell_time -def _effective_echospacing( dwell_time, pi_factor=1.0 ): + +def 
_effective_echospacing(dwell_time, pi_factor=1.0): dwelltime = 1.0e-3 * dwell_time * (1.0 / pi_factor) return dwelltime diff --git a/nipype/workflows/dmri/fsl/tbss.py b/nipype/workflows/dmri/fsl/tbss.py index 071ad70d9d..792263e370 100644 --- a/nipype/workflows/dmri/fsl/tbss.py +++ b/nipype/workflows/dmri/fsl/tbss.py @@ -51,8 +51,8 @@ def create_tbss_1_preproc(name='tbss_1_preproc'): # Prep the FA images prepfa = pe.MapNode(fsl.ImageMaths(suffix="_prep"), - name="prepfa", - iterfield=['in_file', 'op_string']) + name="prepfa", + iterfield=['in_file', 'op_string']) # Slicer slicer = pe.MapNode(fsl.Slicer(all_axial=True, image_width=1280), @@ -61,13 +61,13 @@ def create_tbss_1_preproc(name='tbss_1_preproc'): # Create a mask getmask1 = pe.MapNode(fsl.ImageMaths(op_string="-bin", suffix="_mask"), - name="getmask1", - iterfield=['in_file']) + name="getmask1", + iterfield=['in_file']) getmask2 = pe.MapNode(fsl.MultiImageMaths(op_string="-dilD -dilD -sub 1 -abs -add %s"), - name="getmask2", - iterfield=['in_file', 'operand_files']) + name="getmask2", + iterfield=['in_file', 'operand_files']) -# $FSLDIR/bin/fslmaths FA/${f}_FA_mask -dilD -dilD -sub 1 -abs -add FA/${f}_FA_mask FA/${f}_FA_mask -odt char + # $FSLDIR/bin/fslmaths FA/${f}_FA_mask -dilD -dilD -sub 1 -abs -add FA/${f}_FA_mask FA/${f}_FA_mask -odt char # Define the tbss1 workflow tbss1 = pe.Workflow(name=name) tbss1.connect([ @@ -81,9 +81,9 @@ def create_tbss_1_preproc(name='tbss_1_preproc'): # Define the outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list", - "mask_list", - "slices"]), - name="outputnode") + "mask_list", + "slices"]), + name="outputnode") tbss1.connect([ (prepfa, outputnode, [("out_file", "fa_list")]), (getmask2, outputnode, [("out_file", "mask_list")]), @@ -120,14 +120,14 @@ def create_tbss_2_reg(name="tbss_2_reg"): # Define the inputnode inputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list", - "mask_list", - "target"]), + "mask_list", + "target"]), name="inputnode") # Flirt the FA image to the target flirt = pe.MapNode(interface=fsl.FLIRT(dof=12), - iterfield=['in_file', 'in_weight'], - name="flirt") + iterfield=['in_file', 'in_weight'], + name="flirt") fnirt = pe.MapNode(interface=fsl.FNIRT(fieldcoeff_file=True), iterfield=['in_file', 'inmask_file', 'affine_file'], @@ -137,8 +137,8 @@ def create_tbss_2_reg(name="tbss_2_reg"): warn('NO FSL found') else: config_file = os.path.join(os.environ["FSLDIR"], - "etc/flirtsch/FA_2_FMRIB58_1mm.cnf") - fnirt.inputs.config_file=config_file + "etc/flirtsch/FA_2_FMRIB58_1mm.cnf") + fnirt.inputs.config_file = config_file # Define the registration workflow tbss2 = pe.Workflow(name=name) @@ -146,11 +146,11 @@ def create_tbss_2_reg(name="tbss_2_reg"): # Connect up the registration workflow tbss2.connect([ (inputnode, flirt, [("fa_list", "in_file"), - ("target", "reference"), - ("mask_list", "in_weight")]), + ("target", "reference"), + ("mask_list", "in_weight")]), (inputnode, fnirt, [("fa_list", "in_file"), - ("mask_list", "inmask_file"), - ("target", "ref_file")]), + ("mask_list", "inmask_file"), + ("target", "ref_file")]), (flirt, fnirt, [("out_matrix_file", "affine_file")]), ]) @@ -194,13 +194,13 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): # Create the inputnode inputnode = pe.Node(interface=util.IdentityInterface(fields=['field_list', - 'fa_list']), + 'fa_list']), name='inputnode') # Apply the warpfield to the masked FA image applywarp = pe.MapNode(interface=fsl.ApplyWarp(), iterfield=['in_file', 
'field_file'], - name="applywarp") + name="applywarp") if fsl.no_fsl(): warn('NO FSL found') else: @@ -208,7 +208,7 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): # Merge the FA files into a 4D file mergefa = pe.Node(fsl.Merge(dimension="t"), - name="mergefa") + name="mergefa") # Get a group mask groupmask = pe.Node(fsl.ImageMaths(op_string="-max 0 -Tmin -bin", @@ -223,7 +223,7 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): tbss3 = pe.Workflow(name=name) tbss3.connect([ (inputnode, applywarp, [("fa_list", "in_file"), - ("field_list", "field_file")]), + ("field_list", "field_file")]), (applywarp, mergefa, [("out_file", "in_files")]), (mergefa, groupmask, [("merged_file", "in_file")]), (mergefa, maskgroup, [("merged_file", "in_file")]), @@ -232,16 +232,16 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): # Create outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=['groupmask', - 'skeleton_file', - 'meanfa_file', - 'mergefa_file']), + 'skeleton_file', + 'meanfa_file', + 'mergefa_file']), name='outputnode') if estimate_skeleton: # Take the mean over the fourth dimension meanfa = pe.Node(fsl.ImageMaths(op_string="-Tmean", - suffix="_mean"), - name="meanfa") + suffix="_mean"), + name="meanfa") # Use the mean FA volume to generate a tract skeleton makeskeleton = pe.Node(fsl.TractSkeleton(skeleton_file=True), @@ -255,20 +255,20 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): (maskgroup, outputnode, [('out_file', 'mergefa_file')]) ]) else: - #$FSLDIR/bin/fslmaths $FSLDIR/data/standard/FMRIB58_FA_1mm -mas mean_FA_mask mean_FA + # $FSLDIR/bin/fslmaths $FSLDIR/data/standard/FMRIB58_FA_1mm -mas mean_FA_mask mean_FA maskstd = pe.Node(fsl.ImageMaths(op_string="-mas", - suffix="_masked"), - name="maskstd") + suffix="_masked"), + name="maskstd") maskstd.inputs.in_file = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz") - #$FSLDIR/bin/fslmaths mean_FA -bin mean_FA_mask + # $FSLDIR/bin/fslmaths mean_FA -bin mean_FA_mask binmaskstd = pe.Node(fsl.ImageMaths(op_string="-bin"), - name="binmaskstd") + name="binmaskstd") - #$FSLDIR/bin/fslmaths all_FA -mas mean_FA_mask all_FA + # $FSLDIR/bin/fslmaths all_FA -mas mean_FA_mask all_FA maskgroup2 = pe.Node(fsl.ImageMaths(op_string="-mas", - suffix="_masked"), - name="maskgroup2") + suffix="_masked"), + name="maskgroup2") tbss3.connect([ (groupmask, maskstd, [("out_file", "in_file2")]), @@ -321,10 +321,10 @@ def create_tbss_4_prestats(name='tbss_4_prestats'): """ # Create inputnode inputnode = pe.Node(interface=util.IdentityInterface(fields=['groupmask', - 'skeleton_file', - 'meanfa_file', - 'mergefa_file', - 'skeleton_thresh']), + 'skeleton_file', + 'meanfa_file', + 'mergefa_file', + 'skeleton_thresh']), name='inputnode') # Mask the skeleton at the threshold @@ -343,8 +343,8 @@ def create_tbss_4_prestats(name='tbss_4_prestats'): # Project the FA values onto the skeleton projectfa = pe.Node(fsl.TractSkeleton(project_data=True, - skeleton_file=True, - use_cingulum_mask=True), + skeleton_file=True, + use_cingulum_mask=True), name="projectfa") # Create tbss4 workflow @@ -352,8 +352,8 @@ def create_tbss_4_prestats(name='tbss_4_prestats'): tbss4.connect([ (inputnode, invertmask, [("groupmask", "in_file")]), (inputnode, skeletonmask, [("skeleton_file", "in_file"), - (('skeleton_thresh', tbss4_op_string), - 'op_string')]), + (('skeleton_thresh', tbss4_op_string), + 'op_string')]), (inputnode, projectfa, [('skeleton_thresh', 'threshold'), 
("meanfa_file", "in_file"), ("mergefa_file", "data_file")]), @@ -364,15 +364,15 @@ def create_tbss_4_prestats(name='tbss_4_prestats'): # Create the outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=['projectedfa_file', - 'skeleton_mask', - 'distance_map', - 'skeleton_file']), + 'skeleton_mask', + 'distance_map', + 'skeleton_file']), name='outputnode') tbss4.connect([ (projectfa, outputnode, [('projected_data', 'projectedfa_file'), - ('skeleton_file', 'skeleton_file') - ]), + ('skeleton_file', 'skeleton_file') + ]), (distancemap, outputnode, [('distance_map', 'distance_map')]), (skeletonmask, outputnode, [('out_file', 'skeleton_mask')]) ]) @@ -406,7 +406,7 @@ def create_tbss_all(name='tbss_all', estimate_skeleton=True): # Define the inputnode inputnode = pe.Node(interface=util.IdentityInterface(fields=['fa_list', - 'skeleton_thresh']), + 'skeleton_thresh']), name='inputnode') tbss1 = create_tbss_1_preproc(name='tbss1') @@ -424,7 +424,7 @@ def create_tbss_all(name='tbss_all', estimate_skeleton=True): (inputnode, tbss4, [('skeleton_thresh', 'inputnode.skeleton_thresh')]), (tbss1, tbss2, [('outputnode.fa_list', 'inputnode.fa_list'), - ('outputnode.mask_list', 'inputnode.mask_list')]), + ('outputnode.mask_list', 'inputnode.mask_list')]), (tbss1, tbss3, [('outputnode.fa_list', 'inputnode.fa_list')]), (tbss2, tbss3, [('outputnode.field_list', 'inputnode.field_list')]), (tbss3, tbss4, [ @@ -437,13 +437,13 @@ def create_tbss_all(name='tbss_all', estimate_skeleton=True): # Define the outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=['groupmask', - 'skeleton_file3', - 'meanfa_file', - 'mergefa_file', - 'projectedfa_file', - 'skeleton_file4', - 'skeleton_mask', - 'distance_map']), + 'skeleton_file3', + 'meanfa_file', + 'mergefa_file', + 'projectedfa_file', + 'skeleton_file4', + 'skeleton_mask', + 'distance_map']), name='outputnode') outputall_node = pe.Node(interface=util.IdentityInterface( fields=['fa_list1', @@ -456,25 +456,25 @@ def create_tbss_all(name='tbss_all', estimate_skeleton=True): 'projectedfa_file4', 'skeleton_mask4', 'distance_map4']), - name='outputall_node') + name='outputall_node') tbss_all.connect([ (tbss3, outputnode, [('outputnode.meanfa_file', 'meanfa_file'), - ('outputnode.mergefa_file', 'mergefa_file'), - ('outputnode.groupmask', 'groupmask'), - ('outputnode.skeleton_file', 'skeleton_file3'), - ]), + ('outputnode.mergefa_file', 'mergefa_file'), + ('outputnode.groupmask', 'groupmask'), + ('outputnode.skeleton_file', 'skeleton_file3'), + ]), (tbss4, outputnode, [('outputnode.projectedfa_file', 'projectedfa_file'), - ('outputnode.skeleton_file', 'skeleton_file4'), - ('outputnode.skeleton_mask', 'skeleton_mask'), - ('outputnode.distance_map', 'distance_map'), - ]), + ('outputnode.skeleton_file', 'skeleton_file4'), + ('outputnode.skeleton_mask', 'skeleton_mask'), + ('outputnode.distance_map', 'distance_map'), + ]), (tbss1, outputall_node, [('outputnode.fa_list', 'fa_list1'), - ('outputnode.mask_list', 'mask_list1'), + ('outputnode.mask_list', 'mask_list1'), ]), (tbss2, outputall_node, [('outputnode.field_list', 'field_list2'), - ]), + ]), (tbss3, outputall_node, [ ('outputnode.meanfa_file', 'meanfa_file3'), ('outputnode.mergefa_file', 'mergefa_file3'), @@ -543,14 +543,14 @@ def create_tbss_non_FA(name='tbss_non_FA'): applywarp.inputs.ref_file = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz") # Merge the non FA files into a 4D file merge = pe.Node(fsl.Merge(dimension="t"), name="merge") - #merged_file="all_FA.nii.gz" + # 
merged_file="all_FA.nii.gz" maskgroup = pe.Node(fsl.ImageMaths(op_string="-mas", suffix="_masked"), name="maskgroup") projectfa = pe.Node(fsl.TractSkeleton(project_data=True, - #projected_data = 'test.nii.gz', - use_cingulum_mask=True - ), + # projected_data = 'test.nii.gz', + use_cingulum_mask=True + ), name="projectfa") tbss_non_FA = pe.Workflow(name=name) @@ -568,7 +568,7 @@ def create_tbss_non_FA(name='tbss_non_FA'): (inputnode, projectfa, [('skeleton_thresh', 'threshold'), ("meanfa_file", "in_file"), ("distance_map", "distance_map"), - ("all_FA_file", 'data_file') + ("all_FA_file", 'data_file') ]), ]) @@ -578,6 +578,6 @@ def create_tbss_non_FA(name='tbss_non_FA'): name='outputnode') tbss_non_FA.connect([ (projectfa, outputnode, [('projected_data', 'projected_nonFA_file'), - ]), + ]), ]) return tbss_non_FA diff --git a/nipype/workflows/dmri/fsl/tests/test_dti.py b/nipype/workflows/dmri/fsl/tests/test_dti.py index 2e94a4abbd..c012c1b3b6 100644 --- a/nipype/workflows/dmri/fsl/tests/test_dti.py +++ b/nipype/workflows/dmri/fsl/tests/test_dti.py @@ -78,7 +78,7 @@ def test_create_bedpostx_pipeline(): (slice_dwi, original_bedpostx, [("roi_file", "dwi")]), (slice_dwi, nipype_bedpostx, [("roi_file", "inputnode.dwi")]), - (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples",list_to_filename), "volume1")]), + (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", list_to_filename), "volume1")]), (original_bedpostx, test_f1, [("mean_fsamples", "volume2")]), ]) diff --git a/nipype/workflows/dmri/fsl/tests/test_tbss.py b/nipype/workflows/dmri/fsl/tests/test_tbss.py index 6548c4b5ac..9bd401ea45 100644 --- a/nipype/workflows/dmri/fsl/tests/test_tbss.py +++ b/nipype/workflows/dmri/fsl/tests/test_tbss.py @@ -58,7 +58,7 @@ def _tbss_test_helper(estimate_skeleton): tbss1_original_datasource.inputs.base_directory = tbss1_orig_dir tbss1_original_datasource.inputs.template = 'FA/%s_FA%s.nii.gz' tbss1_original_datasource.inputs.template_args = dict(fa_list=[[subjects, '']], - mask_list=[[subjects, '_mask']]) + mask_list=[[subjects, '_mask']]) tbss1_test_fa = pe.MapNode(util.AssertEqual(), name="tbss1_fa_test", iterfield=['volume1', 'volume2']) tbss1_test_mask = pe.MapNode(util.AssertEqual(), name="tbss1_mask_test", iterfield=['volume1', 'volume2']) @@ -78,15 +78,15 @@ def _tbss_test_helper(estimate_skeleton): pipeline.connect(tbss2_original_datasource, 'field_list', tbss2_test_field, 'volume2') tbss3_original_datasource = pe.Node(nio.DataGrabber(outfields=['groupmask', - 'skeleton_file', - 'meanfa_file', - 'mergefa_file'], sort_filelist=False), name='tbss3_original_datasource') + 'skeleton_file', + 'meanfa_file', + 'mergefa_file'], sort_filelist=False), name='tbss3_original_datasource') tbss3_original_datasource.inputs.base_directory = tbss3_orig_dir tbss3_original_datasource.inputs.template = 'stats/%s.nii.gz' tbss3_original_datasource.inputs.template_args = dict(groupmask=[['mean_FA_mask']], - skeleton_file=[['mean_FA_skeleton']], - meanfa_file=[['mean_FA']], - mergefa_file=[['all_FA']]) + skeleton_file=[['mean_FA_skeleton']], + meanfa_file=[['mean_FA']], + mergefa_file=[['all_FA']]) tbss3_test_groupmask = pe.Node(util.AssertEqual(), name="tbss3_test_groupmask") tbss3_test_skeleton_file = pe.Node(util.AssertEqual(), name="tbss3_test_skeleton_file") @@ -103,11 +103,11 @@ def _tbss_test_helper(estimate_skeleton): pipeline.connect(tbss3_original_datasource, 'mergefa_file', tbss3_test_mergefa_file, 'volume2') tbss4_original_datasource = pe.Node(nio.DataGrabber(outfields=['all_FA_skeletonised', - 
'mean_FA_skeleton_mask'], sort_filelist=False), name='tbss4_original_datasource') + 'mean_FA_skeleton_mask'], sort_filelist=False), name='tbss4_original_datasource') tbss4_original_datasource.inputs.base_directory = tbss4_orig_dir tbss4_original_datasource.inputs.template = 'stats/%s.nii.gz' tbss4_original_datasource.inputs.template_args = dict(all_FA_skeletonised=[['all_FA_skeletonised']], - mean_FA_skeleton_mask=[['mean_FA_skeleton_mask']]) + mean_FA_skeleton_mask=[['mean_FA_skeleton_mask']]) tbss4_test_all_FA_skeletonised = pe.Node(util.AssertEqual(), name="tbss4_test_all_FA_skeletonised") tbss4_test_mean_FA_skeleton_mask = pe.Node(util.AssertEqual(), name="tbss4_test_mean_FA_skeleton_mask") @@ -120,13 +120,17 @@ def _tbss_test_helper(estimate_skeleton): os.chdir(old_dir) shutil.rmtree(test_dir) -#this test is disabled until we figure out what is wrong with TBSS in 5.0.9 +# this test is disabled until we figure out what is wrong with TBSS in 5.0.9 + + @skipif(no_fsl) @skipif(no_fsl_course_data) def disabled_tbss_est_skeleton(): _tbss_test_helper(True) -#this test is disabled until we figure out what is wrong with TBSS in 5.0.9 +# this test is disabled until we figure out what is wrong with TBSS in 5.0.9 + + @skipif(no_fsl) @skipif(no_fsl_course_data) def disabled_tbss_est_skeleton_use_precomputed_skeleton(): diff --git a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py index ec09fbc8c3..63a40fd013 100644 --- a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py +++ b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py @@ -130,7 +130,7 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 b-values and b-vectors stored in FSL's format are converted into a single encoding file for MRTrix. """ - fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(),name='fsl2mrtrix') + fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') """ Distortions induced by eddy currents are corrected prior to fitting the tensors. @@ -147,11 +147,11 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 * Fractional anisotropy """ - dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(),name='dwi2tensor') - tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(),name='tensor2vector') - tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(),name='tensor2adc') - tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(),name='tensor2fa') - MRconvert_fa = pe.Node(interface=mrtrix.MRConvert(),name='MRconvert_fa') + dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') + tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(), name='tensor2vector') + tensor2adc = pe.Node(interface=mrtrix.Tensor2ApparentDiffusion(), name='tensor2adc') + tensor2fa = pe.Node(interface=mrtrix.Tensor2FractionalAnisotropy(), name='tensor2fa') + MRconvert_fa = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert_fa') MRconvert_fa.inputs.extension = 'nii' """ @@ -161,11 +161,11 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 put through a simple thresholding routine, and smoothed using a 3x3 median filter. 
""" - MRconvert = pe.Node(interface=mrtrix.MRConvert(),name='MRconvert') + MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert') MRconvert.inputs.extract_at_axis = 3 MRconvert.inputs.extract_at_coordinate = [0] - threshold_b0 = pe.Node(interface=mrtrix.Threshold(),name='threshold_b0') - median3d = pe.Node(interface=mrtrix.MedianFilter3D(),name='median3d') + threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0') + median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3d') """ The brain mask is also used to help identify single-fiber voxels. @@ -174,11 +174,11 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 thresholding the result to obtain some highly anisotropic within-brain voxels. """ - erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_firstpass') - erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_secondpass') - MRmultiply = pe.Node(interface=mrtrix.MRMultiply(),name='MRmultiply') + erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_firstpass') + erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_secondpass') + MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply') MRmult_merge = pe.Node(interface=util.Merge(2), name='MRmultiply_merge') - threshold_FA = pe.Node(interface=mrtrix.Threshold(),name='threshold_FA') + threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA') threshold_FA.inputs.absolute_threshold_value = 0.7 """ @@ -187,9 +187,9 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 thresholding it at a reasonably high level. """ - bet = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0') - gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(),name='gen_WM_mask') - threshold_wmmask = pe.Node(interface=mrtrix.Threshold(),name='threshold_wmmask') + bet = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') + gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(), name='gen_WM_mask') + threshold_wmmask = pe.Node(interface=mrtrix.Threshold(), name='threshold_wmmask') threshold_wmmask.inputs.absolute_threshold_value = 0.4 """ @@ -202,9 +202,9 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 """ - estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(),name='estimateresponse') + estimateresponse = pe.Node(interface=mrtrix.EstimateResponseForSH(), name='estimateresponse') estimateresponse.inputs.maximum_harmonic_order = 6 - csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(),name='csdeconv') + csdeconv = pe.Node(interface=mrtrix.ConstrainedSphericalDeconvolution(), name='csdeconv') csdeconv.inputs.maximum_harmonic_order = 6 """ @@ -212,14 +212,14 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 The tracts are then used to generate a tract-density image, and they are also converted to TrackVis format. 
""" - probCSDstreamtrack = pe.Node(interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(),name='probCSDstreamtrack') + probCSDstreamtrack = pe.Node(interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(), name='probCSDstreamtrack') probCSDstreamtrack.inputs.inputmodel = 'SD_PROB' probCSDstreamtrack.inputs.desired_number_of_tracks = 150000 - tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(),name='tracks2prob') + tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob') tracks2prob.inputs.colour = True MRconvert_tracks2prob = MRconvert_fa.clone(name='MRconvert_tracks2prob') - tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(),name='tck2trk') - trk2tdi = pe.Node(interface=dipy.TrackDensityMap(),name='trk2tdi') + tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk') + trk2tdi = pe.Node(interface=dipy.TrackDensityMap(), name='trk2tdi') """ Structural segmentation nodes @@ -232,7 +232,7 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 so that they are in the same space as the regions of interest. """ - coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister') + coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister') coregister.inputs.cost = ('normmi') """ @@ -292,7 +292,6 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 Here we connect our processing pipeline. """ - """ Connecting the inputs, FreeSurfer nodes, and conversions -------------------------------------------------------- @@ -304,37 +303,37 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 First, we connect the input node to the FreeSurfer input nodes. """ - mapping.connect([(inputnode_within, FreeSurferSource,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSource,[("subject_id","subject_id")])]) + mapping.connect([(inputnode_within, FreeSurferSource, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, FreeSurferSource, [("subject_id", "subject_id")])]) - mapping.connect([(inputnode_within, FreeSurferSourceLH,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceLH,[("subject_id","subject_id")])]) + mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subject_id", "subject_id")])]) - mapping.connect([(inputnode_within, FreeSurferSourceRH,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceRH,[("subject_id","subject_id")])]) + mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subject_id", "subject_id")])]) - mapping.connect([(inputnode_within, parcellate,[("subjects_dir","subjects_dir")])]) - mapping.connect([(inputnode_within, parcellate,[("subject_id","subject_id")])]) - mapping.connect([(parcellate, mri_convert_ROI_scale500,[('roi_file','in_file')])]) + mapping.connect([(inputnode_within, parcellate, [("subjects_dir", "subjects_dir")])]) + mapping.connect([(inputnode_within, parcellate, [("subject_id", "subject_id")])]) + mapping.connect([(parcellate, mri_convert_ROI_scale500, [('roi_file', 'in_file')])]) """ Nifti conversion for subject's stripped brain image from Freesurfer: """ - mapping.connect([(FreeSurferSource, mri_convert_Brain,[('brain','in_file')])]) + mapping.connect([(FreeSurferSource, 
mri_convert_Brain, [('brain', 'in_file')])]) """ Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) """ - mapping.connect([(FreeSurferSourceLH, mris_convertLH,[('pial','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRH,[('pial','in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite,[('white','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite,[('white','in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated,[('inflated','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated,[('inflated','in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere,[('sphere','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere,[('sphere','in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', 'in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', 'in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, [('inflated', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, [('inflated', 'in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, [('sphere', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, [('sphere', 'in_file')])]) """ The annotation files are converted using the pial surface as a map via the MRIsConvert interface. @@ -342,12 +341,11 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 specifically (rather than e.g. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. 
""" - mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,[('pial','in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,[('pial','in_file')])]) + mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [('pial', 'in_file')])]) + mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [('pial', 'in_file')])]) mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])]) - """ Diffusion Processing -------------------- @@ -355,17 +353,17 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 """ mapping.connect([(inputnode_within, fsl2mrtrix, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) - mapping.connect([(inputnode_within, eddycorrect,[("dwi","inputnode.in_file")])]) - mapping.connect([(eddycorrect, dwi2tensor,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(fsl2mrtrix, dwi2tensor,[("encoding_file","encoding_file")])]) - - mapping.connect([(dwi2tensor, tensor2vector,[['tensor','in_file']]), - (dwi2tensor, tensor2adc,[['tensor','in_file']]), - (dwi2tensor, tensor2fa,[['tensor','in_file']]), - ]) - mapping.connect([(tensor2fa, MRmult_merge,[("FA","in1")])]) - mapping.connect([(tensor2fa, MRconvert_fa,[("FA","in_file")])]) + ("bvals", "bval_file")])]) + mapping.connect([(inputnode_within, eddycorrect, [("dwi", "inputnode.in_file")])]) + mapping.connect([(eddycorrect, dwi2tensor, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", "encoding_file")])]) + + mapping.connect([(dwi2tensor, tensor2vector, [['tensor', 'in_file']]), + (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), + (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), + ]) + mapping.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) + mapping.connect([(tensor2fa, MRconvert_fa, [("FA", "in_file")])]) """ @@ -373,51 +371,51 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 fractional anisotropy image, and thresholds it to get the single-fiber voxels. """ - mapping.connect([(eddycorrect, MRconvert,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(MRconvert, threshold_b0,[("converted","in_file")])]) - mapping.connect([(threshold_b0, median3d,[("out_file","in_file")])]) - mapping.connect([(median3d, erode_mask_firstpass,[("out_file","in_file")])]) - mapping.connect([(erode_mask_firstpass, erode_mask_secondpass,[("out_file","in_file")])]) - mapping.connect([(erode_mask_secondpass, MRmult_merge,[("out_file","in2")])]) - mapping.connect([(MRmult_merge, MRmultiply,[("out","in_files")])]) - mapping.connect([(MRmultiply, threshold_FA,[("out_file","in_file")])]) + mapping.connect([(eddycorrect, MRconvert, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) + mapping.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) + mapping.connect([(median3d, erode_mask_firstpass, [("out_file", "in_file")])]) + mapping.connect([(erode_mask_firstpass, erode_mask_secondpass, [("out_file", "in_file")])]) + mapping.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", "in2")])]) + mapping.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) + mapping.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) """ Here the thresholded white matter mask is created for seeding the tractography. 
""" - mapping.connect([(eddycorrect, bet,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(eddycorrect, gen_WM_mask,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(bet, gen_WM_mask,[("mask_file","binary_mask")])]) - mapping.connect([(fsl2mrtrix, gen_WM_mask,[("encoding_file","encoding_file")])]) - mapping.connect([(gen_WM_mask, threshold_wmmask,[("WMprobabilitymap","in_file")])]) + mapping.connect([(eddycorrect, bet, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(eddycorrect, gen_WM_mask, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) + mapping.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", "encoding_file")])]) + mapping.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", "in_file")])]) """ Next we estimate the fiber response distribution. """ - mapping.connect([(eddycorrect, estimateresponse,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(fsl2mrtrix, estimateresponse,[("encoding_file","encoding_file")])]) - mapping.connect([(threshold_FA, estimateresponse,[("out_file","mask_image")])]) + mapping.connect([(eddycorrect, estimateresponse, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", "encoding_file")])]) + mapping.connect([(threshold_FA, estimateresponse, [("out_file", "mask_image")])]) """ Run constrained spherical deconvolution. """ - mapping.connect([(eddycorrect, csdeconv,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(gen_WM_mask, csdeconv,[("WMprobabilitymap","mask_image")])]) - mapping.connect([(estimateresponse, csdeconv,[("response","response_file")])]) - mapping.connect([(fsl2mrtrix, csdeconv,[("encoding_file","encoding_file")])]) + mapping.connect([(eddycorrect, csdeconv, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", "mask_image")])]) + mapping.connect([(estimateresponse, csdeconv, [("response", "response_file")])]) + mapping.connect([(fsl2mrtrix, csdeconv, [("encoding_file", "encoding_file")])]) """ Connect the tractography and compute the tract density image. 
""" - mapping.connect([(threshold_wmmask, probCSDstreamtrack,[("out_file","seed_file")])]) - mapping.connect([(csdeconv, probCSDstreamtrack,[("spherical_harmonics_image","in_file")])]) - mapping.connect([(probCSDstreamtrack, tracks2prob,[("tracked","in_file")])]) - mapping.connect([(eddycorrect, tracks2prob,[("outputnode.eddy_corrected","template_file")])]) - mapping.connect([(tracks2prob, MRconvert_tracks2prob,[("tract_image","in_file")])]) + mapping.connect([(threshold_wmmask, probCSDstreamtrack, [("out_file", "seed_file")])]) + mapping.connect([(csdeconv, probCSDstreamtrack, [("spherical_harmonics_image", "in_file")])]) + mapping.connect([(probCSDstreamtrack, tracks2prob, [("tracked", "in_file")])]) + mapping.connect([(eddycorrect, tracks2prob, [("outputnode.eddy_corrected", "template_file")])]) + mapping.connect([(tracks2prob, MRconvert_tracks2prob, [("tract_image", "in_file")])]) """ Structural Processing @@ -425,51 +423,51 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 First, we coregister the diffusion image to the structural image """ - mapping.connect([(eddycorrect, coregister,[("outputnode.eddy_corrected","in_file")])]) - mapping.connect([(mri_convert_Brain, coregister,[('out_file','reference')])]) + mapping.connect([(eddycorrect, coregister, [("outputnode.eddy_corrected", "in_file")])]) + mapping.connect([(mri_convert_Brain, coregister, [('out_file', 'reference')])]) """ The MRtrix-tracked fibers are converted to TrackVis format (with voxel and data dimensions grabbed from the DWI). The connectivity matrix is created with the transformed .trk fibers and the parcellation file. """ - mapping.connect([(eddycorrect, tck2trk,[("outputnode.eddy_corrected","image_file")])]) - mapping.connect([(mri_convert_Brain, tck2trk,[("out_file","registration_image_file")])]) - mapping.connect([(coregister, tck2trk,[("out_matrix_file","matrix_file")])]) - mapping.connect([(probCSDstreamtrack, tck2trk,[("tracked","in_file")])]) - mapping.connect([(tck2trk, creatematrix,[("out_file","tract_file")])]) - mapping.connect([(tck2trk, trk2tdi,[("out_file","in_file")])]) + mapping.connect([(eddycorrect, tck2trk, [("outputnode.eddy_corrected", "image_file")])]) + mapping.connect([(mri_convert_Brain, tck2trk, [("out_file", "registration_image_file")])]) + mapping.connect([(coregister, tck2trk, [("out_matrix_file", "matrix_file")])]) + mapping.connect([(probCSDstreamtrack, tck2trk, [("tracked", "in_file")])]) + mapping.connect([(tck2trk, creatematrix, [("out_file", "tract_file")])]) + mapping.connect([(tck2trk, trk2tdi, [("out_file", "in_file")])]) mapping.connect(inputnode_within, 'resolution_network_file', creatematrix, 'resolution_network_file') - mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_file")])]) - mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_mat_file")])]) - mapping.connect([(parcellate, creatematrix,[("roi_file","roi_file")])]) + mapping.connect([(inputnode_within, creatematrix, [("subject_id", "out_matrix_file")])]) + mapping.connect([(inputnode_within, creatematrix, [("subject_id", "out_matrix_mat_file")])]) + mapping.connect([(parcellate, creatematrix, [("roi_file", "roi_file")])]) """ The merge nodes defined earlier are used here to create lists of the files which are destined for the CFFConverter. 
""" - mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])]) - mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])]) - mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])]) - mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])]) - mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])]) - mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])]) - mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])]) - mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])]) + mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) + mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) + mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", "in3")])]) + mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", "in4")])]) + mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", "in5")])]) + mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", "in6")])]) + mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", "in7")])]) + mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", "in8")])]) - mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])]) - mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])]) + mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", "in1")])]) + mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", "in2")])]) - mapping.connect([(parcellate, niftiVolumes,[("roi_file","in1")])]) - mapping.connect([(eddycorrect, niftiVolumes,[("outputnode.eddy_corrected","in2")])]) - mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])]) + mapping.connect([(parcellate, niftiVolumes, [("roi_file", "in1")])]) + mapping.connect([(eddycorrect, niftiVolumes, [("outputnode.eddy_corrected", "in2")])]) + mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])]) - mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", "in1")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", "in2")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", "in3")])]) + mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", "in4")])]) """ This block actually connects the merged lists to the CFF converter. We pass the surfaces @@ -479,34 +477,33 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 product. 
""" - mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])]) - mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])]) - mapping.connect([(creatematrix, CFFConverter,[("matrix_files","gpickled_networks")])]) - mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])]) - mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])]) - mapping.connect([(creatematrix, CFFConverter,[("filtered_tractography","tract_files")])]) - mapping.connect([(inputnode_within, CFFConverter,[("subject_id","title")])]) + mapping.connect([(giftiSurfaces, CFFConverter, [("out", "gifti_surfaces")])]) + mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) + mapping.connect([(creatematrix, CFFConverter, [("matrix_files", "gpickled_networks")])]) + mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) + mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) + mapping.connect([(creatematrix, CFFConverter, [("filtered_tractography", "tract_files")])]) + mapping.connect([(inputnode_within, CFFConverter, [("subject_id", "title")])]) """ The graph theoretical metrics which have been generated are placed into another CFF file. """ - mapping.connect([(inputnode_within, networkx,[("subject_id","inputnode.extra_field")])]) - mapping.connect([(creatematrix, networkx,[("intersection_matrix_file","inputnode.network_file")])]) - - mapping.connect([(networkx, NxStatsCFFConverter,[("outputnode.network_files","gpickled_networks")])]) - mapping.connect([(giftiSurfaces, NxStatsCFFConverter,[("out","gifti_surfaces")])]) - mapping.connect([(giftiLabels, NxStatsCFFConverter,[("out","gifti_labels")])]) - mapping.connect([(niftiVolumes, NxStatsCFFConverter,[("out","nifti_volumes")])]) - mapping.connect([(fiberDataArrays, NxStatsCFFConverter,[("out","data_files")])]) - mapping.connect([(inputnode_within, NxStatsCFFConverter,[("subject_id","title")])]) + mapping.connect([(inputnode_within, networkx, [("subject_id", "inputnode.extra_field")])]) + mapping.connect([(creatematrix, networkx, [("intersection_matrix_file", "inputnode.network_file")])]) - mapping.connect([(inputnode_within, cmats_to_csv,[("subject_id","inputnode.extra_field")])]) - mapping.connect([(creatematrix, cmats_to_csv,[("matlab_matrix_files","inputnode.matlab_matrix_files")])]) - mapping.connect([(creatematrix, nfibs_to_csv,[("stats_file","in_file")])]) - mapping.connect([(nfibs_to_csv, merge_nfib_csvs,[("csv_files","in_files")])]) - mapping.connect([(inputnode_within, merge_nfib_csvs,[("subject_id","extra_field")])]) + mapping.connect([(networkx, NxStatsCFFConverter, [("outputnode.network_files", "gpickled_networks")])]) + mapping.connect([(giftiSurfaces, NxStatsCFFConverter, [("out", "gifti_surfaces")])]) + mapping.connect([(giftiLabels, NxStatsCFFConverter, [("out", "gifti_labels")])]) + mapping.connect([(niftiVolumes, NxStatsCFFConverter, [("out", "nifti_volumes")])]) + mapping.connect([(fiberDataArrays, NxStatsCFFConverter, [("out", "data_files")])]) + mapping.connect([(inputnode_within, NxStatsCFFConverter, [("subject_id", "title")])]) + mapping.connect([(inputnode_within, cmats_to_csv, [("subject_id", "inputnode.extra_field")])]) + mapping.connect([(creatematrix, cmats_to_csv, [("matlab_matrix_files", "inputnode.matlab_matrix_files")])]) + mapping.connect([(creatematrix, nfibs_to_csv, [("stats_file", "in_file")])]) + mapping.connect([(nfibs_to_csv, merge_nfib_csvs, [("csv_files", "in_files")])]) + mapping.connect([(inputnode_within, 
merge_nfib_csvs, [("subject_id", "extra_field")])]) """ Create a higher-level workflow @@ -518,57 +515,57 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50 inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir"]), name="inputnode") - outputnode = pe.Node(interface = util.IdentityInterface(fields=["fa", - "struct", - "tracts", - "tracks2prob", - "connectome", - "nxstatscff", - "nxmatlab", - "nxcsv", - "fiber_csv", - "cmatrices_csv", - "nxmergedcsv", - "cmatrix", - "networks", - "filtered_tracts", - "rois", - "odfs", - "tdi", - "mean_fiber_length", - "median_fiber_length", - "fiber_length_std"]), - name="outputnode") + outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa", + "struct", + "tracts", + "tracks2prob", + "connectome", + "nxstatscff", + "nxmatlab", + "nxcsv", + "fiber_csv", + "cmatrices_csv", + "nxmergedcsv", + "cmatrix", + "networks", + "filtered_tracts", + "rois", + "odfs", + "tdi", + "mean_fiber_length", + "median_fiber_length", + "fiber_length_std"]), + name="outputnode") connectivity = pe.Workflow(name="connectivity") - connectivity.base_output_dir=name - connectivity.base_dir=name + connectivity.base_output_dir = name + connectivity.base_dir = name connectivity.connect([(inputnode, mapping, [("dwi", "inputnode_within.dwi"), - ("bvals", "inputnode_within.bvals"), - ("bvecs", "inputnode_within.bvecs"), - ("subject_id", "inputnode_within.subject_id"), - ("subjects_dir", "inputnode_within.subjects_dir")]) - ]) + ("bvals", "inputnode_within.bvals"), + ("bvecs", "inputnode_within.bvecs"), + ("subject_id", "inputnode_within.subject_id"), + ("subjects_dir", "inputnode_within.subjects_dir")]) + ]) connectivity.connect([(mapping, outputnode, [("tck2trk.out_file", "tracts"), - ("CFFConverter.connectome_file", "connectome"), - ("NxStatsCFFConverter.connectome_file", "nxstatscff"), - ("CreateMatrix.matrix_mat_file", "cmatrix"), - ("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"), - ("CreateMatrix.median_fiber_length_matrix_mat_file", "median_fiber_length"), - ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"), - ("CreateMatrix.matrix_files", "networks"), - ("CreateMatrix.filtered_tractographies", "filtered_tracts"), - ("merge_nfib_csvs.csv_file", "fiber_csv"), - ("mri_convert_ROI_scale500.out_file", "rois"), - ("trk2tdi.out_file", "tdi"), - ("csdeconv.spherical_harmonics_image", "odfs"), - ("mri_convert_Brain.out_file", "struct"), - ("MRconvert_fa.converted", "fa"), - ("MRconvert_tracks2prob.converted", "tracks2prob")]) + ("CFFConverter.connectome_file", "connectome"), + ("NxStatsCFFConverter.connectome_file", "nxstatscff"), + ("CreateMatrix.matrix_mat_file", "cmatrix"), + ("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"), + ("CreateMatrix.median_fiber_length_matrix_mat_file", "median_fiber_length"), + ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"), + ("CreateMatrix.matrix_files", "networks"), + ("CreateMatrix.filtered_tractographies", "filtered_tracts"), + ("merge_nfib_csvs.csv_file", "fiber_csv"), + ("mri_convert_ROI_scale500.out_file", "rois"), + ("trk2tdi.out_file", "tdi"), + ("csdeconv.spherical_harmonics_image", "odfs"), + ("mri_convert_Brain.out_file", "struct"), + ("MRconvert_fa.converted", "fa"), + ("MRconvert_tracks2prob.converted", "tracks2prob")]) ]) - connectivity.connect([(cmats_to_csv, outputnode,[("outputnode.csv_file","cmatrices_csv")])]) - connectivity.connect([(networkx, 
outputnode,[("outputnode.csv_files","nxcsv")])]) + connectivity.connect([(cmats_to_csv, outputnode, [("outputnode.csv_file", "cmatrices_csv")])]) + connectivity.connect([(networkx, outputnode, [("outputnode.csv_files", "nxcsv")])]) return connectivity diff --git a/nipype/workflows/dmri/mrtrix/diffusion.py b/nipype/workflows/dmri/mrtrix/diffusion.py index f6c9b1af51..52ac4692bd 100644 --- a/nipype/workflows/dmri/mrtrix/diffusion.py +++ b/nipype/workflows/dmri/mrtrix/diffusion.py @@ -3,7 +3,8 @@ from ....interfaces import fsl as fsl from ....interfaces import mrtrix as mrtrix -def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type = 'probabilistic'): + +def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type='probabilistic'): """Creates a pipeline that does the same diffusion processing as in the :doc:`../../users/examples/dmri_mrtrix_dti` example script. Given a diffusion-weighted image, b-values, and b-vectors, the workflow will return the tractography @@ -34,7 +35,7 @@ def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type = 'probabilisti """ - inputnode = pe.Node(interface = util.IdentityInterface(fields=["dwi", + inputnode = pe.Node(interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), name="inputnode") @@ -42,10 +43,10 @@ def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type = 'probabilisti bet = pe.Node(interface=fsl.BET(), name="bet") bet.inputs.mask = True - fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(),name='fsl2mrtrix') + fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') fsl2mrtrix.inputs.invert_y = True - dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(),name='dwi2tensor') + dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') tensor2vector = pe.Node(interface=mrtrix.Tensor2Vector(), name='tensor2vector') @@ -59,21 +60,21 @@ def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type = 'probabilisti erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(), name='erode_mask_secondpass') - threshold_b0 = pe.Node(interface=mrtrix.Threshold(),name='threshold_b0') + threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0') - threshold_FA = pe.Node(interface=mrtrix.Threshold(),name='threshold_FA') + threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA') threshold_FA.inputs.absolute_threshold_value = 0.7 threshold_wmmask = pe.Node(interface=mrtrix.Threshold(), name='threshold_wmmask') threshold_wmmask.inputs.absolute_threshold_value = 0.4 - MRmultiply = pe.Node(interface=mrtrix.MRMultiply(),name='MRmultiply') + MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply') MRmult_merge = pe.Node(interface=util.Merge(2), name='MRmultiply_merge') - median3d = pe.Node(interface=mrtrix.MedianFilter3D(),name='median3D') + median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3D') - MRconvert = pe.Node(interface=mrtrix.MRConvert(),name='MRconvert') + MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert') MRconvert.inputs.extract_at_axis = 3 MRconvert.inputs.extract_at_coordinate = [0] @@ -94,64 +95,64 @@ def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type = 'probabilisti name='CSDstreamtrack') CSDstreamtrack.inputs.desired_number_of_tracks = 15000 - tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(),name='tracks2prob') + tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob') tracks2prob.inputs.colour = True - tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(),name='tck2trk') + tck2trk = 
pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk') workflow = pe.Workflow(name=name) - workflow.base_output_dir=name + workflow.base_output_dir = name workflow.connect([(inputnode, fsl2mrtrix, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) - workflow.connect([(inputnode, dwi2tensor,[("dwi","in_file")])]) - workflow.connect([(fsl2mrtrix, dwi2tensor,[("encoding_file","encoding_file")])]) - - workflow.connect([(dwi2tensor, tensor2vector,[['tensor','in_file']]), - (dwi2tensor, tensor2adc,[['tensor','in_file']]), - (dwi2tensor, tensor2fa,[['tensor','in_file']]), - ]) - - workflow.connect([(inputnode, MRconvert,[("dwi","in_file")])]) - workflow.connect([(MRconvert, threshold_b0,[("converted","in_file")])]) - workflow.connect([(threshold_b0, median3d,[("out_file","in_file")])]) - workflow.connect([(median3d, erode_mask_firstpass,[("out_file","in_file")])]) - workflow.connect([(erode_mask_firstpass, erode_mask_secondpass,[("out_file","in_file")])]) - - workflow.connect([(tensor2fa, MRmult_merge,[("FA","in1")])]) - workflow.connect([(erode_mask_secondpass, MRmult_merge,[("out_file","in2")])]) - workflow.connect([(MRmult_merge, MRmultiply,[("out","in_files")])]) - workflow.connect([(MRmultiply, threshold_FA,[("out_file","in_file")])]) - workflow.connect([(threshold_FA, estimateresponse,[("out_file","mask_image")])]) - - workflow.connect([(inputnode, bet,[("dwi","in_file")])]) - workflow.connect([(inputnode, gen_WM_mask,[("dwi","in_file")])]) - workflow.connect([(bet, gen_WM_mask,[("mask_file","binary_mask")])]) - workflow.connect([(fsl2mrtrix, gen_WM_mask,[("encoding_file","encoding_file")])]) - - workflow.connect([(inputnode, estimateresponse,[("dwi","in_file")])]) - workflow.connect([(fsl2mrtrix, estimateresponse,[("encoding_file","encoding_file")])]) - - workflow.connect([(inputnode, csdeconv,[("dwi","in_file")])]) - workflow.connect([(gen_WM_mask, csdeconv,[("WMprobabilitymap","mask_image")])]) - workflow.connect([(estimateresponse, csdeconv,[("response","response_file")])]) - workflow.connect([(fsl2mrtrix, csdeconv,[("encoding_file","encoding_file")])]) - - workflow.connect([(gen_WM_mask, threshold_wmmask,[("WMprobabilitymap","in_file")])]) - workflow.connect([(threshold_wmmask, CSDstreamtrack,[("out_file","seed_file")])]) - workflow.connect([(csdeconv, CSDstreamtrack,[("spherical_harmonics_image","in_file")])]) + ("bvals", "bval_file")])]) + workflow.connect([(inputnode, dwi2tensor, [("dwi", "in_file")])]) + workflow.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", "encoding_file")])]) + + workflow.connect([(dwi2tensor, tensor2vector, [['tensor', 'in_file']]), + (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), + (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), + ]) + + workflow.connect([(inputnode, MRconvert, [("dwi", "in_file")])]) + workflow.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) + workflow.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) + workflow.connect([(median3d, erode_mask_firstpass, [("out_file", "in_file")])]) + workflow.connect([(erode_mask_firstpass, erode_mask_secondpass, [("out_file", "in_file")])]) + + workflow.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) + workflow.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", "in2")])]) + workflow.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) + workflow.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) + workflow.connect([(threshold_FA, estimateresponse, [("out_file", "mask_image")])]) + + workflow.connect([(inputnode, bet, 
[("dwi", "in_file")])]) + workflow.connect([(inputnode, gen_WM_mask, [("dwi", "in_file")])]) + workflow.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) + workflow.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", "encoding_file")])]) + + workflow.connect([(inputnode, estimateresponse, [("dwi", "in_file")])]) + workflow.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", "encoding_file")])]) + + workflow.connect([(inputnode, csdeconv, [("dwi", "in_file")])]) + workflow.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", "mask_image")])]) + workflow.connect([(estimateresponse, csdeconv, [("response", "response_file")])]) + workflow.connect([(fsl2mrtrix, csdeconv, [("encoding_file", "encoding_file")])]) + + workflow.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", "in_file")])]) + workflow.connect([(threshold_wmmask, CSDstreamtrack, [("out_file", "seed_file")])]) + workflow.connect([(csdeconv, CSDstreamtrack, [("spherical_harmonics_image", "in_file")])]) if tractography_type == 'probabilistic': - workflow.connect([(CSDstreamtrack, tracks2prob,[("tracked","in_file")])]) - workflow.connect([(inputnode, tracks2prob,[("dwi","template_file")])]) + workflow.connect([(CSDstreamtrack, tracks2prob, [("tracked", "in_file")])]) + workflow.connect([(inputnode, tracks2prob, [("dwi", "template_file")])]) - workflow.connect([(CSDstreamtrack, tck2trk,[("tracked","in_file")])]) - workflow.connect([(inputnode, tck2trk,[("dwi","image_file")])]) + workflow.connect([(CSDstreamtrack, tck2trk, [("tracked", "in_file")])]) + workflow.connect([(inputnode, tck2trk, [("dwi", "image_file")])]) output_fields = ["fa", "tracts_trk", "csdeconv", "tracts_tck"] if tractography_type == 'probabilistic': output_fields.append("tdi") - outputnode = pe.Node(interface = util.IdentityInterface(fields=output_fields), - name="outputnode") + outputnode = pe.Node(interface=util.IdentityInterface(fields=output_fields), + name="outputnode") workflow.connect([(CSDstreamtrack, outputnode, [("tracked", "tracts_tck")]), (csdeconv, outputnode, [("spherical_harmonics_image", "csdeconv")]), diff --git a/nipype/workflows/dmri/mrtrix/group_connectivity.py b/nipype/workflows/dmri/mrtrix/group_connectivity.py index 16e9e276c7..c4048c969a 100644 --- a/nipype/workflows/dmri/mrtrix/group_connectivity.py +++ b/nipype/workflows/dmri/mrtrix/group_connectivity.py @@ -16,6 +16,7 @@ else: import cmp + def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_dir, output_dir, template_args_dict=0): """Creates a pipeline that performs MRtrix structural connectivity processing on groups of subjects. 
Given a diffusion-weighted image, and text files containing @@ -67,14 +68,14 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ if template_args_dict == 0: info = dict(dwi=[['subject_id', 'dwi']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) + bvecs=[['subject_id', 'bvecs']], + bvals=[['subject_id', 'bvals']]) else: info = template_args_dict datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=list(info.keys())), - name = 'datasource') + name='datasource') datasource.inputs.template = "%s/%s" datasource.inputs.base_directory = data_dir @@ -96,27 +97,27 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ l1pipeline = pe.Workflow(name="l1pipeline_"+group_id) l1pipeline.base_dir = output_dir l1pipeline.base_output_dir = group_id - l1pipeline.connect([(subj_infosource, conmapper,[('subject_id', 'inputnode.subject_id')])]) - l1pipeline.connect([(subj_infosource, datasource,[('subject_id', 'subject_id')])]) + l1pipeline.connect([(subj_infosource, conmapper, [('subject_id', 'inputnode.subject_id')])]) + l1pipeline.connect([(subj_infosource, datasource, [('subject_id', 'subject_id')])]) l1pipeline.connect([(datasource, conmapper, [("dwi", "inputnode.dwi"), - ("bvals", "inputnode.bvals"), - ("bvecs", "inputnode.bvecs"), + ("bvals", "inputnode.bvals"), + ("bvecs", "inputnode.bvecs"), ])]) l1pipeline.connect([(conmapper, datasink, [("outputnode.connectome", "@l1output.cff"), - ("outputnode.nxstatscff", "@l1output.nxstatscff"), - ("outputnode.nxmatlab", "@l1output.nxmatlab"), - ("outputnode.nxcsv", "@l1output.nxcsv"), - ("outputnode.fiber_csv", "@l1output.fiber_csv"), - ("outputnode.cmatrices_csv", "@l1output.cmatrices_csv"), - ("outputnode.fa", "@l1output.fa"), - ("outputnode.filtered_tracts", "@l1output.filtered_tracts"), - ("outputnode.cmatrix", "@l1output.cmatrix"), - ("outputnode.rois", "@l1output.rois"), - ("outputnode.odfs", "@l1output.odfs"), - ("outputnode.struct", "@l1output.struct"), - ("outputnode.networks", "@l1output.networks"), - ("outputnode.mean_fiber_length", "@l1output.mean_fiber_length"), - ("outputnode.fiber_length_std", "@l1output.fiber_length_std"), - ])]) - l1pipeline.connect([(group_infosource, datasink,[('group_id','@group_id')])]) + ("outputnode.nxstatscff", "@l1output.nxstatscff"), + ("outputnode.nxmatlab", "@l1output.nxmatlab"), + ("outputnode.nxcsv", "@l1output.nxcsv"), + ("outputnode.fiber_csv", "@l1output.fiber_csv"), + ("outputnode.cmatrices_csv", "@l1output.cmatrices_csv"), + ("outputnode.fa", "@l1output.fa"), + ("outputnode.filtered_tracts", "@l1output.filtered_tracts"), + ("outputnode.cmatrix", "@l1output.cmatrix"), + ("outputnode.rois", "@l1output.rois"), + ("outputnode.odfs", "@l1output.odfs"), + ("outputnode.struct", "@l1output.struct"), + ("outputnode.networks", "@l1output.networks"), + ("outputnode.mean_fiber_length", "@l1output.mean_fiber_length"), + ("outputnode.fiber_length_std", "@l1output.fiber_length_std"), + ])]) + l1pipeline.connect([(group_infosource, datasink, [('group_id', '@group_id')])]) return l1pipeline diff --git a/nipype/workflows/fmri/fsl/__init__.py b/nipype/workflows/fmri/fsl/__init__.py index 0cbe9ae677..c223176943 100644 --- a/nipype/workflows/fmri/fsl/__init__.py +++ b/nipype/workflows/fmri/fsl/__init__.py @@ -3,5 +3,5 @@ create_reg_workflow) from .estimate import create_modelfit_workflow, create_fixed_effects_flow -#backwards compatibility -from ...rsfmri.fsl.resting import create_resting_preproc \ No newline at end of 
file +# backwards compatibility +from ...rsfmri.fsl.resting import create_resting_preproc diff --git a/nipype/workflows/fmri/fsl/estimate.py b/nipype/workflows/fmri/fsl/estimate.py index beb48847af..79e14a68d3 100644 --- a/nipype/workflows/fmri/fsl/estimate.py +++ b/nipype/workflows/fmri/fsl/estimate.py @@ -44,7 +44,7 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): version = 0 if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): + LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): version = 507 modelfit = pe.Workflow(name=name) @@ -110,7 +110,6 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): 'parameter_estimates']), name='outputspec') - """ Setup the connections """ @@ -125,12 +124,12 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): (inputspec, modelestimate, [('film_threshold', 'threshold'), ('functional_data', 'in_file')]), (level1design, modelgen, [('fsf_files', 'fsf_file'), - ('ev_files', 'ev_files')]), + ('ev_files', 'ev_files')]), (modelgen, modelestimate, [('design_file', 'design_file')]), - (merge_contrasts, ztopval,[('out', 'in_file')]), + (merge_contrasts, ztopval, [('out', 'in_file')]), (ztopval, outputspec, [('out_file', 'pfiles')]), - (merge_contrasts, outputspec,[('out', 'zfiles')]), + (merge_contrasts, outputspec, [('out', 'zfiles')]), (modelestimate, outputspec, [('param_estimates', 'parameter_estimates'), ('dof_file', 'dof_file')]), ]) @@ -139,11 +138,11 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): (modelgen, conestimate, [('con_file', 'tcon_file'), ('fcon_file', 'fcon_file')]), (modelestimate, conestimate, [('param_estimates', 'param_estimates'), - ('sigmasquareds', 'sigmasquareds'), - ('corrections', 'corrections'), - ('dof_file', 'dof_file')]), + ('sigmasquareds', 'sigmasquareds'), + ('corrections', 'corrections'), + ('dof_file', 'dof_file')]), (conestimate, merge_contrasts, [('zstats', 'in1'), - ('zfstats', 'in2')]), + ('zfstats', 'in2')]), (conestimate, outputspec, [('copes', 'copes'), ('varcopes', 'varcopes')]), ]) @@ -154,7 +153,7 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): (modelestimate, merge_contrasts, [('zstats', 'in1'), ('zfstats', 'in2')]), (modelestimate, outputspec, [('copes', 'copes'), - ('varcopes', 'varcopes')]), + ('varcopes', 'varcopes')]), ]) return modelfit @@ -229,8 +228,8 @@ def create_fixed_effects_flow(name='fixedfx'): name="copemerge") varcopemerge = pe.MapNode(interface=fsl.Merge(dimension='t'), - iterfield=['in_files'], - name="varcopemerge") + iterfield=['in_files'], + name="varcopemerge") """ Use :class:`nipype.interfaces.fsl.L2Model` to generate subject and condition @@ -282,8 +281,8 @@ def get_dofvolumes(dof_files, cope_files): (varcopemerge, flameo, [('merged_file', 'var_cope_file')]), (level2model, flameo, [('design_mat', 'design_file'), - ('design_con', 't_con_file'), - ('design_grp', 'cov_split_file')]), + ('design_con', 't_con_file'), + ('design_grp', 'cov_split_file')]), (gendof, flameo, [('dof_volume', 'dof_var_cope_file')]), (flameo, outputspec, [('res4d', 'res4d'), ('copes', 'copes'), diff --git a/nipype/workflows/fmri/fsl/preprocess.py b/nipype/workflows/fmri/fsl/preprocess.py index 608ab97e8d..2d8cbea2f3 100644 --- a/nipype/workflows/fmri/fsl/preprocess.py +++ b/nipype/workflows/fmri/fsl/preprocess.py @@ -12,8 +12,10 @@ from ...smri.freesurfer.utils import create_getmask_flow from .... 
import LooseVersion + def getthreshop(thresh): - return ['-thr %.10f -Tmin -bin'%(0.1*val[1]) for val in thresh] + return ['-thr %.10f -Tmin -bin' %(0.1*val[1]) for val in thresh] + def pickfirst(files): if isinstance(files, list): @@ -21,6 +23,7 @@ def pickfirst(files): else: return files + def pickmiddle(files): from nibabel import load import numpy as np @@ -29,6 +32,7 @@ def pickmiddle(files): middlevol.append(int(np.ceil(load(f).get_shape()[3] / 2))) return middlevol + def pickvol(filenames, fileidx, which): from nibabel import load import numpy as np @@ -42,23 +46,28 @@ def pickvol(filenames, fileidx, which): raise Exception('unknown value for volume selection : %s' % which) return idx + def getbtthresh(medianvals): return [0.75*val for val in medianvals] + def chooseindex(fwhm): - if fwhm<1: + if fwhm < 1: return [0] else: return [1] + def getmeanscale(medianvals): return ['-mul %.10f' % (10000. / val) for val in medianvals] + def getusans(x): return [[tuple([val[0], 0.75 * val[1]])] for val in x] tolist = lambda x: [x] -highpass_operand = lambda x:'-bptf %.10f -1'%x +highpass_operand = lambda x: '-bptf %.10f -1' %x + def create_parallelfeat_preproc(name='featpreproc', highpass=True): """Preprocess each run with FSL independently of the others @@ -118,26 +127,26 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): 'highpass']), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference', - 'motion_parameters', - 'realigned_files', - 'motion_plots', - 'mask', - 'smoothed_files', - 'highpassed_files', - 'mean']), - name='outputspec') + 'motion_parameters', + 'realigned_files', + 'motion_plots', + 'mask', + 'smoothed_files', + 'highpassed_files', + 'mean']), + name='outputspec') else: inputnode = pe.Node(interface=util.IdentityInterface(fields=['func', 'fwhm']), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference', - 'motion_parameters', - 'realigned_files', - 'motion_plots', - 'mask', - 'smoothed_files', - 'mean']), - name='outputspec') + 'motion_parameters', + 'realigned_files', + 'motion_plots', + 'mask', + 'smoothed_files', + 'mean']), + name='outputspec') """ Set up a node to define outputs for the preprocessing workflow @@ -151,8 +160,8 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): """ img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float', - op_string = '', - suffix='_dtype'), + op_string='', + suffix='_dtype'), iterfield=['in_file'], name='img2float') featpreproc.connect(inputnode, 'func', img2float, 'in_file') @@ -163,7 +172,7 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): extract_ref = pe.MapNode(interface=fsl.ExtractROI(t_size=1), iterfield=['in_file', 't_min'], - name = 'extractref') + name='extractref') featpreproc.connect(img2float, 'out_file', extract_ref, 'in_file') featpreproc.connect(img2float, ('out_file', pickmiddle), extract_ref, 't_min') @@ -173,10 +182,10 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): Realign the functional runs to the reference (1st volume of first run) """ - motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True, - save_plots = True), + motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats=True, + save_plots=True), name='realign', - iterfield = ['in_file', 'ref_file']) + iterfield=['in_file', 'ref_file']) featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file') featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file') 
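# A minimal, hypothetical sketch of what the reference-volume selection wired
# above computes. pickmiddle(), defined earlier in this module, loads each 4D
# run and returns the index of its middle volume, which extract_ref cuts out
# and MCFLIRT then uses as the realignment reference. The run length below is
# an assumed value for illustration only.
import numpy as np
n_volumes = 100                             # assumed number of volumes in a run
middle_index = int(np.ceil(n_volumes / 2))  # same arithmetic as pickmiddle()
assert middle_index == 50                   # volume the run would be realigned to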
featpreproc.connect(motion_correct, 'par_file', outputnode, 'motion_parameters') @@ -187,8 +196,8 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): """ plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) + name='plot_motion', + iterfield=['in_file']) plot_motion.iterables = ('plot_type', ['rotations', 'translations']) featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots') @@ -197,7 +206,7 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): Extract the mean volume of the first functional run """ - meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string = '-Tmean', + meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), iterfield=['in_file'], name='meanfunc') @@ -207,11 +216,11 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): Strip the skull from the mean functional to generate a mask """ - meanfuncmask = pe.MapNode(interface=fsl.BET(mask = True, - no_output=True, - frac = 0.3), + meanfuncmask = pe.MapNode(interface=fsl.BET(mask=True, + no_output=True, + frac=0.3), iterfield=['in_file'], - name = 'meanfuncmask') + name='meanfuncmask') featpreproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file') """ @@ -221,27 +230,25 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), iterfield=['in_file', 'in_file2'], - name = 'maskfunc') + name='maskfunc') featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') featpreproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2') - """ Determine the 2nd and 98th percentile intensities of each functional run """ getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'), - iterfield = ['in_file'], + iterfield=['in_file'], name='getthreshold') featpreproc.connect(maskfunc, 'out_file', getthresh, 'in_file') - """ Threshold the first run of the functional data at 10% of the 98th percentile """ threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char', - suffix='_thresh'), + suffix='_thresh'), iterfield=['in_file', 'op_string'], name='threshold') featpreproc.connect(maskfunc, 'out_file', threshold, 'in_file') @@ -257,7 +264,7 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): """ medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield = ['in_file', 'mask_file'], + iterfield=['in_file', 'mask_file'], name='medianval') featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file') featpreproc.connect(threshold, 'out_file', medianval, 'mask_file') @@ -267,7 +274,7 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): """ dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil', - op_string='-dilF'), + op_string='-dilF'), iterfield=['in_file'], name='dilatemask') featpreproc.connect(threshold, 'out_file', dilatemask, 'in_file') @@ -279,8 +286,8 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): maskfunc2 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc2') + iterfield=['in_file', 'in_file2'], + name='maskfunc2') featpreproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file') featpreproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2') @@ -302,17 +309,16 @@ def 
create_parallelfeat_preproc(name='featpreproc', highpass=True): maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc3') + iterfield=['in_file', 'in_file2'], + name='maskfunc3') featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file') featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2') - concatnode = pe.Node(interface=util.Merge(2), name='concat') - featpreproc.connect(maskfunc2,('out_file', tolist), concatnode, 'in1') - featpreproc.connect(maskfunc3,('out_file', tolist), concatnode, 'in2') + featpreproc.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1') + featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2') """ The following nodes select smooth or unsmoothed data depending on the @@ -320,21 +326,20 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): voxel size of the input data if the fwhm parameter is less than 1/3 of the voxel size. """ - selectnode = pe.Node(interface=util.Select(),name='select') + selectnode = pe.Node(interface=util.Select(), name='select') featpreproc.connect(concatnode, 'out', selectnode, 'inlist') featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index') featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files') - """ Scale the median value of the run is set to 10000 """ meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'), - iterfield=['in_file','op_string'], - name='meanscale') + iterfield=['in_file', 'op_string'], + name='meanscale') featpreproc.connect(selectnode, 'out', meanscale, 'in_file') """ @@ -362,7 +367,7 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), iterfield=['in_file'], - name='meanfunc3') + name='meanfunc3') if highpass: featpreproc.connect(highpass, 'out_file', meanfunc3, 'in_file') else: @@ -370,9 +375,9 @@ def create_parallelfeat_preproc(name='featpreproc', highpass=True): featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean') - return featpreproc + def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle'): """Create a FEAT preprocessing workflow with registration to one volume of the first run @@ -421,7 +426,7 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') version = 0 if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): + LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): version = 507 featpreproc = pe.Workflow(name=name) @@ -437,26 +442,26 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') 'highpass']), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference', - 'motion_parameters', - 'realigned_files', - 'motion_plots', - 'mask', - 'smoothed_files', - 'highpassed_files', - 'mean']), - name='outputspec') + 'motion_parameters', + 'realigned_files', + 'motion_plots', + 'mask', + 'smoothed_files', + 'highpassed_files', + 'mean']), + name='outputspec') else: inputnode = pe.Node(interface=util.IdentityInterface(fields=['func', 'fwhm']), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference', - 'motion_parameters', - 'realigned_files', - 'motion_plots', - 'mask', - 'smoothed_files', - 'mean']), - name='outputspec') + 'motion_parameters', + 'realigned_files', + 'motion_plots', + 'mask', + 'smoothed_files', + 'mean']), + 
name='outputspec') """ Set up a node to define outputs for the preprocessing workflow @@ -469,10 +474,9 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') run. """ - img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float', - op_string = '', - suffix='_dtype'), + op_string='', + suffix='_dtype'), iterfield=['in_file'], name='img2float') featpreproc.connect(inputnode, 'func', img2float, 'in_file') @@ -483,22 +487,21 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') if whichvol != 'mean': extract_ref = pe.Node(interface=fsl.ExtractROI(t_size=1), - iterfield=['in_file'], - name = 'extractref') + iterfield=['in_file'], + name='extractref') featpreproc.connect(img2float, ('out_file', pickfirst), extract_ref, 'in_file') featpreproc.connect(img2float, ('out_file', pickvol, 0, whichvol), extract_ref, 't_min') featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference') - """ Realign the functional runs to the reference (`whichvol` volume of first run) """ - motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True, - save_plots = True, - interpolation = 'spline'), + motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats=True, + save_plots=True, + interpolation='spline'), name='realign', - iterfield = ['in_file']) + iterfield=['in_file']) featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file') if whichvol != 'mean': featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file') @@ -514,8 +517,8 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') """ plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) + name='plot_motion', + iterfield=['in_file']) plot_motion.iterables = ('plot_type', ['rotations', 'translations']) featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots') @@ -524,19 +527,19 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') Extract the mean volume of the first functional run """ - meanfunc = pe.Node(interface=fsl.ImageMaths(op_string = '-Tmean', - suffix='_mean'), - name='meanfunc') + meanfunc = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean', + suffix='_mean'), + name='meanfunc') featpreproc.connect(motion_correct, ('out_file', pickfirst), meanfunc, 'in_file') """ Strip the skull from the mean functional to generate a mask """ - meanfuncmask = pe.Node(interface=fsl.BET(mask = True, + meanfuncmask = pe.Node(interface=fsl.BET(mask=True, no_output=True, - frac = 0.3), - name = 'meanfuncmask') + frac=0.3), + name='meanfuncmask') featpreproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file') """ @@ -546,27 +549,25 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), iterfield=['in_file'], - name = 'maskfunc') + name='maskfunc') featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') featpreproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2') - """ Determine the 2nd and 98th percentile intensities of each functional run """ getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'), - iterfield = ['in_file'], + iterfield=['in_file'], name='getthreshold') featpreproc.connect(maskfunc, 'out_file', getthresh, 'in_file') - """ Threshold the first run of the functional data at 10% of the 98th 
percentile """ threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char', - suffix='_thresh'), + suffix='_thresh'), iterfield=['in_file', 'op_string'], name='threshold') featpreproc.connect(maskfunc, 'out_file', threshold, 'in_file') @@ -582,7 +583,7 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') """ medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield = ['in_file', 'mask_file'], + iterfield=['in_file', 'mask_file'], name='medianval') featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file') featpreproc.connect(threshold, 'out_file', medianval, 'mask_file') @@ -592,7 +593,7 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') """ dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil', - op_string='-dilF'), + op_string='-dilF'), iterfield=['in_file'], name='dilatemask') featpreproc.connect(threshold, 'out_file', dilatemask, 'in_file') @@ -604,8 +605,8 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') maskfunc2 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc2') + iterfield=['in_file', 'in_file2'], + name='maskfunc2') featpreproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file') featpreproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2') @@ -627,17 +628,16 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc3') + iterfield=['in_file', 'in_file2'], + name='maskfunc3') featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file') featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2') - concatnode = pe.Node(interface=util.Merge(2), name='concat') - featpreproc.connect(maskfunc2,('out_file', tolist), concatnode, 'in1') - featpreproc.connect(maskfunc3,('out_file', tolist), concatnode, 'in2') + featpreproc.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1') + featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2') """ The following nodes select smooth or unsmoothed data depending on the @@ -645,21 +645,20 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') voxel size of the input data if the fwhm parameter is less than 1/3 of the voxel size. 
""" - selectnode = pe.Node(interface=util.Select(),name='select') + selectnode = pe.Node(interface=util.Select(), name='select') featpreproc.connect(concatnode, 'out', selectnode, 'inlist') featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index') featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files') - """ Scale the median value of the run is set to 10000 """ meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'), - iterfield=['in_file','op_string'], - name='meanscale') + iterfield=['in_file', 'op_string'], + name='meanscale') featpreproc.connect(selectnode, 'out', meanscale, 'in_file') """ @@ -668,15 +667,14 @@ def create_featreg_preproc(name='featpreproc', highpass=True, whichvol='middle') featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale, 'op_string') - """ Generate a mean functional image from the first run """ meanfunc3 = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean', - suffix='_mean'), - iterfield=['in_file'], - name='meanfunc3') + suffix='_mean'), + iterfield=['in_file'], + name='meanfunc3') featpreproc.connect(meanscale, ('out_file', pickfirst), meanfunc3, 'in_file') featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean') @@ -765,21 +763,20 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): """ smooth = pe.MapNode(interface=fsl.SUSAN(), - iterfield=['in_file', 'brightness_threshold','usans'], + iterfield=['in_file', 'brightness_threshold', 'usans'], name='smooth') """ Determine the median value of the functional runs using the mask """ - if separate_masks: median = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield = ['in_file', 'mask_file'], + iterfield=['in_file', 'mask_file'], name='median') else: median = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield = ['in_file'], + iterfield=['in_file'], name='median') susan_smooth.connect(inputnode, 'in_files', median, 'in_file') susan_smooth.connect(inputnode, 'mask_file', median, 'mask_file') @@ -806,9 +803,9 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): """ meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean', - suffix='_mean'), - iterfield=['in_file'], - name='meanfunc2') + suffix='_mean'), + iterfield=['in_file'], + name='meanfunc2') susan_smooth.connect(mask, 'out_file', meanfunc, 'in_file') """ @@ -816,9 +813,9 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): """ merge = pe.Node(interface=util.Merge(2, axis='hstack'), - name='merge') - susan_smooth.connect(meanfunc,'out_file', merge, 'in1') - susan_smooth.connect(median,'out_stat', merge, 'in2') + name='merge') + susan_smooth.connect(meanfunc, 'out_file', merge, 'in1') + susan_smooth.connect(median, 'out_stat', merge, 'in2') """ Define a function to get the brightness threshold for SUSAN @@ -829,7 +826,7 @@ def create_susan_smooth(name="susan_smooth", separate_masks=True): susan_smooth.connect(merge, ('out', getusans), smooth, 'usans') outputnode = pe.Node(interface=util.IdentityInterface(fields=['smoothed_files']), - name='outputnode') + name='outputnode') susan_smooth.connect(smooth, 'smoothed_file', outputnode, 'smoothed_files') @@ -895,16 +892,16 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): 'highpass']), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference', - 'motion_parameters', - 'realigned_files', - 'motion_plots', - 'mask_file', - 'smoothed_files', - 'highpassed_files', - 'reg_file', - 'reg_cost' - ]), - 
name='outputspec') + 'motion_parameters', + 'realigned_files', + 'motion_plots', + 'mask_file', + 'smoothed_files', + 'highpassed_files', + 'reg_file', + 'reg_cost' + ]), + name='outputspec') else: inputnode = pe.Node(interface=util.IdentityInterface(fields=['func', 'fwhm', @@ -913,15 +910,15 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): ]), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['reference', - 'motion_parameters', - 'realigned_files', - 'motion_plots', - 'mask_file', - 'smoothed_files', - 'reg_file', - 'reg_cost' - ]), - name='outputspec') + 'motion_parameters', + 'realigned_files', + 'motion_plots', + 'mask_file', + 'smoothed_files', + 'reg_file', + 'reg_cost' + ]), + name='outputspec') """ Set up a node to define outputs for the preprocessing workflow @@ -935,35 +932,33 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): """ img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float', - op_string = '', - suffix='_dtype'), + op_string='', + suffix='_dtype'), iterfield=['in_file'], name='img2float') featpreproc.connect(inputnode, 'func', img2float, 'in_file') - """ Extract the first volume of the first run as the reference """ if whichvol != 'mean': extract_ref = pe.Node(interface=fsl.ExtractROI(t_size=1), - iterfield=['in_file'], - name = 'extractref') + iterfield=['in_file'], + name='extractref') featpreproc.connect(img2float, ('out_file', pickfirst), extract_ref, 'in_file') featpreproc.connect(img2float, ('out_file', pickvol, 0, whichvol), extract_ref, 't_min') featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference') - """ Realign the functional runs to the reference (1st volume of first run) """ - motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True, - save_plots = True, - interpolation = 'sinc'), + motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats=True, + save_plots=True, + interpolation='sinc'), name='realign', - iterfield = ['in_file']) + iterfield=['in_file']) featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file') if whichvol != 'mean': featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file') @@ -979,8 +974,8 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): """ plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) + name='plot_motion', + iterfield=['in_file']) plot_motion.iterables = ('plot_type', ['rotations', 'translations']) featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots') @@ -989,15 +984,14 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): """ maskflow = create_getmask_flow() - featpreproc.connect([(inputnode, maskflow, [('subject_id','inputspec.subject_id'), - ('subjects_dir', 'inputspec.subjects_dir')])]) + featpreproc.connect([(inputnode, maskflow, [('subject_id', 'inputspec.subject_id'), + ('subjects_dir', 'inputspec.subjects_dir')])]) maskflow.inputs.inputspec.contrast_type = 't2' if whichvol != 'mean': featpreproc.connect(extract_ref, 'roi_file', maskflow, 'inputspec.source_file') else: featpreproc.connect(motion_correct, ('mean_img', pickfirst), maskflow, 'inputspec.source_file') - """ Mask the functional runs with the extracted mask """ @@ -1005,7 +999,7 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): maskfunc = 
pe.MapNode(interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), iterfield=['in_file'], - name = 'maskfunc') + name='maskfunc') featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), maskfunc, 'in_file2') @@ -1027,12 +1021,11 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file'], - name='maskfunc3') + iterfield=['in_file'], + name='maskfunc3') featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file') featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), maskfunc3, 'in_file2') - concatnode = pe.Node(interface=util.Merge(2), name='concat') featpreproc.connect(maskfunc, ('out_file', tolist), concatnode, 'in1') @@ -1044,21 +1037,20 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): voxel size of the input data if the fwhm parameter is less than 1/3 of the voxel size. """ - selectnode = pe.Node(interface=util.Select(),name='select') + selectnode = pe.Node(interface=util.Select(), name='select') featpreproc.connect(concatnode, 'out', selectnode, 'inlist') featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index') featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files') - """ Scale the median value of the run is set to 10000 """ meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'), - iterfield=['in_file','op_string'], - name='meanscale') + iterfield=['in_file', 'op_string'], + name='meanscale') featpreproc.connect(selectnode, 'out', meanscale, 'in_file') """ @@ -1066,7 +1058,7 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): """ medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield = ['in_file'], + iterfield=['in_file'], name='medianval') featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file') featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), medianval, 'mask_file') @@ -1095,6 +1087,7 @@ def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): return featpreproc + def create_reg_workflow(name='registration'): """Create a FEAT preprocessing workflow @@ -1134,11 +1127,11 @@ def create_reg_workflow(name='registration'): 'config_file']), name='inputspec') outputnode = pe.Node(interface=util.IdentityInterface(fields=['func2anat_transform', - 'anat2target_transform', - 'transformed_files', - 'transformed_mean', - ]), - name='outputspec') + 'anat2target_transform', + 'transformed_files', + 'transformed_mean', + ]), + name='outputspec') """ Estimate the tissue classes from the anatomical image. 
But use spm's segment @@ -1201,7 +1194,7 @@ def create_reg_workflow(name='registration'): """ anat2target_nonlinear = pe.Node(fsl.FNIRT(), name='anat2target_nonlinear') - anat2target_nonlinear.inputs.fieldcoeff_file=True + anat2target_nonlinear.inputs.fieldcoeff_file = True register.connect(anat2target_affine, 'out_matrix_file', anat2target_nonlinear, 'affine_file') register.connect(inputnode, 'anatomical_image', diff --git a/nipype/workflows/fmri/spm/preprocess.py b/nipype/workflows/fmri/spm/preprocess.py index 9c67d4c307..1bcfcc39c8 100644 --- a/nipype/workflows/fmri/spm/preprocess.py +++ b/nipype/workflows/fmri/spm/preprocess.py @@ -12,6 +12,7 @@ logger = pe.logger + def create_spm_preproc(name='preproc'): """Create an spm preprocessing workflow with freesurfer registration and artifact detection. @@ -80,7 +81,7 @@ def create_spm_preproc(name='preproc'): realign = pe.Node(spm.Realign(), name='realign') workflow.connect(inputnode, 'functionals', realign, 'in_files') maskflow = create_getmask_flow() - workflow.connect([(inputnode, maskflow, [('subject_id','inputspec.subject_id'), + workflow.connect([(inputnode, maskflow, [('subject_id', 'inputspec.subject_id'), ('subjects_dir', 'inputspec.subjects_dir')])]) maskflow.inputs.inputspec.contrast_type = 't2' workflow.connect(realign, 'mean_image', maskflow, 'inputspec.source_file') @@ -89,11 +90,11 @@ def create_spm_preproc(name='preproc'): workflow.connect(realign, 'realigned_files', smooth, 'in_files') artdetect = pe.Node(ra.ArtifactDetect(mask_type='file', parameter_source='SPM', - use_differences=[True,False], + use_differences=[True, False], use_norm=True, save_plot=True), name='artdetect') - workflow.connect([(inputnode, artdetect,[('norm_threshold', 'norm_threshold'), + workflow.connect([(inputnode, artdetect, [('norm_threshold', 'norm_threshold'), ('zintensity_threshold', 'zintensity_threshold')])]) workflow.connect([(realign, artdetect, [('realigned_files', 'realigned_files'), @@ -121,9 +122,9 @@ def create_spm_preproc(name='preproc'): (maskflow, outputnode, [(("outputspec.mask_file", poplist), "mask_file")]), (realign, outputnode, [('realignment_parameters', 'realignment_parameters')]), (smooth, outputnode, [('smoothed_files', 'smoothed_files')]), - (artdetect, outputnode,[('outlier_files', 'outlier_files'), - ('statistic_files','outlier_stats'), - ('plot_files','outlier_plots')]) + (artdetect, outputnode, [('outlier_files', 'outlier_files'), + ('statistic_files', 'outlier_stats'), + ('plot_files', 'outlier_plots')]) ]) return workflow @@ -194,7 +195,7 @@ def compute_icv(class_images): img = load(session[0][0]).get_data() + \ load(session[1][0]).get_data() + \ load(session[2][0]).get_data() - img_icv = (img>0.5).astype(int).sum()*voxel_volume*1e-3 + img_icv = (img > 0.5).astype(int).sum()*voxel_volume*1e-3 icv.append(img_icv) return icv @@ -214,13 +215,14 @@ def compute_icv(class_images): "icv" ]), name="outputspec") - workflow.connect([(dartel_template, outputnode, [('outputspec.template_file','template_file')]), + workflow.connect([(dartel_template, outputnode, [('outputspec.template_file', 'template_file')]), (norm2mni, outputnode, [("normalized_files", "normalized_files")]), (calc_icv, outputnode, [("icv", "icv")]), ]) return workflow + def create_DARTEL_template(name='dartel_template'): """Create a vbm workflow that generates DARTEL-based template @@ -251,28 +253,28 @@ def create_DARTEL_template(name='dartel_template'): name='inputspec') segment = pe.MapNode(spm.NewSegment(), - iterfield=['channel_files'], - name='segment') + 
iterfield=['channel_files'], + name='segment') workflow.connect(inputnode, 'structural_files', segment, 'channel_files') version = spm.Info.version() if version: spm_path = version['path'] if version['name'] == 'SPM8': - tissue1 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 1), 2, (True,True), (False, False)) - tissue2 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 2), 2, (True,True), (False, False)) - tissue3 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 3), 2, (True,False), (False, False)) - tissue4 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 4), 3, (False,False), (False, False)) - tissue5 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 5), 4, (False,False), (False, False)) - tissue6 = ((os.path.join(spm_path,'toolbox/Seg/TPM.nii'), 6), 2, (False,False), (False, False)) + tissue1 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 1), 2, (True, True), (False, False)) + tissue2 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 2), 2, (True, True), (False, False)) + tissue3 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 3), 2, (True, False), (False, False)) + tissue4 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 4), 3, (False, False), (False, False)) + tissue5 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 5), 4, (False, False), (False, False)) + tissue6 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 6), 2, (False, False), (False, False)) elif version['name'] == 'SPM12': spm_path = version['path'] - tissue1 = ((os.path.join(spm_path,'tpm/TPM.nii'), 1), 1, (True,True), (False, False)) - tissue2 = ((os.path.join(spm_path,'tpm/TPM.nii'), 2), 1, (True,True), (False, False)) - tissue3 = ((os.path.join(spm_path,'tpm/TPM.nii'), 3), 2, (True,False), (False, False)) - tissue4 = ((os.path.join(spm_path,'tpm/TPM.nii'), 4), 3, (False,False), (False, False)) - tissue5 = ((os.path.join(spm_path,'tpm/TPM.nii'), 5), 4, (False,False), (False, False)) - tissue6 = ((os.path.join(spm_path,'tpm/TPM.nii'), 6), 2, (False,False), (False, False)) + tissue1 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 1), 1, (True, True), (False, False)) + tissue2 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 2), 1, (True, True), (False, False)) + tissue3 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 3), 2, (True, False), (False, False)) + tissue4 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 4), 3, (False, False), (False, False)) + tissue5 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 5), 4, (False, False), (False, False)) + tissue6 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 6), 2, (False, False), (False, False)) else: logger.critical('Unsupported version of SPM') @@ -301,7 +303,7 @@ def get2classes(dartel_files): ]), name="outputspec") workflow.connect([ - (dartel, outputnode, [('final_template_file','template_file'), + (dartel, outputnode, [('final_template_file', 'template_file'), ('dartel_flow_fields', 'flow_fields')]), ]) diff --git a/nipype/workflows/misc/utils.py b/nipype/workflows/misc/utils.py index 5e8d0ecef9..cc3e196f15 100644 --- a/nipype/workflows/misc/utils.py +++ b/nipype/workflows/misc/utils.py @@ -60,14 +60,14 @@ def region_list_from_volume(in_file): def id_list_from_lookup_table(lookup_file, region_list): import numpy as np - LUTlabelsRGBA = np.loadtxt(lookup_file, skiprows=4, usecols=[0,1,2,3,4,5], comments='#', - dtype={'names': ('index', 'label', 'R', 'G', 'B', 'A'),'formats': ('int', '|S30', 'int', 'int', 'int', 'int')}) + LUTlabelsRGBA = np.loadtxt(lookup_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], comments='#', + dtype={'names': ('index', 'label', 'R', 'G', 'B', 'A'), 'formats': 
('int', '|S30', 'int', 'int', 'int', 'int')}) numLUTLabels = np.size(LUTlabelsRGBA) LUTlabelDict = {} - for labels in range(0,numLUTLabels): + for labels in range(0, numLUTLabels): LUTlabelDict[LUTlabelsRGBA[labels][0]] = [LUTlabelsRGBA[labels][1], - LUTlabelsRGBA[labels][2], LUTlabelsRGBA[labels][3], - LUTlabelsRGBA[labels][4], LUTlabelsRGBA[labels][5]] + LUTlabelsRGBA[labels][2], LUTlabelsRGBA[labels][3], + LUTlabelsRGBA[labels][4], LUTlabelsRGBA[labels][5]] id_list = [] for region in region_list: label = LUTlabelDict[region][0] diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py index bd25829445..a8e3f70cb5 100644 --- a/nipype/workflows/rsfmri/fsl/resting.py +++ b/nipype/workflows/rsfmri/fsl/resting.py @@ -37,6 +37,7 @@ def extract_noise_components(realigned_file, noise_mask_file, num_components): np.savetxt(components_file, components, fmt="%.10f") return components_file + def select_volume(filename, which): """Return the middle index of a file """ @@ -47,9 +48,10 @@ def select_volume(filename, which): elif which.lower() == 'middle': idx = int(np.ceil(load(filename).get_shape()[3] / 2)) else: - raise Exception('unknown value for volume selection : %s'%which) + raise Exception('unknown value for volume selection : %s' %which) return idx + def create_realign_flow(name='realign'): """Realign a time series to the middle volume using spline interpolation @@ -71,7 +73,7 @@ def create_realign_flow(name='realign'): outputnode = pe.Node(interface=util.IdentityInterface(fields=[ 'realigned_file', ]), - name='outputspec') + name='outputspec') realigner = pe.Node(fsl.MCFLIRT(save_mats=True, stats_imgs=True), name='realigner') splitter = pe.Node(fsl.Split(dimension='t'), name='splitter') @@ -91,6 +93,7 @@ def create_realign_flow(name='realign'): realignflow.connect(joiner, 'merged_file', outputnode, 'realigned_file') return realignflow + def create_resting_preproc(name='restpreproc'): """Create a "resting" time series preprocessing workflow @@ -136,19 +139,19 @@ def create_resting_preproc(name='restpreproc'): 'noise_mask_file', 'filtered_file', ]), - name='outputspec') + name='outputspec') slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer') realigner = create_realign_flow() tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr') getthresh = pe.Node(interface=fsl.ImageStats(op_string='-p 98'), - name='getthreshold') + name='getthreshold') threshold_stddev = pe.Node(fsl.Threshold(), name='threshold') compcor = pe.Node(util.Function(input_names=['realigned_file', 'noise_mask_file', 'num_components'], - output_names=['noise_components'], - function=extract_noise_components), - name='compcorr') + output_names=['noise_components'], + function=extract_noise_components), + name='compcorr') remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True), name='remove_noise') bandpass_filter = pe.Node(fsl.TemporalFilter(), @@ -181,4 +184,4 @@ def create_resting_preproc(name='restpreproc'): outputnode, 'noise_mask_file') restpreproc.connect(bandpass_filter, 'out_file', outputnode, 'filtered_file') - return restpreproc \ No newline at end of file + return restpreproc diff --git a/nipype/workflows/smri/ants/ANTSBuildTemplate.py b/nipype/workflows/smri/ants/ANTSBuildTemplate.py index fa3b88f38f..79b9918867 100644 --- a/nipype/workflows/smri/ants/ANTSBuildTemplate.py +++ b/nipype/workflows/smri/ants/ANTSBuildTemplate.py @@ -1,12 +1,12 @@ ################################################################################# -## Program: Build Template Parallel -## Language: Python 
+# Program: Build Template Parallel +# Language: Python ## -## Authors: Jessica Forbes, Grace Murray, and Hans Johnson, University of Iowa +# Authors: Jessica Forbes, Grace Murray, and Hans Johnson, University of Iowa ## -## This software is distributed WITHOUT ANY WARRANTY; without even -## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR -## PURPOSE. +# This software is distributed WITHOUT ANY WARRANTY; without even +# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +# PURPOSE. ## ################################################################################# from __future__ import print_function @@ -27,29 +27,31 @@ def GetFirstListElement(this_list): return this_list[0] + def MakeTransformListWithGradientWarps(averageAffineTranform, gradientStepWarp): return [averageAffineTranform, gradientStepWarp, gradientStepWarp, gradientStepWarp, gradientStepWarp] -def RenestDeformedPassiveImages(deformedPassiveImages,flattened_image_nametypes): + +def RenestDeformedPassiveImages(deformedPassiveImages, flattened_image_nametypes): import os """ Now make a list of lists of images where the outter list is per image type, and the inner list is the same size as the number of subjects to be averaged. In this case, the first element will be a list of all the deformed T2's, and the second element will be a list of all deformed POSTERIOR_AIR, etc.. """ - all_images_size=len(deformedPassiveImages) - image_dictionary_of_lists=dict() - nested_imagetype_list=list() - outputAverageImageName_list=list() - image_type_list=list() - ## make empty_list, this is not efficient, but it works + all_images_size = len(deformedPassiveImages) + image_dictionary_of_lists = dict() + nested_imagetype_list = list() + outputAverageImageName_list = list() + image_type_list = list() + # make empty_list, this is not efficient, but it works for name in flattened_image_nametypes: - image_dictionary_of_lists[name]=list() - for index in range(0,all_images_size): - curr_name=flattened_image_nametypes[index] - curr_file=deformedPassiveImages[index] + image_dictionary_of_lists[name] = list() + for index in range(0, all_images_size): + curr_name = flattened_image_nametypes[index] + curr_file = deformedPassiveImages[index] image_dictionary_of_lists[curr_name].append(curr_file) - for image_type,image_list in list(image_dictionary_of_lists.items()): + for image_type, image_list in list(image_dictionary_of_lists.items()): nested_imagetype_list.append(image_list) outputAverageImageName_list.append('AVG_'+image_type+'.nii.gz') image_type_list.append('WARP_AVG_'+image_type) @@ -57,45 +59,50 @@ def RenestDeformedPassiveImages(deformedPassiveImages,flattened_image_nametypes) print("HACK: ", nested_imagetype_list) print("HACK: ", outputAverageImageName_list) print("HACK: ", image_type_list) - return nested_imagetype_list,outputAverageImageName_list,image_type_list - -## Utility Function -## This will make a list of list pairs for defining the concatenation of transforms -## wp=['wp1.nii','wp2.nii','wp3.nii'] -## af=['af1.mat','af2.mat','af3.mat'] -## ll=map(list,zip(af,wp)) -## ll -##[['af1.mat', 'wp1.nii'], ['af2.mat', 'wp2.nii'], ['af3.mat', 'wp3.nii']] + return nested_imagetype_list, outputAverageImageName_list, image_type_list + +# Utility Function +# This will make a list of list pairs for defining the concatenation of transforms +# wp=['wp1.nii','wp2.nii','wp3.nii'] +# af=['af1.mat','af2.mat','af3.mat'] +# ll=map(list,zip(af,wp)) +# ll +# #[['af1.mat', 'wp1.nii'], ['af2.mat', 'wp2.nii'], ['af3.mat', 
'wp3.nii']] + + def MakeListsOfTransformLists(warpTransformList, AffineTransformList): - return list(map(list, list(zip(warpTransformList,AffineTransformList)))) + return list(map(list, list(zip(warpTransformList, AffineTransformList)))) -## Flatten and return equal length transform and images lists. -def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries,transformation_series): +# Flatten and return equal length transform and images lists. + + +def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries, transformation_series): import sys print("HACK: DEBUG: ListOfPassiveImagesDictionaries\n{lpi}\n".format(lpi=ListOfPassiveImagesDictionaries)) - subjCount=len(ListOfPassiveImagesDictionaries) - tranCount=len(transformation_series) + subjCount = len(ListOfPassiveImagesDictionaries) + tranCount = len(transformation_series) if subjCount != tranCount: - print("ERROR: subjCount must equal tranCount {0} != {1}".format(subjCount,tranCount)) + print("ERROR: subjCount must equal tranCount {0} != {1}".format(subjCount, tranCount)) sys.exit(-1) - flattened_images=list() - flattened_image_nametypes=list() - flattened_transforms=list() + flattened_images = list() + flattened_image_nametypes = list() + flattened_transforms = list() passiveImagesCount = len(ListOfPassiveImagesDictionaries[0]) - for subjIndex in range(0,subjCount): - #if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): + for subjIndex in range(0, subjCount): + # if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): # print "ERROR: all image lengths must be equal {0} != {1}".format(passiveImagesCount,len(ListOfPassiveImagesDictionaries[subjIndex])) # sys.exit(-1) - subjImgDictionary=ListOfPassiveImagesDictionaries[subjIndex] - subjToAtlasTransform=transformation_series[subjIndex] - for imgname,img in list(subjImgDictionary.items()): + subjImgDictionary = ListOfPassiveImagesDictionaries[subjIndex] + subjToAtlasTransform = transformation_series[subjIndex] + for imgname, img in list(subjImgDictionary.items()): flattened_images.append(img) flattened_image_nametypes.append(imgname) flattened_transforms.append(subjToAtlasTransform) print("HACK: flattened images {0}\n".format(flattened_images)) print("HACK: flattened nametypes {0}\n".format(flattened_image_nametypes)) print("HACK: flattened txfms {0}\n".format(flattened_transforms)) - return flattened_images,flattened_transforms,flattened_image_nametypes + return flattened_images, flattened_transforms, flattened_image_nametypes + def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''): """ @@ -113,25 +120,24 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''): outputspec.passive_deformed_templates : """ - - TemplateBuildSingleIterationWF = pe.Workflow(name = 'ANTSTemplateBuildSingleIterationWF_'+str(str(iterationPhasePrefix)) ) + TemplateBuildSingleIterationWF = pe.Workflow(name='ANTSTemplateBuildSingleIterationWF_'+str(str(iterationPhasePrefix))) inputSpec = pe.Node(interface=util.IdentityInterface(fields=['images', 'fixed_image', - 'ListOfPassiveImagesDictionaries']), - run_without_submitting=True, - name='inputspec') - ## HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that - ## they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger - ## their hash to change. - ## HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes - ## HACK: TODO: Need to run all python files through the code beutifiers. 
It has gotten pretty ugly. - outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template','transforms_list', - 'passive_deformed_templates']), - run_without_submitting=True, - name='outputspec') - - ### NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template - BeginANTS=pe.MapNode(interface=ANTS(), name = 'BeginANTS', iterfield=['moving_image']) + 'ListOfPassiveImagesDictionaries']), + run_without_submitting=True, + name='inputspec') + # HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that + # they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger + # their hash to change. + # HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes + # HACK: TODO: Need to run all python files through the code beutifiers. It has gotten pretty ugly. + outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template', 'transforms_list', + 'passive_deformed_templates']), + run_without_submitting=True, + name='outputspec') + + # NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template + BeginANTS = pe.MapNode(interface=ANTS(), name='BeginANTS', iterfield=['moving_image']) BeginANTS.inputs.dimension = 3 BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix)+'_tfm' BeginANTS.inputs.metric = ['CC'] @@ -140,7 +146,7 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''): BeginANTS.inputs.transformation_model = 'SyN' BeginANTS.inputs.gradient_step_length = 0.25 BeginANTS.inputs.number_of_iterations = [50, 35, 15] - BeginANTS.inputs.number_of_affine_iterations = [10000,10000,10000,10000,10000] + BeginANTS.inputs.number_of_affine_iterations = [10000, 10000, 10000, 10000, 10000] BeginANTS.inputs.use_histogram_matching = True BeginANTS.inputs.mi_option = [32, 16000] BeginANTS.inputs.regularization = 'Gauss' @@ -150,69 +156,69 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''): TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', BeginANTS, 'fixed_image') MakeTransformsLists = pe.Node(interface=util.Function(function=MakeListsOfTransformLists, - input_names=['warpTransformList', 'AffineTransformList'], - output_names=['out']), - run_without_submitting=True, - name='MakeTransformsLists') + input_names=['warpTransformList', 'AffineTransformList'], + output_names=['out']), + run_without_submitting=True, + name='MakeTransformsLists') MakeTransformsLists.inputs.ignore_exception = True TemplateBuildSingleIterationWF.connect(BeginANTS, 'warp_transform', MakeTransformsLists, 'warpTransformList') TemplateBuildSingleIterationWF.connect(BeginANTS, 'affine_transform', MakeTransformsLists, 'AffineTransformList') - ## Now warp all the input_images images - wimtdeformed = pe.MapNode(interface = WarpImageMultiTransform(), - iterfield=['transformation_series', 'input_image'], - name ='wimtdeformed') + # Now warp all the input_images images + wimtdeformed = pe.MapNode(interface=WarpImageMultiTransform(), + iterfield=['transformation_series', 'input_image'], + name='wimtdeformed') TemplateBuildSingleIterationWF.connect(inputSpec, 'images', wimtdeformed, 'input_image') TemplateBuildSingleIterationWF.connect(MakeTransformsLists, 'out', wimtdeformed, 'transformation_series') - ## Shape Update Next ===== - ## Now Average All input_images deformed images together to create an updated template average - AvgDeformedImages=pe.Node(interface=AverageImages(), name='AvgDeformedImages') + # 
Shape Update Next ===== + # Now Average All input_images deformed images together to create an updated template average + AvgDeformedImages = pe.Node(interface=AverageImages(), name='AvgDeformedImages') AvgDeformedImages.inputs.dimension = 3 AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix)+'.nii.gz' AvgDeformedImages.inputs.normalize = True TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", AvgDeformedImages, 'images') - ## Now average all affine transforms together - AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name = 'AvgAffineTransform') + # Now average all affine transforms together + AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name='AvgAffineTransform') AvgAffineTransform.inputs.dimension = 3 AvgAffineTransform.inputs.output_affine_transform = 'Avererage_'+str(iterationPhasePrefix)+'_Affine.mat' TemplateBuildSingleIterationWF.connect(BeginANTS, 'affine_transform', AvgAffineTransform, 'transforms') - ## Now average the warp fields togther - AvgWarpImages=pe.Node(interface=AverageImages(), name='AvgWarpImages') + # Now average the warp fields togther + AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages') AvgWarpImages.inputs.dimension = 3 AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix)+'warp.nii.gz' AvgWarpImages.inputs.normalize = True TemplateBuildSingleIterationWF.connect(BeginANTS, 'warp_transform', AvgWarpImages, 'images') - ## Now average the images together - ## TODO: For now GradientStep is set to 0.25 as a hard coded default value. + # Now average the images together + # TODO: For now GradientStep is set to 0.25 as a hard coded default value. GradientStep = 0.25 - GradientStepWarpImage=pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage') + GradientStepWarpImage = pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage') GradientStepWarpImage.inputs.dimension = 3 GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_'+str(iterationPhasePrefix)+'_warp.nii.gz' TemplateBuildSingleIterationWF.connect(AvgWarpImages, 'output_average_image', GradientStepWarpImage, 'first_input') - ## Now create the new template shape based on the average of all deformed images - UpdateTemplateShape = pe.Node(interface = WarpImageMultiTransform(), name = 'UpdateTemplateShape') + # Now create the new template shape based on the average of all deformed images + UpdateTemplateShape = pe.Node(interface=WarpImageMultiTransform(), name='UpdateTemplateShape') UpdateTemplateShape.inputs.invert_affine = [1] TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', UpdateTemplateShape, 'reference_image') TemplateBuildSingleIterationWF.connect(AvgAffineTransform, 'affine_transform', UpdateTemplateShape, 'transformation_series') TemplateBuildSingleIterationWF.connect(GradientStepWarpImage, 'output_product_image', UpdateTemplateShape, 'input_image') ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node(interface=util.Function(function=MakeTransformListWithGradientWarps, - input_names=['averageAffineTranform', 'gradientStepWarp'], - output_names=['TransformListWithGradientWarps']), - run_without_submitting=True, - name='MakeTransformListWithGradientWarps') + input_names=['averageAffineTranform', 'gradientStepWarp'], + output_names=['TransformListWithGradientWarps']), + run_without_submitting=True, + name='MakeTransformListWithGradientWarps') 
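# Illustrative sketch, not part of the patch: the Function node above wraps
# MakeTransformListWithGradientWarps, which (per its definition elsewhere in
# this diff) just bundles the averaged affine with four copies of the
# gradient-step warp, so a single WarpImageMultiTransform call applies the
# affine plus 4x the scaled average warp. A standalone approximation, with
# hypothetical file names:
def _make_transform_list_with_gradient_warps(average_affine, gradient_step_warp):
    # one affine transform followed by the same warp field repeated four times
    return [average_affine] + [gradient_step_warp] * 4

assert _make_transform_list_with_gradient_warps('Avg_Affine.mat', 'GradientStep_warp.nii.gz') == [
    'Avg_Affine.mat', 'GradientStep_warp.nii.gz', 'GradientStep_warp.nii.gz',
    'GradientStep_warp.nii.gz', 'GradientStep_warp.nii.gz']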
ApplyInvAverageAndFourTimesGradientStepWarpImage.inputs.ignore_exception = True TemplateBuildSingleIterationWF.connect(AvgAffineTransform, 'affine_transform', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'averageAffineTranform') TemplateBuildSingleIterationWF.connect(UpdateTemplateShape, 'output_image', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'gradientStepWarp') - ReshapeAverageImageWithShapeUpdate = pe.Node(interface = WarpImageMultiTransform(), name = 'ReshapeAverageImageWithShapeUpdate') + ReshapeAverageImageWithShapeUpdate = pe.Node(interface=WarpImageMultiTransform(), name='ReshapeAverageImageWithShapeUpdate') ReshapeAverageImageWithShapeUpdate.inputs.invert_affine = [1] ReshapeAverageImageWithShapeUpdate.inputs.out_postfix = '_Reshaped' TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', ReshapeAverageImageWithShapeUpdate, 'input_image') @@ -222,44 +228,44 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''): ###### ###### - ###### Process all the passive deformed images in a way similar to the main image used for registration + # Process all the passive deformed images in a way similar to the main image used for registration ###### ###### ###### ############################################## - ## Now warp all the ListOfPassiveImagesDictionaries images - FlattenTransformAndImagesListNode = pe.Node( Function(function=FlattenTransformAndImagesList, - input_names = ['ListOfPassiveImagesDictionaries','transformation_series'], - output_names = ['flattened_images','flattened_transforms','flattened_image_nametypes']), - run_without_submitting=True, name="99_FlattenTransformAndImagesList") - TemplateBuildSingleIterationWF.connect( inputSpec,'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries' ) - TemplateBuildSingleIterationWF.connect( MakeTransformsLists ,'out', FlattenTransformAndImagesListNode, 'transformation_series' ) - wimtPassivedeformed = pe.MapNode(interface = WarpImageMultiTransform(), - iterfield=['transformation_series', 'input_image'], - name ='wimtPassivedeformed') - TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image',wimtPassivedeformed,'reference_image') + # Now warp all the ListOfPassiveImagesDictionaries images + FlattenTransformAndImagesListNode = pe.Node(Function(function=FlattenTransformAndImagesList, + input_names=['ListOfPassiveImagesDictionaries', 'transformation_series'], + output_names=['flattened_images', 'flattened_transforms', 'flattened_image_nametypes']), + run_without_submitting=True, name="99_FlattenTransformAndImagesList") + TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries') + TemplateBuildSingleIterationWF.connect(MakeTransformsLists, 'out', FlattenTransformAndImagesListNode, 'transformation_series') + wimtPassivedeformed = pe.MapNode(interface=WarpImageMultiTransform(), + iterfield=['transformation_series', 'input_image'], + name='wimtPassivedeformed') + TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', wimtPassivedeformed, 'reference_image') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_transforms', wimtPassivedeformed, 'transformation_series') - RenestDeformedPassiveImagesNode = pe.Node( 
Function(function=RenestDeformedPassiveImages, - input_names = ['deformedPassiveImages','flattened_image_nametypes'], - output_names = ['nested_imagetype_list','outputAverageImageName_list','image_type_list']), - run_without_submitting=True, name="99_RenestDeformedPassiveImages") + RenestDeformedPassiveImagesNode = pe.Node(Function(function=RenestDeformedPassiveImages, + input_names=['deformedPassiveImages', 'flattened_image_nametypes'], + output_names=['nested_imagetype_list', 'outputAverageImageName_list', 'image_type_list']), + run_without_submitting=True, name="99_RenestDeformedPassiveImages") TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', RenestDeformedPassiveImagesNode, 'deformedPassiveImages') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_image_nametypes', RenestDeformedPassiveImagesNode, 'flattened_image_nametypes') - ## Now Average All passive input_images deformed images together to create an updated template average - AvgDeformedPassiveImages=pe.MapNode(interface=AverageImages(), - iterfield=['images','output_average_image'], - name='AvgDeformedPassiveImages') + # Now Average All passive input_images deformed images together to create an updated template average + AvgDeformedPassiveImages = pe.MapNode(interface=AverageImages(), + iterfield=['images', 'output_average_image'], + name='AvgDeformedPassiveImages') AvgDeformedPassiveImages.inputs.dimension = 3 AvgDeformedPassiveImages.inputs.normalize = False TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "nested_imagetype_list", AvgDeformedPassiveImages, 'images') TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "outputAverageImageName_list", AvgDeformedPassiveImages, 'output_average_image') - ## -- TODO: Now neeed to reshape all the passive images as well - ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface = WarpImageMultiTransform(), - iterfield=['input_image','reference_image','out_postfix'], - name = 'ReshapeAveragePassiveImageWithShapeUpdate') + # -- TODO: Now neeed to reshape all the passive images as well + ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface=WarpImageMultiTransform(), + iterfield=['input_image', 'reference_image', 'out_postfix'], + name='ReshapeAveragePassiveImageWithShapeUpdate') ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_affine = [1] TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "image_type_list", ReshapeAveragePassiveImageWithShapeUpdate, 'out_postfix') TemplateBuildSingleIterationWF.connect(AvgDeformedPassiveImages, 'output_average_image', ReshapeAveragePassiveImageWithShapeUpdate, 'input_image') diff --git a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py index 46c95a4d96..9a5c9c7e8e 100644 --- a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py +++ b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py @@ -1,12 +1,12 @@ ################################################################################# -## Program: Build Template Parallel -## Language: Python +# Program: Build Template Parallel +# Language: Python ## -## Authors: Jessica Forbes, Grace Murray, and Hans Johnson, University of Iowa +# Authors: Jessica Forbes, Grace Murray, and Hans Johnson, University of Iowa ## -## This software is distributed WITHOUT ANY WARRANTY; without even -## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR -## PURPOSE. 
+# This software is distributed WITHOUT ANY WARRANTY; without even +# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +# PURPOSE. ## ################################################################################# from __future__ import print_function @@ -22,90 +22,96 @@ AverageImages, MultiplyImages, AverageAffineTransform) + def makeListOfOneElement(inputFile): - outputList=[inputFile] + outputList = [inputFile] return outputList + def GetFirstListElement(this_list): return this_list[0] + def MakeTransformListWithGradientWarps(averageAffineTranform, gradientStepWarp): return [averageAffineTranform, gradientStepWarp, gradientStepWarp, gradientStepWarp, gradientStepWarp] -def RenestDeformedPassiveImages(deformedPassiveImages,flattened_image_nametypes,interpolationMapping): +def RenestDeformedPassiveImages(deformedPassiveImages, flattened_image_nametypes, interpolationMapping): import os """ Now make a list of lists of images where the outter list is per image type, and the inner list is the same size as the number of subjects to be averaged. In this case, the first element will be a list of all the deformed T2's, and the second element will be a list of all deformed POSTERIOR_AIR, etc.. """ - all_images_size=len(deformedPassiveImages) - image_dictionary_of_lists=dict() - nested_imagetype_list=list() - outputAverageImageName_list=list() - image_type_list=list() - nested_interpolation_type=list() - ## make empty_list, this is not efficient, but it works + all_images_size = len(deformedPassiveImages) + image_dictionary_of_lists = dict() + nested_imagetype_list = list() + outputAverageImageName_list = list() + image_type_list = list() + nested_interpolation_type = list() + # make empty_list, this is not efficient, but it works for name in flattened_image_nametypes: - image_dictionary_of_lists[name]=list() - for index in range(0,all_images_size): - curr_name=flattened_image_nametypes[index] - curr_file=deformedPassiveImages[index] + image_dictionary_of_lists[name] = list() + for index in range(0, all_images_size): + curr_name = flattened_image_nametypes[index] + curr_file = deformedPassiveImages[index] image_dictionary_of_lists[curr_name].append(curr_file) - for image_type,image_list in list(image_dictionary_of_lists.items()): + for image_type, image_list in list(image_dictionary_of_lists.items()): nested_imagetype_list.append(image_list) outputAverageImageName_list.append('AVG_'+image_type+'.nii.gz') image_type_list.append('WARP_AVG_'+image_type) if image_type in interpolationMapping: nested_interpolation_type.append(interpolationMapping[image_type]) else: - nested_interpolation_type.append('Linear') #Linear is the default. + nested_interpolation_type.append('Linear') # Linear is the default. 
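# Illustrative sketch, not part of the patch: the re-nesting performed by the
# RenestDeformedPassiveImages function being defined here groups the flattened
# per-subject warped images back into one inner list per image type, with
# matching 'AVG_<type>.nii.gz' output names and 'WARP_AVG_<type>' postfixes,
# and 'Linear' interpolation for types missing from interpolationMapping.
# A compact approximation with hypothetical file names (assumes dicts keep
# insertion order, i.e. Python 3.7+):
def _renest_by_type(deformed_images, nametypes):
    grouped = {}
    for name, img in zip(nametypes, deformed_images):
        grouped.setdefault(name, []).append(img)
    return (list(grouped.values()),
            ['AVG_' + t + '.nii.gz' for t in grouped],
            ['WARP_AVG_' + t for t in grouped])

nested, avg_names, postfixes = _renest_by_type(
    ['s1_T2.nii', 's1_AIR.nii', 's2_T2.nii', 's2_AIR.nii'],
    ['T2', 'AIR', 'T2', 'AIR'])
# nested    -> [['s1_T2.nii', 's2_T2.nii'], ['s1_AIR.nii', 's2_AIR.nii']]
# avg_names -> ['AVG_T2.nii.gz', 'AVG_AIR.nii.gz']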
print("\n"*10) print("HACK: ", nested_imagetype_list) print("HACK: ", outputAverageImageName_list) print("HACK: ", image_type_list) print("HACK: ", nested_interpolation_type) - return nested_imagetype_list,outputAverageImageName_list,image_type_list,nested_interpolation_type + return nested_imagetype_list, outputAverageImageName_list, image_type_list, nested_interpolation_type + def SplitAffineAndWarpComponents(list_of_transforms_lists): - ### Nota bene: The outputs will include the initial_moving_transform from Registration (which depends on what - ### the invert_initial_moving_transform is set to) + # Nota bene: The outputs will include the initial_moving_transform from Registration (which depends on what + # the invert_initial_moving_transform is set to) affine_component_list = [] - warp_component_list = [] + warp_component_list = [] for transform in list_of_transforms_lists: affine_component_list.append(transform[0]) warp_component_list.append(transform[1]) print("HACK ", affine_component_list, " ", warp_component_list) return affine_component_list, warp_component_list -## Flatten and return equal length transform and images lists. -def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries,transforms,invert_transform_flags,interpolationMapping): +# Flatten and return equal length transform and images lists. + + +def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries, transforms, invert_transform_flags, interpolationMapping): import sys print("HACK: DEBUG: ListOfPassiveImagesDictionaries\n{lpi}\n".format(lpi=ListOfPassiveImagesDictionaries)) - subjCount=len(ListOfPassiveImagesDictionaries) - tranCount=len(transforms) + subjCount = len(ListOfPassiveImagesDictionaries) + tranCount = len(transforms) if subjCount != tranCount: - print("ERROR: subjCount must equal tranCount {0} != {1}".format(subjCount,tranCount)) + print("ERROR: subjCount must equal tranCount {0} != {1}".format(subjCount, tranCount)) sys.exit(-1) - invertTfmsFlagsCount=len(invert_transform_flags) + invertTfmsFlagsCount = len(invert_transform_flags) if subjCount != invertTfmsFlagsCount: - print("ERROR: subjCount must equal invertTfmsFlags {0} != {1}".format(subjCount,invertTfmsFlagsCount)) + print("ERROR: subjCount must equal invertTfmsFlags {0} != {1}".format(subjCount, invertTfmsFlagsCount)) sys.exit(-1) - flattened_images=list() - flattened_image_nametypes=list() - flattened_transforms=list() - flattened_invert_transform_flags=list() - flattened_interpolation_type=list() + flattened_images = list() + flattened_image_nametypes = list() + flattened_transforms = list() + flattened_invert_transform_flags = list() + flattened_interpolation_type = list() passiveImagesCount = len(ListOfPassiveImagesDictionaries[0]) - for subjIndex in range(0,subjCount): - #if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): + for subjIndex in range(0, subjCount): + # if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): # print "ERROR: all image lengths must be equal {0} != {1}".format(passiveImagesCount,len(ListOfPassiveImagesDictionaries[subjIndex])) # sys.exit(-1) - subjImgDictionary=ListOfPassiveImagesDictionaries[subjIndex] - subjToAtlasTransform=transforms[subjIndex] - subjToAtlasInvertFlags=invert_transform_flags[subjIndex] - for imgname,img in list(subjImgDictionary.items()): + subjImgDictionary = ListOfPassiveImagesDictionaries[subjIndex] + subjToAtlasTransform = transforms[subjIndex] + subjToAtlasInvertFlags = invert_transform_flags[subjIndex] + for imgname, img in 
list(subjImgDictionary.items()): flattened_images.append(img) flattened_image_nametypes.append(imgname) flattened_transforms.append(subjToAtlasTransform) @@ -113,44 +119,47 @@ def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries,transforms,inv if imgname in interpolationMapping: flattened_interpolation_type.append(interpolationMapping[imgname]) else: - flattened_interpolation_type.append('Linear') #Linear is the default. + flattened_interpolation_type.append('Linear') # Linear is the default. print("HACK: flattened images {0}\n".format(flattened_images)) print("HACK: flattened nametypes {0}\n".format(flattened_image_nametypes)) print("HACK: flattened txfms {0}\n".format(flattened_transforms)) print("HACK: flattened txfmsFlags{0}\n".format(flattened_invert_transform_flags)) - return flattened_images,flattened_transforms,flattened_invert_transform_flags,flattened_image_nametypes,flattened_interpolation_type + return flattened_images, flattened_transforms, flattened_invert_transform_flags, flattened_image_nametypes, flattened_interpolation_type -def GetMovingImages(ListOfImagesDictionaries,registrationImageTypes,interpolationMapping): +def GetMovingImages(ListOfImagesDictionaries, registrationImageTypes, interpolationMapping): """ This currently ONLY works when registrationImageTypes has length of exactly 1. When the new multi-variate registration is introduced, it will be expanded. """ - if len(registrationImageTypes) !=1: + if len(registrationImageTypes) != 1: print("ERROR: Multivariate imageing not supported yet!") return [] - moving_images=[ mdict[ registrationImageTypes[0] ] for mdict in ListOfImagesDictionaries ] - moving_interpolation_type=interpolationMapping[ registrationImageTypes[0] ] - return moving_images,moving_interpolation_type + moving_images = [mdict[registrationImageTypes[0]] for mdict in ListOfImagesDictionaries] + moving_interpolation_type = interpolationMapping[registrationImageTypes[0]] + return moving_images, moving_interpolation_type + -def GetPassiveImages(ListOfImagesDictionaries,registrationImageTypes): - if len(registrationImageTypes) !=1: +def GetPassiveImages(ListOfImagesDictionaries, registrationImageTypes): + if len(registrationImageTypes) != 1: print("ERROR: Multivariate imageing not supported yet!") return [dict()] - passive_images=list() + passive_images = list() for mdict in ListOfImagesDictionaries: - ThisSubjectPassiveImages=dict() - for key,value in list(mdict.items()): + ThisSubjectPassiveImages = dict() + for key, value in list(mdict.items()): if key not in registrationImageTypes: - ThisSubjectPassiveImages[key]=value + ThisSubjectPassiveImages[key] = value passive_images.append(ThisSubjectPassiveImages) return passive_images ## -## NOTE: The modes can be either 'SINGLE_IMAGE' or 'MULTI' -## 'SINGLE_IMAGE' is quick shorthand when you are building an atlas with a single subject, then registration can -## be short-circuted -## any other string indicates the normal mode that you would expect and replicates the shell script build_template_parallel.sh +# NOTE: The modes can be either 'SINGLE_IMAGE' or 'MULTI' +# 'SINGLE_IMAGE' is quick shorthand when you are building an atlas with a single subject, then registration can +# be short-circuted +# any other string indicates the normal mode that you would expect and replicates the shell script build_template_parallel.sh + + def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): """ @@ -167,123 +176,122 @@ def 
antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): outputspec.transforms_list : outputspec.passive_deformed_templates : """ - TemplateBuildSingleIterationWF = pe.Workflow(name = 'antsRegistrationTemplateBuildSingleIterationWF_'+str(iterationPhasePrefix) ) + TemplateBuildSingleIterationWF = pe.Workflow(name='antsRegistrationTemplateBuildSingleIterationWF_'+str(iterationPhasePrefix)) inputSpec = pe.Node(interface=util.IdentityInterface(fields=[ 'ListOfImagesDictionaries', 'registrationImageTypes', - 'interpolationMapping','fixed_image']), + 'interpolationMapping', 'fixed_image']), run_without_submitting=True, name='inputspec') - ## HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that - ## they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger - ## their hash to change. - ## HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes - ## HACK: TODO: Need to run all python files through the code beutifiers. It has gotten pretty ugly. - outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template','transforms_list', - 'passive_deformed_templates']), - run_without_submitting=True, - name='outputspec') - - - ### NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template - BeginANTS=pe.MapNode(interface=Registration(), name = 'BeginANTS', iterfield=['moving_image']) + # HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that + # they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger + # their hash to change. + # HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes + # HACK: TODO: Need to run all python files through the code beutifiers. It has gotten pretty ugly. + outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template', 'transforms_list', + 'passive_deformed_templates']), + run_without_submitting=True, + name='outputspec') + + # NOTE MAP NODE! 
warp each of the original images to the provided fixed_image as the template + BeginANTS = pe.MapNode(interface=Registration(), name='BeginANTS', iterfield=['moving_image']) BeginANTS.inputs.dimension = 3 BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix)+'_tfm' - BeginANTS.inputs.transforms = ["Affine", "SyN"] - BeginANTS.inputs.transform_parameters = [[0.9], [0.25,3.0,0.0]] - BeginANTS.inputs.metric = ['Mattes', 'CC'] - BeginANTS.inputs.metric_weight = [1.0, 1.0] + BeginANTS.inputs.transforms = ["Affine", "SyN"] + BeginANTS.inputs.transform_parameters = [[0.9], [0.25, 3.0, 0.0]] + BeginANTS.inputs.metric = ['Mattes', 'CC'] + BeginANTS.inputs.metric_weight = [1.0, 1.0] BeginANTS.inputs.radius_or_number_of_bins = [32, 5] BeginANTS.inputs.number_of_iterations = [[1000, 1000, 1000], [50, 35, 15]] - BeginANTS.inputs.use_histogram_matching = [True, True] + BeginANTS.inputs.use_histogram_matching = [True, True] BeginANTS.inputs.use_estimate_learning_rate_once = [False, False] - BeginANTS.inputs.shrink_factors = [[3,2,1], [3,2,1]] - BeginANTS.inputs.smoothing_sigmas = [[3,2,0], [3,2,0]] - BeginANTS.inputs.sigma_units = ["vox"]*2 + BeginANTS.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]] + BeginANTS.inputs.smoothing_sigmas = [[3, 2, 0], [3, 2, 0]] + BeginANTS.inputs.sigma_units = ["vox"]*2 GetMovingImagesNode = pe.Node(interface=util.Function(function=GetMovingImages, - input_names=['ListOfImagesDictionaries','registrationImageTypes','interpolationMapping'], - output_names=['moving_images','moving_interpolation_type']), - run_without_submitting=True, - name='99_GetMovingImagesNode') + input_names=['ListOfImagesDictionaries', 'registrationImageTypes', 'interpolationMapping'], + output_names=['moving_images', 'moving_interpolation_type']), + run_without_submitting=True, + name='99_GetMovingImagesNode') TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfImagesDictionaries', GetMovingImagesNode, 'ListOfImagesDictionaries') TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', GetMovingImagesNode, 'registrationImageTypes') - TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping',GetMovingImagesNode,'interpolationMapping') + TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', GetMovingImagesNode, 'interpolationMapping') TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_images', BeginANTS, 'moving_image') TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_interpolation_type', BeginANTS, 'interpolation') TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', BeginANTS, 'fixed_image') - ## Now warp all the input_images images - wimtdeformed = pe.MapNode(interface = ApplyTransforms(), - iterfield=['transforms','invert_transform_flags','input_image'], - name ='wimtdeformed') + # Now warp all the input_images images + wimtdeformed = pe.MapNode(interface=ApplyTransforms(), + iterfield=['transforms', 'invert_transform_flags', 'input_image'], + name='wimtdeformed') wimtdeformed.inputs.interpolation = 'Linear' wimtdeformed.default_value = 0 - TemplateBuildSingleIterationWF.connect(BeginANTS,'forward_transforms',wimtdeformed,'transforms') - TemplateBuildSingleIterationWF.connect(BeginANTS,'forward_invert_flags',wimtdeformed,'invert_transform_flags') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', wimtdeformed, 'transforms') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', wimtdeformed, 'invert_transform_flags') 
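# Illustrative usage, not part of the patch: GetMovingImages (defined earlier in
# this module) selects one image per subject for the single registration image
# type, plus the interpolation to use for it; those images feed the BeginANTS
# MapNode, and its per-subject forward_transforms / forward_invert_flags are
# re-paired with the same images in wimtdeformed via the iterfield lists.
# Hypothetical file names:
_example_dicts = [{'T1': 's1_T1.nii', 'T2': 's1_T2.nii'},
                  {'T1': 's2_T1.nii', 'T2': 's2_T2.nii'}]
_moving, _interp = GetMovingImages(_example_dicts, ['T1'],
                                   {'T1': 'Linear', 'T2': 'Linear'})
# _moving -> ['s1_T1.nii', 's2_T1.nii'];  _interp -> 'Linear'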
TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_images', wimtdeformed, 'input_image') TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', wimtdeformed, 'reference_image') - ## Shape Update Next ===== - ## Now Average All input_images deformed images together to create an updated template average - AvgDeformedImages=pe.Node(interface=AverageImages(), name='AvgDeformedImages') + # Shape Update Next ===== + # Now Average All input_images deformed images together to create an updated template average + AvgDeformedImages = pe.Node(interface=AverageImages(), name='AvgDeformedImages') AvgDeformedImages.inputs.dimension = 3 AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix)+'.nii.gz' AvgDeformedImages.inputs.normalize = True TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", AvgDeformedImages, 'images') - ## Now average all affine transforms together - AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name = 'AvgAffineTransform') + # Now average all affine transforms together + AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name='AvgAffineTransform') AvgAffineTransform.inputs.dimension = 3 AvgAffineTransform.inputs.output_affine_transform = 'Avererage_'+str(iterationPhasePrefix)+'_Affine.mat' SplitAffineAndWarpsNode = pe.Node(interface=util.Function(function=SplitAffineAndWarpComponents, - input_names=['list_of_transforms_lists'], - output_names=['affine_component_list', 'warp_component_list']), + input_names=['list_of_transforms_lists'], + output_names=['affine_component_list', 'warp_component_list']), run_without_submitting=True, name='99_SplitAffineAndWarpsNode') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms',SplitAffineAndWarpsNode,'list_of_transforms_lists') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', SplitAffineAndWarpsNode, 'list_of_transforms_lists') TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, 'affine_component_list', AvgAffineTransform, 'transforms') - ## Now average the warp fields togther - AvgWarpImages=pe.Node(interface=AverageImages(), name='AvgWarpImages') + # Now average the warp fields togther + AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages') AvgWarpImages.inputs.dimension = 3 AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix)+'warp.nii.gz' AvgWarpImages.inputs.normalize = True TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, 'warp_component_list', AvgWarpImages, 'images') - ## Now average the images together - ## TODO: For now GradientStep is set to 0.25 as a hard coded default value. + # Now average the images together + # TODO: For now GradientStep is set to 0.25 as a hard coded default value. 
GradientStep = 0.25 - GradientStepWarpImage=pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage') + GradientStepWarpImage = pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage') GradientStepWarpImage.inputs.dimension = 3 GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_'+str(iterationPhasePrefix)+'_warp.nii.gz' TemplateBuildSingleIterationWF.connect(AvgWarpImages, 'output_average_image', GradientStepWarpImage, 'first_input') - ## Now create the new template shape based on the average of all deformed images - UpdateTemplateShape = pe.Node(interface = ApplyTransforms(), name = 'UpdateTemplateShape') + # Now create the new template shape based on the average of all deformed images + UpdateTemplateShape = pe.Node(interface=ApplyTransforms(), name='UpdateTemplateShape') UpdateTemplateShape.inputs.invert_transform_flags = [True] UpdateTemplateShape.inputs.interpolation = 'Linear' UpdateTemplateShape.default_value = 0 TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', UpdateTemplateShape, 'reference_image') - TemplateBuildSingleIterationWF.connect( [ (AvgAffineTransform, UpdateTemplateShape, [(('affine_transform', makeListOfOneElement ), 'transforms')] ), ]) + TemplateBuildSingleIterationWF.connect([(AvgAffineTransform, UpdateTemplateShape, [(('affine_transform', makeListOfOneElement), 'transforms')]), ]) TemplateBuildSingleIterationWF.connect(GradientStepWarpImage, 'output_product_image', UpdateTemplateShape, 'input_image') ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node(interface=util.Function(function=MakeTransformListWithGradientWarps, - input_names=['averageAffineTranform', 'gradientStepWarp'], - output_names=['TransformListWithGradientWarps']), - run_without_submitting=True, - name='99_MakeTransformListWithGradientWarps') + input_names=['averageAffineTranform', 'gradientStepWarp'], + output_names=['TransformListWithGradientWarps']), + run_without_submitting=True, + name='99_MakeTransformListWithGradientWarps') ApplyInvAverageAndFourTimesGradientStepWarpImage.inputs.ignore_exception = True TemplateBuildSingleIterationWF.connect(AvgAffineTransform, 'affine_transform', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'averageAffineTranform') TemplateBuildSingleIterationWF.connect(UpdateTemplateShape, 'output_image', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'gradientStepWarp') - ReshapeAverageImageWithShapeUpdate = pe.Node(interface = ApplyTransforms(), name = 'ReshapeAverageImageWithShapeUpdate') - ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [ True, False, False, False, False ] + ReshapeAverageImageWithShapeUpdate = pe.Node(interface=ApplyTransforms(), name='ReshapeAverageImageWithShapeUpdate') + ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [True, False, False, False, False] ReshapeAverageImageWithShapeUpdate.inputs.interpolation = 'Linear' ReshapeAverageImageWithShapeUpdate.default_value = 0 ReshapeAverageImageWithShapeUpdate.inputs.output_image = 'ReshapeAverageImageWithShapeUpdate.nii.gz' @@ -294,63 +302,63 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): ###### ###### - ###### Process all the passive deformed images in a way similar to the main image used for registration + # Process all the passive deformed images in a way similar to the main image used for registration ###### ###### ###### ############################################## - ## Now warp all 
the ListOfPassiveImagesDictionaries images - FlattenTransformAndImagesListNode = pe.Node( Function(function=FlattenTransformAndImagesList, - input_names = ['ListOfPassiveImagesDictionaries','transforms', - 'invert_transform_flags','interpolationMapping'], - output_names = ['flattened_images','flattened_transforms','flattened_invert_transform_flags', - 'flattened_image_nametypes','flattened_interpolation_type']), - run_without_submitting=True, name="99_FlattenTransformAndImagesList") + # Now warp all the ListOfPassiveImagesDictionaries images + FlattenTransformAndImagesListNode = pe.Node(Function(function=FlattenTransformAndImagesList, + input_names=['ListOfPassiveImagesDictionaries', 'transforms', + 'invert_transform_flags', 'interpolationMapping'], + output_names=['flattened_images', 'flattened_transforms', 'flattened_invert_transform_flags', + 'flattened_image_nametypes', 'flattened_interpolation_type']), + run_without_submitting=True, name="99_FlattenTransformAndImagesList") GetPassiveImagesNode = pe.Node(interface=util.Function(function=GetPassiveImages, - input_names=['ListOfImagesDictionaries','registrationImageTypes'], - output_names=['ListOfPassiveImagesDictionaries']), - run_without_submitting=True, - name='99_GetPassiveImagesNode') + input_names=['ListOfImagesDictionaries', 'registrationImageTypes'], + output_names=['ListOfPassiveImagesDictionaries']), + run_without_submitting=True, + name='99_GetPassiveImagesNode') TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfImagesDictionaries', GetPassiveImagesNode, 'ListOfImagesDictionaries') TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', GetPassiveImagesNode, 'registrationImageTypes') - TemplateBuildSingleIterationWF.connect( GetPassiveImagesNode,'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries' ) - TemplateBuildSingleIterationWF.connect( inputSpec,'interpolationMapping', FlattenTransformAndImagesListNode, 'interpolationMapping' ) - TemplateBuildSingleIterationWF.connect( BeginANTS,'forward_transforms', FlattenTransformAndImagesListNode, 'transforms' ) - TemplateBuildSingleIterationWF.connect( BeginANTS,'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags' ) - wimtPassivedeformed = pe.MapNode(interface = ApplyTransforms(), - iterfield=['transforms','invert_transform_flags', 'input_image','interpolation'], - name ='wimtPassivedeformed') + TemplateBuildSingleIterationWF.connect(GetPassiveImagesNode, 'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries') + TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', FlattenTransformAndImagesListNode, 'interpolationMapping') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', FlattenTransformAndImagesListNode, 'transforms') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags') + wimtPassivedeformed = pe.MapNode(interface=ApplyTransforms(), + iterfield=['transforms', 'invert_transform_flags', 'input_image', 'interpolation'], + name='wimtPassivedeformed') wimtPassivedeformed.default_value = 0 - TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image',wimtPassivedeformed,'reference_image') + TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', wimtPassivedeformed, 'reference_image') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 
'flattened_interpolation_type', wimtPassivedeformed, 'interpolation') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_transforms', wimtPassivedeformed, 'transforms') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_invert_transform_flags', wimtPassivedeformed, 'invert_transform_flags') - RenestDeformedPassiveImagesNode = pe.Node( Function(function=RenestDeformedPassiveImages, - input_names = ['deformedPassiveImages','flattened_image_nametypes','interpolationMapping'], - output_names = ['nested_imagetype_list','outputAverageImageName_list', - 'image_type_list','nested_interpolation_type']), - run_without_submitting=True, name="99_RenestDeformedPassiveImages") + RenestDeformedPassiveImagesNode = pe.Node(Function(function=RenestDeformedPassiveImages, + input_names=['deformedPassiveImages', 'flattened_image_nametypes', 'interpolationMapping'], + output_names=['nested_imagetype_list', 'outputAverageImageName_list', + 'image_type_list', 'nested_interpolation_type']), + run_without_submitting=True, name="99_RenestDeformedPassiveImages") TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', RenestDeformedPassiveImagesNode, 'interpolationMapping') TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', RenestDeformedPassiveImagesNode, 'deformedPassiveImages') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_image_nametypes', RenestDeformedPassiveImagesNode, 'flattened_image_nametypes') - ## Now Average All passive input_images deformed images together to create an updated template average - AvgDeformedPassiveImages=pe.MapNode(interface=AverageImages(), - iterfield=['images','output_average_image'], - name='AvgDeformedPassiveImages') + # Now Average All passive input_images deformed images together to create an updated template average + AvgDeformedPassiveImages = pe.MapNode(interface=AverageImages(), + iterfield=['images', 'output_average_image'], + name='AvgDeformedPassiveImages') AvgDeformedPassiveImages.inputs.dimension = 3 AvgDeformedPassiveImages.inputs.normalize = False TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "nested_imagetype_list", AvgDeformedPassiveImages, 'images') TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "outputAverageImageName_list", AvgDeformedPassiveImages, 'output_average_image') - ## -- TODO: Now neeed to reshape all the passive images as well - ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface = ApplyTransforms(), - iterfield=['input_image','reference_image','output_image','interpolation'], - name = 'ReshapeAveragePassiveImageWithShapeUpdate') - ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [ True, False, False, False, False ] + # -- TODO: Now neeed to reshape all the passive images as well + ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface=ApplyTransforms(), + iterfield=['input_image', 'reference_image', 'output_image', 'interpolation'], + name='ReshapeAveragePassiveImageWithShapeUpdate') + ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [True, False, False, False, False] ReshapeAveragePassiveImageWithShapeUpdate.default_value = 0 TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, 'nested_interpolation_type', 
ReshapeAveragePassiveImageWithShapeUpdate, 'interpolation') TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, 'outputAverageImageName_list', ReshapeAveragePassiveImageWithShapeUpdate, 'output_image') diff --git a/nipype/workflows/smri/freesurfer/bem.py b/nipype/workflows/smri/freesurfer/bem.py index 67857ed12d..46c7b2f239 100644 --- a/nipype/workflows/smri/freesurfer/bem.py +++ b/nipype/workflows/smri/freesurfer/bem.py @@ -58,8 +58,8 @@ def create_bem_flow(name='bem', out_format='stl'): watershed_bem = pe.Node(interface=mne.WatershedBEM(), name='WatershedBEM') surfconvert = pe.MapNode(fs.MRIsConvert(out_datatype=out_format), - iterfield=['in_file'], - name='surfconvert') + iterfield=['in_file'], + name='surfconvert') """ Connect the nodes @@ -67,7 +67,7 @@ def create_bem_flow(name='bem', out_format='stl'): bemflow.connect([ (inputnode, watershed_bem, [('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir')]), + ('subjects_dir', 'subjects_dir')]), (watershed_bem, surfconvert, [('mesh_files', 'in_file')]), ]) diff --git a/nipype/workflows/smri/freesurfer/recon.py b/nipype/workflows/smri/freesurfer/recon.py index 2a2ea375cb..f299fa1e8c 100644 --- a/nipype/workflows/smri/freesurfer/recon.py +++ b/nipype/workflows/smri/freesurfer/recon.py @@ -36,7 +36,7 @@ def create_skullstripped_recon_flow(name="skullstripped_recon_all"): name='inputspec') autorecon1 = pe.Node(fs.ReconAll(), name="autorecon1") - autorecon1.plugin_args={'submit_specs': 'request_memory = 2500'} + autorecon1.plugin_args = {'submit_specs': 'request_memory = 2500'} autorecon1.inputs.directive = "autorecon1" autorecon1.inputs.args = "-noskullstrip" autorecon1._interface._can_resume = False @@ -45,7 +45,6 @@ def create_skullstripped_recon_flow(name="skullstripped_recon_all"): wf.connect(inputnode, "subjects_dir", autorecon1, "subjects_dir") wf.connect(inputnode, "subject_id", autorecon1, "subject_id") - def link_masks(subjects_dir, subject_id): import os os.symlink(os.path.join(subjects_dir, subject_id, "mri", "T1.mgz"), @@ -55,22 +54,21 @@ def link_masks(subjects_dir, subject_id): return subjects_dir, subject_id masks = pe.Node(niu.Function(input_names=['subjects_dir', 'subject_id'], - output_names=['subjects_dir', 'subject_id'], - function=link_masks), name="link_masks") + output_names=['subjects_dir', 'subject_id'], + function=link_masks), name="link_masks") wf.connect(autorecon1, "subjects_dir", masks, "subjects_dir") wf.connect(autorecon1, "subject_id", masks, "subject_id") - autorecon_resume = pe.Node(fs.ReconAll(), name="autorecon_resume") - autorecon_resume.plugin_args={'submit_specs': 'request_memory = 2500'} + autorecon_resume.plugin_args = {'submit_specs': 'request_memory = 2500'} autorecon_resume.inputs.args = "-no-isrunning" wf.connect(masks, "subjects_dir", autorecon_resume, "subjects_dir") wf.connect(masks, "subject_id", autorecon_resume, "subject_id") outputnode = pe.Node(niu.IdentityInterface(fields=['subject_id', - 'subjects_dir']), - name='outputspec') + 'subjects_dir']), + name='outputspec') wf.connect(autorecon_resume, "subjects_dir", outputnode, "subjects_dir") wf.connect(autorecon_resume, "subject_id", outputnode, "subject_id") diff --git a/nipype/workflows/smri/freesurfer/utils.py b/nipype/workflows/smri/freesurfer/utils.py index 7e74fa044e..d3abf46160 100644 --- a/nipype/workflows/smri/freesurfer/utils.py +++ b/nipype/workflows/smri/freesurfer/utils.py @@ -85,35 +85,34 @@ def create_getmask_flow(name='getmask', dilate_mask=True): """ fssource = pe.Node(nio.FreeSurferSource(), - 
name = 'fssource') + name='fssource') threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'), - name='threshold') + name='threshold') register = pe.MapNode(fs.BBRegister(init='fsl'), - iterfield=['source_file'], - name='register') + iterfield=['source_file'], + name='register') voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True), - iterfield=['source_file', 'reg_file'], - name='transform') + iterfield=['source_file', 'reg_file'], + name='transform') """ Connect the nodes """ getmask.connect([ - (inputnode, fssource, [('subject_id','subject_id'), - ('subjects_dir','subjects_dir')]), + (inputnode, fssource, [('subject_id', 'subject_id'), + ('subjects_dir', 'subjects_dir')]), (inputnode, register, [('source_file', 'source_file'), - ('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir'), - ('contrast_type', 'contrast_type')]), + ('subject_id', 'subject_id'), + ('subjects_dir', 'subjects_dir'), + ('contrast_type', 'contrast_type')]), (inputnode, voltransform, [('subjects_dir', 'subjects_dir'), - ('source_file', 'source_file')]), + ('source_file', 'source_file')]), (fssource, threshold, [(('aparc_aseg', get_aparc_aseg), 'in_file')]), - (register, voltransform, [('out_reg_file','reg_file')]), - (threshold, voltransform, [('binary_file','target_file')]) + (register, voltransform, [('out_reg_file', 'reg_file')]), + (threshold, voltransform, [('binary_file', 'target_file')]) ]) - """ Add remaining nodes and connections @@ -122,8 +121,8 @@ def create_getmask_flow(name='getmask', dilate_mask=True): """ threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'), - iterfield=['in_file'], - name='threshold2') + iterfield=['in_file'], + name='threshold2') if dilate_mask: threshold2.inputs.dilate = 1 getmask.connect([ @@ -137,8 +136,8 @@ def create_getmask_flow(name='getmask', dilate_mask=True): outputnode = pe.Node(niu.IdentityInterface(fields=["mask_file", "reg_file", "reg_cost" - ]), - name="outputspec") + ]), + name="outputspec") getmask.connect([ (register, outputnode, [("out_reg_file", "reg_file")]), (register, outputnode, [("min_cost_file", "reg_cost")]), @@ -146,6 +145,7 @@ def create_getmask_flow(name='getmask', dilate_mask=True): ]) return getmask + def create_get_stats_flow(name='getstats', withreg=False): """Retrieves stats from labels @@ -197,9 +197,8 @@ def create_get_stats_flow(name='getstats', withreg=False): 'label_file']), name='inputspec') - statnode = pe.MapNode(fs.SegStats(), - iterfield=['segmentation_file','in_file'], + iterfield=['segmentation_file', 'in_file'], name='segstats') """ @@ -221,18 +220,18 @@ def switch_labels(inverse, transform_output, source_file, label_file): else: return label_file, transform_output - chooser = pe.MapNode(niu.Function(input_names = ['inverse', + chooser = pe.MapNode(niu.Function(input_names=['inverse', 'transform_output', 'source_file', 'label_file'], - output_names = ['label_file', + output_names=['label_file', 'source_file'], function=switch_labels), - iterfield=['transform_output','source_file'], + iterfield=['transform_output', 'source_file'], name='chooser') - getstats.connect(inputnode,'source_file', chooser, 'source_file') - getstats.connect(inputnode,'label_file', chooser, 'label_file') - getstats.connect(inputnode,'inverse', chooser, 'inverse') + getstats.connect(inputnode, 'source_file', chooser, 'source_file') + getstats.connect(inputnode, 'label_file', chooser, 'label_file') + getstats.connect(inputnode, 'inverse', chooser, 'inverse') getstats.connect(voltransform, 'transformed_file', chooser, 'transform_output') 
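# Illustrative sketch, not part of the patch: the chooser node above follows the
# usual nipype Function-interface pattern, where input_names mirror the wrapped
# function's parameters and output_names label its returned tuple in order. A
# minimal, hypothetical example of the same pattern (node and function names
# are made up):
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def _pick(flag, first, second):
    # return (chosen, other) so the two output_names match the tuple order
    return (first, second) if flag else (second, first)

picker = pe.Node(niu.Function(input_names=['flag', 'first', 'second'],
                              output_names=['chosen', 'other'],
                              function=_pick),
                 name='picker')
picker.inputs.flag = True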
getstats.connect(chooser, 'label_file', statnode, 'segmentation_file') getstats.connect(chooser, 'source_file', statnode, 'in_file') @@ -245,7 +244,7 @@ def switch_labels(inverse, transform_output, source_file, label_file): """ outputnode = pe.Node(niu.IdentityInterface(fields=["stats_file" - ]), + ]), name="outputspec") getstats.connect([ (statnode, outputnode, [("summary_file", "stats_file")]), @@ -305,15 +304,15 @@ def create_tessellation_flow(name='tessellate', out_format='stl'): """ fssource = pe.Node(nio.FreeSurferSource(), - name = 'fssource') + name='fssource') volconvert = pe.Node(fs.MRIConvert(out_type='nii'), - name = 'volconvert') + name='volconvert') tessellate = pe.MapNode(fs.MRIMarchingCubes(), - iterfield=['label_value','out_file'], - name='tessellate') + iterfield=['label_value', 'out_file'], + name='tessellate') surfconvert = pe.MapNode(fs.MRIsConvert(out_datatype='stl'), - iterfield=['in_file'], - name='surfconvert') + iterfield=['in_file'], + name='surfconvert') smoother = pe.MapNode(mf.MeshFix(), iterfield=['in_file1'], name='smoother') @@ -325,12 +324,12 @@ def create_tessellation_flow(name='tessellate', out_format='stl'): smoother.inputs.laplacian_smoothing_steps = 1 region_list_from_volume_interface = Function(input_names=["in_file"], - output_names=["region_list"], - function=region_list_from_volume) + output_names=["region_list"], + function=region_list_from_volume) id_list_from_lookup_table_interface = Function(input_names=["lookup_file", "region_list"], - output_names=["id_list"], - function=id_list_from_lookup_table) + output_names=["id_list"], + function=id_list_from_lookup_table) region_list_from_volume_node = pe.Node(interface=region_list_from_volume_interface, name='region_list_from_volume_node') id_list_from_lookup_table_node = pe.Node(interface=id_list_from_lookup_table_interface, name='id_list_from_lookup_table_node') @@ -340,8 +339,8 @@ def create_tessellation_flow(name='tessellate', out_format='stl'): """ tessflow.connect([ - (inputnode, fssource, [('subject_id','subject_id'), - ('subjects_dir','subjects_dir')]), + (inputnode, fssource, [('subject_id', 'subject_id'), + ('subjects_dir', 'subjects_dir')]), (fssource, volconvert, [('aseg', 'in_file')]), (volconvert, region_list_from_volume_node, [('out_file', 'in_file')]), (region_list_from_volume_node, tessellate, [('region_list', 'label_value')]), @@ -349,8 +348,8 @@ def create_tessellation_flow(name='tessellate', out_format='stl'): (inputnode, id_list_from_lookup_table_node, [('lookup_file', 'lookup_file')]), (id_list_from_lookup_table_node, tessellate, [('id_list', 'out_file')]), (fssource, tessellate, [('aseg', 'in_file')]), - (tessellate, surfconvert, [('surface','in_file')]), - (surfconvert, smoother, [('converted','in_file1')]), + (tessellate, surfconvert, [('surface', 'in_file')]), + (surfconvert, smoother, [('converted', 'in_file1')]), ]) """ diff --git a/setup.py b/setup.py index db042ba243..7f282eb7e2 100755 --- a/setup.py +++ b/setup.py @@ -51,6 +51,7 @@ from distutils.command.build_py import build_py from distutils import log + def get_comrec_build(pkg_dir, build_cmd=build_py): """ Return extended build command class for recording commit @@ -181,7 +182,7 @@ def version_getter(pkg_name): msgs = { 'missing': 'Cannot import package "%s" - is it installed?', 'missing opt': 'Missing optional package "%s"', - 'opt suffix' : '; you may get run-time errors', + 'opt suffix': '; you may get run-time errors', 'version too old': 'You have version %s of package "%s"' ' but we need version >= %s', } 
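# Illustrative note, not part of the patch: the 'version too old' entry above
# relies on Python's implicit concatenation of adjacent string literals, so the
# two quoted pieces form one format string. For example (hypothetical values):
_msg = ('You have version %s of package "%s"'
        ' but we need version >= %s')
print(_msg % ('0.9', 'numpy', '1.0'))
# You have version 0.9 of package "numpy" but we need version >= 1.0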
msgs.update(messages) @@ -208,7 +209,7 @@ def version_getter(pkg_name): log.warn(msgs['version too old'] % (have_version, pkg_name, version) - + msgs['opt suffix']) + + msgs['opt suffix']) return # setuptools mode if optional_tf and not isinstance(optional, string_types): @@ -223,7 +224,7 @@ def version_getter(pkg_name): optional, dependency) return - #_add_append_key(setuptools_args, 'install_requires', dependency) + # add_append_key(setuptools_args, 'install_requires', dependency) return @@ -254,11 +255,11 @@ def _package_status(pkg_name, version, version_getter, checker): tests_require=['nose'], test_suite='nose.collector', zip_safe=False, - extras_require = dict( + extras_require=dict( doc='Sphinx>=0.3', test='nose>=0.10.1'), ) - pkg_chk = partial(package_check, setuptools_args = extra_setuptools_args) + pkg_chk = partial(package_check, setuptools_args=extra_setuptools_args) else: extra_setuptools_args = {} pkg_chk = package_check @@ -276,7 +277,8 @@ def _package_status(pkg_name, version, version_getter, checker): ' provided by package ' '"python-dateutil"')} pkg_chk('dateutil', DATEUTIL_MIN_VERSION, - messages = custom_dateutil_messages) + messages=custom_dateutil_messages) + def main(**extra_args): setup(name=NAME, @@ -294,7 +296,7 @@ def main(**extra_args): version=VERSION, install_requires=REQUIRES, provides=PROVIDES, - packages = [ 'nipype', + packages=['nipype', 'nipype.algorithms', 'nipype.algorithms.tests', 'nipype.caching', @@ -414,7 +416,7 @@ def main(**extra_args): # above, but distutils is surely the worst piece of code in all of # python -- duplicating things into MANIFEST.in but this is admittedly # only a workaround to get things started -- not a solution - package_data = {'nipype': + package_data={'nipype': [pjoin('testing', 'data', '*'), pjoin('testing', 'data', 'dicomdir', '*'), pjoin('testing', 'data', 'bedpostxout', '*'), @@ -424,11 +426,11 @@ def main(**extra_args): pjoin('external', 'd3.js'), pjoin('interfaces', 'script_templates', '*'), pjoin('interfaces', 'tests', 'realign_json.json') - ]}, - scripts = glob('bin/*'), - cmdclass = cmdclass, + ]}, + scripts=glob('bin/*'), + cmdclass=cmdclass, **extra_args - ) + ) if __name__ == "__main__": main(**extra_setuptools_args) diff --git a/tools/apigen.py b/tools/apigen.py index 4e9d08bd72..348e0e0677 100644 --- a/tools/apigen.py +++ b/tools/apigen.py @@ -303,12 +303,12 @@ def _survives_exclude(self, matchstr, match_type): if matchstr[:L] == self.package_name: matchstr = matchstr[L:] for pat in patterns: - #print (pat, matchstr, match_type) #dbg + # print (pat, matchstr, match_type) #dbg try: pat.search except AttributeError: pat = re.compile(pat) - #print (pat.search(matchstr)) #dbg + # print (pat.search(matchstr)) #dbg if pat.search(matchstr): return False return True @@ -346,7 +346,7 @@ def discover_modules(self): for dirname in dirnames[:]: # copy list - we modify inplace package_uri = '.'.join((root_uri, dirname)) if (self._uri2path(package_uri) and - self._survives_exclude(package_uri, 'package')): + self._survives_exclude(package_uri, 'package')): modules.append(package_uri) else: dirnames.remove(dirname) @@ -355,9 +355,9 @@ def discover_modules(self): module_name = filename[:-3] module_uri = '.'.join((root_uri, module_name)) if (self._uri2path(module_uri) and - self._survives_exclude(module_uri, 'module')): + self._survives_exclude(module_uri, 'module')): modules.append(module_uri) - #print sorted(modules) #dbg + # print sorted(modules) #dbg return sorted(modules) def write_modules_api(self, modules, outdir): diff 
--git a/tools/build_interface_docs.py b/tools/build_interface_docs.py
index 09f38cfc30..07fe607be3 100755
--- a/tools/build_interface_docs.py
+++ b/tools/build_interface_docs.py
@@ -9,14 +9,14 @@
 import os
 import sys
-#*****************************************************************************
+# *****************************************************************************
 if __name__ == '__main__':
     nipypepath = os.path.abspath('..')
-    sys.path.insert(1,nipypepath)
+    sys.path.insert(1, nipypepath)
     # local imports
     from interfacedocgen import InterfaceHelpWriter
     package = 'nipype'
-    outdir = os.path.join('interfaces','generated')
+    outdir = os.path.join('interfaces', 'generated')
     docwriter = InterfaceHelpWriter(package)
     # Packages that should not be included in generated API docs.
     docwriter.package_skip_patterns += ['\.external$',
diff --git a/tools/build_modref_templates.py b/tools/build_modref_templates.py
index a7bc309f08..8c868c6ffe 100755
--- a/tools/build_modref_templates.py
+++ b/tools/build_modref_templates.py
@@ -9,7 +9,7 @@
 import os
 import sys
-#*****************************************************************************
+# *****************************************************************************
 if __name__ == '__main__':
     nipypepath = os.path.abspath('..')
     sys.path.insert(1, nipypepath)
diff --git a/tools/checkspecs.py b/tools/checkspecs.py
index 278ca102c2..8974428780 100644
--- a/tools/checkspecs.py
+++ b/tools/checkspecs.py
@@ -108,7 +108,7 @@ def _uri2path(self, uri):
         path = path.replace(self.package_name + os.path.sep, '')
         path = os.path.join(self.root_path, path)
         # XXX maybe check for extensions as well?
-        if os.path.exists(path + '.py'): # file
+        if os.path.exists(path + '.py'):  # file
             path += '.py'
         elif os.path.exists(os.path.join(path, '__init__.py')):
             path = os.path.join(path, '__init__.py')
@@ -128,7 +128,7 @@ def _parse_module(self, uri):
         filename = self._uri2path(uri)
         if filename is None:
             # nothing that we could handle here.
-            return ([],[])
+            return ([], [])
         f = open(filename, 'rt')
         functions, classes = self._parse_lines(f, uri)
         f.close()
@@ -171,7 +171,7 @@ def test_specs(self, uri):
         # get the names of all classes and functions
         _, classes = self._parse_module(uri)
         if not classes:
-            #print 'WARNING: Empty -',uri # dbg
+            # print 'WARNING: Empty -',uri  # dbg
             return None

         # Make a shorter version of the uri that omits the package name for
@@ -212,7 +212,7 @@ def test_specs(self, uri):
                       ('.' * len(uri.split('.'))),
                       'from ..%s import %s' % (uri.split('.')[-1], c),
                       '']
-                cmd.append('def test_%s_inputs():' % c)
+                cmd.append('\ndef test_%s_inputs():' % c)
                 input_fields = ''
                 for traitname, trait in sorted(classinst.input_spec().traits(transient=None).items()):
                     input_fields += '%s=dict(' % traitname
@@ -246,7 +246,7 @@ def test_specs(self, uri):
                 if 'parent' in trait.__dict__:
                     parent_metadata = list(getattr(trait, 'parent').__dict__.keys())
                 if key not in allowed_keys + classinst._additional_metadata\
-                    + parent_metadata:
+                        + parent_metadata:
                     bad_specs.append([uri, c, 'Inputs', traitname, key])
                 if key == 'mandatory' and trait.mandatory is not None and not trait.mandatory:
                     bad_specs.append([uri, c, 'Inputs', traitname, 'mandatory=False'])
@@ -256,7 +256,7 @@ def test_specs(self, uri):

             if not os.path.exists(nonautotest):
                 with open(testfile, 'at') as fp:
-                    cmd = ['def test_%s_outputs():' % c]
+                    cmd = ['\ndef test_%s_outputs():' % c]
                     input_fields = ''
                     for traitname, trait in sorted(classinst.output_spec().traits(transient=None).items()):
                         input_fields += '%s=dict(' % traitname
@@ -278,7 +278,7 @@ def test_specs(self, uri):
     for key, metadata in list(output_map.items()):
         for metakey, value in list(metadata.items()):
             yield assert_equal, getattr(outputs.traits()[key], metakey), value"""]
-                    fp.writelines('\n'.join(cmd) + '\n\n')
+                    fp.writelines('\n'.join(cmd) + '\n')

             for traitname, trait in sorted(classinst.output_spec().traits(transient=None).items()):
                 for key in sorted(trait.__dict__):
@@ -292,7 +292,6 @@ def test_specs(self, uri):
                     bad_specs.append([uri, c, 'Outputs', traitname, key])
         return bad_specs

-
     def _survives_exclude(self, matchstr, match_type):
         ''' Returns True if *matchstr* does not match patterns
@@ -358,10 +357,10 @@ def discover_modules(self):

             # Check directory names for packages
             root_uri = self._path2uri(os.path.join(self.root_path, dirpath))
-            for dirname in dirnames[:]: # copy list - we modify inplace
+            for dirname in dirnames[:]:  # copy list - we modify inplace
                 package_uri = '.'.join((root_uri, dirname))
                 if (self._uri2path(package_uri) and
-                    self._survives_exclude(package_uri, 'package')):
+                        self._survives_exclude(package_uri, 'package')):
                     modules.append(package_uri)
                 else:
                     dirnames.remove(dirname)
@@ -370,7 +369,7 @@ def discover_modules(self):
                 module_name = filename[:-3]
                 module_uri = '.'.join((root_uri, module_name))
                 if (self._uri2path(module_uri) and
-                    self._survives_exclude(module_uri, 'module')):
+                        self._survives_exclude(module_uri, 'module')):
                     modules.append(module_uri)
         return sorted(modules)
diff --git a/tools/github.py b/tools/github.py
index 9d7ceba1a1..3f966383ee 100644
--- a/tools/github.py
+++ b/tools/github.py
@@ -93,6 +93,6 @@ def get_file_url(object):
         info = nipype.get_info()
         shortfile = os.path.join('nipype', filename.split('nipype/')[-1])
         uri = 'http://github.com/nipy/nipype/tree/%s/%s#L%d' % \
-            (info['commit_hash'],
-             shortfile, lines[1])
+              (info['commit_hash'],
+               shortfile, lines[1])
     return uri
diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py
index 3431a0645a..3cd930066b 100755
--- a/tools/gitwash_dumper.py
+++ b/tools/gitwash_dumper.py
@@ -73,7 +73,7 @@
 def copy_replace(replace_pairs, out_path,
                  cp_globs=('*',),
                  rep_globs=('*',),
-                 renames = ()):
+                 renames=()):
     out_fnames = cp_files(repo_path, cp_globs, out_path)
     renames = [(re.compile(in_exp), out_exp) for in_exp, out_exp in renames]
     fnames = []
diff --git a/tools/interfacedocgen.py b/tools/interfacedocgen.py
index 27ef425e31..bc49e6f78b 100644
--- a/tools/interfacedocgen.py
+++ b/tools/interfacedocgen.py
@@ -37,6 +37,8 @@
 from github import get_file_url

 # Functions and classes
+
+
 class InterfaceHelpWriter(object):
     ''' Class for automatic detection and parsing of API docs
     to Sphinx-parsable reST format'''
@@ -167,7 +169,7 @@ def _uri2path(self, uri):
         path = path.replace(self.package_name + os.path.sep, '')
         path = os.path.join(self.root_path, path)
         # XXX maybe check for extensions as well?
-        if os.path.exists(path + '.py'): # file
+        if os.path.exists(path + '.py'):  # file
             path += '.py'
         elif os.path.exists(os.path.join(path, '__init__.py')):
             path = os.path.join(path, '__init__.py')
@@ -187,7 +189,7 @@ def _parse_module(self, uri):
         filename = self._uri2path(uri)
         if filename is None:
             # nothing that we could handle here.
-            return ([],[])
+            return ([], [])
         f = open(filename, 'rt')
         functions, classes = self._parse_lines(f, uri)
         f.close()
@@ -216,7 +218,6 @@ def _parse_lines(self, linesource, module):
         classes.sort()
         return functions, classes

-
     def _write_graph_section(self, fname, title):
         ad = '\n%s\n%s\n\n' % (title, self.rst_section_levels[3] * len(title))
         ad += '.. graphviz::\n\n'
@@ -260,32 +261,32 @@ def generate_api_doc(self, uri):
                 continue

             if isinstance(workflow, Workflow):
-                workflows.append((workflow,function, finst))
+                workflows.append((workflow, function, finst))

         if not classes and not workflows and not helper_functions:
-            print('WARNING: Empty -',uri) # dbg
+            print('WARNING: Empty -', uri)  # dbg
             return ''

         # Make a shorter version of the uri that omits the package name for
         # titles
         uri_short = re.sub(r'^%s\.' % self.package_name, '', uri)
-        #uri_short = uri
+        # uri_short = uri

         ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'

         chap_title = uri_short
-        ad += (chap_title+'\n'+ self.rst_section_levels[1] * len(chap_title)
+        ad += (chap_title+'\n' + self.rst_section_levels[1] * len(chap_title)
                + '\n\n')

         # Set the chapter title to read 'module' for all modules except for the
         # main packages
-        #if '.' in uri:
+        # if '.' in uri:
         #    title = 'Module: :mod:`' + uri_short + '`'
-        #else:
+        # else:
         #    title = ':mod:`' + uri_short + '`'
-        #ad += title + '\n' + self.rst_section_levels[2] * len(title)
+        # ad += title + '\n' + self.rst_section_levels[2] * len(title)

-        #ad += '\n' + 'Classes' + '\n' + \
+        # ad += '\n' + 'Classes' + '\n' + \
         #      self.rst_section_levels[2] * 7 + '\n'

         for c in classes:
             __import__(uri)
@@ -326,7 +327,7 @@ def generate_api_doc(self, uri):
                 ad += '.. autofunction:: %s\n\n' % name

             """
-            (_,fname) = tempfile.mkstemp(suffix=".dot")
+            (_, fname) = tempfile.mkstemp(suffix=".dot")
             workflow.write_graph(dotfilename=fname, graph2use='hierarchical')

             ad += self._write_graph_section(fname, 'Graph') + '\n'
@@ -414,10 +415,10 @@ def discover_modules(self):

             # Check directory names for packages
             root_uri = self._path2uri(os.path.join(self.root_path, dirpath))
-            for dirname in dirnames[:]: # copy list - we modify inplace
+            for dirname in dirnames[:]:  # copy list - we modify inplace
                 package_uri = '.'.join((root_uri, dirname))
                 if (self._uri2path(package_uri) and
-                    self._survives_exclude(package_uri, 'package')):
+                        self._survives_exclude(package_uri, 'package')):
                     modules.append(package_uri)
                 else:
                     dirnames.remove(dirname)
@@ -426,11 +427,11 @@ def discover_modules(self):
                 module_name = filename[:-3]
                 module_uri = '.'.join((root_uri, module_name))
                 if (self._uri2path(module_uri) and
-                    self._survives_exclude(module_uri, 'module')):
+                        self._survives_exclude(module_uri, 'module')):
                     modules.append(module_uri)
         return sorted(modules)

-    def write_modules_api(self, modules,outdir):
+    def write_modules_api(self, modules, outdir):
         # write the list
         written_modules = []
         for m in modules:
@@ -467,7 +468,7 @@ def write_api_docs(self, outdir):
             os.mkdir(outdir)
         # compose list of modules
         modules = self.discover_modules()
-        self.write_modules_api(modules,outdir)
+        self.write_modules_api(modules, outdir)

     def write_index(self, outdir, froot='gen', relative_to=None):
         """Make a reST API index file from written files
@@ -496,11 +497,11 @@ def write_index(self, outdir, froot='gen', relative_to=None):
             relpath = outdir.replace(relative_to + os.path.sep, '')
         else:
             relpath = outdir
-        idx = open(path,'wt')
+        idx = open(path, 'wt')
         w = idx.write
         w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n')
         w('.. toctree::\n')
         w('   :maxdepth: 2\n\n')
         for f in self.written_modules:
-            w('    %s\n' % os.path.join(relpath,f))
+            w('    %s\n' % os.path.join(relpath, f))
         idx.close()
diff --git a/tools/make_examples.py b/tools/make_examples.py
index 89fccf597a..20481427a4 100755
--- a/tools/make_examples.py
+++ b/tools/make_examples.py
@@ -5,9 +5,9 @@
 """
 from past.builtins import execfile

-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # Library imports
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------

 # Stdlib imports
 import os
@@ -27,9 +27,9 @@

 # Local tools
 from toollib import *

-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # Globals
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------

 examples_header = """
@@ -40,9 +40,9 @@
 .. note_about_examples
 """
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # Function defintions
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------

 # These global variables let show() be called by the scripts in the usual
 # manner, but when generating examples, we override it to write the figures to
@@ -50,6 +50,8 @@
 figure_basename = None

 # We must change the show command to save instead
+
+
 def show():
     allfm = Gcf.get_all_fig_managers()
     for fcount, fm in enumerate(allfm):
@@ -59,9 +61,9 @@ def show():
 _mpl_show = plt.show
 plt.show = show

-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # Main script
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------

 # Work in examples directory
 cd('users/examples')
diff --git a/tools/nipype_nightly.py b/tools/nipype_nightly.py
index 2355649350..363fcc1988 100644
--- a/tools/nipype_nightly.py
+++ b/tools/nipype_nightly.py
@@ -12,6 +12,7 @@

 dirname = '/home/cburns/src/nipy-sf/nipype/trunk/'

+
 def run_cmd(cmd):
     print(cmd)
     proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
@@ -25,24 +26,28 @@ def run_cmd(cmd):
         raise Exception(msg)
     print(output)

+
 def update_repos():
     """Update svn repository."""
     os.chdir(dirname)
     cmd = 'svn update'
     run_cmd(cmd)

+
 def build_docs():
     """Build the sphinx documentation."""
     os.chdir(os.path.join(dirname, 'doc'))
     cmd = 'make html'
     run_cmd(cmd)

+
 def push_to_sf():
     """Push documentation to sourceforge."""
     os.chdir(dirname + 'doc')
     cmd = 'make sf_cburns'
     run_cmd(cmd)

+
 def setup_paths():
     # Cron has no PYTHONPATH defined, so we need to add the paths to
     # all libraries we need.
@@ -55,7 +60,7 @@ def setup_paths():
     sys.path.insert(0, pkg_path)
     # Needed to add this to my path at one point otherwise import of
     # apigen failed.
-    #sys.path.insert(2, '/home/cburns/src/nipy-sf/nipype/trunk/tools')
+    # sys.path.insert(2, '/home/cburns/src/nipy-sf/nipype/trunk/tools')

     # Add networkx, twisted, zope.interface and foolscap.
     # Basically we need to add all the packages we need that are
@@ -81,7 +86,7 @@ def setup_paths():
     prev_dir = os.path.abspath(os.curdir)
     update_repos()
     build_docs()
-    #push_to_sf()
+    # push_to_sf()
     os.chdir(prev_dir)
diff --git a/tools/report_coverage.py b/tools/report_coverage.py
index c1a1fc2e66..61b8ced640 100644
--- a/tools/report_coverage.py
+++ b/tools/report_coverage.py
@@ -3,6 +3,7 @@
 from __future__ import print_function
 import subprocess

+
 def run_tests(cmd):
     proc = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
@@ -15,6 +16,7 @@
     # Nose returns the output in stderr
     return stderr

+
 def grab_coverage(output):
     """Grab coverage lines from nose output."""
     output = output.split('\n')
diff --git a/tools/run_examples.py b/tools/run_examples.py
index 1eb1f6e6ee..cd75067ed1 100644
--- a/tools/run_examples.py
+++ b/tools/run_examples.py
@@ -5,7 +5,7 @@


 def run_examples(example, pipelines, plugin):
-    print('running example: %s with plugin: %s'%(example, plugin))
+    print('running example: %s with plugin: %s' %(example, plugin))
     from nipype import config
     config.enable_debug_mode()
     from nipype.interfaces.base import CommandLine
@@ -17,18 +17,18 @@ def run_examples(example, pipelines, plugin):
         wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin)
         if os.path.exists(wf.base_dir):
             rmtree(wf.base_dir)
-        wf.config = {'execution' :{'hash_method': 'timestamp', 'stop_on_first_rerun': 'true'}}
+        wf.config = {'execution': {'hash_method': 'timestamp', 'stop_on_first_rerun': 'true'}}
         wf.run(plugin=plugin, plugin_args={'n_procs': 4})
-        #run twice to check if nothing is rerunning
+        # run twice to check if nothing is rerunning
         wf.run(plugin=plugin)

 if __name__ == '__main__':
     path, file = os.path.split(__file__)
     sys.path.insert(0, os.path.realpath(os.path.join(path, '..', 'examples')))
-    examples = {'fmri_fsl_reuse':['level1_workflow'],
-                'fmri_spm_nested':['level1','l2pipeline'],
-                #'fmri_spm_dartel':['level1','l2pipeline'],
-                #'fmri_fsl_feeds':['l1pipeline']
+    examples = {'fmri_fsl_reuse': ['level1_workflow'],
+                'fmri_spm_nested': ['level1', 'l2pipeline'],
+                # 'fmri_spm_dartel':['level1','l2pipeline'],
+                # 'fmri_fsl_feeds':['l1pipeline']
                 }
     example = sys.argv[1]
     plugin = sys.argv[2]
diff --git a/tools/setup.py b/tools/setup.py
index a82ae6ed5f..1b427eaba5 100644
--- a/tools/setup.py
+++ b/tools/setup.py
@@ -9,5 +9,5 @@
       author_email='nipy-devel@neuroimaging.scipy.org',
       url='http://nipy.sourceforge.net',
       scripts=['./nipype_nightly.py', './report_coverage.py']
-     )
+      )
diff --git a/tools/toollib.py b/tools/toollib.py
index 3f6f122ff3..089620de69 100644
--- a/tools/toollib.py
+++ b/tools/toollib.py
@@ -16,9 +16,11 @@
 # Utility functions

-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # Functions
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
+
+
 def sh(cmd):
     """Execute command in a subshell, return status code."""
     return check_call(cmd, shell=True)
@@ -26,9 +28,9 @@ def sh(cmd):

 def compile_tree():
     """Compile all Python files below current directory."""
-    vstr = '.'.join(map(str,sys.version_info[:2]))
+    vstr = '.'.join(map(str, sys.version_info[:2]))
     stat = os.system('python %s/lib/python%s/compileall.py .' %
-                     (sys.prefix,vstr))
+                     (sys.prefix, vstr))
     if stat:
         msg = '*** ERROR: Some Python files in tree do NOT compile! ***\n'
         msg += 'See messages above for the actual file that produced it.\n'