From 03dc215a1594a9a1c774535bd1d315274251b70d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 4 Jun 2014 16:12:09 +0200 Subject: [PATCH 01/42] Base GraftWorkflow --- nipype/pipeline/engine.py | 86 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index e3a7ad2757..d0c8c1a785 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1086,6 +1086,92 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=True, logger.debug('cross connection: ' + dotlist[-1]) return ('\n' + prefix).join(dotlist) +class GraftWorkflow(Workflow): + """ + A workflow ready to insert several internal workflows that share an + interface. + """ + def __init__(self, name, input_fields=[], output_fields=[], base_dir=None): + """Create a workflow object. + + Parameters + ---------- + name : alphanumeric string + unique identifier for the workflow + base_dir : string, optional + path to workflow storage + + """ + from nipype.interfaces.utility import IdentityInterface + + super(GraftWorkflow, self).__init__(name, base_dir) + self._children = dict() + + if isinstance(input_fields, str): + input_fields = [ input_fields ] + + if isinstance(output_fields, str): + output_fields = [ output_fields ] + + self._input_fields = input_fields + self._output_fields = output_fields + self._connection_list = [] + self._inputnode = Node(IdentityInterface(fields=input_fields), name='inputnode') + self._outputnode = Node(IdentityInterface(fields=output_fields), name='outputnode') + self._mergenodes = [] + self._consolidated = False + + def insert(self, workflow): + if workflow.name in self._children.keys(): + logger.debug('Trying to add an existing workflow to GraftWorkflow') + return False + + # Check that interfaces are satisfied + inputnames = workflow.inputs.inputnode.trait_names() + + for key in self._input_fields: + if not key in inputnames: + raise Exception('Input \'%s\' is not present in GraftWorkflow' % key ) + + logger.debug('Connecting %s to inputnode.%s' % (key,key) ) + self.connect([ ( self._inputnode, workflow, [ ('%s' % key, 'inputnode.%s' % key) ] ) ]) + + outputnames = workflow.outputs.outputnode.trait_names() + + for key in self._output_fields: + if not key in outputnames: + raise Exception('Output \'%s\' is not present in GraftWorkflow' % key ) + self._connection_list.append( (workflow, 'outputnode.%s' % key, self._outputnode, key) ) + + # Add to dictionary + self._children[workflow.name] = workflow + + logger.debug('Added %s to GraftWorkflow' % workflow.name) + + def _consolidate(self): + from nipype.interfaces.utility import Merge + + if not self._consolidated: + nchildren = len(self._children) + + for key in self._output_fields: + merge = Node(Merge(nchildren, no_flatten=True), name='merge_%s' % key ) + self.connect( merge, 'out', self._outputnode, key ) + + for i,(name,workflow) in enumerate(self._children.iteritems()): + self.connect(workflow, 'outputnode.%s' % key, + merge, 'in%01d' % (i+1) ) + self._consolidated = True + + + def write_graph(self, *args, **kwargs): + self._consolidate() + return super(GraftWorkflow,self).write_graph(*args, **kwargs) + + def run(self, *args, **kwargs): + self._consolidate() + return super(GraftWorkflow,self).run(*args, **kwargs) + class Node(WorkflowBase): """Wraps interface objects for use in pipeline From 1988441db5eb8a2d12b665ea3e36c3139ff6584b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 12 Jun 2014 16:55:45 +0200 Subject: [PATCH 02/42] Added 
get_graft_names --- nipype/pipeline/engine.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index d0c8c1a785..f5c59db2fa 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1148,6 +1148,9 @@ def insert(self, workflow): logger.debug('Added %s to GraftWorkflow' % workflow.name) + def get_graft_names(self): + return self._children.keys() + def _consolidate(self): from nipype.interfaces.utility import Merge From 6055c9133fa1bcba2c237136551f2623e7185bab Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 13 Jun 2014 19:08:26 +0200 Subject: [PATCH 03/42] added closing with insert --- nipype/pipeline/engine.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index f5c59db2fa..8a6d4b2da8 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1121,7 +1121,10 @@ def __init__(self, name, input_fields=[], output_fields=[], base_dir=None): self._mergenodes = [] self._consolidated = False - def insert(self, workflow): + def insert(self, workflow, consolidate=False): + if self._consolidated: + raise RuntimeError('The workflow is already consolidated') + if workflow.name in self._children.keys(): logger.debug('Trying to add an existing workflow to GraftWorkflow') return False @@ -1148,6 +1151,9 @@ def insert(self, workflow): logger.debug('Added %s to GraftWorkflow' % workflow.name) + if consolidate: + self._consolidate() + def get_graft_names(self): return self._children.keys() @@ -1165,6 +1171,7 @@ def _consolidate(self): self.connect(workflow, 'outputnode.%s' % key, merge, 'in%01d' % (i+1) ) self._consolidated = True + logger.debug('GraftWorkflow is now consolidated') def write_graph(self, *args, **kwargs): From 0807f8af158d921f1c3a64e1f4a308e44797b075 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 18 Jul 2014 17:51:33 +0200 Subject: [PATCH 04/42] Revising this feature --- nipype/pipeline/engine.py | 129 +++++++++++++++++++++++++++++++++++++- 1 file changed, 128 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 8a6d4b2da8..bfaf82dfd6 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1086,6 +1086,132 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=True, logger.debug('cross connection: ' + dotlist[-1]) return ('\n' + prefix).join(dotlist) + +class InterfacedWorkflow(Workflow): + """ + A workflow that automatically generates the input and output nodes to avoid + repetitive inputnode.* and outputnode.* connections. + """ + + @property + def input_names(self): + return self._input_names + + @property + def output_names(self): + return self._output_names + + def __init__(self, name, base_dir=None, input_names=[], output_names=[]): + from nipype.interfaces.utility import IdentityInterface + super(InterfacedWorkflow, self).__init__(name, base_dir) + + if isinstance(input_names, str): + input_names = [ input_names ] + + if isinstance(output_names, str): + output_names = [ output_names ] + + self._input_names = input_names + self._output_names = output_names + + self._inputnode = Node(IdentityInterface(fields=input_names), name='inputnode') + self._outputnode = Node(IdentityInterface(fields=output_names), name='outputnode') + + def connect(self, *args, **kwargs): + """ + Extends the connect method to accept void nodes for inputs and outputs. 
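+
+        For illustration, a minimal sketch of the call pattern this enables
+        (``wf`` and ``node`` are hypothetical): a ``None`` source stands for
+        the auto-generated input node, and a ``None`` destination for the
+        auto-generated output node.
+
+        >>> wf.connect([(None, node, [('in_file', 'in_file')]),
+        ...             (node, None, [('out_file', 'out_file')])])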
+ + Parameters + ---------- + + args : same as superclass + + """ + if len(args) == 1: + connection_list = args[0] + elif len(args) == 4: + connection_list = [(args[0], args[2], [(args[1], args[3])])] + else: + raise Exception('unknown set of parameters to connect function') + + fixed_conns = [] + + for i,conn in enumerate(connection_list): + if conn[0] is None: + fixed_conns.append((self._inputnode, conn[1], conn[2])) + for c in conn[2]: + logger.debug(('Automatically detected input %s will be connected to ' + '%s') % c) + + elif conn[1] is None: + fixed_conns.append((conn[0], self._outputnode, conn[2])) + for c in conn[2]: + logger.debug(('Automatically detected output %s will be connected to ' + '%s') % c) + else: + fixed_conns.append(conn) + + super(InterfacedWorkflow, self).connect(fixed_conns, **kwargs) + + def _get_inputs(self): + """ + Returns the inputs of a workflow + + This function does not return any input ports that are already + connected, nor internal input ports + """ + inputdict = TraitedSpec() + node = self._inputnode + + taken_inputs = [] + for _, _, d in self._graph.in_edges_iter(nbunch=node, + data=True): + for cd in d['connect']: + taken_inputs.append(cd[1]) + + for key, trait in node.inputs.items(): + if key not in taken_inputs: + inputdict.add_trait(key, traits.Trait(trait, node=node)) + value = getattr(node.inputs, key) + setattr(inputdict, key, value) + + inputdict.on_trait_change(self._set_input) + return inputdict + + def _get_outputs(self): + """ + Returns all possible output ports of the output node that are not + already connected + """ + outputdict = TraitedSpec() + node = self._outputnode + + if node.outputs: + for key, _ in node.outputs.items(): + outputdict.add_trait(key, traits.Any(node=node)) + setattr(outputdict, key, None) + return outputdict + +class AutoconnectWorkflow(Workflow): + """ + A workflow composed by InterfacedWorkflows that automatically connects + inputs to outputs + """ + + def connect(wfprev, wfnext): + if (not isinstance(wfprev, InterfacedWorkflow) or + not isinstance(wfnext, InterfacedWorkflow)): + raise RuntimeError('Connecting workflows are not Interfaced') + if wfprev.outputs.names() != wfnext.inputs.names(): + raise RuntimeError('Interface missmatch between workflows') + + # TODO: yes, to do + connection_list = [] + for key in wfprev.outputs.names(): + connection_list.append((None, None, []) + + + class GraftWorkflow(Workflow): """ A workflow ready to insert several internal workflows that share an @@ -1136,7 +1262,7 @@ def insert(self, workflow, consolidate=False): if not key in inputnames: raise Exception('Input \'%s\' is not present in GraftWorkflow' % key ) - logger.debug('Connecting %s to inputnode.%s' % (key,key) ) + logger.debug('Graft: connecting %s to inputnode.%s' % (key,key) ) self.connect([ ( self._inputnode, workflow, [ ('%s' % key, 'inputnode.%s' % key) ] ) ]) outputnames = workflow.outputs.outputnode.trait_names() @@ -1145,6 +1271,7 @@ def insert(self, workflow, consolidate=False): if not key in outputnames: raise Exception('Output \'%s\' is not present in GraftWorkflow' % key ) self._connection_list.append( (workflow, 'outputnode.%s' % key, self._outputnode, key) ) + logger.debug('Graft: connecting %s to %s' % (key,self._outputnode) ) # Add to dictionary self._children[workflow.name] = workflow From 25f460cc9da070aa7338d2b5f9243f0614fd1819 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 21 Jul 2014 18:59:32 +0200 Subject: [PATCH 05/42] Added InterfacedWorkflow --- nipype/pipeline/engine.py | 107 
+++++++++++++++++++++----------------- 1 file changed, 58 insertions(+), 49 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index bfaf82dfd6..8a22eaa37e 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -808,9 +808,9 @@ def _has_attr(self, parameter, subtype='in'): """Checks if a parameter is available as an input or output """ if subtype == 'in': - subobject = self.inputs + subobject = self._get_all_inputs() else: - subobject = self.outputs + subobject = self._get_all_outputs() attrlist = parameter.split('.') cur_out = subobject for attr in attrlist: @@ -824,9 +824,9 @@ def _get_parameter_node(self, parameter, subtype='in'): output parameter """ if subtype == 'in': - subobject = self.inputs + subobject = self._get_all_inputs() else: - subobject = self.outputs + subobject = self._get_all_outputs() attrlist = parameter.split('.') cur_out = subobject for attr in attrlist[:-1]: @@ -840,6 +840,9 @@ def _check_inputs(self, parameter): return self._has_attr(parameter, subtype='in') def _get_inputs(self): + return self._get_all_inputs() + + def _get_all_inputs(self): """Returns the inputs of a workflow This function does not return any input ports that are already @@ -849,7 +852,7 @@ def _get_inputs(self): for node in self._graph.nodes(): inputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): - setattr(inputdict, node.name, node.inputs) + setattr(inputdict, node.name, node._get_all_inputs()) else: taken_inputs = [] for _, _, d in self._graph.in_edges_iter(nbunch=node, @@ -869,13 +872,16 @@ def _get_inputs(self): return inputdict def _get_outputs(self): + return self._get_all_outputs() + + def _get_all_outputs(self): """Returns all possible output ports that are not already connected """ outputdict = TraitedSpec() for node in self._graph.nodes(): outputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): - setattr(outputdict, node.name, node.outputs) + setattr(outputdict, node.name, node._get_all_outputs()) elif node.outputs: outputs = TraitedSpec() for key, _ in node.outputs.items(): @@ -1128,30 +1134,47 @@ def connect(self, *args, **kwargs): """ if len(args) == 1: - connection_list = args[0] + conns = args[0] elif len(args) == 4: - connection_list = [(args[0], args[2], [(args[1], args[3])])] + conns = [(args[0], args[2], [(args[1], args[3])])] else: raise Exception('unknown set of parameters to connect function') - fixed_conns = [] + connection_list = [] + for conn in conns: + srcnode = conn[0] + dstnode = conn[1] + srcpref = '' + dstpref = '' + + if srcnode is None: + srcnode = self._inputnode + + if isinstance(srcnode, InterfacedWorkflow): + srcpref = 'outputnode.' + + if dstnode is None: + dstnode = self._outputnode + + if isinstance(dstnode, InterfacedWorkflow): + dstpref = 'inputnode.' 
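+
+            # At this point srcpref/dstpref hold the identity-node prefixes
+            # ('' when the endpoint is a plain node), so a bare (wf1, wf2)
+            # pair of InterfacedWorkflows expands below into port pairs like
+            # ('outputnode.out_file', 'inputnode.out_file').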
+ + if len(conn) == 2: + srcnames = srcnode.outputs.copyable_trait_names() + dstnames = dstnode.inputs.copyable_trait_names() + for n in srcnames: + if n not in dstnames: + raise RuntimeError(('Interface missmatch between workflows, %s port is not ' + 'present in destination node') % n) + ports = [(srcpref+k, dstpref+k) for k in srcnames] + else: + ports = conn[2] - for i,conn in enumerate(connection_list): - if conn[0] is None: - fixed_conns.append((self._inputnode, conn[1], conn[2])) - for c in conn[2]: - logger.debug(('Automatically detected input %s will be connected to ' - '%s') % c) + print ports - elif conn[1] is None: - fixed_conns.append((conn[0], self._outputnode, conn[2])) - for c in conn[2]: - logger.debug(('Automatically detected output %s will be connected to ' - '%s') % c) - else: - fixed_conns.append(conn) + connection_list.append((srcnode, dstnode, ports)) - super(InterfacedWorkflow, self).connect(fixed_conns, **kwargs) + super(InterfacedWorkflow, self).connect(connection_list, **kwargs) def _get_inputs(self): """ @@ -1192,25 +1215,6 @@ def _get_outputs(self): setattr(outputdict, key, None) return outputdict -class AutoconnectWorkflow(Workflow): - """ - A workflow composed by InterfacedWorkflows that automatically connects - inputs to outputs - """ - - def connect(wfprev, wfnext): - if (not isinstance(wfprev, InterfacedWorkflow) or - not isinstance(wfnext, InterfacedWorkflow)): - raise RuntimeError('Connecting workflows are not Interfaced') - if wfprev.outputs.names() != wfnext.inputs.names(): - raise RuntimeError('Interface missmatch between workflows') - - # TODO: yes, to do - connection_list = [] - for key in wfprev.outputs.names(): - connection_list.append((None, None, []) - - class GraftWorkflow(Workflow): """ @@ -1525,7 +1529,7 @@ def run(self, updatehash=False): else: self.config = merge_dict(deepcopy(config._sections), self.config) if not self._got_inputs: - self._get_inputs() + self._get_all_inputs() self._got_inputs = True outdir = self.output_dir() logger.info("Executing node %s in dir: %s" % (self._id, outdir)) @@ -1646,7 +1650,7 @@ def _parameterization_dir(self, param): def _get_hashval(self): """Return a hash of the input state""" if not self._got_inputs: - self._get_inputs() + self._get_all_inputs() self._got_inputs = True hashed_inputs, hashvalue = self.inputs.get_hashval( hash_method=self.config['execution']['hash_method']) @@ -1677,8 +1681,10 @@ def _save_hashfile(self, hashfile, hashed_inputs): else: logger.critical('Unable to open the file in write mode: %s' % hashfile) - def _get_inputs(self): + return self._get_all_inputs() + + def _get_all_inputs(self): """Retrieve inputs from pointers to results file This mechanism can be easily extended/replaced to retrieve data from @@ -2285,7 +2291,7 @@ def _set_mapnode_input(self, object, name, newvalue): def _get_hashval(self): """ Compute hash including iterfield lists.""" if not self._got_inputs: - self._get_inputs() + self._get_all_inputs() self._got_inputs = True self._check_iterfield() hashinputs = deepcopy(self._interface.inputs) @@ -2422,7 +2428,7 @@ def write_report(self, report_type=None, cwd=None): def get_subnodes(self): if not self._got_inputs: - self._get_inputs() + self._get_all_inputs() self._got_inputs = True self._check_iterfield() self.write_report(report_type='preexec', cwd=self.output_dir()) @@ -2430,17 +2436,20 @@ def get_subnodes(self): def num_subnodes(self): if not self._got_inputs: - self._get_inputs() + self._get_all_inputs() self._got_inputs = True self._check_iterfield() return 
len(filename_to_list(getattr(self.inputs, self.iterfield[0]))) def _get_inputs(self): + return self._get_all_inputs() + + def _get_all_inputs(self): old_inputs = self._inputs.get() self._inputs = self._create_dynamic_traits(self._interface.inputs, fields=self.iterfield) self._inputs.set(**old_inputs) - super(MapNode, self)._get_inputs() + super(MapNode, self)._get_all_inputs() def _check_iterfield(self): """Checks iterfield From 09b73277f69fd280f5953c5ebc3dd02ea65f7585 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 21 Jul 2014 19:40:41 +0200 Subject: [PATCH 06/42] Seems to be working! --- nipype/pipeline/engine.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 8a22eaa37e..83d4ff76fd 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1168,12 +1168,19 @@ def connect(self, *args, **kwargs): 'present in destination node') % n) ports = [(srcpref+k, dstpref+k) for k in srcnames] else: - ports = conn[2] + ports = [] + for c in conn[2]: + srcport = c[0] + dstport = c[1] + if len(srcpref) > 0 and not srcpref in srcport: + srcport = srcpref + srcport - print ports + if len(dstpref) > 0 and not dstpref in dstport: + dstport = dstpref + dstport - connection_list.append((srcnode, dstnode, ports)) + ports.append((srcport, dstport)) + connection_list.append((srcnode, dstnode, ports)) super(InterfacedWorkflow, self).connect(connection_list, **kwargs) def _get_inputs(self): From 87d983d67202b16dfa1af489abb5f6f9b14b151e Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 22 Jul 2014 17:51:59 +0200 Subject: [PATCH 07/42] Base InputMultiNode --- nipype/pipeline/engine.py | 448 +++++++++++++++++++++++++++++++------- 1 file changed, 364 insertions(+), 84 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 83d4ff76fd..0d185f3ec8 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -152,6 +152,12 @@ def __init__(self, name=None, base_dir=None): # for compatibility with node expansion using iterables self._id = self.name self._hierarchy = None + # this allows to connect several nodes to an InputMultiNode + self._multinput = False + + @property + def multinput(self): + return self._multinput @property def inputs(self): @@ -343,7 +349,7 @@ def connect(self, *args, **kwargs): # Currently datasource/sink/grabber.io modules # determine their inputs/outputs depending on # connection settings. Skip these modules in the check - if dest in connected_ports[destnode]: + if (not destnode.multinput and (dest in connected_ports[destnode])): raise Exception(""" Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already connected. @@ -1096,7 +1102,8 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=True, class InterfacedWorkflow(Workflow): """ A workflow that automatically generates the input and output nodes to avoid - repetitive inputnode.* and outputnode.* connections. + repetitive inputnode.* and outputnode.* connections and allow for fast pipeline + chaining. """ @property @@ -1108,15 +1115,31 @@ def output_names(self): return self._output_names def __init__(self, name, base_dir=None, input_names=[], output_names=[]): + """Create a workflow object. 
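+
+        A minimal construction sketch (the names are illustrative):
+
+        >>> wf = InterfacedWorkflow(name='mywf', input_names=['in_file'],
+        ...                         output_names=['out_file'])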
+ + Parameters + ---------- + name : alphanumeric string + unique identifier for the workflow + base_dir : string, optional + path to workflow storage + + """ from nipype.interfaces.utility import IdentityInterface super(InterfacedWorkflow, self).__init__(name, base_dir) if isinstance(input_names, str): input_names = [ input_names ] + if input_names is None or not input_names: + raise ValueError('InterfacedWorkflow input_names must be a non-empty list') + if isinstance(output_names, str): output_names = [ output_names ] + if output_names is None or not output_names: + raise ValueError('InterfacedWorkflow output_names must be a non-empty list') + self._input_names = input_names self._output_names = output_names @@ -1142,20 +1165,22 @@ def connect(self, *args, **kwargs): connection_list = [] for conn in conns: - srcnode = conn[0] - dstnode = conn[1] srcpref = '' dstpref = '' - if srcnode is None: + if (isinstance(conn[0], str) and conn[0][0:2] == 'in'): srcnode = self._inputnode + else: + srcnode = conn[0] + + if (isinstance(conn[1], str) and conn[1][0:3] == 'out'): + dstnode = self._outputnode + else: + dstnode = conn[1] if isinstance(srcnode, InterfacedWorkflow): srcpref = 'outputnode.' - if dstnode is None: - dstnode = self._outputnode - if isinstance(dstnode, InterfacedWorkflow): dstpref = 'inputnode.' @@ -1181,6 +1206,7 @@ def connect(self, *args, **kwargs): ports.append((srcport, dstport)) connection_list.append((srcnode, dstnode, ports)) + super(InterfacedWorkflow, self).connect(connection_list, **kwargs) def _get_inputs(self): @@ -1223,101 +1249,75 @@ def _get_outputs(self): return outputdict -class GraftWorkflow(Workflow): +class GraftWorkflow(InterfacedWorkflow): """ - A workflow ready to insert several internal workflows that share an - interface. + A workflow ready to insert several internal workflows that share + i/o interfaces, and are run on the same input data. + This workflow produces as many outputs as inserted subworkflows, and + an outputnode with all the outputs merged. """ - def __init__(self, name, input_fields=[], output_fields=[], base_dir=None): - """Create a workflow object. 
- - Parameters - ---------- - name : alphanumeric string - unique identifier for the workflow - base_dir : string, optional - path to workflow storage + _children = dict() + _outnodes = dict() + def __init__(self, name, base_dir=None, fields_from=None, input_names=[], output_names=[]): + """ + Initializes the workflow from an existing InterfacedWorkflow + """ + from nipype.interfaces.utility import IdentityInterface + fields_undefined = ((input_names is None) or (output_names is None)) + wf_undefined = (fields_from is None) + if wf_undefined and fields_undefined: + raise ValueError(('An existing InterfacedWorkflow or the input/output_names are ' + 'required to initialize the GraftWorkflow')) + if not wf_undefined: + if not isinstance(fields_from, InterfacedWorkflow): + raise TypeError('Reference workflow is not an InterfacedWorkflow.') + input_names = fields_from.input_names + output_names = fields_from.output_names + if (((input_names is None) or (not input_names)) and + ((output_names is None) or (not output_names))): + raise ValueError(('A GraftWorkflow cannot be initialized without specifying either a ' + 'fields_from workflow or i/o names lists')) + + super(GraftWorkflow, self).__init__(name=name, base_dir=base_dir, + input_names=input_names, + output_names=output_names) + + self._outputnode = InputMultiNode(IdentityInterface(fields=output_names), name='outputnode') + + def insert(self, workflow): + """ + Inserts an InterfacedWorkflow into the workflow """ from nipype.interfaces.utility import IdentityInterface - super(GraftWorkflow, self).__init__(name, base_dir) - self._children = dict() - - if isinstance(input_fields, str): - input_fields = [ input_fields ] + if not isinstance(workflow, InterfacedWorkflow): + raise TypeError('Only InterfacedWorkflows can be inserted in a GraftWorkflow.') - if isinstance(output_fields, str): - output_fields = [ output_fields ] + ckey = workflow.name + cid = len(self._children) - self._input_fields = input_fields - self._output_fields = output_fields - self._connection_list = [] - self._inputnode = Node(IdentityInterface(fields=input_fields), name='inputnode') - self._outputnode = Node(IdentityInterface(fields=output_fields), name='outputnode') - self._mergenodes = [] - self._consolidated = False + if ckey in self._children.keys(): + raise RuntimeError('Trying to add an existing workflow to GraftWorkflow') - def insert(self, workflow, consolidate=False): - if self._consolidated: - raise RuntimeError('The workflow is already consolidated') + self._children[ckey] = workflow + self._outnodes[ckey] = Node(IdentityInterface(fields=self.output_names), + name='out%02d' % cid) - if workflow.name in self._children.keys(): - logger.debug('Trying to add an existing workflow to GraftWorkflow') - return False # Check that interfaces are satisfied - inputnames = workflow.inputs.inputnode.trait_names() - - for key in self._input_fields: - if not key in inputnames: - raise Exception('Input \'%s\' is not present in GraftWorkflow' % key ) - - logger.debug('Graft: connecting %s to inputnode.%s' % (key,key) ) - self.connect([ ( self._inputnode, workflow, [ ('%s' % key, 'inputnode.%s' % key) ] ) ]) - - outputnames = workflow.outputs.outputnode.trait_names() - - for key in self._output_fields: - if not key in outputnames: - raise Exception('Output \'%s\' is not present in GraftWorkflow' % key ) - self._connection_list.append( (workflow, 'outputnode.%s' % key, self._outputnode, key) ) - logger.debug('Graft: connecting %s to %s' % (key,self._outputnode) ) + if 
((workflow.input_names != self.input_names) or + (workflow.output_names != self.output_names)): + raise RuntimeError('Workflow does not meet the general interface') - # Add to dictionary - self._children[workflow.name] = workflow - - logger.debug('Added %s to GraftWorkflow' % workflow.name) - - if consolidate: - self._consolidate() - - def get_graft_names(self): - return self._children.keys() - - def _consolidate(self): - from nipype.interfaces.utility import Merge - - if not self._consolidated: - nchildren = len(self._children) - - for key in self._output_fields: - merge = Node(Merge(nchildren, no_flatten=True), name='merge_%s' % key ) - self.connect( merge, 'out', self._outputnode, key ) - - for i,(name,workflow) in enumerate(self._children.iteritems()): - self.connect(workflow, 'outputnode.%s' % key, - merge, 'in%01d' % (i+1) ) - self._consolidated = True - logger.debug('GraftWorkflow is now consolidated') + self.connect([('in', workflow), (workflow, self._outnodes[ckey]), + (self._outnodes[ckey], 'out')]) def write_graph(self, *args, **kwargs): - self._consolidate() return super(GraftWorkflow,self).write_graph(*args, **kwargs) def run(self, *args, **kwargs): - self._consolidate() return super(GraftWorkflow,self).run(*args, **kwargs) @@ -1990,6 +1990,286 @@ def write_report(self, report_type=None, cwd=None): fp.close() +class InputMultiNode(Node): + """ + Wraps interface objects that join nodes into lists of inputs + """ + def __init__(self, interface, name, **kwargs): + """ + + Parameters + ---------- + interface : interface object + node specific interface (fsl.Bet(), spm.Coregister()) + name : alphanumeric string + node specific name + + See Node docstring for additional keyword arguments. + """ + + super(InputMultiNode, self).__init__(interface, name, **kwargs) + self._multinput = True + self.fields = self._interface.inputs.copyable_trait_names() + self._inputs = self._override_append_traits(self._interface.inputs, + self.fields) + self._inputs.on_trait_change(self._set_multinode_input) + self._got_inputs = False + + def _override_append_traits(self, basetraits, fields=None, nitems=None): + """ + Convert specific fields of a trait to accept multiple inputs + """ + output = DynamicTraitedSpec() + if fields is None: + fields = basetraits.copyable_trait_names() + for name, spec in basetraits.items(): + if name in fields and ((nitems is None) or (nitems > 1)): + logger.debug('adding multipath trait: %s' % name) + output.add_trait(name, InputMultiPath(spec.trait_type)) + else: + output.add_trait(name, traits.Trait(spec)) + setattr(output, name, Undefined) + value = getattr(basetraits, name) + if isdefined(value): + setattr(output, name, value) + value = getattr(output, name) + return output + + def set_input(self, parameter, val): + """ Set interface input value or nodewrapper attribute + + Priority goes to interface. 
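+
+        A minimal sketch (node and values are hypothetical):
+
+        >>> node.set_input('in_files', ['func01.nii', 'func02.nii'])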
+ """ + logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), + parameter, + str(val))) + self._set_multinode_input(self.inputs, parameter, deepcopy(val)) + + def _set_multinode_input(self, object, name, newvalue): + logger.debug('setting mapnode(%s) input: %s -> %s' % (str(self), + name, + str(newvalue))) + if name in self.fields: + setattr(self._inputs, name, newvalue) + else: + setattr(self._interface.inputs, name, newvalue) + + def _get_hashval(self): + """ Compute hash including iterfield lists.""" + if not self._got_inputs: + self._get_all_inputs() + self._got_inputs = True + self._check_iterfield() + hashinputs = deepcopy(self._interface.inputs) + for name in self.fields: + hashinputs.remove_trait(name) + hashinputs.add_trait( + name, + InputMultiPath( + self._interface.inputs.traits()[name].trait_type)) + logger.debug('setting hashinput %s-> %s' % + (name, getattr(self._inputs, name))) + setattr(hashinputs, name, getattr(self._inputs, name)) + hashed_inputs, hashvalue = hashinputs.get_hashval( + hash_method=self.config['execution']['hash_method']) + rm_extra = self.config['execution']['remove_unnecessary_outputs'] + if str2bool(rm_extra) and self.needed_outputs: + hashobject = md5() + hashobject.update(hashvalue) + sorted_outputs = sorted(self.needed_outputs) + hashobject.update(str(sorted_outputs)) + hashvalue = hashobject.hexdigest() + hashed_inputs['needed_outputs'] = sorted_outputs + return hashed_inputs, hashvalue + + @property + def inputs(self): + return self._inputs + + @property + def outputs(self): + if self._interface._outputs(): + return Bunch(self._interface._outputs().get()) + else: + return None + + def _make_nodes(self, cwd=None): + if cwd is None: + cwd = self.output_dir() + nitems = len(filename_to_list(getattr(self.inputs, self.fields[0]))) + for i in range(nitems): + nodename = '_' + self.name + str(i) + node = Node(deepcopy(self._interface), name=nodename) + node.overwrite = self.overwrite + node.run_without_submitting = self.run_without_submitting + node.plugin_args = self.plugin_args + node._interface.inputs.set( + **deepcopy(self._interface.inputs.get())) + for field in self.fields: + fieldvals = filename_to_list(getattr(self.inputs, field)) + logger.debug('setting input %d %s %s' % (i, field, + fieldvals[i])) + setattr(node.inputs, field, + fieldvals[i]) + node.config = self.config + node.base_dir = os.path.join(cwd, 'mapflow') + yield i, node + + def _node_runner(self, nodes, updatehash=False): + for i, node in nodes: + err = None + try: + node.run(updatehash=updatehash) + except Exception, err: + if str2bool(self.config['execution']['stop_on_first_crash']): + self._result = node.result + raise + yield i, node, err + + def _collate_results(self, nodes): + self._result = InterfaceResult(interface=[], runtime=[], + provenance=[], inputs=[], + outputs=self.outputs) + returncode = [] + for i, node, err in nodes: + self._result.runtime.insert(i, None) + if node.result: + if hasattr(node.result, 'runtime'): + self._result.interface.insert(i, node.result.interface) + self._result.inputs.insert(i, node.result.inputs) + self._result.runtime[i] = node.result.runtime + if hasattr(node.result, 'provenance'): + self._result.provenance.insert(i, node.result.provenance) + returncode.insert(i, err) + if self.outputs: + for key, _ in self.outputs.items(): + rm_extra = (self.config['execution'] + ['remove_unnecessary_outputs']) + if str2bool(rm_extra) and self.needed_outputs: + if key not in self.needed_outputs: + continue + values = getattr(self._result.outputs, 
key) + if not isdefined(values): + values = [] + if node.result.outputs: + values.insert(i, node.result.outputs.get()[key]) + else: + values.insert(i, None) + defined_vals = [isdefined(val) for val in values] + if any(defined_vals) and self._result.outputs: + setattr(self._result.outputs, key, values) + if returncode and any([code is not None for code in returncode]): + msg = [] + for i, code in enumerate(returncode): + if code is not None: + msg += ['Subnode %d failed' % i] + msg += ['Error:', str(code)] + raise Exception('Subnodes of node: %s failed:\n%s' % + (self.name, '\n'.join(msg))) + + def write_report(self, report_type=None, cwd=None): + if not str2bool(self.config['execution']['create_report']): + return + if report_type == 'preexec': + super(MapNode, self).write_report(report_type=report_type, cwd=cwd) + if report_type == 'postexec': + super(MapNode, self).write_report(report_type=report_type, cwd=cwd) + report_dir = os.path.join(cwd, '_report') + report_file = os.path.join(report_dir, 'report.rst') + fp = open(report_file, 'at') + fp.writelines(write_rst_header('Subnode reports', level=1)) + nitems = len(filename_to_list( + getattr(self.inputs, self.fields[0]))) + subnode_report_files = [] + for i in range(nitems): + nodename = '_' + self.name + str(i) + subnode_report_files.insert(i, 'subnode %d' % i + ' : ' + + os.path.join(cwd, + 'mapflow', + nodename, + '_report', + 'report.rst')) + fp.writelines(write_rst_list(subnode_report_files)) + fp.close() + + def get_subnodes(self): + if not self._got_inputs: + self._get_all_inputs() + self._got_inputs = True + self._check_iterfield() + self.write_report(report_type='preexec', cwd=self.output_dir()) + return [node for _, node in self._make_nodes()] + + def num_subnodes(self): + if not self._got_inputs: + self._get_all_inputs() + self._got_inputs = True + self._check_iterfield() + return len(filename_to_list(getattr(self.inputs, self.fields[0]))) + + def _get_inputs(self): + return self._get_all_inputs() + + def _get_all_inputs(self): + old_inputs = self._inputs.get() + self._inputs = self._create_dynamic_traits(self._interface.inputs, + fields=self.fields) + self._inputs.set(**old_inputs) + super(MapNode, self)._get_all_inputs() + + def _check_iterfield(self): + """Checks iterfield + + * iterfield must be in inputs + * number of elements must match across iterfield + """ + for iterfield in self.fields: + if not isdefined(getattr(self.inputs, iterfield)): + raise ValueError(("Input %s was not set but it is listed " + "in iterfields.") % iterfield) + if len(self.fields) > 1: + first_len = len(filename_to_list(getattr(self.inputs, + self.fields[0]))) + for iterfield in self.fields[1:]: + if first_len != len(filename_to_list(getattr(self.inputs, + iterfield))): + raise ValueError(("All iterfields of a MapNode have to " + "have the same length. %s") % + str(self.inputs)) + + def _run_interface(self, execute=True, updatehash=False): + """Run the mapnode interface + + This is primarily intended for serial execution of mapnode. 
A parallel + execution requires creation of new nodes that can be spawned + """ + old_cwd = os.getcwd() + cwd = self.output_dir() + os.chdir(cwd) + self._check_iterfield() + if execute: + nitems = len(filename_to_list(getattr(self.inputs, + self.fields[0]))) + nodenames = ['_' + self.name + str(i) for i in range(nitems)] + # map-reduce formulation + self._collate_results(self._node_runner(self._make_nodes(cwd), + updatehash=updatehash)) + self._save_results(self._result, cwd) + # remove any node directories no longer required + dirs2remove = [] + for path in glob(os.path.join(cwd, 'mapflow', '*')): + if os.path.isdir(path): + if path.split(os.path.sep)[-1] not in nodenames: + dirs2remove.append(path) + for path in dirs2remove: + shutil.rmtree(path) + else: + self._result = self._load_results(cwd) + os.chdir(old_cwd) + + + + class JoinNode(Node): """Wraps interface objects that join inputs into a list. From 4547ff644d1cf43cd787fd8abeb4f5d9ae831780 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 22 Jul 2014 19:16:29 +0200 Subject: [PATCH 08/42] WIP... --- nipype/pipeline/engine.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 0d185f3ec8..9d8ea877da 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1477,9 +1477,9 @@ def output_dir(self): def set_input(self, parameter, val): """ Set interface input value""" - logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), - parameter, - str(val))) + logger.debug('Node: setting nodelevel(%s) input %s = %s' % (str(self), + parameter, + str(val))) setattr(self.inputs, parameter, deepcopy(val)) def get_output(self, parameter): @@ -2040,13 +2040,13 @@ def set_input(self, parameter, val): Priority goes to interface. """ - logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), - parameter, - str(val))) + logger.debug('InputMultiNode: setting nodelevel(%s) input %s = %s' % (str(self), + parameter, + str(val))) self._set_multinode_input(self.inputs, parameter, deepcopy(val)) def _set_multinode_input(self, object, name, newvalue): - logger.debug('setting mapnode(%s) input: %s -> %s' % (str(self), + logger.debug('setting multinode(%s) input: %s -> %s' % (str(self), name, str(newvalue))) if name in self.fields: @@ -2243,13 +2243,16 @@ def _run_interface(self, execute=True, updatehash=False): This is primarily intended for serial execution of mapnode. A parallel execution requires creation of new nodes that can be spawned """ + old_cwd = os.getcwd() cwd = self.output_dir() os.chdir(cwd) self._check_iterfield() + logger.debug('InputMultiNode.execute=%s' % execute) if execute: nitems = len(filename_to_list(getattr(self.inputs, self.fields[0]))) + logger.debug('found %d items to iterate' % nitems) nodenames = ['_' + self.name + str(i) for i in range(nitems)] # map-reduce formulation self._collate_results(self._node_runner(self._make_nodes(cwd), @@ -2561,9 +2564,9 @@ def set_input(self, parameter, val): Priority goes to interface. 
""" - logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), - parameter, - str(val))) + logger.debug('MapNode: setting nodelevel(%s) input %s = %s' % (str(self), + parameter, + str(val))) self._set_mapnode_input(self.inputs, parameter, deepcopy(val)) def _set_mapnode_input(self, object, name, newvalue): From a34868401b4fb2bb4847ea575b6b7ee58f9ef4fe Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 22 Jul 2014 23:04:31 +0200 Subject: [PATCH 09/42] fixed some PEP8 errors --- nipype/pipeline/engine.py | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 9d8ea877da..a20a835297 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1259,7 +1259,8 @@ class GraftWorkflow(InterfacedWorkflow): _children = dict() _outnodes = dict() - def __init__(self, name, base_dir=None, fields_from=None, input_names=[], output_names=[]): + def __init__(self, name, base_dir=None, fields_from=None, + input_names=[], output_names=[]): """ Initializes the workflow from an existing InterfacedWorkflow """ @@ -1267,23 +1268,27 @@ def __init__(self, name, base_dir=None, fields_from=None, input_names=[], output fields_undefined = ((input_names is None) or (output_names is None)) wf_undefined = (fields_from is None) if wf_undefined and fields_undefined: - raise ValueError(('An existing InterfacedWorkflow or the input/output_names are ' - 'required to initialize the GraftWorkflow')) + raise ValueError(('An existing InterfacedWorkflow or the in/output' + ' names are required to initialize the ' + 'GraftWorkflow')) if not wf_undefined: if not isinstance(fields_from, InterfacedWorkflow): - raise TypeError('Reference workflow is not an InterfacedWorkflow.') + raise TypeError('Workflow is not an InterfacedWorkflow.') input_names = fields_from.input_names output_names = fields_from.output_names if (((input_names is None) or (not input_names)) and - ((output_names is None) or (not output_names))): - raise ValueError(('A GraftWorkflow cannot be initialized without specifying either a ' - 'fields_from workflow or i/o names lists')) + ((output_names is None) or (not output_names))): + raise ValueError(('A GraftWorkflow cannot be initialized without ' + 'specifying either a fields_from workflow or i/o' + ' names lists')) super(GraftWorkflow, self).__init__(name=name, base_dir=base_dir, input_names=input_names, output_names=output_names) - self._outputnode = InputMultiNode(IdentityInterface(fields=output_names), name='outputnode') + self._outputnode = InputMultiNode(IdentityInterface( + fields=output_names), + name='outputnode') def insert(self, workflow): """ @@ -1292,33 +1297,34 @@ def insert(self, workflow): from nipype.interfaces.utility import IdentityInterface if not isinstance(workflow, InterfacedWorkflow): - raise TypeError('Only InterfacedWorkflows can be inserted in a GraftWorkflow.') + raise TypeError(('Only InterfacedWorkflows can be inserted in ' + 'a GraftWorkflow.')) ckey = workflow.name cid = len(self._children) if ckey in self._children.keys(): - raise RuntimeError('Trying to add an existing workflow to GraftWorkflow') + raise RuntimeError(('Trying to add an existing workflow to ' + 'GraftWorkflow')) self._children[ckey] = workflow - self._outnodes[ckey] = Node(IdentityInterface(fields=self.output_names), + self._outnodes[ckey] = Node(IdentityInterface( + fields=self.output_names), name='out%02d' % cid) - # Check that interfaces are satisfied if ((workflow.input_names != 
self.input_names) or - (workflow.output_names != self.output_names)): + (workflow.output_names != self.output_names)): raise RuntimeError('Workflow does not meet the general interface') self.connect([('in', workflow), (workflow, self._outnodes[ckey]), (self._outnodes[ckey], 'out')]) - def write_graph(self, *args, **kwargs): - return super(GraftWorkflow,self).write_graph(*args, **kwargs) + return super(GraftWorkflow, self).write_graph(*args, **kwargs) def run(self, *args, **kwargs): - return super(GraftWorkflow,self).run(*args, **kwargs) + return super(GraftWorkflow, self).run(*args, **kwargs) class Node(WorkflowBase): From 001bb6d2c071989265df9f32857582714092aa71 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 23 Jul 2014 00:12:21 +0200 Subject: [PATCH 10/42] WIP --- nipype/pipeline/engine.py | 78 ++++++++++++++++++++++++++++----------- 1 file changed, 56 insertions(+), 22 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index a20a835297..14d962954c 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1102,8 +1102,8 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=True, class InterfacedWorkflow(Workflow): """ A workflow that automatically generates the input and output nodes to avoid - repetitive inputnode.* and outputnode.* connections and allow for fast pipeline - chaining. + repetitive inputnode.* and outputnode.* connections and allow for agile + pipeline chaining. """ @property @@ -1129,22 +1129,26 @@ def __init__(self, name, base_dir=None, input_names=[], output_names=[]): super(InterfacedWorkflow, self).__init__(name, base_dir) if isinstance(input_names, str): - input_names = [ input_names ] + input_names = [input_names] if input_names is None or not input_names: - raise ValueError('InterfacedWorkflow input_names must be a non-empty list') + raise ValueError(('InterfacedWorkflow input_names must be a ' + 'non-empty list')) if isinstance(output_names, str): - output_names = [ output_names ] + output_names = [output_names] if output_names is None or not output_names: - raise ValueError('InterfacedWorkflow output_names must be a non-empty list') + raise ValueError(('InterfacedWorkflow output_names must be a ' + 'non-empty list')) self._input_names = input_names self._output_names = output_names - self._inputnode = Node(IdentityInterface(fields=input_names), name='inputnode') - self._outputnode = Node(IdentityInterface(fields=output_names), name='outputnode') + self._inputnode = Node(IdentityInterface(fields=input_names), + name='inputnode') + self._outputnode = Node(IdentityInterface(fields=output_names), + name='outputnode') def connect(self, *args, **kwargs): """ @@ -1189,7 +1193,8 @@ def connect(self, *args, **kwargs): dstnames = dstnode.inputs.copyable_trait_names() for n in srcnames: if n not in dstnames: - raise RuntimeError(('Interface missmatch between workflows, %s port is not ' + raise RuntimeError(('Interface missmatch between ' + 'workflows, %s port is not ' 'present in destination node') % n) ports = [(srcpref+k, dstpref+k) for k in srcnames] else: @@ -1255,6 +1260,26 @@ class GraftWorkflow(InterfacedWorkflow): i/o interfaces, and are run on the same input data. This workflow produces as many outputs as inserted subworkflows, and an outputnode with all the outputs merged. 
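+    Each inserted workflow is exposed through a dedicated identity node
+    (out00, out01, ...) before being gathered by the common outputnode.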
+
+    Example
+    -------
+    >>> import nipype.pipeline.engine as npe
+    >>> import nipype.interfaces.utility as niu
+    >>> from nipype.algorithms.metrics import ErrorMap
+    >>> from nipype.interfaces.utility import IdentityInterface
+    >>> wf1 = npe.InterfacedWorkflow(name='testname1',
+    ...                              input_names=['in_ref', 'in_tst', 'mask'],
+    ...                              output_names=['out_map'])
+    >>> errormap = npe.Node(ErrorMap(), name='internalnode')
+    >>> wf1.connect([('in', errormap), (errormap, 'out')])
+    >>> wf2 = wf1.clone(name='testname2')
+    >>> wf = npe.GraftWorkflow(name='graft', fields_from=wf1)
+    >>> wf.insert(wf1)
+    >>> wf.insert(wf2)
+    >>> wf.inputs.in_ref = 'reference.nii'
+    >>> wf.inputs.in_tst = 'test.nii'
+    >>> wf.write_graph(format='pdf')  # doctest: +SKIP
+    >>> wf.run()  # doctest: +SKIP
     """
     _children = dict()
     _outnodes = dict()
 
@@ -1286,10 +1311,6 @@ def __init__(self, name, base_dir=None, fields_from=None,
                                             input_names=input_names,
                                             output_names=output_names)
 
-        self._outputnode = InputMultiNode(IdentityInterface(
-            fields=output_names),
-            name='outputnode')
-
     def insert(self, workflow):
         """
         Inserts an InterfacedWorkflow into the workflow
@@ -1320,11 +1341,14 @@ def insert(self, workflow):
         self.connect([('in', workflow), (workflow, self._outnodes[ckey]),
                       (self._outnodes[ckey], 'out')])
 
-    def write_graph(self, *args, **kwargs):
-        return super(GraftWorkflow, self).write_graph(*args, **kwargs)
+    #def write_graph(self, *args, **kwargs):
+    #    return super(GraftWorkflow, self).write_graph(*args, **kwargs)
 
     def run(self, *args, **kwargs):
-        return super(GraftWorkflow, self).run(*args, **kwargs)
+        logger.debug('Starting GraftWorkflow %s' % self)
+        runtime = super(GraftWorkflow, self).run(*args, **kwargs)
+        logger.debug('Ending GraftWorkflow %s' % self)
+        return runtime
 
 
 class Node(WorkflowBase):
@@ -1694,6 +1718,7 @@ def _save_hashfile(self, hashfile, hashed_inputs):
         else:
             logger.critical('Unable to open the file in write mode: %s' %
                             hashfile)
+
     def _get_inputs(self):
         return self._get_all_inputs()
 
@@ -1999,6 +2024,16 @@ def write_report(self, report_type=None, cwd=None):
 class InputMultiNode(Node):
     """
     Wraps interface objects that join nodes into lists of inputs
+
+    Examples
+    --------
+
+    >>> from nipype import InputMultiNode
+    >>> from nipype.interfaces import spm
+    >>> realign = InputMultiNode(spm.Realign(), 'realign')
+    >>> realign.inputs.in_files = 'functional.nii'
+    >>> realign.inputs.register_to_mean = True
+    >>> realign.run()  # doctest: +SKIP
     """
     def __init__(self, interface, name, **kwargs):
         """
@@ -2046,15 +2081,13 @@ def set_input(self, parameter, val):
 
         Priority goes to interface.
         """
-        logger.debug('InputMultiNode: setting nodelevel(%s) input %s = %s' % (str(self),
-                                                                              parameter,
-                                                                              str(val)))
+        logger.debug(('InputMultiNode: setting nodelevel(%s) input '
+                      '%s = %s') % (str(self), parameter, str(val)))
         self._set_multinode_input(self.inputs, parameter, deepcopy(val))
 
     def _set_multinode_input(self, object, name, newvalue):
-        logger.debug('setting multinode(%s) input: %s -> %s' % (str(self),
-                                                                name,
-                                                                str(newvalue)))
+        logger.debug(('setting multinode(%s) input: %s ->'
+                      ' %s') % (str(self), name, str(newvalue)))
         if name in self.fields:
             setattr(self._inputs, name, newvalue)
         else:
@@ -2504,6 +2537,7 @@ def _slot_value(self, field, index):
                            " to hold the %s value at index %d: %s"
                            % (self, slot_field, field, index, e))
 
+
 class MapNode(Node):
     """Wraps interface objects that need to be iterated on a list of inputs.
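By this point in the series, InterfacedWorkflow supports chaining stages
port-to-port without spelling out inputnode/outputnode connections. A minimal
sketch of that usage, assuming matching input/output interfaces (every name
below is hypothetical, not part of the patches):

    import nipype.pipeline.engine as npe
    from nipype.interfaces.utility import Function

    def _double(x):
        return 2 * x

    # two stages exposing the same single-port interface
    stage1 = npe.InterfacedWorkflow(name='stage1', input_names=['x'],
                                    output_names=['x'])
    double = npe.Node(Function(input_names=['x'], output_names=['x'],
                               function=_double), name='double')
    stage1.connect([('in', double), (double, 'out')])
    stage2 = stage1.clone(name='stage2')

    outer = npe.InterfacedWorkflow(name='outer', input_names=['x'],
                                   output_names=['x'])
    # matching i/o names let consecutive stages be wired with bare 2-tuples
    outer.connect([('in', stage1), (stage1, stage2), (stage2, 'out')])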
From cd8919d34c80c5e782f26e1973a2c9609ef1a216 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Thu, 24 Jul 2014 10:31:00 +0200
Subject: [PATCH 11/42] Finished merge

---
 nipype/interfaces/utility.py |  84 +++++++++
 nipype/pipeline/engine.py    | 334 ++---------------------------------
 2 files changed, 100 insertions(+), 318 deletions(-)

diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py
index 005eb4a501..732fe18bd9 100644
--- a/nipype/interfaces/utility.py
+++ b/nipype/interfaces/utility.py
@@ -464,3 +464,87 @@ def _run_interface(self, runtime):
         assert_equal(data1, data2)
 
         return runtime
+
+class CollateInterfaceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
+    _outputs = traits.Dict(traits.Any, value={}, usedefault=True)
+
+    def __setattr__(self, key, value):
+        if key not in self.copyable_trait_names():
+            if not isdefined(value):
+                super(CollateInterfaceInputSpec, self).__setattr__(key, value)
+            self._outputs[key] = value
+        else:
+            if key in self._outputs:
+                self._outputs[key] = value
+            super(CollateInterfaceInputSpec, self).__setattr__(key, value)
+
+class CollateInterface(IOBase):
+    """
+    A simple interface to multiplex inputs into a single output set. Each
+    input channel is identified by the prefix of its fields. To avoid
+    inconsistencies, the output fields must be defined beforehand, at initialization.
+
+    Example
+    -------
+
+    >>> from nipype.interfaces.utility import CollateInterface
+    >>> coll = CollateInterface(fields=['file', 'miscdata'])
+    >>> coll.inputs.src1_file = 'scores.csv'
+    >>> coll.inputs.src2_file = 'scores2.csv'
+    >>> coll.inputs.src1_miscdata = 1.0
+    >>> coll.inputs.src2_miscdata = 2.0
+    >>> coll.run() # doctest: +SKIP
+    """
+
+    input_spec = CollateInterfaceInputSpec
+    output_spec = DynamicTraitedSpec
+
+    def __init__(self, fields=None, fill_missing=False, **kwargs):
+        super(CollateInterface, self).__init__(**kwargs)
+
+        if fields is None or not fields:
+            raise ValueError('CollateInterface fields must be a non-empty list')
+        # Each input must be in the fields.
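+        # Inputs are expected as '<prefix>_<field>': e.g. 'src1_file' and
+        # 'src2_file' both feed the 'file' output, one slot per prefix,
+        # sorted by prefix name.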
+ self._fields = fields + self._fill_missing = fill_missing + + def _add_output_traits(self, base): + undefined_traits = {} + for key in self._fields: + base.add_trait(key, traits.Any) + undefined_traits[key] = Undefined + base.trait_set(trait_change_notify=False, **undefined_traits) + return base + + def _list_outputs(self): + #manual mandatory inputs check + valuedict = dict( (key, {}) for key in self._fields) + nodekeys = [] + + for inputkey, inputval in self.inputs._outputs.items(): + for key in self._fields: + if inputkey.endswith(key): + nodekey = inputkey[::-1].replace(key[::-1], '', 1)[::-1] + nodekeys.append(nodekey) + + if nodekey in valuedict[key].keys(): + msg = ('Trying to add field from existing node') + raise ValueError(msg) + valuedict[key][nodekey] = inputval + + nodekeys = sorted(set(nodekeys)) + outputs = self._outputs().get() + for key in self._fields: + outputs[key] = [] + for nk in nodekeys: + + if nk in valuedict[key]: + val = valuedict[key][nk] + else: + if self._fill_missing: + val = None + else: + raise RuntimeError('Input missing for field to collate.') + outputs[key].append(val) + + return outputs diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 14d962954c..8716c8ef4b 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -152,12 +152,6 @@ def __init__(self, name=None, base_dir=None): # for compatibility with node expansion using iterables self._id = self.name self._hierarchy = None - # this allows to connect several nodes to an InputMultiNode - self._multinput = False - - @property - def multinput(self): - return self._multinput @property def inputs(self): @@ -196,6 +190,8 @@ def _check_outputs(self, parameter): return hasattr(self.outputs, parameter) def _check_inputs(self, parameter): + if hasattr(self.inputs,'_outputs'): + return True return hasattr(self.inputs, parameter) def _verify_name(self, name): @@ -349,11 +345,11 @@ def connect(self, *args, **kwargs): # Currently datasource/sink/grabber.io modules # determine their inputs/outputs depending on # connection settings. Skip these modules in the check - if (not destnode.multinput and (dest in connected_ports[destnode])): - raise Exception(""" -Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already -connected. 
-""" % (srcnode, source, destnode, dest, dest, destnode)) + if (dest in connected_ports[destnode]): + msg = ('Trying to connect %s:%s to %s:%s but input \'%s\' of ' + 'node \'%s\' is already connected.') % (srcnode, source, + destnode, dest, dest, destnode) + raise Exception(msg) if not (hasattr(destnode, '_interface') and '.io' in str(destnode._interface.__class__)): if not destnode._check_inputs(dest): @@ -1289,7 +1285,7 @@ def __init__(self, name, base_dir=None, fields_from=None, """ Initializes the workflow from an existing InterfacedWorkflow """ - from nipype.interfaces.utility import IdentityInterface + from nipype.interfaces.utility import IdentityInterface, CollateInterface fields_undefined = ((input_names is None) or (output_names is None)) wf_undefined = (fields_from is None) if wf_undefined and fields_undefined: @@ -1310,6 +1306,7 @@ def __init__(self, name, base_dir=None, fields_from=None, super(GraftWorkflow, self).__init__(name=name, base_dir=base_dir, input_names=input_names, output_names=output_names) + self._outputnode = Node(CollateInterface(fields=output_names), name='outputnode') def insert(self, workflow): """ @@ -1328,10 +1325,10 @@ def insert(self, workflow): raise RuntimeError(('Trying to add an existing workflow to ' 'GraftWorkflow')) + childname = 'out%02d' % cid self._children[ckey] = workflow - self._outnodes[ckey] = Node(IdentityInterface( - fields=self.output_names), - name='out%02d' % cid) + self._outnodes[ckey] = Node(IdentityInterface(fields=self.output_names), + name=childname) # Check that interfaces are satisfied if ((workflow.input_names != self.input_names) or @@ -1339,16 +1336,8 @@ def insert(self, workflow): raise RuntimeError('Workflow does not meet the general interface') self.connect([('in', workflow), (workflow, self._outnodes[ckey]), - (self._outnodes[ckey], 'out')]) - - #def write_graph(self, *args, **kwargs): - # return super(GraftWorkflow, self).write_graph(*args, **kwargs) - - def run(self, *args, **kwargs): - logger.debug('Starting GraftWorkflow %s' % self) - runtime = super(GraftWorkflow, self).run(*args, **kwargs) - logger.debug('Ending GraftWorkflow %s' % self) - return runtime + (self._outnodes[ckey], 'out', + [(key, '%s_%s' % (childname, key)) for key in self.output_names])]) class Node(WorkflowBase): @@ -1728,9 +1717,9 @@ def _get_all_inputs(self): This mechanism can be easily extended/replaced to retrieve data from other data sources (e.g., XNAT, HTTP, etc.,.) 
""" - logger.debug('Setting node inputs') + logger.debug('Setting node %s inputs' % self.name) for key, info in self.input_source.items(): - logger.debug('input: %s' % key) + logger.debug('input: %s, info: %s' % (key, str(info))) results_file = info[0] logger.debug('results file: %s' % results_file) results = loadpkl(results_file) @@ -2021,297 +2010,6 @@ def write_report(self, report_type=None, cwd=None): fp.close() -class InputMultiNode(Node): - """ - Wraps interface objects that join nodes into lists of inputs - - Examples - -------- - - >>> from nipype import InputMultiNode - >>> from nipype.interfaces.fsl import Threshold - >>> realign = InputMultiNode(spm.Realign(), 'realign') - >>> realign.inputs.in_files = 'functional.nii' - >>> realign.inputs.register_to_mean = True - >>> realign.run() # doctest: +SKIP - """ - def __init__(self, interface, name, **kwargs): - """ - - Parameters - ---------- - interface : interface object - node specific interface (fsl.Bet(), spm.Coregister()) - name : alphanumeric string - node specific name - - See Node docstring for additional keyword arguments. - """ - - super(InputMultiNode, self).__init__(interface, name, **kwargs) - self._multinput = True - self.fields = self._interface.inputs.copyable_trait_names() - self._inputs = self._override_append_traits(self._interface.inputs, - self.fields) - self._inputs.on_trait_change(self._set_multinode_input) - self._got_inputs = False - - def _override_append_traits(self, basetraits, fields=None, nitems=None): - """ - Convert specific fields of a trait to accept multiple inputs - """ - output = DynamicTraitedSpec() - if fields is None: - fields = basetraits.copyable_trait_names() - for name, spec in basetraits.items(): - if name in fields and ((nitems is None) or (nitems > 1)): - logger.debug('adding multipath trait: %s' % name) - output.add_trait(name, InputMultiPath(spec.trait_type)) - else: - output.add_trait(name, traits.Trait(spec)) - setattr(output, name, Undefined) - value = getattr(basetraits, name) - if isdefined(value): - setattr(output, name, value) - value = getattr(output, name) - return output - - def set_input(self, parameter, val): - """ Set interface input value or nodewrapper attribute - - Priority goes to interface. 
- """ - logger.debug(('InputMultiNode: setting nodelevel(%s) input ' - '%s = %s') % (str(self), parameter, str(val))) - self._set_multinode_input(self.inputs, parameter, deepcopy(val)) - - def _set_multinode_input(self, object, name, newvalue): - logger.debug(('setting multinode(%s) input: %s ->' - ' %s') % (str(self), name, str(newvalue))) - if name in self.fields: - setattr(self._inputs, name, newvalue) - else: - setattr(self._interface.inputs, name, newvalue) - - def _get_hashval(self): - """ Compute hash including iterfield lists.""" - if not self._got_inputs: - self._get_all_inputs() - self._got_inputs = True - self._check_iterfield() - hashinputs = deepcopy(self._interface.inputs) - for name in self.fields: - hashinputs.remove_trait(name) - hashinputs.add_trait( - name, - InputMultiPath( - self._interface.inputs.traits()[name].trait_type)) - logger.debug('setting hashinput %s-> %s' % - (name, getattr(self._inputs, name))) - setattr(hashinputs, name, getattr(self._inputs, name)) - hashed_inputs, hashvalue = hashinputs.get_hashval( - hash_method=self.config['execution']['hash_method']) - rm_extra = self.config['execution']['remove_unnecessary_outputs'] - if str2bool(rm_extra) and self.needed_outputs: - hashobject = md5() - hashobject.update(hashvalue) - sorted_outputs = sorted(self.needed_outputs) - hashobject.update(str(sorted_outputs)) - hashvalue = hashobject.hexdigest() - hashed_inputs['needed_outputs'] = sorted_outputs - return hashed_inputs, hashvalue - - @property - def inputs(self): - return self._inputs - - @property - def outputs(self): - if self._interface._outputs(): - return Bunch(self._interface._outputs().get()) - else: - return None - - def _make_nodes(self, cwd=None): - if cwd is None: - cwd = self.output_dir() - nitems = len(filename_to_list(getattr(self.inputs, self.fields[0]))) - for i in range(nitems): - nodename = '_' + self.name + str(i) - node = Node(deepcopy(self._interface), name=nodename) - node.overwrite = self.overwrite - node.run_without_submitting = self.run_without_submitting - node.plugin_args = self.plugin_args - node._interface.inputs.set( - **deepcopy(self._interface.inputs.get())) - for field in self.fields: - fieldvals = filename_to_list(getattr(self.inputs, field)) - logger.debug('setting input %d %s %s' % (i, field, - fieldvals[i])) - setattr(node.inputs, field, - fieldvals[i]) - node.config = self.config - node.base_dir = os.path.join(cwd, 'mapflow') - yield i, node - - def _node_runner(self, nodes, updatehash=False): - for i, node in nodes: - err = None - try: - node.run(updatehash=updatehash) - except Exception, err: - if str2bool(self.config['execution']['stop_on_first_crash']): - self._result = node.result - raise - yield i, node, err - - def _collate_results(self, nodes): - self._result = InterfaceResult(interface=[], runtime=[], - provenance=[], inputs=[], - outputs=self.outputs) - returncode = [] - for i, node, err in nodes: - self._result.runtime.insert(i, None) - if node.result: - if hasattr(node.result, 'runtime'): - self._result.interface.insert(i, node.result.interface) - self._result.inputs.insert(i, node.result.inputs) - self._result.runtime[i] = node.result.runtime - if hasattr(node.result, 'provenance'): - self._result.provenance.insert(i, node.result.provenance) - returncode.insert(i, err) - if self.outputs: - for key, _ in self.outputs.items(): - rm_extra = (self.config['execution'] - ['remove_unnecessary_outputs']) - if str2bool(rm_extra) and self.needed_outputs: - if key not in self.needed_outputs: - continue - values = 
getattr(self._result.outputs, key) - if not isdefined(values): - values = [] - if node.result.outputs: - values.insert(i, node.result.outputs.get()[key]) - else: - values.insert(i, None) - defined_vals = [isdefined(val) for val in values] - if any(defined_vals) and self._result.outputs: - setattr(self._result.outputs, key, values) - if returncode and any([code is not None for code in returncode]): - msg = [] - for i, code in enumerate(returncode): - if code is not None: - msg += ['Subnode %d failed' % i] - msg += ['Error:', str(code)] - raise Exception('Subnodes of node: %s failed:\n%s' % - (self.name, '\n'.join(msg))) - - def write_report(self, report_type=None, cwd=None): - if not str2bool(self.config['execution']['create_report']): - return - if report_type == 'preexec': - super(MapNode, self).write_report(report_type=report_type, cwd=cwd) - if report_type == 'postexec': - super(MapNode, self).write_report(report_type=report_type, cwd=cwd) - report_dir = os.path.join(cwd, '_report') - report_file = os.path.join(report_dir, 'report.rst') - fp = open(report_file, 'at') - fp.writelines(write_rst_header('Subnode reports', level=1)) - nitems = len(filename_to_list( - getattr(self.inputs, self.fields[0]))) - subnode_report_files = [] - for i in range(nitems): - nodename = '_' + self.name + str(i) - subnode_report_files.insert(i, 'subnode %d' % i + ' : ' + - os.path.join(cwd, - 'mapflow', - nodename, - '_report', - 'report.rst')) - fp.writelines(write_rst_list(subnode_report_files)) - fp.close() - - def get_subnodes(self): - if not self._got_inputs: - self._get_all_inputs() - self._got_inputs = True - self._check_iterfield() - self.write_report(report_type='preexec', cwd=self.output_dir()) - return [node for _, node in self._make_nodes()] - - def num_subnodes(self): - if not self._got_inputs: - self._get_all_inputs() - self._got_inputs = True - self._check_iterfield() - return len(filename_to_list(getattr(self.inputs, self.fields[0]))) - - def _get_inputs(self): - return self._get_all_inputs() - - def _get_all_inputs(self): - old_inputs = self._inputs.get() - self._inputs = self._create_dynamic_traits(self._interface.inputs, - fields=self.fields) - self._inputs.set(**old_inputs) - super(MapNode, self)._get_all_inputs() - - def _check_iterfield(self): - """Checks iterfield - - * iterfield must be in inputs - * number of elements must match across iterfield - """ - for iterfield in self.fields: - if not isdefined(getattr(self.inputs, iterfield)): - raise ValueError(("Input %s was not set but it is listed " - "in iterfields.") % iterfield) - if len(self.fields) > 1: - first_len = len(filename_to_list(getattr(self.inputs, - self.fields[0]))) - for iterfield in self.fields[1:]: - if first_len != len(filename_to_list(getattr(self.inputs, - iterfield))): - raise ValueError(("All iterfields of a MapNode have to " - "have the same length. %s") % - str(self.inputs)) - - def _run_interface(self, execute=True, updatehash=False): - """Run the mapnode interface - - This is primarily intended for serial execution of mapnode. 
A parallel - execution requires creation of new nodes that can be spawned - """ - - old_cwd = os.getcwd() - cwd = self.output_dir() - os.chdir(cwd) - self._check_iterfield() - logger.debug('InputMultiNode.execute=%s' % execute) - if execute: - nitems = len(filename_to_list(getattr(self.inputs, - self.fields[0]))) - logger.debug('found %d items to iterate' % nitems) - nodenames = ['_' + self.name + str(i) for i in range(nitems)] - # map-reduce formulation - self._collate_results(self._node_runner(self._make_nodes(cwd), - updatehash=updatehash)) - self._save_results(self._result, cwd) - # remove any node directories no longer required - dirs2remove = [] - for path in glob(os.path.join(cwd, 'mapflow', '*')): - if os.path.isdir(path): - if path.split(os.path.sep)[-1] not in nodenames: - dirs2remove.append(path) - for path in dirs2remove: - shutil.rmtree(path) - else: - self._result = self._load_results(cwd) - os.chdir(old_cwd) - - - - class JoinNode(Node): """Wraps interface objects that join inputs into a list. From d1ea93e151e0e67514b96776fab3f1208da99698 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 10:53:43 +0200 Subject: [PATCH 12/42] Added doctest --- nipype/interfaces/utility.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 732fe18bd9..88542eaddb 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -489,13 +489,16 @@ class CollateInterface(IOBase): >>> from nipype.interfaces.utility import CollateInterface >>> coll = CollateInterface(fields=['file','miscdata']) - >>> coll.inputs.src1_file = 'scores.csv' - >>> coll.inputs.src2_file = 'scores2.csv' + >>> coll.inputs.src1_file = 'exfile1.csv' + >>> coll.inputs.src2_file = 'exfile2.csv' >>> coll.inputs.src1_miscdata = 1.0 >>> coll.inputs.src2_miscdata = 2.0 - >>> coll.run() # doctest: +SKIP + >>> res = coll.run() + >>> print res.outputs.file + ['exfile1.csv', 'exfile2.csv'] + >>> print res.outputs.miscdata + [1.0, 2.0] """ - input_spec = CollateInterfaceInputSpec output_spec = DynamicTraitedSpec From ebe03112db68a11e8e11f9138631cf6a1317e510 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 11:47:28 +0200 Subject: [PATCH 13/42] fixing doctests --- nipype/interfaces/utility.py | 2 +- nipype/pipeline/engine.py | 16 +++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 88542eaddb..caf9047f0a 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -480,7 +480,7 @@ def __setattr__(self, key, value): class CollateInterface(IOBase): """ - A simple interface to multiplex inputs through a unique output set. + A simple interface to multiplex inputs through a unique output node. Channel is defined by the prefix of the fields. In order to avoid inconsistencies, output fields should be defined forehand at initialization.. 
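
A minimal sketch of the prefix convention that CollateInterface relies on, and
of how GraftWorkflow exploits it (one ``out%02d``-prefixed channel per inserted
child; the field and file names below are illustrative, not from the patch)::

    from nipype.interfaces.utility import CollateInterface

    coll = CollateInterface(fields=['out_tpm'])
    coll.inputs.out00_out_tpm = 'tpm_fsl.nii'   # channel 'out00'
    coll.inputs.out01_out_tpm = 'tpm_ants.nii'  # channel 'out01'
    res = coll.run()
    # res.outputs.out_tpm -> ['tpm_fsl.nii', 'tpm_ants.nii']
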
diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 8716c8ef4b..9185893a1f 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1261,21 +1261,19 @@ class GraftWorkflow(InterfacedWorkflow): ------- >>> import nipype.pipeline.engine as npe >>> import nipype.interfaces.utility as niu - >>> from nipype.algorithms.metrics import ErrorMap + >>> from nipype.interfaces.fsl import Threshold >>> from nipype.interfaces.utility import IdentityInterface - >>> wf1 = npe.InterfacedWorkflow(name='testname1', input_names=['in_ref', - >>> 'in_tst', 'mask'], - >>> output_names=['out_map']) - >>> errormap = npe.Node(ErrorMap(), name='internalnode') - >>> wf1.connect([ ('in', errormap), (errormap, 'out') ]) + >>> wf1 = npe.InterfacedWorkflow(name='testname1', input_names=['in_file', \ + 'thresh'], output_names=['out_file']) + >>> node = npe.Node(Threshold(), name='internalnode') + >>> wf1.connect([ ('in', node), (node, 'out') ]) >>> wf2 = wf1.clone(name='testname2') >>> wf = npe.GraftWorkflow(name='graft', fields_from=wf1) >>> wf.insert(wf1) >>> wf.insert(wf2) - >>> wf.inputs.in_ref = 'reference.nii' - >>> wf.inputs.in_tst = 'test.nii' + >>> wf.inputs.in_file = 'reference.nii' + >>> wf.inputs.thres = 1.0 >>> wf.write_graph(format='pdf') # doctest: +SKIP - >>> wf.run() # doctest: +SKIP """ _children = dict() _outnodes = dict() From eedc36e09adab5a9bbbbd68729ec327669879248 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 12:11:30 +0200 Subject: [PATCH 14/42] Fixed error in doctest --- nipype/pipeline/engine.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 9185893a1f..26b6dc7aaa 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1271,9 +1271,9 @@ class GraftWorkflow(InterfacedWorkflow): >>> wf = npe.GraftWorkflow(name='graft', fields_from=wf1) >>> wf.insert(wf1) >>> wf.insert(wf2) - >>> wf.inputs.in_file = 'reference.nii' - >>> wf.inputs.thres = 1.0 - >>> wf.write_graph(format='pdf') # doctest: +SKIP + >>> wf.inputs.in_file = 'structural.nii' + >>> wf.inputs.thresh = 1.0 + >>> wf.run() # doctest: +SKIP """ _children = dict() _outnodes = dict() From bcd3663912910bb7eca1352495458431c2be745b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 12:24:22 +0200 Subject: [PATCH 15/42] Added auto test --- .../tests/test_auto_CollateInterface.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 nipype/interfaces/tests/test_auto_CollateInterface.py diff --git a/nipype/interfaces/tests/test_auto_CollateInterface.py b/nipype/interfaces/tests/test_auto_CollateInterface.py new file mode 100644 index 0000000000..f32cc23031 --- /dev/null +++ b/nipype/interfaces/tests/test_auto_CollateInterface.py @@ -0,0 +1,25 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.utility import CollateInterface + +def test_CollateInterface_inputs(): + input_map = dict(_outputs=dict(usedefault=True, + ), + ignore_exception=dict(nohash=True, + usedefault=True, + ), + ) + inputs = CollateInterface.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + +def test_CollateInterface_outputs(): + output_map = dict() + outputs = CollateInterface.output_spec() + + for key, metadata in output_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, 
getattr(outputs.traits()[key], metakey), value + From a6746ac47faa0ffcda9d2e7f8b63f850b32651de Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 15:56:30 +0200 Subject: [PATCH 16/42] Added MultipleSelectInterface --- .../test_auto_MultipleSelectInterface.py | 25 ++++++++ nipype/interfaces/utility.py | 60 +++++++++++++++++++ 2 files changed, 85 insertions(+) create mode 100644 nipype/interfaces/tests/test_auto_MultipleSelectInterface.py diff --git a/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py b/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py new file mode 100644 index 0000000000..1adc223190 --- /dev/null +++ b/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py @@ -0,0 +1,25 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from nipype.testing import assert_equal +from nipype.interfaces.utility import MultipleSelectInterface + +def test_MultipleSelectInterface_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + index=dict(mandatory=True, + ), + ) + inputs = MultipleSelectInterface.input_spec() + + for key, metadata in input_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + +def test_MultipleSelectInterface_outputs(): + output_map = dict() + outputs = MultipleSelectInterface.output_spec() + + for key, metadata in output_map.items(): + for metakey, value in metadata.items(): + yield assert_equal, getattr(outputs.traits()[key], metakey), value + diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index caf9047f0a..5f2b57b7c9 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -551,3 +551,63 @@ def _list_outputs(self): outputs[key].append(val) return outputs + + +class MultipleSelectInputSpec(BaseInterfaceInputSpec, DynamicTraitedSpec): + index = InputMultiPath(traits.Int, mandatory=True, + desc='0-based indices of values to choose') + +class MultipleSelectInterface(IdentityInterface): + """ + Basic interface that demultiplexes lists generated by CollateInterface + + Example + ------- + + >>> from nipype.interfaces.utility import MultipleSelectInterface + >>> demux = MultipleSelectInterface(fields=['file','miscdata'], index=0) + >>> demux.inputs.file = ['exfile1.csv', 'exfile2.csv'] + >>> demux.inputs.miscdata = [1.0, 2.0] + >>> res = demux.run() + >>> print res.outputs.file + exfile1.csv + >>> print res.outputs.miscdata + 1.0 + """ + input_spec = MultipleSelectInputSpec + output_spec = DynamicTraitedSpec + + def __init__(self, fields=None, mandatory_inputs=True, **inputs): + super(IdentityInterface, self).__init__(**inputs) + if fields is None or not fields: + raise ValueError('Identity Interface fields must be a non-empty list') + # Each input must be in the fields. + for in_field in inputs: + if in_field not in fields and in_field != 'index': + raise ValueError('Identity Interface input is not in the fields: %s' % in_field) + self._fields = fields + self._mandatory_inputs = mandatory_inputs + add_traits(self.inputs, fields) + # Adding any traits wipes out all input values set in superclass initialization, + # even it the trait is not in the add_traits argument. The work-around is to reset + # the values after adding the traits. 
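+        # (Note: 'index' is declared statically on MultipleSelectInputSpec, so
+        # the fields check above deliberately skips it.)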
+ self.inputs.set(**inputs) + + def _list_outputs(self): + #manual mandatory inputs check + if self._fields and self._mandatory_inputs: + for key in self._fields: + value = getattr(self.inputs, key) + if not isdefined(value): + msg = "%s requires a value for input '%s' because it was listed in 'fields'. \ + You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." % \ + (self.__class__.__name__, key) + raise ValueError(msg) + + outputs = self._outputs().get() + for key in self._fields: + val = getattr(self.inputs, key) + if isdefined(val): + outputs[key] = np.squeeze(np.array(val)[np.array(self.inputs.index)]).tolist() + return outputs + From a650cd745d9d8c39584a38b00f547d5380dfa438 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 16:49:21 +0200 Subject: [PATCH 17/42] Fixed MultipleSelectInterface input spec --- nipype/interfaces/utility.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 5f2b57b7c9..192b49280b 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -553,7 +553,7 @@ def _list_outputs(self): return outputs -class MultipleSelectInputSpec(BaseInterfaceInputSpec, DynamicTraitedSpec): +class MultipleSelectInputSpec(DynamicTraitedSpec): index = InputMultiPath(traits.Int, mandatory=True, desc='0-based indices of values to choose') From 49853ac6ab73f1612f5f2e779a09100229b6718b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 24 Jul 2014 17:00:36 +0200 Subject: [PATCH 18/42] Updated autogenerated test --- nipype/interfaces/tests/test_auto_MultipleSelectInterface.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py b/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py index 1adc223190..0df8228ef4 100644 --- a/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py +++ b/nipype/interfaces/tests/test_auto_MultipleSelectInterface.py @@ -3,10 +3,7 @@ from nipype.interfaces.utility import MultipleSelectInterface def test_MultipleSelectInterface_inputs(): - input_map = dict(ignore_exception=dict(nohash=True, - usedefault=True, - ), - index=dict(mandatory=True, + input_map = dict(index=dict(mandatory=True, ), ) inputs = MultipleSelectInterface.input_spec() From 79d1024dba44e37208c9f5fef4bd977966db511e Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 25 Jul 2014 11:42:48 +0200 Subject: [PATCH 19/42] Fixes MultipleSelectInterface It doesn't derive from IdentityInterface anymore (caused node removal in workflows). Also applied improved dynamictraited interfaces checking in workflows. 
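
With this check in place, Workflow.connect accepts connections into any node
whose input spec is a DynamicTraitedSpec (e.g. CollateInterface), where input
names such as the 'out00_<field>' channels created by GraftWorkflow only come
into existence at runtime; validation of those names is deferred accordingly.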
--- nipype/interfaces/utility.py | 12 ++++++++++-- nipype/pipeline/engine.py | 2 +- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 192b49280b..0b337294a1 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -557,7 +557,7 @@ class MultipleSelectInputSpec(DynamicTraitedSpec): index = InputMultiPath(traits.Int, mandatory=True, desc='0-based indices of values to choose') -class MultipleSelectInterface(IdentityInterface): +class MultipleSelectInterface(IOBase): """ Basic interface that demultiplexes lists generated by CollateInterface @@ -578,7 +578,7 @@ class MultipleSelectInterface(IdentityInterface): output_spec = DynamicTraitedSpec def __init__(self, fields=None, mandatory_inputs=True, **inputs): - super(IdentityInterface, self).__init__(**inputs) + super(MultipleSelectInterface, self).__init__(**inputs) if fields is None or not fields: raise ValueError('Identity Interface fields must be a non-empty list') # Each input must be in the fields. @@ -593,6 +593,14 @@ def __init__(self, fields=None, mandatory_inputs=True, **inputs): # the values after adding the traits. self.inputs.set(**inputs) + def _add_output_traits(self, base): + undefined_traits = {} + for key in self._fields: + base.add_trait(key, traits.Any) + undefined_traits[key] = Undefined + base.trait_set(trait_change_notify=False, **undefined_traits) + return base + def _list_outputs(self): #manual mandatory inputs check if self._fields and self._mandatory_inputs: diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 26b6dc7aaa..2d8107da96 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -190,7 +190,7 @@ def _check_outputs(self, parameter): return hasattr(self.outputs, parameter) def _check_inputs(self, parameter): - if hasattr(self.inputs,'_outputs'): + if isinstance(self.inputs, DynamicTraitedSpec): return True return hasattr(self.inputs, parameter) From 8be56b62455e47073dd2d5ea780697b57872ae25 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 25 Jul 2014 12:05:20 +0200 Subject: [PATCH 20/42] Moved private members from class to instance --- nipype/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 2d8107da96..c434557607 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1275,14 +1275,14 @@ class GraftWorkflow(InterfacedWorkflow): >>> wf.inputs.thresh = 1.0 >>> wf.run() # doctest: +SKIP """ - _children = dict() - _outnodes = dict() def __init__(self, name, base_dir=None, fields_from=None, input_names=[], output_names=[]): """ Initializes the workflow from an existing InterfacedWorkflow """ + _children = dict() + _outnodes = dict() from nipype.interfaces.utility import IdentityInterface, CollateInterface fields_undefined = ((input_names is None) or (output_names is None)) wf_undefined = (fields_from is None) From 967d5b879a2a32c9062039a2add4662dc946a47b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 25 Jul 2014 12:11:39 +0200 Subject: [PATCH 21/42] fixed error --- nipype/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index c434557607..3bf225ddfc 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1281,8 +1281,8 @@ def __init__(self, name, base_dir=None, fields_from=None, """ Initializes the workflow from an existing 
InterfacedWorkflow """ - _children = dict() - _outnodes = dict() + self._children = dict() + self._outnodes = dict() from nipype.interfaces.utility import IdentityInterface, CollateInterface fields_undefined = ((input_names is None) or (output_names is None)) wf_undefined = (fields_from is None) From e5bdf21afe1ae1abccaef8750a62a19facf54d4a Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 25 Jul 2014 13:04:43 +0200 Subject: [PATCH 22/42] Added return cid in GraftWorkflow.insert --- nipype/pipeline/engine.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 3bf225ddfc..ddbce652f3 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1336,6 +1336,7 @@ def insert(self, workflow): self.connect([('in', workflow), (workflow, self._outnodes[ckey]), (self._outnodes[ckey], 'out', [(key, '%s_%s' % (childname, key)) for key in self.output_names])]) + return cid class Node(WorkflowBase): From 43c769406bceb3fce6c9f4b6b06cee75a7b3b2a6 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 25 Jul 2014 13:10:31 +0200 Subject: [PATCH 23/42] Changed access to child ids in GraftWorkflow --- nipype/pipeline/engine.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index ddbce652f3..3def20b628 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1282,6 +1282,7 @@ def __init__(self, name, base_dir=None, fields_from=None, Initializes the workflow from an existing InterfacedWorkflow """ self._children = dict() + self._cids = dict() self._outnodes = dict() from nipype.interfaces.utility import IdentityInterface, CollateInterface fields_undefined = ((input_names is None) or (output_names is None)) @@ -1306,6 +1307,9 @@ def __init__(self, name, base_dir=None, fields_from=None, output_names=output_names) self._outputnode = Node(CollateInterface(fields=output_names), name='outputnode') + def get_cid(self, name): + return self.cids[name] + def insert(self, workflow): """ Inserts an InterfacedWorkflow into the workflow @@ -1325,6 +1329,7 @@ def insert(self, workflow): childname = 'out%02d' % cid self._children[ckey] = workflow + self._cids[ckey] = cid self._outnodes[ckey] = Node(IdentityInterface(fields=self.output_names), name=childname) @@ -1336,7 +1341,6 @@ def insert(self, workflow): self.connect([('in', workflow), (workflow, self._outnodes[ckey]), (self._outnodes[ckey], 'out', [(key, '%s_%s' % (childname, key)) for key in self.output_names])]) - return cid class Node(WorkflowBase): From f7fe305de84f593373597e4675ddd54159a4a1ff Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 25 Jul 2014 13:14:48 +0200 Subject: [PATCH 24/42] back to returning cid in insert --- nipype/pipeline/engine.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 3def20b628..a32371f0a6 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1308,7 +1308,7 @@ def __init__(self, name, base_dir=None, fields_from=None, self._outputnode = Node(CollateInterface(fields=output_names), name='outputnode') def get_cid(self, name): - return self.cids[name] + return self._cids[name] def insert(self, workflow): """ @@ -1341,6 +1341,7 @@ def insert(self, workflow): self.connect([('in', workflow), (workflow, self._outnodes[ckey]), (self._outnodes[ckey], 'out', [(key, '%s_%s' % (childname, key)) for key in self.output_names])]) + return cid class Node(WorkflowBase): From 
587107fd60ea0a2e87104d0a613bbfa77d8a69db Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 29 Jul 2014 15:30:28 +0200 Subject: [PATCH 25/42] Forbid dots in InterfacedWorkflow connections --- nipype/pipeline/engine.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 0e9ac64bf9..da83ffe265 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -512,7 +512,7 @@ def write_graph(self, dotfilename='graph.dot', graph2use='hierarchical', workflow nodes; flat - expands workflow nodes recursively; hierarchical - expands workflow nodes recursively with a - notion on hierarchy; + notion on hierarchy; colored - expands workflow nodes recursively with a notion on hierarchy in color; exec - expands workflows to depict iterables @@ -1211,10 +1211,10 @@ def connect(self, *args, **kwargs): for c in conn[2]: srcport = c[0] dstport = c[1] - if len(srcpref) > 0 and not srcpref in srcport: + if len(srcpref) > 0 and not '.' in srcport: srcport = srcpref + srcport - if len(dstpref) > 0 and not dstpref in dstport: + if len(dstpref) > 0 and not '.' in dstport: dstport = dstpref + dstport ports.append((srcport, dstport)) @@ -2297,7 +2297,7 @@ def __init__(self, interface, iterfield, name, serial=False, **kwargs): fields=self.iterfield) self._inputs.on_trait_change(self._set_mapnode_input) self._got_inputs = False - + self._serial = serial def _create_dynamic_traits(self, basetraits, fields=None, nitems=None): From a010a5008b6259dc26e44517675567f1d897e262 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 29 Jul 2014 16:06:50 +0200 Subject: [PATCH 26/42] Add doctests results missing --- nipype/pipeline/engine.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index da83ffe265..6481329745 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1283,7 +1283,9 @@ class GraftWorkflow(InterfacedWorkflow): >>> wf2 = wf1.clone(name='testname2') >>> wf = npe.GraftWorkflow(name='graft', fields_from=wf1) >>> wf.insert(wf1) + 0 >>> wf.insert(wf2) + 1 >>> wf.inputs.in_file = 'structural.nii' >>> wf.inputs.thresh = 1.0 >>> wf.run() # doctest: +SKIP From 6692acbe0d027f3d830aacfe8ae69f934866787f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 20 Mar 2015 14:55:37 +0100 Subject: [PATCH 27/42] Open documentation file for the PR --- doc/users/graft_workflow.rst | 21 +++++++++++++++++++++ doc/users/index.rst | 1 + 2 files changed, 22 insertions(+) create mode 100644 doc/users/graft_workflow.rst diff --git a/doc/users/graft_workflow.rst b/doc/users/graft_workflow.rst new file mode 100644 index 0000000000..598cf709a5 --- /dev/null +++ b/doc/users/graft_workflow.rst @@ -0,0 +1,21 @@ +.. _graft_workflow: + +===================================================== +Interfaced workflows and GraftWorkflow (experimental) +===================================================== + +:class:`nipype.pipeline.engine.InterfacedWorkflow` provides automatic input/output +nodes generation, with some other utilities such as fast connection (avoiding +to specify the connecting fields). + +:class:`nipype.pipeline.engine.GraftWorkflow` is intended to create evaluation workflows, +where all the inputs are the same but several different methods are to be compared, stacking +the outputs in lists. + + +Interfaced workflows +-------------------- + +:class:`nipype.pipeline.engine.InterfacedWorkflow` generates workflows with default +inputnode and outputnode. 
It also exposes the fields without the 'inputnode.' and
+'outputnode.' prefix.
\ No newline at end of file
diff --git a/doc/users/index.rst b/doc/users/index.rst
index c5ebbae1df..46f8146e9d 100644
--- a/doc/users/index.rst
+++ b/doc/users/index.rst
@@ -35,6 +35,7 @@
    joinnode_and_itersource
    model_specification
    saving_workflows
+   graft_workflow
    spmmcr
    mipav
    nipypecmd

From 8dc7ac6c3ab20d2ba808d96acb146161f3b56da7 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Mon, 23 Mar 2015 13:09:47 +0100
Subject: [PATCH 28/42] update documentation

---
 doc/users/graft_workflow.rst | 72 ++++++++++++++++++++++++++++++++++--
 1 file changed, 69 insertions(+), 3 deletions(-)

diff --git a/doc/users/graft_workflow.rst b/doc/users/graft_workflow.rst
index 598cf709a5..ba844b20a3 100644
--- a/doc/users/graft_workflow.rst
+++ b/doc/users/graft_workflow.rst
@@ -16,6 +16,72 @@ the outputs in lists.
 
 Interfaced workflows
 --------------------
 
-:class:`nipype.pipeline.engine.InterfacedWorkflow` generates workflows with default
-inputnode and outputnode. It also exposes the fields without the 'inputnode.' and
-'outputnode.' prefix.
\ No newline at end of file
+:class:`~nipype.pipeline.engine.InterfacedWorkflow` generates workflows with default
+``inputnode`` and ``outputnode``. It also exposes the fields without the ``inputnode.`` and
+``outputnode.`` prefix.
+
+Let's create a very simple workflow with a segmentation node. Please notice the fundamental
+differences with a standard :class:`~nipype.pipeline.engine.Workflow`:
+1) No need for ``inputnode`` and ``outputnode``; 2) fast connection of fields.
+::
+
+    import nipype.pipeline.engine as pe
+    from nipype.interfaces import fsl
+    segm0 = pe.Node(fsl.FAST(number_classes=3, probability_maps=True),
+                    name='FSLFAST')
+    ifwf0 = pe.InterfacedWorkflow(name='testname0', input_names=['in_t1w'],
+                                  output_names=['out_tpm'])
+    ifwf0.connect([
+        ('in', segm0, [('in_t1w', 'in_files')]),
+        (segm0, 'out', [('probability_maps', 'out_tpm')])
+    ])
+
+
+We can connect an input to this workflow as usual
+::
+
+    import os
+    import nipype.interfaces.io as nio
+    ds = pe.Node(nio.DataGrabber(base_directory=os.getcwd(), template='t1.nii'),
+                 name='DataSource')
+    mywf = pe.Workflow(name='FullWorkflow')
+    mywf.connect(ds, 't1', ifwf0, 'inputnode.in_t1w')
+
+
+The InterfacedWorkflow is useful for creating several segmentation alternatives that
+always take one input named ``in_t1w`` and return one output named ``out_tpm``. On their
+own, :class:`InterfacedWorkflows <nipype.pipeline.engine.InterfacedWorkflow>` do not add
+much value to the conventional :class:`Workflows <nipype.pipeline.engine.Workflow>`, but
+they are interesting as units inside
+:class:`GraftWorkflows <nipype.pipeline.engine.GraftWorkflow>`.
+
+
+
+Workflows to run cross-comparisons of methods
+---------------------------------------------
+
+Say we want to compare segmentation algorithms: FAST from FSL, and Atropos from ANTs.
+We want all the methods under comparison to expose the same names and number of inputs
+and outputs.
+
+We first create the :class:`~nipype.pipeline.engine.GraftWorkflow`, using an existing
+workflow as a reference.
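+``fields_from`` is just a convenience: roughly speaking, it copies the
+reference workflow's ``input_names`` and ``output_names`` (both are exposed as
+properties of :class:`~nipype.pipeline.engine.InterfacedWorkflow`). A sketch
+of the equivalent explicit construction would be::
+
+    compare_wf = pe.GraftWorkflow(name='Comparison',
+                                  input_names=ifwf0.input_names,
+                                  output_names=ifwf0.output_names)
+
+The shorthand used throughout this document is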
+ +:: + + compare_wf = pe.GraftWorkflow(name='Comparison', fields_from=ifwf0) + +We create the alternate segmentation workflow:: + + from nipype.interfaces import ants + segm1 = pe.Node(ants.Atropos(dimension=3, number_of_tissue_classes=3), + name='Atropos') + ifwf1 = pe.InterfacedWorkflow(name='testname1', input_names=['in_t1w'], + output_names=['out_tpm']) + ifwf1.connect([ + ('in', segm1, [('in_t1w', 'intensity_images')]), + (segm1, 'out', [('posteriors', 'out_tpm')]) + ]) + +Finally, our workflows under comparison are inserted in the :class:`~nipype.pipeline.engine.GraftWorkflow` using +the ``insert()`` method:: + + compare_wf.insert([ifwf0, ifwf1]) + From 5674d5d53ff99a529df42ad1929f9637042c7712 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 23 Mar 2015 13:15:59 +0100 Subject: [PATCH 29/42] update CHANGES --- CHANGES | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES b/CHANGES index ecf067f8af..c9da0ee599 100644 --- a/CHANGES +++ b/CHANGES @@ -1,5 +1,7 @@ Next release ============ + +* ENH: [Interfaced|Graft]Workflows (https://github.com/nipy/nipype/pull/882) * ENH: Dropped support for now 7 years old Python 2.6 (https://github.com/nipy/nipype/pull/1069) * FIX: terminal_output is not mandatory anymore (https://github.com/nipy/nipype/pull/1070) * ENH: Added "nipype_cmd" tool for running interfaces from the command line (https://github.com/nipy/nipype/pull/795) From b8415b4318bf9832f622ddf20b5f22ce8dc98419 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sun, 26 Apr 2015 20:42:58 +0200 Subject: [PATCH 30/42] add new files for EPI preprocessing --- .../workflows/preprocessing/epi/__init__.py | 11 + nipype/workflows/preprocessing/epi/bias.py | 70 ++ .../workflows/preprocessing/epi/complete.py | 189 ++++ nipype/workflows/preprocessing/epi/eddy.py | 130 +++ nipype/workflows/preprocessing/epi/fsl.py | 107 +++ nipype/workflows/preprocessing/epi/motion.py | 120 +++ .../preprocessing/epi/susceptibility.py | 301 +++++++ .../preprocessing/epi/tests/__init__.py | 3 + nipype/workflows/preprocessing/epi/utils.py | 851 ++++++++++++++++++ 9 files changed, 1782 insertions(+) create mode 100644 nipype/workflows/preprocessing/epi/__init__.py create mode 100644 nipype/workflows/preprocessing/epi/bias.py create mode 100644 nipype/workflows/preprocessing/epi/complete.py create mode 100644 nipype/workflows/preprocessing/epi/eddy.py create mode 100644 nipype/workflows/preprocessing/epi/fsl.py create mode 100644 nipype/workflows/preprocessing/epi/motion.py create mode 100644 nipype/workflows/preprocessing/epi/susceptibility.py create mode 100644 nipype/workflows/preprocessing/epi/tests/__init__.py create mode 100644 nipype/workflows/preprocessing/epi/utils.py diff --git a/nipype/workflows/preprocessing/epi/__init__.py b/nipype/workflows/preprocessing/epi/__init__.py new file mode 100644 index 0000000000..b9f7314fba --- /dev/null +++ b/nipype/workflows/preprocessing/epi/__init__.py @@ -0,0 +1,11 @@ +# coding: utf-8 +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: + +from bias import remove_bias +from eddy import ecc_fsl +from susceptibility import sdc_fmb, sdc_peb +from motion import hmc_flirt + +from fsl import all_dmri +from complete import all_fmb_pipeline, all_peb_pipeline \ No newline at end of file diff --git a/nipype/workflows/preprocessing/epi/bias.py b/nipype/workflows/preprocessing/epi/bias.py new file mode 100644 index 0000000000..9331626154 --- /dev/null +++ 
b/nipype/workflows/preprocessing/epi/bias.py
@@ -0,0 +1,70 @@
+# coding: utf-8
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import os
+
+import nipype.pipeline.engine as pe
+from nipype.interfaces.io import JSONFileGrabber
+from nipype.interfaces import utility as niu
+from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import ants
+from nipype.interfaces import fsl
+from .utils import *
+
+def remove_bias(name='bias_correct'):
+    """
+    This workflow estimates a single multiplicative bias field from the
+    averaged *b0* image, as suggested in [Jeurissen2014]_.
+
+    .. admonition:: References
+
+      .. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained
+        spherical deconvolution for improved analysis of multi-shell diffusion
+        MRI data `_.
+        NeuroImage (2014). doi: 10.1016/j.neuroimage.2014.07.061
+
+
+    Example
+    -------
+
+    >>> from nipype.workflows.preprocessing.epi import remove_bias
+    >>> bias = remove_bias()
+    >>> bias.inputs.inputnode.in_file = 'epi.nii'
+    >>> bias.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> bias.inputs.inputnode.in_mask = 'mask.nii'
+    >>> bias.run() # doctest: +SKIP
+
+    """
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_bval', 'in_mask']), name='inputnode')
+
+    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
+                         name='outputnode')
+
+    avg_b0 = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
+        function=b0_average), name='b0_avg')
+    n4 = pe.Node(ants.N4BiasFieldCorrection(
+        dimension=3, save_bias=True, bspline_fitting_distance=600),
+        name='Bias_b0')
+    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
+    mult = pe.MapNode(fsl.MultiImageMaths(op_string='-div %s'),
+                      iterfield=['in_file'], name='RemoveBiasOfDWIs')
+    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
+                       name='RemoveNegative')
+    merge = pe.Node(fsl.utils.Merge(dimension='t'), name='MergeDWIs')
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, avg_b0, [('in_file', 'in_dwi'),
+                             ('in_bval', 'in_bval')]),
+        (avg_b0, n4, [('out_file', 'input_image')]),
+        (inputnode, n4, [('in_mask', 'mask_image')]),
+        (inputnode, split, [('in_file', 'in_file')]),
+        (n4, mult, [('bias_image', 'operand_files')]),
+        (split, mult, [('out_files', 'in_file')]),
+        (mult, thres, [('out_file', 'in_file')]),
+        (thres, merge, [('out_file', 'in_files')]),
+        (merge, outputnode, [('merged_file', 'out_file')])
+    ])
+    return wf
\ No newline at end of file
diff --git a/nipype/workflows/preprocessing/epi/complete.py b/nipype/workflows/preprocessing/epi/complete.py
new file mode 100644
index 0000000000..6d0acac12c
--- /dev/null
+++ b/nipype/workflows/preprocessing/epi/complete.py
@@ -0,0 +1,189 @@
+# coding: utf-8
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import os
+
+import nipype.pipeline.engine as pe
+from nipype.interfaces.io import JSONFileGrabber
+from nipype.interfaces import utility as niu
+from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import ants
+from nipype.interfaces import fsl
+from .utils import *
+from .eddy import ecc_fsl
+from .motion import hmc_flirt
+from .susceptibility import sdc_fmb, sdc_peb
+
+def all_fmb_pipeline(name='hmc_sdc_ecc', fugue_params=dict(smooth3d=2.0)):
+    """
+    Builds a pipeline including three artifact corrections: head-motion
+    correction (HMC), susceptibility-derived distortion correction (SDC),
+    and Eddy currents-derived distortion correction (ECC).
+
+    The displacement fields from each kind of distortion are combined. Thus,
+    only one interpolation occurs between input data and result.
+
+    .. warning:: this workflow rotates the gradients table (*b*-vectors)
+      [Leemans09]_.
+
+
+    Examples
+    --------
+
+    >>> from nipype.workflows.preprocessing.epi import all_fmb_pipeline
+    >>> allcorr = all_fmb_pipeline()
+    >>> allcorr.inputs.inputnode.in_file = 'epi.nii'
+    >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec'
+    >>> allcorr.inputs.inputnode.bmap_mag = 'magnitude.nii'
+    >>> allcorr.inputs.inputnode.bmap_pha = 'phase.nii'
+    >>> allcorr.inputs.inputnode.epi_param = 'epi_param.txt'
+    >>> allcorr.run() # doctest: +SKIP
+
+    """
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_bvec', 'in_bval', 'bmap_pha', 'bmap_mag',
+                'epi_param']), name='inputnode')
+
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_mask', 'out_bvec']), name='outputnode')
+
+    list_b0 = pe.Node(niu.Function(
+        input_names=['in_bval'], output_names=['out_idx'],
+        function=b0_indices), name='B0indices')
+
+    avg_b0_0 = pe.Node(niu.Function(
+        input_names=['in_file', 'index'], output_names=['out_file'],
+        function=time_avg), name='b0_avg_pre')
+    avg_b0_1 = pe.Node(niu.Function(
+        input_names=['in_file', 'index'], output_names=['out_file'],
+        function=time_avg), name='b0_avg_post')
+
+    bet_dwi0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
+                       name='bet_dwi_pre')
+    bet_dwi1 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
+                       name='bet_dwi_post')
+
+    hmc = hmc_flirt()
+    sdc = sdc_fmb(fugue_params=fugue_params)
+    ecc = ecc_fsl()
+    unwarp = apply_all_corrections()
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, hmc, [('in_file', 'inputnode.in_file'),
+                          ('in_bvec', 'inputnode.in_bvec'),
+                          ('in_bval', 'inputnode.in_bval')]),
+        (inputnode, list_b0, [('in_bval', 'in_bval')]),
+        (inputnode, avg_b0_0, [('in_file', 'in_file')]),
+        (list_b0, avg_b0_0, [('out_idx', 'index')]),
+        (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]),
+        (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]),
+        (hmc, sdc, [
+            ('outputnode.out_file', 'inputnode.in_file')]),
+        (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]),
+        (inputnode, sdc, [('bmap_pha', 'inputnode.bmap_pha'),
+                          ('bmap_mag', 'inputnode.bmap_mag'),
+                          ('epi_param', 'inputnode.settings')]),
+        (list_b0, sdc, [('out_idx', 'inputnode.in_ref')]),
+        (hmc, ecc, [
+            ('outputnode.out_xfms', 'inputnode.in_xfms')]),
+        (inputnode, ecc, [('in_file', 'inputnode.in_file'),
+                          ('in_bval', 'inputnode.in_bval')]),
+        (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]),
+        (ecc, avg_b0_1, [('outputnode.out_file', 'in_file')]),
+        (list_b0, avg_b0_1, [('out_idx', 'index')]),
+        (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]),
+        (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]),
+        (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]),
+        (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]),
+        (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]),
+        (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]),
+        (unwarp, outputnode, [('outputnode.out_file', 'out_file')]),
+        (bet_dwi1, outputnode, [('mask_file', 'out_mask')])
+    ])
+    return wf
+
+
+def all_peb_pipeline(name='hmc_sdc_ecc',
+                     epi_params=dict(echospacing=0.77e-3,
+                                     acc_factor=3,
+                                     enc_dir='y-',
+                                     epi_factor=1),
+                     altepi_params=dict(echospacing=0.77e-3,
+                                        acc_factor=3,
+                                        enc_dir='y',
+                                        epi_factor=1)):
+    """
+    Builds a pipeline including three artifact corrections: head-motion
+    correction (HMC), susceptibility-derived distortion correction (SDC),
+    and Eddy currents-derived distortion correction (ECC).
+
+    .. warning:: this workflow rotates the gradients table (*b*-vectors)
+      [Leemans09]_.
+
+
+    Examples
+    --------
+
+    >>> from nipype.workflows.preprocessing.epi import all_peb_pipeline
+    >>> allcorr = all_peb_pipeline()
+    >>> allcorr.inputs.inputnode.in_file = 'epi.nii'
+    >>> allcorr.inputs.inputnode.alt_file = 'epi_rev.nii'
+    >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec'
+    >>> allcorr.run() # doctest: +SKIP
+
+    """
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_bvec', 'in_bval', 'alt_file']),
+        name='inputnode')
+
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_mask', 'out_bvec']), name='outputnode')
+
+    avg_b0_0 = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
+        function=b0_average), name='b0_avg_pre')
+    avg_b0_1 = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
+        function=b0_average), name='b0_avg_post')
+    bet_dwi0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
+                       name='bet_dwi_pre')
+    bet_dwi1 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
+                       name='bet_dwi_post')
+
+    hmc = hmc_flirt()
+    sdc = sdc_peb(epi_params=epi_params, altepi_params=altepi_params)
+    ecc = ecc_fsl()
+
+    unwarp = apply_all_corrections()
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, hmc, [('in_file', 'inputnode.in_file'),
+                          ('in_bvec', 'inputnode.in_bvec'),
+                          ('in_bval', 'inputnode.in_bval')]),
+        (inputnode, avg_b0_0, [('in_file', 'in_dwi'),
+                               ('in_bval', 'in_bval')]),
+        (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]),
+        (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]),
+        (hmc, sdc, [
+            ('outputnode.out_file', 'inputnode.in_file')]),
+        (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]),
+        (inputnode, sdc, [('in_bval', 'inputnode.in_bval'),
+                          ('alt_file', 'inputnode.alt_file')]),
+        (inputnode, ecc, [('in_file', 'inputnode.in_file'),
+                          ('in_bval', 'inputnode.in_bval')]),
+        (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]),
+        (hmc, ecc, [
+            ('outputnode.out_xfms', 'inputnode.in_xfms')]),
+        (ecc, avg_b0_1, [('outputnode.out_file', 'in_dwi')]),
+        (inputnode, avg_b0_1, [('in_bval', 'in_bval')]),
+        (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]),
+        (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]),
+        (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]),
+        (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]),
+        (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]),
+        (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]),
+        (unwarp, outputnode, [('outputnode.out_file', 'out_file')]),
+        (bet_dwi1, outputnode, [('mask_file', 'out_mask')])
+    ])
+    return wf
diff --git a/nipype/workflows/preprocessing/epi/eddy.py b/nipype/workflows/preprocessing/epi/eddy.py
new file mode 100644
index 0000000000..4102acab3c
--- /dev/null
+++ b/nipype/workflows/preprocessing/epi/eddy.py
@@ -0,0 +1,130 @@
+# coding: utf-8
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import os
+
+import nipype.pipeline.engine as pe
+from nipype.interfaces.io import JSONFileGrabber
+from nipype.interfaces import utility as niu
+from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import ants
+from nipype.interfaces import fsl
+from .utils import *
+
+def ecc_fsl(name='eddy_correct'):
+    """
+    ECC stands for Eddy currents correction.
+
+    Creates a pipeline that corrects for artifacts induced by Eddy currents in
+    dMRI sequences.
+    It takes a series of diffusion weighted images and linearly co-registers
+    them to one reference image (the average of all b0s in the dataset).
+
+    DWIs are also modulated by the determinant of the Jacobian as indicated by
+    [Jones10]_ and [Rohde04]_.
+
+    A list of rigid transformation matrices can be provided, sourcing from a
+    :func:`.hmc_flirt` workflow, to initialize registrations in a *motion
+    free* framework.
+
+    A list of affine transformation matrices is available as output, so that
+    transforms can be chained (discussion
+    `here `_).
+
+    .. admonition:: References
+
+      .. [Jones10] Jones DK, `The signal intensity must be modulated by the
+        determinant of the Jacobian when correcting for eddy currents in
+        diffusion MRI
+        `_,
+        Proc. ISMRM 18th Annual Meeting, (2010).
+
+      .. [Rohde04] Rohde et al., `Comprehensive Approach for Correction of
+        Motion and Distortion in Diffusion-Weighted MRI
+        `_, MRM
+        51:103-114 (2004).
+
+    Example
+    -------
+
+    >>> from nipype.workflows.preprocessing.epi import ecc_fsl
+    >>> ecc = ecc_fsl()
+    >>> ecc.inputs.inputnode.in_file = 'diffusion.nii'
+    >>> ecc.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> ecc.inputs.inputnode.in_mask = 'mask.nii'
+    >>> ecc.run() # doctest: +SKIP
+
+    Inputs::
+
+        inputnode.in_file - input dwi file
+        inputnode.in_mask - weights mask of reference image (a file with data \
+range in [0.0, 1.0], indicating the weight of each voxel when computing the \
+metric)
+        inputnode.in_bval - b-values table
+        inputnode.in_xfms - list of matrices to initialize registration (from \
+head-motion correction)
+
+    Outputs::
+
+        outputnode.out_file - corrected dwi file
+        outputnode.out_xfms - list of transformation matrices
+    """
+
+    from nipype.workflows.data import get_flirt_schedule
+    params = dict(dof=12, no_search=True, interp='spline', bgvalue=0,
+                  schedule=get_flirt_schedule('ecc'))
+    # cost='normmi', cost_func='normmi', bins=64,
+
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_bval', 'in_mask', 'in_xfms']), name='inputnode')
+    avg_b0 = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
+        function=b0_average), name='b0_avg')
+    pick_dws = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval', 'b'], output_names=['out_file'],
+        function=extract_bval), name='ExtractDWI')
+    pick_dws.inputs.b = 'diff'
+
+    flirt = dwi_flirt(flirt_param=params, excl_nodiff=True)
+
+    mult = pe.MapNode(fsl.BinaryMaths(operation='mul'), name='ModulateDWIs',
+                      iterfield=['in_file', 'operand_value'])
+    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
+                       name='RemoveNegative')
+
+    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
+    get_mat = pe.Node(niu.Function(
+        input_names=['in_bval', 'in_xfms'], output_names=['out_files'],
+        function=recompose_xfm), name='GatherMatrices')
+    merge = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval', 'in_corrected'],
+        output_names=['out_file'], function=recompose_dwi), name='MergeDWIs')
+
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_xfms']), name='outputnode')
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, avg_b0, [('in_file', 'in_dwi'),
+                             ('in_bval', 'in_bval')]),
+        (inputnode, pick_dws, [('in_file', 'in_dwi'),
+                               ('in_bval', 'in_bval')]),
+        (inputnode, merge, [('in_file', 'in_dwi'),
+                            ('in_bval', 'in_bval')]),
+        (inputnode, flirt, [('in_mask', 'inputnode.ref_mask'),
+                            ('in_xfms', 'inputnode.in_xfms'),
+                            ('in_bval', 'inputnode.in_bval')]),
+        (inputnode, get_mat, [('in_bval', 'in_bval')]),
+        (avg_b0, flirt, [('out_file', 'inputnode.reference')]),
+        (pick_dws, flirt, [('out_file', 'inputnode.in_file')]),
+        (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]),
+        (flirt, mult, [(('outputnode.out_xfms', _xfm_jacobian),
+                        'operand_value')]),
+        (flirt, split, [('outputnode.out_file', 'in_file')]),
+        (split, mult, [('out_files', 'in_file')]),
+        (mult, thres, [('out_file', 'in_file')]),
+        (thres, merge, [('out_file', 'in_corrected')]),
+        (get_mat, outputnode, [('out_files', 'out_xfms')]),
+        (merge, outputnode, [('out_file', 'out_file')])
+    ])
+    return wf
diff --git a/nipype/workflows/preprocessing/epi/fsl.py b/nipype/workflows/preprocessing/epi/fsl.py
new file mode 100644
index 0000000000..28f44f347a
--- /dev/null
+++ b/nipype/workflows/preprocessing/epi/fsl.py
@@ -0,0 +1,107 @@
+# coding: utf-8
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import os
+
+import nipype.pipeline.engine as pe
+from nipype.interfaces.io import JSONFileGrabber
+from nipype.interfaces import utility as niu
+from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import ants
+from nipype.interfaces import fsl
+from .utils import *
+from .susceptibility import sdc_peb
+
+
+def all_dmri(name='fsl_all_correct',
+             epi_params=dict(echospacing=0.77e-3,
+                             acc_factor=3,
+                             enc_dir='y-'),
+             altepi_params=dict(echospacing=0.77e-3,
+                                acc_factor=3,
+                                enc_dir='y')):
+    """
+    Workflow that integrates FSL ``topup`` and ``eddy``.
+
+
+    .. warning:: this workflow rotates the gradients table (*b*-vectors)
+      [Leemans09]_.
+
+
+    .. warning:: this workflow does not perform jacobian modulation of each
+      *DWI* [Jones10]_.
+
+
+    Examples
+    --------
+
+    >>> from nipype.workflows.preprocessing.epi import all_dmri
+    >>> allcorr = all_dmri()
+    >>> allcorr.inputs.inputnode.in_file = 'epi.nii'
+    >>> allcorr.inputs.inputnode.alt_file = 'epi_rev.nii'
+    >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec'
+    >>> allcorr.run() # doctest: +SKIP
+
+    """
+
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_bvec', 'in_bval', 'alt_file']),
+        name='inputnode')
+
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_mask', 'out_bvec']), name='outputnode')
+
+    def _gen_index(in_file):
+        import numpy as np
+        import nibabel as nb
+        import os
+        out_file = os.path.abspath('index.txt')
+        vols = nb.load(in_file).get_data().shape[-1]
+        np.savetxt(out_file, np.ones((vols,)).T)
+        return out_file
+
+    avg_b0_0 = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
+        function=b0_average), name='b0_avg_pre')
+    bet_dwi0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
+                       name='bet_dwi_pre')
+
+    sdc = sdc_peb(epi_params=epi_params, altepi_params=altepi_params)
+    ecc = pe.Node(fsl.Eddy(method='jac'), name='fsl_eddy')
+    rot_bvec = pe.Node(niu.Function(
+        input_names=['in_bvec', 'eddy_params'], output_names=['out_file'],
+        function=eddy_rotate_bvecs), name='Rotate_Bvec')
+    avg_b0_1 = pe.Node(niu.Function(
+        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
+        function=b0_average), name='b0_avg_post')
+    bet_dwi1 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
+                       name='bet_dwi_post')
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, avg_b0_0, [('in_file', 'in_dwi'),
+                               ('in_bval', 'in_bval')]),
+        (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]),
+        (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]),
+        (inputnode, sdc, [('in_file', 'inputnode.in_file'),
+                          ('alt_file', 'inputnode.alt_file'),
+                          ('in_bval', 'inputnode.in_bval')]),
+        (sdc, ecc, [('topup.out_enc_file', 'in_acqp'),
+                    ('topup.out_fieldcoef',
+                     'in_topup_fieldcoef'),
+                    ('topup.out_movpar', 'in_topup_movpar')]),
+        (bet_dwi0, ecc, [('mask_file', 'in_mask')]),
+        (inputnode, ecc, [('in_file', 'in_file'),
+                          (('in_file', _gen_index), 'in_index'),
+                          ('in_bval', 'in_bval'),
+                          ('in_bvec', 'in_bvec')]),
+        (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]),
+        (ecc, rot_bvec, [('out_parameter', 'eddy_params')]),
+        (ecc, avg_b0_1, [('out_corrected', 'in_dwi')]),
+        (inputnode, avg_b0_1, [('in_bval', 'in_bval')]),
+        (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]),
+        (ecc, outputnode, [('out_corrected', 'out_file')]),
+        (rot_bvec, outputnode, [('out_file', 'out_bvec')]),
+        (bet_dwi1, outputnode, [('mask_file', 'out_mask')])
+    ])
+    return wf
\ No newline at end of file
diff --git a/nipype/workflows/preprocessing/epi/motion.py b/nipype/workflows/preprocessing/epi/motion.py
new file mode 100644
index 0000000000..f564e6b885
--- /dev/null
+++ b/nipype/workflows/preprocessing/epi/motion.py
@@ -0,0 +1,120 @@
+# coding: utf-8
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import os
+
+import nipype.pipeline.engine as pe
+from nipype.interfaces.io import JSONFileGrabber
+from nipype.interfaces import utility as niu
+from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import ants
+from nipype.interfaces import fsl
+from .utils import *
+
+def hmc_flirt(name='motion_correct'):
+    """
+    HMC stands for head-motion correction.
+
+    Creates a pipeline that corrects for head motion artifacts in dMRI
+    sequences.
+    It takes a series of diffusion weighted images and rigidly co-registers
+    them to one reference image. Finally, the `b`-matrix is rotated accordingly
+    [Leemans09]_ making use of the rotation matrix obtained by FLIRT.
+
+    Search angles have been limited to 4 degrees, based on results in
+    [Yendiki13]_.
+
+    A list of rigid transformation matrices is provided, so that transforms
+    can be chained.
+    This is useful to correct for artifacts with only one interpolation process
+    (as previously discussed `here
+    `_),
+    and also to compute nuisance regressors as proposed by [Yendiki13]_.
+
+    .. warning:: This workflow rotates the `b`-vectors, so please be advised
+      that not all the dicom converters ensure the consistency between the
+      resulting nifti orientation and the gradients table (e.g. dcm2nii
+      checks it).
+
+    .. admonition:: References
+
+      .. [Leemans09] Leemans A, and Jones DK, `The B-matrix must be rotated
+        when correcting for subject motion in DTI data
+        `_,
+        Magn Reson Med. 61(6):1336-49. 2009. doi: 10.1002/mrm.21890.
+
+      .. [Yendiki13] Yendiki A et al., `Spurious group differences due to head
+        motion in a diffusion MRI study
+        `_.
+        Neuroimage. 21(88C):79-90. 2013. doi: 10.1016/j.neuroimage.2013.11.027
+
+    Example
+    -------
+
+    >>> from nipype.workflows.preprocessing.epi import hmc_flirt
+    >>> hmc = hmc_flirt()
+    >>> hmc.inputs.inputnode.in_file = 'diffusion.nii'
+    >>> hmc.inputs.inputnode.in_bvec = 'diffusion.bvec'
+    >>> hmc.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> hmc.inputs.inputnode.in_mask = 'mask.nii'
+    >>> hmc.run() # doctest: +SKIP
+
+    Inputs::
+
+        inputnode.in_file - input dwi file
+        inputnode.in_mask - weights mask of reference image (a file with data \
+range in [0.0, 1.0], indicating the weight of each voxel when computing the \
+      inputnode.in_bvec - gradients file (b-vectors)
+      inputnode.in_bval - b-values table
+      inputnode.ref_num (optional, default=0) index of the b0 volume that \
+should be taken as reference
+
+    Outputs::
+
+      outputnode.out_file - corrected dwi file
+      outputnode.out_bvec - rotated gradient vectors table
+      outputnode.out_xfms - list of transformation matrices
+
+    """
+    from nipype.workflows.data import get_flirt_schedule
+
+    params = dict(dof=6, bgvalue=0, save_log=True, no_search=True,
+                  # cost='mutualinfo', cost_func='mutualinfo', bins=64,
+                  schedule=get_flirt_schedule('hmc'))
+
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'ref_num', 'in_bvec', 'in_bval', 'in_mask']),
+        name='inputnode')
+    split = pe.Node(niu.Function(
+        output_names=['out_ref', 'out_mov', 'out_bval', 'volid'],
+        input_names=['in_file', 'in_bval', 'ref_num'], function=hmc_split),
+        name='SplitDWI')
+    flirt = dwi_flirt(flirt_param=params)
+    insmat = pe.Node(niu.Function(input_names=['inlist', 'volid'],
+                                  output_names=['out'], function=insert_mat),
+                     name='InsertRefmat')
+    rot_bvec = pe.Node(niu.Function(
+        function=rotate_bvecs, input_names=['in_bvec', 'in_matrix'],
+        output_names=['out_file']), name='Rotate_Bvec')
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_bvec', 'out_xfms']), name='outputnode')
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, split, [('in_file', 'in_file'),
+                            ('in_bval', 'in_bval'),
+                            ('ref_num', 'ref_num')]),
+        (inputnode, flirt, [('in_mask', 'inputnode.ref_mask')]),
+        (split, flirt, [('out_ref', 'inputnode.reference'),
+                        ('out_mov', 'inputnode.in_file'),
+                        ('out_bval', 'inputnode.in_bval')]),
+        (flirt, insmat, [('outputnode.out_xfms', 'inlist')]),
+        (split, insmat, [('volid', 'volid')]),
+        (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]),
+        (insmat, rot_bvec, [('out', 'in_matrix')]),
+        (rot_bvec, outputnode, [('out_file', 'out_bvec')]),
+        (flirt, outputnode, [('outputnode.out_file', 'out_file')]),
+        (insmat, outputnode, [('out', 'out_xfms')])
+    ])
+    return wf
+
diff --git a/nipype/workflows/preprocessing/epi/susceptibility.py b/nipype/workflows/preprocessing/epi/susceptibility.py
new file mode 100644
index 0000000000..acfe4998cd
--- /dev/null
+++ b/nipype/workflows/preprocessing/epi/susceptibility.py
@@ -0,0 +1,301 @@
+# coding: utf-8
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import os
+
+import nipype.pipeline.engine as pe
+from nipype.interfaces.io import JSONFileGrabber
+from nipype.interfaces import utility as niu
+from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import ants
+from nipype.interfaces import fsl
+from .utils import *
+
+
+def sdc_fmb(name='fmb_correction', interp='Linear',
+            fugue_params=dict(smooth3d=2.0)):
+    """
+    SDC stands for susceptibility distortion correction. FMB stands for
+    fieldmap-based.
+
+    The fieldmap based (FMB) method implements SDC by using a mapping of the
+    B0 field as proposed by [Jezzard95]_. This workflow uses the implementation
+    of FSL (`FUGUE `_). Phase
+    unwrapping is performed using `PRELUDE
+    `_
+    [Jenkinson03]_. Preparation of the fieldmap is performed reproducing the
+    script in FSL `fsl_prepare_fieldmap
+    `_.
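+
+    As background (a standard EPI unwarping relation, stated here for
+    intuition and not quoted from the FSL documentation): the off-resonance
+    displacement along the phase-encoding axis is roughly::
+
+      shift_voxels = fieldmap_hz * total_readout_time
+
+    which is why the unwrapped phase map is converted to rad/s and later
+    combined with the effective echo spacing (``_eff_t_echo``) to obtain the
+    voxel shift map.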
+
+
+    Example
+    -------
+
+    >>> from nipype.workflows.preprocessing.epi.susceptibility import sdc_fmb
+    >>> fmb = sdc_fmb()
+    >>> fmb.inputs.inputnode.in_file = 'diffusion.nii'
+    >>> fmb.inputs.inputnode.in_ref = range(0, 30, 6)
+    >>> fmb.inputs.inputnode.in_mask = 'mask.nii'
+    >>> fmb.inputs.inputnode.bmap_mag = 'magnitude.nii'
+    >>> fmb.inputs.inputnode.bmap_pha = 'phase.nii'
+    >>> fmb.inputs.inputnode.settings = 'epi_param.txt'
+    >>> fmb.run() # doctest: +SKIP
+
+    .. warning:: Only SIEMENS format fieldmaps are supported.
+
+    .. admonition:: References
+
+      .. [Jezzard95] Jezzard P, and Balaban RS, `Correction for geometric
+        distortion in echo planar images from B0 field variations
+        `_,
+        MRM 34(1):65-73. (1995). doi: 10.1002/mrm.1910340111.
+
+      .. [Jenkinson03] Jenkinson M., `Fast, automated, N-dimensional
+        phase-unwrapping algorithm `_,
+        MRM 49(1):193-197, 2003, doi: 10.1002/mrm.10354.
+
+    """
+
+    epi_defaults = {'delta_te': 2.46e-3, 'echospacing': 0.77e-3,
+                    'acc_factor': 2, 'enc_dir': u'AP'}
+
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_ref', 'in_mask', 'bmap_pha', 'bmap_mag',
+                'settings']), name='inputnode')
+
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_vsm', 'out_warp']), name='outputnode')
+
+    r_params = pe.Node(JSONFileGrabber(defaults=epi_defaults),
+                       name='SettingsGrabber')
+    eff_echo = pe.Node(niu.Function(function=_eff_t_echo,
+                       input_names=['echospacing', 'acc_factor'],
+                       output_names=['eff_echo']), name='EffEcho')
+
+    firstmag = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='GetFirst')
+    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
+    bet = pe.Node(fsl.BET(frac=0.4, mask=True), name='BrainExtraction')
+    dilate = pe.Node(fsl.maths.MathsCommand(
+        nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate')
+    pha2rads = pe.Node(niu.Function(
+        input_names=['in_file'], output_names=['out_file'],
+        function=siemens2rads), name='PreparePhase')
+    prelude = pe.Node(fsl.PRELUDE(process3d=True), name='PhaseUnwrap')
+    rad2rsec = pe.Node(niu.Function(
+        input_names=['in_file', 'delta_te'], output_names=['out_file'],
+        function=rads2radsec), name='ToRadSec')
+
+    baseline = pe.Node(niu.Function(
+        input_names=['in_file', 'index'], output_names=['out_file'],
+        function=time_avg), name='Baseline')
+
+    fmm2b0 = pe.Node(ants.Registration(output_warped_image=True),
+                     name="FMm_to_B0")
+    fmm2b0.inputs.transforms = ['Rigid'] * 2
+    fmm2b0.inputs.transform_parameters = [(1.0,)] * 2
+    fmm2b0.inputs.number_of_iterations = [[50], [20]]
+    fmm2b0.inputs.dimension = 3
+    fmm2b0.inputs.metric = ['Mattes', 'Mattes']
+    fmm2b0.inputs.metric_weight = [1.0] * 2
+    fmm2b0.inputs.radius_or_number_of_bins = [64, 64]
+    fmm2b0.inputs.sampling_strategy = ['Regular', 'Random']
+    fmm2b0.inputs.sampling_percentage = [None, 0.2]
+    fmm2b0.inputs.convergence_threshold = [1.e-5, 1.e-8]
+    fmm2b0.inputs.convergence_window_size = [20, 10]
+    fmm2b0.inputs.smoothing_sigmas = [[6.0], [2.0]]
+    fmm2b0.inputs.sigma_units = ['vox'] * 2
+    fmm2b0.inputs.shrink_factors = [[6], [1]]
+    fmm2b0.inputs.use_estimate_learning_rate_once = [True] * 2
+    fmm2b0.inputs.use_histogram_matching = [True] * 2
+    fmm2b0.inputs.initial_moving_transform_com = 0
+    fmm2b0.inputs.collapse_output_transforms = True
+    fmm2b0.inputs.winsorize_upper_quantile = 0.995
+
+    applyxfm = pe.Node(ants.ApplyTransforms(
+        dimension=3, interpolation=interp), name='FMp_to_B0')
+
+    pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='PreliminaryFugue')
+    demean = pe.Node(niu.Function(
+
input_names=['in_file', 'in_mask'], output_names=['out_file'], + function=demean_image), name='DemeanFmap') + + cleanup = cleanup_edge_pipeline() + + addvol = pe.Node(niu.Function( + input_names=['in_file'], output_names=['out_file'], + function=add_empty_vol), name='AddEmptyVol') + + vsm = pe.Node(fsl.FUGUE(save_shift=True, **fugue_params), + name="ComputeVSM") + + split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') + merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') + unwarp = pe.MapNode(fsl.FUGUE(icorr=True, forward_warping=False), + iterfield=['in_file'], name='UnwarpDWIs') + thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], + name='RemoveNegative') + vsm2dfm = vsm2warp() + vsm2dfm.inputs.inputnode.scaling = 1.0 + + wf = pe.Workflow(name=name) + wf.connect([ + (inputnode, r_params, [('settings', 'in_file')]), + (r_params, eff_echo, [('echospacing', 'echospacing'), + ('acc_factor', 'acc_factor')]), + (inputnode, pha2rads, [('bmap_pha', 'in_file')]), + (inputnode, firstmag, [('bmap_mag', 'in_file')]), + (inputnode, baseline, [('in_file', 'in_file'), + ('in_ref', 'index')]), + (firstmag, n4, [('roi_file', 'input_image')]), + (n4, bet, [('output_image', 'in_file')]), + (bet, dilate, [('mask_file', 'in_file')]), + (pha2rads, prelude, [('out_file', 'phase_file')]), + (n4, prelude, [('output_image', 'magnitude_file')]), + (dilate, prelude, [('out_file', 'mask_file')]), + (r_params, rad2rsec, [('delta_te', 'delta_te')]), + (prelude, rad2rsec, [('unwrapped_phase_file', 'in_file')]), + + (baseline, fmm2b0, [('out_file', 'fixed_image')]), + (n4, fmm2b0, [('output_image', 'moving_image')]), + (inputnode, fmm2b0, [('in_mask', 'fixed_image_mask')]), + (dilate, fmm2b0, [('out_file', 'moving_image_mask')]), + + (baseline, applyxfm, [('out_file', 'reference_image')]), + (rad2rsec, applyxfm, [('out_file', 'input_image')]), + (fmm2b0, applyxfm, [ + ('forward_transforms', 'transforms'), + ('forward_invert_flags', 'invert_transform_flags')]), + + (applyxfm, pre_fugue, [('output_image', 'fmap_in_file')]), + (inputnode, pre_fugue, [('in_mask', 'mask_file')]), + (pre_fugue, demean, [('fmap_out_file', 'in_file')]), + (inputnode, demean, [('in_mask', 'in_mask')]), + (demean, cleanup, [('out_file', 'inputnode.in_file')]), + (inputnode, cleanup, [('in_mask', 'inputnode.in_mask')]), + (cleanup, addvol, [('outputnode.out_file', 'in_file')]), + (inputnode, vsm, [('in_mask', 'mask_file')]), + (addvol, vsm, [('out_file', 'fmap_in_file')]), + (r_params, vsm, [('delta_te', 'asym_se_time')]), + (eff_echo, vsm, [('eff_echo', 'dwell_time')]), + (inputnode, split, [('in_file', 'in_file')]), + (split, unwarp, [('out_files', 'in_file')]), + (vsm, unwarp, [('shift_out_file', 'shift_in_file')]), + (r_params, unwarp, [ + (('enc_dir', _fix_enc_dir), 'unwarp_direction')]), + (unwarp, thres, [('unwarped_file', 'in_file')]), + (thres, merge, [('out_file', 'in_files')]), + (r_params, vsm2dfm, [ + (('enc_dir', _fix_enc_dir), 'inputnode.enc_dir')]), + (merge, vsm2dfm, [('merged_file', 'inputnode.in_ref')]), + (vsm, vsm2dfm, [('shift_out_file', 'inputnode.in_vsm')]), + (merge, outputnode, [('merged_file', 'out_file')]), + (vsm, outputnode, [('shift_out_file', 'out_vsm')]), + (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) + ]) + return wf + + +def sdc_peb(name='peb_correction', + epi_params=dict(echospacing=0.77e-3, + acc_factor=3, + enc_dir='y-', + epi_factor=1), + altepi_params=dict(echospacing=0.77e-3, + acc_factor=3, + enc_dir='y', + epi_factor=1)): + """ + SDC stands for 
susceptibility distortion correction. PEB stands for
+    phase-encoding-based.
+
+    The phase-encoding-based (PEB) method implements SDC by acquiring
+    diffusion images with two different encoding directions [Andersson2003]_.
+    The most typical case is acquiring with opposed phase-gradient blips
+    (e.g. *A>>>P* and *P>>>A*, or equivalently, *-y* and *y*)
+    as in [Chiou2000]_, but it is also possible to use orthogonal
+    configurations [Cordes2000]_ (e.g. *A>>>P* and *L>>>R*,
+    or equivalently *-y* and *x*).
+    This workflow uses the implementation of FSL
+    (`TOPUP `_).
+
+    Example
+    -------
+
+    >>> from nipype.workflows.preprocessing.epi.susceptibility import sdc_peb
+    >>> peb = sdc_peb()
+    >>> peb.inputs.inputnode.in_file = 'epi.nii'
+    >>> peb.inputs.inputnode.alt_file = 'epi_rev.nii'
+    >>> peb.inputs.inputnode.in_bval = 'diffusion.bval'
+    >>> peb.inputs.inputnode.in_mask = 'mask.nii'
+    >>> peb.run() # doctest: +SKIP
+
+    .. admonition:: References
+
+      .. [Andersson2003] Andersson JL et al., `How to correct susceptibility
+        distortions in spin-echo echo-planar images: application to diffusion
+        tensor imaging `_.
+        Neuroimage. 2003 Oct;20(2):870-88. doi: 10.1016/S1053-8119(03)00336-7
+
+      .. [Cordes2000] Cordes D et al., Geometric distortion correction in EPI
+        using two images with orthogonal phase-encoding directions, in Proc.
+        ISMRM (8), p.1712, Denver, US, 2000.
+
+      .. [Chiou2000] Chiou JY, and Nalcioglu O, A simple method to correct
+        off-resonance related distortion in echo planar imaging, in Proc.
+        ISMRM (8), p.1712, Denver, US, 2000.
+
+    """
+
+    inputnode = pe.Node(niu.IdentityInterface(
+        fields=['in_file', 'in_bval', 'in_mask', 'alt_file', 'ref_num']),
+        name='inputnode')
+    outputnode = pe.Node(niu.IdentityInterface(
+        fields=['out_file', 'out_vsm', 'out_warp']), name='outputnode')
+
+    b0_ref = pe.Node(fsl.ExtractROI(t_size=1), name='b0_ref')
+    b0_alt = pe.Node(fsl.ExtractROI(t_size=1), name='b0_alt')
+    b0_comb = pe.Node(niu.Merge(2), name='b0_list')
+    b0_merge = pe.Node(fsl.Merge(dimension='t'), name='b0_merged')
+
+    topup = pe.Node(fsl.TOPUP(), name='topup')
+    topup.inputs.encoding_direction = [epi_params['enc_dir'],
+                                       altepi_params['enc_dir']]
+
+    readout = compute_readout(epi_params)
+    topup.inputs.readout_times = [readout,
+                                  compute_readout(altepi_params)]
+
+    unwarp = pe.Node(fsl.ApplyTOPUP(in_index=[1], method='jac'), name='unwarp')
+
+    # scaling = pe.Node(niu.Function(input_names=['in_file', 'enc_dir'],
+    #                   output_names=['factor'], function=_get_zoom),
+    #                   name='GetZoom')
+    # scaling.inputs.enc_dir = epi_params['enc_dir']
+    vsm2dfm = vsm2warp()
+    vsm2dfm.inputs.inputnode.enc_dir = epi_params['enc_dir']
+    vsm2dfm.inputs.inputnode.scaling = readout
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, b0_ref, [('in_file', 'in_file'),
+                             (('ref_num', _checkrnum), 't_min')]),
+        (inputnode, b0_alt, [('alt_file', 'in_file'),
+                             (('ref_num', _checkrnum), 't_min')]),
+        (b0_ref, b0_comb, [('roi_file', 'in1')]),
+        (b0_alt, b0_comb, [('roi_file', 'in2')]),
+        (b0_comb, b0_merge, [('out', 'in_files')]),
+        (b0_merge, topup, [('merged_file', 'in_file')]),
+        (topup, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'),
+                         ('out_movpar', 'in_topup_movpar'),
+                         ('out_enc_file', 'encoding_file')]),
+        (inputnode, unwarp, [('in_file', 'in_files')]),
+        (unwarp, outputnode, [('out_corrected', 'out_file')]),
+        # (b0_ref, scaling, [('roi_file', 'in_file')]),
+        # (scaling, vsm2dfm, [('factor', 'inputnode.scaling')]),
+        (b0_ref, vsm2dfm, [('roi_file', 'inputnode.in_ref')]),
+        (topup, vsm2dfm, [('out_field',
'inputnode.in_vsm')]), + (topup, outputnode, [('out_field', 'out_vsm')]), + (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) + ]) + return wf diff --git a/nipype/workflows/preprocessing/epi/tests/__init__.py b/nipype/workflows/preprocessing/epi/tests/__init__.py new file mode 100644 index 0000000000..1d26a84aa3 --- /dev/null +++ b/nipype/workflows/preprocessing/epi/tests/__init__.py @@ -0,0 +1,3 @@ +# coding: utf-8 +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: \ No newline at end of file diff --git a/nipype/workflows/preprocessing/epi/utils.py b/nipype/workflows/preprocessing/epi/utils.py new file mode 100644 index 0000000000..f084382eb7 --- /dev/null +++ b/nipype/workflows/preprocessing/epi/utils.py @@ -0,0 +1,851 @@ +# coding: utf-8 +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +import os + +import nipype.pipeline.engine as pe +from nipype.interfaces.io import JSONFileGrabber +from nipype.interfaces import utility as niu +from nipype.interfaces import freesurfer as fs +from nipype.interfaces import ants +from nipype.interfaces import fsl + + +def _eff_t_echo(echospacing, acc_factor): + eff_echo = echospacing / (1.0 * acc_factor) + return eff_echo + + +def _fix_enc_dir(enc_dir): + enc_dir = enc_dir.lower() + if enc_dir == 'lr': + return 'x-' + if enc_dir == 'rl': + return 'x' + if enc_dir == 'ap': + return 'y-' + if enc_dir == 'pa': + return 'y' + return enc_dir + + +def _checkrnum(ref_num): + from nipype.interfaces.base import isdefined + if (ref_num is None) or not isdefined(ref_num): + return 0 + return ref_num + + +def _nonb0(in_bval): + import numpy as np + bvals = np.loadtxt(in_bval) + return np.where(bvals != 0)[0].tolist() + + +def _xfm_jacobian(in_xfm): + import numpy as np + from math import fabs + return [fabs(np.linalg.det(np.loadtxt(xfm))) for xfm in in_xfm] + + +def _get_zoom(in_file, enc_dir): + import nibabel as nb + + zooms = nb.load(in_file).get_header().get_zooms() + + if 'y' in enc_dir: + return zooms[1] + elif 'x' in enc_dir: + return zooms[0] + elif 'z' in enc_dir: + return zooms[2] + else: + raise ValueError('Wrong encoding direction string') + + + +def cleanup_edge_pipeline(name='Cleanup'): + """ + Perform some de-spiking filtering to clean up the edge of the fieldmap + (copied from fsl_prepare_fieldmap) + """ + inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'in_mask']), + name='inputnode') + outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']), + name='outputnode') + + fugue = pe.Node(fsl.FUGUE(save_fmap=True, despike_2dfilter=True, + despike_threshold=2.1), name='Despike') + erode = pe.Node(fsl.maths.MathsCommand(nan2zeros=True, + args='-kernel 2D -ero'), name='MskErode') + newmsk = pe.Node(fsl.MultiImageMaths(op_string='-sub %s -thr 0.5 -bin'), + name='NewMask') + applymsk = pe.Node(fsl.ApplyMask(nan2zeros=True), name='ApplyMask') + join = pe.Node(niu.Merge(2), name='Merge') + addedge = pe.Node(fsl.MultiImageMaths(op_string='-mas %s -add %s'), + name='AddEdge') + + wf = pe.Workflow(name=name) + wf.connect([ + (inputnode, fugue, [('in_file', 'fmap_in_file'), + ('in_mask', 'mask_file')]), + (inputnode, erode, [('in_mask', 'in_file')]), + (inputnode, newmsk, [('in_mask', 'in_file')]), + (erode, newmsk, [('out_file', 'operand_files')]), + (fugue, applymsk, [('fmap_out_file', 'in_file')]), + (newmsk, applymsk, [('out_file', 'mask_file')]), + (erode, join, [('out_file', 'in1')]), + (applymsk, 
join, [('out_file', 'in2')]), + (inputnode, addedge, [('in_file', 'in_file')]), + (join, addedge, [('out', 'operand_files')]), + (addedge, outputnode, [('out_file', 'out_file')]) + ]) + return wf + + +def vsm2warp(name='Shiftmap2Warping'): + """ + Converts a voxel shift map (vsm) to a displacements field (warp). + """ + inputnode = pe.Node(niu.IdentityInterface(fields=['in_vsm', + 'in_ref', 'scaling', 'enc_dir']), name='inputnode') + outputnode = pe.Node(niu.IdentityInterface(fields=['out_warp']), + name='outputnode') + fixhdr = pe.Node(niu.Function(input_names=['in_file', 'in_file_hdr'], + output_names=['out_file'], function=copy_hdr), + name='Fix_hdr') + vsm = pe.Node(fsl.maths.BinaryMaths(operation='mul'), name='ScaleField') + vsm2dfm = pe.Node(fsl.ConvertWarp(relwarp=True, out_relwarp=True), + name='vsm2dfm') + + wf = pe.Workflow(name=name) + wf.connect([ + (inputnode, fixhdr, [('in_vsm', 'in_file'), + ('in_ref', 'in_file_hdr')]), + (inputnode, vsm, [('scaling', 'operand_value')]), + (fixhdr, vsm, [('out_file', 'in_file')]), + (vsm, vsm2dfm, [('out_file', 'shift_in_file')]), + (inputnode, vsm2dfm, [('in_ref', 'reference'), + ('enc_dir', 'shift_direction')]), + (vsm2dfm, outputnode, [('out_file', 'out_warp')]) + ]) + return wf + + +def dwi_flirt(name='DWICoregistration', excl_nodiff=False, + flirt_param={}): + """ + Generates a workflow for linear registration of dwi volumes + """ + inputnode = pe.Node(niu.IdentityInterface(fields=['reference', + 'in_file', 'ref_mask', 'in_xfms', 'in_bval']), + name='inputnode') + + initmat = pe.Node(niu.Function(input_names=['in_bval', 'in_xfms', + 'excl_nodiff'], output_names=['init_xfms'], + function=_checkinitxfm), name='InitXforms') + initmat.inputs.excl_nodiff = excl_nodiff + dilate = pe.Node(fsl.maths.MathsCommand(nan2zeros=True, + args='-kernel sphere 5 -dilM'), name='MskDilate') + split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') + pick_ref = pe.Node(niu.Select(), name='Pick_b0') + n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias') + enhb0 = pe.Node(niu.Function(input_names=['in_file', 'in_mask', + 'clip_limit'], output_names=['out_file'], + function=enhance), name='B0Equalize') + enhb0.inputs.clip_limit = 0.015 + enhdw = pe.MapNode(niu.Function(input_names=['in_file', 'in_mask'], + output_names=['out_file'], function=enhance), + name='DWEqualize', iterfield=['in_file']) + flirt = pe.MapNode(fsl.FLIRT(**flirt_param), name='CoRegistration', + iterfield=['in_file', 'in_matrix_file']) + thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], + name='RemoveNegative') + merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') + outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', + 'out_xfms']), name='outputnode') + wf = pe.Workflow(name=name) + wf.connect([ + (inputnode, split, [('in_file', 'in_file')]), + (inputnode, dilate, [('ref_mask', 'in_file')]), + (inputnode, enhb0, [('ref_mask', 'in_mask')]), + (inputnode, initmat, [('in_xfms', 'in_xfms'), + ('in_bval', 'in_bval')]), + (inputnode, n4, [('reference', 'input_image'), + ('ref_mask', 'mask_image')]), + (dilate, flirt, [('out_file', 'ref_weight'), + ('out_file', 'in_weight')]), + (n4, enhb0, [('output_image', 'in_file')]), + (split, enhdw, [('out_files', 'in_file')]), + (dilate, enhdw, [('out_file', 'in_mask')]), + (enhb0, flirt, [('out_file', 'reference')]), + (enhdw, flirt, [('out_file', 'in_file')]), + (initmat, flirt, [('init_xfms', 'in_matrix_file')]), + (flirt, thres, [('out_file', 'in_file')]), + (thres, merge, [('out_file', 
'in_files')]), + (merge, outputnode, [('merged_file', 'out_file')]), + (flirt, outputnode, [('out_matrix_file', 'out_xfms')]) + ]) + return wf + + +def apply_all_corrections(name='UnwarpArtifacts'): + """ + Combines two lists of linear transforms with the deformation field + map obtained typically after the SDC process. + Additionally, computes the corresponding bspline coefficients and + the map of determinants of the jacobian. + """ + + inputnode = pe.Node(niu.IdentityInterface(fields=['in_sdc', + 'in_hmc', 'in_ecc', 'in_dwi']), name='inputnode') + outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_warp', + 'out_coeff', 'out_jacobian']), name='outputnode') + warps = pe.MapNode(fsl.ConvertWarp(relwarp=True), + iterfield=['premat', 'postmat'], + name='ConvertWarp') + + selref = pe.Node(niu.Select(index=[0]), name='Reference') + + split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') + unwarp = pe.MapNode(fsl.ApplyWarp(), iterfield=['in_file', 'field_file'], + name='UnwarpDWIs') + + coeffs = pe.MapNode(fsl.WarpUtils(out_format='spline'), + iterfield=['in_file'], name='CoeffComp') + jacobian = pe.MapNode(fsl.WarpUtils(write_jacobian=True), + iterfield=['in_file'], name='JacobianComp') + jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'), + iterfield=['in_file', 'operand_files'], + name='ModulateDWIs') + + thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], + name='RemoveNegative') + merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') + + wf = pe.Workflow(name=name) + wf.connect([ + (inputnode, warps, [('in_sdc', 'warp1'), + ('in_hmc', 'premat'), + ('in_ecc', 'postmat'), + ('in_dwi', 'reference')]), + (inputnode, split, [('in_dwi', 'in_file')]), + (split, selref, [('out_files', 'inlist')]), + (warps, unwarp, [('out_file', 'field_file')]), + (split, unwarp, [('out_files', 'in_file')]), + (selref, unwarp, [('out', 'ref_file')]), + (selref, coeffs, [('out', 'reference')]), + (warps, coeffs, [('out_file', 'in_file')]), + (selref, jacobian, [('out', 'reference')]), + (coeffs, jacobian, [('out_file', 'in_file')]), + (unwarp, jacmult, [('out_file', 'in_file')]), + (jacobian, jacmult, [('out_jacobian', 'operand_files')]), + (jacmult, thres, [('out_file', 'in_file')]), + (thres, merge, [('out_file', 'in_files')]), + (warps, outputnode, [('out_file', 'out_warp')]), + (coeffs, outputnode, [('out_file', 'out_coeff')]), + (jacobian, outputnode, [('out_jacobian', 'out_jacobian')]), + (merge, outputnode, [('merged_file', 'out_file')]) + ]) + return wf + + +def extract_bval(in_dwi, in_bval, b=0, out_file=None): + """ + Writes an image containing only the volumes with b-value specified at + input + """ + import numpy as np + import nibabel as nb + import os.path as op + + if out_file is None: + fname, ext = op.splitext(op.basename(in_dwi)) + if ext == ".gz": + fname, ext2 = op.splitext(fname) + ext = ext2 + ext + out_file = op.abspath("%s_tsoi%s" % (fname, ext)) + + im = nb.load(in_dwi) + dwidata = im.get_data() + bvals = np.loadtxt(in_bval) + + if b == 'diff': + selection = np.where(bvals != 0) + elif b == 'nodiff': + selection = np.where(bvals == 0) + else: + selection = np.where(bvals == b) + + extdata = np.squeeze(dwidata.take(selection, axis=3)) + hdr = im.get_header().copy() + hdr.set_data_shape(extdata.shape) + nb.Nifti1Image(extdata, im.get_affine(), + hdr).to_filename(out_file) + return out_file + + +def hmc_split(in_file, in_bval, ref_num=0, lowbval=5.0): + """ + Selects the reference and moving volumes from a dwi dataset + for the purpose 
of HMC.
+    """
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+    from nipype.interfaces.base import isdefined
+
+    im = nb.load(in_file)
+    data = im.get_data()
+    hdr = im.get_header().copy()
+    bval = np.loadtxt(in_bval)
+
+    lowbs = np.where(bval <= lowbval)[0]
+
+    volid = lowbs[0]
+    if (isdefined(ref_num) and (ref_num < len(lowbs))):
+        volid = ref_num
+
+    # grab the reference volume before it is removed from the moving set
+    refdata = data[..., volid]
+
+    if volid == 0:
+        data = data[..., 1:]
+        bval = bval[1:]
+    elif volid == (data.shape[-1] - 1):
+        data = data[..., :-1]
+        bval = bval[:-1]
+    else:
+        data = np.concatenate((data[..., :volid], data[..., (volid + 1):]),
+                              axis=3)
+        bval = np.hstack((bval[:volid], bval[(volid + 1):]))
+
+    out_ref = op.abspath('hmc_ref.nii.gz')
+    out_mov = op.abspath('hmc_mov.nii.gz')
+    out_bval = op.abspath('bval_split.txt')
+
+    hdr.set_data_shape(refdata.shape)
+    nb.Nifti1Image(refdata, im.get_affine(), hdr).to_filename(out_ref)
+
+    hdr.set_data_shape(data.shape)
+    nb.Nifti1Image(data, im.get_affine(), hdr).to_filename(out_mov)
+    np.savetxt(out_bval, bval)
+    return [out_ref, out_mov, out_bval, volid]
+
+
+def remove_comp(in_file, in_bval, volid=0, out_file=None):
+    """
+    Removes the volume ``volid`` from the 4D nifti file
+    """
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+
+    if out_file is None:
+        fname, ext = op.splitext(op.basename(in_file))
+        if ext == ".gz":
+            fname, ext2 = op.splitext(fname)
+            ext = ext2 + ext
+        out_file = op.abspath("%s_extract%s" % (fname, ext))
+
+    im = nb.load(in_file)
+    data = im.get_data()
+    hdr = im.get_header().copy()
+    bval = np.loadtxt(in_bval)
+
+    if volid == 0:
+        data = data[..., 1:]
+        bval = bval[1:]
+    elif volid == (data.shape[-1] - 1):
+        data = data[..., :-1]
+        bval = bval[:-1]
+    else:
+        data = np.concatenate((data[..., :volid], data[..., (volid + 1):]),
+                              axis=3)
+        bval = np.hstack((bval[:volid], bval[(volid + 1):]))
+    hdr.set_data_shape(data.shape)
+    nb.Nifti1Image(data, im.get_affine(), hdr).to_filename(out_file)
+
+    out_bval = op.abspath('bval_extract.txt')
+    np.savetxt(out_bval, bval)
+    return out_file, out_bval
+
+
+def insert_mat(inlist, volid=0):
+    import numpy as np
+    import os.path as op
+    idfname = op.abspath('identity.mat')
+    out = inlist
+    np.savetxt(idfname, np.eye(4))
+    out.insert(volid, idfname)
+    return out
+
+
+def recompose_dwi(in_dwi, in_bval, in_corrected, out_file=None):
+    """
+    Recomposes the dMRI data according to the b-values table after EC
+    correction
+    """
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+
+    if out_file is None:
+        fname, ext = op.splitext(op.basename(in_dwi))
+        if ext == ".gz":
+            fname, ext2 = op.splitext(fname)
+            ext = ext2 + ext
+        out_file = op.abspath("%s_eccorrect%s" % (fname, ext))
+
+    im = nb.load(in_dwi)
+    dwidata = im.get_data()
+    bvals = np.loadtxt(in_bval)
+    dwis = np.where(bvals != 0)[0].tolist()
+
+    if len(dwis) != len(in_corrected):
+        raise RuntimeError(('Length of DWIs in b-values table and after '
+                            'correction should match'))
+
+    for bindex, dwi in zip(dwis, in_corrected):
+        dwidata[..., bindex] = nb.load(dwi).get_data()
+
+    nb.Nifti1Image(dwidata, im.get_affine(),
+                   im.get_header()).to_filename(out_file)
+    return out_file
+
+
+def recompose_xfm(in_bval, in_xfms):
+    """
+    Inserts identity transformation matrices for the b0 volumes to build up
+    a complete list of transforms
+    """
+    import numpy as np
+    import os.path as op
+
+    bvals = np.loadtxt(in_bval)
+    xfms = iter([np.loadtxt(xfm) for xfm in in_xfms])
+    out_files = []
+
+    for i, b in enumerate(bvals):
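+        # b0 volumes keep an identity transform; every DWI consumes the
+        # next transform estimated by the registration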
+ if b == 0.0: + mat = np.eye(4) + else: + mat = xfms.next() + + out_name = op.abspath('eccor_%04d.mat' % i) + out_files.append(out_name) + np.savetxt(out_name, mat) + + return out_files + + +def time_avg(in_file, index=[0], out_file=None): + """ + Average the input time-series, selecting the indices given in index + + .. warning:: time steps should be already registered (corrected for + head motion artifacts). + + """ + import numpy as np + import nibabel as nb + import os.path as op + + if out_file is None: + fname, ext = op.splitext(op.basename(in_file)) + if ext == ".gz": + fname, ext2 = op.splitext(fname) + ext = ext2 + ext + out_file = op.abspath("%s_baseline%s" % (fname, ext)) + + index = np.atleast_1d(index).tolist() + + imgs = np.array(nb.four_to_three(nb.load(in_file)))[index] + if len(index) == 1: + data = imgs[0].get_data().astype(np.float32) + else: + data = np.average(np.array([im.get_data().astype(np.float32) for im in imgs]), + axis=0) + + hdr = imgs[0].get_header().copy() + hdr.set_data_shape(data.shape) + hdr.set_xyzt_units('mm') + hdr.set_data_dtype(np.float32) + nb.Nifti1Image(data, imgs[0].get_affine(), hdr).to_filename(out_file) + return out_file + + +def b0_indices(in_bval, max_b=10.0): + """ + Extract the indices of slices in a b-values file with a low b value + """ + import numpy as np + bval = np.loadtxt(in_bval) + return np.argwhere(bval <= max_b).flatten().tolist() + + +def b0_average(in_dwi, in_bval, max_b=10.0, out_file=None): + """ + A function that averages the *b0* volumes from a DWI dataset. + As current dMRI data are being acquired with all b-values > 0.0, + the *lowb* volumes are selected by specifying the parameter max_b. + + .. warning:: *b0* should be already registered (head motion artifact should + be corrected). + + """ + import numpy as np + import nibabel as nb + import os.path as op + + if out_file is None: + fname, ext = op.splitext(op.basename(in_dwi)) + if ext == ".gz": + fname, ext2 = op.splitext(fname) + ext = ext2 + ext + out_file = op.abspath("%s_avg_b0%s" % (fname, ext)) + + imgs = np.array(nb.four_to_three(nb.load(in_dwi))) + index = b0_indices(in_bval, max_b=max_b) + b0s = [im.get_data().astype(np.float32) + for im in imgs[index]] + b0 = np.average(np.array(b0s), axis=0) + + hdr = imgs[0].get_header().copy() + hdr.set_data_shape(b0.shape) + hdr.set_xyzt_units('mm') + hdr.set_data_dtype(np.float32) + nb.Nifti1Image(b0, imgs[0].get_affine(), hdr).to_filename(out_file) + return out_file + + +def rotate_bvecs(in_bvec, in_matrix): + """ + Rotates the input bvec file accordingly with a list of matrices. + + .. note:: the input affine matrix transforms points in the destination + image to their corresponding coordinates in the original image. + Therefore, this matrix should be inverted first, as we want to know + the target position of :math:`\\vec{r}`. 
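+
+    A toy check of that inversion (hypothetical affine rotating 90 degrees
+    about *z*; illustration only, not part of the workflow):
+
+    >>> import numpy as np
+    >>> aff = np.eye(4)
+    >>> aff[:2, :2] = [[0., -1.], [1., 0.]]
+    >>> invrot = np.linalg.inv(aff)[:3, :3]
+    >>> np.allclose(invrot.dot([0., 1., 0.]), [1., 0., 0.])
+    True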
+ + """ + import os + import numpy as np + + name, fext = os.path.splitext(os.path.basename(in_bvec)) + if fext == '.gz': + name, _ = os.path.splitext(name) + out_file = os.path.abspath('%s_rotated.bvec' % name) + bvecs = np.loadtxt(in_bvec).T + new_bvecs = [] + + if len(bvecs) != len(in_matrix): + raise RuntimeError(('Number of b-vectors (%d) and rotation ' + 'matrices (%d) should match.') % (len(bvecs), + len(in_matrix))) + + for bvec, mat in zip(bvecs, in_matrix): + if np.all(bvec == 0.0): + new_bvecs.append(bvec) + else: + invrot = np.linalg.inv(np.loadtxt(mat))[:3, :3] + newbvec = invrot.dot(bvec) + new_bvecs.append((newbvec/np.linalg.norm(newbvec))) + + np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') + return out_file + + +def eddy_rotate_bvecs(in_bvec, eddy_params): + """ + Rotates the input bvec file accordingly with a list of parameters sourced + from ``eddy``, as explained `here + `_. + """ + import os + import numpy as np + from math import sin, cos + + name, fext = os.path.splitext(os.path.basename(in_bvec)) + if fext == '.gz': + name, _ = os.path.splitext(name) + out_file = os.path.abspath('%s_rotated.bvec' % name) + bvecs = np.loadtxt(in_bvec).T + new_bvecs = [] + + params = np.loadtxt(eddy_params) + + if len(bvecs) != len(params): + raise RuntimeError(('Number of b-vectors and rotation ' + 'matrices should match.')) + + for bvec, row in zip(bvecs, params): + if np.all(bvec == 0.0): + new_bvecs.append(bvec) + else: + ax = row[3] + ay = row[4] + az = row[5] + + Rx = np.array([[1.0, 0.0, 0.0], + [0.0, cos(ax), -sin(ax)], + [0.0, sin(ax), cos(ax)]]) + Ry = np.array([[cos(ay), 0.0, sin(ay)], + [0.0, 1.0, 0.0], + [-sin(ay), 0.0, cos(ay)]]) + Rz = np.array([[cos(az), -sin(az), 0.0], + [sin(az), cos(az), 0.0], + [0.0, 0.0, 1.0]]) + R = Rx.dot(Ry).dot(Rz) + + invrot = np.linalg.inv(R) + newbvec = invrot.dot(bvec) + new_bvecs.append((newbvec/np.linalg.norm(newbvec))) + + np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') + return out_file + + +def compute_readout(params): + """ + Computes readout time from epi params (see `eddy documentation + `_). + + .. warning:: ``params['echospacing']`` should be in *sec* units. 
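+
+    A quick numeric sketch (hypothetical acquisition values, for
+    illustration only):
+
+    >>> p = dict(echospacing=0.77e-3, acc_factor=3, epi_factor=90)
+    >>> round(compute_readout(p), 4)
+    0.0228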
+
+    """
+    epi_factor = 1.0
+    acc_factor = 1.0
+    try:
+        if params['epi_factor'] > 1:
+            epi_factor = float(params['epi_factor'] - 1)
+    except:
+        pass
+    try:
+        if params['acc_factor'] > 1:
+            acc_factor = 1.0 / params['acc_factor']
+    except:
+        pass
+    return acc_factor * epi_factor * params['echospacing']
+
+
+def siemens2rads(in_file, out_file=None):
+    """
+    Converts the input phase difference map to rads
+    """
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+    import math
+
+    if out_file is None:
+        fname, fext = op.splitext(op.basename(in_file))
+        if fext == '.gz':
+            fname, _ = op.splitext(fname)
+        out_file = op.abspath('./%s_rads.nii.gz' % fname)
+
+    in_file = np.atleast_1d(in_file).tolist()
+    im = nb.load(in_file[0])
+    data = im.get_data().astype(np.float32)
+    hdr = im.get_header().copy()
+
+    if len(in_file) == 2:
+        data = nb.load(in_file[1]).get_data().astype(np.float32) - data
+    elif (data.ndim == 4) and (data.shape[-1] == 2):
+        data = np.squeeze(data[..., 1] - data[..., 0])
+        hdr.set_data_shape(data.shape[:3])
+
+    imin = data.min()
+    imax = data.max()
+    data = (2.0 * math.pi * (data - imin)/(imax-imin)) - math.pi
+    hdr.set_data_dtype(np.float32)
+    hdr.set_xyzt_units('mm')
+    hdr['datatype'] = 16
+    nb.Nifti1Image(data, im.get_affine(), hdr).to_filename(out_file)
+    return out_file
+
+
+def rads2radsec(in_file, delta_te, out_file=None):
+    """
+    Converts the input phase map from rads to rads/sec, dividing by the
+    echo time difference ``delta_te``
+    """
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+    import math
+
+    if out_file is None:
+        fname, fext = op.splitext(op.basename(in_file))
+        if fext == '.gz':
+            fname, _ = op.splitext(fname)
+        out_file = op.abspath('./%s_radsec.nii.gz' % fname)
+
+    im = nb.load(in_file)
+    data = im.get_data().astype(np.float32) * (1.0/delta_te)
+    nb.Nifti1Image(data, im.get_affine(),
+                   im.get_header()).to_filename(out_file)
+    return out_file
+
+
+def demean_image(in_file, in_mask=None, out_file=None):
+    """
+    Demeans image data inside the mask, using the median as central value
+    """
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+    import math
+
+    if out_file is None:
+        fname, fext = op.splitext(op.basename(in_file))
+        if fext == '.gz':
+            fname, _ = op.splitext(fname)
+        out_file = op.abspath('./%s_demean.nii.gz' % fname)
+
+    im = nb.load(in_file)
+    data = im.get_data().astype(np.float32)
+    msk = np.ones_like(data)
+
+    if in_mask is not None:
+        msk = nb.load(in_mask).get_data().astype(np.float32)
+        msk[msk > 0] = 1.0
+        msk[msk < 1] = 0.0
+
+    mean = np.median(data[msk == 1].reshape(-1))
+    data[msk == 1] = data[msk == 1] - mean
+    nb.Nifti1Image(data, im.get_affine(),
+                   im.get_header()).to_filename(out_file)
+    return out_file
+
+
+def add_empty_vol(in_file, out_file=None):
+    """
+    Adds an empty vol to the phase difference image
+    """
+    import nibabel as nb
+    import os.path as op
+    import numpy as np
+    import math
+
+    if out_file is None:
+        fname, fext = op.splitext(op.basename(in_file))
+        if fext == '.gz':
+            fname, _ = op.splitext(fname)
+        out_file = op.abspath('./%s_4D.nii.gz' % fname)
+
+    im = nb.load(in_file)
+    zim = nb.Nifti1Image(np.zeros_like(im.get_data()), im.get_affine(),
+                         im.get_header())
+    nb.funcs.concat_images([im, zim]).to_filename(out_file)
+    return out_file
+
+
+def reorient_bvecs(in_dwi, old_dwi, in_bvec):
+    """
+    Checks reorientations of ``in_dwi`` w.r.t. ``old_dwi`` and
+    reorients the in_bvec table accordingly.
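+
+    A toy sketch of the rotation extraction (hypothetical affines in which
+    the new image swaps the x/y axes; illustration only):
+
+    >>> import numpy as np
+    >>> O = np.eye(4)
+    >>> N = np.eye(4)[[1, 0, 2, 3]]
+    >>> RS = N.dot(np.linalg.inv(O))[:3, :3]
+    >>> RS.dot([1., 0., 0.]).tolist()
+    [0.0, 1.0, 0.0]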
+    """
+    import os
+    import numpy as np
+    import nibabel as nb
+
+    name, fext = os.path.splitext(os.path.basename(in_bvec))
+    if fext == '.gz':
+        name, _ = os.path.splitext(name)
+    out_file = os.path.abspath('%s_reorient.bvec' % name)
+    bvecs = np.loadtxt(in_bvec).T
+    new_bvecs = []
+
+    N = nb.load(in_dwi).get_affine()
+    O = nb.load(old_dwi).get_affine()
+    RS = N.dot(np.linalg.inv(O))[:3, :3]
+    sc_idx = np.where((np.abs(RS) != 1) & (RS != 0))
+    S = np.ones_like(RS)
+    S[sc_idx] = RS[sc_idx]
+    R = RS/S
+
+    new_bvecs = [R.dot(b) for b in bvecs]
+    np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f')
+    return out_file
+
+
+def copy_hdr(in_file, in_file_hdr, out_file=None):
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+
+    if out_file is None:
+        fname, fext = op.splitext(op.basename(in_file))
+        if fext == '.gz':
+            fname, _ = op.splitext(fname)
+        out_file = op.abspath('./%s_fixhdr.nii.gz' % fname)
+
+    imref = nb.load(in_file_hdr)
+    hdr = imref.get_header().copy()
+    hdr.set_data_dtype(np.float32)
+    vsm = nb.load(in_file).get_data().astype(np.float32)
+    hdr.set_data_shape(vsm.shape)
+    hdr.set_xyzt_units('mm')
+    nii = nb.Nifti1Image(vsm, imref.get_affine(), hdr)
+    nii.to_filename(out_file)
+    return out_file
+
+
+def enhance(in_file, clip_limit=0.010, in_mask=None, out_file=None):
+    import numpy as np
+    import nibabel as nb
+    import os.path as op
+    from skimage import exposure, img_as_int
+
+    if out_file is None:
+        fname, fext = op.splitext(op.basename(in_file))
+        if fext == '.gz':
+            fname, _ = op.splitext(fname)
+        out_file = op.abspath('./%s_enh.nii.gz' % fname)
+
+    im = nb.load(in_file)
+    imdata = im.get_data()
+    imshape = im.get_shape()
+
+    if in_mask is not None:
+        msk = nb.load(in_mask).get_data()
+        msk[msk > 0] = 1
+        msk[msk < 1] = 0
+        imdata = imdata * msk
+
+    immin = imdata.min()
+    imdata = (imdata - immin).astype(np.uint16)
+
+    adapted = exposure.equalize_adapthist(imdata.reshape(imshape[0], -1),
+                                          clip_limit=clip_limit)
+
+    nb.Nifti1Image(adapted.reshape(imshape), im.get_affine(),
+                   im.get_header()).to_filename(out_file)
+
+    return out_file
+
+
+def _checkinitxfm(in_bval, excl_nodiff, in_xfms=None):
+    from nipype.interfaces.base import isdefined
+    import numpy as np
+    import os.path as op
+    bvals = np.loadtxt(in_bval)
+
+    gen_id = ((in_xfms is None) or
+              (not isdefined(in_xfms)) or
+              (len(in_xfms) != len(bvals)))
+
+    init_xfms = []
+    if excl_nodiff:
+        dws = np.where(bvals != 0)[0].tolist()
+    else:
+        dws = range(len(bvals))
+
+    if gen_id:
+        for i in dws:
+            xfm_file = op.abspath('init_%04d.mat' % i)
+            np.savetxt(xfm_file, np.eye(4))
+            init_xfms.append(xfm_file)
+    else:
+        init_xfms = [in_xfms[i] for i in dws]
+
+    return init_xfms

From 39dba99de6b56bbcc27cfee16734121dfc21056d Mon Sep 17 00:00:00 2001
From: Oscar Esteban 
Date: Sun, 26 Apr 2015 21:12:45 +0200
Subject: [PATCH 31/42] amend previous commit

Sorry for this, I thought explicit removal is clearer
---
 .../workflows/preprocessing/epi/__init__.py   |  11 -
 nipype/workflows/preprocessing/epi/bias.py    |  70 --
 .../workflows/preprocessing/epi/complete.py   | 189 ----
 nipype/workflows/preprocessing/epi/eddy.py    | 130 ---
 nipype/workflows/preprocessing/epi/fsl.py     | 107 ---
 nipype/workflows/preprocessing/epi/motion.py  | 120 ---
 .../preprocessing/epi/susceptibility.py       | 301 -------
 .../preprocessing/epi/tests/__init__.py       |   3 -
 nipype/workflows/preprocessing/epi/utils.py   | 851 ------------------
 9 files changed, 1782 deletions(-)
 delete mode 100644 nipype/workflows/preprocessing/epi/__init__.py
 delete mode 100644
nipype/workflows/preprocessing/epi/bias.py delete mode 100644 nipype/workflows/preprocessing/epi/complete.py delete mode 100644 nipype/workflows/preprocessing/epi/eddy.py delete mode 100644 nipype/workflows/preprocessing/epi/fsl.py delete mode 100644 nipype/workflows/preprocessing/epi/motion.py delete mode 100644 nipype/workflows/preprocessing/epi/susceptibility.py delete mode 100644 nipype/workflows/preprocessing/epi/tests/__init__.py delete mode 100644 nipype/workflows/preprocessing/epi/utils.py diff --git a/nipype/workflows/preprocessing/epi/__init__.py b/nipype/workflows/preprocessing/epi/__init__.py deleted file mode 100644 index b9f7314fba..0000000000 --- a/nipype/workflows/preprocessing/epi/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from bias import remove_bias -from eddy import ecc_fsl -from susceptibility import sdc_fmb, sdc_peb -from motion import hmc_flirt - -from fsl import all_dmri -from complete import all_fmb_pipeline, all_peb_pipeline \ No newline at end of file diff --git a/nipype/workflows/preprocessing/epi/bias.py b/nipype/workflows/preprocessing/epi/bias.py deleted file mode 100644 index 9331626154..0000000000 --- a/nipype/workflows/preprocessing/epi/bias.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl -from .utils import * - -def remove_bias(name='bias_correct'): - """ - This workflow estimates a single multiplicative bias field from the - averaged *b0* image, as suggested in [Jeurissen2014]_. - - .. admonition:: References - - .. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained - spherical deconvolution for improved analysis of multi-shell diffusion - MRI data `_. - NeuroImage (2014). 
doi: 10.1016/j.neuroimage.2014.07.061 - - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import remove_bias - >>> bias = remove_bias() - >>> bias.inputs.inputnode.in_file = 'epi.nii' - >>> bias.inputs.inputnode.in_bval = 'diffusion.bval' - >>> bias.inputs.inputnode.in_mask = 'mask.nii' - >>> bias.run() # doctest: +SKIP - - """ - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'in_bval', 'in_mask']), name='inputnode') - - outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']), - name='outputnode') - - avg_b0 = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval'], output_names=['out_file'], - function=b0_average), name='b0_avg') - n4 = pe.Node(ants.N4BiasFieldCorrection( - dimension=3, save_bias=True, bspline_fitting_distance=600), - name='Bias_b0') - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - mult = pe.MapNode(fsl.MultiImageMaths(op_string='-div %s'), - iterfield=['in_file'], name='RemoveBiasOfDWIs') - thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], - name='RemoveNegative') - merge = pe.Node(fsl.utils.Merge(dimension='t'), name='MergeDWIs') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, avg_b0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0, n4, [('out_file', 'input_image')]), - (inputnode, n4, [('in_mask', 'mask_image')]), - (inputnode, split, [('in_file', 'in_file')]), - (n4, mult, [('bias_image', 'operand_files')]), - (split, mult, [('out_files', 'in_file')]), - (mult, thres, [('out_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), - (merge, outputnode, [('merged_file', 'out_file')]) - ]) - return wf \ No newline at end of file diff --git a/nipype/workflows/preprocessing/epi/complete.py b/nipype/workflows/preprocessing/epi/complete.py deleted file mode 100644 index 6d0acac12c..0000000000 --- a/nipype/workflows/preprocessing/epi/complete.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl -from .utils import * - -def all_fmb_pipeline(name='hmc_sdc_ecc', fugue_params=dict(smooth3d=2.0)): - """ - Builds a pipeline including three artifact corrections: head-motion - correction (HMC), susceptibility-derived distortion correction (SDC), - and Eddy currents-derived distortion correction (ECC). - - The displacement fields from each kind of distortions are combined. Thus, - only one interpolation occurs between input data and result. - - .. warning:: this workflow rotates the gradients table (*b*-vectors) - [Leemans09]_. 
- - - Examples - -------- - - >>> from nipype.workflows.dmri.fsl.artifacts import all_fmb_pipeline - >>> allcorr = all_fmb_pipeline() - >>> allcorr.inputs.inputnode.in_file = 'epi.nii' - >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval' - >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> allcorr.inputs.inputnode.bmap_mag = 'magnitude.nii' - >>> allcorr.inputs.inputnode.bmap_pha = 'phase.nii' - >>> allcorr.inputs.inputnode.epi_param = 'epi_param.txt' - >>> allcorr.run() # doctest: +SKIP - - """ - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'in_bvec', 'in_bval', 'bmap_pha', 'bmap_mag', - 'epi_param']), name='inputnode') - - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_mask', 'out_bvec']), name='outputnode') - - list_b0 = pe.Node(niu.Function( - input_names=['in_bval'], output_names=['out_idx'], - function=b0_indices), name='B0indices') - - avg_b0_0 = pe.Node(niu.Function( - input_names=['in_file', 'index'], output_names=['out_file'], - function=time_avg), name='b0_avg_pre') - avg_b0_1 = pe.Node(niu.Function( - input_names=['in_file', 'index'], output_names=['out_file'], - function=time_avg), name='b0_avg_post') - - bet_dwi0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), - name='bet_dwi_pre') - bet_dwi1 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), - name='bet_dwi_post') - - hmc = hmc_pipeline() - sdc = sdc_fmb(fugue_params=fugue_params) - ecc = ecc_pipeline() - unwarp = apply_all_corrections() - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, hmc, [('in_file', 'inputnode.in_file'), - ('in_bvec', 'inputnode.in_bvec'), - ('in_bval', 'inputnode.in_bval')]), - (inputnode, list_b0, [('in_bval', 'in_bval')]), - (inputnode, avg_b0_0, [('in_file', 'in_file')]), - (list_b0, avg_b0_0, [('out_idx', 'index')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), - (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]), - (hmc, sdc, [ - ('outputnode.out_file', 'inputnode.in_file')]), - (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), - (inputnode, sdc, [('bmap_pha', 'inputnode.bmap_pha'), - ('bmap_mag', 'inputnode.bmap_mag'), - ('epi_param', 'inputnode.settings')]), - (list_b0, sdc, [('out_idx', 'inputnode.in_ref')]), - (hmc, ecc, [ - ('outputnode.out_xfms', 'inputnode.in_xfms')]), - (inputnode, ecc, [('in_file', 'inputnode.in_file'), - ('in_bval', 'inputnode.in_bval')]), - (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), - (ecc, avg_b0_1, [('outputnode.out_file', 'in_file')]), - (list_b0, avg_b0_1, [('out_idx', 'index')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), - (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]), - (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]), - (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]), - (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]), - (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]), - (unwarp, outputnode, [('outputnode.out_file', 'out_file')]), - (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) - ]) - return wf - - -def all_peb_pipeline(name='hmc_sdc_ecc', - epi_params=dict(echospacing=0.77e-3, - acc_factor=3, - enc_dir='y-', - epi_factor=1), - altepi_params=dict(echospacing=0.77e-3, - acc_factor=3, - enc_dir='y', - epi_factor=1)): - """ - Builds a pipeline including three artifact corrections: head-motion - correction (HMC), susceptibility-derived distortion correction (SDC), - and Eddy currents-derived distortion correction (ECC). - - .. 
warning:: this workflow rotates the gradients table (*b*-vectors) - [Leemans09]_. - - - Examples - -------- - - >>> from nipype.workflows.dmri.fsl.artifacts import all_peb_pipeline - >>> allcorr = all_peb_pipeline() - >>> allcorr.inputs.inputnode.in_file = 'epi.nii' - >>> allcorr.inputs.inputnode.alt_file = 'epi_rev.nii' - >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval' - >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> allcorr.run() # doctest: +SKIP - - """ - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'in_bvec', 'in_bval', 'alt_file']), - name='inputnode') - - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_mask', 'out_bvec']), name='outputnode') - - avg_b0_0 = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval'], output_names=['out_file'], - function=b0_average), name='b0_avg_pre') - avg_b0_1 = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval'], output_names=['out_file'], - function=b0_average), name='b0_avg_post') - bet_dwi0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), - name='bet_dwi_pre') - bet_dwi1 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), - name='bet_dwi_post') - - hmc = hmc_pipeline() - sdc = sdc_peb(epi_params=epi_params, altepi_params=altepi_params) - ecc = ecc_pipeline() - - unwarp = apply_all_corrections() - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, hmc, [('in_file', 'inputnode.in_file'), - ('in_bvec', 'inputnode.in_bvec'), - ('in_bval', 'inputnode.in_bval')]), - (inputnode, avg_b0_0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), - (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]), - (hmc, sdc, [ - ('outputnode.out_file', 'inputnode.in_file')]), - (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), - (inputnode, sdc, [('in_bval', 'inputnode.in_bval'), - ('alt_file', 'inputnode.alt_file')]), - (inputnode, ecc, [('in_file', 'inputnode.in_file'), - ('in_bval', 'inputnode.in_bval')]), - (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), - (hmc, ecc, [ - ('outputnode.out_xfms', 'inputnode.in_xfms')]), - (ecc, avg_b0_1, [('outputnode.out_file', 'in_dwi')]), - (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), - (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]), - (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]), - (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]), - (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]), - (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]), - (unwarp, outputnode, [('outputnode.out_file', 'out_file')]), - (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) - ]) - return wf diff --git a/nipype/workflows/preprocessing/epi/eddy.py b/nipype/workflows/preprocessing/epi/eddy.py deleted file mode 100644 index 4102acab3c..0000000000 --- a/nipype/workflows/preprocessing/epi/eddy.py +++ /dev/null @@ -1,130 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl -from .utils import * - -def ecc_fsl(name='eddy_correct'): - """ - ECC stands for Eddy currents correction. 
- - Creates a pipeline that corrects for artifacts induced by Eddy currents in - dMRI sequences. - It takes a series of diffusion weighted images and linearly co-registers - them to one reference image (the average of all b0s in the dataset). - - DWIs are also modulated by the determinant of the Jacobian as indicated by - [Jones10]_ and [Rohde04]_. - - A list of rigid transformation matrices can be provided, sourcing from a - :func:`.hmc_pipeline` workflow, to initialize registrations in a *motion - free* framework. - - A list of affine transformation matrices is available as output, so that - transforms can be chained (discussion - `here `_). - - .. admonition:: References - - .. [Jones10] Jones DK, `The signal intensity must be modulated by the - determinant of the Jacobian when correcting for eddy currents in - diffusion MRI - `_, - Proc. ISMRM 18th Annual Meeting, (2010). - - .. [Rohde04] Rohde et al., `Comprehensive Approach for Correction of - Motion and Distortion in Diffusion-Weighted MRI - `_, MRM - 51:103-114 (2004). - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import ecc_pipeline - >>> ecc = ecc_pipeline() - >>> ecc.inputs.inputnode.in_file = 'diffusion.nii' - >>> ecc.inputs.inputnode.in_bval = 'diffusion.bval' - >>> ecc.inputs.inputnode.in_mask = 'mask.nii' - >>> ecc.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - input dwi file - inputnode.in_mask - weights mask of reference image (a file with data \ -range sin [0.0, 1.0], indicating the weight of each voxel when computing the \ -metric. - inputnode.in_bval - b-values table - inputnode.in_xfms - list of matrices to initialize registration (from \ -head-motion correction) - - Outputs:: - - outputnode.out_file - corrected dwi file - outputnode.out_xfms - list of transformation matrices - """ - - from nipype.workflows.data import get_flirt_schedule - params = dict(dof=12, no_search=True, interp='spline', bgvalue=0, - schedule=get_flirt_schedule('ecc')) - # cost='normmi', cost_func='normmi', bins=64, - - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'in_bval', 'in_mask', 'in_xfms']), name='inputnode') - avg_b0 = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval'], output_names=['out_file'], - function=b0_average), name='b0_avg') - pick_dws = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval', 'b'], output_names=['out_file'], - function=extract_bval), name='ExtractDWI') - pick_dws.inputs.b = 'diff' - - flirt = dwi_flirt(flirt_param=params, excl_nodiff=True) - - mult = pe.MapNode(fsl.BinaryMaths(operation='mul'), name='ModulateDWIs', - iterfield=['in_file', 'operand_value']) - thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], - name='RemoveNegative') - - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - get_mat = pe.Node(niu.Function( - input_names=['in_bval', 'in_xfms'], output_names=['out_files'], - function=recompose_xfm), name='GatherMatrices') - merge = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval', 'in_corrected'], - output_names=['out_file'], function=recompose_dwi), name='MergeDWIs') - - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_xfms']), name='outputnode') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, avg_b0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (inputnode, pick_dws, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (inputnode, merge, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (inputnode, flirt, [('in_mask', 'inputnode.ref_mask'), - 
('in_xfms', 'inputnode.in_xfms'), - ('in_bval', 'inputnode.in_bval')]), - (inputnode, get_mat, [('in_bval', 'in_bval')]), - (avg_b0, flirt, [('out_file', 'inputnode.reference')]), - (pick_dws, flirt, [('out_file', 'inputnode.in_file')]), - (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]), - (flirt, mult, [(('outputnode.out_xfms', _xfm_jacobian), - 'operand_value')]), - (flirt, split, [('outputnode.out_file', 'in_file')]), - (split, mult, [('out_files', 'in_file')]), - (mult, thres, [('out_file', 'in_file')]), - (thres, merge, [('out_file', 'in_corrected')]), - (get_mat, outputnode, [('out_files', 'out_xfms')]), - (merge, outputnode, [('out_file', 'out_file')]) - ]) - return wf diff --git a/nipype/workflows/preprocessing/epi/fsl.py b/nipype/workflows/preprocessing/epi/fsl.py deleted file mode 100644 index 28f44f347a..0000000000 --- a/nipype/workflows/preprocessing/epi/fsl.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl -from .utils import * - - -def all_dmri(name='fsl_all_correct', - epi_params=dict(echospacing=0.77e-3, - acc_factor=3, - enc_dir='y-'), - altepi_params=dict(echospacing=0.77e-3, - acc_factor=3, - enc_dir='y')): - """ - Workflow that integrates FSL ``topup`` and ``eddy``. - - - .. warning:: this workflow rotates the gradients table (*b*-vectors) - [Leemans09]_. - - - .. warning:: this workflow does not perform jacobian modulation of each - *DWI* [Jones10]_. 
- - - Examples - -------- - - >>> from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline - >>> allcorr = all_fsl_pipeline() - >>> allcorr.inputs.inputnode.in_file = 'epi.nii' - >>> allcorr.inputs.inputnode.alt_file = 'epi_rev.nii' - >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval' - >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> allcorr.run() # doctest: +SKIP - - """ - - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'in_bvec', 'in_bval', 'alt_file']), - name='inputnode') - - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_mask', 'out_bvec']), name='outputnode') - - def _gen_index(in_file): - import numpy as np - import nibabel as nb - import os - out_file = os.path.abspath('index.txt') - vols = nb.load(in_file).get_data().shape[-1] - np.savetxt(out_file, np.ones((vols,)).T) - return out_file - - avg_b0_0 = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval'], output_names=['out_file'], - function=b0_average), name='b0_avg_pre') - bet_dwi0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), - name='bet_dwi_pre') - - sdc = sdc_peb(epi_params=epi_params, altepi_params=altepi_params) - ecc = pe.Node(fsl.Eddy(method='jac'), name='fsl_eddy') - rot_bvec = pe.Node(niu.Function( - input_names=['in_bvec', 'eddy_params'], output_names=['out_file'], - function=eddy_rotate_bvecs), name='Rotate_Bvec') - avg_b0_1 = pe.Node(niu.Function( - input_names=['in_dwi', 'in_bval'], output_names=['out_file'], - function=b0_average), name='b0_avg_post') - bet_dwi1 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), - name='bet_dwi_post') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, avg_b0_0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), - (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), - (inputnode, sdc, [('in_file', 'inputnode.in_file'), - ('alt_file', 'inputnode.alt_file'), - ('in_bval', 'inputnode.in_bval')]), - (sdc, ecc, [('topup.out_enc_file', 'in_acqp'), - ('topup.out_fieldcoef', - 'in_topup_fieldcoef'), - ('topup.out_movpar', 'in_topup_movpar')]), - (bet_dwi0, ecc, [('mask_file', 'in_mask')]), - (inputnode, ecc, [('in_file', 'in_file'), - (('in_file', _gen_index), 'in_index'), - ('in_bval', 'in_bval'), - ('in_bvec', 'in_bvec')]), - (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]), - (ecc, rot_bvec, [('out_parameter', 'eddy_params')]), - (ecc, avg_b0_1, [('out_corrected', 'in_dwi')]), - (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), - (ecc, outputnode, [('out_corrected', 'out_file')]), - (rot_bvec, outputnode, [('out_file', 'out_bvec')]), - (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) - ]) - return wf \ No newline at end of file diff --git a/nipype/workflows/preprocessing/epi/motion.py b/nipype/workflows/preprocessing/epi/motion.py deleted file mode 100644 index f564e6b885..0000000000 --- a/nipype/workflows/preprocessing/epi/motion.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl -from .utils import * - -def hmc_flirt(name='motion_correct'): - """ - HMC stands for head-motion correction. 
- - Creates a pipeline that corrects for head motion artifacts in dMRI - sequences. - It takes a series of diffusion weighted images and rigidly co-registers - them to one reference image. Finally, the `b`-matrix is rotated accordingly - [Leemans09]_ making use of the rotation matrix obtained by FLIRT. - - Search angles have been limited to 4 degrees, based on results in - [Yendiki13]_. - - A list of rigid transformation matrices is provided, so that transforms - can be chained. - This is useful to correct for artifacts with only one interpolation process - (as previously discussed `here - `_), - and also to compute nuisance regressors as proposed by [Yendiki13]_. - - .. warning:: This workflow rotates the `b`-vectors, so please be advised - that not all the dicom converters ensure the consistency between the - resulting nifti orientation and the gradients table (e.g. dcm2nii - checks it). - - .. admonition:: References - - .. [Leemans09] Leemans A, and Jones DK, `The B-matrix must be rotated - when correcting for subject motion in DTI data - `_, - Magn Reson Med. 61(6):1336-49. 2009. doi: 10.1002/mrm.21890. - - .. [Yendiki13] Yendiki A et al., `Spurious group differences due to head - motion in a diffusion MRI study - `_. - Neuroimage. 21(88C):79-90. 2013. doi: 10.1016/j.neuroimage.2013.11.027 - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import hmc_pipeline - >>> hmc = hmc_pipeline() - >>> hmc.inputs.inputnode.in_file = 'diffusion.nii' - >>> hmc.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> hmc.inputs.inputnode.in_bval = 'diffusion.bval' - >>> hmc.inputs.inputnode.in_mask = 'mask.nii' - >>> hmc.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - input dwi file - inputnode.in_mask - weights mask of reference image (a file with data \ -range in [0.0, 1.0], indicating the weight of each voxel when computing the \ -metric. 
- inputnode.in_bvec - gradients file (b-vectors) - inputnode.ref_num (optional, default=0) index of the b0 volume that \ -should be taken as reference - - Outputs:: - - outputnode.out_file - corrected dwi file - outputnode.out_bvec - rotated gradient vectors table - outputnode.out_xfms - list of transformation matrices - - """ - from nipype.workflows.data import get_flirt_schedule - - params = dict(dof=6, bgvalue=0, save_log=True, no_search=True, - # cost='mutualinfo', cost_func='mutualinfo', bins=64, - schedule=get_flirt_schedule('hmc')) - - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'ref_num', 'in_bvec', 'in_bval', 'in_mask']), - name='inputnode') - split = pe.Node(niu.Function( - output_names=['out_ref', 'out_mov', 'out_bval', 'volid'], - input_names=['in_file', 'in_bval', 'ref_num'], function=hmc_split), - name='SplitDWI') - flirt = dwi_flirt(flirt_param=params) - insmat = pe.Node(niu.Function(input_names=['inlist', 'volid'], - output_names=['out'], function=insert_mat), - name='InsertRefmat') - rot_bvec = pe.Node(niu.Function( - function=rotate_bvecs, input_names=['in_bvec', 'in_matrix'], - output_names=['out_file']), name='Rotate_Bvec') - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_bvec', 'out_xfms']), name='outputnode') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, split, [('in_file', 'in_file'), - ('in_bval', 'in_bval'), - ('ref_num', 'ref_num')]), - (inputnode, flirt, [('in_mask', 'inputnode.ref_mask')]), - (split, flirt, [('out_ref', 'inputnode.reference'), - ('out_mov', 'inputnode.in_file'), - ('out_bval', 'inputnode.in_bval')]), - (flirt, insmat, [('outputnode.out_xfms', 'inlist')]), - (split, insmat, [('volid', 'volid')]), - (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]), - (insmat, rot_bvec, [('out', 'in_matrix')]), - (rot_bvec, outputnode, [('out_file', 'out_bvec')]), - (flirt, outputnode, [('outputnode.out_file', 'out_file')]), - (insmat, outputnode, [('out', 'out_xfms')]) - ]) - return wf - diff --git a/nipype/workflows/preprocessing/epi/susceptibility.py b/nipype/workflows/preprocessing/epi/susceptibility.py deleted file mode 100644 index acfe4998cd..0000000000 --- a/nipype/workflows/preprocessing/epi/susceptibility.py +++ /dev/null @@ -1,301 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl -from .utils import * - - -def sdc_fmb(name='fmb_correction', interp='Linear', - fugue_params=dict(smooth3d=2.0)): - """ - SDC stands for susceptibility distortion correction. FMB stands for - fieldmap-based. - - The fieldmap based (FMB) method implements SDC by using a mapping of the - B0 field as proposed by [Jezzard95]_. This workflow uses the implementation - of FSL (`FUGUE `_). Phase - unwrapping is performed using `PRELUDE - `_ - [Jenkinson03]_. Preparation of the fieldmap is performed reproducing the - script in FSL `fsl_prepare_fieldmap - `_. 
- - - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import sdc_fmb - >>> fmb = sdc_fmb() - >>> fmb.inputs.inputnode.in_file = 'diffusion.nii' - >>> fmb.inputs.inputnode.in_ref = range(0, 30, 6) - >>> fmb.inputs.inputnode.in_mask = 'mask.nii' - >>> fmb.inputs.inputnode.bmap_mag = 'magnitude.nii' - >>> fmb.inputs.inputnode.bmap_pha = 'phase.nii' - >>> fmb.inputs.inputnode.settings = 'epi_param.txt' - >>> fmb.run() # doctest: +SKIP - - .. warning:: Only SIEMENS format fieldmaps are supported. - - .. admonition:: References - - .. [Jezzard95] Jezzard P, and Balaban RS, `Correction for geometric - distortion in echo planar images from B0 field variations - `_, - MRM 34(1):65-73. (1995). doi: 10.1002/mrm.1910340111. - - .. [Jenkinson03] Jenkinson M., `Fast, automated, N-dimensional - phase-unwrapping algorithm `_, - MRM 49(1):193-197, 2003, doi: 10.1002/mrm.10354. - - """ - - epi_defaults = {'delta_te': 2.46e-3, 'echospacing': 0.77e-3, - 'acc_factor': 2, 'enc_dir': u'AP'} - - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_file', 'in_ref', 'in_mask', 'bmap_pha', 'bmap_mag', - 'settings']), name='inputnode') - - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_vsm', 'out_warp']), name='outputnode') - - r_params = pe.Node(JSONFileGrabber(defaults=epi_defaults), - name='SettingsGrabber') - eff_echo = pe.Node(niu.Function(function=_eff_t_echo, - input_names=['echospacing', 'acc_factor'], - output_names=['eff_echo']), name='EffEcho') - - firstmag = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='GetFirst') - n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias') - bet = pe.Node(fsl.BET(frac=0.4, mask=True), name='BrainExtraction') - dilate = pe.Node(fsl.maths.MathsCommand( - nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate') - pha2rads = pe.Node(niu.Function( - input_names=['in_file'], output_names=['out_file'], - function=siemens2rads), name='PreparePhase') - prelude = pe.Node(fsl.PRELUDE(process3d=True), name='PhaseUnwrap') - rad2rsec = pe.Node(niu.Function( - input_names=['in_file', 'delta_te'], output_names=['out_file'], - function=rads2radsec), name='ToRadSec') - - baseline = pe.Node(niu.Function( - input_names=['in_file', 'index'], output_names=['out_file'], - function=time_avg), name='Baseline') - - fmm2b0 = pe.Node(ants.Registration(output_warped_image=True), - name="FMm_to_B0") - fmm2b0.inputs.transforms = ['Rigid'] * 2 - fmm2b0.inputs.transform_parameters = [(1.0,)] * 2 - fmm2b0.inputs.number_of_iterations = [[50], [20]] - fmm2b0.inputs.dimension = 3 - fmm2b0.inputs.metric = ['Mattes', 'Mattes'] - fmm2b0.inputs.metric_weight = [1.0] * 2 - fmm2b0.inputs.radius_or_number_of_bins = [64, 64] - fmm2b0.inputs.sampling_strategy = ['Regular', 'Random'] - fmm2b0.inputs.sampling_percentage = [None, 0.2] - fmm2b0.inputs.convergence_threshold = [1.e-5, 1.e-8] - fmm2b0.inputs.convergence_window_size = [20, 10] - fmm2b0.inputs.smoothing_sigmas = [[6.0], [2.0]] - fmm2b0.inputs.sigma_units = ['vox'] * 2 - fmm2b0.inputs.shrink_factors = [[6], [1]] # ,[1] ] - fmm2b0.inputs.use_estimate_learning_rate_once = [True] * 2 - fmm2b0.inputs.use_histogram_matching = [True] * 2 - fmm2b0.inputs.initial_moving_transform_com = 0 - fmm2b0.inputs.collapse_output_transforms = True - fmm2b0.inputs.winsorize_upper_quantile = 0.995 - - applyxfm = pe.Node(ants.ApplyTransforms( - dimension=3, interpolation=interp), name='FMp_to_B0') - - pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='PreliminaryFugue') - demean = pe.Node(niu.Function( - 
input_names=['in_file', 'in_mask'], output_names=['out_file'], - function=demean_image), name='DemeanFmap') - - cleanup = cleanup_edge_pipeline() - - addvol = pe.Node(niu.Function( - input_names=['in_file'], output_names=['out_file'], - function=add_empty_vol), name='AddEmptyVol') - - vsm = pe.Node(fsl.FUGUE(save_shift=True, **fugue_params), - name="ComputeVSM") - - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') - unwarp = pe.MapNode(fsl.FUGUE(icorr=True, forward_warping=False), - iterfield=['in_file'], name='UnwarpDWIs') - thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], - name='RemoveNegative') - vsm2dfm = vsm2warp() - vsm2dfm.inputs.inputnode.scaling = 1.0 - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, r_params, [('settings', 'in_file')]), - (r_params, eff_echo, [('echospacing', 'echospacing'), - ('acc_factor', 'acc_factor')]), - (inputnode, pha2rads, [('bmap_pha', 'in_file')]), - (inputnode, firstmag, [('bmap_mag', 'in_file')]), - (inputnode, baseline, [('in_file', 'in_file'), - ('in_ref', 'index')]), - (firstmag, n4, [('roi_file', 'input_image')]), - (n4, bet, [('output_image', 'in_file')]), - (bet, dilate, [('mask_file', 'in_file')]), - (pha2rads, prelude, [('out_file', 'phase_file')]), - (n4, prelude, [('output_image', 'magnitude_file')]), - (dilate, prelude, [('out_file', 'mask_file')]), - (r_params, rad2rsec, [('delta_te', 'delta_te')]), - (prelude, rad2rsec, [('unwrapped_phase_file', 'in_file')]), - - (baseline, fmm2b0, [('out_file', 'fixed_image')]), - (n4, fmm2b0, [('output_image', 'moving_image')]), - (inputnode, fmm2b0, [('in_mask', 'fixed_image_mask')]), - (dilate, fmm2b0, [('out_file', 'moving_image_mask')]), - - (baseline, applyxfm, [('out_file', 'reference_image')]), - (rad2rsec, applyxfm, [('out_file', 'input_image')]), - (fmm2b0, applyxfm, [ - ('forward_transforms', 'transforms'), - ('forward_invert_flags', 'invert_transform_flags')]), - - (applyxfm, pre_fugue, [('output_image', 'fmap_in_file')]), - (inputnode, pre_fugue, [('in_mask', 'mask_file')]), - (pre_fugue, demean, [('fmap_out_file', 'in_file')]), - (inputnode, demean, [('in_mask', 'in_mask')]), - (demean, cleanup, [('out_file', 'inputnode.in_file')]), - (inputnode, cleanup, [('in_mask', 'inputnode.in_mask')]), - (cleanup, addvol, [('outputnode.out_file', 'in_file')]), - (inputnode, vsm, [('in_mask', 'mask_file')]), - (addvol, vsm, [('out_file', 'fmap_in_file')]), - (r_params, vsm, [('delta_te', 'asym_se_time')]), - (eff_echo, vsm, [('eff_echo', 'dwell_time')]), - (inputnode, split, [('in_file', 'in_file')]), - (split, unwarp, [('out_files', 'in_file')]), - (vsm, unwarp, [('shift_out_file', 'shift_in_file')]), - (r_params, unwarp, [ - (('enc_dir', _fix_enc_dir), 'unwarp_direction')]), - (unwarp, thres, [('unwarped_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), - (r_params, vsm2dfm, [ - (('enc_dir', _fix_enc_dir), 'inputnode.enc_dir')]), - (merge, vsm2dfm, [('merged_file', 'inputnode.in_ref')]), - (vsm, vsm2dfm, [('shift_out_file', 'inputnode.in_vsm')]), - (merge, outputnode, [('merged_file', 'out_file')]), - (vsm, outputnode, [('shift_out_file', 'out_vsm')]), - (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) - ]) - return wf - - -def sdc_peb(name='peb_correction', - epi_params=dict(echospacing=0.77e-3, - acc_factor=3, - enc_dir='y-', - epi_factor=1), - altepi_params=dict(echospacing=0.77e-3, - acc_factor=3, - enc_dir='y', - epi_factor=1)): - """ - SDC stands for 
susceptibility distortion correction. PEB stands for
-    phase-encoding-based.
-
-    The phase-encoding-based (PEB) method implements SDC by acquiring
-    diffusion images with two different encoding directions [Andersson2003]_.
-    The most typical case is acquiring with opposed phase-gradient blips
-    (e.g. *A>>>P* and *P>>>A*, or equivalently, *-y* and *y*)
-    as in [Chiou2000]_, but it is also possible to use orthogonal
-    configurations [Cordes2000]_ (e.g. *A>>>P* and *L>>>R*,
-    or equivalently *-y* and *x*).
-    This workflow uses the implementation of FSL
-    (`TOPUP `_).
-
-    Example
-    -------
-
-    >>> from nipype.workflows.dmri.fsl.artifacts import sdc_peb
-    >>> peb = sdc_peb()
-    >>> peb.inputs.inputnode.in_file = 'epi.nii'
-    >>> peb.inputs.inputnode.alt_file = 'epi_rev.nii'
-    >>> peb.inputs.inputnode.in_bval = 'diffusion.bval'
-    >>> peb.inputs.inputnode.in_mask = 'mask.nii'
-    >>> peb.run() # doctest: +SKIP
-
-    .. admonition:: References
-
-      .. [Andersson2003] Andersson JL et al., `How to correct susceptibility
-        distortions in spin-echo echo-planar images: application to diffusion
-        tensor imaging `_.
-        Neuroimage. 2003 Oct;20(2):870-88. doi: 10.1016/S1053-8119(03)00336-7
-
-      .. [Cordes2000] Cordes D et al., Geometric distortion correction in EPI
-        using two images with orthogonal phase-encoding directions, in Proc.
-        ISMRM (8), p.1712, Denver, US, 2000.
-
-      .. [Chiou2000] Chiou JY, and Nalcioglu O, A simple method to correct
-        off-resonance related distortion in echo planar imaging, in Proc.
-        ISMRM (8), p.1712, Denver, US, 2000.
-
-    """
-
-    inputnode = pe.Node(niu.IdentityInterface(
-        fields=['in_file', 'in_bval', 'in_mask', 'alt_file', 'ref_num']),
-        name='inputnode')
-    outputnode = pe.Node(niu.IdentityInterface(
-        fields=['out_file', 'out_vsm', 'out_warp']), name='outputnode')
-
-    b0_ref = pe.Node(fsl.ExtractROI(t_size=1), name='b0_ref')
-    b0_alt = pe.Node(fsl.ExtractROI(t_size=1), name='b0_alt')
-    b0_comb = pe.Node(niu.Merge(2), name='b0_list')
-    b0_merge = pe.Node(fsl.Merge(dimension='t'), name='b0_merged')
-
-    topup = pe.Node(fsl.TOPUP(), name='topup')
-    topup.inputs.encoding_direction = [epi_params['enc_dir'],
-                                       altepi_params['enc_dir']]
-
-    readout = compute_readout(epi_params)
-    topup.inputs.readout_times = [readout,
-                                  compute_readout(altepi_params)]
-
-    unwarp = pe.Node(fsl.ApplyTOPUP(in_index=[1], method='jac'), name='unwarp')
-
-    # scaling = pe.Node(niu.Function(input_names=['in_file', 'enc_dir'],
-    #                   output_names=['factor'], function=_get_zoom),
-    #                   name='GetZoom')
-    # scaling.inputs.enc_dir = epi_params['enc_dir']
-    vsm2dfm = vsm2warp()
-    vsm2dfm.inputs.inputnode.enc_dir = epi_params['enc_dir']
-    vsm2dfm.inputs.inputnode.scaling = readout
-
-    wf = pe.Workflow(name=name)
-    wf.connect([
-        (inputnode, b0_ref, [('in_file', 'in_file'),
-                             (('ref_num', _checkrnum), 't_min')]),
-        (inputnode, b0_alt, [('alt_file', 'in_file'),
-                             (('ref_num', _checkrnum), 't_min')]),
-        (b0_ref, b0_comb, [('roi_file', 'in1')]),
-        (b0_alt, b0_comb, [('roi_file', 'in2')]),
-        (b0_comb, b0_merge, [('out', 'in_files')]),
-        (b0_merge, topup, [('merged_file', 'in_file')]),
-        (topup, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'),
-                         ('out_movpar', 'in_topup_movpar'),
-                         ('out_enc_file', 'encoding_file')]),
-        (inputnode, unwarp, [('in_file', 'in_files')]),
-        (unwarp, outputnode, [('out_corrected', 'out_file')]),
-        # (b0_ref, scaling, [('roi_file', 'in_file')]),
-        # (scaling, vsm2dfm, [('factor', 'inputnode.scaling')]),
-        (b0_ref, vsm2dfm, [('roi_file', 'inputnode.in_ref')]),
-        (topup, vsm2dfm, [('out_field',
'inputnode.in_vsm')]), - (topup, outputnode, [('out_field', 'out_vsm')]), - (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) - ]) - return wf diff --git a/nipype/workflows/preprocessing/epi/tests/__init__.py b/nipype/workflows/preprocessing/epi/tests/__init__.py deleted file mode 100644 index 1d26a84aa3..0000000000 --- a/nipype/workflows/preprocessing/epi/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: \ No newline at end of file diff --git a/nipype/workflows/preprocessing/epi/utils.py b/nipype/workflows/preprocessing/epi/utils.py deleted file mode 100644 index f084382eb7..0000000000 --- a/nipype/workflows/preprocessing/epi/utils.py +++ /dev/null @@ -1,851 +0,0 @@ -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -import os - -import nipype.pipeline.engine as pe -from nipype.interfaces.io import JSONFileGrabber -from nipype.interfaces import utility as niu -from nipype.interfaces import freesurfer as fs -from nipype.interfaces import ants -from nipype.interfaces import fsl - - -def _eff_t_echo(echospacing, acc_factor): - eff_echo = echospacing / (1.0 * acc_factor) - return eff_echo - - -def _fix_enc_dir(enc_dir): - enc_dir = enc_dir.lower() - if enc_dir == 'lr': - return 'x-' - if enc_dir == 'rl': - return 'x' - if enc_dir == 'ap': - return 'y-' - if enc_dir == 'pa': - return 'y' - return enc_dir - - -def _checkrnum(ref_num): - from nipype.interfaces.base import isdefined - if (ref_num is None) or not isdefined(ref_num): - return 0 - return ref_num - - -def _nonb0(in_bval): - import numpy as np - bvals = np.loadtxt(in_bval) - return np.where(bvals != 0)[0].tolist() - - -def _xfm_jacobian(in_xfm): - import numpy as np - from math import fabs - return [fabs(np.linalg.det(np.loadtxt(xfm))) for xfm in in_xfm] - - -def _get_zoom(in_file, enc_dir): - import nibabel as nb - - zooms = nb.load(in_file).get_header().get_zooms() - - if 'y' in enc_dir: - return zooms[1] - elif 'x' in enc_dir: - return zooms[0] - elif 'z' in enc_dir: - return zooms[2] - else: - raise ValueError('Wrong encoding direction string') - - - -def cleanup_edge_pipeline(name='Cleanup'): - """ - Perform some de-spiking filtering to clean up the edge of the fieldmap - (copied from fsl_prepare_fieldmap) - """ - inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'in_mask']), - name='inputnode') - outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']), - name='outputnode') - - fugue = pe.Node(fsl.FUGUE(save_fmap=True, despike_2dfilter=True, - despike_threshold=2.1), name='Despike') - erode = pe.Node(fsl.maths.MathsCommand(nan2zeros=True, - args='-kernel 2D -ero'), name='MskErode') - newmsk = pe.Node(fsl.MultiImageMaths(op_string='-sub %s -thr 0.5 -bin'), - name='NewMask') - applymsk = pe.Node(fsl.ApplyMask(nan2zeros=True), name='ApplyMask') - join = pe.Node(niu.Merge(2), name='Merge') - addedge = pe.Node(fsl.MultiImageMaths(op_string='-mas %s -add %s'), - name='AddEdge') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, fugue, [('in_file', 'fmap_in_file'), - ('in_mask', 'mask_file')]), - (inputnode, erode, [('in_mask', 'in_file')]), - (inputnode, newmsk, [('in_mask', 'in_file')]), - (erode, newmsk, [('out_file', 'operand_files')]), - (fugue, applymsk, [('fmap_out_file', 'in_file')]), - (newmsk, applymsk, [('out_file', 'mask_file')]), - (erode, join, [('out_file', 'in1')]), - 
(applymsk, join, [('out_file', 'in2')]), - (inputnode, addedge, [('in_file', 'in_file')]), - (join, addedge, [('out', 'operand_files')]), - (addedge, outputnode, [('out_file', 'out_file')]) - ]) - return wf - - -def vsm2warp(name='Shiftmap2Warping'): - """ - Converts a voxel shift map (vsm) to a displacements field (warp). - """ - inputnode = pe.Node(niu.IdentityInterface(fields=['in_vsm', - 'in_ref', 'scaling', 'enc_dir']), name='inputnode') - outputnode = pe.Node(niu.IdentityInterface(fields=['out_warp']), - name='outputnode') - fixhdr = pe.Node(niu.Function(input_names=['in_file', 'in_file_hdr'], - output_names=['out_file'], function=copy_hdr), - name='Fix_hdr') - vsm = pe.Node(fsl.maths.BinaryMaths(operation='mul'), name='ScaleField') - vsm2dfm = pe.Node(fsl.ConvertWarp(relwarp=True, out_relwarp=True), - name='vsm2dfm') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, fixhdr, [('in_vsm', 'in_file'), - ('in_ref', 'in_file_hdr')]), - (inputnode, vsm, [('scaling', 'operand_value')]), - (fixhdr, vsm, [('out_file', 'in_file')]), - (vsm, vsm2dfm, [('out_file', 'shift_in_file')]), - (inputnode, vsm2dfm, [('in_ref', 'reference'), - ('enc_dir', 'shift_direction')]), - (vsm2dfm, outputnode, [('out_file', 'out_warp')]) - ]) - return wf - - -def dwi_flirt(name='DWICoregistration', excl_nodiff=False, - flirt_param={}): - """ - Generates a workflow for linear registration of dwi volumes - """ - inputnode = pe.Node(niu.IdentityInterface(fields=['reference', - 'in_file', 'ref_mask', 'in_xfms', 'in_bval']), - name='inputnode') - - initmat = pe.Node(niu.Function(input_names=['in_bval', 'in_xfms', - 'excl_nodiff'], output_names=['init_xfms'], - function=_checkinitxfm), name='InitXforms') - initmat.inputs.excl_nodiff = excl_nodiff - dilate = pe.Node(fsl.maths.MathsCommand(nan2zeros=True, - args='-kernel sphere 5 -dilM'), name='MskDilate') - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - pick_ref = pe.Node(niu.Select(), name='Pick_b0') - n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias') - enhb0 = pe.Node(niu.Function(input_names=['in_file', 'in_mask', - 'clip_limit'], output_names=['out_file'], - function=enhance), name='B0Equalize') - enhb0.inputs.clip_limit = 0.015 - enhdw = pe.MapNode(niu.Function(input_names=['in_file', 'in_mask'], - output_names=['out_file'], function=enhance), - name='DWEqualize', iterfield=['in_file']) - flirt = pe.MapNode(fsl.FLIRT(**flirt_param), name='CoRegistration', - iterfield=['in_file', 'in_matrix_file']) - thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], - name='RemoveNegative') - merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') - outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', - 'out_xfms']), name='outputnode') - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, split, [('in_file', 'in_file')]), - (inputnode, dilate, [('ref_mask', 'in_file')]), - (inputnode, enhb0, [('ref_mask', 'in_mask')]), - (inputnode, initmat, [('in_xfms', 'in_xfms'), - ('in_bval', 'in_bval')]), - (inputnode, n4, [('reference', 'input_image'), - ('ref_mask', 'mask_image')]), - (dilate, flirt, [('out_file', 'ref_weight'), - ('out_file', 'in_weight')]), - (n4, enhb0, [('output_image', 'in_file')]), - (split, enhdw, [('out_files', 'in_file')]), - (dilate, enhdw, [('out_file', 'in_mask')]), - (enhb0, flirt, [('out_file', 'reference')]), - (enhdw, flirt, [('out_file', 'in_file')]), - (initmat, flirt, [('init_xfms', 'in_matrix_file')]), - (flirt, thres, [('out_file', 'in_file')]), - (thres, merge, [('out_file', 
'in_files')]), - (merge, outputnode, [('merged_file', 'out_file')]), - (flirt, outputnode, [('out_matrix_file', 'out_xfms')]) - ]) - return wf - - -def apply_all_corrections(name='UnwarpArtifacts'): - """ - Combines two lists of linear transforms with the deformation field - map obtained typically after the SDC process. - Additionally, computes the corresponding bspline coefficients and - the map of determinants of the jacobian. - """ - - inputnode = pe.Node(niu.IdentityInterface(fields=['in_sdc', - 'in_hmc', 'in_ecc', 'in_dwi']), name='inputnode') - outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_warp', - 'out_coeff', 'out_jacobian']), name='outputnode') - warps = pe.MapNode(fsl.ConvertWarp(relwarp=True), - iterfield=['premat', 'postmat'], - name='ConvertWarp') - - selref = pe.Node(niu.Select(index=[0]), name='Reference') - - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - unwarp = pe.MapNode(fsl.ApplyWarp(), iterfield=['in_file', 'field_file'], - name='UnwarpDWIs') - - coeffs = pe.MapNode(fsl.WarpUtils(out_format='spline'), - iterfield=['in_file'], name='CoeffComp') - jacobian = pe.MapNode(fsl.WarpUtils(write_jacobian=True), - iterfield=['in_file'], name='JacobianComp') - jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'), - iterfield=['in_file', 'operand_files'], - name='ModulateDWIs') - - thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'], - name='RemoveNegative') - merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, warps, [('in_sdc', 'warp1'), - ('in_hmc', 'premat'), - ('in_ecc', 'postmat'), - ('in_dwi', 'reference')]), - (inputnode, split, [('in_dwi', 'in_file')]), - (split, selref, [('out_files', 'inlist')]), - (warps, unwarp, [('out_file', 'field_file')]), - (split, unwarp, [('out_files', 'in_file')]), - (selref, unwarp, [('out', 'ref_file')]), - (selref, coeffs, [('out', 'reference')]), - (warps, coeffs, [('out_file', 'in_file')]), - (selref, jacobian, [('out', 'reference')]), - (coeffs, jacobian, [('out_file', 'in_file')]), - (unwarp, jacmult, [('out_file', 'in_file')]), - (jacobian, jacmult, [('out_jacobian', 'operand_files')]), - (jacmult, thres, [('out_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), - (warps, outputnode, [('out_file', 'out_warp')]), - (coeffs, outputnode, [('out_file', 'out_coeff')]), - (jacobian, outputnode, [('out_jacobian', 'out_jacobian')]), - (merge, outputnode, [('merged_file', 'out_file')]) - ]) - return wf - - -def extract_bval(in_dwi, in_bval, b=0, out_file=None): - """ - Writes an image containing only the volumes with b-value specified at - input - """ - import numpy as np - import nibabel as nb - import os.path as op - - if out_file is None: - fname, ext = op.splitext(op.basename(in_dwi)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_tsoi%s" % (fname, ext)) - - im = nb.load(in_dwi) - dwidata = im.get_data() - bvals = np.loadtxt(in_bval) - - if b == 'diff': - selection = np.where(bvals != 0) - elif b == 'nodiff': - selection = np.where(bvals == 0) - else: - selection = np.where(bvals == b) - - extdata = np.squeeze(dwidata.take(selection, axis=3)) - hdr = im.get_header().copy() - hdr.set_data_shape(extdata.shape) - nb.Nifti1Image(extdata, im.get_affine(), - hdr).to_filename(out_file) - return out_file - - -def hmc_split(in_file, in_bval, ref_num=0, lowbval=5.0): - """ - Selects the reference and moving volumes from a dwi dataset - for the purpose 
of HMC.
-    """
-    import numpy as np
-    import nibabel as nb
-    import os.path as op
-    from nipype.interfaces.base import isdefined
-
-    im = nb.load(in_file)
-    data = im.get_data()
-    hdr = im.get_header().copy()
-    bval = np.loadtxt(in_bval)
-
-    lowbs = np.where(bval <= lowbval)[0]
-
-    volid = lowbs[0]
-    if (isdefined(ref_num) and (ref_num < len(lowbs))):
-        # keep volid an integer index; a list would break the comparisons
-        # below and the value returned for insert_mat
-        volid = ref_num
-
-    # extract the reference volume before it is removed from the moving set
-    refdata = data[..., volid]
-
-    if volid == 0:
-        data = data[..., 1:]
-        bval = bval[1:]
-    elif volid == (data.shape[-1] - 1):
-        data = data[..., :-1]
-        bval = bval[:-1]
-    else:
-        data = np.concatenate((data[..., :volid], data[..., (volid + 1):]),
-                              axis=3)
-        bval = np.hstack((bval[:volid], bval[(volid + 1):]))
-
-    out_ref = op.abspath('hmc_ref.nii.gz')
-    out_mov = op.abspath('hmc_mov.nii.gz')
-    out_bval = op.abspath('bval_split.txt')
-
-    hdr.set_data_shape(refdata.shape)
-    nb.Nifti1Image(refdata, im.get_affine(), hdr).to_filename(out_ref)
-
-    hdr.set_data_shape(data.shape)
-    nb.Nifti1Image(data, im.get_affine(), hdr).to_filename(out_mov)
-    np.savetxt(out_bval, bval)
-    return [out_ref, out_mov, out_bval, volid]
-
-
-def remove_comp(in_file, in_bval, volid=0, out_file=None):
-    """
-    Removes the volume ``volid`` from the 4D nifti file
-    """
-    import numpy as np
-    import nibabel as nb
-    import os.path as op
-
-    if out_file is None:
-        fname, ext = op.splitext(op.basename(in_file))
-        if ext == ".gz":
-            fname, ext2 = op.splitext(fname)
-            ext = ext2 + ext
-        out_file = op.abspath("%s_extract%s" % (fname, ext))
-
-    im = nb.load(in_file)
-    data = im.get_data()
-    hdr = im.get_header().copy()
-    bval = np.loadtxt(in_bval)
-
-    if volid == 0:
-        data = data[..., 1:]
-        bval = bval[1:]
-    elif volid == (data.shape[-1] - 1):
-        data = data[..., :-1]
-        bval = bval[:-1]
-    else:
-        data = np.concatenate((data[..., :volid], data[..., (volid + 1):]),
-                              axis=3)
-        bval = np.hstack((bval[:volid], bval[(volid + 1):]))
-    hdr.set_data_shape(data.shape)
-    nb.Nifti1Image(data, im.get_affine(), hdr).to_filename(out_file)
-
-    out_bval = op.abspath('bval_extract.txt')
-    np.savetxt(out_bval, bval)
-    return out_file, out_bval
-
-
-def insert_mat(inlist, volid=0):
-    import numpy as np
-    import os.path as op
-    idfname = op.abspath('identity.mat')
-    out = inlist
-    np.savetxt(idfname, np.eye(4))
-    out.insert(volid, idfname)
-    return out
-
-
-def recompose_dwi(in_dwi, in_bval, in_corrected, out_file=None):
-    """
-    Recompose the dMRI data according to the b-values table after EC
-    correction
-    """
-    import numpy as np
-    import nibabel as nb
-    import os.path as op
-
-    if out_file is None:
-        fname, ext = op.splitext(op.basename(in_dwi))
-        if ext == ".gz":
-            fname, ext2 = op.splitext(fname)
-            ext = ext2 + ext
-        out_file = op.abspath("%s_eccorrect%s" % (fname, ext))
-
-    im = nb.load(in_dwi)
-    dwidata = im.get_data()
-    bvals = np.loadtxt(in_bval)
-    dwis = np.where(bvals != 0)[0].tolist()
-
-    if len(dwis) != len(in_corrected):
-        raise RuntimeError(('Length of DWIs in b-values table and after '
-                            'correction should match'))
-
-    for bindex, dwi in zip(dwis, in_corrected):
-        dwidata[..., bindex] = nb.load(dwi).get_data()
-
-    nb.Nifti1Image(dwidata, im.get_affine(),
-                   im.get_header()).to_filename(out_file)
-    return out_file
-
-
-def recompose_xfm(in_bval, in_xfms):
-    """
-    Insert identity transformation matrices in b0 volumes to build up a list
-    """
-    import numpy as np
-    import os.path as op
-
-    bvals = np.loadtxt(in_bval)
-    out_matrix = np.array([np.eye(4)] * len(bvals))
-    xfms = iter([np.loadtxt(xfm) for xfm in in_xfms])
-    out_files = []
-
-    for i, b in enumerate(bvals):
- if b == 0.0: - mat = np.eye(4) - else: - mat = xfms.next() - - out_name = op.abspath('eccor_%04d.mat' % i) - out_files.append(out_name) - np.savetxt(out_name, mat) - - return out_files - - -def time_avg(in_file, index=[0], out_file=None): - """ - Average the input time-series, selecting the indices given in index - - .. warning:: time steps should be already registered (corrected for - head motion artifacts). - - """ - import numpy as np - import nibabel as nb - import os.path as op - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_baseline%s" % (fname, ext)) - - index = np.atleast_1d(index).tolist() - - imgs = np.array(nb.four_to_three(nb.load(in_file)))[index] - if len(index) == 1: - data = imgs[0].get_data().astype(np.float32) - else: - data = np.average(np.array([im.get_data().astype(np.float32) for im in imgs]), - axis=0) - - hdr = imgs[0].get_header().copy() - hdr.set_data_shape(data.shape) - hdr.set_xyzt_units('mm') - hdr.set_data_dtype(np.float32) - nb.Nifti1Image(data, imgs[0].get_affine(), hdr).to_filename(out_file) - return out_file - - -def b0_indices(in_bval, max_b=10.0): - """ - Extract the indices of slices in a b-values file with a low b value - """ - import numpy as np - bval = np.loadtxt(in_bval) - return np.argwhere(bval <= max_b).flatten().tolist() - - -def b0_average(in_dwi, in_bval, max_b=10.0, out_file=None): - """ - A function that averages the *b0* volumes from a DWI dataset. - As current dMRI data are being acquired with all b-values > 0.0, - the *lowb* volumes are selected by specifying the parameter max_b. - - .. warning:: *b0* should be already registered (head motion artifact should - be corrected). - - """ - import numpy as np - import nibabel as nb - import os.path as op - - if out_file is None: - fname, ext = op.splitext(op.basename(in_dwi)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_avg_b0%s" % (fname, ext)) - - imgs = np.array(nb.four_to_three(nb.load(in_dwi))) - index = b0_indices(in_bval, max_b=max_b) - b0s = [im.get_data().astype(np.float32) - for im in imgs[index]] - b0 = np.average(np.array(b0s), axis=0) - - hdr = imgs[0].get_header().copy() - hdr.set_data_shape(b0.shape) - hdr.set_xyzt_units('mm') - hdr.set_data_dtype(np.float32) - nb.Nifti1Image(b0, imgs[0].get_affine(), hdr).to_filename(out_file) - return out_file - - -def rotate_bvecs(in_bvec, in_matrix): - """ - Rotates the input bvec file accordingly with a list of matrices. - - .. note:: the input affine matrix transforms points in the destination - image to their corresponding coordinates in the original image. - Therefore, this matrix should be inverted first, as we want to know - the target position of :math:`\\vec{r}`. 
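-
-    Written out, this is a summary of the update implemented below (not an
-    additional step): for each matrix in ``in_matrix`` with rotation block
-    :math:`R` (assuming rigid transforms, as produced by the FLIRT-based
-    head-motion correction), every non-zero b-vector is remapped and
-    renormalized as
-    :math:`\\vec{r}' = R^{-1}\\vec{r} / \\|R^{-1}\\vec{r}\\|`.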
- - """ - import os - import numpy as np - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('%s_rotated.bvec' % name) - bvecs = np.loadtxt(in_bvec).T - new_bvecs = [] - - if len(bvecs) != len(in_matrix): - raise RuntimeError(('Number of b-vectors (%d) and rotation ' - 'matrices (%d) should match.') % (len(bvecs), - len(in_matrix))) - - for bvec, mat in zip(bvecs, in_matrix): - if np.all(bvec == 0.0): - new_bvecs.append(bvec) - else: - invrot = np.linalg.inv(np.loadtxt(mat))[:3, :3] - newbvec = invrot.dot(bvec) - new_bvecs.append((newbvec/np.linalg.norm(newbvec))) - - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') - return out_file - - -def eddy_rotate_bvecs(in_bvec, eddy_params): - """ - Rotates the input bvec file accordingly with a list of parameters sourced - from ``eddy``, as explained `here - `_. - """ - import os - import numpy as np - from math import sin, cos - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('%s_rotated.bvec' % name) - bvecs = np.loadtxt(in_bvec).T - new_bvecs = [] - - params = np.loadtxt(eddy_params) - - if len(bvecs) != len(params): - raise RuntimeError(('Number of b-vectors and rotation ' - 'matrices should match.')) - - for bvec, row in zip(bvecs, params): - if np.all(bvec == 0.0): - new_bvecs.append(bvec) - else: - ax = row[3] - ay = row[4] - az = row[5] - - Rx = np.array([[1.0, 0.0, 0.0], - [0.0, cos(ax), -sin(ax)], - [0.0, sin(ax), cos(ax)]]) - Ry = np.array([[cos(ay), 0.0, sin(ay)], - [0.0, 1.0, 0.0], - [-sin(ay), 0.0, cos(ay)]]) - Rz = np.array([[cos(az), -sin(az), 0.0], - [sin(az), cos(az), 0.0], - [0.0, 0.0, 1.0]]) - R = Rx.dot(Ry).dot(Rz) - - invrot = np.linalg.inv(R) - newbvec = invrot.dot(bvec) - new_bvecs.append((newbvec/np.linalg.norm(newbvec))) - - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') - return out_file - - -def compute_readout(params): - """ - Computes readout time from epi params (see `eddy documentation - `_). - - .. warning:: ``params['echospacing']`` should be in *sec* units. 
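-
-    Example (a sketch using the default EPI parameters found elsewhere in
-    this module; the printed value is approximate)::
-
-        >>> compute_readout(dict(echospacing=0.77e-3, acc_factor=3,
-        ...                      epi_factor=1))  # doctest: +SKIP
-        0.00025667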
- - - """ - epi_factor = 1.0 - acc_factor = 1.0 - try: - if params['epi_factor'] > 1: - epi_factor = float(params['epi_factor'] - 1) - except: - pass - try: - if params['acc_factor'] > 1: - acc_factor = 1.0 / params['acc_factor'] - except: - pass - return acc_factor * epi_factor * params['echospacing'] - - -def siemens2rads(in_file, out_file=None): - """ - Converts input phase difference map to rads - """ - import numpy as np - import nibabel as nb - import os.path as op - import math - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_rads.nii.gz' % fname) - - in_file = np.atleast_1d(in_file).tolist() - im = nb.load(in_file[0]) - data = im.get_data().astype(np.float32) - hdr = im.get_header().copy() - - if len(in_file) == 2: - data = nb.load(in_file[1]).get_data().astype(np.float32) - data - elif (data.ndim == 4) and (data.shape[-1] == 2): - data = np.squeeze(data[..., 1] - data[..., 0]) - hdr.set_data_shape(data.shape[:3]) - - imin = data.min() - imax = data.max() - data = (2.0 * math.pi * (data - imin)/(imax-imin)) - math.pi - hdr.set_data_dtype(np.float32) - hdr.set_xyzt_units('mm') - hdr['datatype'] = 16 - nb.Nifti1Image(data, im.get_affine(), hdr).to_filename(out_file) - return out_file - - -def rads2radsec(in_file, delta_te, out_file=None): - """ - Converts input phase difference map to rads - """ - import numpy as np - import nibabel as nb - import os.path as op - import math - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_radsec.nii.gz' % fname) - - im = nb.load(in_file) - data = im.get_data().astype(np.float32) * (1.0/delta_te) - nb.Nifti1Image(data, im.get_affine(), - im.get_header()).to_filename(out_file) - return out_file - - -def demean_image(in_file, in_mask=None, out_file=None): - """ - Demean image data inside mask - """ - import numpy as np - import nibabel as nb - import os.path as op - import math - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_demean.nii.gz' % fname) - - im = nb.load(in_file) - data = im.get_data().astype(np.float32) - msk = np.ones_like(data) - - if in_mask is not None: - msk = nb.load(in_mask).get_data().astype(np.float32) - msk[msk > 0] = 1.0 - msk[msk < 1] = 0.0 - - mean = np.median(data[msk == 1].reshape(-1)) - data[msk == 1] = data[msk == 1] - mean - nb.Nifti1Image(data, im.get_affine(), - im.get_header()).to_filename(out_file) - return out_file - - -def add_empty_vol(in_file, out_file=None): - """ - Adds an empty vol to the phase difference image - """ - import nibabel as nb - import os.path as op - import numpy as np - import math - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_4D.nii.gz' % fname) - - im = nb.load(in_file) - zim = nb.Nifti1Image(np.zeros_like(im.get_data()), im.get_affine(), - im.get_header()) - nb.funcs.concat_images([im, zim]).to_filename(out_file) - return out_file - - -def reorient_bvecs(in_dwi, old_dwi, in_bvec): - """ - Checks reorientations of ``in_dwi`` w.r.t. ``old_dwi`` and - reorients the in_bvec table accordingly. 
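-
-    A sketch of the logic implemented below: with :math:`N` and :math:`O`
-    the affines of the new and old images, the combined matrix is
-    :math:`RS = N O^{-1}`; dividing out its non-unit (scaling) entries
-    leaves the rotation/permutation component :math:`R`, and each b-vector
-    is remapped as :math:`\\vec{b}' = R \\vec{b}`.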
- """ - import os - import numpy as np - import nibabel as nb - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('%s_reorient.bvec' % name) - bvecs = np.loadtxt(in_bvec).T - new_bvecs = [] - - N = nb.load(in_dwi).get_affine() - O = nb.load(old_dwi).get_affine() - RS = N.dot(np.linalg.inv(O))[:3, :3] - sc_idx = np.where((np.abs(RS) != 1) & (RS != 0)) - S = np.ones_like(RS) - S[sc_idx] = RS[sc_idx] - R = RS/S - - new_bvecs = [R.dot(b) for b in bvecs] - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') - return out_file - - -def copy_hdr(in_file, in_file_hdr, out_file=None): - import numpy as np - import nibabel as nb - import os.path as op - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_fixhdr.nii.gz' % fname) - - imref = nb.load(in_file_hdr) - hdr = imref.get_header().copy() - hdr.set_data_dtype(np.float32) - vsm = nb.load(in_file).get_data().astype(np.float32) - hdr.set_data_shape(vsm.shape) - hdr.set_xyzt_units('mm') - nii = nb.Nifti1Image(vsm, imref.get_affine(), hdr) - nii.to_filename(out_file) - return out_file - - -def enhance(in_file, clip_limit=0.010, in_mask=None, out_file=None): - import numpy as np - import nibabel as nb - import os.path as op - from skimage import exposure, img_as_int - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_enh.nii.gz' % fname) - - im = nb.load(in_file) - imdata = im.get_data() - imshape = im.get_shape() - - if in_mask is not None: - msk = nb.load(in_mask).get_data() - msk[msk > 0] = 1 - msk[msk < 1] = 0 - imdata = imdata * msk - - immin = imdata.min() - imdata = (imdata - immin).astype(np.uint16) - - adapted = exposure.equalize_adapthist(imdata.reshape(imshape[0], -1), - clip_limit=clip_limit) - - nb.Nifti1Image(adapted.reshape(imshape), im.get_affine(), - im.get_header()).to_filename(out_file) - - return out_file - - -def _checkinitxfm(in_bval, excl_nodiff, in_xfms=None): - from nipype.interfaces.base import isdefined - import numpy as np - import os.path as op - bvals = np.loadtxt(in_bval) - - gen_id = ((in_xfms is None) or - (not isdefined(in_xfms)) or - (len(in_xfms) != len(bvals))) - - init_xfms = [] - if excl_nodiff: - dws = np.where(bvals != 0)[0].tolist() - else: - dws = range(len(bvals)) - - if gen_id: - for i in dws: - xfm_file = op.abspath('init_%04d.mat' % i) - np.savetxt(xfm_file, np.eye(4)) - init_xfms.append(xfm_file) - else: - init_xfms = [in_xfms[i] for i in dws] - - return init_xfms From d0af5cc3224955f26c46f5eb837bf50ec1796a64 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 30 Dec 2015 00:39:24 +0100 Subject: [PATCH 32/42] resolve unmerged conflict --- nipype/pipeline/engine/workflows.py | 159 ---------------------------- 1 file changed, 159 deletions(-) diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index dc21462cfc..808a83b93e 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -999,165 +999,6 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, return ('\n' + prefix).join(dotlist) -<<<<<<< HEAD:nipype/pipeline/engine.py -def _write_inputs(node): - lines = [] - nodename = node.fullname.replace('.', '_') - for key, _ in node.inputs.items(): - val = getattr(node.inputs, key) - if isdefined(val): - if type(val) == str: - 
try: - func = create_function_from_source(val) - except RuntimeError, e: - lines.append("%s.inputs.%s = '%s'" % (nodename, key, val)) - else: - funcname = [name for name in func.func_globals - if name != '__builtins__'][0] - lines.append(cPickle.loads(val)) - if funcname == nodename: - lines[-1] = lines[-1].replace(' %s(' % funcname, - ' %s_1(' % funcname) - funcname = '%s_1' % funcname - lines.append('from nipype.utils.misc import getsource') - lines.append("%s.inputs.%s = getsource(%s)" % (nodename, - key, - funcname)) - else: - lines.append('%s.inputs.%s = %s' % (nodename, key, val)) - return lines - - -def format_node(node, format='python', include_config=False): - """Format a node in a given output syntax.""" - lines = [] - name = node.fullname.replace('.', '_') - if format == 'python': - klass = node._interface - importline = 'from %s import %s' % (klass.__module__, - klass.__class__.__name__) - comment = '# Node: %s' % node.fullname - spec = inspect.getargspec(node._interface.__init__) - args = spec.args[1:] - if args: - filled_args = [] - for arg in args: - if hasattr(node._interface, '_%s' % arg): - filled_args.append('%s=%s' % (arg, getattr(node._interface, - '_%s' % arg))) - args = ', '.join(filled_args) - else: - args = '' - klass_name = klass.__class__.__name__ - if isinstance(node, MapNode): - nodedef = '%s = MapNode(%s(%s), iterfield=%s, name="%s")' \ - % (name, klass_name, args, node.iterfield, name) - else: - nodedef = '%s = Node(%s(%s), name="%s")' \ - % (name, klass_name, args, name) - lines = [importline, comment, nodedef] - - if include_config: - lines = [importline, "from collections import OrderedDict", - comment, nodedef] - lines.append('%s.config = %s' % (name, node.config)) - - if node.iterables is not None: - lines.append('%s.iterables = %s' % (name, node.iterables)) - lines.extend(_write_inputs(node)) - - return lines - - -class WorkflowBase(object): - """Defines common attributes and functions for workflows and nodes.""" - - def __init__(self, name=None, base_dir=None): - """ Initialize base parameters of a workflow or node - - Parameters - ---------- - name : string (mandatory) - Name of this node. Name must be alphanumeric and not contain any - special characters (e.g., '.', '@'). - base_dir : string - base output directory (will be hashed before creations) - default=None, which results in the use of mkdtemp - - """ - self.base_dir = base_dir - self.config = None - self._verify_name(name) - self.name = name - # for compatibility with node expansion using iterables - self._id = self.name - self._hierarchy = None - - @property - def inputs(self): - raise NotImplementedError - - @property - def outputs(self): - raise NotImplementedError - - @property - def fullname(self): - fullname = self.name - if self._hierarchy: - fullname = self._hierarchy + '.' 
+ self.name - return fullname - - def clone(self, name): - """Clone a workflowbase object - - Parameters - ---------- - - name : string (mandatory) - A clone of node or workflow must have a new name - """ - if (name is None) or (name == self.name): - raise Exception('Cloning requires a new name') - self._verify_name(name) - clone = deepcopy(self) - clone.name = name - clone._id = name - clone._hierarchy = None - return clone - - def _check_outputs(self, parameter): - return hasattr(self.outputs, parameter) - - def _check_inputs(self, parameter): - if isinstance(self.inputs, DynamicTraitedSpec): - return True - return hasattr(self.inputs, parameter) - - def _verify_name(self, name): - valid_name = bool(re.match('^[\w-]+$', name)) - if not valid_name: - raise Exception('the name must not contain any special characters') - - def __repr__(self): - if self._hierarchy: - return '.'.join((self._hierarchy, self._id)) - else: - return self._id - - def save(self, filename=None): - if filename is None: - filename = 'temp.pklz' - savepkl(filename, self) - - def load(self, filename): - if '.npz' in filename: - DeprecationWarning(('npz files will be deprecated in the next ' - 'release. you can use numpy to open them.')) - return np.load(filename) - return loadpkl(filename) - - class InterfacedWorkflow(Workflow): """ A workflow that automatically generates the input and output nodes to avoid From 7a6001e5fa99d748ba8bd22c48f6b2398af40cbc Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 31 Dec 2015 12:52:24 +0100 Subject: [PATCH 33/42] add missing imports in engine --- nipype/pipeline/__init__.py | 3 ++- nipype/pipeline/engine/__init__.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/__init__.py b/nipype/pipeline/__init__.py index b7a6afe20e..802052ab36 100644 --- a/nipype/pipeline/__init__.py +++ b/nipype/pipeline/__init__.py @@ -7,4 +7,5 @@ from __future__ import absolute_import __docformat__ = 'restructuredtext' -from .engine import Node, MapNode, JoinNode, Workflow +from .engine import (Node, MapNode, JoinNode, Workflow, + InterfacedWorkflow, GraftWorkflow) diff --git a/nipype/pipeline/engine/__init__.py b/nipype/pipeline/engine/__init__.py index e950086307..5d46fe3570 100644 --- a/nipype/pipeline/engine/__init__.py +++ b/nipype/pipeline/engine/__init__.py @@ -9,6 +9,6 @@ from __future__ import absolute_import __docformat__ = 'restructuredtext' -from .workflows import Workflow +from .workflows import Workflow, InterfacedWorkflow, GraftWorkflow from .nodes import Node, MapNode, JoinNode from .utils import generate_expanded_graph From 38155fd74571d332000d306f90acbd22f947ec66 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 2 Jan 2016 12:18:01 +0100 Subject: [PATCH 34/42] fixing merge problems --- nipype/pipeline/engine/workflows.py | 43 +++++++++++++++++++---------- 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index 808a83b93e..ad1713334d 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -209,10 +209,11 @@ def connect(self, *args, **kwargs): # determine their inputs/outputs depending on # connection settings. Skip these modules in the check if dest in connected_ports[destnode]: - raise Exception(""" -Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already -connected. 
-""" % (srcnode, source, destnode, dest, dest, destnode)) + raise Exception( + 'Trying to connect %s:%s to %s:%s but input \'%s\' of ' + 'node \'%s\' is already connected.' % + (srcnode, source, destnode, dest, dest, destnode)) + if not (hasattr(destnode, '_interface') and '.io' in str(destnode._interface.__class__)): if not destnode._check_inputs(dest): @@ -708,9 +709,9 @@ def _has_attr(self, parameter, subtype='in'): """Checks if a parameter is available as an input or output """ if subtype == 'in': - subobject = self.inputs + subobject = self._get_all_inputs() else: - subobject = self.outputs + subobject = self._get_all_outputs() attrlist = parameter.split('.') cur_out = subobject for attr in attrlist: @@ -724,9 +725,9 @@ def _get_parameter_node(self, parameter, subtype='in'): output parameter """ if subtype == 'in': - subobject = self.inputs + subobject = self._get_all_inputs() else: - subobject = self.outputs + subobject = self._get_all_outputs() attrlist = parameter.split('.') cur_out = subobject for attr in attrlist[:-1]: @@ -740,8 +741,10 @@ def _check_inputs(self, parameter): return self._has_attr(parameter, subtype='in') def _get_inputs(self): - """Returns the inputs of a workflow + return self._get_all_inputs() + def _get_all_inputs(self): + """Returns the inputs of a workflow This function does not return any input ports that are already connected """ @@ -749,7 +752,7 @@ def _get_inputs(self): for node in self._graph.nodes(): inputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): - setattr(inputdict, node.name, node.inputs) + setattr(inputdict, node.name, node._get_all_inputs()) else: taken_inputs = [] for _, _, d in self._graph.in_edges_iter(nbunch=node, @@ -757,7 +760,7 @@ def _get_inputs(self): for cd in d['connect']: taken_inputs.append(cd[1]) unconnectedinputs = TraitedSpec() - for key, trait in list(node.inputs.items()): + for key, trait in node.inputs.items(): if key not in taken_inputs: unconnectedinputs.add_trait(key, traits.Trait(trait, @@ -769,16 +772,19 @@ def _get_inputs(self): return inputdict def _get_outputs(self): + return self._get_all_outputs() + + def _get_all_outputs(self): """Returns all possible output ports that are not already connected """ outputdict = TraitedSpec() for node in self._graph.nodes(): outputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): - setattr(outputdict, node.name, node.outputs) + setattr(outputdict, node.name, node._get_all_outputs()) elif node.outputs: outputs = TraitedSpec() - for key, _ in list(node.outputs.items()): + for key, _ in node.outputs.items(): outputs.add_trait(key, traits.Any(node=node)) setattr(outputs, key, None) setattr(outputdict, node.name, outputs) @@ -1065,7 +1071,16 @@ def connect(self, *args, **kwargs): elif len(args) == 4: conns = [(args[0], args[2], [(args[1], args[3])])] else: - raise Exception('unknown set of parameters to connect function') + raise TypeError('connect() takes either 4 arguments, or 1 list of' + ' connection tuples (%d args given)' % len(args)) + + disconnect = False + if kwargs: + disconnect = kwargs.get('disconnect', False) + + if disconnect: + self.disconnect(conns) + return connection_list = [] for conn in conns: From 5a90816e1fea945affc2f8e9c415982ef66cb99c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 2 Jan 2016 12:49:38 +0100 Subject: [PATCH 35/42] fixing print calls without parentheses --- nipype/interfaces/utility.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git 
a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index c886e0cc65..fe6c6885b5 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -501,6 +501,7 @@ def __setattr__(self, key, value): self._outputs[key] = value super(CollateInterfaceInputSpec, self).__setattr__(key, value) + class CollateInterface(IOBase): """ A simple interface to multiplex inputs through a unique output node. @@ -517,9 +518,9 @@ class CollateInterface(IOBase): >>> coll.inputs.src1_miscdata = 1.0 >>> coll.inputs.src2_miscdata = 2.0 >>> res = coll.run() - >>> print res.outputs.file + >>> print(res.outputs.file) ['exfile1.csv', 'exfile2.csv'] - >>> print res.outputs.miscdata + >>> print(res.outputs.miscdata) [1.0, 2.0] """ input_spec = CollateInterfaceInputSpec @@ -580,6 +581,7 @@ class MultipleSelectInputSpec(DynamicTraitedSpec): index = InputMultiPath(traits.Int, mandatory=True, desc='0-based indices of values to choose') + class MultipleSelectInterface(IOBase): """ Basic interface that demultiplexes lists generated by CollateInterface @@ -592,9 +594,9 @@ class MultipleSelectInterface(IOBase): >>> demux.inputs.file = ['exfile1.csv', 'exfile2.csv'] >>> demux.inputs.miscdata = [1.0, 2.0] >>> res = demux.run() - >>> print res.outputs.file + >>> print(res.outputs.file) exfile1.csv - >>> print res.outputs.miscdata + >>> print(res.outputs.miscdata) 1.0 """ input_spec = MultipleSelectInputSpec From cc0076b1e5006556270d3852f2487644472255f1 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sat, 2 Jan 2016 18:22:14 +0100 Subject: [PATCH 36/42] add regression tests --- nipype/pipeline/engine/tests/test_graft.py | 115 +++++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 nipype/pipeline/engine/tests/test_graft.py diff --git a/nipype/pipeline/engine/tests/test_graft.py b/nipype/pipeline/engine/tests/test_graft.py new file mode 100644 index 0000000000..f949479a93 --- /dev/null +++ b/nipype/pipeline/engine/tests/test_graft.py @@ -0,0 +1,115 @@ + +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +from copy import deepcopy +import os.path as op +from tempfile import mkdtemp +from shutil import rmtree + +from ...testing import (assert_raises, assert_equal, + assert_true, assert_false) +from ...interfaces import base as nib +from ...interfaces import utility as niu +from ...interfaces import io as nio +from ...pipeline import engine as pe + +ifresult = None + + +class SetInputSpec(nib.TraitedSpec): + val = nib.traits.Int(2, mandatory=True, desc='input') + + +class SetOutputSpec(nib.TraitedSpec): + out = nib.traits.Int(desc='ouput') + + +class SetInterface(nib.BaseInterface): + input_spec = SetInputSpec + output_spec = SetOutputSpec + _always_run = True + + def _run_interface(self, runtime): + global ifresult + runtime.returncode = 0 + ifresult = self.inputs.val + return runtime + + def _list_outputs(self): + global ifresult + outputs = self._outputs().get() + outputs['out'] = self.inputs.val + return outputs + + +def test_interfaced_workflow(): + global ifresult + + x = lambda: pe.InterfacedWorkflow(name='ShouldRaise') + yield assert_raises, ValueError, x + x = lambda: pe.InterfacedWorkflow(name='ShouldRaise', + input_names=['input0']) + yield assert_raises, ValueError, x + x = lambda: pe.InterfacedWorkflow(name='ShouldRaise', + output_names=['output0']) + yield assert_raises, ValueError, x + + wf = pe.InterfacedWorkflow( + name='InterfacedWorkflow', 
+        output_names=['output0'])
+
+    outputs = wf.outputs.get()
+    yield assert_equal, outputs, {'output0': None}
+
+    inputs = wf.inputs.get()
+    yield assert_equal, inputs, {'input0': None}
+
+    # test connections
+    mynode = pe.Node(SetInterface(), name='internalnode')
+    wf.connect('in', 'input0', mynode, 'val')
+    wf.connect(mynode, 'out', 'out', 'output0')
+
+    wf.inputs.input0 = 5
+    wf.run()
+
+    yield assert_equal, ifresult, 5
+
+
+def _base_workflow(name='InterfacedWorkflow', b=0):
+    def _sum(a):
+        return a + b + 1
+
+    wf = pe.InterfacedWorkflow(
+        name=name, input_names=['input0'],
+        output_names=['output0'])
+    sum0 = pe.Node(niu.Function(
+        input_names=['a'], output_names=['out'], function=_sum),
+        name='testnode')
+    # test connections
+    wf.connect('in', 'input0', sum0, 'a')
+    wf.connect(sum0, 'out', 'out', 'output0')
+    return wf
+
+
+def test_graft_workflow():
+    global ifresult
+    wf1 = _base_workflow('Inner0')
+    wf = pe.GraftWorkflow(
+        name='GraftWorkflow', fields_from=wf1)
+    wf.insert(wf1)
+    wf.insert(_base_workflow('Inner1', 2))
+
+    outer = pe.Workflow('OuterWorkflow')
+    mynode = pe.Node(SetInterface(), name='internalnode')
+
+    outer.connect([
+        (wf, mynode, [('outputnode.out', 'val')])
+    ])
+
+    wf.inputs.input0 = 3
+
+    ifresult = None
+    wf.run()
+    yield assert_equal, ifresult, [4, 6]

From 58f5a220082f26d467692e573f88e2410529b1b0 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Sat, 2 Jan 2016 18:47:12 +0100
Subject: [PATCH 37/42] fix imports

---
 nipype/pipeline/engine/tests/test_graft.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/nipype/pipeline/engine/tests/test_graft.py b/nipype/pipeline/engine/tests/test_graft.py
index f949479a93..2ed347163a 100644
--- a/nipype/pipeline/engine/tests/test_graft.py
+++ b/nipype/pipeline/engine/tests/test_graft.py
@@ -8,12 +8,12 @@
 from tempfile import mkdtemp
 from shutil import rmtree
 
-from ...testing import (assert_raises, assert_equal,
-                        assert_true, assert_false)
-from ...interfaces import base as nib
-from ...interfaces import utility as niu
-from ...interfaces import io as nio
-from ...pipeline import engine as pe
+from ....testing import (assert_raises, assert_equal,
+                         assert_true, assert_false)
+from ... import engine as pe
+from ....interfaces import base as nib
+from ....interfaces import utility as niu
+
 ifresult = None

From d6e5a96587dbf54521c38cb051381e897e8ee806 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Sat, 2 Jan 2016 21:54:55 +0100
Subject: [PATCH 38/42] fix tests

---
 nipype/pipeline/engine/tests/test_graft.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/nipype/pipeline/engine/tests/test_graft.py b/nipype/pipeline/engine/tests/test_graft.py
index 2ed347163a..9aa287680d 100644
--- a/nipype/pipeline/engine/tests/test_graft.py
+++ b/nipype/pipeline/engine/tests/test_graft.py
@@ -63,30 +63,31 @@ def test_interfaced_workflow():
     outputs = wf.outputs.get()
     yield assert_equal, outputs, {'output0': None}
 
-    inputs = wf.inputs.get()
-    yield assert_equal, inputs, {'input0': None}
-
     # test connections
     mynode = pe.Node(SetInterface(), name='internalnode')
     wf.connect('in', 'input0', mynode, 'val')
     wf.connect(mynode, 'out', 'out', 'output0')
 
     wf.inputs.input0 = 5
+    yield assert_equal, wf.inputs.get(), {'input0': 5}
+
     wf.run()
 
     yield assert_equal, ifresult, 5
 
 
 def _base_workflow(name='InterfacedWorkflow', b=0):
-    def _sum(a):
+    def _sum(a, b):
         return a + b + 1
 
     wf = pe.InterfacedWorkflow(
         name=name, input_names=['input0'],
         output_names=['output0'])
     sum0 = pe.Node(niu.Function(
-        input_names=['a'], output_names=['out'], function=_sum),
+        input_names=['a', 'b'], output_names=['out'], function=_sum),
         name='testnode')
+    sum0.inputs.b = b
+
     # test connections
     wf.connect('in', 'input0', sum0, 'a')
     wf.connect(sum0, 'out', 'out', 'output0')
@@ -105,11 +106,11 @@ def test_graft_workflow():
     mynode = pe.Node(SetInterface(), name='internalnode')
 
     outer.connect([
-        (wf, mynode, [('outputnode.out', 'val')])
+        (wf, mynode, [('outputnode.output0', 'val')])
     ])
 
     wf.inputs.input0 = 3
 
     ifresult = None
-    wf.run()
+    outer.run()
     yield assert_equal, ifresult, [4, 6]

From dbf8a75cc07f8f64eb0b22563161ccbaac3eba08 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Sat, 2 Jan 2016 22:54:58 +0100
Subject: [PATCH 39/42] fix failing test

---
 nipype/pipeline/engine/tests/test_graft.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nipype/pipeline/engine/tests/test_graft.py b/nipype/pipeline/engine/tests/test_graft.py
index 9aa287680d..8271cef645 100644
--- a/nipype/pipeline/engine/tests/test_graft.py
+++ b/nipype/pipeline/engine/tests/test_graft.py
@@ -19,11 +19,11 @@
 
 
 class SetInputSpec(nib.TraitedSpec):
-    val = nib.traits.Int(2, mandatory=True, desc='input')
+    val = nib.traits.Any(mandatory=True, desc='input')
 
 
 class SetOutputSpec(nib.TraitedSpec):
-    out = nib.traits.Int(desc='output')
+    out = nib.traits.Any(desc='output')
 
 
 class SetInterface(nib.BaseInterface):

From 04fea529c02563e237b11b5095cb48bb9f96b975 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Sun, 3 Jan 2016 11:45:52 +0100
Subject: [PATCH 40/42] fix error in CHANGES file

---
 CHANGES | 1 -
 1 file changed, 1 deletion(-)

diff --git a/CHANGES b/CHANGES
index 1ee5a75fd2..0b1c8ded9a 100644
--- a/CHANGES
+++ b/CHANGES
@@ -38,7 +38,6 @@ Release 0.11.0 (September 15, 2015)
   (https://github.com/nipy/nipype/pull/1085)
 * ENH: New interface for FSL fslcpgeom utility (https://github.com/nipy/nipype/pull/1152)
 * ENH: Added SLURMGraph plugin for submitting jobs to SLURM with dependencies (https://github.com/nipy/nipype/pull/1136)
->>>>>>> enh/RefactorEngineModule
 * FIX: Enable absolute path definitions in DCMStack (https://github.com/nipy/nipype/pull/1089,
   replaced by https://github.com/nipy/nipype/pull/1093)
 * ENH: New mesh.MeshWarpMaths to operate on surface-defined warpings

From f983d69422f415dc5018299bd13434fd8fb9d9af Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Sun, 3 Jan 2016 12:51:15 +0100
Subject: [PATCH 41/42] fix error when calling inputs(), add tests

---
 nipype/pipeline/engine/tests/test_graft.py | 28 ++++++++++++++++++++--
 nipype/pipeline/engine/workflows.py        |  3 ++-
 2 files changed, 28 insertions(+), 3 deletions(-)

diff --git a/nipype/pipeline/engine/tests/test_graft.py b/nipype/pipeline/engine/tests/test_graft.py
index 8271cef645..d9be6e0794 100644
--- a/nipype/pipeline/engine/tests/test_graft.py
+++ b/nipype/pipeline/engine/tests/test_graft.py
@@ -1,8 +1,8 @@
-
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
+
 from copy import deepcopy
 import os.path as op
 from tempfile import mkdtemp
@@ -60,6 +60,10 @@ def test_interfaced_workflow():
         name='InterfacedWorkflow', input_names=['input0'],
         output_names=['output0'])
 
+    # Check it doesn't expose inputs/outputs of internal nodes
+    inputs = wf.inputs.get()
+    yield assert_equal, inputs, {'input0': nib.Undefined}
+
     outputs = wf.outputs.get()
     yield assert_equal, outputs, {'output0': None}
 
@@ -68,13 +72,33 @@ def test_interfaced_workflow():
     wf.connect('in', 'input0', mynode, 'val')
     wf.connect(mynode, 'out', 'out', 'output0')
 
+    # test setting input
     wf.inputs.input0 = 5
     yield assert_equal, wf.inputs.get(), {'input0': 5}
 
     wf.run()
-
     yield assert_equal, ifresult, 5
 
+    # Try to insert a sub-workflow with an outbound connection
+    outernode = pe.Node(SetInterface(), name='outernode')
+    wf.connect(mynode, 'out', outernode, 'val')
+    wf2 = pe.InterfacedWorkflow(
+        name='InterfacedWorkflow2', input_names=['input0'],
+        output_names=['output0'])
+    x = lambda: wf2.connect('in', 'input0', wf, 'input0')
+    yield assert_raises, Exception, x
+
+    # Try to insert a sub-workflow without an outbound
+    # connection, and add it later
+    wf.disconnect(mynode, 'out', outernode, 'val')
+    wf3 = pe.InterfacedWorkflow(
+        name='InterfacedWorkflow3', input_names=['input0'],
+        output_names=['output0'])
+    wf3.connect('in', 'input0', wf, 'input0')
+    wf3.connect(wf, 'output0', 'out', 'output0')
+    wf.connect(mynode, 'out', outernode, 'val')
+    yield assert_raises, Exception, wf3.run
+
 
 def _base_workflow(name='InterfacedWorkflow', b=0):
     def _sum(a, b):
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index ad1713334d..cf3982390d 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -1055,6 +1055,7 @@ def __init__(self, name, base_dir=None, input_names=[], output_names=[]):
                               name='inputnode')
         self._outputnode = Node(IdentityInterface(fields=output_names),
                                 name='outputnode')
+        self.add_nodes([self._inputnode, self._outputnode])
 
     def connect(self, *args, **kwargs):
         """
@@ -1140,7 +1141,7 @@ def _get_inputs(self):
         node = self._inputnode
 
         taken_inputs = []
-        for _, _, d in self._graph.in_edges_iter(nbunch=node,
+        for _, _, d in self._graph.in_edges_iter(nbunch=[node],
                                                  data=True):
             for cd in d['connect']:
                 taken_inputs.append(cd[1])

From 00c24457b635a906bb84202a09aefdd6036e5425 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Mon, 4 Jan 2016 13:35:42 +0100
Subject: [PATCH 42/42] rewrite some tests

---
 nipype/pipeline/engine/tests/test_graft.py | 114 +++++++++++++++------
 1 file changed, 83 insertions(+), 31 deletions(-)

diff --git a/nipype/pipeline/engine/tests/test_graft.py b/nipype/pipeline/engine/tests/test_graft.py
index d9be6e0794..d2e7aa6821 100644
--- a/nipype/pipeline/engine/tests/test_graft.py
+++ b/nipype/pipeline/engine/tests/test_graft.py
@@ -44,6 +44,36 @@ def _list_outputs(self):
         return outputs
 
 
+def _base_workflow(name='InterfacedWorkflow'):
+    wf = pe.InterfacedWorkflow(
+        name=name, input_names=['input0'], output_names=['output0'])
+
+    mynode = pe.Node(SetInterface(), name='internalnode')
+    wf.connect('in', 'input0', mynode, 'val')
+    wf.connect(mynode, 'out', 'out', 'output0')
+    return wf
+
+
+def _sum_workflow(name='InterfacedSumWorkflow', b=0):
+    name += '%02d' % b
+
+    def _sum(a, b):
+        return a + b + 1
+
+    wf = pe.InterfacedWorkflow(
+        name=name, input_names=['input0'],
+        output_names=['output0'])
+    sum0 = pe.Node(niu.Function(
+        input_names=['a', 'b'], output_names=['out'], function=_sum),
+        name='testnode')
+    sum0.inputs.b = b
+
+    # test connections
+    wf.connect('in', 'input0', sum0, 'a')
+    wf.connect(sum0, 'out', 'out', 'output0')
+    return wf
+
+
 def test_interfaced_workflow():
     global ifresult
 
@@ -79,52 +109,74 @@ def test_interfaced_workflow():
     wf.run()
     yield assert_equal, ifresult, 5
 
-    # Try to insert a sub-workflow with an outbound connection
-    outernode = pe.Node(SetInterface(), name='outernode')
-    wf.connect(mynode, 'out', outernode, 'val')
-    wf2 = pe.InterfacedWorkflow(
-        name='InterfacedWorkflow2', input_names=['input0'],
-        output_names=['output0'])
-    x = lambda: wf2.connect('in', 'input0', wf, 'input0')
+    # Try to create an outbound connection from an inner node
+    wf = _base_workflow()
+    outerwf = pe.Workflow('OuterWorkflow')
+    outernode = pe.Node(niu.IdentityInterface(fields=['val']),
+                        name='outernode')
+    x = lambda: outerwf.connect(wf, 'internalnode.out', outernode, 'val')
     yield assert_raises, Exception, x
 
-    # Try to insert a sub-workflow without an outbound
-    # connection, and add it later
-    wf.disconnect(mynode, 'out', outernode, 'val')
-    wf3 = pe.InterfacedWorkflow(
-        name='InterfacedWorkflow3', input_names=['input0'],
-        output_names=['output0'])
-    wf3.connect('in', 'input0', wf, 'input0')
-    wf3.connect(wf, 'output0', 'out', 'output0')
-    wf.connect(mynode, 'out', outernode, 'val')
-    yield assert_raises, Exception, wf3.run
+    # Try to create an inbound connection from an outer node
+    wf = _base_workflow()
+    outerwf = pe.Workflow('OuterWorkflow')
+    outernode = pe.Node(niu.IdentityInterface(fields=['val']),
+                        name='outernode')
+    x = lambda: outerwf.connect(outernode, 'val', wf, 'internalnode.val')
+    yield assert_raises, Exception, x
 
+    # Try to insert a sub-workflow with an outbound connection
+    outerwf = pe.Workflow('OuterWorkflow')
+    outernode = pe.Node(niu.IdentityInterface(fields=['val']),
+                        name='outernode')
+
+    subwf = pe.Workflow('SubWorkflow')
+    inputnode = pe.Node(niu.IdentityInterface(fields=['in']), name='inputnode')
+    outputnode = pe.Node(niu.IdentityInterface(fields=['out']),
+                         name='outputnode')
+    subnode = pe.Node(SetInterface(), name='internalnode')
+    subwf.connect([
+        (inputnode, subnode, [('in', 'val')]),
+        (subnode, outputnode, [('out', 'out')]),
+    ])
 
-def _base_workflow(name='InterfacedWorkflow', b=0):
-    def _sum(a, b):
-        return a + b + 1
+    outerwf.connect(subwf, 'internalnode.out', outernode, 'val')
 
     wf = pe.InterfacedWorkflow(
-        name=name, input_names=['input0'],
+        name='InterfacedWorkflow', input_names=['input0'],
         output_names=['output0'])
-    sum0 = pe.Node(niu.Function(
-        input_names=['a', 'b'], output_names=['out'], function=_sum),
-        name='testnode')
-    sum0.inputs.b = b
+    x = lambda: wf.connect('in', 'input0', subwf, 'inputnode.in')
     yield assert_raises, Exception, x
 
-    # test connections
-    wf.connect('in', 'input0', sum0, 'a')
-    wf.connect(sum0, 'out', 'out', 'output0')
-    return wf
+    # Try to insert a sub-workflow with an inbound connection
+    outerwf = pe.Workflow('OuterWorkflow')
+    outernode = pe.Node(niu.IdentityInterface(fields=['val']),
+                        name='outernode')
+
+    subwf = pe.Workflow('SubWorkflow')
+    inputnode = pe.Node(niu.IdentityInterface(fields=['in']), name='inputnode')
+    outputnode = pe.Node(niu.IdentityInterface(fields=['out']),
+                         name='outputnode')
+    subnode = pe.Node(SetInterface(), name='internalnode')
+    subwf.connect([
+        (subnode, outputnode, [('out', 'out')]),
+    ])
+
+    outerwf.connect(outernode, 'val', subwf, 'internalnode.val')
 
     wf = pe.InterfacedWorkflow(
+        name='InterfacedWorkflow', input_names=['input0'],
         output_names=['output0'])
+    x = lambda: wf.connect('in', 'input0', subwf, 'inputnode.in')
     yield assert_raises, Exception, x
 
 
 def test_graft_workflow():
     global ifresult
-    wf1 = _base_workflow('Inner0')
+    wf1 = _sum_workflow()
     wf = pe.GraftWorkflow(
         name='GraftWorkflow', fields_from=wf1)
     wf.insert(wf1)
-    wf.insert(_base_workflow('Inner1', 2))
+    wf.insert(_sum_workflow(b=2))
 
     outer = pe.Workflow('OuterWorkflow')
     mynode = pe.Node(SetInterface(), name='internalnode')
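The regression tests in PATCH 36 through PATCH 42 double as the only usage
documentation for this feature. The sketch below condenses that usage into a
single script. It is a minimal sketch, not part of the series: the helper and
node names (_scale_workflow, branch_a, branch_b, sink) are hypothetical, and
it assumes the API exactly as exercised by test_graft.py on this branch: the
'in'/'out' pseudo-nodes of InterfacedWorkflow, GraftWorkflow(fields_from=...),
and per-field list outputs ordered by insertion (cf. the [4, 6] assertion in
test_graft_workflow).

from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu


def _scale_workflow(name, factor):
    # One alternative branch: an InterfacedWorkflow exposing the shared
    # input0/output0 interface, so no inputnode.*/outputnode.* prefixes
    # are needed at the call site.
    def _scale(a, f):
        return a * f

    wf = pe.InterfacedWorkflow(name=name, input_names=['input0'],
                               output_names=['output0'])
    node = pe.Node(niu.Function(input_names=['a', 'f'],
                                output_names=['out'], function=_scale),
                   name='scale')
    node.inputs.f = factor
    wf.connect('in', 'input0', node, 'a')      # 'in' is the input pseudo-node
    wf.connect(node, 'out', 'out', 'output0')  # 'out' is the output one
    return wf


branch_a = _scale_workflow('branch_a', 2)
branch_b = _scale_workflow('branch_b', 3)

# The graft copies its fields from the first branch, fans the shared input
# out to every inserted alternative, and collects each output field into
# one list per field, ordered by insertion.
graft = pe.GraftWorkflow(name='compare', fields_from=branch_a)
graft.insert(branch_a)
graft.insert(branch_b)

# As in test_graft_workflow, the graft is driven from an enclosing workflow
# so that downstream nodes receive the merged list.
outer = pe.Workflow(name='outer')
sink = pe.Node(niu.IdentityInterface(fields=['scaled']), name='sink')
outer.connect([(graft, sink, [('outputnode.output0', 'scaled')])])

graft.inputs.input0 = 5
outer.run()  # sink receives [10, 15]: one result per inserted branch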