diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py
index 42798502c1..2384904f3d 100644
--- a/nipype/algorithms/confounds.py
+++ b/nipype/algorithms/confounds.py
@@ -14,7 +14,6 @@
 import nibabel as nb
 import numpy as np
 from numpy.polynomial import Legendre
-from scipy import linalg
 
 from .. import config, logging
 from ..external.due import BibTeX
@@ -1186,7 +1185,7 @@ def compute_noise_components(imgseries, mask_images, num_components,
 
     # "The covariance matrix C = MMT was constructed and decomposed into its
     # principal components using a singular value decomposition."
-    u, _, _ = linalg.svd(M, full_matrices=False)
+    u, _, _ = np.linalg.svd(M, full_matrices=False)
     if components is None:
         components = u[:, :num_components]
     else:
diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py
index 5d5ec1c39f..a508eb5037 100644
--- a/nipype/algorithms/icc.py
+++ b/nipype/algorithms/icc.py
@@ -5,8 +5,8 @@
 import os
 import numpy as np
 from numpy import ones, kron, mean, eye, hstack, dot, tile
+from numpy.linalg import pinv
 import nibabel as nb
-from scipy.linalg import pinv
 from ..interfaces.base import BaseInterfaceInputSpec, TraitedSpec, \
     BaseInterface, traits, File
 from ..utils import NUMPY_MMAP
diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py
index d9074c48d3..0c35352f34 100644
--- a/nipype/algorithms/metrics.py
+++ b/nipype/algorithms/metrics.py
@@ -14,9 +14,6 @@
 
 import nibabel as nb
 import numpy as np
-from scipy.ndimage.morphology import binary_erosion
-from scipy.spatial.distance import cdist, euclidean, dice, jaccard
-from scipy.ndimage.measurements import center_of_mass, label
 
 from .. import config, logging
@@ -74,6 +71,7 @@ class Distance(BaseInterface):
     _hist_filename = "hist.pdf"
 
     def _find_border(self, data):
+        from scipy.ndimage.morphology import binary_erosion
         eroded = binary_erosion(data)
         border = np.logical_and(data, np.logical_not(eroded))
         return border
@@ -87,6 +85,7 @@ def _get_coordinates(self, data, affine):
         return coordinates[:3, :]
 
     def _eucl_min(self, nii1, nii2):
+        from scipy.spatial.distance import cdist, euclidean
         origdata1 = nii1.get_data().astype(np.bool)
         border1 = self._find_border(origdata1)
 
@@ -105,6 +104,8 @@ def _eucl_min(self, nii1, nii2):
             set1_coordinates.T[point1, :], set2_coordinates.T[point2, :])
 
     def _eucl_cog(self, nii1, nii2):
+        from scipy.spatial.distance import cdist
+        from scipy.ndimage.measurements import center_of_mass, label
         origdata1 = np.logical_and(nii1.get_data() != 0,
                                    np.logical_not(np.isnan(nii1.get_data())))
         cog_t = np.array(center_of_mass(origdata1.copy())).reshape(-1, 1)
@@ -128,6 +129,7 @@ def _eucl_cog(self, nii1, nii2):
         return np.mean(dist_matrix)
 
     def _eucl_mean(self, nii1, nii2, weighted=False):
+        from scipy.spatial.distance import cdist
         origdata1 = nii1.get_data().astype(np.bool)
         border1 = self._find_border(origdata1)
 
@@ -154,6 +156,7 @@ def _eucl_mean(self, nii1, nii2, weighted=False):
         return np.mean(min_dist_matrix)
 
     def _eucl_max(self, nii1, nii2):
+        from scipy.spatial.distance import cdist
         origdata1 = nii1.get_data()
         origdata1 = np.logical_not(
             np.logical_or(origdata1 == 0, np.isnan(origdata1)))
@@ -287,6 +290,7 @@ class Overlap(BaseInterface):
     output_spec = OverlapOutputSpec
 
     def _bool_vec_dissimilarity(self, booldata1, booldata2, method):
+        from scipy.spatial.distance import dice, jaccard
         methods = {'dice': dice, 'jaccard': jaccard}
         if not (np.any(booldata1) or np.any(booldata2)):
             return 0
diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py
index 8671767eaa..a4ecd3a5e2 100644
--- a/nipype/algorithms/misc.py
+++ b/nipype/algorithms/misc.py
@@ -15,10 +15,7 @@
 import nibabel as nb
 import numpy as np
 from math import floor, ceil
-from scipy.ndimage.morphology import grey_dilation
-import scipy.io as sio
 import itertools
-import scipy.stats as stats
 import warnings
 
 from .. import logging
@@ -103,6 +100,7 @@ def _get_brodmann_area(self):
         newdata[:int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0
         if self.inputs.dilation_size != 0:
+            from scipy.ndimage.morphology import grey_dilation
             newdata = grey_dilation(newdata,
                                     (2 * self.inputs.dilation_size + 1,
                                      2 * self.inputs.dilation_size + 1,
                                      2 * self.inputs.dilation_size + 1,
@@ -356,6 +354,7 @@ class Matlab2CSV(BaseInterface):
     output_spec = Matlab2CSVOutputSpec
 
     def _run_interface(self, runtime):
+        import scipy.io as sio
         in_dict = sio.loadmat(op.abspath(self.inputs.in_file))
 
         # Check if the file has multiple variables in it. If it does, loop
@@ -393,6 +392,7 @@ def _run_interface(self, runtime):
         return runtime
 
     def _list_outputs(self):
+        import scipy.io as sio
         outputs = self.output_spec().get()
         in_dict = sio.loadmat(op.abspath(self.inputs.in_file))
         saved_variables = list()
@@ -909,6 +909,7 @@ def calc_moments(timeseries_file, moment):
     timeseries_file -- text file with white space separated timepoints in rows
 
     """
+    import scipy.stats as stats
    timeseries = np.genfromtxt(timeseries_file)
 
     m2 = stats.moment(timeseries, 2, axis=0)
diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py
index 763e19fbc3..75abd8410b 100644
--- a/nipype/algorithms/modelgen.py
+++ b/nipype/algorithms/modelgen.py
@@ -20,7 +20,6 @@
 
 from nibabel import load
 import numpy as np
-from scipy.special import gammaln
 
 from ..utils import NUMPY_MMAP
 from ..interfaces.base import (BaseInterface, TraitedSpec, InputMultiPath,
@@ -84,6 +83,7 @@ def spm_hrf(RT, P=None, fMRI_T=16):
        -1.46257507e-04]
 
     """
+    from scipy.special import gammaln
     p = np.array([6, 16, 1, 1, 6, 0, 32], dtype=float)
     if P is not None:
         p[0:len(P)] = P
diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py
index a4d5b592c9..d412493714 100644
--- a/nipype/algorithms/rapidart.py
+++ b/nipype/algorithms/rapidart.py
@@ -21,8 +21,6 @@
 
 from nibabel import load, funcs, Nifti1Image
 import numpy as np
-from scipy import signal
-import scipy.io as sio
 
 from ..utils import NUMPY_MMAP
 from ..interfaces.base import (BaseInterface, traits, InputMultiPath,
@@ -151,7 +149,8 @@ def _calc_norm_affine(affines, use_differences, brain_pts=None):
                            (3, all_pts.shape[1])), axis=0)))
     else:
-        newpos = np.abs(signal.detrend(newpos, axis=0, type='constant'))
+        from scipy.signal import detrend
+        newpos = np.abs(detrend(newpos, axis=0, type='constant'))
     normdata = np.sqrt(np.mean(np.power(newpos, 2), axis=1))
     return normdata, displacement
 
 
@@ -411,6 +410,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None):
         """
         Core routine for detecting outliers
         """
+        from scipy import signal
         if not cwd:
             cwd = os.getcwd()
 
@@ -750,6 +750,7 @@ def _get_spm_submatrix(self, spmmat, sessidx, rows=None):
     def _run_interface(self, runtime):
         """Execute this module.
         """
+        import scipy.io as sio
         motparamlist = self.inputs.realignment_parameters
         intensityfiles = self.inputs.intensity_values
         spmmat = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False)
diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py
index 6177b449f9..5c62c82726 100644
--- a/nipype/interfaces/base/core.py
+++ b/nipype/interfaces/base/core.py
@@ -1164,12 +1164,10 @@ class LibraryBaseInterface(BaseInterface):
     def __init__(self, check_import=True, *args, **kwargs):
         super(LibraryBaseInterface, self).__init__(*args, **kwargs)
         if check_import:
-            import importlib
+            import pkgutil
             failed_imports = []
             for pkg in (self._pkg,) + tuple(self.imports):
-                try:
-                    importlib.import_module(pkg)
-                except ImportError:
+                if pkgutil.find_loader(pkg) is None:
                     failed_imports.append(pkg)
             if failed_imports:
                 iflogger.warning('Unable to import %s; %s interface may fail to '
diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py
index c26220c6b0..20293ab630 100644
--- a/nipype/interfaces/cmtk/cmtk.py
+++ b/nipype/interfaces/cmtk/cmtk.py
@@ -11,7 +11,6 @@
 import numpy as np
 import nibabel as nb
 import networkx as nx
-import scipy.io as sio
 
 from ... import logging
 from ...utils.filemanip import split_filename
@@ -178,6 +177,7 @@ def cmat(track_file,
          endpoint_name,
          intersections=False):
     """ Create the connection matrix for each resolution using fibers and ROIs. """
+    import scipy.io as sio
     stats = {}
     iflogger.info('Running cmat function')
diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py
index 1b58494f2c..cd6ad4877e 100644
--- a/nipype/interfaces/cmtk/nx.py
+++ b/nipype/interfaces/cmtk/nx.py
@@ -10,7 +10,6 @@
 
 import numpy as np
 import networkx as nx
-import scipy.io as sio
 
 from ... import logging
 from ...utils.filemanip import split_filename
@@ -94,6 +93,7 @@ def average_networks(in_files, ntwk_res_file, group_id):
     """
     import networkx as nx
     import os.path as op
+    import scipy.io as sio
     iflogger.info('Creating average network for group: %s', group_id)
     matlab_network_list = []
     if len(in_files) == 1:
@@ -442,6 +442,7 @@ class NetworkXMetrics(BaseInterface):
     output_spec = NetworkXMetricsOutputSpec
 
     def _run_interface(self, runtime):
+        import scipy.io as sio
         global gpickled, nodentwks, edgentwks, kntwks, matlab
         gpickled = list()
         nodentwks = list()
diff --git a/nipype/interfaces/image.py b/nipype/interfaces/image.py
index 8c47420063..061bd1e2cc 100644
--- a/nipype/interfaces/image.py
+++ b/nipype/interfaces/image.py
@@ -2,9 +2,6 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 
-import numpy as np
-import nibabel as nb
-
 from ..utils.filemanip import fname_presuffix
 from .base import (SimpleInterface, TraitedSpec, BaseInterfaceInputSpec,
                    traits, File)
@@ -63,6 +60,9 @@ class Rescale(SimpleInterface):
     output_spec = RescaleOutputSpec
 
     def _run_interface(self, runtime):
+        import numpy as np
+        import nibabel as nb
+
         img = nb.load(self.inputs.in_file)
         data = img.get_data()
         ref_data = nb.load(self.inputs.ref_file).get_data()
@@ -171,6 +171,8 @@ class Reorient(SimpleInterface):
     output_spec = ReorientOutputSpec
 
     def _run_interface(self, runtime):
+        import numpy as np
+        import nibabel as nb
         from nibabel.orientations import (
             axcodes2ornt, ornt_transform, inv_ornt_aff)
@@ -211,6 +213,8 @@ def _run_interface(self, runtime):
 def _as_reoriented_backport(img, ornt):
     """Backport of img.as_reoriented as of nibabel 2.2.0"""
+    import numpy as np
+    import nibabel as nb
     from nibabel.orientations import inv_ornt_aff
     if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]):
         return img
diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py
index 627d9ca7ac..e11ba47479 100644
--- a/nipype/interfaces/io.py
+++ b/nipype/interfaces/io.py
@@ -29,44 +29,15 @@
 from os.path import join, dirname
 from warnings import warn
 
-import sqlite3
-
 from .. import config, logging
 from ..utils.filemanip import (
     copyfile, simplify_list, ensure_list,
-    get_related_files, related_filetype_sets)
+    get_related_files)
 from ..utils.misc import human_order_sorted, str2bool
 from .base import (
     TraitedSpec, traits, Str, File, Directory, BaseInterface, InputMultiPath,
-    isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec)
-
-have_pybids = True
-try:
-    import bids
-except ImportError:
-    have_pybids = False
-
-if have_pybids:
-    try:
-        from bids import layout as bidslayout
-    except ImportError:
-        from bids import grabbids as bidslayout
-
-try:
-    import pyxnat
-except:
-    pass
-
-try:
-    import paramiko
-except:
-    pass
-
-try:
-    import boto
-    from boto.s3.connection import S3Connection, OrdinaryCallingFormat
-except:
-    pass
+    isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec,
+    LibraryBaseInterface)
 
 iflogger = logging.getLogger('nipype.interface')
@@ -536,8 +507,6 @@ def _fetch_bucket(self, bucket_name):
         '''
 
         # Import packages
-        import logging
-
         try:
             import boto3
             import botocore
@@ -607,7 +576,6 @@ def _upload_to_s3(self, bucket, src, dst):
 
         # Import packages
         import hashlib
-        import logging
         import os
 
         from botocore.exceptions import ClientError
@@ -849,7 +817,7 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
         desc='Information to plug into template')
 
 
-class S3DataGrabber(IOBase):
+class S3DataGrabber(LibraryBaseInterface, IOBase):
     """ Generic datagrabber module that wraps around glob in an
         intelligent way for neuroimaging tasks to grab files from
         Amazon S3
@@ -865,6 +833,7 @@ class S3DataGrabber(IOBase):
     input_spec = S3DataGrabberInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = True
+    _pkg = 'boto'
 
     def __init__(self, infields=None, outfields=None, **kwargs):
         """
@@ -919,6 +888,7 @@ def _add_output_traits(self, base):
     def _list_outputs(self):
         # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically
         # hence manual check
+        import boto
         if self._infields:
             for key in self._infields:
                 value = getattr(self.inputs, key)
@@ -1035,6 +1005,7 @@ def _list_outputs(self):
     # Takes an s3 address and downloads the file to a local
     # directory, returning the local path.
     def s3tolocal(self, s3path, bkt):
+        import boto
         # path formatting
         if not os.path.split(self.inputs.local_directory)[1] == '':
             self.inputs.local_directory += '/'
@@ -1817,7 +1788,7 @@ class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
     cache_dir = Directory(desc='Cache directory')
 
 
-class XNATSource(IOBase):
+class XNATSource(LibraryBaseInterface, IOBase):
     """ Generic XNATSource module that wraps around the pyxnat module in
         an intelligent way for neuroimaging tasks to grab files and data
         from an XNAT server.
@@ -1852,6 +1823,7 @@ class XNATSource(IOBase):
     """
     input_spec = XNATSourceInputSpec
     output_spec = DynamicTraitedSpec
+    _pkg = 'pyxnat'
 
     def __init__(self, infields=None, outfields=None, **kwargs):
         """
@@ -1901,6 +1873,7 @@ def _add_output_traits(self, base):
     def _list_outputs(self):
         # infields are mandatory, however I could not figure out
         # how to set 'mandatory' flag dynamically, hence manual check
+        import pyxnat
 
         cache_dir = self.inputs.cache_dir or tempfile.gettempdir()
@@ -2034,16 +2007,18 @@ def __setattr__(self, key, value):
             super(XNATSinkInputSpec, self).__setattr__(key, value)
 
 
-class XNATSink(IOBase):
+class XNATSink(LibraryBaseInterface, IOBase):
     """ Generic datasink module that takes a directory containing a
         list of nifti files and provides a set of structured output
         fields.
     """
     input_spec = XNATSinkInputSpec
+    _pkg = 'pyxnat'
 
     def _list_outputs(self):
         """Execute this module.
         """
+        import pyxnat
 
         # setup XNAT connection
         cache_dir = self.inputs.cache_dir or tempfile.gettempdir()
@@ -2202,7 +2177,7 @@ class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
     table_name = Str(mandatory=True)
 
 
-class SQLiteSink(IOBase):
+class SQLiteSink(LibraryBaseInterface, IOBase):
     """ Very simple frontend for storing values into SQLite database.
 
         .. warning::
@@ -2222,6 +2197,7 @@ class SQLiteSink(IOBase):
     """
     input_spec = SQLiteSinkInputSpec
+    _pkg = 'sqlite3'
 
     def __init__(self, input_names, **inputs):
 
@@ -2233,6 +2209,7 @@ def __init__(self, input_names, **inputs):
     def _list_outputs(self):
         """Execute this module.
         """
+        import sqlite3
         conn = sqlite3.connect(
             self.inputs.database_file, check_same_thread=False)
         c = conn.cursor()
@@ -2333,7 +2310,7 @@ class SSHDataGrabberInputSpec(DataGrabberInputSpec):
         desc='If set SSH commands will be logged to the given file')
 
 
-class SSHDataGrabber(DataGrabber):
+class SSHDataGrabber(LibraryBaseInterface, DataGrabber):
    """ Extension of DataGrabber module that downloads the file list and
        optionally the files from a SSH server. The SSH operation must
        not need user and password so an SSH agent must be active in
@@ -2397,6 +2374,7 @@ class SSHDataGrabber(DataGrabber):
     input_spec = SSHDataGrabberInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = False
+    _pkg = 'paramiko'
 
     def __init__(self, infields=None, outfields=None, **kwargs):
         """
@@ -2411,11 +2389,6 @@ def __init__(self, infields=None, outfields=None, **kwargs):
 
         See class examples for usage
 
         """
-        try:
-            paramiko
-        except NameError:
-            warn("The library paramiko needs to be installed"
-                 " for this module to run.")
         if not outfields:
             outfields = ['outfiles']
         kwargs = kwargs.copy()
@@ -2490,11 +2463,7 @@ def _get_files_over_ssh(self, template):
         return outfiles
 
     def _list_outputs(self):
-        try:
-            paramiko
-        except NameError:
-            raise ImportError("The library paramiko needs to be installed"
-                              " for this module to run.")
+        import paramiko
 
         if len(self.inputs.ssh_log_to_file) > 0:
             paramiko.util.log_to_file(self.inputs.ssh_log_to_file)
@@ -2574,6 +2543,7 @@ def _list_outputs(self):
         return outputs
 
     def _get_ssh_client(self):
+        import paramiko
         config = paramiko.SSHConfig()
         config.parse(open(os.path.expanduser('~/.ssh/config')))
         host = config.lookup(self.inputs.hostname)
@@ -2765,7 +2735,7 @@ class BIDSDataGrabberInputSpec(DynamicTraitedSpec):
                               'ignore derivatives/, sourcedata/, etc.)')
 
 
-class BIDSDataGrabber(IOBase):
+class BIDSDataGrabber(LibraryBaseInterface, IOBase):
     """ BIDS datagrabber module that wraps around pybids to allow arbitrary
     querying of BIDS datasets.
@@ -2798,6 +2768,7 @@ class BIDSDataGrabber(IOBase):
     input_spec = BIDSDataGrabberInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = True
+    _pkg = 'bids'
 
     def __init__(self, infields=None, **kwargs):
         """
@@ -2815,7 +2786,12 @@ def __init__(self, infields=None, **kwargs):
         }
 
         # If infields is empty, use all BIDS entities
-        if infields is None and have_pybids:
+        if infields is None:
+            # Version resilience
+            try:
+                from bids import layout as bidslayout
+            except ImportError:
+                from bids import grabbids as bidslayout
             bids_config = join(dirname(bidslayout.__file__), 'config', 'bids.json')
             bids_config = json.load(open(bids_config, 'r'))
             infields = [i['name'] for i in bids_config['entities']]
@@ -2830,18 +2806,16 @@ def __init__(self, infields=None, **kwargs):
 
         self.inputs.trait_set(trait_change_notify=False, **undefined_traits)
 
-    def _run_interface(self, runtime):
-        if not have_pybids:
-            raise ImportError(
-                "The BIDSEventsGrabber interface requires pybids."
-                " Please make sure it is installed.")
-        return runtime
-
     def _list_outputs(self):
+        # Version resilience
+        try:
+            from bids import BIDSLayout
+        except ImportError:
+            from bids.grabbids import BIDSLayout
         exclude = None
         if self.inputs.strict:
             exclude = ['derivatives/', 'code/', 'sourcedata/']
-        layout = bidslayout.BIDSLayout(self.inputs.base_dir, exclude=exclude)
+        layout = BIDSLayout(self.inputs.base_dir, exclude=exclude)
 
         # If infield is not given nm input value, silently ignore
         filters = {}
diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py
index 8746728990..0c2ddf4334 100644
--- a/nipype/interfaces/nipy/model.py
+++ b/nipype/interfaces/nipy/model.py
@@ -5,12 +5,9 @@
 
 import os
 
-import nibabel as nb
-import numpy as np
-
 from ...utils import NUMPY_MMAP
 
-from .base import NipyBaseInterface, have_nipy
+from .base import NipyBaseInterface
 from ..base import (TraitedSpec, traits, File, OutputMultiPath,
                     BaseInterfaceInputSpec, isdefined)
@@ -87,6 +84,8 @@ class FitGLM(NipyBaseInterface):
     output_spec = FitGLMOutputSpec
 
     def _run_interface(self, runtime):
+        import nibabel as nb
+        import numpy as np
         import nipy.modalities.fmri.glm as GLM
         import nipy.modalities.fmri.design_matrix as dm
         try:
@@ -282,6 +281,8 @@ class EstimateContrast(NipyBaseInterface):
     output_spec = EstimateContrastOutputSpec
 
     def _run_interface(self, runtime):
+        import nibabel as nb
+        import numpy as np
         import nipy.modalities.fmri.glm as GLM
 
         beta_nii = nb.load(self.inputs.beta)
diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py
index 214a6e7a2f..fd93dfc522 100644
--- a/nipype/interfaces/spm/base.py
+++ b/nipype/interfaces/spm/base.py
@@ -25,7 +25,6 @@
 # Third-party imports
 from nibabel import load
 import numpy as np
-from scipy.io import savemat
 
 # Local imports
 from ... import logging
@@ -572,6 +571,7 @@ def _make_matlab_command(self, contents, postscript=None):
                              (self.jobtype, self.jobname), contents[0])
         else:
+            from scipy.io import savemat
             jobdef = {
                 'jobs': [{
                     self.jobtype: [{
diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py
index 3e26ab6e2a..5293346dbb 100644
--- a/nipype/interfaces/spm/model.py
+++ b/nipype/interfaces/spm/model.py
@@ -14,7 +14,6 @@
 
 # Third-party imports
 import numpy as np
-import scipy.io as sio
 
 # Local imports
 from ... import logging
@@ -273,6 +272,7 @@ def _parse_inputs(self):
         return einputs
 
     def _list_outputs(self):
+        import scipy.io as sio
         outputs = self._outputs().get()
         pth = os.path.dirname(self.inputs.spm_mat_file)
         outtype = 'nii' if '12' in self.version.split('.')[0] else 'img'
@@ -475,6 +475,7 @@ def _make_matlab_command(self, _):
         return script
 
     def _list_outputs(self):
+        import scipy.io as sio
         outputs = self._outputs().get()
         pth, _ = os.path.split(self.inputs.spm_mat_file)
         spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False)
diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py
index 80a7089be8..864951f36a 100644
--- a/nipype/interfaces/utility/base.py
+++ b/nipype/interfaces/utility/base.py
@@ -16,7 +16,6 @@
 import os
 import re
 import numpy as np
-import nibabel as nb
 
 from ..base import (traits, TraitedSpec, DynamicTraitedSpec, File, Undefined,
                     isdefined, OutputMultiPath, InputMultiPath, BaseInterface,
@@ -416,7 +415,7 @@ class AssertEqual(BaseInterface):
     input_spec = AssertEqualInputSpec
 
     def _run_interface(self, runtime):
-
+        import nibabel as nb
         data1 = nb.load(self.inputs.volume1).get_data()
         data2 = nb.load(self.inputs.volume2).get_data()
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index 7df4fa15ca..3d961126d5 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -21,7 +21,6 @@
 from functools import reduce
 
 import numpy as np
-import networkx as nx
 from future import standard_library
 
 from ... import logging, config, LooseVersion
@@ -55,12 +54,6 @@
 logger = logging.getLogger('nipype.workflow')
 PY3 = sys.version_info[0] > 2
 
-try:
-    dfs_preorder = nx.dfs_preorder
-except AttributeError:
-    dfs_preorder = nx.dfs_preorder_nodes
-    logger.debug('networkx 1.4 dev or higher detected')
-
 
 def _parameterization_dir(param):
     """
@@ -543,6 +536,7 @@ def _create_dot_graph(graph, show_connectinfo=False, simple_form=True):
     Ensures that edge info is pickleable.
     """
     logger.debug('creating dot graph')
+    import networkx as nx
     pklgraph = nx.DiGraph()
     for edge in graph.edges():
         data = graph.get_edge_data(*edge)
@@ -569,6 +563,7 @@ def _write_detailed_dot(graph, dotfilename):
         struct1:f2 -> struct3:here;
     }
     """
+    import networkx as nx
     text = ['digraph structs {', 'node [shape=record];']
     # write nodes
     edges = []
@@ -748,6 +743,7 @@ def evaluate_connect_function(function_source, args, first_arg):
 
 
 def get_levels(G):
+    import networkx as nx
     levels = {}
     for n in nx.topological_sort(G):
         levels[n] = 0
@@ -891,6 +887,7 @@ def _identity_nodes(graph, include_iterables):
     are included if and only if the include_iterables flag is set to True.
 
     """
+    import networkx as nx
     return [
         node for node in nx.topological_sort(graph)
         if isinstance(node.interface, IdentityInterface) and (
@@ -994,6 +991,12 @@ def generate_expanded_graph(graph_in):
     and b=[3,4] this procedure will generate a graph with sub-graphs
     parameterized as (a=1,b=3), (a=1,b=4), (a=2,b=3) and (a=2,b=4).
     """
+    import networkx as nx
+    try:
+        dfs_preorder = nx.dfs_preorder
+    except AttributeError:
+        dfs_preorder = nx.dfs_preorder_nodes
+
     logger.debug("PE: expanding iterables")
     graph_in = _remove_nonjoin_identity_nodes(graph_in, keep_iterables=True)
     # standardize the iterables as {(field, function)} dictionaries
@@ -1222,6 +1225,7 @@ def _iterable_nodes(graph_in):
 
     Return the iterable nodes list
     """
+    import networkx as nx
     nodes = nx.topological_sort(graph_in)
     inodes = [node for node in nodes if node.iterables is not None]
     inodes_no_src = [node for node in inodes if not node.itersource]
@@ -1349,6 +1353,7 @@ def export_graph(graph_in,
     Indicates whether to show the edge data on the graph. This
     makes the graph rather cluttered. default [False]
     """
+    import networkx as nx
     graph = deepcopy(graph_in)
     if use_execgraph:
         graph = generate_expanded_graph(graph)
@@ -1716,6 +1721,7 @@ def write_workflow_resources(graph, filename=None, append=None):
 def topological_sort(graph, depth_first=False):
     """Returns a depth first sorted order if depth_first is True
     """
+    import networkx as nx
     nodesort = list(nx.topological_sort(graph))
     if not depth_first:
         return nodesort, None
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index 14738cae1d..8a3649660f 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -19,7 +19,6 @@
 import shutil
 
 import numpy as np
-import networkx as nx
 
 from ... import config, logging
 from ...utils.misc import str2bool
@@ -56,6 +55,7 @@ def __init__(self, name, base_dir=None):
             path to workflow storage
 
         """
+        import networkx as nx
         super(Workflow, self).__init__(name, base_dir)
         self._graph = nx.DiGraph()
@@ -366,6 +366,7 @@ def get_node(self, name):
     def list_node_names(self):
         """List names of all nodes in a workflow
         """
+        import networkx as nx
         outlist = []
         for node in nx.topological_sort(self._graph):
             if isinstance(node, Workflow):
@@ -482,6 +483,7 @@ def export(self,
             whether to include node and workflow config values
 
         """
+        import networkx as nx
         formats = ["python"]
         if format not in formats:
             raise ValueError('format must be one of: %s' % '|'.join(formats))
@@ -870,6 +872,7 @@ def _reset_hierarchy(self):
     def _generate_flatgraph(self):
         """Generate a graph containing only Nodes or MapNodes
         """
+        import networkx as nx
         logger.debug('expanding workflow: %s', self)
         nodes2remove = []
         if not nx.is_directed_acyclic_graph(self._graph):
@@ -942,6 +945,7 @@ def _get_dot(self,
                  level=0):
         """Create a dot file with connection info
         """
+        import networkx as nx
         if prefix is None:
             prefix = '  '
         if hierarchy is None:
diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py
index 122b398b23..a30838a323 100644
--- a/nipype/pipeline/plugins/base.py
+++ b/nipype/pipeline/plugins/base.py
@@ -16,12 +16,11 @@
 from traceback import format_exception
 
 import numpy as np
-import scipy.sparse as ssp
 
 from ... import logging
 from ...utils.filemanip import loadpkl
 from ...utils.misc import str2bool
-from ..engine.utils import (nx, dfs_preorder, topological_sort)
+from ..engine.utils import topological_sort
 from ..engine import MapNode
 from .tools import report_crash, report_nodes_not_run, create_pyscript
@@ -235,6 +234,7 @@ def _clean_queue(self, jobid, graph, result=None):
         return self._remove_node_deps(jobid, crashfile, graph)
 
     def _submit_mapnode(self, jobid):
+        import scipy.sparse as ssp
         if jobid in self.mapnodes:
             return True
         self.mapnodes.append(jobid)
@@ -391,6 +391,8 @@ def _task_finished_cb(self, jobid, cached=False):
     def _generate_dependency_list(self, graph):
         """ Generates a dependency list for a list of graphs.
         """
+        import networkx as nx
+
         self.procs, _ = topological_sort(graph)
         try:
             self.depidx = nx.to_scipy_sparse_matrix(
@@ -403,6 +405,11 @@ def _generate_dependency_list(self, graph):
         self.proc_pending = np.zeros(len(self.procs), dtype=bool)
 
     def _remove_node_deps(self, jobid, crashfile, graph):
+        import networkx as nx
+        try:
+            dfs_preorder = nx.dfs_preorder
+        except AttributeError:
+            dfs_preorder = nx.dfs_preorder_nodes
         subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])]
         for node in subnodes:
             idx = self.procs.index(node)
@@ -538,6 +545,7 @@ def __init__(self, plugin_args=None):
         super(GraphPluginBase, self).__init__(plugin_args=plugin_args)
 
     def run(self, graph, config, updatehash=False):
+        import networkx as nx
         pyfiles = []
         dependencies = {}
         self._config = config
diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py
index 41f5c998fe..3f8ec51463 100644
--- a/nipype/pipeline/plugins/linear.py
+++ b/nipype/pipeline/plugins/linear.py
@@ -7,11 +7,9 @@
                         absolute_import)
 
 import os
-
-import networkx as nx
 from .base import (PluginBase, logger, report_crash, report_nodes_not_run,
                    str2bool)
-from ..engine.utils import dfs_preorder, topological_sort
+from ..engine.utils import topological_sort
 
 
 class LinearPlugin(PluginBase):
@@ -27,6 +25,11 @@ def run(self, graph, config, updatehash=False):
         graph : networkx digraph
             defines order of execution
         """
+        import networkx as nx
+        try:
+            dfs_preorder = nx.dfs_preorder
+        except AttributeError:
+            dfs_preorder = nx.dfs_preorder_nodes
 
         if not isinstance(graph, nx.DiGraph):
             raise ValueError('Input must be a networkx digraph object')