From 6c6cd787e71ca730fceae0764a2f55dacdd5baf4 Mon Sep 17 00:00:00 2001
From: oesteban
Date: Tue, 20 Sep 2016 20:27:18 -0700
Subject: [PATCH 1/4] [FIXES] Several fixes related to unicode literals

- Close #1655: fixes the References:: leftover in help() of interfaces
- Close #1644: fixes the parameterized paths generated with python 2
- All uses of repr and re-definitions of __repr__ have been revised
  (again, this is related to #1644, #1621, etc.).
---
 nipype/algorithms/modelgen.py   |  2 +-
 nipype/caching/memory.py        | 10 +++----
 nipype/external/due.py          |  2 +-
 nipype/interfaces/base.py       |  9 ++++--
 nipype/interfaces/fsl/dti.py    |  2 +-
 nipype/pipeline/engine/base.py  |  2 +-
 nipype/pipeline/engine/nodes.py | 50 +++++++++++++--------------------
 nipype/pipeline/engine/utils.py |  6 ++--
 nipype/pipeline/plugins/sge.py  | 10 +++----
 nipype/utils/filemanip.py       |  4 +--
 10 files changed, 43 insertions(+), 54 deletions(-)

diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py
index df3d929b51..3536a257bf 100644
--- a/nipype/algorithms/modelgen.py
+++ b/nipype/algorithms/modelgen.py
@@ -547,7 +547,7 @@ def _generate_design(self, infolist=None):
                     out = np.array([])

                 if out.size > 0:
-                    iflogger.debug('fname=%s, out=%s, nscans=%s', filename, out, repr(sum(nscans[0:i])))
+                    iflogger.debug('fname=%s, out=%s, nscans=%d', filename, out, sum(nscans[0:i]))
                     sumscans = out.astype(int) + sum(nscans[0:i])

                     if out.size == 1:
diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py
index 2e672478dc..a4801fc3fb 100644
--- a/nipype/caching/memory.py
+++ b/nipype/caching/memory.py
@@ -93,10 +93,9 @@ def __call__(self, **kwargs):
         return out

     def __repr__(self):
-        return '%s(%s.%s, base_dir=%s)' % (self.__class__.__name__,
-                                           self.interface.__module__,
-                                           self.interface.__name__,
-                                           self.base_dir)
+        return '{}({}.{}, base_dir={})'.format(
+            self.__class__.__name__, self.interface.__module__, self.interface.__name__,
+            self.base_dir)


 ################################################################################
 # Memory manager: provide some tracking about what is computed when, to
@@ -302,5 +301,4 @@ def _clear_all_but(self, runs, warn=True):
                            job_names, warn=warn)

     def __repr__(self):
-        return '%s(base_dir=%s)' % (self.__class__.__name__,
-                                    self.base_dir)
+        return '{}(base_dir={})'.format(self.__class__.__name__, self.base_dir)
diff --git a/nipype/external/due.py b/nipype/external/due.py
index 35d35d7ee3..b053765183 100644
--- a/nipype/external/due.py
+++ b/nipype/external/due.py
@@ -42,7 +42,7 @@ def nondecorating_decorator(func):
     cite = load = add = _donothing

     def __repr__(self):
-        return self.__class__.__name__ + '()'
+        return '{}()'.format(self.__class__.__name__)


 def _donothing_func(*args, **kwargs):
diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py
index 188053c639..0d09a3c1d0 100644
--- a/nipype/interfaces/base.py
+++ b/nipype/interfaces/base.py
@@ -72,7 +72,7 @@ def __init__(self, value):
         self.value = value

     def __str__(self):
-        return repr(self.value)
+        return '{}'.format(self.value)


 def _exists_in_path(cmd, environ):
     """
@@ -381,7 +381,7 @@ def __repr__(self):
         outstr = []
         for name, value in sorted(self.trait_get().items()):
             outstr.append('%s = %s' % (name, value))
-        return '\n' + '\n'.join(outstr) + '\n'
+        return '\n{}\n'.format('\n'.join(outstr))

     def _generate_handlers(self):
         """Find all traits with the 'xor' metadata and attach an event
@@ -808,10 +808,13 @@ def help(cls, returnhelp=False):
     def _refs_help(cls):
         """ Prints interface references.
""" + if not cls.references_: + return [] + helpstr = ['References::'] for r in cls.references_: - helpstr += [repr(r['entry'])] + helpstr += ['{}'.format(r['entry'])] return helpstr diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index 8e394392b7..fdbc80e0ca 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -946,7 +946,7 @@ def _list_outputs(self): cwd, base_name = os.path.split(name) outputs['out_files'].append(self._gen_fname( base_name, cwd=cwd, - suffix='_proj_seg_thr_' + repr(self.inputs.threshold))) + suffix='_proj_seg_thr_{}'.format(self.inputs.threshold))) return outputs diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 7bb319e315..25f2e3c0e7 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -105,7 +105,7 @@ def __repr__(self): if self._hierarchy: return '.'.join((self._hierarchy, self._id)) else: - return self._id + return '{}'.format(self._id) def save(self, filename=None): if filename is None: diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index e19a41dcb8..d047298ba6 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -205,20 +205,16 @@ def output_dir(self): if self._hierarchy: outputdir = op.join(outputdir, *self._hierarchy.split('.')) if self.parameterization: + params_str = ['{}'.format(p) for p in self.parameterization] if not str2bool(self.config['execution']['parameterize_dirs']): - param_dirs = [self._parameterization_dir(p) for p in - self.parameterization] - outputdir = op.join(outputdir, *param_dirs) - else: - outputdir = op.join(outputdir, *self.parameterization) + params_str = [self._parameterization_dir(p) for p in params_str] + outputdir = op.join(outputdir, *params_str) return op.abspath(op.join(outputdir, self.name)) def set_input(self, parameter, val): """ Set interface input value""" - logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), - parameter, - str(val))) + logger.debug('setting nodelevel(%s) input %s = %s', self.name, parameter, val) setattr(self.inputs, parameter, deepcopy(val)) def get_output(self, parameter): @@ -278,7 +274,7 @@ def run(self, updatehash=False): self._get_inputs() self._got_inputs = True outdir = self.output_dir() - logger.info("Executing node %s in dir: %s" % (self._id, outdir)) + logger.info("Executing node %s in dir: %s", self._id, outdir) if op.exists(outdir): logger.debug(os.listdir(outdir)) hash_info = self.hash_exists(updatehash=updatehash) @@ -301,17 +297,11 @@ def run(self, updatehash=False): len(glob(json_unfinished_pat)) == 0) if need_rerun: logger.debug("Rerunning node") - logger.debug(("updatehash = %s, " - "self.overwrite = %s, " - "self._interface.always_run = %s, " - "os.path.exists(%s) = %s, " - "hash_method = %s") % - (str(updatehash), - str(self.overwrite), - str(self._interface.always_run), - hashfile, - str(op.exists(hashfile)), - self.config['execution']['hash_method'].lower())) + logger.debug( + "updatehash = %s, self.overwrite = %s, self._interface.always_run = %s, " + "os.path.exists(%s) = %s, hash_method = %s", updatehash, self.overwrite, + self._interface.always_run, hashfile, op.exists(hashfile), + self.config['execution']['hash_method'].lower()) log_debug = config.get('logging', 'workflow_level') == 'DEBUG' if log_debug and not op.exists(hashfile): exp_hash_paths = glob(json_pat) @@ -319,7 +309,7 @@ def run(self, updatehash=False): split_out = split_filename(exp_hash_paths[0]) exp_hash_file_base = 
split_out[1]
                     exp_hash = exp_hash_file_base[len('_0x'):]
-                    logger.debug("Previous node hash = %s" % exp_hash)
+                    logger.debug("Previous node hash = %s", exp_hash)
                     try:
                         prev_inputs = load_json(exp_hash_paths[0])
                     except:
@@ -343,26 +333,26 @@ def run(self, updatehash=False):
                              self._interface.can_resume) and not
                          isinstance(self, MapNode))
             if rm_outdir:
-                logger.debug("Removing old %s and its contents" % outdir)
+                logger.debug("Removing old %s and its contents", outdir)
                 try:
                     rmtree(outdir)
                 except OSError as ex:
                     outdircont = os.listdir(outdir)
                     if ((ex.errno == errno.ENOTEMPTY) and (len(outdircont) == 0)):
-                        logger.warn(('An exception was raised trying to remove old %s, '
-                                     'but the path seems empty. Is it an NFS mount?. '
-                                     'Passing the exception.') % outdir)
+                        logger.warn(
+                            'An exception was raised trying to remove old %s, but the path '
+                            'seems empty. Is it an NFS mount? Passing the exception.', outdir)
                     elif ((ex.errno == errno.ENOTEMPTY) and (len(outdircont) != 0)):
-                        logger.debug(('Folder contents (%d items): '
-                                      '%s') % (len(outdircont), outdircont))
+                        logger.debug(
+                            'Folder contents (%d items): %s', len(outdircont), outdircont)
                         raise ex
                     else:
                         raise ex
             else:
-                logger.debug(("%s found and can_resume is True or Node is a "
-                              "MapNode - resuming execution") %
-                             hashfile_unfinished)
+                logger.debug(
+                    "%s found and can_resume is True or Node is a MapNode - resuming execution",
+                    hashfile_unfinished)
                 if isinstance(self, MapNode):
                     # remove old json files
                     for filename in glob(op.join(outdir, '_0x*.json')):
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index 1d0519a68b..0ee8442503 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -1060,12 +1060,12 @@ def make_output_dir(outdir):
     # this odd approach deals with concurrent directory creation
     try:
         if not os.path.exists(os.path.abspath(outdir)):
-            logger.debug("Creating %s" % outdir)
+            logger.debug("Creating %s", outdir)
             os.makedirs(outdir)
     except OSError:
-            logger.debug("Problem creating %s" % outdir)
+            logger.debug("Problem creating %s", outdir)
     if not os.path.exists(outdir):
-        raise OSError('Could not create %s'%outdir)
+        raise OSError('Could not create %s' % outdir)
     return outdir
diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py
index c87cb418a9..f27ffc4b7a 100644
--- a/nipype/pipeline/plugins/sge.py
+++ b/nipype/pipeline/plugins/sge.py
@@ -51,12 +51,10 @@ def __init__(self, job_num, job_queue_state, job_time, job_queue_name, job_slots
         self._qsub_command_line = qsub_command_line

     def __repr__(self):
-        return str(self._job_num).ljust(8) \
-            + str(self._job_queue_state).ljust(12) \
-            + str(self._job_slots).ljust(3) \
-            + time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(self._job_time)).ljust(20) \
-            + str(self._job_queue_name).ljust(8) \
-            + str(self._qsub_command_line)
+        return '{:<8d}{:12}{:<3d}{:20}{:8}{}'.format(
+            self._job_num, self._job_queue_state, self._job_slots,
+            time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(self._job_time)),
+            self._job_queue_name, self._qsub_command_line)

     def is_initializing(self):
         return self._job_queue_state == "initializing"
diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py
index 7653cf4cc5..f342d81bae 100644
--- a/nipype/utils/filemanip.py
+++ b/nipype/utils/filemanip.py
@@ -111,7 +111,7 @@ def encode_dict_py27(value):
         nels = len(value)
         for i, v in enumerate(value):
             venc = encode_dict_py27(v)
-            if venc.startswith("u'") or venc.startswith('u"'):
+            if venc.startswith(("u'", 'u"')):
                 venc = venc[1:]
             retval += venc
@@ -124,7 +124,7 @@ def encode_dict_py27(value): return retval retval = repr(value).decode() - if retval.startswith("u'") or retval.startswith('u"'): + if retval.startswith(("u'", 'u"')): retval = retval[1:] return retval From f1e19a5d428a1e160651be26a6915b015865b11f Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 21 Sep 2016 00:25:07 -0700 Subject: [PATCH 2/4] working on parameterization encoding --- nipype/pipeline/engine/nodes.py | 3 +-- nipype/pipeline/engine/utils.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index d047298ba6..347b02df20 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -209,8 +209,7 @@ def output_dir(self): if not str2bool(self.config['execution']['parameterize_dirs']): params_str = [self._parameterization_dir(p) for p in params_str] outputdir = op.join(outputdir, *params_str) - return op.abspath(op.join(outputdir, - self.name)) + return op.abspath(op.join(outputdir, self.name)) def set_input(self, parameter, val): """ Set interface input value""" diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 0ee8442503..9a1cda6b34 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -7,6 +7,7 @@ from __future__ import print_function, division, unicode_literals, absolute_import from builtins import str, open, map, next, zip, range +import sys from future import standard_library standard_library.install_aliases() from collections import defaultdict @@ -330,6 +331,9 @@ def _get_valid_pathstr(pathstr): Replaces: ',' -> '.' """ pathstr = pathstr.replace(os.sep, '..') + if sys.version_info[0] < 3: + # Remove those u'string' patterns + pathstr = re.sub(r'''([^\w])u['"]([\w\d -\.:;,]*)['"]''', r'\1\2', pathstr) pathstr = re.sub(r'''[][ (){}?:<>#!|"';]''', '', pathstr) pathstr = pathstr.replace(',', '.') return pathstr @@ -515,9 +519,11 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables, rootnode = Gc.nodes()[nodeidx] paramstr = '' for key, val in sorted(params.items()): - paramstr = '_'.join((paramstr, _get_valid_pathstr(key), - _get_valid_pathstr(str(val)))) + paramstr = '{}_{}_{}'.format( + paramstr, _get_valid_pathstr(key), _get_valid_pathstr(str(val))) rootnode.set_input(key, val) + + logger.debug('Parameterization: paramstr=%s', paramstr) levels = get_levels(Gc) for n in Gc.nodes(): """ From e3c1c3339a1dfa8877ceabcdf0b70d1b7e3c45ab Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 21 Sep 2016 09:56:53 -0700 Subject: [PATCH 3/4] cleanup of debugging messages --- nipype/interfaces/base.py | 6 +- nipype/pipeline/engine/nodes.py | 100 +++++++++++++--------------- nipype/pipeline/engine/utils.py | 9 ++- nipype/pipeline/engine/workflows.py | 53 +++++++-------- nipype/utils/filemanip.py | 24 +++++-- 5 files changed, 99 insertions(+), 93 deletions(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 0d09a3c1d0..f3d3f52ab3 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -37,7 +37,7 @@ from ..utils.provenance import write_provenance from ..utils.misc import is_container, trim, str2bool from ..utils.filemanip import (md5, hash_infile, FileNotFoundError, hash_timestamp, - split_filename, encode_dict) + split_filename, to_str) from .traits_extension import ( traits, Undefined, TraitDictObject, TraitListObject, TraitError, isdefined, File, Directory, DictStrStr, has_metadata) @@ -271,7 +271,7 @@ def _get_bunch_hash(self): # 
Sort the items of the dictionary, before hashing the string # representation so we get a predictable order of the # dictionary. - sorted_dict = encode_dict(sorted(dict_nofilename.items())) + sorted_dict = to_str(sorted(dict_nofilename.items())) return dict_withhash, md5(sorted_dict.encode()).hexdigest() def __pretty__(self, p, cycle): @@ -581,7 +581,7 @@ def get_hashval(self, hash_method=None): dict_withhash.append((name, self._get_sorteddict(val, True, hash_method=hash_method, hash_files=hash_files))) - return dict_withhash, md5(encode_dict(dict_nofilename).encode()).hexdigest() + return dict_withhash, md5(to_str(dict_nofilename).encode()).hexdigest() def _get_sorteddict(self, objekt, dictwithhash=False, hash_method=None, diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index 347b02df20..6391b2ac5a 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -42,7 +42,7 @@ copyfiles, fnames_presuffix, loadpkl, split_filename, load_json, savepkl, write_rst_header, write_rst_dict, - write_rst_list) + write_rst_list, to_str) from ...interfaces.base import (traits, InputMultiPath, CommandLine, Undefined, TraitedSpec, DynamicTraitedSpec, Bunch, InterfaceResult, md5, Interface, @@ -213,7 +213,8 @@ def output_dir(self): def set_input(self, parameter, val): """ Set interface input value""" - logger.debug('setting nodelevel(%s) input %s = %s', self.name, parameter, val) + logger.debug('setting nodelevel(%s) input %s = %s', + self.name, parameter, to_str(val)) setattr(self.inputs, parameter, deepcopy(val)) def get_output(self, parameter): @@ -238,18 +239,18 @@ def hash_exists(self, updatehash=False): hashed_inputs, hashvalue = self._get_hashval() outdir = self.output_dir() if op.exists(outdir): - logger.debug(os.listdir(outdir)) + logger.debug('Output dir: %s', to_str(os.listdir(outdir))) hashfiles = glob(op.join(outdir, '_0x*.json')) - logger.debug(hashfiles) + logger.debug('Found hashfiles: %s', to_str(hashfiles)) if len(hashfiles) > 1: logger.info(hashfiles) logger.info('Removing multiple hashfiles and forcing node to rerun') for hashfile in hashfiles: os.unlink(hashfile) hashfile = op.join(outdir, '_0x%s.json' % hashvalue) - logger.debug(hashfile) + logger.debug('Final hashfile: %s', hashfile) if updatehash and op.exists(outdir): - logger.debug("Updating hash: %s" % hashvalue) + logger.debug("Updating hash: %s", hashvalue) for file in glob(op.join(outdir, '_0x*.json')): os.remove(file) self._save_hashfile(hashfile, hashed_inputs) @@ -275,17 +276,17 @@ def run(self, updatehash=False): outdir = self.output_dir() logger.info("Executing node %s in dir: %s", self._id, outdir) if op.exists(outdir): - logger.debug(os.listdir(outdir)) + logger.debug('Output dir: %s', to_str(os.listdir(outdir))) hash_info = self.hash_exists(updatehash=updatehash) hash_exists, hashvalue, hashfile, hashed_inputs = hash_info - logger.debug(('updatehash, overwrite, always_run, hash_exists', - updatehash, self.overwrite, self._interface.always_run, - hash_exists)) + logger.debug( + 'updatehash=%s, overwrite=%s, always_run=%s, hash_exists=%s', + updatehash, self.overwrite, self._interface.always_run, hash_exists) if (not updatehash and (((self.overwrite is None and self._interface.always_run) or self.overwrite) or not hash_exists)): - logger.debug("Node hash: %s" % hashvalue) + logger.debug("Node hash: %s", hashvalue) # by rerunning we mean only nodes that did finish to run previously json_pat = op.join(outdir, '_0x*.json') @@ -295,8 +296,8 @@ def run(self, 
updatehash=False): len(glob(json_pat)) != 0 and len(glob(json_unfinished_pat)) == 0) if need_rerun: - logger.debug("Rerunning node") logger.debug( + "Rerunning node:\n" "updatehash = %s, self.overwrite = %s, self._interface.always_run = %s, " "os.path.exists(%s) = %s, hash_method = %s", updatehash, self.overwrite, self._interface.always_run, hashfile, op.exists(hashfile), @@ -371,15 +372,15 @@ def run(self, updatehash=False): self.write_report(report_type='postexec', cwd=outdir) else: if not op.exists(op.join(outdir, '_inputs.pklz')): - logger.debug('%s: creating inputs file' % self.name) + logger.debug('%s: creating inputs file', self.name) savepkl(op.join(outdir, '_inputs.pklz'), self.inputs.get_traitsfree()) if not op.exists(op.join(outdir, '_node.pklz')): - logger.debug('%s: creating node file' % self.name) + logger.debug('%s: creating node file', self.name) savepkl(op.join(outdir, '_node.pklz'), self) logger.debug("Hashfile exists. Skipping execution") self._run_interface(execute=False, updatehash=updatehash) - logger.debug('Finished running %s in dir: %s\n' % (self._id, outdir)) + logger.debug('Finished running %s in dir: %s\n', self._id, outdir) return self._result # Private functions @@ -424,10 +425,10 @@ def _save_hashfile(self, hashfile, hashed_inputs): with open(hashfile, 'wt') as fd: fd.writelines(str(hashed_inputs)) - logger.debug(('Unable to write a particular type to the json ' - 'file')) + logger.debug( + 'Unable to write a particular type to the json file') else: - logger.critical('Unable to open the file in write mode: %s' % + logger.critical('Unable to open the file in write mode: %s', hashfile) def _get_inputs(self): @@ -438,9 +439,9 @@ def _get_inputs(self): """ logger.debug('Setting node inputs') for key, info in list(self.input_source.items()): - logger.debug('input: %s' % key) + logger.debug('input: %s', key) results_file = info[0] - logger.debug('results file: %s' % results_file) + logger.debug('results file: %s', results_file) results = loadpkl(results_file) output_value = Undefined if isinstance(info[1], tuple): @@ -456,7 +457,7 @@ def _get_inputs(self): output_value = results.outputs.get()[output_name] except TypeError: output_value = results.outputs.dictcopy()[output_name] - logger.debug('output: %s' % output_name) + logger.debug('output: %s', output_name) try: self.set_input(key, deepcopy(output_value)) except traits.TraitError as e: @@ -487,7 +488,7 @@ def _save_results(self, result, cwd): basedir=cwd)) savepkl(resultsfile, result) - logger.debug('saved results in %s' % resultsfile) + logger.debug('saved results in %s', resultsfile) if result.outputs: result.outputs.set(**outputs) @@ -524,11 +525,11 @@ def _load_resultfile(self, cwd): except (traits.TraitError, AttributeError, ImportError) as err: if isinstance(err, (AttributeError, ImportError)): attribute_error = True - logger.debug(('attribute error: %s probably using ' - 'different trait pickled file') % str(err)) + logger.debug('attribute error: %s probably using ' + 'different trait pickled file', str(err)) else: - logger.debug(('some file does not exist. hence trait ' - 'cannot be set')) + logger.debug( + 'some file does not exist. 
hence trait cannot be set') else: if result.outputs: try: @@ -540,8 +541,8 @@ def _load_resultfile(self, cwd): relative=False, basedir=cwd)) except FileNotFoundError: - logger.debug(('conversion to full path results in ' - 'non existent file')) + logger.debug('conversion to full path results in ' + 'non existent file') aggregate = False pkl_file.close() logger.debug('Aggregate: %s', aggregate) @@ -640,8 +641,8 @@ def _strip_temp(self, files, wd): def _copyfiles_to_wd(self, outdir, execute, linksonly=False): """ copy files over and change the inputs""" if hasattr(self._interface, '_get_filecopy_info'): - logger.debug('copying files to wd [execute=%s, linksonly=%s]' % - (str(execute), str(linksonly))) + logger.debug('copying files to wd [execute=%s, linksonly=%s]', + str(execute), str(linksonly)) if execute and linksonly: olddir = outdir outdir = op.join(outdir, '_tempinput') @@ -689,7 +690,7 @@ def write_report(self, report_type=None, cwd=None): if not op.exists(report_dir): os.makedirs(report_dir) if report_type == 'preexec': - logger.debug('writing pre-exec report to %s' % report_file) + logger.debug('writing pre-exec report to %s', report_file) fp = open(report_file, 'wt') fp.writelines(write_rst_header('Node: %s' % get_print_name(self), level=0)) @@ -698,7 +699,7 @@ def write_report(self, report_type=None, cwd=None): fp.writelines(write_rst_header('Original Inputs', level=1)) fp.writelines(write_rst_dict(self.inputs.get())) if report_type == 'postexec': - logger.debug('writing post-exec report to %s' % report_file) + logger.debug('writing post-exec report to %s', report_file) fp = open(report_file, 'at') fp.writelines(write_rst_header('Execution Inputs', level=1)) fp.writelines(write_rst_dict(self.inputs.get())) @@ -854,7 +855,7 @@ def _add_join_item_fields(self): newfields = dict([(field, self._add_join_item_field(field, idx)) for field in self.joinfield]) # increment the join slot index - logger.debug("Added the %s join item fields %s." 
% (self, newfields)) + logger.debug("Added the %s join item fields %s.", self, newfields) self._next_slot_index += 1 return newfields @@ -900,10 +901,9 @@ def _override_join_traits(self, basetraits, fields): item_trait = trait.inner_traits[0] dyntraits.add_trait(name, item_trait) setattr(dyntraits, name, Undefined) - logger.debug("Converted the join node %s field %s" - " trait type from %s to %s" - % (self, name, trait.trait_type.info(), - item_trait.info())) + logger.debug( + "Converted the join node %s field %s trait type from %s to %s", + self, name, trait.trait_type.info(), item_trait.info()) else: dyntraits.add_trait(name, traits.Any) setattr(dyntraits, name, Undefined) @@ -931,8 +931,8 @@ def _collate_join_field_inputs(self): val = getattr(self._inputs, field) if isdefined(val): setattr(self._interface.inputs, field, val) - logger.debug("Collated %d inputs into the %s node join fields" - % (self._next_slot_index, self)) + logger.debug("Collated %d inputs into the %s node join fields", + self._next_slot_index, self) def _collate_input_value(self, field): """ @@ -1023,7 +1023,7 @@ def _create_dynamic_traits(self, basetraits, fields=None, nitems=None): fields = basetraits.copyable_trait_names() for name, spec in list(basetraits.items()): if name in fields and ((nitems is None) or (nitems > 1)): - logger.debug('adding multipath trait: %s' % name) + logger.debug('adding multipath trait: %s', name) if self.nested: output.add_trait(name, InputMultiPath(traits.Any())) else: @@ -1042,15 +1042,13 @@ def set_input(self, parameter, val): Priority goes to interface. """ - logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), - parameter, - str(val))) + logger.debug('setting nodelevel(%s) input %s = %s', + to_str(self), parameter, to_str(val)) self._set_mapnode_input(self.inputs, parameter, deepcopy(val)) def _set_mapnode_input(self, object, name, newvalue): - logger.debug('setting mapnode(%s) input: %s -> %s' % (str(self), - name, - str(newvalue))) + logger.debug('setting mapnode(%s) input: %s -> %s', + to_str(self), name, to_str(newvalue)) if name in self.iterfield: setattr(self._inputs, name, newvalue) else: @@ -1069,8 +1067,8 @@ def _get_hashval(self): name, InputMultiPath( self._interface.inputs.traits()[name].trait_type)) - logger.debug('setting hashinput %s-> %s' % - (name, getattr(self._inputs, name))) + logger.debug('setting hashinput %s-> %s', + name, getattr(self._inputs, name)) if self.nested: setattr(hashinputs, name, flatten(getattr(self._inputs, name))) else: @@ -1118,10 +1116,8 @@ def _make_nodes(self, cwd=None): fieldvals = flatten(filename_to_list(getattr(self.inputs, field))) else: fieldvals = filename_to_list(getattr(self.inputs, field)) - logger.debug('setting input %d %s %s' % (i, field, - fieldvals[i])) - setattr(node.inputs, field, - fieldvals[i]) + logger.debug('setting input %d %s %s', i, field, fieldvals[i]) + setattr(node.inputs, field, fieldvals[i]) node.config = self.config node.base_dir = op.join(cwd, 'mapflow') yield i, node diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 9a1cda6b34..b1e1a23493 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -30,7 +30,7 @@ import networkx as nx -from ...utils.filemanip import (fname_presuffix, FileNotFoundError, +from ...utils.filemanip import (fname_presuffix, FileNotFoundError, to_str, filename_to_list, get_related_files) from ...utils.misc import create_function_from_source, str2bool from ...interfaces.base import (CommandLine, isdefined, 
Undefined, @@ -330,10 +330,9 @@ def _get_valid_pathstr(pathstr): Removes: [][ (){}?:<>#!|"';] Replaces: ',' -> '.' """ + if not isinstance(pathstr, (str, bytes)): + pathstr = to_str(pathstr) pathstr = pathstr.replace(os.sep, '..') - if sys.version_info[0] < 3: - # Remove those u'string' patterns - pathstr = re.sub(r'''([^\w])u['"]([\w\d -\.:;,]*)['"]''', r'\1\2', pathstr) pathstr = re.sub(r'''[][ (){}?:<>#!|"';]''', '', pathstr) pathstr = pathstr.replace(',', '.') return pathstr @@ -520,7 +519,7 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables, paramstr = '' for key, val in sorted(params.items()): paramstr = '{}_{}_{}'.format( - paramstr, _get_valid_pathstr(key), _get_valid_pathstr(str(val))) + paramstr, _get_valid_pathstr(key), _get_valid_pathstr(val)) rootnode.set_input(key, val) logger.debug('Parameterization: paramstr=%s', paramstr) diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index 9eca5f7235..607fc6ac1c 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -48,7 +48,7 @@ copyfiles, fnames_presuffix, loadpkl, split_filename, load_json, savepkl, write_rst_header, write_rst_dict, - write_rst_list) + write_rst_list, to_str) from .utils import (generate_expanded_graph, modify_paths, export_graph, make_output_dir, write_workflow_prov, clean_working_directory, format_dot, topological_sort, @@ -238,13 +238,13 @@ def connect(self, *args, **kwargs): for srcnode, destnode, connects in connection_list: edge_data = self._graph.get_edge_data(srcnode, destnode, None) if edge_data: - logger.debug('(%s, %s): Edge data exists: %s' - % (srcnode, destnode, str(edge_data))) + logger.debug('(%s, %s): Edge data exists: %s', srcnode, destnode, + to_str(edge_data)) for data in connects: if data not in edge_data['connect']: edge_data['connect'].append(data) if disconnect: - logger.debug('Removing connection: %s' % str(data)) + logger.debug('Removing connection: %s', to_str(data)) edge_data['connect'].remove(data) if edge_data['connect']: self._graph.add_edges_from([(srcnode, @@ -252,16 +252,14 @@ def connect(self, *args, **kwargs): edge_data)]) else: # pass - logger.debug('Removing connection: %s->%s' % (srcnode, - destnode)) + logger.debug('Removing connection: %s->%s', srcnode, destnode) self._graph.remove_edges_from([(srcnode, destnode)]) elif not disconnect: - logger.debug('(%s, %s): No edge data' % (srcnode, destnode)) + logger.debug('(%s, %s): No edge data', srcnode, destnode) self._graph.add_edges_from([(srcnode, destnode, {'connect': connects})]) edge_data = self._graph.get_edge_data(srcnode, destnode) - logger.debug('(%s, %s): new edge data: %s' % (srcnode, destnode, - str(edge_data))) + logger.debug('(%s, %s): new edge data: %s', srcnode, destnode, to_str(edge_data)) def disconnect(self, *args): """Disconnect nodes @@ -276,7 +274,7 @@ def disconnect(self, *args): 'of connection tuples (%d args given)' % len(args)) for srcnode, dstnode, conn in connection_list: - logger.debug('disconnect(): %s->%s %s' % (srcnode, dstnode, conn)) + logger.debug('disconnect(): %s->%s %s', srcnode, dstnode, to_str(conn)) if self in [srcnode, dstnode]: raise IOError( 'Workflow connect cannot contain itself as node: src[%s] ' @@ -296,10 +294,10 @@ def disconnect(self, *args): idx = ed_conns.index(edge) remove.append((edge[0], edge[1])) - logger.debug('disconnect(): remove list %s' % remove) + logger.debug('disconnect(): remove list %s', to_str(remove)) for el in remove: edge_data['connect'].remove(el) - 
logger.debug('disconnect(): removed connection %s' % str(el))
+                logger.debug('disconnect(): removed connection %s', to_str(el))

             if not edge_data['connect']:
                 self._graph.remove_edge(srcnode, dstnode)
@@ -330,8 +328,7 @@ def add_nodes(self, nodes):
             return
         for node in newnodes:
             if not issubclass(node.__class__, EngineBase):
-                raise Exception('Node %s must be a subclass of EngineBase' %
-                                str(node))
+                raise Exception('Node %s must be a subclass of EngineBase' % node)
         self._check_nodes(newnodes)
         for node in newnodes:
             if node._hierarchy is None:
@@ -566,7 +563,7 @@ def run(self, plugin=None, plugin_args=None, updatehash=False):
             crash_dir = self.config['crashdump_dir']
             self.config['execution']['crashdump_dir'] = crash_dir
             del self.config['crashdump_dir']
-        logger.info(str(sorted(self.config)))
+        logger.info('Workflow %s settings: %s', self.name, to_str(sorted(self.config)))
         self._set_needed_outputs(flatgraph)
         execgraph = generate_expanded_graph(deepcopy(flatgraph))
         for index, node in enumerate(execgraph.nodes()):
@@ -794,7 +791,7 @@ def _set_node_input(self, node, param, source, sourceinfo):
             newval = dict(val)
         if isinstance(val, TraitListObject):
             newval = val[:]
-        logger.debug('setting node input: %s->%s', param, str(newval))
+        logger.debug('setting node input: %s->%s', param, to_str(newval))
         node.set_input(param, deepcopy(newval))

     def _get_all_nodes(self):
@@ -844,29 +841,29 @@ def _generate_flatgraph(self):
                             '(DAG)') % self.name)
         nodes = nx.topological_sort(self._graph)
         for node in nodes:
-            logger.debug('processing node: %s' % node)
+            logger.debug('processing node: %s', node)
             if isinstance(node, Workflow):
                 nodes2remove.append(node)
                 # use in_edges instead of in_edges_iter to allow
                 # disconnections to take place properly. otherwise, the
                 # edge dict is modified.
for u, _, d in self._graph.in_edges(nbunch=node, data=True): - logger.debug('in: connections-> %s' % str(d['connect'])) + logger.debug('in: connections-> %s', to_str(d['connect'])) for cd in deepcopy(d['connect']): - logger.debug("in: %s" % str(cd)) + logger.debug("in: %s", to_str(cd)) dstnode = node._get_parameter_node(cd[1], subtype='in') srcnode = u srcout = cd[0] dstin = cd[1].split('.')[-1] - logger.debug('in edges: %s %s %s %s' % - (srcnode, srcout, dstnode, dstin)) + logger.debug('in edges: %s %s %s %s', srcnode, srcout, + dstnode, dstin) self.disconnect(u, cd[0], node, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # do not use out_edges_iter for reasons stated in in_edges for _, v, d in self._graph.out_edges(nbunch=node, data=True): - logger.debug('out: connections-> %s' % str(d['connect'])) + logger.debug('out: connections-> %s', to_str(d['connect'])) for cd in deepcopy(d['connect']): - logger.debug("out: %s" % str(cd)) + logger.debug("out: %s", to_str(cd)) dstnode = v if isinstance(cd[0], tuple): parameter = cd[0][0] @@ -881,10 +878,8 @@ def _generate_flatgraph(self): else: srcout = parameter.split('.')[-1] dstin = cd[1] - logger.debug('out edges: %s %s %s %s' % (srcnode, - srcout, - dstnode, - dstin)) + logger.debug('out edges: %s %s %s %s', + srcnode, srcout, dstnode, dstin) self.disconnect(node, cd[0], v, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # expand the workflow node @@ -963,7 +958,7 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, subnode)['connect']: dotlist.append('%s -> %s;' % (nodename, subnodename)) - logger.debug('connection: ' + dotlist[-1]) + logger.debug('connection: %s', dotlist[-1]) # add between workflow connections for u, v, d in self._graph.edges_iter(data=True): uname = '.'.join(hierarchy + [u.fullname]) @@ -987,5 +982,5 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, if uname1.split('.')[:-1] != vname1.split('.')[:-1]: dotlist.append('%s -> %s;' % (uname1.replace('.', '_'), vname1.replace('.', '_'))) - logger.debug('cross connection: ' + dotlist[-1]) + logger.debug('cross connection: %s', dotlist[-1]) return ('\n' + prefix).join(dotlist) diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index f342d81bae..7cf81c0649 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -89,7 +89,7 @@ def split_filename(fname): return pth, fname, ext -def encode_dict(value): +def to_str(value): """ Manipulates ordered dicts before they are hashed (Py2/3 compat.) 
@@ -97,20 +97,36 @@ def encode_dict_py27(value):
     if sys.version_info[0] > 2:
         retval = str(value)
     else:
-        retval = encode_dict_py27(value)
+        retval = to_str_py27(value)
     return retval


-def encode_dict_py27(value):
+def to_str_py27(value):
     """
     Encode dictionary for python 2
     """
+    if isinstance(value, dict):
+        entry = '{}: {}'.format
+        retval = '{'
+        for key, val in list(value.items()):
+            if len(retval) > 1:
+                retval += ', '
+            kenc = repr(key)
+            if kenc.startswith(("u'", 'u"')):
+                kenc = kenc[1:]
+            venc = to_str_py27(val)
+            if venc.startswith(("u'", 'u"')):
+                venc = venc[1:]
+            retval += entry(kenc, venc)
+        retval += '}'
+        return retval
+
     istuple = isinstance(value, tuple)
     if isinstance(value, (tuple, list)):
         retval = '(' if istuple else '['

         nels = len(value)
         for i, v in enumerate(value):
-            venc = encode_dict_py27(v)
+            venc = to_str_py27(v)
             if venc.startswith(("u'", 'u"')):
                 venc = venc[1:]
             retval += venc

From 648e80b4bfc58aa32cf8e4b4ef16584fec670ee3 Mon Sep 17 00:00:00 2001
From: oesteban
Date: Thu, 22 Sep 2016 10:58:54 -0700
Subject: [PATCH 4/4] update CHANGES

---
 CHANGES | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGES b/CHANGES
index 92b6d4d762..8dc4331a8d 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,6 +1,7 @@
 Upcoming release 0.13
 =====================

+* FIX: Minor bugfixes related to unicode literals (https://github.com/nipy/nipype/pull/1656)
 * ENH: Add a DVARS calculation interface (https://github.com/nipy/nipype/pull/1606)
 * ENH: New interface to b0calc of FSL-POSSUM (https://github.com/nipy/nipype/pull/1399)
 * ENH: Convenient load/save of interface inputs (https://github.com/nipy/nipype/pull/1591)
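
Note for reviewers: a minimal sketch of the normalization these patches implement. Under Python 2, repr() of a unicode string yields "u'...'", so the rendered inputs that feed parameterized directory names and input hashes differed from Python 3's output; to_str/to_str_py27 strip those prefixes before hashing or path construction. The helper below is a hypothetical illustration written to run under Python 3 (names and sample values are made up, not code from the patches):

def _strip_u(rendered):
    # Under Python 2, repr(u'x') == "u'x'"; drop the prefix so the rendering
    # matches what Python 3's str()/repr() would produce for the same value.
    return rendered[1:] if rendered.startswith(("u'", 'u"')) else rendered

def to_str_demo(value):
    """Render dicts/lists/tuples without unicode-literal prefixes."""
    if isinstance(value, dict):
        return '{' + ', '.join('{}: {}'.format(_strip_u(repr(k)), to_str_demo(v))
                               for k, v in value.items()) + '}'
    if isinstance(value, (tuple, list)):
        body = ', '.join(to_str_demo(v) for v in value)
        return '(' + body + ')' if isinstance(value, tuple) else '[' + body + ']'
    return _strip_u(repr(value))

print(to_str_demo([('subject_id', 'sub-01'), ('fwhm', 5.0)]))
# prints: [('subject_id', 'sub-01'), ('fwhm', 5.0)] -- identical on Python 2 and 3

Because the rendered string is exactly what gets hashed (md5(to_str(...).encode()) in PATCH 3), keeping it byte-identical across interpreter versions is what keeps node working directories and hashfiles stable.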