@@ -42,7 +42,7 @@
                                 copyfiles, fnames_presuffix, loadpkl,
                                 split_filename, load_json, savepkl,
                                 write_rst_header, write_rst_dict,
-                                write_rst_list)
+                                write_rst_list, to_str)
 from ...interfaces.base import (traits, InputMultiPath, CommandLine,
                                 Undefined, TraitedSpec, DynamicTraitedSpec,
                                 Bunch, InterfaceResult, md5, Interface,
@@ -213,7 +213,8 @@ def output_dir(self):

     def set_input(self, parameter, val):
         """ Set interface input value"""
-        logger.debug('setting nodelevel(%s) input %s = %s', self.name, parameter, val)
+        logger.debug('setting nodelevel(%s) input %s = %s',
+                     self.name, parameter, to_str(val))
         setattr(self.inputs, parameter, deepcopy(val))

     def get_output(self, parameter):
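The pattern in the hunk above repeats throughout this commit: instead of building the message eagerly with `%`, values are passed as extra arguments to `logger.debug`, so the logging module only interpolates them when a DEBUG handler will actually emit the record, and bulky values are routed through the newly imported `to_str` helper, presumably for a consistent text rendering of lists and nodes. A standalone sketch of why deferred formatting matters (not part of the patch; the logger name is arbitrary):

    import logging

    logging.basicConfig(level=logging.INFO)  # DEBUG records are filtered out
    log = logging.getLogger('demo')

    big_list = list(range(100000))
    log.debug('inputs: %s' % big_list)  # eager: renders the huge string, then discards it
    log.debug('inputs: %s', big_list)   # deferred: formatting is skipped entirely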
@@ -238,18 +239,18 @@ def hash_exists(self, updatehash=False):
         hashed_inputs, hashvalue = self._get_hashval()
         outdir = self.output_dir()
         if op.exists(outdir):
-            logger.debug(os.listdir(outdir))
+            logger.debug('Output dir: %s', to_str(os.listdir(outdir)))
         hashfiles = glob(op.join(outdir, '_0x*.json'))
-        logger.debug(hashfiles)
+        logger.debug('Found hashfiles: %s', to_str(hashfiles))
         if len(hashfiles) > 1:
             logger.info(hashfiles)
             logger.info('Removing multiple hashfiles and forcing node to rerun')
             for hashfile in hashfiles:
                 os.unlink(hashfile)
         hashfile = op.join(outdir, '_0x%s.json' % hashvalue)
-        logger.debug(hashfile)
+        logger.debug('Final hashfile: %s', hashfile)
         if updatehash and op.exists(outdir):
-            logger.debug("Updating hash: %s" % hashvalue)
+            logger.debug("Updating hash: %s", hashvalue)
             for file in glob(op.join(outdir, '_0x*.json')):
                 os.remove(file)
             self._save_hashfile(hashfile, hashed_inputs)
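As the hunk above shows, a node's cached state lives in a JSON file whose name embeds the hash of its inputs (the digest returned by `_get_hashval`; `md5` is among the imports at the top of the file), and duplicated hashfiles force a rerun. A sketch of the naming scheme, with an illustrative digest and a hypothetical output directory:

    import os.path as op

    hashvalue = 'd41d8cd98f00b204e9800998ecf8427e'  # digest of the node's inputs (illustrative)
    outdir = '/scratch/wf/mynode'                   # hypothetical node output dir
    hashfile = op.join(outdir, '_0x%s.json' % hashvalue)
    print(hashfile)  # /scratch/wf/mynode/_0xd41d8cd98f00b204e9800998ecf8427e.json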
@@ -275,17 +276,17 @@ def run(self, updatehash=False):
         outdir = self.output_dir()
         logger.info("Executing node %s in dir: %s", self._id, outdir)
         if op.exists(outdir):
-            logger.debug(os.listdir(outdir))
+            logger.debug('Output dir: %s', to_str(os.listdir(outdir)))
         hash_info = self.hash_exists(updatehash=updatehash)
         hash_exists, hashvalue, hashfile, hashed_inputs = hash_info
-        logger.debug(('updatehash, overwrite, always_run, hash_exists',
-                      updatehash, self.overwrite, self._interface.always_run,
-                      hash_exists))
+        logger.debug(
+            'updatehash=%s, overwrite=%s, always_run=%s, hash_exists=%s',
+            updatehash, self.overwrite, self._interface.always_run, hash_exists)
         if (not updatehash and (((self.overwrite is None and
                                   self._interface.always_run) or
                                  self.overwrite) or not
                                 hash_exists)):
-            logger.debug("Node hash: %s" % hashvalue)
+            logger.debug("Node hash: %s", hashvalue)

             # by rerunning we mean only nodes that did finish to run previously
             json_pat = op.join(outdir, '_0x*.json')
@@ -295,8 +296,8 @@ def run(self, updatehash=False):
                       len(glob(json_pat)) != 0 and
                       len(glob(json_unfinished_pat)) == 0)
         if need_rerun:
-            logger.debug("Rerunning node")
             logger.debug(
+                "Rerunning node:\n"
                 "updatehash = %s, self.overwrite = %s, self._interface.always_run = %s, "
                 "os.path.exists(%s) = %s, hash_method = %s", updatehash, self.overwrite,
                 self._interface.always_run, hashfile, op.exists(hashfile),
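Folding the bare "Rerunning node" message into the detailed call that follows it leans on Python concatenating adjacent string literals at compile time, so the flag dump and its explanation land in a single log record. A self-contained illustration (variable values are placeholders, not the node's real state):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger('demo')

    updatehash, overwrite = False, None  # placeholder state
    # Adjacent literals merge into one format string before the call runs.
    log.debug("Rerunning node:\n"
              "updatehash = %s, overwrite = %s", updatehash, overwrite)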
@@ -371,15 +372,15 @@ def run(self, updatehash=False):
                 self.write_report(report_type='postexec', cwd=outdir)
         else:
             if not op.exists(op.join(outdir, '_inputs.pklz')):
-                logger.debug('%s: creating inputs file' % self.name)
+                logger.debug('%s: creating inputs file', self.name)
                 savepkl(op.join(outdir, '_inputs.pklz'),
                         self.inputs.get_traitsfree())
             if not op.exists(op.join(outdir, '_node.pklz')):
-                logger.debug('%s: creating node file' % self.name)
+                logger.debug('%s: creating node file', self.name)
                 savepkl(op.join(outdir, '_node.pklz'), self)
             logger.debug("Hashfile exists. Skipping execution")
             self._run_interface(execute=False, updatehash=updatehash)
-        logger.debug('Finished running %s in dir: %s\n' % (self._id, outdir))
+        logger.debug('Finished running %s in dir: %s\n', self._id, outdir)
         return self._result

     # Private functions
@@ -424,10 +425,10 @@ def _save_hashfile(self, hashfile, hashed_inputs):
                 with open(hashfile, 'wt') as fd:
                     fd.writelines(str(hashed_inputs))

-                logger.debug(('Unable to write a particular type to the json '
-                              'file'))
+                logger.debug(
+                    'Unable to write a particular type to the json file')
             else:
-                logger.critical('Unable to open the file in write mode: %s' %
+                logger.critical('Unable to open the file in write mode: %s',
                                 hashfile)

     def _get_inputs(self):
@@ -438,9 +439,9 @@ def _get_inputs(self):
         """
         logger.debug('Setting node inputs')
         for key, info in list(self.input_source.items()):
-            logger.debug('input: %s' % key)
+            logger.debug('input: %s', key)
             results_file = info[0]
-            logger.debug('results file: %s' % results_file)
+            logger.debug('results file: %s', results_file)
             results = loadpkl(results_file)
             output_value = Undefined
             if isinstance(info[1], tuple):
@@ -456,7 +457,7 @@ def _get_inputs(self):
                     output_value = results.outputs.get()[output_name]
                 except TypeError:
                     output_value = results.outputs.dictcopy()[output_name]
-                logger.debug('output: %s' % output_name)
+                logger.debug('output: %s', output_name)
                 try:
                     self.set_input(key, deepcopy(output_value))
                 except traits.TraitError as e:
@@ -487,7 +488,7 @@ def _save_results(self, result, cwd):
                                               basedir=cwd))

         savepkl(resultsfile, result)
-        logger.debug('saved results in %s' % resultsfile)
+        logger.debug('saved results in %s', resultsfile)

         if result.outputs:
             result.outputs.set(**outputs)
@@ -524,11 +525,11 @@ def _load_resultfile(self, cwd):
             except (traits.TraitError, AttributeError, ImportError) as err:
                 if isinstance(err, (AttributeError, ImportError)):
                     attribute_error = True
-                    logger.debug(('attribute error: %s probably using '
-                                  'different trait pickled file') % str(err))
+                    logger.debug('attribute error: %s probably using '
+                                 'different trait pickled file', str(err))
                 else:
-                    logger.debug(('some file does not exist. hence trait '
-                                  'cannot be set'))
+                    logger.debug(
+                        'some file does not exist. hence trait cannot be set')
             else:
                 if result.outputs:
                     try:
@@ -540,8 +541,8 @@ def _load_resultfile(self, cwd):
                                 relative=False,
                                 basedir=cwd))
                     except FileNotFoundError:
-                        logger.debug(('conversion to full path results in '
-                                      'non existent file'))
+                        logger.debug('conversion to full path results in '
+                                     'non existent file')
                 aggregate = False
             pkl_file.close()
         logger.debug('Aggregate: %s', aggregate)
@@ -640,8 +641,8 @@ def _strip_temp(self, files, wd):
     def _copyfiles_to_wd(self, outdir, execute, linksonly=False):
         """ copy files over and change the inputs"""
         if hasattr(self._interface, '_get_filecopy_info'):
-            logger.debug('copying files to wd [execute=%s, linksonly=%s]' %
-                         (str(execute), str(linksonly)))
+            logger.debug('copying files to wd [execute=%s, linksonly=%s]',
+                         str(execute), str(linksonly))
             if execute and linksonly:
                 olddir = outdir
                 outdir = op.join(outdir, '_tempinput')
@@ -689,7 +690,7 @@ def write_report(self, report_type=None, cwd=None):
         if not op.exists(report_dir):
             os.makedirs(report_dir)
         if report_type == 'preexec':
-            logger.debug('writing pre-exec report to %s' % report_file)
+            logger.debug('writing pre-exec report to %s', report_file)
             fp = open(report_file, 'wt')
             fp.writelines(write_rst_header('Node: %s' % get_print_name(self),
                                            level=0))
@@ -698,7 +699,7 @@ def write_report(self, report_type=None, cwd=None):
             fp.writelines(write_rst_header('Original Inputs', level=1))
             fp.writelines(write_rst_dict(self.inputs.get()))
         if report_type == 'postexec':
-            logger.debug('writing post-exec report to %s' % report_file)
+            logger.debug('writing post-exec report to %s', report_file)
             fp = open(report_file, 'at')
             fp.writelines(write_rst_header('Execution Inputs', level=1))
             fp.writelines(write_rst_dict(self.inputs.get()))
@@ -854,7 +855,7 @@ def _add_join_item_fields(self):
         newfields = dict([(field, self._add_join_item_field(field, idx))
                           for field in self.joinfield])
         # increment the join slot index
-        logger.debug("Added the %s join item fields %s." % (self, newfields))
+        logger.debug("Added the %s join item fields %s.", self, newfields)
         self._next_slot_index += 1
         return newfields
@@ -900,10 +901,9 @@ def _override_join_traits(self, basetraits, fields):
                 item_trait = trait.inner_traits[0]
                 dyntraits.add_trait(name, item_trait)
                 setattr(dyntraits, name, Undefined)
-                logger.debug("Converted the join node %s field %s"
-                             " trait type from %s to %s"
-                             % (self, name, trait.trait_type.info(),
-                                item_trait.info()))
+                logger.debug(
+                    "Converted the join node %s field %s trait type from %s to %s",
+                    self, name, trait.trait_type.info(), item_trait.info())
             else:
                 dyntraits.add_trait(name, traits.Any)
                 setattr(dyntraits, name, Undefined)
@@ -931,8 +931,8 @@ def _collate_join_field_inputs(self):
             val = getattr(self._inputs, field)
             if isdefined(val):
                 setattr(self._interface.inputs, field, val)
-        logger.debug("Collated %d inputs into the %s node join fields"
-                     % (self._next_slot_index, self))
+        logger.debug("Collated %d inputs into the %s node join fields",
+                     self._next_slot_index, self)

     def _collate_input_value(self, field):
         """
@@ -1023,7 +1023,7 @@ def _create_dynamic_traits(self, basetraits, fields=None, nitems=None):
             fields = basetraits.copyable_trait_names()
         for name, spec in list(basetraits.items()):
             if name in fields and ((nitems is None) or (nitems > 1)):
-                logger.debug('adding multipath trait: %s' % name)
+                logger.debug('adding multipath trait: %s', name)
                 if self.nested:
                     output.add_trait(name, InputMultiPath(traits.Any()))
                 else:
@@ -1042,15 +1042,13 @@ def set_input(self, parameter, val):

         Priority goes to interface.
         """
-        logger.debug('setting nodelevel(%s) input %s = %s' % (str(self),
-                                                              parameter,
-                                                              str(val)))
+        logger.debug('setting nodelevel(%s) input %s = %s',
+                     to_str(self), parameter, to_str(val))
         self._set_mapnode_input(self.inputs, parameter, deepcopy(val))

     def _set_mapnode_input(self, object, name, newvalue):
-        logger.debug('setting mapnode(%s) input: %s -> %s' % (str(self),
-                                                              name,
-                                                              str(newvalue)))
+        logger.debug('setting mapnode(%s) input: %s -> %s',
+                     to_str(self), name, to_str(newvalue))
         if name in self.iterfield:
             setattr(self._inputs, name, newvalue)
         else:
@@ -1069,8 +1067,8 @@ def _get_hashval(self):
                     name,
                     InputMultiPath(
                         self._interface.inputs.traits()[name].trait_type))
-                logger.debug('setting hashinput %s-> %s' %
-                             (name, getattr(self._inputs, name)))
+                logger.debug('setting hashinput %s-> %s',
+                             name, getattr(self._inputs, name))
                 if self.nested:
                     setattr(hashinputs, name, flatten(getattr(self._inputs, name)))
                 else:
@@ -1118,10 +1116,8 @@ def _make_nodes(self, cwd=None):
                 fieldvals = flatten(filename_to_list(getattr(self.inputs, field)))
             else:
                 fieldvals = filename_to_list(getattr(self.inputs, field))
-            logger.debug('setting input %d %s %s' % (i, field,
-                                                     fieldvals[i]))
-            setattr(node.inputs, field,
-                    fieldvals[i])
+            logger.debug('setting input %d %s %s', i, field, fieldvals[i])
+            setattr(node.inputs, field, fieldvals[i])
             node.config = self.config
             node.base_dir = op.join(cwd, 'mapflow')
             yield i, node
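A closing note on the new helper: values that used to be dumped through `str()` or a raw `%` now pass through `to_str`, added to the import block in the first hunk (the source module's name is cut off above the visible context). For tracing these messages outside nipype, a rough stand-in with the assumed behavior is:

    def to_str(value):
        # Hypothetical stand-in for nipype's to_str helper: assumed to return
        # a plain-text rendering of any value (lists, nodes, traits) so log
        # messages stay uniform; the real implementation may differ.
        return str(value)

    print(to_str(['_0xabc.json', '_0xdef.json']))  # ['_0xabc.json', '_0xdef.json']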