
Commit f34b6d6

Merge pull request #3 from dclark87/master
Fix divide by 0 bug
2 parents f107efd + e2f51f6 commit f34b6d6

1 file changed: 38 additions, 20 deletions


nipype/interfaces/io.py

Lines changed: 38 additions & 20 deletions
@@ -144,7 +144,6 @@ def __init__(self, filename):
 
         # Import packages
         import threading
-        import os
 
         # Initialize data attributes
         self._filename = filename
@@ -162,7 +161,10 @@ def __call__(self, bytes_amount):
         # With the lock on, print upload status
         with self._lock:
             self._seen_so_far += bytes_amount
-            percentage = (self._seen_so_far / self._size) * 100
+            if self._size != 0:
+                percentage = (self._seen_so_far / self._size) * 100
+            else:
+                percentage = 0
             progress_str = '%d / %d (%.2f%%)\r'\
                 % (self._seen_so_far, self._size, percentage)
 
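Note on the change above: self._size is the upload's total byte count, so a zero-byte file previously raised ZeroDivisionError inside the progress callback; with the guard it simply reports 0%. A minimal standalone sketch of such a callback with the guard applied (the class name, the os.path.getsize initialisation, and the stdout write are assumptions modelled on boto3's progress-callback example, not taken verbatim from io.py):

    import os
    import sys
    import threading

    class UploadProgress(object):
        # Hypothetical boto-style upload callback; only the zero-size
        # guard mirrors the patched code exactly.
        def __init__(self, filename):
            self._filename = filename
            self._size = float(os.path.getsize(filename))  # assumed source of _size
            self._seen_so_far = 0
            self._lock = threading.Lock()

        def __call__(self, bytes_amount):
            # With the lock on, print upload status
            with self._lock:
                self._seen_so_far += bytes_amount
                if self._size != 0:
                    percentage = (self._seen_so_far / self._size) * 100
                else:
                    percentage = 0
                progress_str = '%d / %d (%.2f%%)\r' \
                    % (self._seen_so_far, self._size, percentage)
                sys.stdout.write(progress_str)  # assumed; the diff only shows progress_str
                sys.stdout.flush()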
@@ -381,15 +383,17 @@ def _check_s3_base_dir(self):
         S3 bucket path
         '''
 
-        # Import packages
-        import os
-        import sys
-
         # Init variables
         s3_str = 's3://'
         sep = os.path.sep
         base_directory = self.inputs.base_directory
 
+        # Explicitly lower-case the "s3"
+        if base_directory.lower().startswith(s3_str):
+            base_dir_sp = base_directory.split('/')
+            base_dir_sp[0] = base_dir_sp[0].lower()
+            base_directory = '/'.join(base_dir_sp)
+
         # Check if 's3://' in base dir
         if base_directory.startswith(s3_str):
             try:
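The added block normalizes only the URI scheme, so a base_directory written as 'S3://...' is still detected while the bucket and key components keep their original case. A small hypothetical helper showing the same logic in isolation; _upload_to_s3 further down applies the identical normalization to its dst argument:

    def lowercase_s3_scheme(path, s3_str='s3://'):
        # Hypothetical helper mirroring the diff: lower-case the leading
        # "s3://" scheme but leave the bucket/key components untouched.
        if path.lower().startswith(s3_str):
            path_sp = path.split('/')
            path_sp[0] = path_sp[0].lower()
            path = '/'.join(path_sp)
        return path

    # Only the scheme changes; non-S3 paths pass through unchanged
    assert lowercase_s3_scheme('S3://MyBucket/Outputs') == 's3://MyBucket/Outputs'
    assert lowercase_s3_scheme('/local/output/dir') == '/local/output/dir'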
@@ -419,9 +423,8 @@ def _return_aws_keys(self, creds_path):
         Parameters
         ----------
         creds_path : string (filepath)
-            path to the csv file with 'AWSAccessKeyId=' followed by access
-            key in the first row and 'AWSSecretAccessKey=' followed by
-            secret access key in the second row
+            path to the csv file downloaded from AWS; can either be root
+            or user credentials
 
         Returns
         -------
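For reference, the two credential layouts the reworded docstring refers to look roughly like the samples below (AWS's documented example key values used as placeholders; exact headers and quoting can vary by console version). The parsing change in the next hunk distinguishes them by their first line:

    Root-account download (rootkey.csv style):
        AWSAccessKeyId=AKIAIOSFODNN7EXAMPLE
        AWSSecretAccessKey=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY

    IAM-user download (credentials.csv style):
        User Name,Access Key Id,Secret Access Key
        "some-user",AKIAIOSFODNN7EXAMPLE,wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY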
@@ -431,19 +434,28 @@ def _return_aws_keys(self, creds_path):
         string of the AWS secret access key
         '''
 
-        # Import packages
-        import csv
-
         # Init variables
-        csv_reader = csv.reader(open(creds_path, 'r'))
-
-        # Grab csv rows
-        row1 = csv_reader.next()[0]
-        row2 = csv_reader.next()[0]
+        with open(creds_path, 'r') as creds_in:
+            # Grab csv rows
+            row1 = creds_in.readline()
+            row2 = creds_in.readline()
+
+        # Are they root or user keys
+        if 'User Name' in row1:
+            # And split out for keys
+            aws_access_key_id = row2.split(',')[1]
+            aws_secret_access_key = row2.split(',')[2]
+        elif 'AWSAccessKeyId' in row1:
+            # And split out for keys
+            aws_access_key_id = row1.split('=')[1]
+            aws_secret_access_key = row2.split('=')[1]
+        else:
+            err_msg = 'Credentials file not recognized, check file is correct'
+            raise Exception(err_msg)
 
-        # And split out for keys
-        aws_access_key_id = row1.split('=')[1]
-        aws_secret_access_key = row2.split('=')[1]
+        # Strip any carriage return/line feeds
+        aws_access_key_id = aws_access_key_id.replace('\r', '').replace('\n', '')
+        aws_secret_access_key = aws_secret_access_key.replace('\r', '').replace('\n', '')
 
         # Return keys
         return aws_access_key_id, aws_secret_access_key
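To exercise both layouts against the new code path, a throwaway check could look like the sketch below (write_tmp_csv is a hypothetical helper; this assumes a nipype build containing this patch is importable and reuses the placeholder keys from the samples above):

    import tempfile

    from nipype.interfaces.io import DataSink

    def write_tmp_csv(text):
        # Hypothetical test helper: dump sample credentials to a temp file
        tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False)
        tmp.write(text)
        tmp.close()
        return tmp.name

    # Root-account layout: keys are split out of the '=' assignments
    root_csv = write_tmp_csv('AWSAccessKeyId=AKIAIOSFODNN7EXAMPLE\n'
                             'AWSSecretAccessKey=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\n')

    # IAM-user layout: keys come from columns 1 and 2 of the second row
    user_csv = write_tmp_csv('User Name,Access Key Id,Secret Access Key\n'
                             '"some-user",AKIAIOSFODNN7EXAMPLE,wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\n')

    ds = DataSink()
    # Both layouts should yield the same stripped (access key, secret key) pair
    assert ds._return_aws_keys(root_csv) == ds._return_aws_keys(user_csv)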
@@ -556,6 +568,12 @@ def _upload_to_s3(self, src, dst):
         s3_str = 's3://'
         s3_prefix = os.path.join(s3_str, bucket.name)
 
+        # Explicitly lower-case the "s3"
+        if dst.lower().startswith(s3_str):
+            dst_sp = dst.split('/')
+            dst_sp[0] = dst_sp[0].lower()
+            dst = '/'.join(dst_sp)
+
         # If src is a directory, collect files (this assumes dst is a dir too)
         if os.path.isdir(src):
             src_files = []
