1 change: 0 additions & 1 deletion FastSurferCNN/data_loader/conform.py
@@ -623,7 +623,6 @@ def conform(
mdc_affine = mdc_affine / np.linalg.norm(mdc_affine, axis=1)
h1["Mdc"] = np.linalg.inv(mdc_affine)

print(h1.get_zooms())
h1["fov"] = max(i * v for i, v in zip(h1.get_data_shape(), h1.get_zooms(), strict=False))
center = np.asarray(img.shape[:3], dtype=float) / 2.0
h1["Pxyz_c"] = img.affine.dot(np.hstack((center, [1.0])))[:3]
32 changes: 23 additions & 9 deletions brun_fastsurfer.sh
@@ -29,6 +29,7 @@ parallel_pipelines="1"
num_parallel_surf="1"
num_parallel_seg="1"
statusfile=""
python="python3.10 -s"

function usage()
{
@@ -111,7 +112,7 @@ function verify_parallel()
{
# 1: flag, 2: value
value="$(echo "$2" | tr '[:upper:]' '[:lower:]')"
if [[ "$value" =~ ^(max|-[0-9]+|0)$ ]] ; then verify_value=max
if [[ "$value" =~ ^(max|-[0-9]+|0)$ ]] ; then verify_value="max"
elif [[ "$value" =~ ^[0-9]+$ ]] ; then verify_value="$value"
else echo "ERROR: Invalid value for $1: '$2', must be integer or 'max'." ; exit 1
fi
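# Illustrative mapping (not part of the change): "max", "0" and negative values all resolve to
# verify_value="max"; plain positive integers pass through unchanged, e.g. "--parallel_seg 4" -> "4".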
@@ -193,6 +194,7 @@ case $key in
--parallel_surf)
parallel_pipelines=2 ; verify_parallel "$key" "$1" ; num_parallel_surf="$verify_value" ; shift ;;
--statusfile) statusfile="$1" ; shift ;;
--py) python="$1" ; POSITIONAL_FASTSURFER+=(--py "$1") ; shift ;; # this may be needed to get the device count
--debug) debug="true" ;;
--help) usage ; exit ;;
# run_fastsurfer.sh options, with extra effect in brun_fastsurfer
@@ -233,7 +235,7 @@ function get_device_list()
if [[ "$1" =~ ^(cpu|mps|auto|cuda)$ ]] || [[ "$1" =~ ^cuda:[0-9]+(,[0-9]+)*$ ]] ; then list=",$1"
elif [[ "$1" =~ ^cuda:[0-9]+([-,][0-9]+(-[0-9]+)?)* ]] ; then
IFS="," ; host=${1:0:5}
for i in ${1:5} ; do IFS="-" ; list+=",$(seq -s, $i)" ; done
for i in ${1:5} ; do IFS="-" ; v=($i) ; list+=",$(seq -s"," "${v[0]}" "$(("${v[1]}" - 1))")" ; done
else
echo "ERROR: Invalid format for device|viewagg_device: $1 must be auto|cpu|mps|cuda[:X[,Y][-Z]...]"
exit 1
@@ -249,14 +251,26 @@
}
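# Illustrative trace (not part of the change): for "--device cuda:0-2" the loop splits "0-2" on "-"
# into v=(0 2) and appends "$(seq -s"," 0 $((2 - 1)))", i.e. the GPU indices "0,1" (the upper end of
# the range is exclusive here).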

# we do not know here, whether a gpu is available, so even if we have none or one, give a warning message.
if [[ "$num_parallel_seg" != 1 ]] && [[ "$res_device" =~ ^auto|cuda$ ]] && \
[[ "$(ls /dev/nvidia[0-9] | wc -w)" -gt 1 ]] && [[ "${CUDA_VISIBLE_DEVICES/,/}" != "$CUDA_VISIBLE_DEVICES" ]]
if [[ "$num_parallel_seg" != 1 ]] && [[ "$surf_only" != "true" ]] && [[ "$res_device" =~ ^auto|cuda$ ]]
then
echo "WARNING: --device '$res_device' only uses the first gpu for parallel processing in the segmentation pipeline"
echo " (--parallel_seg 2+)! Manually specify --device cuda:0 or --device:0-3 (to use multiple gpus)."
# device is auto or cuda, auto-detect the device count and make it match with num_parallel_seg
detected_devices=$($python -c "import torch.cuda.device_count as d ; print(*range(d()), sep=',')")
_devices=($detected_devices)
num_devices="${#_devices[@]}"
echo "INFO: Auto-detecting CUDA-capable devices to parallelize segmentation, found $num_devices device(s)."
if [[ "${#detected_devices[@]}" -le 1 ]] ; then echo " => No changes!" # keep auto/cuda
elif [[ "$num_parallel_seg" == "max" ]] || [[ "$num_parallel_seg" -gt "$num_devices" ]] ; then
res_device="cuda:$detected_devices"
num_parallel_seg=$num_devices
echo " => Setting number of parallel segmentations to number of devices (one segmentation per device)."
else
res_device="cuda:$(seq -s"," 0 $((num_parallel_seg - 1)))"
echo " => Limited by $num_parallel_seg parallel segmentations."
fi
else
get_device_list "$res_device" ; res_device="$device_value"
get_device_list "$res_viewagg_device" ; res_viewagg_device="$device_value"
fi
if [[ -n "$res_device" ]] ; then get_device_list "$res_device" ; res_device="$device_value" ; fi
if [[ -n "$res_viewagg_device" ]] ; then get_device_list "$res_viewagg_device" ; res_viewagg_device="$device_value" ; fi
if [[ "$subjects_stdin" == "true" ]]
then
if [[ -t 0 ]] || [[ "$debug" == "true" ]]; then
@@ -601,7 +615,7 @@ function process_by_token()
# wait for jobs to finish
if [[ "$debug" == "true" ]]
then
echo "DEBUG: Finished scheduling $mode-jobs... waiting for ${#running_jobs} jobs to finish:"
echo "DEBUG: Finished scheduling $mode-jobs... waiting for ${#running_jobs[@]} jobs to finish:"
IFS=" "
echo " ${running_jobs[*]}"
wait "${running_jobs[@]}"
39 changes: 21 additions & 18 deletions run_fastsurfer.sh
@@ -48,6 +48,7 @@ cereb_flags=()
hypo_segfile=""
hypo_statsfile=""
hypvinn_flags=()
hypvinn_regmode="coreg"
conformed_name=""
conformed_name_t2=""
norm_name=""
@@ -470,7 +471,7 @@ case $key in
--hypo_statsfile) hypo_statsfile="$1" ; shift ;;
--reg_mode)
mode=$(echo "$1" | tr "[:upper:]" "[:lower:]")
if [[ "$mode" =~ ^(none|coreg|robust)$ ]] ; then hypvinn_flags+=(--regmode "$mode")
if [[ "$mode" =~ ^(none|coreg|robust)$ ]] ; then hypvinn_regmode="$mode"
else echo "Invalid --reg_mode option, must be 'none', 'coreg' or 'robust'." ; exit 1
fi
shift # past value
@@ -612,20 +613,14 @@ if [[ -z "$norm_name" ]] ; then norm_name="${sd}/${subject}/mri/orig_nu.mgz" ; fi
if [[ -z "$norm_name_t2" ]] ; then norm_name_t2="${sd}/${subject}/mri/T2_nu.mgz" ; fi
if [[ -z "$seg_log" ]] ; then seg_log="${sd}/${subject}/scripts/deep-seg.log" ; fi
if [[ -z "$build_log" ]] ; then build_log="${sd}/${subject}/scripts/build.log" ; fi
if [[ -n "$t2" ]]
# T2 image is only used in segmentation pipeline (but registration is done even if hypvinn is off)
if [[ -n "$t2" ]] && [[ "$run_seg_pipeline" == 1 ]]
then
if [[ ! -f "$t2" ]]
then
echo "ERROR: T2 file $t2 does not exist!"
exit 1
fi
if [[ ! -f "$t2" ]] ; then echo "ERROR: T2 file $t2 does not exist!" ; exit 1 ; fi
copy_name_T2="${sd}/${subject}/mri/orig/T2.001.mgz"
fi

if [[ -z "$PYTHONUNBUFFERED" ]]
then
export PYTHONUNBUFFERED=0
fi
if [[ -z "$PYTHONUNBUFFERED" ]] ; then export PYTHONUNBUFFERED=0 ; fi

# check the vox_size setting
if [[ "$vox_size" =~ ^[0-9]+([.][0-9]+)?$ ]]
@@ -707,9 +702,19 @@ then
exit 1
fi

if [[ "$run_surf_pipeline" == "1" ]] || [[ "$run_talairach_registration" == "true" ]]
what_needs_license=""
if [[ "$run_surf_pipeline" == 1 ]] ; then what_needs_license+=" and the surface pipeline" ; fi
if [[ "$run_seg_pipeline" == 1 ]] ; then
if [[ "$run_biasfield" == 1 ]] && [[ "$run_talairach_registration" == "true" ]] ; then
what_needs_license+=" and the talairach-registration in the segmentation pipeline"
fi
if [[ -n "$t2" ]] && [[ "$hypvinn_regmode" != "none" ]] ; then
what_needs_license+=" and the T1-T2 registration in the segmentation pipeline"
fi
fi
if [[ -n "$what_needs_license" ]]
then
msg="The surface pipeline and the talairach-registration in the segmentation pipeline require a FreeSurfer License"
msg="T${what_needs_license:6} require(s) a FreeSurfer License"
if [[ -z "$FS_LICENSE" ]]
then
msg="$msg, but no license was provided via --fs_license or the FS_LICENSE environment variable"
@@ -790,9 +795,7 @@ set +eo > /dev/null
mkdir -p "$(dirname "$seg_log")"


if [[ -f "$seg_log" ]]; then log_existed="true"
else log_existed="false"
fi
if [[ -f "$seg_log" ]]; then log_existed="true" ; else log_existed="false" ; fi

{
echo "========================================================="
@@ -914,7 +917,7 @@ then

echo "INFO: Robust scaling (partial conforming) of T2 image..."
cmd=($python "${fastsurfercnndir}/data_loader/conform.py" --no_strict_lia
--no_vox_size --no_img_size "$t2" "$conformed_name_t2")
--no_iso_vox --no_img_size -i "$t2" -o "$conformed_name_t2")
echo_quoted "${cmd[@]}"
"${cmd[@]}" 2>&1
echo "Done."
@@ -1058,7 +1061,7 @@ then
then
# currently, the only T2 preprocessing step is the registration to T1w
cmd=($python "$hypvinndir/run_prediction.py" --sd "${sd}" --sid "${subject}"
"${hypvinn_flags[@]}" --threads "$threads_seg" --async_io
"${hypvinn_flags[@]}" --reg_mode "$hypvinn_regmode" --threads "$threads_seg" --async_io
--batch_size "$batch_size" --seg_log "$seg_log" --device "$device"
--viewagg_device "$viewagg" --t1)
if [[ "$run_biasfield" == "1" ]]
30 changes: 16 additions & 14 deletions srun_fastsurfer.sh
@@ -124,7 +124,7 @@ Data- and subject-related options:
--sid subject-101 --t1 <data-path>/raw/T1w-101A.nii.gz --vox_size 0.9

FastSurfer options:
--fs_license: path to the freesurfer license (either absolute path or relative to pwd)
--fs_license: path to the freesurfer license (either absolute path or relative to pwd, for surfaces & registration(s))
--seg_only: only run the segmentation pipeline
--surf_only: only run the surface pipeline (--sd must contain previous --seg_only processing)
--***: also standard FastSurfer options can be passed, like --3T, --no_cereb, etc.
@@ -443,7 +443,7 @@ if [[ -n "$subject_list" ]]
then
# the test for files (check_subject_images) requires paths to be wrt
cases=$(translate_cases "$in_dir" "$subject_list" "$in_dir" "${subject_list_delim}" "${subject_list_awk_code_sid}" "${subject_list_awk_code_args}")
check_subject_images "$cases"
check_subject_images "$in_dir" "$cases"
if [[ "$debug" == "true" ]]
then
log "Debug output of the parsed subject_list:"
@@ -542,15 +542,11 @@ then
--statusfile /data/scripts/subject_success
# run_fastsurfer options (inside singularity)
--sd "/data/cases" --threads "$num_cpus_per_task"
--seg_only "${POSITIONAL_FASTSURFER[@]}")
--seg_only "${POSITIONAL_FASTSURFER[@]}"
--fs_license /data/scripts/.fs_license)

seg_cmd_filename=$hpc_work/scripts/slurm_cmd_seg.sh
if [[ "$submit_jobs" == "true" ]]
then
seg_cmd_file=$seg_cmd_filename
else
seg_cmd_file=$(mktemp)
fi # END OF NEW
if [[ "$submit_jobs" == "true" ]] ; then seg_cmd_file=$seg_cmd_filename ; else seg_cmd_file=$(mktemp) ; fi

slurm_part_=$(first_non_empty_arg "$partition_seg" "$partition")
if [[ -z "$slurm_part_" ]] ; then slurm_partition=() ; else slurm_partition=(-p "$slurm_part_") ; fi
@@ -559,8 +555,11 @@ then
echo "module load singularity"
echo "singularity exec --nv -B \"$hpc_work:/data,$in_dir:/source:ro\" --no-mount home,cwd\\"
echo " --cleanenv --env TQDM_DISABLE=1 \\"
if [[ -n "$extra_singularity_options" ]] || [[ -n "$extra_singularity_options_seg" ]]; then
echo " $extra_singularity_options $extra_singularity_options_seg\\"
if [[ -n "$extra_singularity_options" ]] || [[ -n "$extra_singularity_options_seg" ]] ; then
echo " $extra_singularity_options $extra_singularity_options_seg \\"
fi
if [[ "$jobarray_size" -gt 1 ]] ; then
echo " --env SLURM_ARRAY_TASK_ID=\$SLURM_ARRAY_TASK_ID --env SLURM_ARRAY_TASK_COUNT=\$SLURM_ARRAY_TASK_COUNT \\"
fi
echo " $hpc_work/images/fastsurfer.sif \\"
echo " /data/$brun_fastsurfer ${fastsurfer_options[*]} ${fastsurfer_seg_options[*]}"
@@ -647,16 +646,19 @@ then
echo " --ntasks=1 --time=$timelimit_surf --nodes=1"
echo " --cpus-per-task=$num_cpus_surf --mem=${mem_surf}G"
echo " --hint=nomultithread"
echo " singularity exec --no-home -B '$hpc_work:/data'"
echo " singularity exec --no-mount home,cwd --cleanenv -B '$hpc_work:/data'"
echo " -B '$in_dir:/source:ro'"
if [[ -n "$extra_singularity_options" ]] || [[ -n "$extra_singularity_options_surf" ]]; then
echo " $extra_singularity_options $extra_singularity_options_surf"
echo " $extra_singularity_options $extra_singularity_options_surf \\"
fi
# SLURM_ARRAY_TASK_ID (and SLURM_ARRAY_TASK_COUNT) are only needed if brun_fastsurfer is run inside the container,
# but here it is run in the sbatch context
echo " '$hpc_work/images/fastsurfer.sif'"
echo " /fastsurfer/run_fastsurfer.sh)"
echo "$hpc_work/$brun_fastsurfer --run_fastsurfer \"\${run_fastsurfer[*]}\" \\"
echo " ${fastsurfer_options[*]} ${fastsurfer_surf_options[*]}"
} > "$surf_cmd_file"
surf_slurm_sched=("--mem-per-cpu=${mem_per_core}G" "--cpus-per-task=$cores_per_task"
surf_slurm_sched=("--mem-per-cpu=${mem_per_core}G" "--cpus-per-task=$num_cpus_surf"
"--ntasks=$real_num_cases_per_task"
"--nodes=1-$real_num_cases_per_task" "--hint=nomultithread"
"${jobarray_option[@]}" "$surf_depend"
108 changes: 59 additions & 49 deletions stools.sh
@@ -44,40 +44,34 @@ function translate_cases ()
#param4 optional, delimiter
#param5 optional, awk snippets to modify the subject_id, default '$1'
#param6 optional, awk snippets to modify the image_path, default '$2'
if [[ "$#" -gt 3 ]]
then
delimiter=$4
else
delimiter=","
fi
if [[ "$#" -gt 4 ]]
then
subid_awk="$5"
else
subid_awk='$1'
fi
if [[ "$#" -gt 5 ]]
then
subpath_awk="$6"
else
subpath_awk='$2'
fi
#param7 optional, second delimiter (T2 lookbehind), default "[[:space:]]--[tT]2[[:space:]]+"
# this param only supports T2 right now!
source_dir="$([[ "$1" =~ ^(.*[^/])/+$ ]] && echo "${BASH_REMATCH[1]}" || echo "$1")"
target_dir="$([[ "$3" =~ ^(.*[^/])/+$ ]] && echo "${BASH_REMATCH[1]}" || echo "$3")"
# both source_dir and target_dir are without trailing /
delimiter=${4-,}
subid_awk="${5-\$1}"
subpath_awk="${6-\$2}"
t2_lookbehind="${7-[[:space:]]--[tT]2[[:space:]]+}"
script="
BEGIN {
regex=\"^(\" source_dir \"|\" target_dir \")\";
regex2=\",(\" source_dir \"|\" target_dir \")/*\";
regex=\"(\" source_dir \"|\" target_dir \")/+\";
}
length(\$NF) > 1 {
subid=${subid_awk};
subpath=${subpath_awk};
gsub(regex, \"\", subpath);
gsub(regex2, \",\" target_dir \"/\", subpath);
gsub(\"^\" regex, \"\", subpath);
gsub(\"$t2_lookbehind\" regex, \" --t2 \" target_dir \"/\", subpath);
gsub(\"/{2,}\", \"/\", subpath);
print subid \"=\" target_dir \"/\" subpath;
}"
#>&2 echo "awk -F \"$delimiter\" -v target_dir=\"$3\" -v source_dir=\"$1\" \"$script\" \"$2\""
#>&2 cat "$2"
#>&2 awk -F "$delimiter" -v target_dir="$3" -v source_dir="$1" "$script" "$2"
awk -F "$delimiter" -v target_dir="$3" -v source_dir="$1" "$script" "$2"
# >&2 echo "DEBUG awk script"
# >&2 echo "========="
# >&2 echo "awk -F \"$delimiter\" -v target_dir=\"$target_dir\" -v source_dir=\"$source_dir\" \"$script\" \"$2\""
# >&2 cat "$2"
# >&2 awk -F "$delimiter" -v target_dir="$target_dir" -v source_dir="$source_dir" "$script" "$2"
# >&2 echo "========="
awk -F "$delimiter" -v target_dir="$target_dir" -v source_dir="$source_dir" "$script" "$2"
}
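# Illustrative example (not part of the change; hypothetical paths): with source_dir=/source,
# target_dir=/target and the default delimiter ",", the subject_list line
#   subj01,/source/raw/T1.nii.gz --t2 /source/raw/T2.nii.gz
# is printed as
#   subj01=/target/raw/T1.nii.gz --t2 /target/raw/T2.nii.gz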

# step zero: make directories
@@ -169,42 +163,58 @@ function check_seg_surf_only ()
}
function check_subject_images ()
{
#param1 cases
if [[ "$#" -lt 1 ]]; then >&2 echo "check_subject_images is missing parameters!"; exit 1; fi
#param1 data dir
#param2 cases
if [[ "$#" -lt 2 ]]; then >&2 echo "check_subject_images is missing parameters!"; exit 1; fi
missing_subject_ids=""
missing_subject_imgs=""
for subject in $1
symlink_subject_imgs=""
data_dir="$1"
OLD_IFS=$IFS
IFS=$'\n'
for subject in $2
do
subject_id=$(echo "$subject" | cut -d= -f1)
image_parameters=$(echo "$subject" | cut -d= -f2)
image_parameters=$(echo "$subject" | cut -d= -f2-1000)
i=0
OLD_IFS=$IFS
IFS=","
first_img=1
IFS=" "
for arg in $image_parameters
do
if [[ "$i" == 0 ]]; then image_path="$arg"; fi
if [[ "$i" == 0 ]]
then
# expecting a path
if [[ ! -e "$arg" ]]
then
if [[ $first_img == 1 ]]; then missing_subject_ids+=", $subject_id" ; first_img=0 ; fi
missing_subject_imgs+=", $arg"
i=$((i + 1))
continue
fi
real_data="$(realpath "$data_dir")"
real_arg="$(realpath "$arg")"
arg0="${arg:0:${#data_dir}}"
if [[ "$real_data" != "$(realpath "$arg0")" ]] || [[ "${real_arg:${#real_data}}" != "${arg:${#data_dir}}" ]] # this is a symlink
then
echo ":$(realpath "$arg"):=?=:$arg:"
if [[ $first_img == 1 ]]; then missing_subject_ids+=", $subject_id" ; first_img=0 ; fi
symlink_subject_imgs+=", $arg => $(realpath "$arg")"
fi
elif [[ "$arg" == "--t2" ]] ; then i=-1 ;
fi
i=$((i + 1))
done
IFS=$OLD_IFS
#TODO: also check here, if any of the folders up to the mounted dir leading to the file are symlinks
#TODO: if so, this will lead to problems
if [[ ! -e "$image_path" ]]
then
if [[ -n "$missing_subject_ids" ]]
then
missing_subject_ids="$missing_subject_ids, "
missing_subject_imgs="$missing_subject_imgs, "
fi
missing_subject_ids="$missing_subject_ids$subject_id"
missing_subject_imgs="$missing_subject_imgs$image_path"
fi
done
if [[ -n "$missing_subject_ids" ]]
then
echo "ERROR: Some images are missing!"
echo "Subject IDs: $missing_subject_ids"
echo "Files: $missing_subject_imgs"
exit 1
condition=$([[ -n "$missing_subject_imgs" ]] && echo " or missing")
condition+=$([[ -n "$symlink_subject_imgs" ]] && echo " or symlinks")
echo "$([[ "${condition:4:1}" == m ]] && echo "ERROR" || echo "WARNING"): Some images are ${condition:4}!"
echo "Subject IDs: ${missing_subject_ids:2}"
if [[ -n "$missing_subject_imgs" ]] ; then echo "Missing files: ${missing_subject_imgs:2}" ; fi
if [[ -n "$symlink_subject_imgs" ]] ; then echo "Symlink files: ${symlink_subject_imgs:2}" ; fi
if [[ -n "$missing_subject_imgs" ]] ; then exit 1 ; fi
fi
}
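# Illustrative scenario (not part of the change; hypothetical paths): with data_dir=/data/study and
#   /data/study/sub-01/T1.nii.gz -> /archive/T1.nii.gz   (a symlink pointing outside data_dir)
# realpath() of the image no longer matches the data_dir-prefixed path, so the case is listed under
# "Symlink files" (warning only), whereas a non-existing path is listed under "Missing files" and
# aborts with exit 1.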
