From 92dac2e2b6a49514fe1adaffddbd19b6c88025a4 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Tue, 20 Apr 2021 12:00:30 -0400 Subject: [PATCH 01/13] Added NF file neuromod-process-spinalcord.nf This file is a copy of neuromod-process-anat.nf and will be adapted in subsequent commits. --- neuromod-process-spinalcord.nf | 621 +++++++++++++++++++++++++++++++++ 1 file changed, 621 insertions(+) create mode 100644 neuromod-process-spinalcord.nf diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf new file mode 100644 index 0000000..f844505 --- /dev/null +++ b/neuromod-process-spinalcord.nf @@ -0,0 +1,621 @@ +#!/usr/bin/env nextflow + +/* +This workflow contains pre- and post-processing steps to +calculate Magnetization Transfer Saturation Index (MTsat) map along +with a longitudinal relaxation time (T1) map. + +Dependencies: + These dependencies must be installed if Docker is not going + to be used. + - Advanced notmarization tools (ANTs, https://github.com/ANTsX/ANTs) + - FSL + - qMRLab (https://qmrlab.org) + - git + +Docker: + - https://hub.docker.com/u/qmrlab + - qmrlab/minimal:v2.3.1 + - qmrlab/antsfsl:latest + +Author: + Agah Karakuzu 2019 + agahkarakuzu@gmail.com + +Users: Please see USAGE for further details + */ + +/*Set defaults for parameters determining logic flow to false*/ +params.bids = false +params.help = false + +/* Call to the mt_sat_wrapper.m will be invoked by params.runcmd. +Depending on the params.platform selection, params.runcmd +may point to MATLAB or Octave. +*/ +if (params.platform == "octave"){ + + if (params.octave_path){ + log.info "Using Octave executable declared in nextflow.config." + params.octave = params.octave_path + " --no-gui --eval" + }else{ + log.info "Using Octave in Docker or (if local) from the sys path." + params.octave = "octave --no-gui --eval" + } + + params.runcmd = params.octave +} + +if (params.platform == "matlab"){ + + if (params.matlab_path){ + log.info "Using MATLAB executable declared in nextflow.config." + params.matlab = params.matlab_path + " -nodisplay -nosplash -nodesktop -r" + }else{ + log.info "Using MATLAB from the sys path." + params.matlab = "matlab -nodisplay -nosplash -nodesktop -r" + } + + params.runcmd = params.matlab +} + +params.wrapper_repo = "https://github.com/qMRLab/qMRWrappers.git" + +workflow.onComplete { + log.info "Pipeline completed at: $workflow.complete" + log.info "Execution status: ${ workflow.success ? 
'OK' : 'failed' }" + log.info "Execution duration: $workflow.duration" +} + +/*Define bindings for --help*/ +if(params.help) { + usage = file("$baseDir/USAGE") + + cpu_count = Runtime.runtime.availableProcessors() + bindings = ["ants_dim":"$params.ants_dim", + "ants_metric":"$params.ants_metric", + "ants_metric_weight":"$params.ants_metric_weight", + "ants_metric_bins":"$params.ants_metric_bins", + "ants_metric_sampling":"$params.ants_metric_sampling", + "ants_metric_samplingprct":"$params.ants_metric_samplingprct", + "ants_transform":"$params.ants_transform", + "ants_convergence":"$params.ants_convergence", + "ants_shrink":"$params.ants_shrink", + "ants_smoothing":"$params.ants_smoothing", + "use_b1cor":"$params.use_b1cor", + "b1cor_factor":"$params.b1cor_factor", + "use_bet":"$params.use_bet", + "bet_recursive":"$params.bet_recursive", + "bet_threshold":"$params.bet_threshold", + "platform":"$params.platform", + "matlab_path":"$params.matlab_path", + "octave_path":"$params.octave_path", + "qmrlab_path":"$params.qmrlab_path" + ] + + engine = new groovy.text.SimpleTemplateEngine() + template = engine.createTemplate(usage.text).make(bindings) + + print template.toString() + return +} + +/*Scrape file names from a BIDS-compatible dataset +Note: + BIDS for qMRI is currently under development (BEP001,https://github.com/bids-standard/bep001) + The current format is valid as of late 2019 and subjected to change. + For B1plusmaps, there is not a specification yet. To circumvent this + issue, these (optional) maps are assumed to be located at the fmap + folder with _B1plusmap suffix. +*/ +if(params.bids){ + log.info "Input: $params.bids" + bids = file(params.bids) + + /* ==== BIDS: MTSat inputs ==== */ + /* Here, alphabetical indexes matter. Therefore, MToff -> MTon -> T1w */ + in_data = Channel + .fromFilePairs("$bids/**/**/anat/sub-*_acq-{MToff,MTon,T1w}_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) + (pdw, mtw, t1w) = in_data + .map{sid, MToff, MTon, T1w -> [ tuple(sid, MToff), + tuple(sid, MTon), + tuple(sid, T1w)]} + .separate(3) + + in_data = Channel + .fromFilePairs("$bids/**/**/anat/sub-*_acq-{MToff,MTon,T1w}_MTS.json", maxDepth: 3, size: 3, flat: true) + (pdwj, mtwj, t1wj) = in_data + .map{sid, MToff, MTon, T1w -> [ tuple(sid, MToff), + tuple(sid, MTon), + tuple(sid, T1w)]} + .separate(3) + + /* ==== BIDS: B1 map ==== */ + /* Look for B1map in fmap folder */ + b1_data = Channel + .fromFilePairs("$bids/**/**/fmap/sub-*_acq-flipangle_dir-AP_B1plusmap.nii.gz", maxDepth:3, size:1, flat:true) + .set {b1raw} + //(b1raw) = b1_data + // .map{sid, B1plusmap -> [tuple(sid, B1plusmap)]} + // .separate(1) +} +else{ + error "ERROR: Argument (--bids) must be passed. See USAGE." +} + +/*Each data type is defined as a channel. To pass all the channels + to the same process accurately, these channels must be joined. 
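+ (Nextflow's join operator matches tuples by their first element, here the subject ID sid.)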
+*/ + +/*Split T1w into three channels + t1w_pre_ch1 --> mtsat_for_alignment + t1w_pre_ch2 --> t1w_for_bet + t1w_pre_ch3 --> t1w_post +*/ +t1w.into{t1w_pre_ch1; t1w_for_bet; t1w_post} + +/* Merge PDw, MTw and T1w for alignment*/ +pdw + .join(mtw) + .join(t1w_pre_ch1) + .set{mtsat_for_alignment} + +log.info "qMRflow: MTsat pipeline" +log.info "=======================" +log.info "" + +log.info "## # ###### # # ##### #### # # #### ##### " +log.info " # # # # # # # # # # ## ## # # # # " +log.info " # # # ##### # # # # # # # ## # # # # # " +log.info " # # # # # # ##### # # # # # # # # " +log.info " # ## # # # # # # # # # # # # # " +log.info " # # ###### #### # # #### # # #### ##### " +log.info "" +log.info "Start time: $workflow.start" +log.info "" +log.info "" +log.info "DATA" +log.info "====" +log.info "" +log.info "BIDS option has been enabled." +log.warn "qMRI protocols will be read from sidecar .json files for MTSAT and MTR." +log.warn "Some protocols for MP2RAGE are hardcoded." +log.info "" +log.info "OPTIONS" +log.info "=======" +log.info "" +log.info "[GLOBAL]" +log.info "---------------" +log.info "Selected platform: $params.platform" +log.info "BET enabled: $params.use_bet" +log.info "B1+ correction enabled: $params.use_b1cor" +log.info "" +log.info "[ANTs Registration]" +log.info "-------------------" +log.info "Dimensionality: $params.ants_dim" +log.info "Metric: $params.ants_metric" +log.info "Weight: $params.ants_metric_weight" +log.info "Number of bins: $params.ants_metric_bins" +log.info "Sampling type: $params.ants_metric_sampling" +log.info "Sampling percentage: $params.ants_metric_samplingprct" +log.info "Transform: $params.ants_transform" +log.info "Convergence: $params.ants_convergence" +log.info "Shrink factors: $params.ants_shrink" +log.info "Smoothing sigmas: $params.ants_smoothing" +log.info "" +log.info "[FSL BET]" +log.info "---------------" +log.info "Enabled: $params.use_bet" +log.info "Fractional intensity threshold: $params.bet_threshold" +log.info "Robust brain center estimation: $params.bet_recursive" +log.info "" +log.info "[qMRLab mt_sat]" +log.info "---------------" +log.warn "Acquisition protocols will be read from sidecar .json files (BIDS)." +if (params.use_b1cor){ +log.info "B1+ correction has been ENABLED." +log.warn "Process will be skipped for participants missing a B1map file." +log.info "B1 correction factor: $params.b1cor_factor"} +if (!params.use_b1cor){ +log.info "B1+ correction has been DISABLED." +log.warn "Process will NOT take any (possibly) existing B1maps into account." 
+} +log.info "" +log.info "=======================" + +/*Perform rigid registration to correct for head movement across scans: + - MTw (moving) --> T1w (fixed) + - PDw (moving) --> T1w (fixed) +*/ + +process Align_Input_Volumes { + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + input: + tuple val(sid), file(pdw), file(mtw), file(t1w) from mtsat_for_alignment + + output: + tuple val(sid), "${sid}_acq-MTon_MTS_aligned.nii.gz", "${sid}_acq-MToff_MTS_aligned.nii.gz"\ + into mtsat_from_alignment + file "${sid}_acq-MTon_MTS_aligned.nii.gz" + file "${sid}_acq-MToff_MTS_aligned.nii.gz" + file "${sid}_mtw_to_t1w_displacement.*.mat" + file "${sid}_pdw_to_t1w_displacement.*.mat" + + script: + """ + antsRegistration -d $params.ants_dim \ + --float 0 \ + -o [${sid}_mtw_to_t1w_displacement.mat,${sid}_acq-MTon_MTS_aligned.nii.gz] \ + --transform $params.ants_transform \ + --metric $params.ants_metric[$t1w,$mtw,$params.ants_metric_weight, $params.ants_metric_bins,$params.ants_metric_sampling,$params.ants_metric_samplingprct] \ + --convergence $params.ants_convergence \ + --shrink-factors $params.ants_shrink \ + --smoothing-sigmas $params.ants_smoothing + + antsRegistration -d $params.ants_dim \ + --float 0 \ + -o [${sid}_pdw_to_t1w_displacement.mat,${sid}_acq-MToff_MTS_aligned.nii.gz] \ + --transform $params.ants_transform \ + --metric $params.ants_metric[$t1w,$pdw,$params.ants_metric_weight, $params.ants_metric_bins,$params.ants_metric_sampling,$params.ants_metric_samplingprct] \ + --convergence $params.ants_convergence \ + --shrink-factors $params.ants_shrink \ + --smoothing-sigmas $params.ants_smoothing + """ +} + + +process Extract_Brain{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + when: + params.use_bet == true + + input: + tuple val(sid), file(t1w) from t1w_for_bet + + output: + tuple val(sid), "${sid}_acq-T1w_mask.nii.gz" optional true into mask_from_bet + file "${sid}_acq-T1w_mask.nii.gz" + + script: + if (params.bet_recursive){ + """ + bet $t1w ${sid}_acq-T1w.nii.gz -m -R -n -f $params.bet_threshold + """} + else{ + """ + bet $t1w ${sid}_acq-T1w.nii.gz -m -n -f $params.bet_threshold + """ + } + +} + +/* Split t1w_post into two to deal with B1map cases */ +t1w_post.into{t1w_post_ch1;t1w_post_ch2; t1w_post_ch3} + +/* Split mtsat_from_alignment into two to deal with B1map cases */ +mtsat_from_alignment.into{mfa_ch1;mfa_ch2} + +/* There is no input optional true concept in nextflow +The process consuming the individual input channels will +only execute if the channel is populated. +*/ + +/* We need to conditionally create channels with +input data or as empty channels. +*/ +if (!params.use_bet){ + Channel + .empty() + .set{mask_from_bet} +} + +/* Split mask_from_bet into two to deal with B1map cases later. */ +mask_from_bet.into{mask_from_bet_ch1;mask_from_bet_ch2;mask_from_bet_ch3} + +t1wj.into{t1wj_ch1;t1wj_ch2} +pdwj.into{pdwj_ch1;pdwj_ch2} +mtwj.into{mtwj_ch1;mtwj_ch2} + +t1w_post_ch3 + .join(b1raw) + .set{b1_for_alignment} + +/* For now, just use identity transform (i.e. 
upsample w/o additional transformation).*/ + +process B1_Align{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + when: + params.use_b1cor == true + + input: + tuple val(sid), file(t1w), file(b1raw) from b1_for_alignment + + + output: + tuple val(sid), "${sid}_B1plusmap_aligned.nii.gz" optional true into b1_aligned + file "${sid}_B1plusmap_aligned.nii.gz" + + script: + """ + antsApplyTransforms -d 3 -e 0 -i $b1raw \ + -r $t1w \ + -o ${sid}_B1plusmap_aligned.nii.gz \ + -t identity + """ + +} + +if (!params.use_b1cor){ + Channel + .empty() + .set{b1_aligned} +} + +b1_aligned.into{b1_aligned_ch1;b1_for_smooth_without_mask} + +b1_aligned_ch1 + .join(mask_from_bet_ch3) + .set{b1_for_smooth_with_mask} + +process B1_Smooth_With_Mask{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + if (!params.matlab_path_exception){ + container 'qmrlab/minimal:v2.3.1' + } + + when: + params.use_b1cor == true && params.use_bet == true + + input: + tuple val(sid), file(b1aligned), file(mask) from b1_for_smooth_with_mask + + output: + tuple val(sid), "${sid}_B1plusmap_filtered.nii.gz" optional true into b1_filtered_w_mask + file "${sid}_B1plusmap_filtered.nii.gz" + file "${sid}_B1plusmap_filtered.json" + + script: + if (params.matlab_path_exception){ + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. + + $params.matlab_path_exception -nodesktop -nosplash -r "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned', 'mask', '$mask', 'type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path_exception','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" + """ + }else{ + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. + + $params.runcmd "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned', 'mask', '$mask', 'type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" + """ + + } + +} + +process B1_Smooth_Without_Mask{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + container 'qmrlab/minimal:v2.3.1' + + when: + params.use_b1cor == true && params.use_bet == false + + input: + tuple val(sid), file(b1aligned) from b1_for_smooth_without_mask + + output: + tuple val(sid), "${sid}_B1plusmap_filtered.nii.gz" optional true into b1_filtered_wo_mask + file "${sid}_B1plusmap_filtered.nii.gz" + file "${sid}_B1plusmap_filtered.json" + + script: + if (params.matlab_path_exception){ + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. + + $params.matlab_path_exception -nodesktop -nosplash -r "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned','type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path_exception','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" + """ + }else{ + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. 
+ + $params.runcmd "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned', 'type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" + """ + + } + +} + +if (params.use_bet){ +/*Merge tw1_post with mtsat_from_alignment and b1plus.*/ +t1w_post_ch1 + .join(mfa_ch1) + .join(b1_filtered_w_mask) + .join(t1wj_ch1) + .join(mtwj_ch1) + .join(pdwj_ch1) + .set{mtsat_for_fitting_with_b1} +}else{ +t1w_post_ch1 + .join(mfa_ch1) + .join(b1_filtered_wo_mask) + .join(t1wj_ch1) + .join(mtwj_ch1) + .join(pdwj_ch1) + .set{mtsat_for_fitting_with_b1} + +} + + + +mtsat_for_fitting_with_b1.into{mtsat_with_b1_bet;mtsat_with_b1} + +/*Merge tw1_post with mtsat_from_alignment only. +WITHOUT B1 MAP +*/ +t1w_post_ch2 + .join(mfa_ch2) + .join(t1wj_ch2) + .join(mtwj_ch2) + .join(pdwj_ch2) + .set{mtsat_for_fitting_without_b1} + +mtsat_for_fitting_without_b1.into{mtsat_without_b1_bet;mtsat_without_b1} + +/* We need to join these channels to avoid problems. +WITH B1 MAP +*/ +mtsat_with_b1_bet + .join(mask_from_bet_ch1) + .set{mtsat_with_b1_bet_merged} + +/* Depeding on the nextflow.config +settings for use_b1cor and use_bet, one of th +following 4 processes will be executed. +*/ + +process Fit_MTsat_With_B1map_With_Bet{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + when: + params.use_b1cor == true && params.use_bet == true + + input: + tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ + file(b1map), file(t1wj), file(mtwj), file(pdwj), file(mask) from mtsat_with_b1_bet_merged + + output: + file "${sid}_T1map.nii.gz" + file "${sid}_MTsat.nii.gz" + file "${sid}_T1map.json" + file "${sid}_MTsat.json" + file "${sid}_mt_sat.qmrlab.mat" + + script: + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. + + $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','mask','$mask','b1map','$b1map','b1factor',$params.b1cor_factor,'qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" + """ +} + +process Fit_MTsat_With_B1map_Without_Bet{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + when: + params.use_b1cor == true && params.use_bet == false + + input: + tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ + file(b1map), file(t1wj), file(mtwj), file(pdwj) from mtsat_with_b1 + + output: + file "${sid}_T1map.nii.gz" + file "${sid}_MTsat.nii.gz" + file "${sid}_T1map.json" + file "${sid}_MTsat.json" + file "${sid}_mt_sat.qmrlab.mat" + + script: + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. 
+ + $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','b1map','$b1map','b1factor',$params.b1cor_factor,'qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" + """ + +} + + +/* We need to join these channels to avoid problems.*/ +mtsat_without_b1_bet + .join(mask_from_bet_ch2) + .set{mtsat_without_b1_bet_merged} + +process Fit_MTsat_Without_B1map_With_Bet{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + + when: + params.use_b1cor == false && params.use_bet==true + + input: + tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ + file(t1wj), file(mtwj), file(pdwj), file(mask) from mtsat_without_b1_bet_merged + + output: + file "${sid}_T1map.nii.gz" + file "${sid}_MTsat.nii.gz" + file "${sid}_T1map.json" + file "${sid}_MTsat.json" + file "${sid}_mt_sat.qmrlab.mat" + + script: + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. + + $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','mask','$mask','qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" + """ +} + +process Fit_MTsat_Without_B1map_Without_Bet{ + tag "${sid}" + publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + programVersion = '2.7.3-beta' + (full, major, minor, patch, flavor) = (programVersion =~ /(\d+)\.(\d+)\.(\d+)-?(.+)/)[0] + + when: + params.use_b1cor == false && params.use_bet==false + + input: + tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ + file(t1wj), file(mtwj), file(pdwj) from mtsat_without_b1 + + output: + file "${sid}_T1map.nii.gz" + file "${sid}_MTsat.nii.gz" + file "${sid}_T1map.json" + file "${sid}_MTsat.json" + file "${sid}_mt_sat.qmrlab.mat" + + script: + """ + git clone $params.wrapper_repo + cd qMRWrappers + sh init_qmrlab_wrapper.sh $params.wrapper_version + cd .. + + $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" + """ +} From 26653cebedb466ef227ff0f7966330c62876920a Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Tue, 20 Apr 2021 14:04:40 -0400 Subject: [PATCH 02/13] Started adapting pipeline for spinal cord (WIP) --- neuromod-process-spinalcord.nf | 422 ++------------------------------- 1 file changed, 20 insertions(+), 402 deletions(-) diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index f844505..3d8b65e 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -2,28 +2,22 @@ /* This workflow contains pre- and post-processing steps to -calculate Magnetization Transfer Saturation Index (MTsat) map along -with a longitudinal relaxation time (T1) map. +calculate metrics in the spinal cord. Dependencies: These dependencies must be installed if Docker is not going to be used. - - Advanced notmarization tools (ANTs, https://github.com/ANTsX/ANTs) - - FSL - - qMRLab (https://qmrlab.org) + - SCT - git -Docker: - - https://hub.docker.com/u/qmrlab - - qmrlab/minimal:v2.3.1 - - qmrlab/antsfsl:latest - Author: - Agah Karakuzu 2019 - agahkarakuzu@gmail.com + Agah Karakuzu, Julien Cohen-Adad 2021 + jcohen@polymtl.ca Users: Please see USAGE for further details - */ +// TODO: where is "USAGE"? 
+// TODO: add usage for testing pipeline in one subject +*/ /*Set defaults for parameters determining logic flow to false*/ params.bids = false @@ -160,16 +154,16 @@ pdw .join(t1w_pre_ch1) .set{mtsat_for_alignment} -log.info "qMRflow: MTsat pipeline" -log.info "=======================" +log.info "SCT: spinal cord analysis pipeline" +log.info "==================================" log.info "" - -log.info "## # ###### # # ##### #### # # #### ##### " -log.info " # # # # # # # # # # ## ## # # # # " -log.info " # # # ##### # # # # # # # ## # # # # # " -log.info " # # # # # # ##### # # # # # # # # " -log.info " # ## # # # # # # # # # # # # # " -log.info " # # ###### #### # # #### # # #### ##### " +// Artwork created with https://patorjk.com/software/taag/#p=display&f=Doom&t=Neuromod%20-%20SCT +log.info "_ _ _ _____ _____ _____ " +log.info "| \ | | | | / ___/ __ \_ _|" +log.info "| \| | ___ _ _ _ __ ___ _ __ ___ ___ __| | ______ \ `--.| / \/ | |" +log.info "| . ` |/ _ \ | | | '__/ _ \| '_ ` _ \ / _ \ / _` | |______| `--. \ | | |" +log.info "| |\ | __/ |_| | | | (_) | | | | | | (_) | (_| | /\__/ / \__/\ | |" +log.info "\_| \_/\___|\__,_|_| \___/|_| |_| |_|\___/ \__,_| \____/ \____/ \_/" log.info "" log.info "Start time: $workflow.start" log.info "" @@ -223,14 +217,9 @@ log.warn "Process will NOT take any (possibly) existing B1maps into account." log.info "" log.info "=======================" -/*Perform rigid registration to correct for head movement across scans: - - MTw (moving) --> T1w (fixed) - - PDw (moving) --> T1w (fixed) -*/ - -process Align_Input_Volumes { +process T2_Segment_SpinalCord { tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' + publishDir "$bids/derivatives/sct/${sid}", mode: 'copy' input: tuple val(sid), file(pdw), file(mtw), file(t1w) from mtsat_for_alignment @@ -240,382 +229,11 @@ process Align_Input_Volumes { into mtsat_from_alignment file "${sid}_acq-MTon_MTS_aligned.nii.gz" file "${sid}_acq-MToff_MTS_aligned.nii.gz" - file "${sid}_mtw_to_t1w_displacement.*.mat" - file "${sid}_pdw_to_t1w_displacement.*.mat" script: + // TODO: add QC report """ - antsRegistration -d $params.ants_dim \ - --float 0 \ - -o [${sid}_mtw_to_t1w_displacement.mat,${sid}_acq-MTon_MTS_aligned.nii.gz] \ - --transform $params.ants_transform \ - --metric $params.ants_metric[$t1w,$mtw,$params.ants_metric_weight, $params.ants_metric_bins,$params.ants_metric_sampling,$params.ants_metric_samplingprct] \ - --convergence $params.ants_convergence \ - --shrink-factors $params.ants_shrink \ - --smoothing-sigmas $params.ants_smoothing - - antsRegistration -d $params.ants_dim \ - --float 0 \ - -o [${sid}_pdw_to_t1w_displacement.mat,${sid}_acq-MToff_MTS_aligned.nii.gz] \ - --transform $params.ants_transform \ - --metric $params.ants_metric[$t1w,$pdw,$params.ants_metric_weight, $params.ants_metric_bins,$params.ants_metric_sampling,$params.ants_metric_samplingprct] \ - --convergence $params.ants_convergence \ - --shrink-factors $params.ants_shrink \ - --smoothing-sigmas $params.ants_smoothing + sct_deepseg_sc -i ${sid}_bp-cspine_T2w.nii.gz -c t2 """ } - -process Extract_Brain{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - when: - params.use_bet == true - - input: - tuple val(sid), file(t1w) from t1w_for_bet - - output: - tuple val(sid), "${sid}_acq-T1w_mask.nii.gz" optional true into mask_from_bet - file "${sid}_acq-T1w_mask.nii.gz" - - script: - if (params.bet_recursive){ - """ - bet $t1w ${sid}_acq-T1w.nii.gz -m -R -n -f $params.bet_threshold - """} - else{ - 
""" - bet $t1w ${sid}_acq-T1w.nii.gz -m -n -f $params.bet_threshold - """ - } - -} - -/* Split t1w_post into two to deal with B1map cases */ -t1w_post.into{t1w_post_ch1;t1w_post_ch2; t1w_post_ch3} - -/* Split mtsat_from_alignment into two to deal with B1map cases */ -mtsat_from_alignment.into{mfa_ch1;mfa_ch2} - -/* There is no input optional true concept in nextflow -The process consuming the individual input channels will -only execute if the channel is populated. -*/ - -/* We need to conditionally create channels with -input data or as empty channels. -*/ -if (!params.use_bet){ - Channel - .empty() - .set{mask_from_bet} -} - -/* Split mask_from_bet into two to deal with B1map cases later. */ -mask_from_bet.into{mask_from_bet_ch1;mask_from_bet_ch2;mask_from_bet_ch3} - -t1wj.into{t1wj_ch1;t1wj_ch2} -pdwj.into{pdwj_ch1;pdwj_ch2} -mtwj.into{mtwj_ch1;mtwj_ch2} - -t1w_post_ch3 - .join(b1raw) - .set{b1_for_alignment} - -/* For now, just use identity transform (i.e. upsample w/o additional transformation).*/ - -process B1_Align{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - when: - params.use_b1cor == true - - input: - tuple val(sid), file(t1w), file(b1raw) from b1_for_alignment - - - output: - tuple val(sid), "${sid}_B1plusmap_aligned.nii.gz" optional true into b1_aligned - file "${sid}_B1plusmap_aligned.nii.gz" - - script: - """ - antsApplyTransforms -d 3 -e 0 -i $b1raw \ - -r $t1w \ - -o ${sid}_B1plusmap_aligned.nii.gz \ - -t identity - """ - -} - -if (!params.use_b1cor){ - Channel - .empty() - .set{b1_aligned} -} - -b1_aligned.into{b1_aligned_ch1;b1_for_smooth_without_mask} - -b1_aligned_ch1 - .join(mask_from_bet_ch3) - .set{b1_for_smooth_with_mask} - -process B1_Smooth_With_Mask{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - if (!params.matlab_path_exception){ - container 'qmrlab/minimal:v2.3.1' - } - - when: - params.use_b1cor == true && params.use_bet == true - - input: - tuple val(sid), file(b1aligned), file(mask) from b1_for_smooth_with_mask - - output: - tuple val(sid), "${sid}_B1plusmap_filtered.nii.gz" optional true into b1_filtered_w_mask - file "${sid}_B1plusmap_filtered.nii.gz" - file "${sid}_B1plusmap_filtered.json" - - script: - if (params.matlab_path_exception){ - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. - - $params.matlab_path_exception -nodesktop -nosplash -r "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned', 'mask', '$mask', 'type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path_exception','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" - """ - }else{ - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. 
- - $params.runcmd "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned', 'mask', '$mask', 'type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" - """ - - } - -} - -process B1_Smooth_Without_Mask{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - container 'qmrlab/minimal:v2.3.1' - - when: - params.use_b1cor == true && params.use_bet == false - - input: - tuple val(sid), file(b1aligned) from b1_for_smooth_without_mask - - output: - tuple val(sid), "${sid}_B1plusmap_filtered.nii.gz" optional true into b1_filtered_wo_mask - file "${sid}_B1plusmap_filtered.nii.gz" - file "${sid}_B1plusmap_filtered.json" - - script: - if (params.matlab_path_exception){ - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. - - $params.matlab_path_exception -nodesktop -nosplash -r "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned','type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path_exception','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" - """ - }else{ - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. - - $params.runcmd "addpath(genpath('qMRWrappers')); filter_map_wrapper('$b1aligned', 'type','$params.b1_filter_type','order',$params.b1_filter_order,'dimension','$params.b1_filter_dimension','size',$params.b1_filter_size,'qmrlab_path','$params.qmrlab_path','siemens','$params.b1_filter_siemens', 'sid','${sid}'); exit();" - """ - - } - -} - -if (params.use_bet){ -/*Merge tw1_post with mtsat_from_alignment and b1plus.*/ -t1w_post_ch1 - .join(mfa_ch1) - .join(b1_filtered_w_mask) - .join(t1wj_ch1) - .join(mtwj_ch1) - .join(pdwj_ch1) - .set{mtsat_for_fitting_with_b1} -}else{ -t1w_post_ch1 - .join(mfa_ch1) - .join(b1_filtered_wo_mask) - .join(t1wj_ch1) - .join(mtwj_ch1) - .join(pdwj_ch1) - .set{mtsat_for_fitting_with_b1} - -} - - - -mtsat_for_fitting_with_b1.into{mtsat_with_b1_bet;mtsat_with_b1} - -/*Merge tw1_post with mtsat_from_alignment only. -WITHOUT B1 MAP -*/ -t1w_post_ch2 - .join(mfa_ch2) - .join(t1wj_ch2) - .join(mtwj_ch2) - .join(pdwj_ch2) - .set{mtsat_for_fitting_without_b1} - -mtsat_for_fitting_without_b1.into{mtsat_without_b1_bet;mtsat_without_b1} - -/* We need to join these channels to avoid problems. -WITH B1 MAP -*/ -mtsat_with_b1_bet - .join(mask_from_bet_ch1) - .set{mtsat_with_b1_bet_merged} - -/* Depeding on the nextflow.config -settings for use_b1cor and use_bet, one of th -following 4 processes will be executed. -*/ - -process Fit_MTsat_With_B1map_With_Bet{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - when: - params.use_b1cor == true && params.use_bet == true - - input: - tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ - file(b1map), file(t1wj), file(mtwj), file(pdwj), file(mask) from mtsat_with_b1_bet_merged - - output: - file "${sid}_T1map.nii.gz" - file "${sid}_MTsat.nii.gz" - file "${sid}_T1map.json" - file "${sid}_MTsat.json" - file "${sid}_mt_sat.qmrlab.mat" - - script: - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. 
- - $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','mask','$mask','b1map','$b1map','b1factor',$params.b1cor_factor,'qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" - """ -} - -process Fit_MTsat_With_B1map_Without_Bet{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - when: - params.use_b1cor == true && params.use_bet == false - - input: - tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ - file(b1map), file(t1wj), file(mtwj), file(pdwj) from mtsat_with_b1 - - output: - file "${sid}_T1map.nii.gz" - file "${sid}_MTsat.nii.gz" - file "${sid}_T1map.json" - file "${sid}_MTsat.json" - file "${sid}_mt_sat.qmrlab.mat" - - script: - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. - - $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','b1map','$b1map','b1factor',$params.b1cor_factor,'qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" - """ - -} - - -/* We need to join these channels to avoid problems.*/ -mtsat_without_b1_bet - .join(mask_from_bet_ch2) - .set{mtsat_without_b1_bet_merged} - -process Fit_MTsat_Without_B1map_With_Bet{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - - when: - params.use_b1cor == false && params.use_bet==true - - input: - tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ - file(t1wj), file(mtwj), file(pdwj), file(mask) from mtsat_without_b1_bet_merged - - output: - file "${sid}_T1map.nii.gz" - file "${sid}_MTsat.nii.gz" - file "${sid}_T1map.json" - file "${sid}_MTsat.json" - file "${sid}_mt_sat.qmrlab.mat" - - script: - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. - - $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','mask','$mask','qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" - """ -} - -process Fit_MTsat_Without_B1map_Without_Bet{ - tag "${sid}" - publishDir "$bids/derivatives/qMRLab/${sid}", mode: 'copy' - programVersion = '2.7.3-beta' - (full, major, minor, patch, flavor) = (programVersion =~ /(\d+)\.(\d+)\.(\d+)-?(.+)/)[0] - - when: - params.use_b1cor == false && params.use_bet==false - - input: - tuple val(sid), file(t1w), file(mtw_reg), file(pdw_reg),\ - file(t1wj), file(mtwj), file(pdwj) from mtsat_without_b1 - - output: - file "${sid}_T1map.nii.gz" - file "${sid}_MTsat.nii.gz" - file "${sid}_T1map.json" - file "${sid}_MTsat.json" - file "${sid}_mt_sat.qmrlab.mat" - - script: - """ - git clone $params.wrapper_repo - cd qMRWrappers - sh init_qmrlab_wrapper.sh $params.wrapper_version - cd .. 
- - $params.runcmd "addpath(genpath('qMRWrappers')); mt_sat_wrapper('$mtw_reg','$pdw_reg','$t1w','$mtwj','$pdwj','$t1wj','qmrlab_path','$params.qmrlab_path', 'sid','${sid}'); exit();" - """ -} From 7d7f68e7192b1e29af81521df8124354e374acb3 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Tue, 20 Apr 2021 14:47:07 -0400 Subject: [PATCH 03/13] Updated spinal cord pipeline --- neuromod-process-spinalcord.nf | 424 +++++++++++++++++++++++---------- 1 file changed, 302 insertions(+), 122 deletions(-) diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index 3d8b65e..57e94c3 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -1,28 +1,43 @@ #!/usr/bin/env nextflow /* -This workflow contains pre- and post-processing steps to -calculate metrics in the spinal cord. +WIP DSL2 workflow for Neuromod anat processing. Dependencies: These dependencies must be installed if Docker is not going to be used. - - SCT + - Advanced notmarization tools (ANTs, https://github.com/ANTsX/ANTs) + - FSL + - qMRLab (https://qmrlab.org) - git +Docker: + - https://hub.docker.com/u/qmrlab + - qmrlab/minimal:v2.5.0b + - qmrlab/antsfsl:latest + Author: - Agah Karakuzu, Julien Cohen-Adad 2021 - jcohen@polymtl.ca + Agah Karakuzu 2019 + agahkarakuzu@gmail.com Users: Please see USAGE for further details -// TODO: where is "USAGE"? -// TODO: add usage for testing pipeline in one subject -*/ + */ + /*Set defaults for parameters determining logic flow to false*/ +nextflow.enable.dsl=2 +include { getSubSesEntity; checkSesFolders } from './modules/bids_patterns' + params.bids = false params.help = false +log.info "## # ###### # # ##### #### # # #### ##### " +log.info " # # # # # # # # # # ## ## # # # # " +log.info " # # # ##### # # # # # # # ## # # # # # " +log.info " # # # # # # ##### # # # # # # # # " +log.info " # ## # # # # # # # # # # # # # " +log.info " # # ###### #### # # #### # # #### ##### " + /* Call to the mt_sat_wrapper.m will be invoked by params.runcmd. Depending on the params.platform selection, params.runcmd may point to MATLAB or Octave. @@ -59,6 +74,7 @@ workflow.onComplete { log.info "Pipeline completed at: $workflow.complete" log.info "Execution status: ${ workflow.success ? 'OK' : 'failed' }" log.info "Execution duration: $workflow.duration" + log.info "Mnemonic ID: $workflow.runName" } /*Define bindings for --help*/ @@ -94,146 +110,310 @@ if(params.help) { return } -/*Scrape file names from a BIDS-compatible dataset -Note: - BIDS for qMRI is currently under development (BEP001,https://github.com/bids-standard/bep001) - The current format is valid as of late 2019 and subjected to change. - For B1plusmaps, there is not a specification yet. To circumvent this - issue, these (optional) maps are assumed to be located at the fmap - folder with _B1plusmap suffix. -*/ +entity = checkSesFolders() + if(params.bids){ log.info "Input: $params.bids" bids = file(params.bids) + derivativesDir = "$params.qmrlab_derivatives" + log.info "Derivatives: $params.qmrlab_derivatives" + log.info "Nextflow Work Dir: $workflow.workDir" + + Channel + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) + .multiMap {sid, MToff, MTon, T1w -> + PDw: tuple(sid, MToff) + MTw: tuple(sid, MTon) + T1w: tuple(sid, T1w) + } + .set {niiMTS} - /* ==== BIDS: MTSat inputs ==== */ - /* Here, alphabetical indexes matter. 
Therefore, MToff -> MTon -> T1w */ - in_data = Channel - .fromFilePairs("$bids/**/**/anat/sub-*_acq-{MToff,MTon,T1w}_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) - (pdw, mtw, t1w) = in_data - .map{sid, MToff, MTon, T1w -> [ tuple(sid, MToff), - tuple(sid, MTon), - tuple(sid, T1w)]} - .separate(3) - - in_data = Channel - .fromFilePairs("$bids/**/**/anat/sub-*_acq-{MToff,MTon,T1w}_MTS.json", maxDepth: 3, size: 3, flat: true) - (pdwj, mtwj, t1wj) = in_data - .map{sid, MToff, MTon, T1w -> [ tuple(sid, MToff), - tuple(sid, MTon), - tuple(sid, T1w)]} - .separate(3) + Channel + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_MTS.json", maxDepth: 3, size: 3, flat: true) + .multiMap {sid, MToff, MTon, T1w -> + PDw: tuple(sid, MToff) + MTw: tuple(sid, MTon) + T1w: tuple(sid, T1w) + } + .set {jsonMTS} + + Channel + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_UNIT1.nii.gz", maxDepth: 3, size: 1, flat: true) + .multiMap { it -> UNIT1: it } + .set {niiMP2RAGE} + + Channel + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_UNIT1.json", maxDepth: 3, size: 1, flat: true) + .multiMap { it -> UNIT1: it } + .set {jsonMP2RAGE} + + /* ==== BIDS: B1 map ==== */ /* ==== BIDS: B1 map ==== */ /* Look for B1map in fmap folder */ - b1_data = Channel + Channel .fromFilePairs("$bids/**/**/fmap/sub-*_acq-flipangle_dir-AP_B1plusmap.nii.gz", maxDepth:3, size:1, flat:true) - .set {b1raw} - //(b1raw) = b1_data - // .map{sid, B1plusmap -> [tuple(sid, B1plusmap)]} - // .separate(1) + .multiMap { it -> AngleMap: it } + .set {B1} } else{ error "ERROR: Argument (--bids) must be passed. See USAGE." } -/*Each data type is defined as a channel. To pass all the channels - to the same process accurately, these channels must be joined. -*/ +// First, join nii & json pairs with explicit notations +// provided by the multimap +niiMTS.PDw + .join(jsonMTS.PDw) + .set {pairPDw} -/*Split T1w into three channels - t1w_pre_ch1 --> mtsat_for_alignment - t1w_pre_ch2 --> t1w_for_bet - t1w_pre_ch3 --> t1w_post -*/ -t1w.into{t1w_pre_ch1; t1w_for_bet; t1w_post} +PDw = pairPDw + .multiMap { it -> + Nii: tuple(it[0],it[1]) + Json: tuple(it[0],it[2]) + } + +niiMTS.MTw + .join(jsonMTS.MTw) + .set {pairMTw} + +MTw = pairMTw + .multiMap { it -> + Nii: tuple(it[0],it[1]) + Json: tuple(it[0],it[2]) + } -/* Merge PDw, MTw and T1w for alignment*/ -pdw - .join(mtw) - .join(t1w_pre_ch1) +niiMTS.T1w + .join(jsonMTS.T1w) + .set {pairT1w} + +T1w = pairT1w + .multiMap { it -> + Nii: tuple(it[0],it[1]) + Json: tuple(it[0],it[2]) + } + + +// ================================== IMPORTANT +// TUPLE ORDER: PDW --> MTW --> T1W +// NII --> JSON +// CRITICAL TO FOLLOW THE SAME ORDER IN INPUTS + +PDw.Nii + .join(MTw.Nii) + .join(T1w.Nii) .set{mtsat_for_alignment} -log.info "SCT: spinal cord analysis pipeline" -log.info "==================================" -log.info "" -// Artwork created with https://patorjk.com/software/taag/#p=display&f=Doom&t=Neuromod%20-%20SCT -log.info "_ _ _ _____ _____ _____ " -log.info "| \ | | | | / ___/ __ \_ _|" -log.info "| \| | ___ _ _ _ __ ___ _ __ ___ ___ __| | ______ \ `--.| / \/ | |" -log.info "| . ` |/ _ \ | | | '__/ _ \| '_ ` _ \ / _ \ / _` | |______| `--. \ | | |" -log.info "| |\ | __/ |_| | | | (_) | | | | | | (_) | (_| | /\__/ / \__/\ | |" -log.info "\_| \_/\___|\__,_|_| \___/|_| |_| |_|\___/ \__,_| \____/ \____/ \_/" -log.info "" -log.info "Start time: $workflow.start" -log.info "" -log.info "" -log.info "DATA" -log.info "====" -log.info "" -log.info "BIDS option has been enabled." 
-log.warn "qMRI protocols will be read from sidecar .json files for MTSAT and MTR." -log.warn "Some protocols for MP2RAGE are hardcoded." -log.info "" -log.info "OPTIONS" -log.info "=======" -log.info "" -log.info "[GLOBAL]" -log.info "---------------" -log.info "Selected platform: $params.platform" -log.info "BET enabled: $params.use_bet" -log.info "B1+ correction enabled: $params.use_b1cor" -log.info "" -log.info "[ANTs Registration]" -log.info "-------------------" -log.info "Dimensionality: $params.ants_dim" -log.info "Metric: $params.ants_metric" -log.info "Weight: $params.ants_metric_weight" -log.info "Number of bins: $params.ants_metric_bins" -log.info "Sampling type: $params.ants_metric_sampling" -log.info "Sampling percentage: $params.ants_metric_samplingprct" -log.info "Transform: $params.ants_transform" -log.info "Convergence: $params.ants_convergence" -log.info "Shrink factors: $params.ants_shrink" -log.info "Smoothing sigmas: $params.ants_smoothing" -log.info "" -log.info "[FSL BET]" -log.info "---------------" -log.info "Enabled: $params.use_bet" -log.info "Fractional intensity threshold: $params.bet_threshold" -log.info "Robust brain center estimation: $params.bet_recursive" -log.info "" -log.info "[qMRLab mt_sat]" -log.info "---------------" -log.warn "Acquisition protocols will be read from sidecar .json files (BIDS)." -if (params.use_b1cor){ -log.info "B1+ correction has been ENABLED." -log.warn "Process will be skipped for participants missing a B1map file." -log.info "B1 correction factor: $params.b1cor_factor"} -if (!params.use_b1cor){ -log.info "B1+ correction has been DISABLED." -log.warn "Process will NOT take any (possibly) existing B1maps into account." +niiMP2RAGE.UNIT1 + .join(jsonMP2RAGE.UNIT1) + .set{pairMP2RAGE} + + +process publishOutputs { + + exec: + out = getSubSesEntity("${sid}") + + input: + tuple val(sid), \ + path(mtw_aligned), path(pdw_aligned), \ + path(mtw_disp), path(pdw_disp), \ + path(t1map), path(mtsat), path(t1mapj), \ + path(mtsatj), path(qmrmodel), path(mp2raget1), \ + path(mp2raget1j),path(mp2rager1),path(mp2rager1j), path(mp2ragemodel), \ + path(mtrnii), path(mtrjson), path(mtrmodel) + + publishDir "${derivativesDir}/${out.sub}/${out.ses}anat", mode: 'move', overwrite: true + + output: + tuple val(sid), \ + path(mtw_aligned), path(pdw_aligned), \ + path(mtw_disp), path(pdw_disp), \ + path(t1map), path(mtsat), path(t1mapj), \ + path(mtsatj), path(qmrmodel), path(mp2raget1), \ + path(mp2raget1j),path(mp2rager1),path(mp2rager1j), path(mp2ragemodel), \ + path(mtrnii), path(mtrjson), path(mtrmodel) + + script: + """ + mkdir -p ${derivativesDir} + echo "Transferring ${mtw_aligned} to ${derivativesDir}/${out.sub}/${out.ses}anat folder..." 
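+    # Note: the files declared in the output block are transferred by the publishDir directive above (mode: 'move'); this script block only creates the derivatives folder and logs progress.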
+ """ } -log.info "" -log.info "=======================" -process T2_Segment_SpinalCord { - tag "${sid}" - publishDir "$bids/derivatives/sct/${sid}", mode: 'copy' +process publishOutputsFmap { + + exec: + out = getSubSesEntity("${sid}") input: - tuple val(sid), file(pdw), file(mtw), file(t1w) from mtsat_for_alignment + tuple val(sid), \ + path(b1res), path(smooth) + + publishDir "${derivativesDir}/${out.sub}/${out.ses}fmap", mode: 'move', overwrite: true output: - tuple val(sid), "${sid}_acq-MTon_MTS_aligned.nii.gz", "${sid}_acq-MToff_MTS_aligned.nii.gz"\ - into mtsat_from_alignment - file "${sid}_acq-MTon_MTS_aligned.nii.gz" - file "${sid}_acq-MToff_MTS_aligned.nii.gz" + tuple val(sid), \ + path(b1res),path(smooth) script: - // TODO: add QC report """ - sct_deepseg_sc -i ${sid}_bp-cspine_T2w.nii.gz -c t2 + mkdir -p ${derivativesDir} """ } +// Here we include all the processes from modules/ +include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' + +// Pipeline starts here +workflow { + +fitMp2rageUni(pairMP2RAGE) + +publish_mp2rage = fitMp2rageUni.out.mp2rage_output +// EXECUTE PROCESS (tuple order: sid, pdw, mtw, t1w) +alignMtsatInputs(mtsat_for_alignment) + +// Get aligned images (tuple order: sid, pdw, mtw, pdwdisp, mtwdisp) +mtsat_from_alignment = alignMtsatInputs.out.mtsat_from_alignment + +// All these files will be eventually published, but we need a subsample of them +// to proceed with the workflow, which are first 3 tuple elements (sid, pdw, mtw) +mtsat_from_alignment + .multiMap{it -> + Publish: it + Fit: tuple(it[0],it[1],it[2]) + Mtr: tuple(it[0],it[1],it[2]) + } + .set {Aligned} + +// EXECUTE PROCESS (tuple order: sid, t1w) +extractBrain(T1w.Nii) + +mask_from_bet = extractBrain.out.mask_from_bet + +if (!params.use_bet){ + Channel + .empty() + .set{mask_from_bet} +} + +// Clone +Mask = mask_from_bet + //.multiMap { it -> Split1: Split2: Split3: it } + +// Join channels by tuple index for resampling b1+ map (ref t1w) +T1w.Nii + .join(B1.AngleMap) + .set{b1_for_alignment} + +// Process val(sid), file(t1w), file(b1raw) +resampleB1(b1_for_alignment) + +// Collect output +b1_resampled = resampleB1.out.b1_resampled + +// Create empty channel as b1_resampled output is optional. 
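+// (When use_b1cor is false, b1_resampled is re-defined as an explicitly empty channel so the downstream joins stay well defined.)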
+if (!params.use_b1cor){ + Channel + .empty() + .set{b1_resampled} +} + + +// Join channels for smoothing with map +b1_resampled + .join(Mask) + .set {b1_for_smoothing_with_mask} + +// EXECUTE PROCESS (tuple order: sid, b1, mask) +smoothB1WithMask(b1_for_smoothing_with_mask) + +// Collect ouputs +b1_filtered_w_mask = smoothB1WithMask.out.b1_filtered_w_mask + .multiMap{it-> + Publish: it + Nii: tuple(it[0],it[1]) + } + +// EXECUTE PROCESS (tuple order: sid, b1) +smoothB1WithoutMask(b1_resampled) + +// Collect ouputs +b1_filtered_wo_mask = smoothB1WithoutMask.out.b1_filtered_wo_mask + .multiMap{it-> + Publish: it + Nii: tuple(it[0],it[1]) + } + +// Join data channels based on parameter selection +// Fit with B1 +if (params.use_bet){ + +Aligned.Fit + .join(T1w.Nii) + .join(PDw.Json) + .join(MTw.Json) + .join(T1w.Json) + .join(b1_filtered_w_mask.Nii) + .set{fitting_with_b1} + +}else{ + +Aligned.Fit + .join(T1w.Nii) + .join(PDw.Json) + .join(MTw.Json) + .join(T1w.Json) + .join(b1_filtered_wo_mask.Nii) + .set{fitting_with_b1} + +} + +// MTR with mask +Aligned.Mtr + .join(Mask) + .set{fitting_mtr} + +// Fit without B1 map channel +Aligned.Fit + .join(T1w.Nii) + .join(PDw.Json) + .join(MTw.Json) + .join(T1w.Json) + .set{mtsat_fitting_without_b1} + +mtsat_fitting_without_b1 + .join(Mask) + .set{ fitting_without_b1_bet} + +fitting_with_b1 + .join(Mask) + .set{mtsat_with_b1_bet} + +fitMtsatWithB1Mask(mtsat_with_b1_bet) + +fitMtsatWithB1(fitting_with_b1) + +fitMtsatWithBet(fitting_with_b1) + +fitMtsat(mtsat_fitting_without_b1) + +// Fit MTR +fitMtratioWithMask(fitting_mtr) + +Aligned.Publish + .join(fitMtsatWithB1Mask.out.publish_mtsat) + .join(publish_mp2rage) + .join(fitMtratioWithMask.out.mtratio_output) + .set {publish} + +b1_resampled + .join(b1_filtered_w_mask.Nii) + .set{publishfmap} + +publishOutputs(publish) +publishOutputsFmap(publishfmap) + +} + + From 72d30aaf5d5f3100f6ed447bb68741c8ff7e5ac3 Mon Sep 17 00:00:00 2001 From: Agah Karakuzu Date: Wed, 28 Apr 2021 22:44:24 -0400 Subject: [PATCH 04/13] Add placeholder examples for SC workflow --- USAGE-ANAT | 157 ++++++++++ USAGE-SCT | 79 +++++ modules/spinalcord_mtsat.nf | 29 ++ modules/spinalcord_segmentation.nf | 27 ++ neuromod-process-spinalcord.config | 40 +++ neuromod-process-spinalcord.nf | 458 +++++++++++------------------ neuromod_anat_dsl2.nf | 4 +- 7 files changed, 505 insertions(+), 289 deletions(-) create mode 100644 USAGE-ANAT create mode 100644 USAGE-SCT create mode 100644 modules/spinalcord_mtsat.nf create mode 100644 modules/spinalcord_segmentation.nf create mode 100644 neuromod-process-spinalcord.config diff --git a/USAGE-ANAT b/USAGE-ANAT new file mode 100644 index 0000000..3686989 --- /dev/null +++ b/USAGE-ANAT @@ -0,0 +1,157 @@ +qMRflow mt_sat pipeline +https://qmrlab.org +=================== + +USAGE + +nextflow run mtsatflow_BIDS.nf [OPTIONAL_ARGUMENTS] (--root) + +DESCRIPTION + + --root=/path/to/[root] Root folder containing multiple subjects + +FOLDER ORGANIZATION + +BIDS convention For mtsatflow_BIDS.nf + + [root] + ├── sub-01 + │ │── anat + │ │ ├── sub-01_acq-MTon_MTS.nii.gz + | | ├── sub-01_acq-MTon_MTS.json + │ │ ├── sub-01_acq-MToff_MTS.nii.gz + │ │ ├── sub-01_acq-MToff_MTS.json + │ │ ├── sub-01_acq-T1w_MTS.nii.gz + │ │ └── sub-01_acq-T1w_MTS.json + │ └── fmap + │ └── sub-01_B1plusmap.nii.gz (optional) + └── sub-02 + │── anat + │ ├── sub-02_acq-MTon_MTS.nii.gz + | ├── sub-02_acq-MTon_MTS.json + │ ├── sub-02_acq-MToff_MTS.nii.gz + │ ├── sub-02_acq-MToff_MTS.json + │ ├── sub-02_acq-T1w_MTS.nii.gz + │ └── 
sub-02_acq-T1w_MTS.json + └── fmap + └── sub-02_B1plusmap.nii.gz (optional) + +OPTIONAL ARGUMENTS + +--platform ["octave"/"matlab"] Platform choice. +--qmrlab_dir ["/path/to/qMRLab" OR null] Absolute path to the qMRLab's + root directory. If docker is enabled, MUST be set + to null (without double quotes). If docker is NOT enabled, + then the absolute path to the qMRLab MUST be provided. + Note that qMRLab version MUST be equal or greater than v2.3.1. +--octave_path ["/path/to/octave_exec" OR null] Absolute path to Octave's + executable. If docker is enabled, or, if you'd like to use + Octave executable saved to your system path, MUST be set to + null (without double quotes). +--matlab_path ["/path/to/matlab_exec" OR null] Absolute path to MATLAB's + executable. If you'd like to use MATLAB executable saved to + your system path, MUST be set to null (without double quotes). + Note that qMRLab requires MATLAB > R2014b. Docker image + containing MCR compiled version of this application is NOT + available yet. Therefore, container declerations for the + processes starting with Fit_ prefix MUST be set to null + (without double quotes). +--ants_dim [2/3/4] This option forces the image to be treated + as a specified-dimensional image. If not specified, + ANTs tries to infer the dimensionality. +--ants_metric ["MI"] Confined to MI: Mutual information, for this + particular pipeline. +--ants_metric_weight [0-1] If multimodal (i.e. changing contrast) use weight 1. + This parameter is used to modulate the per stagehting + of the metrics. +--ants_metric_bins [e.g. 32] Number of bins. +--ants_metric_sampling ["Regular","Random:]The point set can be on a regular + lattice or a random lattice of points slightly perturbed + to minimize aliasing artifacts. +--ants_metric_samplingprct [0-100] The fraction of points to select from the domain +--ants_transform "Rigid" + "Affine" + "CompositeAffine" + "Similarity" + "Translation" + "BSpline" +--ants_convergence [MxNxO,,] + Convergence is determined from the number of iterations per level + and is determined by fitting a line to the normalized energy + profile of the last N iterations (where N is specified by the window + size) and determining the slope which is then compared with the convergence threshold. +--ants_shrink [MxNxO] Specify the shrink factor for the virtual domain (typically + the fixed image) at each level. +--ants_smoothing [MxNxO] Specify the sigma of gaussian smoothing at each level. + Units are given in terms of voxels ('vox') or physical spacing ('mm'). + Example usage is '4x2x1mm' and '4x2x1vox' where no units implies voxel spacing. +--use_b1cor [true/false] Use and RF transmit field to correct for flip + angle imperfections. +--b1cor_factor [0-1] Correction factor (empirical) for the transmit RF. Only + corrects MTSAT, not T1. Default 0.4. +--use_bet Use FSL's BET for skull stripping. +--bet_recursive [true/false] This option runs more "robust" brain center estimation. +--bet_threshold [0-1] Fractional intensity threshold (0->1); default=0.45; + smaller values give larger brain outline estimates + + +NOTES + +- BIDS: + + mtsatflow_BIDS.nf To process BIDSified MTsat data. Note that BIDS for + quantitative MRI data is under development as of + early 2020. You can visit the GitHub project page + [here](https://github.com/bids-standard/bep001). 
+- Example datasets:
+
+    Custom-organized data   TBA
+    BIDSified MTsat data    https://osf.io/k4bs5/
+
+- Files should be compressed Nifti files (.nii.gz)
+
+- Timing parameters in the .json files MUST be in seconds.
+
+- Subject IDs are used as the primary process ID and tag throughout the pipeline.
+
+- We adhere to a strict one-process one-container mapping, where possible using off-the-shelf
+  qMRLab containers.
+
+- All the OPTIONAL ARGUMENTS can be modified in the `nextflow.config` file. The same
+  config file is consumed by both `mtsatflow.nf` and `mtsatflow_BIDS.nf`.
+
+- Unless the docker option is enabled in the `nextflow.config`, the following
+  dependencies must be installed and added to the system path:
+       * ANTs registration (https://github.com/ANTsX/ANTs)
+       * FSL (https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/)
+       * Octave/MATLAB (https://www.gnu.org/software/octave/)
+                       (https://www.mathworks.com/)
+       * qMRLab > v2.3.1 (https://qmrlab.org)
+       * git
+
+If you have Docker installed, enabling the docker option will make use of the
+following Docker images to execute processes:
+    - qmrlab/minimal (https://hub.docker.com/repository/docker/qmrlab/minimal)
+      594MB extends to 1.5GB
+      Built at each qMRLab release.
+    - qmrlab/antsfsl (https://hub.docker.com/repository/docker/qmrlab/antsfsl)
+      374MB extends to 1.2GB
+      Dockerfile is available at qMRLab/qMRflow.
+
+- You can take advantage of Nextflow's comprehensive tracing and visualization
+  features while executing this pipeline: https://www.nextflow.io/docs/latest/tracing.html.
+
+- For any requests, questions or contributions, please feel free to open
+  an issue at qMRflow's GitHub repo at https://github.com/qMRLab/qMRflow.
+
+REFERENCES
+
+Please cite the following if you use this module:
+
+    Helms, G. et al. 2008. High-resolution maps of magnetization transfer with inherent correction
+    for RF inhomogeneity and T1 relaxation obtained from 3D FLASH MRI. Magn. Reson. Med. 60, 1396-1407.
+
+In addition to:
+
+    Karakuzu A. et al. 2019. The qMRLab workflow: From acquisition to publication. ISMRM 27th Annual
+    Meeting and Exhibition, Montreal, Canada.
\ No newline at end of file
diff --git a/USAGE-SCT b/USAGE-SCT
new file mode 100644
index 0000000..0c36d7a
--- /dev/null
+++ b/USAGE-SCT
@@ -0,0 +1,79 @@
+Neuromod spinal cord data processing pipeline
+===================
+
+USAGE
+
+Prototype:
+----------
+nextflow -C neuromod-process-spinalcord.config run neuromod-process-spinalcord.nf [OPTIONAL_ARGUMENTS] (--bids)
+
+Example:
+---------
+nextflow -C neuromod-process-spinalcord.config -log log_directory/sct.log run neuromod-process-spinalcord.nf --bids /neuromod_bids_dir -w /work_directory -with-report report_spinal.html
+
+DESCRIPTION
+
+  --bids=/path/to/[root]   Root folder containing BIDS data
+  -C                       Use the specified configuration file, overriding any defaults.
+  -log                     Set the nextflow log file path.
+  -w                       The working directory. Note that if you delete or move the pipeline
+                           work directory, this will prevent you from using the resume feature in
+                           subsequent runs. Also note that the pipeline work directory is intended
+                           to be used as a temporary scratch area. The final workflow outputs are
+                           expected to be stored in a different location specified using the
+                           publishDir directive.
+  -with-report             Specify a report name (e.g. report.html) to generate an interactive
+                           workflow execution report.
+  -resume                  Pass as the last argument if you'd like to resume a previously cached
+                           workflow that was interrupted.
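+
+Resuming an interrupted run (an illustrative example; the BIDS and work directory paths are placeholders):
+---------
+nextflow -C neuromod-process-spinalcord.config run neuromod-process-spinalcord.nf --bids /neuromod_bids_dir -w /work_directory -resume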
+
+FOLDER ORGANIZATION
+
+BIDS convention For MTsat data
+
+    [root]
+    ├── sub-01
+         │── ses-001/anat
+         │    ├── sub-01_acq-MTon_bp-spine_MTS.nii.gz
+         |    ├── sub-01_acq-MTon_bp-spine_MTS.json
+         │    ├── sub-01_acq-MToff_bp-spine_MTS.nii.gz
+         │    ├── sub-01_acq-MToff_bp-spine_MTS.json
+         │    ├── sub-01_acq-T1w_bp-spine_MTS.nii.gz
+         │    └── sub-01_acq-T1w_bp-spine_MTS.json
+         └── fmap
+             └── sub-01_TB1map.nii.gz (optional)
+
+
+OPTIONAL ARGUMENTS
+
+When passed as an argument during the `nextflow run` call, the respective value defined for that
+parameter in the neuromod-process-spinalcord.config will be overridden.
+
+--sct_parameter [/this/that/type]           Explanation.
+--sct_another_parameter [/this/that/type]   Explanation.
+
+
+NOTES
+
+- Timing parameters in the .json files MUST be in seconds.
+
+- Subject IDs are used as the primary process ID and tag throughout the pipeline. Defined by the
+  following BIDS entities:
+    - sub
+    - ses
+    - rec
+    - acq (unless reserved for inferring file pairs)
+
+- All the OPTIONAL ARGUMENTS can be modified in the `nextflow.config` file. The same
+  config file is consumed by both `mtsatflow.nf` and `mtsatflow_BIDS.nf`.
+
+- Unless the docker option is enabled in the `nextflow.config`, the following
+  dependencies must be installed and added to the system path:
+    * SCT
+
+- You can take advantage of Nextflow's comprehensive tracing and visualization
+  features while executing this pipeline: https://www.nextflow.io/docs/latest/tracing.html.
+
+REFERENCES
+
+Please cite the following if you use this module:
diff --git a/modules/spinalcord_mtsat.nf b/modules/spinalcord_mtsat.nf
new file mode 100644
index 0000000..202a11f
--- /dev/null
+++ b/modules/spinalcord_mtsat.nf
@@ -0,0 +1,29 @@
+// Enable DSL2 so that this can be imported as a module
+nextflow.enable.dsl=2
+
+/* This PLACEHOLDER process is to explain the relationship
+between the files emitted by the input channels, the process, and
+how those files are processed to emit outputs */
+
+process MTS_Align_SpinalCord{
+    tag { sid }
+
+    input:
+      tuple val(sid), file(pdw), file(mtw), file(t1w)
+
+    output:
+      tuple val(sid), \
+      path("${sid}_MTSAT_example.txt"), \
+      emit: publish_spinal_mtsat
+
+    script:
+        """
+        echo "sct_parameter was $params.sct_parameter\n" >> ${sid}_MTSAT_example.txt
+        echo "the other one was $params.sct_another_parameter\n" >> ${sid}_MTSAT_example.txt
+        echo "sid is ${sid}\n" >> ${sid}_MTSAT_example.txt
+        echo "MTon is $mtw\n" >> ${sid}_MTSAT_example.txt
+        echo "MToff is $pdw\n" >> ${sid}_MTSAT_example.txt
+        echo "T1w is $t1w\n" >> ${sid}_MTSAT_example.txt
+        echo "ENV vars can be captured: $PATH\n" >> ${sid}_MTSAT_example.txt
+        """
+}
\ No newline at end of file
diff --git a/modules/spinalcord_segmentation.nf b/modules/spinalcord_segmentation.nf
new file mode 100644
index 0000000..835645d
--- /dev/null
+++ b/modules/spinalcord_segmentation.nf
@@ -0,0 +1,27 @@
+// Enable DSL2 so that this can be imported as a module
+nextflow.enable.dsl=2
+
+/* This PLACEHOLDER process is to explain the relationship
+between the files emitted by the input channels, the process, and
+how those files are processed to emit outputs */
+
+process T2_Segment_SpinalCord{
+    tag { sid }
+
+    input:
+      tuple val(sid), file(t2w)
+
+    output:
+      tuple val(sid), \
+      path("${sid}_seg-gm_mask.txt"), \
+      path("${sid}_seg-wm_mask.txt"), \
+      path("${sid}_seg-csf_mask.txt"), \
+      emit: publish_spinal_seg
+
+    script:
+        """
+        echo "From $t2w segmented GM\n" >> ${sid}_seg-gm_mask.txt
+        echo "From $t2w segmented WM" >> ${sid}_seg-wm_mask.txt
+        echo "From $t2w segmented CSF" >> ${sid}_seg-csf_mask.txt
+        """
+}
\ No newline at end of file
diff --git a/neuromod-process-spinalcord.config b/neuromod-process-spinalcord.config
new file mode 100644
index 0000000..1783e05
--- /dev/null
+++ b/neuromod-process-spinalcord.config
@@ -0,0 +1,40 @@
+//** Official documentation for config files: https://www.nextflow.io/docs/latest/config.html **//
+
+process {
+
+    withName: MTS_Align_SpinalCord {
+        cpus = 2
+        memory = 1.GB
+        maxForks = 4
+    }
+
+}
+
+params {
+
+    sct_parameter= "This is how default params are defined"
+    sct_another_parameter= 99
+
+}
+
+//** Default one is standard (local), but other options are easily possible **//
+
+profiles {
+
+    standard {
+        process.executor = 'local'
+    }
+
+    cluster {
+        process.executor = 'slurm'
+        process.queue = 'long'
+        process.memory = '10GB'
+    }
+
+    cloud {
+        process.executor = 'awsbatch'
+        process.container = 'docker.io/neuropoly/sct'
+        docker.enabled = true
+    }
+
+}
diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf
index 57e94c3..910c754 100644
--- a/neuromod-process-spinalcord.nf
+++ b/neuromod-process-spinalcord.nf
@@ -1,75 +1,38 @@
 #!/usr/bin/env nextflow
 
-/*
-WIP DSL2 workflow for Neuromod anat processing.
+/*
+Header comment goes here.
 
-Dependencies:
-    These dependencies must be installed if Docker is not going
-    to be used.
-    - Advanced notmarization tools (ANTs, https://github.com/ANTsX/ANTs)
-    - FSL
-    - qMRLab (https://qmrlab.org)
-    - git
+TO DISPLAY HELP:
+----------------
+nextflow -C neuromod-process-spinalcord.config run neuromod-process-spinalcord.nf --help
 
-Docker:
-    - https://hub.docker.com/u/qmrlab
-    - qmrlab/minimal:v2.5.0b
-    - qmrlab/antsfsl:latest
+SIMPLEST USE:
+-------------
+nextflow -C neuromod-process-spinalcord.config run neuromod-process-spinalcord.nf --bids ~/neuromod_bids_dir
+*/
 
-Author:
-    Agah Karakuzu 2019
-    agahkarakuzu@gmail.com
-
-Users: Please see USAGE for further details
- */
-
-/*Set defaults for parameters determining logic flow to false*/
+// Enable NEXTFLOW DSL2 to be able to include modules.
 nextflow.enable.dsl=2
+
+// Include bids_patterns module to infer session-level file
+// organization. This determines the depth of directories
+// to fetch input file pairs.
 include { getSubSesEntity; checkSesFolders } from './modules/bids_patterns'
 
+// Default behaviour if user passes
+// --bids or --help during workflow call, will be overridden.
 params.bids = false
 params.help = false
 
-log.info "## # ###### # # ##### ####  # # ####  ##### "
-log.info " # # # #      #  #  #    # #    # ## ## #    # #    # "
-log.info " # #  # # #####  #  #  #    # #    # # ## # #    # #    # "
-log.info " # # # # #      #  #  #####  #    # #    # #    # #    # "
-log.info " # ## # #      #  #  #   #  #    # #    # #    # #    # "
-log.info " # # ###### ####  #    #  ####  #    #  ####  ##### "
+// Print some fancy ASCII art.
+log.info "███  ██ ███████ ██  ██ ██████  ██████  ███  ███  ██████  ██████  ███████  ██████ ████████ "
+log.info "████  ██ ██      ██  ██ ██   ██ ██    ██ ████  ████ ██    ██ ██   ██  ██      ██         ██    "
+log.info "██ ██  ██ █████  ██  ██ ██████  ██  ██ ██ ████ ██ ██  ██ ██  ██ █████ ███████ ██  ██  "
+log.info "██  ██ ██ ██     ██  ██ ██   ██ ██  ██ ██  ██  ██ ██  ██ ██  ██            ██ ██  ██  "
+log.info "██   ████ ███████  ██████  ██  ██  ██████  ██      ██  ██████  ██████   ███████  ██████  ██  "
 
-/* Call to the mt_sat_wrapper.m will be invoked by params.runcmd.
-Depending on the params.platform selection, params.runcmd
-may point to MATLAB or Octave.
-*/
-if (params.platform == "octave"){
-
-    if (params.octave_path){
-        log.info "Using Octave executable declared in nextflow.config."
-        params.octave = params.octave_path + " --no-gui --eval"
-    }else{
-        log.info "Using Octave in Docker or (if local) from the sys path."
-        params.octave = "octave --no-gui --eval"
-    }
-
-    params.runcmd = params.octave
-}
-
-if (params.platform == "matlab"){
-
-    if (params.matlab_path){
-        log.info "Using MATLAB executable declared in nextflow.config."
-        params.matlab = params.matlab_path + " -nodisplay -nosplash -nodesktop -r"
-    }else{
-        log.info "Using MATLAB from the sys path."
- params.matlab = "matlab -nodisplay -nosplash -nodesktop -r" - } - - params.runcmd = params.matlab -} +// Print some fancy ASCII art. +log.info "███  ██ ███████ ██  ██ ██████  ██████  ███  ███  ██████  ██████  ███████  ██████ ████████ " +log.info "████  ██ ██      ██  ██ ██   ██ ██    ██ ████  ████ ██    ██ ██   ██  ██      ██         ██    " +log.info "██ ██  ██ █████  ██  ██ ██████  ██  ██ ██ ████ ██ ██  ██ ██  ██ █████ ███████ ██  ██  " +log.info "██  ██ ██ ██     ██  ██ ██   ██ ██  ██ ██  ██  ██ ██  ██ ██  ██            ██ ██  ██  " +log.info "██   ████ ███████  ██████  ██  ██  ██████  ██      ██  ██████  ██████   ███████  ██████  ██  " -params.wrapper_repo = "https://github.com/qMRLab/qMRWrappers.git" - +// This log block is executed when the workflow is completed without errors workflow.onComplete { log.info "Pipeline completed at: $workflow.complete" log.info "Execution status: ${ workflow.success ? 'OK' : 'failed' }" @@ -77,30 +40,15 @@ workflow.onComplete { log.info "Mnemonic ID: $workflow.runName" } -/*Define bindings for --help*/ +/*Define what is shown when --help is called*/ if(params.help) { - usage = file("$baseDir/USAGE") + // To print USAGE-SCT file + usage = file("$baseDir/USAGE-SCT") + // To print paremeter-values cpu_count = Runtime.runtime.availableProcessors() - bindings = ["ants_dim":"$params.ants_dim", - "ants_metric":"$params.ants_metric", - "ants_metric_weight":"$params.ants_metric_weight", - "ants_metric_bins":"$params.ants_metric_bins", - "ants_metric_sampling":"$params.ants_metric_sampling", - "ants_metric_samplingprct":"$params.ants_metric_samplingprct", - "ants_transform":"$params.ants_transform", - "ants_convergence":"$params.ants_convergence", - "ants_shrink":"$params.ants_shrink", - "ants_smoothing":"$params.ants_smoothing", - "use_b1cor":"$params.use_b1cor", - "b1cor_factor":"$params.b1cor_factor", - "use_bet":"$params.use_bet", - "bet_recursive":"$params.bet_recursive", - "bet_threshold":"$params.bet_threshold", - "platform":"$params.platform", - "matlab_path":"$params.matlab_path", - "octave_path":"$params.octave_path", - "qmrlab_path":"$params.qmrlab_path" + bindings = ["sct_parameter":"$params.sct_parameter", + "sct_another_parameter":"$params.sct_another_parameter" ] engine = new groovy.text.SimpleTemplateEngine() @@ -110,26 +58,56 @@ if(params.help) { return } +// Infer entity-level file organization. +// "$bids/${entity.dirInputLevel}" will inform nextflow to look for +// inputs in the correct directory. entity = checkSesFolders() +// Fetch input channels only when the BIDS directory is provided. Terminate otherwise. if(params.bids){ log.info "Input: $params.bids" bids = file(params.bids) - derivativesDir = "$params.qmrlab_derivatives" - log.info "Derivatives: $params.qmrlab_derivatives" + // DEFINE DERIVATIVES DIRECTORY + derivativesDir = "$params.bids/derivatives/SCT" + log.info "Derivatives: $derivativesDir" log.info "Nextflow Work Dir: $workflow.workDir" + // As channels define which files will be symbolically linked to the work directory, we need to + // explicitly define what we need. For example, we cannot emit a channel for nii.gz files, then + // expect a process to find .json by doing some string manipulation. That's why we'll use the + // same pattern once again for json files. + + // Initialize a channel for input file pairs of MTsat spinal cord data. 
+ // "$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_bp-spinalcord_MTS.nii.gz" describes that + // file pairs of MToff, MTon and T1w will be fetched for the glob pattern that allows multiple subjects + // and sessions. This way, we will have all the MTsat related BIDS(nii) inputs fetched, across subjects and + // sessions. Channel - .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) + // maxDepth is 3 anat/sub/ses size is 3, because we have 3 inputs for MTsat. + // The fromFilePairs requires the flat:true option to have the file pairs as separate elements + // in the produced tuples. + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_bp-cspine_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) + // 1) sid is the subject ID, i.e. whatever grabbed by * above (e.g. sub-01_ses-003) + // 2) .multiMap allows us to forward the items emitted by a source channel to two or + // more output channels mapping each input value as a separate element. .multiMap {sid, MToff, MTon, T1w -> PDw: tuple(sid, MToff) MTw: tuple(sid, MTon) T1w: tuple(sid, T1w) } + // This is how we set the channel's name! .set {niiMTS} + // Now, this is what we expect to find under niiMTS: + // - niiMTS.PWD [[sub-01_ses001, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz], + // [sub-01_ses002, sub-01_ses-002_acq-MToff_bp-spinalcord_MTS.nii.gz]...] + // - niiMTS.T1w [[sub-01_ses001, sub-01_ses-001_acq-T1w_bp-spinalcord_MTS.nii.gz], + // [sub-01_ses002, sub-01_ses-002_acq-T1w_bp-spinalcord_MTS.nii.gz]...] + // Similar for MTw. + // This will do the same thing for spinal cord MTS iput pairs to fetch json files under 3 + // channels that lives under jsonMTS namespace. Again, each channel contains tuples of [[sid, filename],...]. Channel - .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_MTS.json", maxDepth: 3, size: 3, flat: true) + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_bp-cspine_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) .multiMap {sid, MToff, MTon, T1w -> PDw: tuple(sid, MToff) MTw: tuple(sid, MTon) @@ -137,41 +115,66 @@ if(params.bids){ } .set {jsonMTS} + // Create a channel for spinal cord T2w inputs Channel - .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_UNIT1.nii.gz", maxDepth: 3, size: 1, flat: true) - .multiMap { it -> UNIT1: it } - .set {niiMP2RAGE} - - Channel - .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_UNIT1.json", maxDepth: 3, size: 1, flat: true) - .multiMap { it -> UNIT1: it } - .set {jsonMP2RAGE} - + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T2w.nii.gz", maxDepth: 3, size: 1, flat: true) + .multiMap { it -> Nii: it } + .set {T2w} - /* ==== BIDS: B1 map ==== */ - /* ==== BIDS: B1 map ==== */ - /* Look for B1map in fmap folder */ - Channel - .fromFilePairs("$bids/**/**/fmap/sub-*_acq-flipangle_dir-AP_B1plusmap.nii.gz", maxDepth:3, size:1, flat:true) - .multiMap { it -> AngleMap: it } - .set {B1} } else{ - error "ERROR: Argument (--bids) must be passed. See USAGE." + error "ERROR: Argument (--bids) must be passed. See USAGE-SCT." } -// First, join nii & json pairs with explicit notations -// provided by the multimap +/** +>>>> What is the function of the .join operation? + +The join operator creates a channel that joins together the items emitted by two channels for which +exits a matching key. In our case, matching key is sid. 
+ +Given the following channels: + Ch-nii [sub-01_ses-001,sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz] + Ch-json [sub-01_ses-001,sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.json] + +and the following expression: + Ch-nii + .join(Ch-json) + .set(pairExample) + +The new pairExample channel will look like: + [sub-01_ses-001, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz] + +>>>> IMPORTANT <<<<<< + +The order at which you joing channels is highly critical. In this example we joined Ch-json into +Ch-nii. Therefore the order will be [sid, nii, json] for indexes [0,1,2]. Let's say we will send this +pairExample channel to a process as an input: + +myProcess(pairExample) + +In pairExample, the input should be: + +input: + tuple(sid), file(nii), file(json) + +So that $nii and $json represent a nii and json file, respectively. Swapping the order of +file(nii) and file(json) would result in $nii variable representing json files and vice versa. +**/ + +// Create a channel with name pairPDw that contains [sid, nii, json] tuples +// for all the *_acq-MToff_ files found in the dataset. niiMTS.PDw .join(jsonMTS.PDw) .set {pairPDw} +// Re-organize pairPDW so that we can access NIfTI files via PDw.Nii and json +// files via PDw.Json. This reduces the chance of messing up the tuple order. PDw = pairPDw .multiMap { it -> Nii: tuple(it[0],it[1]) Json: tuple(it[0],it[2]) } - +// Follow the same semantic channel organization for the remaining MTsat file pairs. niiMTS.MTw .join(jsonMTS.MTw) .set {pairMTw} @@ -192,228 +195,109 @@ T1w = pairT1w Json: tuple(it[0],it[2]) } +// At this point we have PDW, MTw and T1w (each containing Nii and Json sub-channels) +// for all the spinal-cord data. Now we can combine them as required by the process we +// will subject them to. -// ================================== IMPORTANT -// TUPLE ORDER: PDW --> MTW --> T1W -// NII --> JSON -// CRITICAL TO FOLLOW THE SAME ORDER IN INPUTS +/** >> EXAMPLE +Let's say we have a process named alignInputs that needs nifti files only. +**/ +// This is how we collect inputs for the alignInputs process: PDw.Nii .join(MTw.Nii) .join(T1w.Nii) + // ..._for_... convention indicates that this channel is intended as an input to a process. .set{mtsat_for_alignment} -niiMP2RAGE.UNIT1 - .join(jsonMP2RAGE.UNIT1) - .set{pairMP2RAGE} +/** +Now mtsat_for_alignment is a tuple that looks like: + [[sub-01_ses-001, sub-01_ses-001_acq-MToff..nii.gz, sub-01_ses-001_acq-MTon..nii.gz, sub-01_ses-001_acq-T1w..nii.gz]] +This is the reason why input declaration of alignInputs process MUST respect the following order: -process publishOutputs { +input: + tuple(sid), file(PDw), file(MTw), file(T1w) - exec: - out = getSubSesEntity("${sid}") - input: - tuple val(sid), \ - path(mtw_aligned), path(pdw_aligned), \ - path(mtw_disp), path(pdw_disp), \ - path(t1map), path(mtsat), path(t1mapj), \ - path(mtsatj), path(qmrmodel), path(mp2raget1), \ - path(mp2raget1j),path(mp2rager1),path(mp2rager1j), path(mp2ragemodel), \ - path(mtrnii), path(mtrjson), path(mtrmodel) +*/ +/* >>>>>>> DEFINE PROCESS FOR PUBLISHING OUTPUTS <<<<<<<<< - publishDir "${derivativesDir}/${out.sub}/${out.ses}anat", mode: 'move', overwrite: true +/* Process publishOutputs is a typical pattern in DSL2 to put final +outputs where they belong to (by copying/moving them from the work folder). +In BIDS case, it is a derivatives folder `derivatives/sub-01/ses-001/anat/...`. 
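+
+As a concrete sketch (subject/session values are placeholders): with derivativesDir set to
+"$params.bids/derivatives/SCT" and sid = sub-01_ses-001, the publishDir directive of this
+process resolves to roughly
+
+    publishDir "<bids>/derivatives/SCT/sub-01/ses-001/anat", mode: 'move', overwrite: true
+
+so every file declared under output: ends up in that folder once the process completes.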
- output: - tuple val(sid), \ - path(mtw_aligned), path(pdw_aligned), \ - path(mtw_disp), path(pdw_disp), \ - path(t1map), path(mtsat), path(t1mapj), \ - path(mtsatj), path(qmrmodel), path(mp2raget1), \ - path(mp2raget1j),path(mp2rager1),path(mp2rager1j), path(mp2ragemodel), \ - path(mtrnii), path(mtrjson), path(mtrmodel) +We define this process in the body of the main workflow file as it has to +access certain variables declared during runtime. - script: - """ - mkdir -p ${derivativesDir} - echo "Transferring ${mtw_aligned} to ${derivativesDir}/${out.sub}/${out.ses}anat folder..." - """ -} +TODO: + Test if { sid } instead of "${sid}" can help process access variable when + it is declared to the scope. Same for derivativesDir If so, make publishOutputs a module. -process publishOutputsFmap { +*/ + +process publishOutputs { + // Infer session-level file organization. exec: out = getSubSesEntity("${sid}") + // Again, order matters. input: - tuple val(sid), \ - path(b1res), path(smooth) + tuple val(sid), file(mtsExample), \ + file(segGM), file(segWM), file(segCSF) - publishDir "${derivativesDir}/${out.sub}/${out.ses}fmap", mode: 'move', overwrite: true + // This is where the files will be dropped. Mode move indicates that + // the files will be moved (alternatives are copying or symlinking) + // We set overwrite true, if there are files sharing the same name with the + // outputs, they'll be overwritten. + publishDir "${derivativesDir}/${out.sub}/${out.ses}anat", mode: 'move', overwrite: true + // Output mirrors input as we are simply moving files. output: - tuple val(sid), \ - path(b1res),path(smooth) + tuple val(sid), file(mtsExample), \ + file(segGM), file(segWM), file(segCSF) + // Generate derivatives folder script: """ mkdir -p ${derivativesDir} """ } -// Here we include all the processes from modules/ -include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' - -// Pipeline starts here -workflow { - -fitMp2rageUni(pairMP2RAGE) - -publish_mp2rage = fitMp2rageUni.out.mp2rage_output -// EXECUTE PROCESS (tuple order: sid, pdw, mtw, t1w) -alignMtsatInputs(mtsat_for_alignment) - -// Get aligned images (tuple order: sid, pdw, mtw, pdwdisp, mtwdisp) -mtsat_from_alignment = alignMtsatInputs.out.mtsat_from_alignment - -// All these files will be eventually published, but we need a subsample of them -// to proceed with the workflow, which are first 3 tuple elements (sid, pdw, mtw) -mtsat_from_alignment - .multiMap{it -> - Publish: it - Fit: tuple(it[0],it[1],it[2]) - Mtr: tuple(it[0],it[1],it[2]) - } - .set {Aligned} - -// EXECUTE PROCESS (tuple order: sid, t1w) -extractBrain(T1w.Nii) - -mask_from_bet = extractBrain.out.mask_from_bet - -if (!params.use_bet){ - Channel - .empty() - .set{mask_from_bet} -} - -// Clone -Mask = mask_from_bet - //.multiMap { it -> Split1: Split2: Split3: it } - -// Join channels by tuple index for resampling b1+ map (ref t1w) -T1w.Nii - .join(B1.AngleMap) - .set{b1_for_alignment} - -// Process val(sid), file(t1w), file(b1raw) -resampleB1(b1_for_alignment) - -// Collect output -b1_resampled = resampleB1.out.b1_resampled - -// Create empty channel as b1_resampled output is optional. 
-if (!params.use_b1cor){ - Channel - .empty() - .set{b1_resampled} -} - +/* >>>>>>> INCLUDE MODULES <<<<<<<<< -// Join channels for smoothing with map -b1_resampled - .join(Mask) - .set {b1_for_smoothing_with_mask} - -// EXECUTE PROCESS (tuple order: sid, b1, mask) -smoothB1WithMask(b1_for_smoothing_with_mask) - -// Collect ouputs -b1_filtered_w_mask = smoothB1WithMask.out.b1_filtered_w_mask - .multiMap{it-> - Publish: it - Nii: tuple(it[0],it[1]) - } - -// EXECUTE PROCESS (tuple order: sid, b1) -smoothB1WithoutMask(b1_resampled) - -// Collect ouputs -b1_filtered_wo_mask = smoothB1WithoutMask.out.b1_filtered_wo_mask - .multiMap{it-> - Publish: it - Nii: tuple(it[0],it[1]) - } - -// Join data channels based on parameter selection -// Fit with B1 -if (params.use_bet){ - -Aligned.Fit - .join(T1w.Nii) - .join(PDw.Json) - .join(MTw.Json) - .join(T1w.Json) - .join(b1_filtered_w_mask.Nii) - .set{fitting_with_b1} - -}else{ - -Aligned.Fit - .join(T1w.Nii) - .join(PDw.Json) - .join(MTw.Json) - .join(T1w.Json) - .join(b1_filtered_wo_mask.Nii) - .set{fitting_with_b1} - -} - -// MTR with mask -Aligned.Mtr - .join(Mask) - .set{fitting_mtr} - -// Fit without B1 map channel -Aligned.Fit - .join(T1w.Nii) - .join(PDw.Json) - .join(MTw.Json) - .join(T1w.Json) - .set{mtsat_fitting_without_b1} - -mtsat_fitting_without_b1 - .join(Mask) - .set{ fitting_without_b1_bet} - -fitting_with_b1 - .join(Mask) - .set{mtsat_with_b1_bet} +The reason we include modules here is that the sid variable is declared at this point. +If we do that at the very beginning, processes won't be able to access that important +variable. +*/ -fitMtsatWithB1Mask(mtsat_with_b1_bet) +// Include T2_Segment_SpinalCord process from spinalcord_segmentation module. +include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' +include { MTS_Align_SpinalCord } from './modules/spinalcord_mtsat' -fitMtsatWithB1(fitting_with_b1) +// >>>>>>>>>>>>>>>>> WORKFLOW DESCRIPTION START <<<<<<<<<<<<<<<<<< +workflow { -fitMtsatWithBet(fitting_with_b1) +// Send files collected by mtsat_for_alignment to the +// relevant process. +MTS_Align_SpinalCord(mtsat_for_alignment) -fitMtsat(mtsat_fitting_without_b1) +// Collect outputs emitted by publish_spinal_mtsat channel. +// CONVENTION: process_name.out.emit_channel_name +mtsat_from_alignment = MTS_Align_SpinalCord.out.publish_spinal_mtsat -// Fit MTR -fitMtratioWithMask(fitting_mtr) +// Same for segmenting T2w +T2_Segment_SpinalCord(T2w.Nii) +masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg -Aligned.Publish - .join(fitMtsatWithB1Mask.out.publish_mtsat) - .join(publish_mp2rage) - .join(fitMtratioWithMask.out.mtratio_output) +// Join channels +mtsat_from_alignment + .join(masks_from_segmentation) .set {publish} -b1_resampled - .join(b1_filtered_w_mask.Nii) - .set{publishfmap} - +// Move files from work directory to where we'd like to find them (derivatives). 
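+// A sketch of what each item in `publish` is expected to look like after the join above
+// (file names follow the placeholder processes in modules/):
+//   [sid, <sid>_MTSAT_example.txt, <sid>_seg-gm_mask.txt, <sid>_seg-wm_mask.txt, <sid>_seg-csf_mask.txt]
+// i.e. the same tuple order declared in the input of publishOutputs.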
publishOutputs(publish) -publishOutputsFmap(publishfmap) - } diff --git a/neuromod_anat_dsl2.nf b/neuromod_anat_dsl2.nf index bedc235..078861d 100644 --- a/neuromod_anat_dsl2.nf +++ b/neuromod_anat_dsl2.nf @@ -28,8 +28,8 @@ Users: Please see USAGE for further details nextflow.enable.dsl=2 include { getSubSesEntity; checkSesFolders } from './modules/bids_patterns' -params.bids = false params.help = false +params.bids = false log.info "## # ###### # # ##### #### # # #### ##### " log.info " # # # # # # # # # # ## ## # # # # " @@ -79,7 +79,7 @@ workflow.onComplete { /*Define bindings for --help*/ if(params.help) { - usage = file("$baseDir/USAGE") + usage = file("$baseDir/USAGE-ANAT") cpu_count = Runtime.runtime.availableProcessors() bindings = ["ants_dim":"$params.ants_dim", From 1cae389d2ca2a932b4147c7591973a737e2a65d4 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Thu, 29 Apr 2021 10:19:36 -0400 Subject: [PATCH 05/13] Small typo --- neuromod-process-spinalcord.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index 910c754..ab3493c 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -45,7 +45,7 @@ if(params.help) { // To print USAGE-SCT file usage = file("$baseDir/USAGE-SCT") - // To print paremeter-values + // To print parameter-values cpu_count = Runtime.runtime.availableProcessors() bindings = ["sct_parameter":"$params.sct_parameter", "sct_another_parameter":"$params.sct_another_parameter" From 2227bbefabfa5bf4f0a4230b176c942affcfe5c1 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Thu, 29 Apr 2021 12:02:31 -0400 Subject: [PATCH 06/13] Fixed small typos --- neuromod-process-spinalcord.nf | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index ab3493c..f6b34c3 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -98,8 +98,8 @@ if(params.bids){ // This is how we set the channel's name! .set {niiMTS} // Now, this is what we expect to find under niiMTS: - // - niiMTS.PWD [[sub-01_ses001, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz], - // [sub-01_ses002, sub-01_ses-002_acq-MToff_bp-spinalcord_MTS.nii.gz]...] + // - niiMTS.PDw [[sub-01_ses001, sub-01_ses-001_acq-PDw_bp-spinalcord_MTS.nii.gz], + // [sub-01_ses002, sub-01_ses-002_acq-PDw_bp-spinalcord_MTS.nii.gz]...] // - niiMTS.T1w [[sub-01_ses001, sub-01_ses-001_acq-T1w_bp-spinalcord_MTS.nii.gz], // [sub-01_ses002, sub-01_ses-002_acq-T1w_bp-spinalcord_MTS.nii.gz]...] // Similar for MTw. @@ -107,7 +107,7 @@ if(params.bids){ // This will do the same thing for spinal cord MTS iput pairs to fetch json files under 3 // channels that lives under jsonMTS namespace. Again, each channel contains tuples of [[sid, filename],...]. 
Channel - .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_bp-cspine_MTS.nii.gz", maxDepth: 3, size: 3, flat: true) + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_acq-{MToff,MTon,T1w}_bp-cspine_MTS.json", maxDepth: 3, size: 3, flat: true) .multiMap {sid, MToff, MTon, T1w -> PDw: tuple(sid, MToff) MTw: tuple(sid, MTon) @@ -139,14 +139,14 @@ Given the following channels: and the following expression: Ch-nii .join(Ch-json) - .set(pairExample) + .set{pairExample} The new pairExample channel will look like: - [sub-01_ses-001, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz] + [sub-01_ses-001, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.nii.gz, sub-01_ses-001_acq-MToff_bp-spinalcord_MTS.json] >>>> IMPORTANT <<<<<< -The order at which you joing channels is highly critical. In this example we joined Ch-json into +The order at which you join channels is highly critical. In this example we joined Ch-json into Ch-nii. Therefore the order will be [sid, nii, json] for indexes [0,1,2]. Let's say we will send this pairExample channel to a process as an input: @@ -243,6 +243,8 @@ process publishOutputs { out = getSubSesEntity("${sid}") // Again, order matters. + // "val" is a Nextflow convention to declare a variable to include in the process + // The input variables are outputs in the process. Eg: "segGM" is listed spinalcordsegmentation.process.output. Again, order matters. input: tuple val(sid), file(mtsExample), \ file(segGM), file(segWM), file(segCSF) From ad708d5d55a603d4ca14eb7dda806fc5f644a803 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Mon, 3 May 2021 13:14:58 -0400 Subject: [PATCH 07/13] Added T2w segmentation process --- modules/spinalcord_segmentation.nf | 8 ++------ neuromod-process-spinalcord.config | 9 +++++---- neuromod-process-spinalcord.nf | 25 ++++++++++++------------- 3 files changed, 19 insertions(+), 23 deletions(-) diff --git a/modules/spinalcord_segmentation.nf b/modules/spinalcord_segmentation.nf index 835645d..d87465b 100644 --- a/modules/spinalcord_segmentation.nf +++ b/modules/spinalcord_segmentation.nf @@ -13,15 +13,11 @@ process T2_Segment_SpinalCord{ output: tuple val(sid), \ - path("${sid}_seg-gm_mask.txt"), \ - path("${sid}_seg-wm_mask.txt"), \ - path("${sid}_seg-csf_mask.txt"), \ + path("${sid}_bp-spine_T2w_seg_nii.gz"), \ emit: publish_spinal_seg script: """ - echo "From $t2w segmented GM\n" >> ${sid}_seg-gm_mask.txt - echo "From $t2w segmented WM" >> ${sid}_seg-wm_mask.txt - echo "From $t2w segmented CSF" >> ${sid}_seg-csf_mask.txt + sct_deepseg_sc -i $t2w -c t2 -qc $params.qcDir """ } \ No newline at end of file diff --git a/neuromod-process-spinalcord.config b/neuromod-process-spinalcord.config index 1783e05..dcb04a4 100644 --- a/neuromod-process-spinalcord.config +++ b/neuromod-process-spinalcord.config @@ -2,10 +2,10 @@ process { - withName: MTS_Align_SpinalCord { - cpus = 2 - memory = 1.GB - maxForks = 4 + withName: T2_Segment_SpinalCord { + cpus = 4 + memory = 2.GB + maxForks = 3 } } @@ -14,6 +14,7 @@ params { sct_parameter= "This is how default params are defined" sct_another_parameter= 99 + qcDir = "$params.bids/derivatives/SCT/qc" } diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index f6b34c3..4dc4218 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -68,7 +68,7 @@ if(params.bids){ log.info "Input: $params.bids" bids = file(params.bids) // DEFINE DERIVATIVES DIRECTORY - derivativesDir = 
"$params.bids/derivatives/SCT" + derivativesDir = "$params.bids/derivatives/SCT" log.info "Derivatives: $derivativesDir" log.info "Nextflow Work Dir: $workflow.workDir" @@ -246,8 +246,8 @@ process publishOutputs { // "val" is a Nextflow convention to declare a variable to include in the process // The input variables are outputs in the process. Eg: "segGM" is listed spinalcordsegmentation.process.output. Again, order matters. input: - tuple val(sid), file(mtsExample), \ - file(segGM), file(segWM), file(segCSF) + tuple val(sid), \ + file(T2wSeg) // This is where the files will be dropped. Mode move indicates that // the files will be moved (alternatives are copying or symlinking) @@ -257,8 +257,8 @@ process publishOutputs { // Output mirrors input as we are simply moving files. output: - tuple val(sid), file(mtsExample), \ - file(segGM), file(segWM), file(segCSF) + tuple val(sid), \ + file(T2wSeg) // Generate derivatives folder script: @@ -275,7 +275,7 @@ variable. */ // Include T2_Segment_SpinalCord process from spinalcord_segmentation module. -include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' +include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' addParams(qcDir: params.qcDir) include { MTS_Align_SpinalCord } from './modules/spinalcord_mtsat' // >>>>>>>>>>>>>>>>> WORKFLOW DESCRIPTION START <<<<<<<<<<<<<<<<<< @@ -283,23 +283,22 @@ workflow { // Send files collected by mtsat_for_alignment to the // relevant process. -MTS_Align_SpinalCord(mtsat_for_alignment) +// MTS_Align_SpinalCord(mtsat_for_alignment) // Collect outputs emitted by publish_spinal_mtsat channel. // CONVENTION: process_name.out.emit_channel_name -mtsat_from_alignment = MTS_Align_SpinalCord.out.publish_spinal_mtsat +// mtsat_from_alignment = MTS_Align_SpinalCord.out.publish_spinal_mtsat // Same for segmenting T2w T2_Segment_SpinalCord(T2w.Nii) masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg // Join channels -mtsat_from_alignment - .join(masks_from_segmentation) - .set {publish} +// mtsat_from_alignment +// .join(masks_from_segmentation) +// .set {publish} // Move files from work directory to where we'd like to find them (derivatives). 
-publishOutputs(publish) +publishOutputs(masks_from_segmentation) } - From a6a2ed8abfd30c185c57a7315eceaf79dd0cc9f7 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Mon, 3 May 2021 13:16:53 -0400 Subject: [PATCH 08/13] Specified subject name for output QC --- modules/spinalcord_segmentation.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/spinalcord_segmentation.nf b/modules/spinalcord_segmentation.nf index d87465b..a7611a7 100644 --- a/modules/spinalcord_segmentation.nf +++ b/modules/spinalcord_segmentation.nf @@ -18,6 +18,6 @@ process T2_Segment_SpinalCord{ script: """ - sct_deepseg_sc -i $t2w -c t2 -qc $params.qcDir + sct_deepseg_sc -i $t2w -c t2 -qc $params.qcDir -qc-subject ${sid} """ } \ No newline at end of file From d7059c5ac015c7ab9e03ad4146eca836de97807b Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Mon, 3 May 2021 13:18:41 -0400 Subject: [PATCH 09/13] Fixed typo in output file name --- modules/spinalcord_segmentation.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/spinalcord_segmentation.nf b/modules/spinalcord_segmentation.nf index a7611a7..5d0784c 100644 --- a/modules/spinalcord_segmentation.nf +++ b/modules/spinalcord_segmentation.nf @@ -13,7 +13,7 @@ process T2_Segment_SpinalCord{ output: tuple val(sid), \ - path("${sid}_bp-spine_T2w_seg_nii.gz"), \ + path("${sid}_bp-spine_T2w_seg.nii.gz"), \ emit: publish_spinal_seg script: From bcf3064cd1194e25322f56f784c5a4374c8b4c5f Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Tue, 4 May 2021 23:04:31 -0400 Subject: [PATCH 10/13] Working spinal cord segmentation pipeline --- modules/spinalcord_segmentation.nf | 3 ++- neuromod-process-spinalcord.config | 4 ++-- neuromod-process-spinalcord.nf | 24 +++++++++++++++++++----- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/modules/spinalcord_segmentation.nf b/modules/spinalcord_segmentation.nf index 5d0784c..49f2875 100644 --- a/modules/spinalcord_segmentation.nf +++ b/modules/spinalcord_segmentation.nf @@ -13,11 +13,12 @@ process T2_Segment_SpinalCord{ output: tuple val(sid), \ - path("${sid}_bp-spine_T2w_seg.nii.gz"), \ + path("${sid}_bp-cspine_T2w_seg.nii.gz"), \ emit: publish_spinal_seg script: """ sct_deepseg_sc -i $t2w -c t2 -qc $params.qcDir -qc-subject ${sid} """ + } \ No newline at end of file diff --git a/neuromod-process-spinalcord.config b/neuromod-process-spinalcord.config index dcb04a4..a1408fa 100644 --- a/neuromod-process-spinalcord.config +++ b/neuromod-process-spinalcord.config @@ -4,8 +4,8 @@ process { withName: T2_Segment_SpinalCord { cpus = 4 - memory = 2.GB - maxForks = 3 + memory = 4.GB + maxForks = 2 } } diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index 4dc4218..9158c13 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -248,17 +248,21 @@ process publishOutputs { input: tuple val(sid), \ file(T2wSeg) + // file(T2wSeg), \ + // file(T2wSegLabeled) // This is where the files will be dropped. Mode move indicates that // the files will be moved (alternatives are copying or symlinking) // We set overwrite true, if there are files sharing the same name with the // outputs, they'll be overwritten. - publishDir "${derivativesDir}/${out.sub}/${out.ses}anat", mode: 'move', overwrite: true + publishDir "${derivativesDir}/${out.sub}/${out.ses}anat", mode: 'copy', overwrite: true // Output mirrors input as we are simply moving files. 
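+    // Note (added for clarity; rationale assumed): 'copy' keeps the originals in the
+    // Nextflow work directory instead of moving them out of it, which is presumably why
+    // it was adopted here, since moving files out of the work directory can break
+    // -resume and any downstream process that still needs them.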
output: tuple val(sid), \ file(T2wSeg) + // file(T2wSeg), \ + // file(T2wSegLabeled) // Generate derivatives folder script: @@ -276,7 +280,7 @@ variable. // Include T2_Segment_SpinalCord process from spinalcord_segmentation module. include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' addParams(qcDir: params.qcDir) -include { MTS_Align_SpinalCord } from './modules/spinalcord_mtsat' +include { T2_Vertebral_Labeling } from './modules/spinalcord_vertebral_labeling' addParams(qcDir: params.qcDir) // >>>>>>>>>>>>>>>>> WORKFLOW DESCRIPTION START <<<<<<<<<<<<<<<<<< workflow { @@ -291,7 +295,18 @@ workflow { // Same for segmenting T2w T2_Segment_SpinalCord(T2w.Nii) -masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg +to_be_published = T2_Segment_SpinalCord.out.publish_spinal_seg + +// masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg +// T2w.Nii +// .join(masks_from_segmentation) +// .set{inputs_for_vertebral_labeling} +// +// T2_Vertebral_Labeling(inputs_for_vertebral_labeling) +// labels_from_labeling = T2_Vertebral_Labeling.out.publish_spinal_seg +// masks_from_segmentation +// .join(labels_from_labeling) +// .set{to_be_published} // Join channels // mtsat_from_alignment @@ -299,6 +314,5 @@ masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg // .set {publish} // Move files from work directory to where we'd like to find them (derivatives). -publishOutputs(masks_from_segmentation) +publishOutputs(to_be_published) } - From 3e58e0db5bd3e7aa6a103356ad070d46e55fe254 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Tue, 4 May 2021 23:26:41 -0400 Subject: [PATCH 11/13] Added vertebral labeling --- neuromod-process-spinalcord.nf | 33 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index 9158c13..6cdb2f5 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -247,9 +247,8 @@ process publishOutputs { // The input variables are outputs in the process. Eg: "segGM" is listed spinalcordsegmentation.process.output. Again, order matters. input: tuple val(sid), \ - file(T2wSeg) - // file(T2wSeg), \ - // file(T2wSegLabeled) + file(T2wSeg), \ + file(T2wSegLabeled) // This is where the files will be dropped. Mode move indicates that // the files will be moved (alternatives are copying or symlinking) @@ -260,9 +259,8 @@ process publishOutputs { // Output mirrors input as we are simply moving files. 
output: tuple val(sid), \ - file(T2wSeg) - // file(T2wSeg), \ - // file(T2wSegLabeled) + file(T2wSeg), \ + file(T2wSegLabeled) // Generate derivatives folder script: @@ -295,18 +293,17 @@ workflow { // Same for segmenting T2w T2_Segment_SpinalCord(T2w.Nii) -to_be_published = T2_Segment_SpinalCord.out.publish_spinal_seg - -// masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg -// T2w.Nii -// .join(masks_from_segmentation) -// .set{inputs_for_vertebral_labeling} -// -// T2_Vertebral_Labeling(inputs_for_vertebral_labeling) -// labels_from_labeling = T2_Vertebral_Labeling.out.publish_spinal_seg -// masks_from_segmentation -// .join(labels_from_labeling) -// .set{to_be_published} +// to_be_published = T2_Segment_SpinalCord.out.publish_spinal_seg +masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg +T2w.Nii + .join(masks_from_segmentation) + .set{inputs_for_vertebral_labeling} + +T2_Vertebral_Labeling(inputs_for_vertebral_labeling) +labels_from_labeling = T2_Vertebral_Labeling.out.publish_spinal_seg +masks_from_segmentation + .join(labels_from_labeling) + .set{to_be_published} // Join channels // mtsat_from_alignment From 2d5ec7aa8bdde0b2b1f854986a9a9714c8334bcc Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Mon, 7 Jun 2021 18:29:11 -0400 Subject: [PATCH 12/13] Simplified NF file to have only one script for spinal cord WIP --- modules/spinalcord_segmentation.nf | 19 +++++++++++ neuromod-process-spinalcord.config | 2 +- neuromod-process-spinalcord.nf | 54 ++++++++++++++++++++---------- 3 files changed, 56 insertions(+), 19 deletions(-) diff --git a/modules/spinalcord_segmentation.nf b/modules/spinalcord_segmentation.nf index 49f2875..a6fbfc6 100644 --- a/modules/spinalcord_segmentation.nf +++ b/modules/spinalcord_segmentation.nf @@ -5,6 +5,25 @@ nextflow.enable.dsl=2 between the files emitted by the input channels, process and how those files are process to emit outputs */ +// TODO: find a way to avoid this code duplication below +process T1_Segment_SpinalCord{ + tag { sid } + + input: + tuple val(sid), file(t2w) + + output: + tuple val(sid), \ + path("${sid}_bp-cspine_T1w_seg.nii.gz"), \ + emit: publish_spinalcord + + script: + """ + sct_deepseg_sc -i $t2w -c t1 -qc $params.qcDir -qc-subject ${sid} + """ + +} + process T2_Segment_SpinalCord{ tag { sid } diff --git a/neuromod-process-spinalcord.config b/neuromod-process-spinalcord.config index a1408fa..b165370 100644 --- a/neuromod-process-spinalcord.config +++ b/neuromod-process-spinalcord.config @@ -2,7 +2,7 @@ process { - withName: T2_Segment_SpinalCord { + withName: SpinalCord { cpus = 4 memory = 4.GB maxForks = 2 diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index 6cdb2f5..e3caca0 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -115,11 +115,21 @@ if(params.bids){ } .set {jsonMTS} + // Create a channel for spinal cord T1w inputs + Channel + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T1w.nii.gz", maxDepth: 3, size: 1, flat: true) + .multiMap { it -> Nii: it } + .set {T1w} + // Create a channel for spinal cord T2w inputs Channel .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T2w.nii.gz", maxDepth: 3, size: 1, flat: true) .multiMap { it -> Nii: it } .set {T2w} + + Channel + .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T2w.nii.gz", maxDepth: 3, size: 1, flat: true) + .set {SubjectID} } else{ @@ -247,8 +257,7 @@ process publishOutputs { // The input variables are outputs in the process. 
Eg: "segGM" is listed spinalcordsegmentation.process.output. Again, order matters. input: tuple val(sid), \ - file(T2wSeg), \ - file(T2wSegLabeled) + file(T2w) // This is where the files will be dropped. Mode move indicates that // the files will be moved (alternatives are copying or symlinking) @@ -259,8 +268,7 @@ process publishOutputs { // Output mirrors input as we are simply moving files. output: tuple val(sid), \ - file(T2wSeg), \ - file(T2wSegLabeled) + file(T2w) // Generate derivatives folder script: @@ -277,8 +285,10 @@ variable. */ // Include T2_Segment_SpinalCord process from spinalcord_segmentation module. -include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' addParams(qcDir: params.qcDir) -include { T2_Vertebral_Labeling } from './modules/spinalcord_vertebral_labeling' addParams(qcDir: params.qcDir) +include { SpinalCord } from './modules/spinalcord' addParams(qcDir: params.qcDir) +// include { T2_Segment_SpinalCord } from './modules/spinalcord_segmentation' addParams(qcDir: params.qcDir) +// include { T1_Segment_SpinalCord } from './modules/spinalcord_segmentation' addParams(qcDir: params.qcDir) +// include { T1_Vertebral_Labeling } from './modules/spinalcord_vertebral_labeling' addParams(qcDir: params.qcDir) // >>>>>>>>>>>>>>>>> WORKFLOW DESCRIPTION START <<<<<<<<<<<<<<<<<< workflow { @@ -287,23 +297,31 @@ workflow { // relevant process. // MTS_Align_SpinalCord(mtsat_for_alignment) +SpinalCord(SubjectID) + +to_publish = SpinalCord.out.publish_spinalcord // Collect outputs emitted by publish_spinal_mtsat channel. // CONVENTION: process_name.out.emit_channel_name // mtsat_from_alignment = MTS_Align_SpinalCord.out.publish_spinal_mtsat // Same for segmenting T2w -T2_Segment_SpinalCord(T2w.Nii) +// T2_Segment_SpinalCord(T2w.Nii) // to_be_published = T2_Segment_SpinalCord.out.publish_spinal_seg -masks_from_segmentation = T2_Segment_SpinalCord.out.publish_spinal_seg -T2w.Nii - .join(masks_from_segmentation) - .set{inputs_for_vertebral_labeling} - -T2_Vertebral_Labeling(inputs_for_vertebral_labeling) -labels_from_labeling = T2_Vertebral_Labeling.out.publish_spinal_seg -masks_from_segmentation - .join(labels_from_labeling) - .set{to_be_published} +// segmentation_on_t2 = T2_Segment_SpinalCord.out.publish_spinal_seg +// +// T1_Segment_SpinalCord(T1w.Nii) +// segmentation_on_t1 = T1_Segment_SpinalCord.out.publish_spinal_seg +// T1w.Nii +// .join(segmentation_on_t1) +// .set{inputs_for_vertebral_labeling} +// +// T1_Vertebral_Labeling(inputs_for_vertebral_labeling) +// vertebral_labeling = T1_Vertebral_Labeling.out.publish_spinal_seg +// +// segmentation_on_t2 +// .join(segmentation_on_t1) +// .join(vertebral_labeling) +// .set{to_be_published} // Join channels // mtsat_from_alignment @@ -311,5 +329,5 @@ masks_from_segmentation // .set {publish} // Move files from work directory to where we'd like to find them (derivatives). 
-publishOutputs(to_be_published) +publishOutputs(to_publish) } From 9a8408e3665bd516b9bb9f06f204c4a5c3505285 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Mon, 7 Jun 2021 18:49:06 -0400 Subject: [PATCH 13/13] Added segmentation of T2 --- neuromod-process-spinalcord.nf | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/neuromod-process-spinalcord.nf b/neuromod-process-spinalcord.nf index e3caca0..95ed8eb 100644 --- a/neuromod-process-spinalcord.nf +++ b/neuromod-process-spinalcord.nf @@ -114,19 +114,21 @@ if(params.bids){ T1w: tuple(sid, T1w) } .set {jsonMTS} - + // Create a channel for spinal cord T1w inputs Channel .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T1w.nii.gz", maxDepth: 3, size: 1, flat: true) .multiMap { it -> Nii: it } .set {T1w} - + // Create a channel for spinal cord T2w inputs Channel .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T2w.nii.gz", maxDepth: 3, size: 1, flat: true) .multiMap { it -> Nii: it } .set {T2w} + + // Fetch subject ID Channel .fromFilePairs("$bids/${entity.dirInputLevel}sub-*_bp-cspine_T2w.nii.gz", maxDepth: 3, size: 1, flat: true) .set {SubjectID}
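+    // Note (assumption, for clarity): with size: 1 and flat: true, each item emitted by
+    // SubjectID is a [sid, t2w] pair, where sid is whatever the sub-* glob captured
+    // (e.g. sub-01_ses-001).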