diff --git a/README.md b/README.md index 1c81096..8ce3904 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,15 @@ =================== Run the RecobundlesX pipeline. +To access the example atlases: +https://zenodo.org/record/4104300#.X4xdG51KhdU If you use this pipeline, please cite: ``` +Rheault, Francois. Analyse et reconstruction de faisceaux de la matière blanche. +page 137-170, (2020), https://savoirs.usherbrooke.ca/handle/11143/17255 + Garyfallidis, Eleftherios, et al. Recognition of white matter bundles using local and global streamline-based registration and clustering. NeuroImage 170 (2018) https://doi.org/10.1016/j.neuroimage.2017.07.015 diff --git a/USAGE b/USAGE index 63fd91f..3c09647 100755 --- a/USAGE +++ b/USAGE @@ -3,15 +3,15 @@ Pipeline for RecobundlesX tractography USAGE -nextflow run main.nf --root ... [OPTIONAL_ARGUMENTS] +nextflow run main.nf --input ... [OPTIONAL_ARGUMENTS] DESCRIPTION ---root=/path/to/[root] Root folder containing multiple subjects +--input=/path/to/[root] Root folder containing multiple subjects [root] ├── S1 - │ ├── *anat.nii.gz + │ ├── *fa.nii.gz │ └── *tracking.trk └── S2 └── * diff --git a/main.nf b/main.nf index a6d18b2..740f095 100644 --- a/main.nf +++ b/main.nf @@ -53,32 +53,36 @@ log.info "Outlier Removal Alpha: $params.outlier_alpha" log.info "" log.info "" -log.info "Input: $params.root" -root = file(params.root) +log.info "Input: $params.input" +root = file(params.input) /* Watch out, files are ordered alphabetically in channel */ - in_data = Channel - .fromFilePairs("$root/**/{*tracking.trk,*anat.nii.gz}", - size: 2, - maxDepth:2, - flat: true) {it.parent.name} - - atlas_anat = Channel.fromPath("$params.atlas_anat") - atlas_config = Channel.fromPath("$params.atlas_config") - atlas_directory = Channel.fromPath("$params.atlas_directory") - - if (params.atlas_centroids) { - atlas_centroids = Channel.fromPath("$params.atlas_centroids/*.trk") - } - else { - atlas_centroids = Channel.empty() - } - 
-(anat_for_registration, anat_for_reference, tractogram_for_recognition) = in_data - .map{sid, anat, tractogram -> - [tuple(sid, anat), - tuple(sid, anat), - tuple(sid, tractogram)]} - .separate(3) +Channel + .fromPath("$root/**/*tracking*.trk", + maxDepth:1) + .map{[it.parent.name, it]} + .set{tractogram_for_recognition} + +Channel + .fromPath("$root/**/*fa.nii.gz", + maxDepth:1) + .map{[it.parent.name, it]} + .into{anat_for_registration;anat_for_reference_centroids;anat_for_reference_bundles} + +if (!(params.atlas_anat) || !(params.atlas_config) || !(params.atlas_directory)) { + error "You must specify all 3 atlas related input. --atlas_anat, " + + "--atlas_config and --atlas_directory all are mandatory." +} + +atlas_anat = Channel.fromPath("$params.atlas_anat") +atlas_config = Channel.fromPath("$params.atlas_config") +atlas_directory = Channel.fromPath("$params.atlas_directory") + +if (params.atlas_centroids) { + atlas_centroids = Channel.fromPath("$params.atlas_centroids/*_centroid.trk") +} +else { + atlas_centroids = Channel.empty() +} workflow.onComplete { log.info "Pipeline completed at: $workflow.complete" @@ -91,21 +95,22 @@ anat_for_registration .set{anats_for_registration} process Register_Anat { cpus params.register_processes + memory '2 GB' + input: set sid, file(native_anat), file(atlas) from anats_for_registration output: - set sid, "${sid}__output0GenericAffine.txt" into transformation_for_recognition, transformation_for_centroids + set sid, "${sid}__output0GenericAffine.mat" into transformation_for_recognition, transformation_for_centroids file "${sid}__outputWarped.nii.gz" script: """ antsRegistrationSyNQuick.sh -d 3 -f ${native_anat} -m ${atlas} -n ${params.register_processes} -o ${sid}__output -t a - ConvertTransformFile 3 ${sid}__output0GenericAffine.mat ${sid}__output0GenericAffine.txt --hm --ras """ } -anat_for_reference +anat_for_reference_centroids .join(transformation_for_centroids, by: 0) .set{anat_and_transformation} process 
Transform_Centroids { @@ -116,29 +121,35 @@ process Transform_Centroids { file "${sid}__${centroid.baseName}.trk" script: """ - scil_apply_transform_to_tractogram.py ${centroid} ${anat} ${transfo} ${sid}__${centroid.baseName}.trk --inverse --remove_invalid + scil_apply_transform_to_tractogram.py ${centroid} ${anat} ${transfo} ${sid}__${centroid.baseName}.trk --inverse --cut_invalid """ } + tractogram_for_recognition - .combine(transformation_for_recognition, by: 0) + .join(anat_for_reference_bundles) + .join(transformation_for_recognition) .combine(atlas_config) .combine(atlas_directory) .set{tractogram_and_transformation} process Recognize_Bundles { cpus params.rbx_processes + memory params.rbx_memory_limit + input: - set sid, file(tractogram), file(transfo), file(config), file(directory) from tractogram_and_transformation + set sid, file(tractogram), file(reference), file(transfo), file(config), file(directory) from tractogram_and_transformation output: set sid, "*.trk" into bundles_for_cleaning file "results.json" file "logfile.txt" script: """ + scil_remove_invalid_streamlines.py ${tractogram} tractogram_ic.trk --reference ${reference} --remove_single_point --remove_overlapping_points mkdir tmp/ - scil_recognize_multi_bundles.py ${tractogram} ${config} ${directory}/*/ ${transfo} --inverse --output tmp/ \
+ scil_recognize_multi_bundles.py tractogram_ic.trk ${config} ${directory}/*/ ${transfo} --inverse --out_dir tmp/ \
 --log_level DEBUG --multi_parameters $params.multi_parameters --minimal_vote_ratio $params.minimal_vote_ratio \
 --tractogram_clustering_thr $params.wb_clustering_thr --seeds $params.seeds --processes $params.rbx_processes + rm tractogram_ic.trk mv tmp/* ./ """ } diff --git a/nextflow.config b/nextflow.config index 46d40aa..fc48cee 100644 --- a/nextflow.config +++ b/nextflow.config @@ -25,11 +25,16 @@ params { //**Number of processes per tasks**// register_processes=4 rbx_processes=4 + params.rbx_memory_limit='8 GB' } singularity.autoMounts =
true profiles { + large_dataset { + params.rbx_memory_limit='16 GB' + } + fully_reproducible { params.register_processes=1 params.rbx_processes=1