diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..63d05a5 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,21 @@ +name: CI +on: [push, pull_request] + +jobs: + pipeline-compilation: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: nf-core/setup-nextflow@v1 + - name: Run pipeline + run: | + nextflow run ${GITHUB_WORKSPACE} --help + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking,infant + nextflow run ${GITHUB_WORKSPACE} --help -profile connectomics + nextflow run ${GITHUB_WORKSPACE} --help -profile connectomics,infant + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking,connectomics + nextflow run ${GITHUB_WORKSPACE} --help -profile tracking,connectomics,infant + nextflow run ${GITHUB_WORKSPACE} --help -profile freesurfer + nextflow run ${GITHUB_WORKSPACE} --help -profile freesurfer,connectomics + nextflow run ${GITHUB_WORKSPACE} --help -profile freesurfer,connectomics,tracking \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..351a4e3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +.devcontainer +.vscode +.nextflow* +.DS_Store +*.code-workspace \ No newline at end of file diff --git a/README.md b/README.md index fececc0..2c6fc29 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,83 @@ -Infant-DWI -=============== -Complete pipeline to perform tractography from infant diffusion MRI data. Adapted from the SCIL TractoFlow Pipeline (https://github.com/scilus/tractoflow.git) and Connectoflow Pipeline (https://github.com/scilus/connectoflow.git). +ChildBrainFlow Pipeline +======================= -SINGULARITY ------------ -If you are running this pipeline on Linux, it is recommended you use the SCIL singularity container that contains all the relevant dependencies. 
-You can get the image by running this command: +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipelines to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +processes have been taken: -`` sudo singularity pull scilus.sif docker://scilus/scilus:latest`` + 1. TractoFlow (https://github.com/scilus/tractoflow.git) [1] + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) -DOCKER ------- -If you are on MacOS or Windows, you can use Docker to run Infant-DWI. -Prebuilt image are available here: +> [!NOTE] +> Please note that some steps have been removed from the original pipelines if they were not relevant for pediatric data. If you need some of these steps, please use the original pipelines. + +Nextflow +-------- +To install nextflow, please see : https://www.nextflow.io/docs/latest/getstarted.html#requirements + +The pipeline export by default a `` parameters.json `` within the output directory to provide a documentation of the parameters used during the execution. For a more detailed report (excluding execution's parameters), the default feature of nextflow `` -with-report `` can be used to export a html report. Simply add this your command line when launching the pipeline: -https://hub.docker.com/r/scilus/scilus +``` +nextflow run main.nf --input -with-report +``` -DEPENDENCIES ------------- -You can also run Infant-DWI without any container, but you need these dependencies installed on your machine to make it work: +Apptainer +--------- +If you are running this pipeline on Linux, it is recommended to run the pipeline using an apptainer image. 
+The pipeline comes with a recipe file (`` /containers/apptainer_recipe.def ``) containing all the required
+dependencies to successfully run every profile. To build the apptainer image, run this command:
-* Scilpy (https://github.com/scilus/scilpy.git)
-* Mrtrix3 (https://www.mrtrix.org/)
-* FSL (https://fsl.fmrib.ox.ac.uk/fsl/fslwiki)
-* ANTs (http://stnava.github.io/ANTs/)
+```
+sudo apptainer build <image_name>.sif containers/apptainer_recipe.def
+```
-All these dependencies must be in your PATH environment variable in order to launch Infant-Tracking.
+Docker
+------
+If you are on MacOS or Windows, you can use Docker to run ChildBrainFlow. The pipeline comes with
+a Dockerfile containing all the dependencies required to successfully run every profile of the pipeline.
+Simply run this command from inside the `` /containers/ `` folder:
+
+```
+docker build -t <image_name> .
+```
+> [!WARNING]
+> Due to the high number of dependencies (ANTs, FSL, MRtrix3, Scilpy, Freesurfer, FastSurfer, etc.), the resulting docker image can be pretty large (~ 40Gb).
-USAGE
+Usage
 -----
 See _USAGE_ or run `` nextflow run main.nf --help `` for more details.
-REFERENCES
+References
 ----------
-This pipeline is adapted from the SCIL TractoFlow pipeline, see:
+If you used this pipeline, please cite:
+
+[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M.,
+    TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline
+    leveraging Nextflow & Singularity, NeuroImage,
+    https://doi.org/10.1016/j.neuroimage.2020.116889.
+
+[2] Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS ONE 12(5)
+    (2017): e0177459. https://doi.org/10.1371/journal.pone.0177459
+
+[3] P. Di Tommaso, et al. Nextflow enables reproducible computational workflows. Nature Biotechnology 35,
+    316–319 (2017) https://doi.org/10.1038/nbt.3820
+
+[4] Garyfallidis, E., Brett, M., Amirbekian, B., Rokem, A., Van Der Walt, S., Descoteaux, M., Nimmo-Smith, I.,
+    2014. 
Dipy, a library for the analysis of diffusion mri data. Frontiers in neuroinformatics 8, 8. + https://doi.org/10.3389/fninf.2014.00008 + +[5] Tournier, J. D., Smith, R. E., Raffelt, D. A., Tabbara, R., Dhollander, T., Pietsch, M., … & Connelly, A. + (2019). MRtrix3: A fast, flexible and open software framework for medical image processing and visualisation. + NeuroImage 202, https://doi.org/10.1016/j.neuroimage.2019.116137 + +[6] Avants, B. B., Tustison, N., & Song, G. (2009). Advanced normalization tools (ANTS). Insight j, 2(365), 1-35. -Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. +[7] Jenkinson, M., Beckmann, C.F., Behrens, T.E., Woolrich, M.W., Smith, S.M., 2012. Fsl. Neuroimage 62, + 782–790. https://doi.org/10.1016/j.neuroimage.2011.09.015 +[8] Fischl, B., Salat, D.H., Busa, E., Albert, M., Dieterich, M., Haselgrove, C., van der Kouwe, A., Killiany, + R., Kennedy, D., Klaveness, S., Montillo, A., Makris, N., Rosen, B., Dale, A.M., 2002. Whole brain + segmentation: automated labeling of neuroanatomical structures in the human brain. Neuron 33, 341-355. + https://doi.org/10.1016/s0896-6273(02)00569-x diff --git a/USAGE b/USAGE index 7482974..7158c3d 100644 --- a/USAGE +++ b/USAGE @@ -1,238 +1,47 @@ -Infant-DWI Pipeline -======================== -Pipeline adapted from the SCIL Tractoflow pipeline (https://github.com/scilus/tractoflow.git) [1]. -Made for use on newborn diffusion MRI data. +ChildBrainFlow Pipeline +======================= -This pipeline performs tractography on newborn dMRI data using already segmented WM and brain -mask. Those mask can come from any structural segmentation pipeline (dHCP, Infant-Freesurfer, -Neocivet, etc.). 
It is RECOMMENDED to provide an already brain-extracted T2w volume, but if -it is not the case, please use the --run_bet_t2w option. +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: -To simply perform tractography, use -profile tracking. The pipeline will only perform the tracking -related processes. + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) -It is possible to also run a connectivity analysis following tracking. Using -profile connectomics, -the pipeline will perform connectivity analysis based on atlas segmentation. The connectomics processes -are imported from the Connectoflow pipeline (https://github.com/scilus/connectoflow.git). If you are using -only the connectomics profile, you need to provide all the necessary files to transform labels, run commit, -run afd_fixel and compute metrics (see structure below). +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** -Both analysis (tracking and connectomics) can be performed one after another automatically (using --profile tracking,connectomics). The pipeline will then reorganised channel to provide the correct inputs. +Steps Selection +--------------- -[1] Theaud, G., Houde, J.-C., Boré, A., Rheault, F., Morency, F., Descoteaux, M., - TractoFlow: A robust, efficient and reproducible diffusion MRI pipeline - leveraging Nextflow & Singularity, NeuroImage, - https://doi.org/10.1016/j.neuroimage.2020.116889. 
+It is possible to choose which part of the pipeline will be run by using the -profile parameter. Depending
+on which -profile is selected, input files will differ. To get a list of the required input files, use this
+command:
+    nextflow run ChildBrainFlow/main.nf --help -profile {desired_profile}
-Run Tracking Pipeline
+Available Profiles
+------------------
-nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking
+Here is a list of available profiles:
-Run Connectomics Pipeline
+    1. tracking : If selected, preprocessing of DWI and Anatomical data will be performed followed by
+                  local modelling and tractography (see [1] for details).
+    2. connectomics : If selected, labels registration, tractogram segmentation and connectivity will be
+                      performed.
+    3. freesurfer : If selected, FreeSurfer Recon-all (or FastSurfer) will be run on input T1s and label files will be
+                    generated (available atlases: freesurfer's atlases, Brainnetome, Brainnetome Child and Glasser).
+                    Only available if T1 volume is supplied as input (therefore, not with -profile infant).
+    4. infant : If selected, the pipeline will assume the data is from infant patients (< 2 years old)
+                and adapt some parameters to perform tractography and connectomics.
-nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics
+Multiple profiles can be selected at the same time, the pipeline will simply organise channels to seamlessly
+connect each step. 
-Run Both Pipeline +To view the required input files, select all the profiles you want to run (ex: tracking, connectomics and infant) +and run this command : -nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics - -DESCRIPTION - - --input=/path/to/[input_folder] Input folder containing multiple subjects - - [Input] - ├-- S1 - | ├-- *dwi.nii.gz [Required for all profiles] - | ├-- *.bval [Required for all profiles] - | ├-- *.bvec [Required for all profiles] - | ├-- *revb0.nii.gz [Required only for tracking] - | ├-- *t2w_warped.nii.gz [Required for all profiles] - | ├-- *brain_mask.nii.gz [Required only for tracking] - | ├-- *wm_mask.nii.gz [Required only for tracking] - | ├-- *.trk [Required only for connectomics] - | ├-- *labels.nii.gz [Required only for connectomics] - | ├-- *peaks.nii.gz [Required only for connectomics] - | ├-- *fodf.nii.gz [Required only for connectomics] - | ├-- OGenericAffine.mat [Required only for connectomics] - | ├-- synoutput0Warp.nii.gz [Required only for connectomics] - | ├-- maskoutput0Warp.nii.gz [Required only for connectomics] - | └-- metrics - | └-- METRIC_NAME.nii.gz [Optional] - └-- S2 - ├-- *dwi.nii.gz [Required for all profiles] - ├-- *bval [Required for all profiles] - ├-- *bvec [Required for all profiles] - ├-- *revb0.nii.gz [Required only for tracking] - ├-- *t2w.nii.gz [Required for all profiles] - ├-- *brain_mask.nii.gz [Required only for tracking] - ├-- *wm_mask.nii.gz [Required only for tracking] - ├-- *.trk [Required only for connectomics] - ├-- *labels.nii.gz [Required only for connectomics] - ├-- *peaks.nii.gz [Required only for connectomics] - ├-- *fodf.nii.gz [Required only for connectomics] - ├-- OGenericAffine.mat [Required only for connectomics] - ├-- synoutput0Warp.nii.gz [Required only for connectomics] - ├-- maskoutput0Warp.nii.gz [Required only for connectomics] - └-- metrics - └-- METRIC_NAME.nii.gz [Optional] - -OPTIONAL ARGUMENTS (current value) - -[TRACKING 
OPTIONS] - - --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) - --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same b-value. - ($dwi_shell_tolerance) - - BET DWI OPTIONS - --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) - --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) - - BET T2 OPTIONS - --run_bet_t2w If set, will perform brain extraction on the input T2w volume. ($run_bet_t2w) - Default settings are soft to make sure an already brain extracted volume is not impacted - by the bet command. The goal is to clean volumes that still have portions of non-brain - structures. - --bet_t2w_f Fractional intensity threshold for bet. ($bet_t2w_f) - - EDDY AND TOPUP OPTIONS - --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) - --readout Readout time. ($readout) - --topup_bet_f Fractional intensity threshold for bet before EDDY (generate brain mask). - ($topup_bet_f) - --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) - --use_slice_drop_correction If set, will use the slice drop correction from EDDY. ($use_slice_drop_correction) - - NORMALIZATION OPTIONS - --fa_mask_threshold Threshold to use when creating the fa mask for normalization. ($fa_mask_threshold) - - RESAMPLE OPTIONS - --t2w_resolution Resampling resolution of the T2w image. ($t2w_resolution) - --t2w_interpolation Interpolation method to use after resampling. ($t2w_interpolation) - --mask_interpolation Interpolation method to use on the anatomical masks after resampling. ($mask_interpolation) - --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) - --dwi_interpolation Interpolation method to use after resampling of the dwi volume. ($dwi_interpolation) - --mask_dwi_interpolation Interpolation method to use on the b0 mask after resampling. 
($mask_dwi_interpolation) - - DTI OPTIONS - --max_dti_shell_value Maximum b-value threshold to select DTI shells. (b <= $max_dti_shell_value) - This is the default behavior unless --dti_shells is specified. - --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). - They need to be supplied between quotes e.g. (--dti_shells "0 1000"). - If supplied, will overwrite --max_dti_shell_value. - - SH OPTIONS - --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and output the SH coefficients - in a Nifti file. ($sh_fitting) - --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even number). ($sh_fitting_order) - Rules : --sh_fitting_order=8 for 45 directions - --sh_fitting_order=6 for 28 directions - --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. ($sh_fitting_basis) - --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is used. - They need to be supplied between quotes e.g. (--sh_fitting_shells "0 1500"). - NOTE: SH fitting works only on single shell. The b0 shell has to be included. - - FODF OPTIONS - --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell (b >= $min_fodf_shell_value) - This is the default behavior unless --fodf_shells is provided. - --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). - They need to be supplied between quotes e.g. (--fodf_shells "0 1500") - If supplied, will overwrite --min_fodf_shell_value. - --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. ($max_fa_in_ventricle) - --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. ($min_md_in_ventricle) - --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) - --basis fODF basis [descoteaux07, tournier07]. 
($basis) - --sh_order Sperical Harmonics order ($sh_order) - Rules : --sh_fitting_order=8 for 45 directions - --sh_fitting_order=6 for 28 directions - - FRF OPTIONS - --mean_frf Mean the FRF of all subjects. ($mean_frf) - USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER - AND HAVE THE SAME ACQUISITION. - --fa Initial FA threshold to compute the frf. ($fa) - --min_fa Minimum FA threshold to compute the frf. ($min_fa) - --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) - --roi_radius Region of interest radius to compute the frf. ($roi_radius) - --set_frf If selected, will manually set the frf. ($set_frf) - --manual_frf FRF set manually (--manual_frf "$manual_frf") - - SEEDING AND TRAKING OPTIONS - --use_brain_mask_as_tracking_mask If selected, will use the complete brain mask (including GM, CSF and WM) as a tracking mask. - Be careful when examining your results, if the hemispheres are not properly separated by the mask, - streamlines could connect both hemisphere in the superior regions. Default is false and WM mask - is used as a tracking mask. ($use_brain_mask_as_tracking_mask) - --fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding and tracking. - ($fa_seeding_mask_thr) - --algo Tracking algorithm [prob, det]. ($algo) - --nb_seeds Number of seeds related to the seeding type param. ($nb_seeds) - --seeding Seeding type [npv, nt]. ($seeding) - --step_size Step size ($step_size) - --theta Maximum angle between 2 steps. ($theta) - --min_len Minimum length for a streamline. ($min_len) - --max_len Maximum length for a streamline. ($max_len) - --compress_value Compression error threshold. ($compress_value) - --tracking_seed List of random seed numbers for the random number generator. ($tracking_seed) - Please write them as a list separated by commas without space e.g. 
(--tracking_seed 1,2,3) - - PROCESSES OPTIONS - --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) - --processes_eddy Number of processes for EDDY task. ($processes_eddy) - --processes_registration Number of processes for registration task. ($processes_registration) - --processes_fodf Number of processes for fODF task. ($processes_fodf) - -[CONNECTOMICS OPTIONS] - - DECOMPOSE OPTIONS - --no_pruning If set, will not prune on length ($no_pruning) - --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) - --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) - --min_length Pruning minimal segment length ($min_length) - --max_length Pruning maximal segment length ($max_length) - --loop_max_angle Maximal winding angle over which a streamline is considered as looping - ($loop_max_angle) - --outlier_threshold Outlier removal threshold when using hierarchical QB ($outlier_threshold) - - COMMIT OPTIONS - --nbr_dir Number of directions, (half sphere), representing the possible orientations of the - response functions ($nbr_dir) - --para_diff Parallel diffusivity in mm^2/s ($para_diff) - --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) - - PROCESSES OPTIONS - --processes_commit Number of processes for COMMIT task ($processes_commit) - --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) - --processes_connectivity Number of processes for connectivity task ($processes_connectivity) - -[GLOBAL OPTIONS] - - OUTPUT OPTIONS - --output_dir Directory to write the final results. Default is "./Results_Infant_Tracking/". - -AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) - -no_symlink When used, results will be directly copied in the output folder and symlink will not - be used. - -macos When used, the scratch folder will be modified for MacOS users. 
- -tracking When used, will perform the tracking pipeline to generate the whole-brain - tractogram from raw diffusion images. - -connectomics When used, will perform connectivity analysis between atlas-based segmentation. - -NOTES - -The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the -Singularity container is used. - -The intermediate working directory is, by default, set to './work'. -To change it, use the '-w WORK_DIR' argument. - -The default config file is tractoflow/nextflow.config. -Use '-C config_file.config' to specify a non-default configuration file. -The '-C config_file.config' must be inserted after the nextflow call -like 'nextflow -C config_file.config run ...'. \ No newline at end of file + nextflow run ChildBrainFlow/main.nf --help -profile tracking,connectomics,infant diff --git a/containers/Dockerfile b/containers/Dockerfile new file mode 100644 index 0000000..f579b32 --- /dev/null +++ b/containers/Dockerfile @@ -0,0 +1,78 @@ +FROM scilus/scilus:1.6.0 + +LABEL version="ChildBrainFlow-1.0.0" + +RUN wget -O FS_BN_GL_SF_utils.tar.gz "https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0" && \ + tar -xzvf FS_BN_GL_SF_utils.tar.gz && \ + rm FS_BN_GL_SF_utils.tar.gz + +# Installing dependencies. 
+RUN apt-get update && \
+    apt-get install -y csh tcsh && \
+    apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev && \
+    echo "/usr/local/lib" >> /etc/ld.so.conf && \
+    ldconfig
+RUN wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 && \
+    tar xjf parallel-latest.tar.bz2 && \
+    cd parallel-* && ./configure && make && make install && \
+    echo 'will cite' | parallel --citation 1> /dev/null 2> /dev/null
+
+# Setup Conda for FastSurfer
+ARG CONDA_FILE=Miniconda3-py38_4.11.0-Linux-x86_64.sh
+RUN wget --no-check-certificate -qO ~/miniconda.sh https://repo.continuum.io/miniconda/${CONDA_FILE} && \
+    chmod +x ~/miniconda.sh && \
+    ~/miniconda.sh -b -p /opt/conda && \
+    rm ~/miniconda.sh
+ENV PATH /opt/conda/bin:$PATH
+
+# Setup FastSurfer
+WORKDIR /
+RUN wget -O FastSurfer.tar.gz "https://github.com/Deep-MI/FastSurfer/archive/refs/tags/v2.1.2.tar.gz" && \
+    tar -xzvf FastSurfer.tar.gz && \
+    mv FastSurfer-2.1.2 FastSurfer && \
+    rm FastSurfer.tar.gz && \
+    conda env create -f FastSurfer/fastsurfer_env_cpu.yml
+
+# Install conda-pack:
+RUN conda install -c conda-forge conda-pack
+
+# Use conda-pack to create a standalone env in /venv:
+RUN conda-pack -n fastsurfer_cpu -o /tmp/env.tar && \
+    mkdir /venv && cd /venv && tar xf /tmp/env.tar && \
+    rm /tmp/env.tar
+
+# Fix paths. 
+RUN /venv/bin/conda-unpack +ENV PATH /venv/bin:$PATH + +# Installing freesurfer on top of scilus:1.6.0 +WORKDIR / +RUN wget "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-centos7_x86_64-7.3.2.tar.gz" -O fs.tar.gz && \ + tar --no-same-owner -xzvf fs.tar.gz && \ + rm fs.tar.gz +RUN wget -O freesurfer/license.txt "https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0" + +# Setup freesurfer env +ENV OS=Linux +ENV PATH=${PATH}:/FastSurfer:/freesurfer/bin:/freesurfer/fsfast/bin:/freesurfer/tktools:/freesurfer/mni/bin:/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +ENV FREESURFER_HOME=/freesurfer +ENV FREESURFER=/freesurfer +ENV SUBJECTS_DIR=/freesurfer/subjects +ENV LOCAL_DIR=/freesurfer/local +ENV FSFAST_HOME=/freesurfer/fsfast +ENV FMRI_ANALYSIS_DIR=/freesurfer/fsfast +ENV FUNCTIONALS_DIR=/freesurfer/sessions +ENV FS_LICENSE=/freesurfer/license.txt + +# set default fs options +ENV FS_OVERRIDE=0 +ENV FIX_VERTEX_AREA="" +ENV FSF_OUTPUT_FORMAT=nii.gz + +# mni env requirements +ENV MINC_BIN_DIR=/freesurfer/mni/bin +ENV MINC_LIB_DIR=/freesurfer/mni/lib +ENV MNI_DIR=/freesurfer/mni +ENV MNI_DATAPATH=/freesurfer/mni/data +ENV MNI_PERL5LIB=/freesurfer/mni/share/perl5 +ENV PERL5LIB=/freesurfer/mni/share/perl5 \ No newline at end of file diff --git a/containers/apptainer_recipe.def b/containers/apptainer_recipe.def new file mode 100644 index 0000000..2dd2ab7 --- /dev/null +++ b/containers/apptainer_recipe.def @@ -0,0 +1,113 @@ +Bootstrap: docker +From: scilus/scilus:1.6.0 + +%labels + version ChildBrainFlow-1.0.0 + +%post + # Install required tcsh and csh. 
+ apt-get update + apt-get install -y csh tcsh + + # Installing Conda for FastSurfer + export CONDA_FILE=Miniconda3-py38_4.11.0-Linux-x86_64.sh + wget --no-check-certificate -qO ~/miniconda.sh https://repo.continuum.io/miniconda/${CONDA_FILE} + chmod +x ~/miniconda.sh + ~/miniconda.sh -b -p /opt/conda + rm ~/miniconda.sh + export PATH=/opt/conda/bin:$PATH + + # Installing FastSurfer latest stable release. + wget -O FastSurfer.tar.gz "https://github.com/Deep-MI/FastSurfer/archive/refs/tags/v2.1.2.tar.gz" + tar -xzvf FastSurfer.tar.gz -C $APPTAINER_ROOTFS/ + mv $APPTAINER_ROOTFS/FastSurfer-2.1.2 $APPTAINER_ROOTFS/FastSurfer + rm FastSurfer.tar.gz + conda env create -f $APPTAINER_ROOTFS/FastSurfer/fastsurfer_env_cpu.yml + + # Install conda-pack + conda install -c conda-forge conda-pack + + # Use conda-pack to create a standalone env in /venv: + conda-pack -n fastsurfer_cpu -o /tmp/env.tar && \ + mkdir /venv && cd /venv && tar xf /tmp/env.tar && \ + rm /tmp/env.tar + + # Fix paths. + /venv/bin/conda-unpack + export PATH=/venv/bin:$PATH + export PYTHONPATH=/FastSurfer:$PYTHONPATH + + # Downloading checkpoints. 
+ python3.8 $APPTAINER_ROOTFS/FastSurfer/FastSurferCNN/download_checkpoints.py --all + + # Installing FreeSurfer on top of scilus:1.6.0 + cd /root + wget "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.3.2/freesurfer-linux-centos7_x86_64-7.3.2.tar.gz" -O fs.tar.gz + tar --no-same-owner -xzvf fs.tar.gz + mv freesurfer $APPTAINER_ROOTFS/ + rm fs.tar.gz + + # Install lib* + apt-get install -y libglu1-mesa libxt6 libxmu6 libgl1 freeglut3-dev + echo "/usr/local/lib" >> /etc/ld.so.conf + ldconfig + + # Download additional files + wget -O $APPTAINER_ROOTFS/freesurfer/license.txt "https://www.dropbox.com/scl/fi/0s8lp6lydyd0rxawxb4jm/license.txt?rlkey=hz54oc0d4sor69avqphtrjvgn&dl=0" + wget -O FS_BN_GL_SF_utils.tar.gz "https://www.dropbox.com/scl/fi/6s1tc4eanf2sutejw7fkd/FS_BN_GL_SF_utils.tar.gz?rlkey=3gvhvpepv7ldkqef3go10cb5e&dl=0" && \ + tar -xzvf FS_BN_GL_SF_utils.tar.gz -C $APPTAINER_ROOTFS/ && \ + rm FS_BN_GL_SF_utils.tar.gz + + # Setup parallel + wget http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 + tar xjf parallel-latest.tar.bz2 + cd parallel-* && ./configure + make && make install + echo 'will cite' | parallel --citation 1> /dev/null 2> /dev/null & + + # Setup FreeSurfer environment + export OS=Linux + export PATH=${PATH}:$APPTAINER_ROOTFS/freesurfer/bin:$APPTAINER_ROOTFS/freesurfer/fsfast/bin:$APPTAINER_ROOTFS/freesurfer/tktools:$APPTAINER_ROOTFS/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export FREESURFER_HOME=$APPTAINER_ROOTFS/freesurfer + export FREESURFER=$APPTAINER_ROOTFS/freesurfer + export SUBJECTS_DIR=$APPTAINER_ROOTFS/freesurfer/subjects + export LOCAL_DIR=$APPTAINER_ROOTFS/freesurfer/local + export FSFAST_HOME=$APPTAINER_ROOTFS/freesurfer/fsfast + export FMRI_ANALYSIS_DIR=$APPTAINER_ROOTFS/freesurfer/fsfast + export FUNCTIONALS_DIR=$APPTAINER_ROOTFS/freesurfer/sessions + export FS_LICENSE=$APPTAINER_ROOTFS/freesurfer/license.txt + + # Set default FreeSurfer options + export FS_OVERRIDE=0 + export 
FIX_VERTEX_AREA="" + export FSF_OUTPUT_FORMAT=nii.gz + + # Set MNI environment requirements + export MINC_BIN_DIR=$APPTAINER_ROOTFS/freesurfer/mni/bin + export MINC_LIB_DIR=$APPTAINER_ROOTFS/freesurfer/mni/lib + export MNI_DIR=$APPTAINER_ROOTFS/freesurfer/mni + export MNI_DATAPATH=$APPTAINER_ROOTFS/freesurfer/mni/data + export MNI_PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + export PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + +%environment + export OS=Linux + export PATH=${PATH}:$APPTAINER_ROOTFS/freesurfer/bin:$APPTAINER_ROOTFS/freesurfer/fsfast/bin:$APPTAINER_ROOTFS/freesurfer/tktools:$APPTAINER_ROOTFS/freesurfer/mni/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + export FREESURFER_HOME=$APPTAINER_ROOTFS/freesurfer + export FREESURFER=$APPTAINER_ROOTFS/freesurfer + export SUBJECTS_DIR=$APPTAINER_ROOTFS/freesurfer/subjects + export LOCAL_DIR=$APPTAINER_ROOTFS/freesurfer/local + export FSFAST_HOME=$APPTAINER_ROOTFS/freesurfer/fsfast + export FMRI_ANALYSIS_DIR=$APPTAINER_ROOTFS/freesurfer/fsfast + export FUNCTIONALS_DIR=$APPTAINER_ROOTFS/freesurfer/sessions + export FS_LICENSE=$APPTAINER_ROOTFS/freesurfer/license.txt + export FS_OVERRIDE=0 + export FIX_VERTEX_AREA="" + export FSF_OUTPUT_FORMAT=nii.gz + export MINC_BIN_DIR=$APPTAINER_ROOTFS/freesurfer/mni/bin + export MINC_LIB_DIR=$APPTAINER_ROOTFS/freesurfer/mni/lib + export MNI_DIR=$APPTAINER_ROOTFS/freesurfer/mni + export MNI_DATAPATH=$APPTAINER_ROOTFS/freesurfer/mni/data + export MNI_PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + export PERL5LIB=$APPTAINER_ROOTFS/freesurfer/mni/share/perl5 + export PATH=/venv/bin:$PATH diff --git a/main.nf b/main.nf index af09bf8..2d7bd85 100644 --- a/main.nf +++ b/main.nf @@ -1,114 +1,224 @@ #!/usr/bin/env nextflow +import groovy.json.JsonOutput + nextflow.enable.dsl=2 params.help = false -// Importing modules and processes -include { fetch_id; - get_data_tracking; - get_data_connectomics } from "./modules/io.nf" -include { 
PREPROCESSING } from "./modules/tracking/workflows/preprocessing.nf" -include { DTI } from "./modules/tracking/workflows/DTI.nf" -include { SH } from "./modules/tracking/workflows/SH.nf" -include { REGISTRATION } from "./modules/tracking/workflows/registration.nf" -include { FODF } from "./modules/tracking/workflows/FODF.nf" -include { TRACKING } from "./modules/tracking/workflows/tracking.nf" -include { CONNECTOMICS } from "./modules/connectomics/workflows/connectomics.nf" +// ** Importing modules and processes ** // +include { fetch_id; + get_data_freesurfer; + get_data_tracking; + get_data_tracking_infant; + get_data_connectomics; + get_data_connectomics_infant; + get_data_template; + display_run_info } from "./modules/io.nf" +include { DWI; + ANAT } from "./modules/tracking/workflows/preprocessing.nf" +include { DTI } from "./modules/tracking/workflows/DTI.nf" +include { SH } from "./modules/tracking/workflows/SH.nf" +include { REGISTRATION } from "./modules/tracking/workflows/registration.nf" +include { FODF } from "./modules/tracking/workflows/FODF.nf" +include { TRACKING } from "./modules/tracking/workflows/tracking.nf" +include { CONNECTOMICS } from "./modules/connectomics/workflows/connectomics.nf" +include { POPULATION_TEMPLATE } from "./modules/template/workflows/pop_template.nf" +include { FREESURFERFLOW } from "./modules/freesurfer/workflows/freesurferflow.nf" +include { PRIORS } from "./modules/priors/workflows/priors.nf" workflow { - if (params.help) display_usage() + if (params.help) { display_usage() } else { display_run_info() - if ( params.run_tracking ) { + // ** Checking compatibility between profiles. ** // + if ( params.infant_config && params.run_freesurfer ) { + error "Profiles infant_config and freesurfer are not compatible since infant_freesurfer is not implemented." 
+ } + + if ( params.template_config ) { + data = get_data_template() + + POPULATION_TEMPLATE(data.anat, + data.dwi, + data.fa, + data.anat_ref, + data.fa_ref) + } + + if ( params.run_freesurfer ) { + data = get_data_freesurfer() + + utils = Channel.fromPath(params.atlas_utils_folder) + FREESURFERFLOW(data.anat, utils) + + } + + if ( params.priors && !params.run_tracking ) { + data = get_data_tracking() - PREPROCESSING(data.dwi, - data.rev, - data.anat, - data.brain_mask, - data.wm_mask) - - DTI(PREPROCESSING.out.dwi_bval_bvec, - PREPROCESSING.out.b0_and_mask) + PRIORS(data.dwi) + + } - if(params.sh_fitting) { - SH(PREPROCESSING.out.dwi_bval_bvec) + if ( params.run_tracking ) { + if ( params.infant_config ) { + data = get_data_tracking_infant() + } else { + data = get_data_tracking() } - fa_channel = DTI.out.fa_and_md - .map{[it[0], it[1]]} + // ** Merging mask and anat if -profile infant. ** // + if ( params.infant_config ) { + anat_channel = data.anat + .combine(data.wm_mask, by: 0) + } + else { + anat_channel = data.anat + } + // ** Anatomical preprocessing ** // + ANAT(anat_channel) + + // ** DWI preprocessing ** // + DWI(data.dwi, + data.rev) + + // ** DTI modelling ** // + DTI(DWI.out.dwi_bval_bvec, + DWI.out.b0_and_mask) - REGISTRATION(PREPROCESSING.out.dwi_bval_bvec, - PREPROCESSING.out.t2w_and_mask, - fa_channel) + // ** SH fitting if set ** // + if ( params.sh_fitting ) { + SH(DWI.out.dwi_bval_bvec) + } + + // ** Registration of anatomical volume on diffusion volumes. 
** // + REGISTRATION(DTI.out.fa_and_md, + ANAT.out.anat_and_mask, + DWI.out.b0_and_mask.map{ [it[0], it[1]] }) - b0_mask_channel = PREPROCESSING.out.b0_and_mask - .map{[it[0], it[2]]} + // ** Extracting b0 ** // + b0_mask_channel = DWI.out.b0_and_mask + .map{[it[0], it[2]]} - FODF(PREPROCESSING.out.dwi_bval_bvec, + // ** Modelling FODF ** // + FODF(DWI.out.dwi_bval_bvec, b0_mask_channel, DTI.out.fa_and_md) - - masks_channel = REGISTRATION.out.warped_anat - .map{[it[0], it[2], it[3]]} - TRACKING(masks_channel, + // ** FA channel for tracking maps ** // + fa_channel = DTI.out.fa_and_md + .map{[it[0], it[1]]} + + // ** Tracking ** // + TRACKING(REGISTRATION.out.warped_anat, FODF.out.fodf, fa_channel) } + if ( params.priors && params.run_tracking ) { + + data = get_data_tracking() + + PRIORS(DWI.out.dwi_bval_bvec) + + } + if ( params.run_connectomics && params.run_tracking ) { + // ** Fetch tracking data ** // tracking = TRACKING.out.trk - // ** Labels needs to be provided as an input, since they are not computed at some point in the pipeline ** // - labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) - { fetch_id(it.parent, input) } + // ** Fetching labels from freesurferflow if -profile freesurfer is used, if not, ** // + // ** fetching it from input files. 
** // + input = file(params.input) + if ( !params.run_freesurfer ) { + labels = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + } else { + labels = FREESURFERFLOW.out.labels + } - dwi_peaks = PREPROCESSING.out.dwi_bval_bvec - .combine(FODF.out.peaks, by: 0) + // ** Preparing metrics channel ** // + dwi_peaks = DWI.out.dwi_bval_bvec + .combine(FODF.out.peaks, by: 0) fodf = FODF.out.fodf - // ** Default metrics will be used with combined metrics provided in the input folder ** // - provided_metrics = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, flat: true) - { fetch_id(it.parent, input) } def_metrics = DTI.out.fa_and_md .combine(DTI.out.ad_and_rd, by: 0) .combine(FODF.out.afd_and_nufo, by: 0) - metrics = provided_metrics - .combine(def_metrics, by: 0) + .map{ sid, fa, md, ad, rd, afd, nufo -> tuple(sid, [fa, md, ad, rd, afd, nufo])} + .transpose() + + if ( file("$input/**/metrics/*.nii.gz") ) { + // ** Default metrics will be used with combined metrics provided in the input folder ** // + provided_metrics = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, flat: false) + { fetch_id(it.parent.parent, input) } + .transpose() + + def_metrics = def_metrics + .concat(provided_metrics) + } + + // ** Flattening metrics channel ** // + metrics_flat = def_metrics.groupTuple() - t2w = PREPROCESSING.out.t2w_and_mask - .map{ [it[0], it[1]] } + // ** Fetching anat ** // + t2w = REGISTRATION.out.warped_anat + .map{ [it[0], it[1]] } + // ** Fetching transformation files ** // transfos = REGISTRATION.out.transfos + // ** Fetching FA, MD, AD, RD for priors computation ** // + fa_md_ad_rd_channel = DTI.out.fa_and_md + .combine(DTI.out.ad_and_rd, by: 0) + + // ** Launching connectomics workflow ** // CONNECTOMICS(tracking, - labels, - dwi_peaks, - fodf, - metrics, - t2w, - transfos) + labels, + dwi_peaks, + fodf, + metrics_flat, + t2w, + transfos, + fa_md_ad_rd_channel) } if ( params.run_connectomics 
&& !params.run_tracking ) { - data = get_data_connectomics() + if ( params.infant_config ) { + data = get_data_connectomics_infant() + } else { + data = get_data_connectomics() + } + + if ( params.run_freesurfer ) { + labels = FREESURFERFLOW.out.labels + anat = FREESURFERFLOW.out.t1 + } else { + labels = data.labels + anat = data.anat + } + + metrics = data.metrics.transpose().groupTuple() CONNECTOMICS(data.trk, - data.labels, - data.dwi_peaks, - data.fodf, - data.metrics, - data.t2w, - data.transfos) + labels, + data.dwi_peaks, + data.fodf, + metrics, + anat, + data.transfos, + []) } } } if (!params.help) { workflow.onComplete = { + jsonStr = JsonOutput.toJson(params) + file("${params.output_dir}/parameters.json").text = JsonOutput.prettyPrint(jsonStr) log.info "Pipeline completed at : $workflow.complete" log.info "Execution status : ${ workflow.success ? 'COMPLETED' : 'FAILED'}" log.info "Execution duration : $workflow.duration" @@ -116,14 +226,36 @@ if (!params.help) { } def display_usage () { - usage = file("$projectDir/USAGE") + + if (params.run_tracking && !params.infant_config && !params.run_connectomics && !params.run_freesurfer ) { + usage = file("$projectDir/modules/tracking/USAGE") + } else if (params.run_tracking && params.infant_config && !params.run_connectomics && !params.run_freesurfer ) { + usage = file("$projectDir/modules/tracking/USAGE_INFANT") + } else if (params.run_connectomics && !params.infant_config && !params.run_tracking && !params.run_freesurfer ) { + usage = file("$projectDir/modules/connectomics/USAGE") + } else if (params.run_connectomics && params.infant_config && !params.run_tracking && !params.run_freesurfer ) { + usage = file("$projectDir/modules/connectomics/USAGE_INFANT") + } else if ( params.run_tracking && params.run_connectomics && !params.infant_config && !params.run_freesurfer ) { + usage = file("$projectDir/modules/connectomics/USAGE_TRACKING") + } else if ( params.run_tracking && params.run_connectomics && 
params.infant_config && !params.run_freesurfer ) { + usage = file("$projectDir/modules/connectomics/USAGE_TRACKING_INFANT") + } else if ( params.run_freesurfer && !params.run_tracking && !params.run_connectomics ) { + usage = file("$projectDir/modules/freesurfer/USAGE") + } else if ( params.run_freesurfer && !params.run_tracking && params.run_connectomics ) { + usage = file("$projectDir/modules/freesurfer/USAGE_CONN") + } else if ( params.run_freesurfer && params.run_tracking && params.run_connectomics ) { + usage = file("$projectDir/modules/connectomics/USAGE_ALL") + } else { + usage = file("$projectDir/USAGE") + } cpu_count = Runtime.runtime.availableProcessors() bindings = ["b0_thr":"$params.b0_thr", + "skip_dwi_preprocessing":"$params.skip_dwi_preprocessing", "initial_bet_f":"$params.initial_bet_f", "final_bet_f":"$params.final_bet_f", - "run_bet_t2w":"$params.run_bet_t2w", - "bet_t2w_f":"$params.bet_t2w_f", + "run_bet_anat":"$params.run_bet_anat", + "bet_anat_f":"$params.bet_anat_f", "topup_config":"$params.topup_config", "encoding_direction":"$params.encoding_direction", "readout":"$params.readout", @@ -131,19 +263,30 @@ def display_usage () { "eddy_cmd":"$params.eddy_cmd", "topup_bet_f":"$params.topup_bet_f", "use_slice_drop_correction":"$params.use_slice_drop_correction", + "run_synthbet":"$params.run_synthbet", + "shells":"$params.shells", + "shell_thr":"$params.shell_thr", + "gpu":"$params.gpu", + "border":"$params.border", + "nocsf":"$params.nocsf", + "weights":"$params.weights", "dwi_shell_tolerance":"$params.dwi_shell_tolerance", "fa_mask_threshold":"$params.fa_mask_threshold", - "t2w_resolution":"$params.t2w_resolution", - "t2w_interpolation":"$params.t2w_interpolation", + "anat_resolution":"$params.anat_resolution", + "anat_interpolation":"$params.anat_interpolation", "mask_interpolation":"$params.mask_interpolation", + "template_t1":"$params.template_t1", "dwi_resolution":"$params.dwi_resolution", "dwi_interpolation":"$params.dwi_interpolation", 
"mask_dwi_interpolation":"$params.mask_dwi_interpolation", + "dti_shells":"$params.dti_shells", "max_dti_shell_value":"$params.max_dti_shell_value", "sh_fitting":"$params.sh_fitting", "sh_fitting_order":"$params.sh_fitting_order", "sh_fitting_basis":"$params.sh_fitting_basis", + "fodf_shells":"$params.fodf_shells", "min_fodf_shell_value":"$params.min_fodf_shell_value", + "fodf_metrics_a_facotr":"$params.fodf_metrics_a_factor", "max_fa_in_ventricle":"$params.max_fa_in_ventricle", "min_md_in_ventricle":"$params.min_md_in_ventricle", "relative_threshold":"$params.relative_threshold", @@ -156,20 +299,47 @@ def display_usage () { "roi_radius":"$params.roi_radius", "set_frf":"$params.set_frf", "manual_frf":"$params.manual_frf", - "fa_seeding_mask_thr":"$params.fa_seeding_mask_thr", - "algo":"$params.algo", - "seeding":"$params.seeding", - "nb_seeds":"$params.nb_seeds", - "tracking_seed":"$params.tracking_seed", - "step_size":"$params.step_size", - "theta":"$params.theta", - "sfthres":"$params.sfthres", - "min_len":"$params.min_len", - "max_len":"$params.max_len", - "use_brain_mask_as_tracking_mask":"$params.use_brain_mask_as_tracking_mask", - "compress_value":"$params.compress_value", + "number_of_tissues":"$params.number_of_tissues", + "run_pft_tracking":"$params.run_pft_tracking", + "pft_compress_streamlines":"$params.pft_compress_streamlines", + "pft_seeding_mask_type":"$params.pft_seeding_mask_type", + "pft_fa_seeding_mask_thr":"$params.pft_fa_seeding_mask_thr", + "pft_algo":"$params.pft_algo", + "pft_nbr_seeds":"$params.pft_nbr_seeds", + "pft_seeding":"$params.pft_seeding", + "pft_step_size":"$params.pft_step_size", + "pft_theta":"$params.pft_theta", + "pft_sfthres":"$params.pft_sfthres", + "pft_sfthres_init":"$params.pft_sfthres_init", + "pft_min_len":"$params.pft_min_len", + "pft_max_len":"$params.pft_max_len", + "pft_particles":"$params.pft_particles", + "pft_back":"$params.pft_back", + "pft_front":"$params.pft_front", + 
"pft_compress_value":"$params.pft_compress_value", + "pft_random_seed":"$params.pft_random_seed", + "run_local_tracking":"$params.run_local_tracking", + "local_compress_streamlines":"$params.local_compress_streamlines", + "local_fa_seeding_mask_thr":"$params.local_fa_seeding_mask_thr", + "local_seeding_mask_type":"$params.local_seeding_mask_type", + "local_fa_tracking_mask_thr":"$params.local_fa_tracking_mask_thr", + "local_tracking_mask_type":"$params.local_tracking_mask_type", + "local_algo":"$params.local_algo", + "local_seeding":"$params.local_seeding", + "local_nbr_seeds":"$params.local_nbr_seeds", + "local_tracking_seed":"$params.local_tracking_seed", + "local_step_size":"$params.local_step_size", + "local_theta":"$params.local_theta", + "local_sfthres":"$params.local_sfthres", + "local_sfthres_init":"$params.local_sfthres_init", + "local_min_len":"$params.local_min_len", + "local_max_len":"$params.local_max_len", + "local_erosion":"$params.local_erosion", + "local_compress_value":"$params.local_compress_value", "output_dir":"$params.output_dir", "processes_denoise_dwi":"$params.processes_denoise_dwi", + "processes_denoise_t1":"$params.processes_denoise_t1", + "processes_bet_t1":"$params.processes_bet_t1", "processes_eddy":"$params.processes_eddy", "processes_registration":"$params.processes_registration", "processes_fodf":"$params.processes_fodf", @@ -180,151 +350,56 @@ def display_usage () { "max_length":"$params.max_length", "loop_max_angle":"$params.loop_max_angle", "outlier_threshold":"$params.outlier_threshold", + "compute_priors":"$params.compute_priors", + "fa_min_priors":"$params.fa_min_priors", + "fa_max_priors":"$params.fa_max_priors", + "md_min_priors":"$params.md_min_priors", + "roi_radius_priors":"$params.roi_radius", + "run_commit":"$params.run_commit", + "use_commit2":"$params.use_commit2", + "use_both_commit":"$params.use_both_commit", + "commit_on_trk":"$params.commit_on_trk", + "b_thr":"$params.b_thr", + "ball_stick":"$params.ball_stick", 
"nbr_dir":"$params.nbr_dir", "para_diff":"$params.para_diff", + "perp_diff":"$params.perp_diff", "iso_diff":"$params.iso_diff", "processes_commit":"$params.processes_commit", "processes_afd_fixel":"$params.processes_afd_fixel", "processes_connectivity":"$params.processes_connectivity", + "references":"$params.references", + "recon_all":"$params.recon_all", + "recon_surf":"$params.recon_surf", + "use_freesurfer_atlas":"$params.use_freesurfer_atlas", + "use_brainnetome_atlas":"$params.use_brainnetome_atlas", + "use_brainnetome_child_atlas":"$params.use_brainnetome_child_atlas", + "use_glasser_atlas":"$params.use_glasser_atlas", + "use_schaefer_100_atlas":"$params.use_schaefer_100_atlas", + "use_schaefer_200_atlas":"$params.use_schaefer_200_atlas", + "use_schaefer_400_atlas":"$params.use_schaefer_400_atlas", + "use_lausanne_1_atlas":"$params.use_lausanne_1_atlas", + "use_lausanne_2_atlas":"$params.use_lausanne_2_atlas", + "use_lausanne_3_atlas":"$params.use_lausanne_3_atlas", + "use_lausanne_4_atlas":"$params.use_lausanne_4_atlas", + "use_lausanne_5_atlas":"$params.use_lausanne_5_atlas", + "use_dilated_labels":"$params.use_dilated_labels", + "nb_threads":"$params.nb_threads", + "atlas_utils_folder":"$params.atlas_utils_folder", + "compute_FS_BN_GL_SF":"$params.compute_FS_BN_GL_SF", + "compute_BN_child":"$params.compute_BN_child", + "compute_lausanne_multiscale":"$params.compute_lausanne_multiscale", + "compute_lobes":"$params.compute_lobes", + "run_freesurfer":"$params.run_freesurfer", "run_tracking":"$params.run_tracking", - "run_connectomics":"$params.run_connectomics" + "run_connectomics":"$params.run_connectomics", + "template_config":"$params.template_config", + "processes":"$params.processes", + "cpu_count":"$cpu_count" ] engine = new groovy.text.SimpleTemplateEngine() template = engine.createTemplate(usage.text).make(bindings) print template.toString() -} - -def display_run_info () { - log.info "" - log.info "Infant-DWI pipeline" - log.info 
"========================" - log.info "Pipeline adapted from the SCIL Tractoflow pipeline " - log.info "(https://github.com/scilus/tractoflow.git) and the " - log.info "Connectoflow Pipeline (https://github.com/scilus/connectoflow.git)." - log.info "Made for use on newborn diffusion MRI data." - log.info "" - log.info "Start time: $workflow.start" - log.info "" - - log.debug "[Command-line]" - log.debug "$workflow.commandLine" - log.debug "" - - log.info "[Git Info]" - log.info "$workflow.repository - $workflow.revision [$workflow.commitId]" - log.info "" - - log.info "[Inputs]" - log.info "Input: $params.input" - log.info "Output Directory: $params.output_dir" - log.info "" - - if ( params.run_tracking ) { - log.info "[Tracking Options]" - log.info "" - log.info "GLOBAL OPTIONS" - log.info "Threshold for b0: $params.b0_thr" - log.info "DWI Shell Tolerance: $params.dwi_shell_tolerance" - log.info "" - log.info "BET DWI OPTIONS" - log.info "Initial fractional value for BET: $params.initial_bet_f" - log.info "Finale fractional value for BET: $params.final_bet_f" - log.info "" - log.info "BET T2W OPTIONS" - log.info "Run BET on T2W image: $params.run_bet_t2w" - log.info "Fractional value for T2W BET: $params.bet_t2w_f" - log.info "" - log.info "EDDY AND TOPUP OPTIONS" - log.info "Configuration for topup: $params.topup_config" - log.info "Encoding direction: $params.encoding_direction" - log.info "Readout: $params.readout" - log.info "Topup prefix: $params.topup_prefix" - log.info "Topup BET fractional value: $params.topup_bet_f" - log.info "Eddy command: $params.eddy_cmd" - log.info "Run slice drop correction: $params.use_slice_drop_correction" - log.info "" - log.info "NORMALIZE OPTIONS" - log.info "FA threshold for masking: $params.fa_mask_threshold" - log.info "" - log.info "RESAMPLE ANAT OPTIONS" - log.info "Resampling resolution for T2W: $params.t2w_resolution" - log.info "Interpolation method for T2W: $params.t2w_interpolation" - log.info "Interpolation method 
for masks: $params.mask_interpolation" - log.info "" - log.info "RESAMPLE DWI OPTIONS" - log.info "Resampling resolution for DWI: $params.dwi_resolution" - log.info "Interpolation method for DWI: $params.dwi_interpolation" - log.info "Interpolation method for DWI mask: $params.mask_dwi_interpolation" - log.info "" - log.info "EXTRACT DWI SHELLS OPTIONS" - log.info "Maximum DTI shell value: $params.max_dti_shell_value" - log.info "" - log.info "SH FITTING OPTIONS" - log.info "Run SH fitting: $params.sh_fitting" - log.info "SH fitting order: $params.sh_fitting_order" - log.info "SH fitting basis: $params.sh_fitting_basis" - log.info "" - log.info "FODF OPTIONS" - log.info "Minimum fODF shell value: $params.min_fodf_shell_value" - log.info "Maximum FA value in ventricles: $params.max_fa_in_ventricle" - log.info "Minimum MD value in ventricles: $params.min_md_in_ventricle" - log.info "Relative threshold (RT): $params.relative_threshold" - log.info "SH basis: $params.basis" - log.info "SH order: $params.sh_order" - log.info "" - log.info "FRF OPTIONS" - log.info "Run mean FRF: $params.mean_frf" - log.info "FA threshold for single fiber voxel: $params.fa" - log.info "Minimum FA for selecting voxel: $params.min_fa" - log.info "Minimum number of voxels: $params.min_nvox" - log.info "ROI radius: $params.roi_radius" - log.info "Set FRF: $params.set_frf" - log.info "Manual FRF: $params.manual_frf" - log.info "" - log.info "SEEDING AND TRACKING OPTIONS" - log.info "FA threshold for seeding mask: $params.fa_seeding_mask_thr" - log.info "Algorithm for tracking: $params.algo" - log.info "Number of seeds per voxel: $params.nb_seeds" - log.info "Seeding method: $params.seeding" - log.info "Step size: $params.step_size" - log.info "Theta threshold: $params.theta" - log.info "Spherical function relative threshold: $params.sfthres" - log.info "Minimum fiber length: $params.min_len" - log.info "Maximum fiber length: $params.max_len" - log.info "Random tracking seed: 
$params.tracking_seed" - log.info "Use brain mask for tracking: $params.use_brain_mask_as_tracking_mask" - log.info "Compression value: $params.compress_value" - log.info "" - log.info "PROCESSES PER TASKS" - log.info "Processes for denoising DWI: $params.processes_denoise_dwi" - log.info "Processes for EDDY: $params.processes_eddy" - log.info "Processes for registration: $params.processes_registration" - log.info "Processes for FODF: $params.processes_fodf" - log.info "" - } - - if ( params.run_connectomics ) { - log.info "[Connectomics Options]" - log.info "" - log.info "DECOMPOSE OPTIONS" - log.info "No pruning: $params.no_pruning" - log.info "No remove loops: $params.no_remove_loops" - log.info "No remove outliers: $params.no_remove_outliers" - log.info "Minimal outlier length: $params.min_length" - log.info "Maximal outlier lenght: $params.max_length" - log.info "Maximum looping angle: $params.loop_max_angle" - log.info "" - log.info "COMMIT OPTIONS" - log.info "Number of directions: $params.nbr_dir" - log.info "Parallel diffusivity: $params.para_diff" - log.info "Isotropic diffusivity: $params.iso_diff" - log.info "" - log.info "PROCESSES OPTIONS" - log.info "Number of processes for COMMIT: $params.processes_commit" - log.info "Number of processes for AFD_FIXEL: $params.processes_afd_fixel" - log.info "Number of processes for CONNECTIVITY: $params.processes_connectivity" - log.info "" - } } \ No newline at end of file diff --git a/modules/connectomics/USAGE b/modules/connectomics/USAGE new file mode 100644 index 0000000..1c91f4e --- /dev/null +++ b/modules/connectomics/USAGE @@ -0,0 +1,120 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. 
Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Connectomics Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | |-- *t1.nii.gz [Registered to diff space.] + | ├-- *.trk + | ├-- *labels.nii.gz [Native t1 space] + | ├-- *peaks.nii.gz + | ├-- *fodf.nii.gz + | ├-- OGenericAffine.mat + | ├-- output1Warp.nii.gz + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + |-- *t1.nii.gz [Registered to diff space.] + ├-- *.trk + ├-- *labels.nii.gz [Native t1 space] + ├-- *peaks.nii.gz + ├-- *fodf.nii.gz + ├-- OGenericAffine.mat + ├-- output1Warp.nii.gz + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. 
($run_commit)
+    --use_commit2                           If set, COMMIT2 will be used rather than COMMIT1. ($use_commit2)
+                                            COMMIT2 output will replace the COMMIT1 output.
+    --use_both_commit                       If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell
+                                            data. ($use_both_commit)
+    --b_thr                                 Tolerance value to consider bvalues to be the same shell.
+    --nbr_dir                               Number of directions, (half sphere), representing the possible
+                                            orientations of the response functions ($nbr_dir)
+    --ball_stick                            If set, will use the ball&stick model and disable the zeppelin
+                                            compartment for single-shell data. ($ball_stick)
+    --para_diff                             Parallel diffusivity in mm^2/s ($para_diff)
+    --perp_diff                             Perpendicular diffusivity in mm^2/s ($perp_diff)
+    --iso_diff                              Isotropic diffusivity in mm^2/s ($iso_diff)
+
+    PROCESSES OPTIONS
+    --processes                             The number of parallel processes to launch ($cpu_count).
+                                            Only affects the local scheduler.
+    --processes_commit                      Number of processes for COMMIT task ($processes_commit)
+    --processes_afd_fixel                   Number of processes for AFD_FIXEL task ($processes_afd_fixel)
+    --processes_connectivity                Number of processes for connectivity task ($processes_connectivity)
+
+[GLOBAL OPTIONS]
+
+    OUTPUT OPTIONS
+    --output_dir                            Directory to write the final results. Default is
+                                            "./Results_ChildBrainFlow/".
+
+AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking))
+
+no_symlink                                  When used, results will be directly copied in the output folder and
+                                            symlink will not be used.
+
+macos                                       When used, the scratch folder will be modified for MacOS users.
+
+tracking                                    When used, will perform the tracking pipeline to generate the
+                                            whole-brain tractogram from raw diffusion images.
+
+freesurfer                                  When used, will run recon-all and atlases generation from t1 volumes.
+
+connectomics                                When used, will perform connectivity analysis between atlas-based
+                                            segmentation.
+
+NOTES
+
+The 'scilpy/scripts' folder should be in your PATH environment variable. 
Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/connectomics/USAGE_ALL b/modules/connectomics/USAGE_ALL new file mode 100644 index 0000000..bdb1ef8 --- /dev/null +++ b/modules/connectomics/USAGE_ALL @@ -0,0 +1,326 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics,freesurfer + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | └-- *t1.nii.gz + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + └-- *t1.nii.gz + + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. 
($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET T1 OPTIONS + --template_t1 Absolute path to the template T1 directory for antsBrainExtraction. + The folder must contain t1_template.nii.gz and + t1_brain_probability_map.nii.gz. The default path is the human_data + folder in the singularity container ($template_t1). + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. 
($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. 
+ ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + SEGMENT TISSUES OPTIONS + --number_of_tissues Number of tissues classes to segment. ($number_of_tissues) + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. 
($local_nbr_seeds)
+    --local_seeding                         Seeding type [npv, nt]. ($local_seeding)
+    --local_step_size                       Step size ($local_step_size)
+    --local_theta                           Maximum angle between 2 steps. ($local_theta)
+    --local_min_len                         Minimum length for a streamline. ($local_min_len)
+    --local_max_len                         Maximum length for a streamline. ($local_max_len)
+    --local_compress_value                  Compression error threshold. ($local_compress_value)
+    --local_tracking_seed                   List of random seed numbers for the random number generator.
+                                            ($local_tracking_seed)
+                                            Please write them as a list separated by commas without space e.g.
+                                            (--tracking_seed 1,2,3)
+
+    PFT SEEDING AND TRACKING OPTIONS
+    --run_pft_tracking                      If set, PFT tracking will be performed. ($run_pft_tracking)
+    --pft_compress_streamlines              If set, will compress streamlines. ($pft_compress_streamlines)
+    --pft_fa_seeding_mask_thr               Minimal FA threshold to generate a binary fa mask for seeding.
+                                            ($pft_fa_seeding_mask_thr)
+    --pft_seeding_mask_type                 Seeding mask type [fa, wm]. ($pft_seeding_mask_type)
+    --pft_algo                              Tracking algorithm [prob, det]. ($pft_algo)
+    --pft_nbr_seeds                         Number of seeds related to the seeding type param. ($pft_nbr_seeds)
+    --pft_seeding                           Seeding type [npv, nt]. ($pft_seeding)
+    --pft_step_size                         Step size ($pft_step_size)
+    --pft_theta                             Maximum angle between 2 steps. ($pft_theta)
+    --pft_min_len                           Minimum length for a streamline. ($pft_min_len)
+    --pft_max_len                           Maximum length for a streamline. ($pft_max_len)
+    --pft_compress_value                    Compression error threshold. ($pft_compress_value)
+    --pft_random_seed                       List of random seed numbers for the random number generator.
+                                            ($pft_random_seed)
+                                            Please write them as a list separated by commas without space e.g.
+                                            (--tracking_seed 1,2,3)
+
+    PROCESSES OPTIONS
+    --processes                             The number of parallel processes to launch ($cpu_count).
+                                            Only affects the local scheduler. 
+	--processes_bet_t1                      Number of processes for BET T1 task ($processes_bet_t1)
+	--processes_denoise_t1                  Number of processes for T1 denoising task ($processes_denoise_t1)
+	--processes_denoise_dwi                 Number of processes for DWI denoising task ($processes_denoise_dwi)
+	--processes_eddy                        Number of processes for EDDY task. ($processes_eddy)
+	--processes_registration                Number of processes for registration task. ($processes_registration)
+	--processes_fodf                        Number of processes for fODF task. ($processes_fodf)
+
+[FREESURFERFLOW OPTIONS]
+
+    --recon_all                             If set, will use traditional freesurfer recon-all command to produce 
+                                            anatomical surfaces. ($recon_all)
+    --recon_surf                            If set, will use CNN based FastSurfer and recon-surf to produce
+                                            anatomical surfaces (way faster!). ($recon_surf)
+    --use_freesurfer_atlas                  If set, will use the freesurfer atlas if -profile connectomics is used.
+                                            ($use_freesurfer_atlas)
+    --use_brainnetome_atlas                 If set, will use the brainnetome atlas if -profile connectomics is used.
+                                            This is the default setting. ($use_brainnetome_atlas)
+    --use_brainnetome_child_atlas           If set, will use the brainnetome child atlas if -profile connectomics
+                                            is used. This is the default setting. ($use_brainnetome_child_atlas)
+    --use_glasser_atlas                     If set, will use the Glasser atlas if -profile connectomics is used.
+                                            ($use_glasser_atlas)
+    --use_schaefer_100_atlas                If set, will use the Schaefer 100 atlas if -profile connectomics is used.
+                                            ($use_schaefer_100_atlas)
+    --use_schaefer_200_atlas                If set, will use the Schaefer 200 atlas if -profile connectomics is used.
+                                            ($use_schaefer_200_atlas)
+    --use_schaefer_400_atlas                If set, will use the Schaefer 400 atlas if -profile connectomics is used.
+                                            ($use_schaefer_400_atlas)
+    --use_lausanne_1_atlas                  If set, will use the lausanne scale 1 atlas if -profile connectomics is
+                                            used. ($use_lausanne_1_atlas)
+    --use_lausanne_2_atlas                  If set, will use the lausanne scale 2 atlas if -profile connectomics is
+                                            used. 
+                                            ($use_lausanne_2_atlas)
+    --use_lausanne_3_atlas                  If set, will use the lausanne scale 3 atlas if -profile connectomics is
+                                            used. ($use_lausanne_3_atlas)
+    --use_lausanne_4_atlas                  If set, will use the lausanne scale 4 atlas if -profile connectomics is
+                                            used. ($use_lausanne_4_atlas)
+    --use_lausanne_5_atlas                  If set, will use the lausanne scale 5 atlas if -profile connectomics is
+                                            used. ($use_lausanne_5_atlas)
+    --use_dilated_labels                    If set, will use the dilated version of the atlas selected above.
+                                            ($use_dilated_labels)
+    --atlas_utils_folder                    Folder needed to convert freesurfer atlas to other atlases. Default is
+                                            the path of folder within the container. ($atlas_utils_folder)
+    --nb_threads                            Number of threads used by recon-all and the atlases creation
+                                            ($nb_threads)
+    --compute_BN_child                      Compute the connectivity-friendly Brainnetome Child atlas.
+                                            ($compute_BN_child)
+    --compute_FS_BN_GL_SF                   Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF)
+                                                * FreeSurfer (adapted)
+                                                * Brainnetome
+                                                * Glasser
+                                                * Schaefer (100/200/400)
+    --compute_lausanne_multiscale           Compute the connectivity multiscale atlases from Lausanne
+                                            ($compute_lausanne_multiscale)
+    --compute_lobes                         Compute the lobes atlas. ($compute_lobes)
+    --processes                             The number of parallel processes to launch ($cpu_count).
+                                            Only affects the local scheduler. 
+ +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + data. ($use_both_commit) + --b_thr Tolerance value to considier bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. 
($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/connectomics/USAGE_INFANT b/modules/connectomics/USAGE_INFANT new file mode 100644 index 0000000..05908a4 --- /dev/null +++ b/modules/connectomics/USAGE_INFANT @@ -0,0 +1,128 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Connectomics Pipeline Infant Config + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile connectomics,infant + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | |-- *t2w.nii.gz [Registered to diff space.] + | ├-- *.trk + | ├-- *labels.nii.gz [Native t2w space.] + | ├-- *peaks.nii.gz + | ├-- *fodf.nii.gz + | ├-- OGenericAffine.mat + | ├-- output1Warp.nii.gz + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + |-- *t2w.nii.gz [Registered to diff space.] + ├-- *.trk + ├-- *labels.nii.gz [Native t2w space.] 
+ ├-- *peaks.nii.gz + ├-- *fodf.nii.gz + ├-- OGenericAffine.mat + ├-- output1Warp.nii.gz + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --b_thr Tolerance value to considier bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. 
($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/connectomics/USAGE_TRACKING b/modules/connectomics/USAGE_TRACKING new file mode 100644 index 0000000..099a338 --- /dev/null +++ b/modules/connectomics/USAGE_TRACKING @@ -0,0 +1,283 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking and Connectomics Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | └-- *t1.nii.gz + | ├-- *labels.nii.gz [Native t1 space] + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t1.nii.gz + ├-- *labels.nii.gz [Native t1 space] + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. 
($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET T1 OPTIONS + --template_t1 Absolute path to the template T1 directory for antsBrainExtraction. + The folder must contain t1_template.nii.gz and + t1_brain_probability_map.nii.gz. The default path is the human_data + folder in the singularity container ($template_t1). + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. 
+ ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. 
+ ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + SEGMENT TISSUES OPTIONS + --number_of_tissues Number of tissues classes to segment. ($number_of_tissues) + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. 
($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. 
+ --processes_bet_t1 Number of processes for BET T1 task ($processes_bet_t1) + --processes_denoise_t1 Number of processes for T1 denoising task ($processes_denoise_t1) + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --use_both_commit If set, COMMIT2 will be run first followed by COMMIT1 for multi-shell + data. ($use_both_commit) + --b_thr Tolerance value to considier bvalues to be the same shell. 
+ --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. ($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. 
+The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/connectomics/USAGE_TRACKING_INFANT b/modules/connectomics/USAGE_TRACKING_INFANT new file mode 100644 index 0000000..7a96fd1 --- /dev/null +++ b/modules/connectomics/USAGE_TRACKING_INFANT @@ -0,0 +1,280 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking and Connectomics Pipeline Infant Config + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,connectomics,infant + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | ├-- *t2w.nii.gz + | ├-- *wm_mask.nii.gz + | ├-- *labels.nii.gz [Native t2w space.] + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t2w.nii.gz + ├-- *wm_mask.nii.gz + ├-- *labels.nii.gz [Native t2w space.] + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. 
($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET ANAT OPTIONS + --run_bet_anat If set, will perform brain extraction on the input anat volume. + ($run_bet_anat) + Default settings are soft to make sure an already brain extracted volume + is not impacted + by the bet command. The goal is to clean volumes that still have + portions of non-brain structures. + --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. 
+ ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. + ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. 
(--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. + ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. 
($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. 
+ --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMPUTE_PRIORS OPTIONS + --compute_priors If set, priors will individually computed for each subject before being + fed to COMMIT. ($compute_priors) + --fa_min_priors Minimal FA value to consider a voxel a single fiber population. + ($fa_min_priors) + --fa_max_priors Maximal FA value to consider a voxel as being in a ventricle. + ($fa_max_priors) + --md_min_priors Minimal MD value to consider a voxel as being in a ventricle. + ($md_min_priors) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. ($run_commit) + --use_commit2 If set, COMMIT2 will be use rather than COMMIT1. ($use_commit2) + COMMIT2 output will replaced the COMMIT1 output. + --b_thr Tolerance value to considier bvalues to be the same shell. + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. 
($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/connectomics/processes/commit.nf b/modules/connectomics/processes/commit.nf index f507985..9af904b 100644 --- a/modules/connectomics/processes/commit.nf +++ b/modules/connectomics/processes/commit.nf @@ -2,21 +2,150 @@ nextflow.enable.dsl=2 -process COMMIT2 { +process COMMIT { cpus params.processes_commit - memory params.commit_memory_limit - label "COMMIT2" + memory { 31.GB * task.attempt } + time { 8.hour * task.attempt } input: - tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks) + tuple val(sid), path(h5), path(dwi), path(bval), path(bvec), path(peaks), path(para_diff), path(iso_diff), path(perp_diff) output: - tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit + tuple val(sid), path("${sid}__decompose_commit.h5"), emit: h5_commit, optional: true + tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit, optional: true + tuple val(sid), path("${sid}__results_bzs/"), optional: true + tuple val(sid), path("${sid}__results_bzs_1/"), optional: true + tuple val(sid), path("${sid}__results_bzs_2/"), optional: true + when: + params.run_commit + + script: + def para_diff_arg = para_diff ? "--para_diff \$(cat $para_diff)" : "--para_diff $params.para_diff" + def iso_diff_arg = iso_diff ? "--iso_diff \$(cat $iso_diff)" : "--iso_diff $params.iso_diff" + def perp_diff_arg = perp_diff ? "--perp_diff \$(cat $perp_diff)" : "--perp_diff $params.perp_diff" + def ball_stick_arg = params.ball_stick ? 
"--ball_stick" : "" + + if ( params.use_commit2 && !params.use_both_commit ) { + """ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --ball_stick --commit2 \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ + $para_diff_arg $iso_diff_arg + mv "${sid}__results_bzs/commit_2/decompose_commit.h5" "./${sid}__decompose_commit.h5" + """ + } + else if ( params.use_both_commit ) { + """ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs_1/" --ball_stick --commit2 \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ + $para_diff_arg $iso_diff_arg + scil_run_commit.py ${sid}__results_bzs_1/commit_2/essential_tractogram.trk $dwi $bval $bvec "${sid}__results_bzs_2/"\ + --in_peaks $peaks --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir\ + $para_diff_arg $iso_diff_arg $perp_diff_arg + mv "${sid}__results_bzs_2/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" + """ + } + else { + """ + scil_run_commit.py $h5 $dwi $bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ + $para_diff_arg $iso_diff_arg $perp_diff_arg + mv "${sid}__results_bzs/commit_1/decompose_commit.h5" "./${sid}__decompose_commit.h5" + """ + } +} + +process COMMIT_ON_TRK { + cpus params.processes_commit + memory { params.commit_memory_limit * task.attempt } + time { 4.hour * task.attempt } + + input: + tuple val(sid), path(trk_h5), path(dwi), path(bval), path(bvec), path(peaks) + output: + tuple val(sid), path("${sid}__essential_tractogram.trk"), emit: trk_commit tuple val(sid), path("${sid}__results_bzs/") + when: + params.run_commit + + script: + ball_stick_arg="" + perp_diff_arg="" + if ( params.ball_stick ) { + ball_stick_arg="--ball_stick" + } + else { + perp_diff_arg="--perp_diff $params.perp_diff" + } + """ + scil_run_commit.py $trk_h5 $dwi 
$bval $bvec "${sid}__results_bzs/" --in_peaks $peaks \ + --processes $params.processes_commit --b_thr $params.b_thr --nbr_dir $params.nbr_dir $ball_stick_arg \ + --para_diff $params.para_diff $perp_diff_arg --iso_diff $params.iso_diff + mv "${sid}__results_bzs/commit_1/essential_tractogram.trk" "./${sid}__essential_tractogram.trk" + """ +} + +process COMPUTE_PRIORS { + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(fa), path(md), path(ad), path(rd) + output: + tuple val("Priors"), path("${sid}__para_diff.txt"), emit: para_diff + tuple val("Priors"), path("${sid}__iso_diff.txt"), emit: iso_diff + tuple val("Priors"), path("${sid}__perp_diff.txt"), emit: perp_diff, optional: true + tuple val(sid), path("${sid}__mask_1fiber.nii.gz"), emit: mask_1fiber + tuple val(sid), path("${sid}__mask_ventricles.nii.gz"), emit: mask_ventricles + + when: + params.run_commit && params.compute_priors + + script: + if ( params.multishell ) { + """ + scil_compute_NODDI_priors.py $fa $ad $rd $md \ + --out_txt_1fiber_para ${sid}__para_diff.txt \ + --out_txt_ventricles ${sid}__iso_diff.txt \ + --out_txt_1fiber_perp ${sid}__perp_diff.txt \ + --out_mask_1fiber ${sid}__mask_1fiber.nii.gz \ + --out_mask_ventricles ${sid}__mask_ventricles.nii.gz \ + --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors \ + --md_min $params.md_min_priors --roi_radius $params.roi_radius_priors + """ + } + else { + """ + scil_compute_NODDI_priors.py $fa $ad $md \ + --out_txt_1fiber ${sid}__para_diff.txt \ + --out_txt_ventricles ${sid}__iso_diff.txt \ + --out_mask_1fiber ${sid}__mask_1fiber.nii.gz \ + --out_mask_ventricles ${sid}__mask_ventricles.nii.gz \ + --fa_min $params.fa_min_priors --fa_max $params.fa_max_priors \ + --md_min $params.md_min_priors --roi_radius $params.roi_radius_priors + """ + } +} + +process AVERAGE_PRIORS { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), 
path(para_diff), path(iso_diff), path(perp_diff) + + output: + path("mean_para_diff.txt"), emit: mean_para_diff + path("mean_iso_diff.txt"), emit: mean_iso_diff + path("mean_perp_diff.txt"), emit: mean_perp_diff + script: """ - scil_run_commit.py $h5 $dwi $bval $bvec ${sid}__results_bzs/ --in_peaks $peaks \ - --processes $params.processes_commit --b_thr $params.b0_thr --nbr_dir $params.nbr_dir \ - --commit2 --ball_stick --para_diff $params.para_diff --iso_diff $params.iso_diff -v - mv ${sid}__results_bzs/commit_2/decompose_commit.h5 ./${sid}__decompose_commit.h5 + cat $para_diff > all_para_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_para_diff.txt > mean_para_diff.txt + cat $iso_diff > all_iso_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_iso_diff.txt > mean_iso_diff.txt + cat $perp_diff > all_perp_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_perp_diff.txt > mean_perp_diff.txt """ -} \ No newline at end of file +} diff --git a/modules/connectomics/processes/compute_metrics.nf b/modules/connectomics/processes/compute_metrics.nf index a4d96c6..c314343 100644 --- a/modules/connectomics/processes/compute_metrics.nf +++ b/modules/connectomics/processes/compute_metrics.nf @@ -4,8 +4,8 @@ nextflow.enable.dsl=2 process COMPUTE_AFD_FIXEL { cpus params.processes_afd_fixel - memory '2 GB' - label "COMPUTE_AFD_FIXEL" + memory { 8.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(h5), path(fodf) @@ -20,8 +20,8 @@ process COMPUTE_AFD_FIXEL { process COMPUTE_CONNECTIVITY { cpus params.processes_connectivity - memory '2 GB' - label "COMPUTE_CONNECTIVITY" + memory { 8.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(h5), path(labels), path(metrics) diff --git a/modules/connectomics/processes/decompose.nf b/modules/connectomics/processes/decompose.nf index a384319..9293ad4 100644 --- a/modules/connectomics/processes/decompose.nf +++ 
b/modules/connectomics/processes/decompose.nf @@ -4,8 +4,8 @@ nextflow.enable.dsl=2 process DECOMPOSE_CONNECTIVITY { cpus 1 - memory { 7.B * trk.size() } - label "DECOMPOSE_CONNECTIVITY" + memory { 31.GB * task.attempt } + time { 12.hour * task.attempt } input: tuple val(sid), path(trk), path(labels) @@ -36,4 +36,4 @@ process DECOMPOSE_CONNECTIVITY { $no_pruning_arg $no_remove_loops_arg $no_remove_outliers_arg --min_length $params.min_length --max_length $params.max_length \ --loop_max_angle $params.loop_max_angle --outlier_threshold $params.outlier_threshold -v """ -} \ No newline at end of file +} diff --git a/modules/connectomics/processes/transform.nf b/modules/connectomics/processes/transform.nf new file mode 100644 index 0000000..ac61d16 --- /dev/null +++ b/modules/connectomics/processes/transform.nf @@ -0,0 +1,39 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process TRANSFORM_LABELS { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(labels), path(t2), path(mat), path(syn) + output: + tuple val(sid), path("${sid}__labels_warped.nii.gz"), emit: labels_warped + script: + """ + antsApplyTransforms -d 3 -i $labels -r $t2 -o ${sid}__labels_warped.nii.gz \ + -t $syn $mat -n NearestNeighbor + scil_image_math.py convert ${sid}__labels_warped.nii.gz ${sid}__labels_warped.nii.gz \ + --data_type int16 -f + """ +} + +process TRANSFORM_T1 { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(t1), path(dwi), path(bval), path(bvec), path(mat), path(syn) + output: + tuple val(sid), path("${sid}__t1_warped.nii.gz"), emit: t1_warped + script: + """ + scil_extract_b0.py $dwi $bval $bvec b0.nii.gz --mean\ + --b0_thr $params.b0_thr --force_b0_threshold + antsApplyTransforms -d 3 -i $t1 -r b0.nii.gz -o ${sid}__t1_warped.nii.gz \ + -t $syn $mat -n Linear + """ +} \ No newline at end of file diff --git a/modules/connectomics/processes/transform_labels.nf 
b/modules/connectomics/processes/transform_labels.nf deleted file mode 100644 index 2677e05..0000000 --- a/modules/connectomics/processes/transform_labels.nf +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl=2 - -process TRANSFORM_LABELS { - cpus 1 - memory '2 GB' - label "TRANSFORM_LABELS" - - input: - tuple val(sid), path(labels), path(t2), path(mat), path(syn), path(masksyn) - output: - tuple val(sid), path("${sid}__labels_warped.nii.gz"), emit: labels_warped - script: - """ - antsApplyTransforms -d 3 -i $labels -r $t2 -o ${sid}__labels_warped.nii.gz \ - -t $masksyn $syn $mat -n NearestNeighbor - scil_image_math.py convert ${sid}__labels_warped.nii.gz ${sid}__labels_warped.nii.gz \ - --data_type int16 -f - """ -} \ No newline at end of file diff --git a/modules/connectomics/processes/viz.nf b/modules/connectomics/processes/viz.nf index 29ae3d4..d0f44cb 100644 --- a/modules/connectomics/processes/viz.nf +++ b/modules/connectomics/processes/viz.nf @@ -4,8 +4,8 @@ nextflow.enable.dsl=2 process VISUALIZE_CONNECTIVITY { cpus 1 - label "VIZ" - memory "2 GB" + memory { 2.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(npy) diff --git a/modules/connectomics/workflows/connectomics.nf b/modules/connectomics/workflows/connectomics.nf index f7f61ce..cb9e650 100644 --- a/modules/connectomics/workflows/connectomics.nf +++ b/modules/connectomics/workflows/connectomics.nf @@ -2,9 +2,13 @@ nextflow.enable.dsl=2 -include { TRANSFORM_LABELS } from "../processes/transform_labels.nf" -include { DECOMPOSE_CONNECTIVITY } from "../processes/decompose.nf" -include { COMMIT2 } from "../processes/commit.nf" +include { TRANSFORM_LABELS; + TRANSFORM_T1 } from "../processes/transform.nf" +include { DECOMPOSE_CONNECTIVITY as INITIAL_DECOMPOSE } from "../processes/decompose.nf" +include { DECOMPOSE_CONNECTIVITY as FINAL_DECOMPOSE } from "../processes/decompose.nf" +include { COMMIT; + COMMIT_ON_TRK; + COMPUTE_PRIORS } from 
"../processes/commit.nf" include { COMPUTE_AFD_FIXEL; COMPUTE_CONNECTIVITY } from "../processes/compute_metrics.nf" include { VISUALIZE_CONNECTIVITY } from "../processes/viz.nf" @@ -16,37 +20,116 @@ workflow CONNECTOMICS { dwi_peaks_channel fodf_channel metrics_channel - t2w_channel + anat_channel transfos_channel + fa_md_ad_rd_channel + main: + // ** Computing priors for COMMIT ** // + COMPUTE_PRIORS(fa_md_ad_rd_channel) - channel_for_transfo = labels_channel - .combine(t2w_channel, by: 0) - .combine(transfos_channel, by: 0) + // ** If -profile freesurfer, transform t1 to diff space. ** // + if ( params.run_freesurfer && !params.run_tracking ) { + t1_for_transfo = anat_channel + .combine(dwi_peaks_channel.map{ [it[0], it[1], it[2], it[3]] }, by: 0) + .combine(transfos_channel, by: 0) + TRANSFORM_T1(t1_for_transfo) + channel_for_transfo = labels_channel + .combine(TRANSFORM_T1.out.t1_warped, by: 0) + .combine(transfos_channel, by: 0) + } else { + channel_for_transfo = labels_channel + .combine(anat_channel, by: 0) + .combine(transfos_channel, by: 0) + } + // ** Transforming labels to diff space ** // TRANSFORM_LABELS(channel_for_transfo) - decompose_channel = tracking_channel - .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + // ** If -profile infant is used, first part will be run. COMMIT1 is the only supported ** // + // ** method as of now, since running commit2 requires a decomposition first, which is not an ** // + // ** easy task on infant data. This will be improved in the future. 
** // + if ( params.commit_on_trk ) { + + // ** COMMIT1 processing on trk ** // + commit_channel = tracking_channel + .combine(dwi_peaks_channel, by: 0) + COMMIT_ON_TRK(commit_channel) + + // ** Decomposing tractogram ** // + decompose_channel = COMMIT_ON_TRK.out.trk_commit + .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + INITIAL_DECOMPOSE(decompose_channel) - DECOMPOSE_CONNECTIVITY(decompose_channel) + // ** Setting output channel ** // + afd_fixel_channel = INITIAL_DECOMPOSE.out.decompose + .combine(fodf_channel, by: 0) + } + else { + // ** Decomposing tractogram ** // + decompose_channel = tracking_channel + .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + INITIAL_DECOMPOSE(decompose_channel) - commit_channel = DECOMPOSE_CONNECTIVITY.out.decompose - .combine(dwi_peaks_channel, by: 0) + // ** Running COMMIT1 or COMMIT2 ** // + if ( params.use_both_commit ) { - COMMIT2(commit_channel) + if ( params.compute_priors ) { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(COMPUTE_PRIORS.out.para_diff, by: 0) + .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) + .combine(COMPUTE_PRIORS.out.perp_diff, by: 0) + COMMIT(commit_channel) + } else { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + COMMIT(commit_channel) + } - afd_fixel_channel = COMMIT2.out.h5_commit - .combine(fodf_channel, by: 0) + decompose_channel = COMMIT.out.trk_commit + .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) + FINAL_DECOMPOSE(decompose_channel) + // ** Setting output channel ** // + afd_fixel_channel = FINAL_DECOMPOSE.out.decompose + .combine(fodf_channel, by: 0) + } + else { + + if ( params.compute_priors ) { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(COMPUTE_PRIORS.out.para_diff, by: 0) + .combine(COMPUTE_PRIORS.out.iso_diff, by: 0) + 
.combine(COMPUTE_PRIORS.out.perp_diff, by: 0) + COMMIT(commit_channel) + } else { + commit_channel = INITIAL_DECOMPOSE.out.decompose + .combine(dwi_peaks_channel, by: 0) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + .combine(Channel.of([[]])) + COMMIT(commit_channel) + } + // ** Setting output channel ** // + afd_fixel_channel = COMMIT.out.h5_commit + .combine(fodf_channel, by: 0) + } + } + + // ** Computing AFD fixel ** // COMPUTE_AFD_FIXEL(afd_fixel_channel) + // ** Computing Connectivity ** // compute_metrics_channel = COMPUTE_AFD_FIXEL.out.decompose_afd .combine(TRANSFORM_LABELS.out.labels_warped, by: 0) .combine(metrics_channel, by: 0) - COMPUTE_CONNECTIVITY(compute_metrics_channel) + // ** Visualizing Connectivity ** // VISUALIZE_CONNECTIVITY(COMPUTE_CONNECTIVITY.out.metrics) - } \ No newline at end of file diff --git a/modules/freesurfer/USAGE b/modules/freesurfer/USAGE new file mode 100644 index 0000000..bff00e6 --- /dev/null +++ b/modules/freesurfer/USAGE @@ -0,0 +1,116 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. 
*** + +Run FreeSurferFlow Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile freesurfer + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | └-- *t1.nii.gz + └-- S2 + └-- *t1.nii.gz + + --recon_all If set, will use traditional freesurfer recon-all command to produce + anatomical surfaces. ($recon_all) + --recon_surf If set, will use CNN based FastSurfer and recon-surf to produce + anatomical surfaces (way faster!). ($recon_surf) + --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. + ($use_freesurfer_atlas) + --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. + ($use_brainnetome_atlas) + --use_brainnetome_child_atlas If set, will use the brainnetome child atlas if -profile connectomcis + is used. This is the default setting. ($use_brainnetome_child_atlas) + --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. + ($use_glasser_atlas) + --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. + ($use_schaefer_100_atlas) + --use_schaefer_200_atlas If set, will use the Schaefer 200 atlas if -profile connectomics is used. + ($use_schaefer_200_atlas) + --use_schaefer_400_atlas If set, will use the Schaefer 400 atlas if -profile connectomics is used. + ($use_schaefer_400_atlas) + --use_lausanne_1_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_1_atlas) + --use_lausanne_2_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_2_atlas) + --use_lausanne_3_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_3_atlas) + --use_lausanne_4_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. 
($use_lausanne_4_atlas) + --use_lausanne_5_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_5_atlas) + --use_dilated_labels If set, will use the dilated version of the atlas selected above. + ($use_dilated_labels) + + +OPTIONAL ARGUMENTS (current value) + +[FREESURFERFLOW OPTIONS] + + --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases. Default is + the path of folder within the container. ($atlas_utils_folder) + --nb_threads Number of threads used by recon-all and the atlases creation + ($nb_threads) + --compute_BN_child Compute the connectivity-friendly Brainnetome Child atlas. + ($compute_BN_child) + --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) + * FreeSurfer (adapted) + * Brainnetome + * Glasser + * Schaefer (100/200/400) + --compute_lausanne_multiscale Compute the connectivity multiscale atlases from Lausanne + ($compute_lausanne_multiscale) + --compute_lobes Compute the lobes atlas. ($compute_lobes) + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. 
Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/freesurfer/USAGE_CONN b/modules/freesurfer/USAGE_CONN new file mode 100644 index 0000000..5ab9bc8 --- /dev/null +++ b/modules/freesurfer/USAGE_CONN @@ -0,0 +1,170 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run FreeSurferFlow Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile freesurfer,connectomics + +DESCRIPTION + + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | |-- *t1.nii.gz [Raw t1 image.] 
+ | ├-- *.trk + | ├-- *peaks.nii.gz + | ├-- *fodf.nii.gz + | ├-- OGenericAffine.mat + | ├-- output1Warp.nii.gz + | └-- metrics + | └-- METRIC_NAME.nii.gz [Optional] + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + |-- *t1.nii.gz [Raw t1 image.] + ├-- *.trk + ├-- *peaks.nii.gz + ├-- *fodf.nii.gz + ├-- OGenericAffine.mat + ├-- output1Warp.nii.gz + └-- metrics + └-- METRIC_NAME.nii.gz [Optional] + + --recon_all If set, will use traditional freesurfer recon-all command to produce + anatomical surfaces. ($recon_all) + --recon_surf If set, will use CNN based FastSurfer and recon-surf to produce + anatomical surfaces (way faster!). ($recon_surf) + --use_freesurfer_atlas If set, will use the freesurfer atlas if -profile connectomics is used. + ($use_freesurfer_atlas) + --use_brainnetome_atlas If set, will use the brainnetome atlas if -profile connectomics is used. + This is the default setting. ($use_brainnetome_atlas) + --use_brainnetome_child_atlas If set, will use the brainnetome child atlas if -profile connectomcis + is used. This is the default setting. ($use_brainnetome_child_atlas) + --use_glasser_atlas If set, will use the Glasser atlas if -profile connectomics is used. + ($use_glasser_atlas) + --use_schaefer_100_atlas If set, will use the Schaefer 100 atlas if -profile connectomics is used. + ($use_schaefer_100_atlas) + --use_schaefer_200_atlas If set, will use the Schaefer 200 atlas if -profile connectomics is used. + ($use_schaefer_200_atlas) + --use_schaefer_400_atlas If set, will use the Schaefer 400 atlas if -profile connectomics is used. + ($use_schaefer_400_atlas) + --use_lausanne_1_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_1_atlas) + --use_lausanne_2_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_2_atlas) + --use_lausanne_3_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. 
($use_lausanne_3_atlas) + --use_lausanne_4_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_4_atlas) + --use_lausanne_5_atlas If set, will use the lausanne scale 1 atlas if -profile connectomics is + used. ($use_lausanne_5_atlas) + --use_dilated_labels If set, will use the dilated version of the atlas selected above. + ($use_dilated_labels) + + +OPTIONAL ARGUMENTS (current value) + +[FREESURFERFLOW OPTIONS] + + --atlas_utils_folder Folder needed to convert freesurfer atlas to other atlases. Default is + the path of folder within the container. ($atlas_utils_folder) + --nb_threads Number of threads used by recon-all and the atlases creation + ($nb_threads) + --compute_BN_child Compute the connectivity-friendly Brainnetome Child atlas. + ($compute_BN_child) + --compute_FS_BN_GL_SF Compute the connectivity-friendly atlases : ($compute_FS_BN_GL_SF) + * FreeSurfer (adapted) + * Brainnetome + * Glasser + * Schaefer (100/200/400) + --compute_lausanne_multiscale Compute the connectivity multiscale atlases from Lausanne + ($compute_lausanne_multiscale) + --compute_lobes Compute the lobes atlas. ($compute_lobes) + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + +[CONNECTOMICS OPTIONS] + + DECOMPOSE OPTIONS + --no_pruning If set, will not prune on length ($no_pruning) + --no_remove_loops If set, will not remove streamlines making loops ($no_remove_loops) + --no_remove_outliers If set, will not remove outliers using QB ($no_remove_outliers) + --min_length Pruning minimal segment length ($min_length) + --max_length Pruning maximal segment length ($max_length) + --loop_max_angle Maximal winding angle over which a streamline is considered as looping + ($loop_max_angle) + --outlier_threshold Outlier removal threshold when using hierarchical QB + ($outlier_threshold) + + COMMIT OPTIONS + --run_commit If set, COMMIT will be run on the tractogram. 
($run_commit) + --use_commit2 If set, COMMIT2 will be used rather than COMMIT1. ($use_commit2) + COMMIT2 output will replace the COMMIT1 output. + --b_thr Tolerance value to consider bvalues to be the same shell. ($b_thr) + --nbr_dir Number of directions, (half sphere), representing the possible + orientations of the response functions ($nbr_dir) + --ball_stick If set, will use the ball&stick model and disable the zeppelin + compartment for single-shell data. ($ball_stick) + --para_diff Parallel diffusivity in mm^2/s ($para_diff) + --perp_diff Perpendicular diffusivity in mm^2/s ($perp_diff) + --iso_diff Isotropic diffusivity in mm^2/s ($iso_diff) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_commit Number of processes for COMMIT task ($processes_commit) + --processes_afd_fixel Number of processes for AFD_FIXEL task ($processes_afd_fixel) + --processes_connectivity Number of processes for connectivity task ($processes_connectivity) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Apptainer container is used. + +The intermediate working directory is, by default, set to './work'. 
+To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. \ No newline at end of file diff --git a/modules/freesurfer/processes/atlases.nf b/modules/freesurfer/processes/atlases.nf new file mode 100644 index 0000000..5db6e9b --- /dev/null +++ b/modules/freesurfer/processes/atlases.nf @@ -0,0 +1,152 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process FS_BN_GL_SF { + cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 4.hour * task.attempt } + + input: + tuple val(sid), path(folder), path(utils) + + output: + tuple val(sid), path("*freesurfer_v5.nii.gz"), emit: freesurfer + tuple val(sid), path("*freesurfer_v5_dilate.nii.gz"), emit: freesurfer_dilated + tuple val(sid), path("*brainnetome_v5.nii.gz"), emit: brainnetome + tuple val(sid), path("*brainnetome_v5_dilate.nii.gz"), emit: brainnetome_dilated + tuple val(sid), path("*glasser_v5.nii.gz"), emit: glasser + tuple val(sid), path("*glasser_v5_dilate.nii.gz"), emit: glasser_dilated + tuple val(sid), path("*schaefer_100_v5.nii.gz"), emit: schaefer_100 + tuple val(sid), path("*schaefer_100_v5_dilate.nii.gz"), emit: schaefer_100_dilated + tuple val(sid), path("*schaefer_200_v5.nii.gz"), emit: schaefer_200 + tuple val(sid), path("*schaefer_200_v5_dilate.nii.gz"), emit: schaefer_200_dilated + tuple val(sid), path("*schaefer_400_v5.nii.gz"), emit: schaefer_400 + tuple val(sid), path("*schaefer_400_v5_dilate.nii.gz"), emit: schaefer_400_dilated + path("*[brainnetome,freesurfer,glasser,schaefer]*.txt") + path("*[brainnetome,freesurfer,glasser,schaefer]*.json") + + when: + params.compute_FS_BN_GL_SF + + script: + """ + ln -s $utils/fsaverage \$(dirname ${folder})/ + bash $utils/freesurfer_utils/generate_atlas_FS_BN_GL_SF_v5.sh \$(dirname ${folder}) \ + ${sid} 
${params.nb_threads} FS_BN_GL_SF_Atlas/ + cp $sid/FS_BN_GL_SF_Atlas/* ./ + """ +} + +process BN_CHILD { + cpus params.nb_threads + memory { 31.GB * task.attempt } + time { 2.hour * task.attempt } + + input: + tuple val(sid), path(folder), path(utils) + + output: + tuple val(sid), path("*brainnetome_child_v1.nii.gz"), emit: brainnetome_child + tuple val(sid), path("*brainnetome_child_v1_dilate.nii.gz"), emit: brainnetome_child_dilated + path("*[brainnetome_child]*.txt") + path("*[brainnetome_child]*.json") + path("*.stats") + + when: + params.compute_BN_child + + script: + """ + ln -s $utils/fsaverage \$(dirname ${folder})/ + bash $utils/freesurfer_utils/generate_atlas_BN_child.sh \$(dirname ${folder}) \ + ${sid} ${params.nb_threads} Child_Atlas/ + cp $sid/Child_Atlas/* ./ + """ +} + +process LOBES { + cpus params.nb_threads + memory { 24.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(folder), path(utils) + + output: + path("*lobes*.nii.gz"), emit: lobes + path("*lobes*.txt") + path("*lobes*.json") + + when: + params.compute_lobes + + script: + """ + mri_convert ${folder}/mri/rawavg.mgz rawavg.nii.gz + + mri_convert ${folder}/mri/wmparc.mgz wmparc.nii.gz + scil_reshape_to_reference.py wmparc.nii.gz rawavg.nii.gz wmparc.nii.gz --interpolation nearest -f + scil_image_math.py convert wmparc.nii.gz wmparc.nii.gz --data_type uint16 -f + + mri_convert ${folder}/mri/brainmask.mgz brain_mask.nii.gz + scil_image_math.py lower_threshold brain_mask.nii.gz 0.001 brain_mask.nii.gz --data_type uint8 -f + scil_image_math.py dilation brain_mask.nii.gz 1 brain_mask.nii.gz -f + scil_reshape_to_reference.py brain_mask.nii.gz rawavg.nii.gz brain_mask.nii.gz --interpolation nearest -f + scil_image_math.py convert brain_mask.nii.gz brain_mask.nii.gz --data_type uint8 -f + + scil_combine_labels.py atlas_lobes_v5.nii.gz --volume_ids wmparc.nii.gz 1003 1012 1014 1017 1018 1019 1020 \ + 1024 1027 1028 1032 --volume_ids wmparc.nii.gz 1008 1022 1025 1029 
1031 --volume_ids wmparc.nii.gz 1005 \ + 1011 1013 1021 --volume_ids wmparc.nii.gz 1001 1006 1007 1009 1015 1015 1030 1033 --volume_ids wmparc.nii.gz \ + 1002 1010 1023 1026 --volume_ids wmparc.nii.gz 8 --volume_ids wmparc.nii.gz 10 11 12 13 17 18 26 28 \ + --volume_ids wmparc.nii.gz 2003 2012 2014 2017 2018 2019 2020 2024 2027 2028 2032 --volume_ids wmparc.nii.gz \ + 2008 2022 2025 2029 2031 --volume_ids wmparc.nii.gz 2005 2011 2013 2021 --volume_ids wmparc.nii.gz 2001 2006 \ + 2007 2009 2015 2015 2030 2033 --volume_ids wmparc.nii.gz 2002 2010 2023 2026 --volume_ids wmparc.nii.gz 49 50 \ + 51 52 53 54 58 60 --volume_ids wmparc.nii.gz 47 --volume_ids wmparc.nii.gz 16 --merge + scil_dilate_labels.py atlas_lobes_v5.nii.gz atlas_lobes_v5_dilate.nii.gz --distance 2 \ + --labels_to_dilate 1 2 3 4 5 6 8 9 10 11 12 14 15 --mask brain_mask.nii.gz + cp $utils/freesurfer_utils/*lobes_v5* ./ + """ +} + +process LAUSANNE { + cpus 1 + memory { 24.GB * task.attempt } + time { 4.hour * task.attempt } + + input: + tuple val(sid), path(folder), path(utils) + each scale + + output: + tuple val(sid), path("lausanne_2008_scale_1*.nii.gz"), emit: lausanne_1, optional: true + tuple val(sid), path("lausanne_2008_scale_2*.nii.gz"), emit: lausanne_2, optional: true + tuple val(sid), path("lausanne_2008_scale_3*.nii.gz"), emit: lausanne_3, optional: true + tuple val(sid), path("lausanne_2008_scale_4*.nii.gz"), emit: lausanne_4, optional: true + tuple val(sid), path("lausanne_2008_scale_5*.nii.gz"), emit: lausanne_5, optional: true + path("*.txt") + path("*.json") + + when: + params.compute_lausanne_multiscale + + script: + """ + ln -s $utils/fsaverage \$(dirname ${folder})/ + freesurfer_home=\$(dirname \$(dirname \$(which mri_label2vol))) + /usr/bin/python $utils/lausanne_multi_scale_atlas/generate_multiscale_parcellation.py \ + \$(dirname ${folder}) ${sid} \$freesurfer_home --scale ${scale} --dilation_factor 0 --log_level DEBUG + + mri_convert ${folder}/mri/rawavg.mgz rawavg.nii.gz + 
scil_image_math.py lower_threshold rawavg.nii.gz 0.001 mask.nii.gz --data_type uint8 + scil_reshape_to_reference.py ${folder}/mri/lausanne2008.scale${scale}+aseg.nii.gz mask.nii.gz \ + lausanne_2008_scale_${scale}.nii.gz --interpolation nearest + scil_image_math.py convert lausanne_2008_scale_${scale}.nii.gz lausanne_2008_scale_${scale}.nii.gz \ + --data_type int16 -f + scil_dilate_labels.py lausanne_2008_scale_${scale}.nii.gz lausanne_2008_scale_${scale}_dilate.nii.gz \ + --distance 2 --mask mask.nii.gz + + cp $utils/lausanne_multi_scale_atlas/*.txt ./ + cp $utils/lausanne_multi_scale_atlas/*.json ./ + """ +} diff --git a/modules/freesurfer/processes/freesurfer.nf b/modules/freesurfer/processes/freesurfer.nf new file mode 100644 index 0000000..a37651a --- /dev/null +++ b/modules/freesurfer/processes/freesurfer.nf @@ -0,0 +1,51 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process FREESURFER { + cpus params.nb_threads + memory { 31.GB * task.attempt } + time { 6.hour * task.attempt } + + input: + tuple val(sid), path(anat) + output: + tuple val(sid), path("$sid/"), emit: folders + tuple val(sid), path("${sid}__final_t1.nii.gz"), emit: final_t1 + + script: + """ + export SUBJECTS_DIR=. + recon-all -i $anat -s $sid -all -parallel -openmp $params.nb_threads + mri_convert $sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz + """ +} + +process RECON_SURF { + cpus params.nb_threads + memory { 31.GB * task.attempt } + time { 10.hour * task.attempt } + + + input: + tuple val(sid), path(anat) + output: + tuple val(sid), path("$sid/"), emit: folders + tuple val(sid), path("${sid}__final_t1.nii.gz"), emit: final_t1 + + script: + // ** Adding a registration to .gca atlas to generate the talairach.m3z file (subcortical atlas segmentation ** // + // ** wont work without it). A little time consuming but necessary. For FreeSurfer 7.3.2, RB_all_2020-01-02.gca ** // + // ** is the default atlas. Update when bumping FreeSurfer version. 
** // + """ + mkdir output/ + bash /FastSurfer/run_fastsurfer.sh --sd \$(readlink -f ./) --sid $sid \\ + --t1 \$(readlink -f $anat) \ + --fs_license /freesurfer/license.txt \ + --parallel --device cpu --threads $params.nb_threads --allow_root + mri_ca_register -align-after -nobigventricles -mask $sid/mri/brainmask.mgz \ + -T $sid/mri/transforms/talairach.lta -threads $params.nb_threads $sid/mri/norm.mgz \ + \${FREESURFER_HOME}/average/RB_all_2020-01-02.gca $sid/mri/transforms/talairach.m3z + mri_convert $sid/mri/antsdn.brain.mgz ${sid}__final_t1.nii.gz + """ +} diff --git a/modules/freesurfer/workflows/freesurferflow.nf b/modules/freesurfer/workflows/freesurferflow.nf new file mode 100644 index 0000000..b46583c --- /dev/null +++ b/modules/freesurfer/workflows/freesurferflow.nf @@ -0,0 +1,143 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +include { + FREESURFER; + RECON_SURF +} from '../processes/freesurfer.nf' +include { + FS_BN_GL_SF; + BN_CHILD; + LOBES; + LAUSANNE +} from '../processes/atlases.nf' + +workflow FREESURFERFLOW { + take: + anat + utils + + main: + + if ( params.recon_all ) { + // ** Lauching FreeSurfer Recon-all ** // + FREESURFER(anat) + folder_channel = FREESURFER.out.folders + t1 = FREESURFER.out.final_t1 + } else if ( params.recon_surf ) { + // ** Launching FastSurfer ** // + RECON_SURF(anat) + folder_channel = RECON_SURF.out.folders + t1 = RECON_SURF.out.final_t1 + } + + // ** Combining atlases and utils channels ** // + atlases_channel = folder_channel.combine(utils) + + // ** Computing FS_BN_GL_SF atlases ** // + FS_BN_GL_SF(atlases_channel) + + // ** Computing BN_CHILD Atlas ** // + BN_CHILD(atlases_channel) + + // ** Computing lobes atlases ** // + LOBES(atlases_channel) + + // ** Computing lausanne atlas ** // + scales = Channel.from(1,2,3,4,5) + LAUSANNE(atlases_channel, + scales) + + // ** Reorganizing Lausanne multiscale atlas channel ** // + lausanne1 = LAUSANNE.out.lausanne_1.map{ [it[0]] } + 
.merge(LAUSANNE.out.lausanne_1.map{ [it[1]] }.flatMap()) + lausanne2 = LAUSANNE.out.lausanne_2.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_2.map{ [it[1]] }.flatMap()) + lausanne3 = LAUSANNE.out.lausanne_3.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_3.map{ [it[1]] }.flatMap()) + lausanne4 = LAUSANNE.out.lausanne_4.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_4.map{ [it[1]] }.flatMap()) + lausanne5 = LAUSANNE.out.lausanne_5.map{ [it[0]] } + .merge(LAUSANNE.out.lausanne_5.map{ [it[1]] }.flatMap()) + + // ** Work out a way for the user to select which atlas to use. ** // + // ** Could be cleaner than a bunch of if statements in the future. ** // + if ( params.use_freesurfer_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.freesurfer_dilated + } else { + labels = FS_BN_GL_SF.out.freesurfer + } + } else if ( params.use_brainnetome_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.brainnetome_dilated + } else { + labels = FS_BN_GL_SF.out.brainnetome + } + } else if ( params.use_brainnetome_child_atlas ) { + if ( params.use_dilated_labels ) { + labels = BN_CHILD.out.brainnetome_child_dilated + } else { + labels = BN_CHILD.out.brainnetome_child + } + } else if ( params.use_glasser_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.glasser_dilated + } else { + labels = FS_BN_GL_SF.out.glasser + } + } else if ( params.use_schaefer_100_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.schaefer_100_dilated + } else { + labels = FS_BN_GL_SF.out.schaefer_100 + } + } else if ( params.use_schaefer_200_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.schaefer_200_dilated + } else { + labels = FS_BN_GL_SF.out.schaefer_200 + } + } else if ( params.use_schaefer_400_atlas ) { + if ( params.use_dilated_labels ) { + labels = FS_BN_GL_SF.out.schaefer_400_dilated + } else { + labels = FS_BN_GL_SF.out.schaefer_400 + } + } else if ( params.use_lausanne_1_atlas ) { + if 
( params.use_dilated_labels ) { + labels = lausanne1.map{ [it[0], it[2]] } + } else { + labels = lausanne1.map{ [it[0], it[1]] } + } + } else if ( params.use_lausanne_2_atlas ) { + if ( params.use_dilated_labels ) { + labels = lausanne2.map{ [it[0], it[2]] } + } else { + labels = lausanne2.map{ [it[0], it[1]] } + } + } else if ( params.use_lausanne_3_atlas ) { + if ( params.use_dilated_labels ) { + labels = lausanne3.map{ [it[0], it[2]] } + } else { + labels = lausanne3.map{ [it[0], it[1]] } + } + } else if ( params.use_lausanne_4_atlas ) { + if ( params.use_dilated_labels ) { + labels = lausanne4.map{ [it[0], it[2]] } + } else { + labels = lausanne4.map{ [it[0], it[1]] } + } + } else if ( params.use_lausanne_5_atlas ) { + if ( params.use_dilated_labels ) { + labels = lausanne5.map{ [it[0], it[2]] } + } else { + labels = lausanne5.map{ [it[0], it[1]] } + } + } + + emit: + labels + t1 +} \ No newline at end of file diff --git a/modules/io.nf b/modules/io.nf index 3618743..f560eec 100644 --- a/modules/io.nf +++ b/modules/io.nf @@ -3,6 +3,7 @@ nextflow.enable.dsl=2 params.input = false +params.references = false def fetch_id ( dir, dir_base ) { return dir_base.relativize(dir) @@ -10,6 +11,30 @@ def fetch_id ( dir, dir_base ) { .join("_") } +// ** Getting data for the -profile freesurfer ** // +workflow get_data_freesurfer { + main: + if (! params.input ) { + log.info "You must provide an input folder containing all images required for FreesurferFlow :" + log.info " --input=/path/to/[input_folder] Input folder containing your subjects." + log.info " [input]" + log.info " ├-- S1" + log.info " | └-- *t1.nii.gz" + log.info " └-- S2" + log.info " └-- *t1.nii.gz" + error "Please resubmit your command with the previous file structure." 
+ } + + input = file(params.input) + + // ** Loading files ** // + anat_channel = Channel.fromPath("$input/**/*t1.nii.gz") + .map{ch1 -> [ch1.parent.name, ch1]} + + emit: + anat = anat_channel +} + // ** Decided to split the data fetching steps for different profiles in different functions ** // // ** for easier code-reading. ** // workflow get_data_tracking { @@ -21,48 +46,86 @@ workflow get_data_tracking { log.info " [Input]" log.info " ├-- S1" log.info " | ├-- *dwi.nii.gz" - log.info " | ├-- *bval" - log.info " | ├-- *bvec" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" log.info " | ├-- *revb0.nii.gz" - log.info " | ├-- *T2w.nii.gz" - log.info " | ├-- *brain_mask.nii.gz" + log.info " | └-- *t1.nii.gz" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *bval" + log.info " ├-- *bvec" + log.info " ├-- *revb0.nii.gz" + log.info " └-- *t1.nii.gz" + error "Please resubmit your command with the previous file structure." + } + + input = file(params.input) + + // ** Loading all files. ** // + dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true) + { fetch_id(it.parent, input) } + rev_channel = Channel.fromFilePairs("$input/**/*revb0.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + anat_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + + // ** Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. 
** // + dwi_channel = dwi_channel.map{sid, bvals, bvecs, dwi -> tuple(sid, dwi, bvals, bvecs)} + + emit: + dwi = dwi_channel + rev = rev_channel + anat = anat_channel +} + +// ** Getting data for the -profile tracking,infant ** // +workflow get_data_tracking_infant { + main: + if ( !params.input ) { + log.info "You must provide an input folder containing all images using:" + log.info " --input=/path/to/[input_folder] Input folder containing multiple subjects for tracking" + log.info "" + log.info " [Input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *revb0.nii.gz" + log.info " | ├-- *t2w.nii.gz" log.info " | └-- *wm_mask.nii.gz" log.info " └-- S2" log.info " ├-- *dwi.nii.gz" log.info " ├-- *bval" log.info " ├-- *bvec" log.info " ├-- *revb0.nii.gz" - log.info " ├-- *T2w.nii.gz" - log.info " ├-- *brain_mask.nii.gz" + log.info " ├-- *t2w.nii.gz" log.info " └-- *wm_mask.nii.gz" error "Please resubmit your command with the previous file structure." } input = file(params.input) - // Loading all files. + // ** Loading all files. ** // dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true) { fetch_id(it.parent, input) } rev_channel = Channel.fromFilePairs("$input/**/*revb0.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } anat_channel = Channel.fromFilePairs("$input/**/*t2w.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - brain_mask_channel = Channel.fromFilePairs("$input/**/*brain_mask.nii.gz", size: 1, flat: true) - { fetch_id(it.parent, input) } wm_mask_channel = Channel.fromFilePairs("$input/**/*wm_mask.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. + // ** Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. 
** // dwi_channel = dwi_channel.map{sid, bvals, bvecs, dwi -> tuple(sid, dwi, bvals, bvecs)} emit: dwi = dwi_channel rev = rev_channel anat = anat_channel - brain_mask = brain_mask_channel wm_mask = wm_mask_channel } +// ** Fetching data for -profile connectomics ** // workflow get_data_connectomics { main: if ( !params.input ) { @@ -72,30 +135,28 @@ workflow get_data_connectomics { log.info " [Input]" log.info " ├-- S1" log.info " | ├-- *dwi.nii.gz" - log.info " | ├-- *bval" - log.info " | ├-- *bvec" - log.info " | ├-- *t2w.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *t1.nii.gz" log.info " | ├-- *.trk" log.info " | ├-- *labels.nii.gz" log.info " | ├-- *peaks.nii.gz" log.info " | ├-- *fodf.nii.gz" log.info " | ├-- OGenericAffine.mat" - log.info " | ├-- synoutput0Warp.nii.gz" - log.info " | ├-- maskoutput0Warp.nii.gz" + log.info " | ├-- output1Warp.nii.gz" log.info " | └-- metrics" log.info " | └-- METRIC_NAME.nii.gz [Optional]" log.info " └-- S2" log.info " ├-- *dwi.nii.gz" log.info " ├-- *bval" log.info " ├-- *bvec" - log.info " ├-- *t2w.nii.gz" + log.info " ├-- *t1.nii.gz" log.info " ├-- *.trk" log.info " ├-- *labels.nii.gz" log.info " ├-- *peaks.nii.gz" log.info " ├-- *fodf.nii.gz" log.info " ├-- OGenericAffine.mat" - log.info " ├-- synoutput0Warp.nii.gz" - log.info " ├-- maskoutput0Warp.nii.gz" + log.info " ├-- output1Warp.nii.gz" log.info " └-- metrics" log.info " └-- METRIC_NAME.nii.gz [Optional]" error "Please resubmit your command with the previous file structure." 
@@ -114,20 +175,81 @@ workflow get_data_connectomics { { fetch_id(it.parent, input) } metrics_channel = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, maxDepth: 2) { it.parent.parent.name } - t2w_channel = Channel.fromFilePairs("$input/**/*t2w_warped.nii.gz", size: 1, flat: true) + t1_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) { fetch_id(it.parent, input) } - transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,synoutput0Warp.nii.gz,maskoutput0Warp.nii.gz}", size: 3, flat: true) + transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,output1Warp.nii.gz}", size: 2, flat: true) { fetch_id(it.parent, input) } // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. dwi_peaks_channel = dwi_peaks_channel.map{sid, bvals, bvecs, dwi, peaks -> tuple(sid, dwi, bvals, bvecs, peaks)} - // Setting up transfos channel in this order : sid, affine, syn, masksyn - transfos_channel = transfos_channel.map{sid, affine, masksyn, syn -> tuple(sid, affine, syn, masksyn)} + emit: + trk = tracking_channel + labels = labels_channel + dwi_peaks = dwi_peaks_channel + fodf = fodf_channel + metrics = metrics_channel + anat = t1_channel + transfos = transfos_channel +} + +// ** Fetching data for -profile connectomics,infant ** // +workflow get_data_connectomics_infant { + main: + if ( !params.input ) { + log.info "You must provide an input folder containing all images using:" + log.info " --input=/path/to/[input_folder] Input folder containing multiple subjects" + log.info "" + log.info " [Input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *t2w.nii.gz" + log.info " | ├-- *.trk" + log.info " | ├-- *labels.nii.gz" + log.info " | ├-- *peaks.nii.gz" + log.info " | ├-- *fodf.nii.gz" + log.info " | ├-- OGenericAffine.mat" + log.info " | ├-- output1Warp.nii.gz" + log.info " | └-- metrics" + log.info " | └-- 
METRIC_NAME.nii.gz [Optional]" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *bval" + log.info " ├-- *bvec" + log.info " ├-- *t2w.nii.gz" + log.info " ├-- *.trk" + log.info " ├-- *labels.nii.gz" + log.info " ├-- *peaks.nii.gz" + log.info " ├-- *fodf.nii.gz" + log.info " ├-- OGenericAffine.mat" + log.info " ├-- output1Warp.nii.gz" + log.info " └-- metrics" + log.info " └-- METRIC_NAME.nii.gz [Optional]" + error "Please resubmit your command with the previous file structure." + } + + input = file(params.input) + + // Loading all files. + tracking_channel = Channel.fromFilePairs("$input/**/*.trk", size: 1, flat: true) + { fetch_id(it.parent, input) } + labels_channel = Channel.fromFilePairs("$input/**/*labels.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + dwi_peaks_channel = Channel.fromFilePairs("$input/**/{*dwi.nii.gz,*.bval,*.bvec,*peaks.nii.gz}", size: 4, flat: true) + { fetch_id(it.parent, input) } + fodf_channel = Channel.fromFilePairs("$input/**/*fodf.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + metrics_channel = Channel.fromFilePairs("$input/**/metrics/*.nii.gz", size: -1, maxDepth: 2) + { it.parent.parent.name } + t2w_channel = Channel.fromFilePairs("$input/**/*t2w.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + transfos_channel = Channel.fromFilePairs("$input/**/{0GenericAffine.mat,output1Warp.nii.gz}", size: 2, flat: true) + { fetch_id(it.parent, input) } - // Flattening metrics channel. - metrics_channel = metrics_channel.transpose().groupTuple() - .flatMap{ sid, metrics -> metrics.collect{ [sid, it] } } + // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. 
+ dwi_peaks_channel = dwi_peaks_channel.map{sid, bvals, bvecs, dwi, peaks -> tuple(sid, dwi, bvals, bvecs, peaks)} emit: trk = tracking_channel @@ -135,6 +257,253 @@ workflow get_data_connectomics { dwi_peaks = dwi_peaks_channel fodf = fodf_channel metrics = metrics_channel - t2w = t2w_channel + anat = t2w_channel transfos = transfos_channel +} + +workflow get_data_template { + main: + if ( !params.input ) { + log.info "You must provide an input folder containing all images using:" + log.info " --input=/path/to/[input_folder] Input folder containing multiple subjects for tracking" + log.info "" + log.info " [Input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *fa.nii.gz" + log.info " | ├-- *t2w.nii.gz" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *dwi.bvec" + log.info " ├-- *fa.nii.gz" + log.info " └-- *t2w.nii.gz" + log.info " [References]" + log.info " ├-- *fa_ref.nii.gz" + log.info " └-- *t2w_ref.nii.gz" + error "Please resubmit your command with the previous file structure." + } + + input = file(params.input) + references = file(params.references) + + // Loading all files. + dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bvec}", size: 2, flat: true) + { fetch_id(it.parent, input) } + fa_channel = Channel.fromFilePairs("$input/**/*fa.nii.gz", size:1, flat: true) + { fetch_id(it.parent, input) } + anat_channel = Channel.fromFilePairs("$input/**/*t2w.nii.gz", size: 1, flat: true) + { fetch_id(it.parent, input) } + anat_ref = Channel.fromPath("$references/*t2w_ref.nii.gz") + fa_ref = Channel.fromPath("$references/*fa_ref.nii.gz") + + // Setting up dwi channel in this order : sid, dwi, bval, bvec for lisibility. 
+ dwi_channel = dwi_channel.map{sid, bvecs, dwi -> tuple(sid, dwi, bvecs)} + + emit: + dwi = dwi_channel + anat = anat_channel + fa = fa_channel + anat_ref = anat_ref + fa_ref = fa_ref +} + +def display_run_info () { + log.info "" + log.info "ChildBrainFlow pipeline" + log.info "========================" + log.info "ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. " + log.info "It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs " + log.info "between flows with some parameters tuned for pediatric brain scans. " + log.info "" + log.info "Start time: $workflow.start" + log.info "" + + log.debug "[Command-line]" + log.debug "$workflow.commandLine" + log.debug "" + + log.info "[Git Info]" + log.info "$workflow.repository - $workflow.revision [$workflow.commitId]" + log.info "" + + log.info "[Inputs]" + log.info "Input: $params.input" + log.info "Output Directory: $params.output_dir" + log.info "" + + if ( params.run_tracking ) { + log.info "[Tracking Options]" + log.info "" + log.info "GLOBAL OPTIONS" + log.info "Threshold for b0: $params.b0_thr" + log.info "DWI Shell Tolerance: $params.dwi_shell_tolerance" + log.info "Skip DWI preprocessing: $params.skip_dwi_preprocessing" + log.info "" + log.info "BET DWI OPTIONS" + log.info "Initial fractional value for BET: $params.initial_bet_f" + log.info "Finale fractional value for BET: $params.final_bet_f" + log.info "" + if ( params.infant_config ) { + log.info "BET T2W OPTIONS" + log.info "Run BET on T2W image: $params.run_bet_anat" + log.info "Fractional value for T2W BET: $params.bet_anat_f" + } + else { + log.info "BET T1W OPTIONS" + log.info "T1 Tempalte: $params.template_t1" + } + log.info "" + log.info "EDDY AND TOPUP OPTIONS" + log.info "Configuration for topup: $params.topup_config" + log.info "Encoding direction: $params.encoding_direction" + log.info "Readout: $params.readout" + log.info "Topup prefix: 
$params.topup_prefix" + log.info "Topup BET fractional value: $params.topup_bet_f" + log.info "Eddy command: $params.eddy_cmd" + log.info "Run slice drop correction: $params.use_slice_drop_correction" + log.info "" + log.info "NORMALIZE OPTIONS" + log.info "FA threshold for masking: $params.fa_mask_threshold" + log.info "" + log.info "RESAMPLE ANAT OPTIONS" + log.info "Resampling resolution for Anatomical file: $params.anat_resolution" + log.info "Interpolation method for Anatomical file: $params.anat_interpolation" + log.info "Interpolation method for masks: $params.mask_interpolation" + log.info "" + log.info "RESAMPLE DWI OPTIONS" + log.info "Resampling resolution for DWI: $params.dwi_resolution" + log.info "Interpolation method for DWI: $params.dwi_interpolation" + log.info "Interpolation method for DWI mask: $params.mask_dwi_interpolation" + log.info "" + log.info "EXTRACT DWI SHELLS OPTIONS" + if ( params.dti_shells ) { + log.info "DTI Shells: $params.dti_shells" + } else { + log.info "Maximum DTI shell value: $params.max_dti_shell_value" + } + log.info "" + log.info "SH FITTING OPTIONS" + log.info "Run SH fitting: $params.sh_fitting" + log.info "SH fitting order: $params.sh_fitting_order" + log.info "SH fitting basis: $params.sh_fitting_basis" + log.info "" + log.info "FODF OPTIONS" + if ( params.fodf_shells ) { + log.info "FODF Shells: $params.fodf_shells" + } else { + log.info "Minimum fODF shell value: $params.min_fodf_shell_value" + } + log.info "FODF Metrics A factor: $params.fodf_metrics_a_factor" + log.info "Maximum FA value in ventricles: $params.max_fa_in_ventricle" + log.info "Minimum MD value in ventricles: $params.min_md_in_ventricle" + log.info "Relative threshold (RT): $params.relative_threshold" + log.info "SH basis: $params.basis" + log.info "SH order: $params.sh_order" + log.info "" + log.info "FRF OPTIONS" + log.info "Run mean FRF: $params.mean_frf" + log.info "FA threshold for single fiber voxel: $params.fa" + log.info "Minimum FA for 
selecting voxel: $params.min_fa" + log.info "Minimum number of voxels: $params.min_nvox" + log.info "ROI radius: $params.roi_radius" + log.info "Set FRF: $params.set_frf" + log.info "Manual FRF: $params.manual_frf" + log.info "" + log.info "SEGMENT TISSUES OPTIONS" + log.info "Number of tissues: $params.number_of_tissues" + log.info "" + log.info "SEEDING AND TRACKING OPTIONS" + if ( params.run_pft_tracking ) { + log.info "Algorithm for tracking: $params.pft_algo" + log.info "Number of seeds per voxel: $params.pft_nbr_seeds" + log.info "Seeding method: $params.pft_seeding" + log.info "Step size: $params.pft_step_size" + log.info "Theta threshold: $params.pft_theta" + log.info "Minimum fiber length: $params.pft_min_len" + log.info "Maximum fiber length: $params.pft_max_len" + log.info "Compression: $params.pft_compress_streamlines" + } + else { + log.info "Algorithm for tracking: $params.local_algo" + log.info "Number of seeds per voxel: $params.local_nbr_seeds" + log.info "Seeding method: $params.local_seeding" + log.info "Step size: $params.local_step_size" + log.info "Theta threshold: $params.local_theta" + log.info "Minimum fiber length: $params.local_min_len" + log.info "Maximum fiber length: $params.local_max_len" + log.info "Compression: $params.local_compress_streamlines" + } + log.info "" + log.info "PROCESSES PER TASKS" + if ( !params.infant_config ) { + log.info "Processes for denoising T1: $params.processes_denoise_t1" + log.info "Processes for BET T1: $params.processes_bet_t1" + } + log.info "Processes for denoising DWI: $params.processes_denoise_dwi" + log.info "Processes for EDDY: $params.processes_eddy" + log.info "Processes for registration: $params.processes_registration" + log.info "Processes for FODF: $params.processes_fodf" + log.info "" + } + + if ( params.run_freesurfer ) { + log.info "[Freesurfer Options]" + log.info "" + log.info "Use Recon-all: $params.recon_all" + log.info "Use FastSurfer + Recon-Surf: $params.recon_surf" + log.info "Atlas 
utils folder: $params.atlas_utils_folder" + log.info "Compute FS, BN, GL, SF: $params.compute_FS_BN_GL_SF" + log.info "Compute BN Child: $params.compute_BN_child" + log.info "Compute lobes: $params.compute_lobes" + log.info "Compute lausanne multiscale: $params.compute_lausanne_multiscale" + log.info "Number of threads: $params.nb_threads" + log.info "" + log.info "ATLAS SELECTION" + log.info "Use Freesurfer atlas: $params.use_freesurfer_atlas" + log.info "Use Brainnetome atlas: $params.use_brainnetome_atlas" + log.info "Use Brainnetome Child atlas: $params.use_brainnetome_child_atlas" + log.info "Use Glasser atlas: $params.use_glasser_atlas" + log.info "Use Schaefer 100 atlas: $params.use_schaefer_100_atlas" + log.info "Use Schaefer 200 atlas: $params.use_schaefer_200_atlas" + log.info "Use Schaefer 400 atlas: $params.use_schaefer_400_atlas" + log.info "Use Lausanne 1 atlas: $params.use_lausanne_1_atlas" + log.info "Use Lausanne 2 atlas: $params.use_lausanne_2_atlas" + log.info "Use Lausanne 3 atlas: $params.use_lausanne_3_atlas" + log.info "Use Lausanne 4 atlas: $params.use_lausanne_4_atlas" + log.info "Use Lausanne 5 atlas: $params.use_lausanne_5_atlas" + log.info "Use dilated labels: $params.use_dilated_labels" + log.info "" + } + + if ( params.run_connectomics ) { + log.info "[Connectomics Options]" + log.info "" + log.info "DECOMPOSE OPTIONS" + log.info "No pruning: $params.no_pruning" + log.info "No remove loops: $params.no_remove_loops" + log.info "No remove outliers: $params.no_remove_outliers" + log.info "Minimal length: $params.min_length" + log.info "Maximal length: $params.max_length" + log.info "Maximum looping angle: $params.loop_max_angle" + log.info "Outlier treshold: $params.outlier_threshold" + log.info "" + log.info "COMMIT OPTIONS" + log.info "Run COMMIT: $params.run_commit" + log.info "Use COMMIT2: $params.use_commit2" + log.info "Use both COMMIT: $params.use_both_commit" + log.info "COMMIT on trk: $params.commit_on_trk" + log.info "B-value 
threshold: $params.b_thr" + log.info "Number of directions: $params.nbr_dir" + log.info "Ball and stick: $params.ball_stick" + log.info "Parallel diffusivity: $params.para_diff" + log.info "Perpendicular diffusivity: $params.perp_diff" + log.info "Isotropic diffusivity: $params.iso_diff" + log.info "" + log.info "PROCESSES OPTIONS" + log.info "Number of processes for COMMIT: $params.processes_commit" + log.info "Number of processes for AFD_FIXEL: $params.processes_afd_fixel" + log.info "Number of processes for CONNECTIVITY: $params.processes_connectivity" + log.info "" + } } \ No newline at end of file diff --git a/modules/priors/workflows/priors.nf b/modules/priors/workflows/priors.nf new file mode 100644 index 0000000..a3b0955 --- /dev/null +++ b/modules/priors/workflows/priors.nf @@ -0,0 +1,54 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +include { DWI_MASK } from "../../tracking/processes/preprocess.nf" +include { EXTRACT_DTI_SHELL; + DTI_METRICS } from "../../tracking/processes/DTI_processes.nf" +include { COMPUTE_PRIORS; + AVERAGE_PRIORS } from "../../connectomics/processes/commit.nf" +include { COMPUTE_FRF; + MEAN_FRF } from "../../tracking/processes/FODF_processes.nf" + +workflow PRIORS { + take: + dwi_channel + + main: + + // ** Generate the mask ** // + DWI_MASK(dwi_channel) + + // ** Extract the DTI shell ** // + EXTRACT_DTI_SHELL(dwi_channel) + + // ** Compute the DTI metrics ** // + dti_channel = EXTRACT_DTI_SHELL.out.dti_files + .combine(DWI_MASK.out.dwi_mask, by: 0) + DTI_METRICS(dti_channel) + + // ** Compute the priors ** // + priors_channel = DTI_METRICS.out.fa_and_md + .combine(DTI_METRICS.out.ad_and_rd, by: 0) + COMPUTE_PRIORS(priors_channel) + + // ** AVERAGE THE PRIORS ** // + avg_priors_channel = COMPUTE_PRIORS.out.para_diff + .join(COMPUTE_PRIORS.out.iso_diff) + .join(COMPUTE_PRIORS.out.perp_diff) + .groupTuple() + + AVERAGE_PRIORS(avg_priors_channel) + + // ** Compute the FRF ** // + frf_channel = dwi_channel + 
.combine(DWI_MASK.out.dwi_mask, by: 0) + COMPUTE_FRF(frf_channel) + + // ** Compute the mean FRF ** // + all_frf = COMPUTE_FRF.out.frf + .map{[it[1]]} + .collect() + MEAN_FRF(all_frf) + +} diff --git a/modules/template/processes/average.nf b/modules/template/processes/average.nf new file mode 100644 index 0000000..8d4c64f --- /dev/null +++ b/modules/template/processes/average.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process AVERAGE_VOLUMES_ANAT { + label "AVERAGE_VOLUMES_ANAT" + cpus 4 + publishDir = params.Pop_Avg_Publish_Dir + + input: + path(volumes) + output: + tuple path("population_avg_anat.nii.gz"), + path("population_avg_anat_bet.nii.gz"), + path("population_avg_anat_bet_mask.nii.gz"), emit: popavg + script: + """ + scil_image_math.py mean $volumes population_avg_anat.nii.gz + bet population_avg_anat.nii.gz temp.nii.gz -f 0.7 -R + bet temp.nii.gz population_avg_anat_bet -m -R + """ +} + +process AVERAGE_DWI { + label "AVERAGE_DWI" + cpus 4 + publishDir = params.Pop_Avg_Publish_Dir + + input: + path(volumes) + output: + path("population_avg_dwi.nii.gz"), emit: dwipopavg + + script: + """ + mrmath $volumes mean population_avg_dwi.nii.gz + """ +} \ No newline at end of file diff --git a/modules/template/processes/registration.nf b/modules/template/processes/registration.nf new file mode 100644 index 0000000..2f536ff --- /dev/null +++ b/modules/template/processes/registration.nf @@ -0,0 +1,102 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +process REGISTER_POP { + label "REGISTRATION_POP" + cpus params.processes_registration + + input: + tuple val(sid), path(anat), path(ref) + output: + tuple val(sid), path("${sid}__t2w_warped.nii.gz"), emit: warped_anat + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsRegistration --dimensionality 3 --float 0 \ + --collapse-output-transforms 1 \ + --output [ 
output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ + --interpolation Linear --use-histogram-matching 0 \ + --winsorize-image-intensities [ 0.005,0.995 ] \ + --initial-moving-transform [ $ref,$anat,1 ] \ + --transform Rigid[ 0.1 ] \ + --metric MI[ $ref,$anat,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform Affine[ 0.1 ] --metric MI[ $ref,$anat,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform SyN[ 0.1,3,0 ] \ + --metric CC[ $ref,$anat,1,4 ] \ + --convergence [ 200x150x200x200,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox + mv outputWarped.nii.gz ${sid}__t2w_warped.nii.gz + """ +} + +process REGISTER_FA { + label "REGISTRATION_FA" + cpus params.processes_registration + + input: + tuple val(sid), path(moving), path(ref) + output: + tuple val(sid), path("${sid}__fa_warped.nii.gz"), emit: fa_warped + tuple val(sid), + path("output0GenericAffine.mat"), + path("output1Warp.nii.gz"), emit: transfos + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsRegistration --dimensionality 3 --float 0 \ + --collapse-output-transforms 1 \ + --output [ output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ + --interpolation Linear --use-histogram-matching 0 \ + --winsorize-image-intensities [ 0.005,0.995 ] \ + --initial-moving-transform [ $ref,$moving,1 ] \ + --transform Rigid[ 0.1 ] \ + --metric MI[ $ref,$moving,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform Affine[ 0.1 ] --metric MI[ $ref,$moving,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform SyN[ 0.1,3,0 ] \ + 
--metric CC[ $ref,$moving,1,4 ] \ + --convergence [ 200x150x200x200,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox + mv outputWarped.nii.gz ${sid}__fa_warped.nii.gz + """ +} + +process APPLY_TRANSFORM_DWI_BVECS { + label "APPLY_TRANSFORM" + cpus 1 + + input: + tuple val(sid), path(warped_fa), path(dwi), path(bvec), path(mat), path(warp) + output: + tuple val(sid), path("${sid}__warped_dwi.nii.gz"), emit: dwi_warped + tuple val(sid), path("${sid}__warped_dwi.bvec"), emit: bvec_warped + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsApplyTransforms -d 3 -e 3 \ + -i $dwi -r $warped_fa \ + -n Linear \ + -t $warp $mat \ + -o ${sid}__warped_dwi.nii.gz + scil_image_math.py convert ${sid}__warped_dwi.nii.gz ${sid}__warped_dwi.nii.gz --data_type float32 -f + scil_apply_transform_to_bvecs.py $bvec $mat ${sid}__warped_dwi.bvec + """ +} \ No newline at end of file diff --git a/modules/template/workflows/pop_template.nf b/modules/template/workflows/pop_template.nf new file mode 100644 index 0000000..6184277 --- /dev/null +++ b/modules/template/workflows/pop_template.nf @@ -0,0 +1,59 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl=2 + +include { + BET_T2 +} from '../../tracking/processes/preprocess.nf' + +include { + REGISTER_POP; + REGISTER_FA; + APPLY_TRANSFORM_DWI_BVECS +} from '../processes/registration.nf' + +include { + AVERAGE_VOLUMES_ANAT; + AVERAGE_DWI +} from '../processes/average.nf' + +workflow POPULATION_TEMPLATE { + take: + anat_channel + dwi_channel + fa_channel + anat_ref_channel + fa_ref_channel + main: + + BET_T2(anat_channel) + + reg_channel = BET_T2.out.bet_t2 + .combine(anat_ref_channel) + + REGISTER_POP(reg_channel) + + all_anats = REGISTER_POP.out.warped_anat + .map{ [it[1]] } + .collect() + + AVERAGE_VOLUMES_ANAT(all_anats) + + reg_fa_channel = fa_channel + .combine(fa_ref_channel) + + REGISTER_FA(reg_fa_channel) + + 
apply_transfo_channel = REGISTER_FA.out.fa_warped + .combine(dwi_channel, by: 0) + .combine(REGISTER_FA.out.transfos, by: 0) + + APPLY_TRANSFORM_DWI_BVECS(apply_transfo_channel) + + all_dwis = APPLY_TRANSFORM_DWI_BVECS.out.dwi_warped + .map{ [it[1]] } + .collect() + + AVERAGE_DWI(all_dwis) + +} \ No newline at end of file diff --git a/modules/tracking/USAGE b/modules/tracking/USAGE new file mode 100644 index 0000000..2ef2e77 --- /dev/null +++ b/modules/tracking/USAGE @@ -0,0 +1,232 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking Pipeline + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | └-- *t1.nii.gz + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + └-- *t1.nii.gz + + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. 
($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. + ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET T1 OPTIONS + --template_t1 Absolute path to the template T1 directory for antsBrainExtraction. + The folder must contain t1_template.nii.gz and + t1_brain_probability_map.nii.gz. The default path is the human_data + folder in the singularity container ($template_t1). + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. 
+ ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. 
+ ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + SEGMENT TISSUES OPTIONS + --number_of_tissues Number of tissues classes to segment. ($number_of_tissues) + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. 
($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. ($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. 
+ --processes_bet_t1 Number of processes for BET T1 task ($processes_bet_t1) + --processes_denoise_t1 Number of processes for T1 denoising task ($processes_denoise_t1) + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. ($processes_fodf) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/tracking/USAGE_INFANT b/modules/tracking/USAGE_INFANT new file mode 100644 index 0000000..3041c8e --- /dev/null +++ b/modules/tracking/USAGE_INFANT @@ -0,0 +1,231 @@ + +ChildBrainFlow Pipeline +======================= + +ChildBrainFlow is an end-to-end pipeline that performs tractography, t1 reconstruction and connectomics. +It is essentially a merged version of multiple individual pipeline to avoid the handling of inputs/outputs +between flows with some parameters tuned for pediatric brain scans. Here is a list of flows from which +process have been taken: + + 1. TractoFlow (https://github.com/scilus/tractoflow.git) + 2. FreeSurfer-Flow (https://github.com/scilus/freesurfer_flow) + 3. Connectoflow (https://github.com/scilus/connectoflow) + +*** Please note that some steps have been removed from the original pipelines if they were not relevant *** +*** for pediatric data. If you need some of these steps, please use the original pipelines. *** + +Run Tracking Pipeline Infant Config + +nextflow run main.nf [OPTIONAL_ARGUMENTS] --input [input_folder] -profile tracking,infant + +DESCRIPTION + + --input=/path/to/[input_folder] Input folder containing multiple subjects + + [Input] + ├-- S1 + | ├-- *dwi.nii.gz + | ├-- *.bval + | ├-- *.bvec + | ├-- *revb0.nii.gz + | ├-- *t2w.nii.gz + | └-- *wm_mask.nii.gz + └-- S2 + ├-- *dwi.nii.gz + ├-- *bval + ├-- *bvec + ├-- *revb0.nii.gz + ├-- *t2w.nii.gz + └-- *wm_mask.nii.gz + +OPTIONAL ARGUMENTS (current value) + +[TRACKING OPTIONS] + + --b0_thr All b-values below b0_thr will be considered b=0 images. ($b0_thr) + --dwi_shell_tolerance All b-values +/- dwi_shell_tolerance will be considered the same + b-value. ($dwi_shell_tolerance) + --skip_dwi_preprocessing If set, will skip all preprocessing steps and go straight to local + modelling. Useful when input data is already preprocessed. 
+ ($skip_dwi_preprocessing) + + BET DWI OPTIONS + --initial_bet_f Fractional intensity threshold for initial bet. ($initial_bet_f) + --final_bet_f Fractional intensity threshold for final bet. ($final_bet_f) + + BET ANAT OPTIONS + --run_bet_anat If set, will perform brain extraction on the input anat volume. + ($run_bet_anat) + Default settings are soft to make sure an already brain extracted volume + is not impacted + by the bet command. The goal is to clean volumes that still have + portions of non-brain structures. + --bet_anat_f Fractional intensity threshold for bet. ($bet_anat_f) + + EDDY AND TOPUP OPTIONS + --encoding_direction Encoding direction of the dwi [x, y, z]. ($encoding_direction) + --readout Readout time. ($readout) + --topup_bet_f Fractional intensity threshold for bet before EDDY + (generate brain mask). ($topup_bet_f) + --eddy_cmd Eddy command to use [eddy_openmp, eddy_cpu, eddy_cuda]. ($eddy_cmd) + --use_slice_drop_correction If set, will use the slice drop correction from EDDY. + ($use_slice_drop_correction) + + SYNTHSTRIP OPTIONS + --run_synthbet Run SynthStrip to perform brain extraction on the DWI volume. + ($run_synthbet) + --shells Shell to use when computing the powder average prior to + SynthStrip. ($shells) + --shell_thr B-values threshold for shell extraction. ($shell_thr) + --gpu Run on GPU. ($gpu) + --border Mask border threshold in mm. ($border) + --nocsf Exclude CSF from brain border. ($nocsf) + --weights Alternative model weights file. ($weights) + + NORMALIZATION OPTIONS + --fa_mask_threshold Threshold to use when creating the fa mask for normalization. + ($fa_mask_threshold) + + RESAMPLE OPTIONS + --anat_resolution Resampling resolution of the T2w image. ($anat_resolution) + --anat_interpolation Interpolation method to use after resampling. ($anat_interpolation) + --mask_interpolation Interpolation method to use on the anatomical masks after resampling. 
+ ($mask_interpolation) + --dwi_resolution Resampling resolution of the dwi volume. ($dwi_resolution) + --dwi_interpolation Interpolation method to use after resampling of the dwi volume. + ($dwi_interpolation) + + DTI OPTIONS + --max_dti_shell_value Maximum b-value threshold to select DTI shells. + (b <= $max_dti_shell_value) + This is the default behavior unless --dti_shells is specified. + --dti_shells Shells selected to compute DTI metrics (generally b <= 1200). + They need to be supplied between quotes e.g. (--dti_shells "0 1000"). + If supplied, will overwrite --max_dti_shell_value. + + SH OPTIONS + --sh_fitting If true, will compute a Sperical Harmonics fitting onto the DWI and + output the SH coefficients in a Nifti file. ($sh_fitting) + --sh_fitting_order SH order to use for the optional SH fitting (needs to be an even + number). ($sh_fitting_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + --sh_fitting_basis SH basis to use for the optional SH fitting [descoteaux07, tournier07]. + ($sh_fitting_basis) + --sh_fitting_shells Shells selected to compute the SH fitting. Mandatory if --sh_fitting is + used. They need to be supplied between quotes e.g. (--sh_fitting_shells + "0 1500"). NOTE: SH fitting works only on single shell. The b0 shell has + to be included. + + FODF OPTIONS + --min_fodf_shell_value Minimum shell threshold to be used as a FODF shell + (b >= $min_fodf_shell_value) + This is the default behavior unless --fodf_shells is provided. + --fodf_shells Shells selected to compute the FODF metrics (generally b >= 700). + They need to be supplied between quotes e.g. (--fodf_shells "0 1500") + If supplied, will overwrite --min_fodf_shell_value. + --max_fa_in_ventricle Maximal threshold of FA to be considered in a ventricle voxel. + ($max_fa_in_ventricle) + --min_md_in_ventricle Minimum threshold of MD to be considered in a ventricle voxel. 
+ ($min_md_in_ventricle) + --relative_threshold Relative threshold on fODF amplitude in [0,1] ($relative_threshold) + --basis fODF basis [descoteaux07, tournier07]. ($basis) + --sh_order Sperical Harmonics order ($sh_order) + Rules : --sh_fitting_order=8 for 45 directions + --sh_fitting_order=6 for 28 directions + + FRF OPTIONS + --mean_frf Mean the FRF of all subjects. ($mean_frf) + USE ONLY IF ALL OF SUBJECTS COME FROM THE SAME SCANNER + AND HAVE THE SAME ACQUISITION. + --fa Initial FA threshold to compute the frf. ($fa) + --min_fa Minimum FA threshold to compute the frf. ($min_fa) + --min_nvox Minimum number of voxels to compute the frf. ($min_nvox) + --roi_radius Region of interest radius to compute the frf. ($roi_radius) + --set_frf If selected, will manually set the frf. ($set_frf) + --manual_frf FRF set manually (--manual_frf "$manual_frf") + + LOCAL SEEDING AND TRAKING OPTIONS + --run_local_tracking If set, local tracking will be performed. ($run_local_tracking) + --local_compress_streamlines If set, will compress streamlines. ($local_compress_streamlines) + --local_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($local_fa_seeding_mask_thr) + --local_seeding_mask_type Seeding mask type [fa, wm]. ($local_seeding_mask_type) + --local_fa_tracking_mask_thr Minimal FA threshold to generate a binary fa mask for tracking. + ($local_fa_tracking_mask_thr) + --local_tracking_mask_type Tracking mask type [fa, wm]. ($local_tracking_mask_type) + --local_erosion Number of voxel to remove from brain mask. Use to remove aberrant + voxel in fa maps. ($local_erosion) + --local_algo Tracking algorithm [prob, det]. ($local_algo) + --local_nbr_seeds Number of seeds related to the seeding type param. ($local_nbr_seeds) + --local_seeding Seeding type [npv, nt]. ($local_seeding) + --local_step_size Step size ($local_step_size) + --local_theta Maximum angle between 2 steps. ($local_theta) + --local_min_len Minimum length for a streamline. 
($local_min_len) + --local_max_len Maximum length for a streamline. ($local_max_len) + --local_compress_value Compression error threshold. ($local_compress_value) + --local_tracking_seed List of random seed numbers for the random number generator. + ($local_tracking_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PFT SEEDING AND TRAKING OPTIONS + --run_pft_tracking If set, local tracking will be performed. ($run_pft_tracking) + --pft_compress_streamlines If set, will compress streamlines. ($pft_compress_streamlines) + --pft_fa_seeding_mask_thr Minimal FA threshold to generate a binary fa mask for seeding. + ($pft_fa_seeding_mask_thr) + --pft_seeding_mask_type Seeding mask type [fa, wm]. ($pft_seeding_mask_type) + --pft_algo Tracking algorithm [prob, det]. ($pft_algo) + --pft_nbr_seeds Number of seeds related to the seeding type param. ($pft_nbr_seeds) + --pft_seeding Seeding type [npv, nt]. ($pft_seeding) + --pft_step_size Step size ($pft_step_size) + --pft_theta Maximum angle between 2 steps. ($pft_theta) + --pft_min_len Minimum length for a streamline. ($pft_min_len) + --pft_max_len Maximum length for a streamline. ($pft_max_len) + --pft_compress_value Compression error threshold. ($pft_compress_value) + --pft_random_seed List of random seed numbers for the random number generator. + ($pft_random_seed) + Please write them as a list separated by commas without space e.g. + (--tracking_seed 1,2,3) + + PROCESSES OPTIONS + --processes The number of parallel processes to launch ($cpu_count). + Only affects the local scheduler. + --processes_denoise_dwi Number of processes for DWI denoising task ($processes_denoise_dwi) + --processes_eddy Number of processes for EDDY task. ($processes_eddy) + --processes_registration Number of processes for registration task. ($processes_registration) + --processes_fodf Number of processes for fODF task. 
($processes_fodf) + +[GLOBAL OPTIONS] + + OUTPUT OPTIONS + --output_dir Directory to write the final results. Default is + "./Results_ChildBrainFlow/". + +AVAILABLE PROFILES (using -profile option (e.g. -profile no_symlink,macos,tracking)) + +no_symlink When used, results will be directly copied in the output folder and + symlink will not be used. + +macos When used, the scratch folder will be modified for MacOS users. + +tracking When used, will perform the tracking pipeline to generate the + whole-brain tractogram from raw diffusion images. + +freesurfer When used, will run recon-all and atlases generation from t1 volumes. + +connectomics When used, will perform connectivity analysis between atlas-based + segmentation. + +NOTES + +The 'scilpy/scripts' folder should be in your PATH environment variable. Not necessary if the +Singularity container is used. + +The intermediate working directory is, by default, set to './work'. +To change it, use the '-w WORK_DIR' argument. + +The default config file is ChildBrainFlow/nextflow.config. +Use '-C config_file.config' to specify a non-default configuration file. +The '-C config_file.config' must be inserted after the nextflow call +like 'nextflow -C config_file.config run ...'. 
\ No newline at end of file diff --git a/modules/tracking/processes/DTI_processes.nf b/modules/tracking/processes/DTI_processes.nf index 3d8577b..7987c30 100644 --- a/modules/tracking/processes/DTI_processes.nf +++ b/modules/tracking/processes/DTI_processes.nf @@ -3,8 +3,9 @@ nextflow.enable.dsl=2 process EXTRACT_DTI_SHELL { - label "EXTRACT_DTI_SHELL" - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -13,32 +14,33 @@ process EXTRACT_DTI_SHELL { path("${sid}__bvec_dti"), emit: dti_files script: if (params.dti_shells) - """ + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_extract_dwi_shell.py $dwi \ - $bval $bvec $params.dti_shells ${sid}__dwi_dti.nii.gz \ - ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f - """ + $bval $bvec $params.dti_shells ${sid}__dwi_dti.nii.gz \ + ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f + """ else - """ + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 shells=\$(awk -v max="$params.max_dti_shell_value" '{for (i = 1; i <= NF; i++) {v = int(\$i);if (v <= max) shells[v] = 1;}}END {for (v in shells) print v;}' "$bval" |\ - sort -n | tr '\n' ' ') + sort -n | tr '\n' ' ') scil_extract_dwi_shell.py $dwi \ - $bval $bvec \$shells ${sid}__dwi_dti.nii.gz \ - ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f - """ + $bval $bvec \$shells ${sid}__dwi_dti.nii.gz \ + ${sid}__bval_dti ${sid}__bvec_dti -t $params.dwi_shell_tolerance -f + """ } process DTI_METRICS { - label "DTI_METRICS" - cpus 3 + cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) diff --git a/modules/tracking/processes/FODF_processes.nf b/modules/tracking/processes/FODF_processes.nf index c13af32..5cfa147 100644 --- 
a/modules/tracking/processes/FODF_processes.nf +++ b/modules/tracking/processes/FODF_processes.nf @@ -3,8 +3,9 @@ nextflow.enable.dsl=2 process FODF_SHELL { - label "FODF" - cpus 3 + cpus 1 + memory { 8.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -13,36 +14,37 @@ process FODF_SHELL { path("${sid}__bvec_fodf"), emit: dwi_fodf script: if (params.fodf_shells) - """ + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_extract_dwi_shell.py $dwi \ - $bval $bvec $params.fodf_shells ${sid}__dwi_fodf.nii.gz \ - ${sid}__bval_fodf ${sid}__bvec_fodf -t $params.dwi_shell_tolerance -f - """ + $bval $bvec $params.fodf_shells ${sid}__dwi_fodf.nii.gz \ + ${sid}__bval_fodf ${sid}__bvec_fodf -t $params.dwi_shell_tolerance -f + """ else - """ - export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 - export OMP_NUM_THREADS=1 - export OPENBLAS_NUM_THREADS=1 + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 - shells=\$(awk -v min_fodf="$params.min_fodf_shell_value" -v b0_thr="$params.b0_thr" '{for (i = 1; i <= NF; i++) - {v = int(\$i);if (v >= min_fodf || v <= b0_thr) shells[v] = 1;}} - END { - for (v in shells) print v; - } - ' "$bval" | sort -n | tr '\n' ' ') + shells=\$(awk -v min_fodf="$params.min_fodf_shell_value" -v b0_thr="$params.b0_thr" '{for (i = 1; i <= NF; i++) + {v = int(\$i);if (v >= min_fodf || v <= b0_thr) shells[v] = 1;}} + END { + for (v in shells) print v; + } + ' "$bval" | sort -n | tr '\n' ' ') - scil_extract_dwi_shell.py $dwi \ + scil_extract_dwi_shell.py $dwi \ $bval $bvec \$shells ${sid}__dwi_fodf.nii.gz \ ${sid}__bval_fodf ${sid}__bvec_fodf -t $params.dwi_shell_tolerance -f - """ + """ } process COMPUTE_FRF { - label "FRF" - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) @@ 
-71,9 +73,10 @@ process COMPUTE_FRF { } process MEAN_FRF { - label "FRF" cpus 1 - publishDir = params.Mean_FRF_Publish_Dir + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + publishDir = "${params.output_dir}/MEAN_FRF" input: path(all_frf) @@ -89,8 +92,9 @@ process MEAN_FRF { } process FODF_METRICS { - label "FODF" cpus params.processes_fodf + memory { 8.GB * task.attempt } + time { 3.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask), path(fa), path(md), path(frf) @@ -119,7 +123,7 @@ process FODF_METRICS { --fa_t $params.max_fa_in_ventricle --md_t $params.min_md_in_ventricle\ -f - a_threshold=\$( echo " 2 * `awk '{for(i=1;i<=NF;i++) if(\$i>maxval) maxval=\$i;}; END { print maxval;}' ventricles_fodf_max_value.txt`" | bc ) + a_threshold=\$( echo " $params.fodf_metrics_a_factor * `awk '{for(i=1;i<=NF;i++) if(\$i>maxval) maxval=\$i;}; END { print maxval;}' ventricles_fodf_max_value.txt`" | bc ) scil_compute_fodf_metrics.py ${sid}__fodf.nii.gz --mask $b0_mask --sh_basis $params.basis\ --peaks ${sid}__peaks.nii.gz --peak_indices ${sid}__peak_indices.nii.gz\ diff --git a/modules/tracking/processes/SH_processes.nf b/modules/tracking/processes/SH_processes.nf index 882af77..5344ff3 100644 --- a/modules/tracking/processes/SH_processes.nf +++ b/modules/tracking/processes/SH_processes.nf @@ -3,8 +3,9 @@ nextflow.enable.dsl=2 process SH_FITTING_SHELL { - label "SH_FITTING" - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.h * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -23,8 +24,9 @@ process SH_FITTING_SHELL { } process SH_FITTING { - label "SH_FITTING" cpus 1 + memory { 8.GB * task.attempt } + time { 1.h * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) diff --git a/modules/tracking/processes/preprocess.nf b/modules/tracking/processes/preprocess.nf index 4f21a03..4ff9870 100644 --- a/modules/tracking/processes/preprocess.nf +++ 
b/modules/tracking/processes/preprocess.nf @@ -3,28 +3,36 @@ nextflow.enable.dsl=2 process BET_DWI { - label "BET" - cpus 2 + cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) output: tuple val(sid), path("${sid}__dwi_bet.nii.gz"), emit: bet_dwi + tuple val(sid), path("${sid}__powder_avg_bet_mask.nii.gz"), emit: bet_mask + when: + !params.skip_dwi_preprocessing + script: + // ** Using a combination of preliminary bet, powder average computation and then final bet. ** // + // ** This might not be necessary for good quality data, but returns much more robust results on ** // + // ** infant data. ** // """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_extract_b0.py $dwi $bval $bvec ${sid}__b0_mean.nii.gz\ --b0_thr $params.b0_thr --force_b0_threshold --mean - bet2 ${sid}__b0_mean.nii.gz ${sid}__b0_bet -f $params.initial_bet_f -m + bet ${sid}__b0_mean.nii.gz ${sid}__b0_bet -f $params.initial_bet_f -m -R scil_image_math.py convert ${sid}__b0_bet_mask.nii.gz ${sid}__b0_bet_mask.nii.gz\ --data_type uint8 -f mrcalc $dwi ${sid}__b0_bet_mask.nii.gz -mult ${sid}__dwi_bet_prelim.nii.gz\ -quiet -force -nthreads 1 scil_compute_powder_average.py ${sid}__dwi_bet_prelim.nii.gz $bval\ ${sid}__powder_avg.nii.gz --b0_thr $params.b0_thr -f - bet2 ${sid}__powder_avg.nii.gz ${sid}__powder_avg_bet -m -f $params.final_bet_f + bet ${sid}__powder_avg.nii.gz ${sid}__powder_avg_bet -m -R -f $params.final_bet_f scil_image_math.py convert ${sid}__powder_avg_bet_mask.nii.gz ${sid}__powder_avg_bet_mask.nii.gz\ --data_type uint8 -f mrcalc $dwi ${sid}__powder_avg_bet_mask.nii.gz -mult ${sid}__dwi_bet.nii.gz\ @@ -32,50 +40,98 @@ process BET_DWI { """ } +process SYNTHSTRIP { + cpus 12 + memory { 16.GB * task.attempt } + time { 6.hour * task.attempt } + + input: + tuple val(sid), path(dwi), path(bval), path(weights) + output: + tuple val(sid), 
path("${sid}__dwi_bet.nii.gz"), emit: bet_dwi + tuple val(sid), path("${sid}__dwi_bet_mask.nii.gz"), emit: bet_mask + when: + !params.skip_dwi_preprocessing + + script: + def b0_thr = params.b0_thr ? "--b0_thr ${params.b0_thr}" : '' + def shells = params.shells ? "--shells ${params.shells}" : '' + def shell_thr = params.shell_thr ? "--shell_thr ${params.shell_thr}" : '' + + def gpu = params.gpu ? "--gpu" : "" + def border = params.border ? "-b " + params.border : "" + def nocsf = params.nocsf ? "--no-csf" : "" + def model = "$weights" ? "--model $weights" : "" + + // ** Using SynthStrip with infant weights on powder average image. ** // + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_powder_average.py $dwi $bval ${sid}__powder_avg.nii.gz \ + $b0_thr $shells $shell_thr + mri_synthstrip -i ${sid}__powder_avg.nii.gz --out image_bet.nii.gz\ + --mask ${sid}__dwi_bet_mask.nii.gz $gpu $border $nocsf $model + mrcalc $dwi ${sid}__dwi_bet_mask.nii.gz -mult ${sid}__dwi_bet.nii.gz\ + -quiet -force -nthreads 1 + """ +} + process BET_T2 { - label "BET" - cpus 2 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(anat) output: - tuple val(sid), path("${sid}__t2w_bet.nii.gz"), emit: bet_t2 + tuple val(sid), path("${sid}__t2w_bet.nii.gz"), emit: t2_bet + when: + params.infant_config script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - bet2 $anat ${sid}__t2w_bet.nii.gz -f $params.bet_t2w_f + bet $anat ${sid}__t2w_bet.nii.gz -f $params.bet_anat_f -R """ } process DENOISING { - label "DENOISING" cpus params.processes_denoise_dwi + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi) output: tuple val(sid), path("${sid}__dwi_denoised.nii.gz"), emit: denoised_dwi + when: + !params.skip_dwi_preprocessing + script: """ export 
ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - dwidenoise $dwi ${sid}__dwi_denoised.nii.gz -extent 7 -nthreads 6 + dwidenoise $dwi ${sid}__dwi_denoised.nii.gz -extent 7 -nthreads $task.cpus fslmaths ${sid}__dwi_denoised.nii.gz -thr 0 ${sid}__dwi_denoised.nii.gz """ } process TOPUP { - label "TOPUP" cpus 4 + memory { 8.GB * task.attempt } + time { 4.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(revb0) output: tuple val(sid), path("${sid}__corrected_b0s.nii.gz"), path("${params.topup_prefix}_fieldcoef.nii.gz"), path("${params.topup_prefix}_movpar.txt"), emit: topup_result + when: + !params.skip_dwi_preprocessing + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 @@ -99,9 +155,9 @@ process TOPUP { } process EDDY_TOPUP { - label "EDDY_TOPUP" cpus params.processes_eddy - memory { 5.GB * task.attempt } + memory { 16.GB * task.attempt } + time { 16.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0s_corrected), path(field), path(movpar) @@ -109,6 +165,9 @@ process EDDY_TOPUP { tuple val(sid), path("${sid}__dwi_corrected.nii.gz"), path("${sid}__bval_eddy"), path("${sid}__dwi_eddy_corrected.bvec"), emit: dwi_bval_bvec tuple val(sid), path("${sid}__b0_bet_mask.nii.gz"), emit: b0_mask + when: + !params.skip_dwi_preprocessing + script: slice_drop_flag="" if (params.use_slice_drop_correction) @@ -135,13 +194,17 @@ process EDDY_TOPUP { } process N4 { - label "N4" cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) output: tuple val(sid), path("${sid}__dwi_n4.nii.gz"), emit: dwi_n4 + when: + !params.skip_dwi_preprocessing + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus @@ -159,8 +222,9 @@ process N4 { } process CROP_DWI { - label "CROP_VOLUMES" cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple 
val(sid), path(dwi), path(b0_mask) @@ -182,66 +246,164 @@ process CROP_DWI { """ } +process DENOISE_T1 { + cpus params.processes_denoise_t1 + memory { 2.GB * task.attempt } + time { 2.hour * task.attempt } + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), path("${sid}__t1_denoised.nii.gz"), emit: t1_denoised + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_run_nlmeans.py $t1 ${sid}__t1_denoised.nii.gz 1\ + --processes $task.cpus -f + """ +} + +process N4_T1 { + cpus 1 + memory { 2.GB * task.attempt } + time { 2.hour * task.attempt } + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), path("${sid}__t1_n4.nii.gz"), emit: t1_n4 + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + N4BiasFieldCorrection -i $t1\ + -o [${sid}__t1_n4.nii.gz, bias_field_t1.nii.gz]\ + -c [300x150x75x50, 1e-6] -v 1 + """ +} + process CROP_ANAT { - label "CROP_VOLUMES" cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(anat), path(mask) + output: + tuple val(sid), + path("${sid}__anat_cropped.nii.gz"), + path("${sid}__mask_cropped.nii.gz"), emit: cropped_anat_and_mask + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_crop_volume.py $anat ${sid}__anat_cropped.nii.gz\ + --output_bbox boundingBox.pkl -f + scil_crop_volume.py $mask ${sid}__mask_cropped.nii.gz\ + --input_bbox boundingBox.pkl -f + """ +} + +process RESAMPLE_T1 { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: - tuple val(sid), path(t2w), path(brain_mask), path(wm_mask) + tuple val(sid), path(t1) output: - tuple val(sid), path("${sid}__t2w_cropped.nii.gz"), path("${sid}__brain_mask_cropped.nii.gz"), - 
path("${sid}__wm_mask_cropped.nii.gz"), emit: cropped_t2w_and_mask + tuple val(sid), path("${sid}__t1_resampled.nii.gz"), emit: t1_resampled + when: + !params.infant_config + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - scil_crop_volume.py $t2w ${sid}__t2w_cropped.nii.gz\ - --output_bbox t2w_boundingBox.pkl -f - scil_crop_volume.py $brain_mask ${sid}__brain_mask_cropped.nii.gz\ - --input_bbox t2w_boundingBox.pkl -f - scil_crop_volume.py $wm_mask ${sid}__wm_mask_cropped.nii.gz\ - --input_bbox t2w_boundingBox.pkl -f + scil_resample_volume.py $t1 ${sid}__t1_resampled.nii.gz\ + --voxel_size $params.anat_resolution \ + --interp $params.anat_interpolation + """ +} + +process BET_T1 { + cpus params.processes_bet_t1 + memory { 16.GB * task.attempt } + time { 2.hour * task.attempt } + + input: + tuple val(sid), path(t1) + output: + tuple val(sid), + path("${sid}__t1_bet.nii.gz"), + path("${sid}__t1_bet_mask.nii.gz"), emit: t1_and_mask_bet + when: + !params.infant_config + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsBrainExtraction.sh -d 3 -a $t1 -e $params.template_t1/t1_template.nii.gz\ + -o bet/ -m $params.template_t1/t1_brain_probability_map.nii.gz -u 0 + scil_image_math.py convert bet/BrainExtractionMask.nii.gz ${sid}__t1_bet_mask.nii.gz\ + --data_type uint8 + mrcalc $t1 ${sid}__t1_bet_mask.nii.gz -mult ${sid}__t1_bet.nii.gz -nthreads 1 """ } process RESAMPLE_ANAT { - label "RESAMPLE_VOLUMES" cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } input: - tuple val(sid), path(t2w), path(brain_mask), path(wm_mask) + tuple val(sid), path(t2w), path(mask) output: - tuple val(sid), path("${sid}__t2w_resampled.nii.gz"), path("${sid}__brain_mask_resampled.nii.gz"), - path("${sid}__wm_mask_resampled.nii.gz"), emit: t2w_and_mask + tuple val(sid), 
path("${sid}__t2w_resampled.nii.gz"), path("${sid}__mask_resampled.nii.gz"), emit: t2w_and_mask + when: + params.infant_config + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 scil_resample_volume.py $t2w ${sid}__t2w_resampled.nii.gz\ - --voxel_size $params.t2w_resolution --interp $params.t2w_interpolation -f - scil_resample_volume.py $brain_mask ${sid}__brain_mask_resampled.nii.gz\ - --voxel_size $params.t2w_resolution --interp $params.mask_interpolation\ - -f - scil_image_math.py convert ${sid}__brain_mask_resampled.nii.gz ${sid}__brain_mask_resampled.nii.gz\ - --data_type uint8 -f - scil_resample_volume.py $wm_mask ${sid}__wm_mask_resampled.nii.gz\ - --voxel_size $params.t2w_resolution --interp $params.mask_interpolation\ + --voxel_size $params.anat_resolution --interp $params.anat_interpolation -f + scil_resample_volume.py $mask ${sid}__mask_resampled.nii.gz\ + --voxel_size $params.anat_resolution --interp $params.mask_interpolation\ -f - scil_image_math.py convert ${sid}__wm_mask_resampled.nii.gz ${sid}__wm_mask_resampled.nii.gz\ + scil_image_math.py convert ${sid}__mask_resampled.nii.gz ${sid}__mask_resampled.nii.gz\ --data_type uint8 -f """ } process NORMALIZE { - label "NORMALIZE_DWI" cpus 3 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec), path(b0_mask) output: tuple val(sid), path("${sid}__dwi_normalized.nii.gz"), emit: dwi_normalized + when: + !params.skip_dwi_preprocessing + script: if (params.dti_shells) """ @@ -263,7 +425,7 @@ process NORMALIZE { export OPENBLAS_NUM_THREADS=1 shells=\$(awk -v max="$params.max_dti_shell_value" '{for (i = 1; i <= NF; i++) {v = int(\$i);if (v <= max) shells[v] = 1;}}END {for (v in shells) print v;}' "$bval" |\ - sort -n | tr '\n' ' ') + sort -n | tr '\n' ' ') scil_extract_dwi_shell.py $dwi $bval $bvec \$shells\ dwi_dti.nii.gz bval_dti bvec_dti -t $params.dwi_shell_tolerance @@ -276,8 
+438,9 @@ process NORMALIZE { } process RESAMPLE_DWI { - label "RESAMPLE_DWI" - cpus 3 + cpus 1 + memory { 6.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(mask) @@ -301,8 +464,9 @@ process RESAMPLE_DWI { } process EXTRACT_B0 { - label "EXTRACT_B0" - cpus 3 + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: tuple val(sid), path(dwi), path(bval), path(bvec) @@ -318,4 +482,26 @@ process EXTRACT_B0 { mrthreshold ${sid}__b0_resampled.nii.gz ${sid}__b0_mask_resampled.nii.gz\ --abs 0.00001 -nthreads 1 """ +} + +process DWI_MASK { + cpus 1 + memory { 8.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(dwi), path(bval), path(bvec) + output: + tuple val(sid), path("${sid}__b0_mask.nii.gz"), emit: dwi_mask + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_extract_b0.py $dwi $bval $bvec b0.nii.gz --mean\ + --b0_thr $params.b0_thr --force_b0_threshold + mrthreshold b0.nii.gz ${sid}__b0_mask.nii.gz\ + --abs 0.00001 -nthreads 1 + """ } \ No newline at end of file diff --git a/modules/tracking/processes/registration_processes.nf b/modules/tracking/processes/registration_processes.nf index e7ccf08..b68bf10 100644 --- a/modules/tracking/processes/registration_processes.nf +++ b/modules/tracking/processes/registration_processes.nf @@ -2,79 +2,108 @@ nextflow.enable.dsl=2 -process REGISTER_ANAT { - label "REGISTER_ANAT" +process REGISTER_T2 { cpus params.processes_registration + memory { 16.GB * task.attempt } + time { 4.hour * task.attempt } input: - tuple val(sid), path(dwi), path(bval), path(t2w), path(brain_mask), path(wm_mask), - path(fa) + tuple val(sid), path(md), path(t2w), path(wm_mask) output: - tuple val(sid), path("${sid}__pwd_avg.nii.gz"), emit: pwd_avg tuple val(sid), path("${sid}__t2w_warped.nii.gz"), - path("${sid}__brain_mask_warped.nii.gz"), 
path("${sid}__wm_mask_warped.nii.gz"), emit: warped_anat tuple val(sid), path("output0GenericAffine.mat"), - path("synoutput0Warp.nii.gz"), - path("maskoutput0Warp.nii.gz"), emit: transfos + path("output1Warp.nii.gz"), emit: transfos tuple val(sid), - path("outputInverseWarped.nii.gz"), - path("synoutput0InverseWarp.nii.gz"), - path("maskoutput0InverseWarp.nii.gz"), emit: inverse_transfo + path("outputInverseWarped.nii.gz"), emit: inverse_transfo + when: + params.infant_config script: - //** For some reason, mapping of the masks isn't as good as the t2w, performing a final SyN registration **// - //** to fit the brain mask properly. **// + // ** Registration from t2w to diff space in infant returns better result when using the MD map due ** // + // ** to similar intensities (white = CSF in both volumes). See: Uus A, Pietsch M, Grigorescu I, ** // + // ** Christiaens D, Tournier JD, Grande LC, Hutter J, Edwards D, Hajnal J, Deprez M. Multi-channel ** // + // ** Registration for Diffusion MRI: Longitudinal Analysis for the Neonatal Brain. Biomedical Image ** // + // ** Registration. 2020 May 13;12120:111–21. doi: 10.1007/978-3-030-50120-4_11. PMCID: PMC7279935. 
** // + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export ANTS_RANDOM_SEED=1234 + antsRegistration --dimensionality 3 --float 0 \ + --collapse-output-transforms 1 \ + --output [ output,outputWarped.nii.gz,outputInverseWarped.nii.gz ] \ + --interpolation Linear --use-histogram-matching 0 \ + --winsorize-image-intensities [ 0.005,0.995 ] \ + --initial-moving-transform [ $md,$t2w,1 ] \ + --transform Rigid[ 0.1 ] \ + --metric MI[ $md,$t2w,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform Affine[ 0.1 ] --metric MI[ $md,$t2w,1,32,Regular,0.25 ] \ + --convergence [ 1000x500x250x100,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox \ + --transform SyN[ 0.1,3,0 ] \ + --metric CC[ $md,$t2w,1,4 ] \ + --convergence [ 200x150x200x200,1e-6,10 ] \ + --shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox + mv outputWarped.nii.gz ${sid}__t2w_warped.nii.gz + antsApplyTransforms -d 3 -i $wm_mask -r ${sid}__t2w_warped.nii.gz \ + -o ${sid}__wm_mask_warped.nii.gz -n NearestNeighbor \ + -t output1Warp.nii.gz output0GenericAffine.mat -u int + """ +} + +process REGISTER_T1 { + cpus params.processes_registration + memory { 24.GB * task.attempt } + time { 6.hour * task.attempt } + + input: + tuple val(sid), path(fa), path(t1), path(t1_mask), path(b0) + output: + tuple val(sid), path("${sid}__t1_warped.nii.gz"), emit: t1_warped + tuple val(sid), path("${sid}__output0GenericAffine.mat"), + path("${sid}__output1Warp.nii.gz"), emit: transfos + tuple val(sid), path("${sid}__output1InverseWarp.nii.gz"), emit: inverse_transfo + tuple val(sid), path("${sid}__t1_mask_warped.nii.gz"), emit: t1_mask_warped + when: + !params.infant_config + + script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 export ANTS_RANDOM_SEED=1234 - 
scil_compute_powder_average.py $dwi $bval ${sid}__pwd_avg.nii.gz\ - --b0_thr $params.b0_thr -f antsRegistration --dimensionality 3 --float 0\ --output [output,outputWarped.nii.gz,outputInverseWarped.nii.gz]\ --interpolation Linear --use-histogram-matching 0\ --winsorize-image-intensities [0.005,0.995]\ - --initial-moving-transform [${sid}__pwd_avg.nii.gz,$t2w,1]\ + --initial-moving-transform [$b0,$t1,1]\ --transform Rigid['0.2']\ - --metric MI[${sid}__pwd_avg.nii.gz,$t2w,1,32,Regular,0.25]\ + --metric MI[$b0,$t1,1,32,Regular,0.25]\ --convergence [500x250x125x50,1e-6,10] --shrink-factors 8x4x2x1\ --smoothing-sigmas 3x2x1x0\ --transform Affine['0.2']\ - --metric MI[${sid}__pwd_avg.nii.gz,$t2w,1,32,Regular,0.25]\ + --metric MI[$b0,$t1,1,32,Regular,0.25]\ --convergence [500x250x125x50,1e-6,10] --shrink-factors 8x4x2x1\ --smoothing-sigmas 3x2x1x0\ - --verbose 1 - mv outputWarped.nii.gz ${sid}__t2w_affine_warped.nii.gz - antsRegistration --dimensionality 3 --float 0\ - --output [synoutput,synoutputWarped.nii.gz,synoutputInverseWarped.nii.gz]\ - --interpolation Linear --use-histogram-matching 0\ - --winsorize-image-intensities [0.005,0.995]\ - --transform SyN[0.1,3,0]\ - --metric MI[${sid}__pwd_avg.nii.gz,$t2w,1,32]\ - --metric CC[$fa,$t2w,1,4]\ - --convergence [200x150x200,1e-6,10] --shrink-factors 4x2x1\ - --smoothing-sigmas 3x2x1\ - --verbose 1 - mv synoutputWarped.nii.gz ${sid}__t2w_warped.nii.gz - antsApplyTransforms -d 3 -i $brain_mask -r ${sid}__pwd_avg.nii.gz\ - -o ${sid}__brain_mask_initial_warp.nii.gz -n NearestNeighbor\ - -t synoutput0Warp.nii.gz output0GenericAffine.mat -u int - antsRegistration -d 3 --float 0\ - --output [maskoutput,maskoutputWarped.nii.gz,maskoutputInverseWarped.nii.gz]\ - --interpolation NearestNeighbor --use-histogram-matching 0\ - --winsorize-image-intensities [0.005,0.995]\ --transform SyN[0.1,3,0]\ - --metric MI[${sid}__pwd_avg.nii.gz,${sid}__brain_mask_initial_warp.nii.gz]\ - --convergence [500x250x125,1e-6,10] --shrink-factors 4x2x1\ 
- --smoothing-sigmas 3x2x1\ - --verbose 1 - mv maskoutputWarped.nii.gz ${sid}__brain_mask_warped.nii.gz - antsApplyTransforms -d 3 -i $wm_mask -r ${sid}__pwd_avg.nii.gz\ - -o ${sid}__wm_mask_warped.nii.gz -n NearestNeighbor\ - -t maskoutput0Warp.nii.gz synoutput0Warp.nii.gz output0GenericAffine.mat -u int + --metric MI[$b0,$t1,1,32]\ + --metric CC[$fa,$t1,1,4]\ + --convergence [50x25x10,1e-6,10] --shrink-factors 4x2x1\ + --smoothing-sigmas 3x2x1 + mv outputWarped.nii.gz ${sid}__t1_warped.nii.gz + mv output0GenericAffine.mat ${sid}__output0GenericAffine.mat + mv output1InverseWarp.nii.gz ${sid}__output1InverseWarp.nii.gz + mv output1Warp.nii.gz ${sid}__output1Warp.nii.gz + antsApplyTransforms -d 3 -i $t1_mask -r ${sid}__t1_warped.nii.gz\ + -o ${sid}__t1_mask_warped.nii.gz -n NearestNeighbor\ + -t ${sid}__output1Warp.nii.gz ${sid}__output0GenericAffine.mat + scil_image_math.py convert ${sid}__t1_mask_warped.nii.gz ${sid}__t1_mask_warped.nii.gz\ + --data_type uint8 -f """ -} \ No newline at end of file +} diff --git a/modules/tracking/processes/tracking_processes.nf b/modules/tracking/processes/tracking_processes.nf index 1ead6bc..9c5d996 100644 --- a/modules/tracking/processes/tracking_processes.nf +++ b/modules/tracking/processes/tracking_processes.nf @@ -2,62 +2,257 @@ nextflow.enable.dsl=2 +process SEGMENT_TISSUES { + cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(anat) + output: + tuple val(sid), path("${sid}__map_wm.nii.gz"), path("${sid}__map_gm.nii.gz"), + path("${sid}__map_csf.nii.gz"), emit: maps + tuple val(sid), path("${sid}__mask_wm.nii.gz"), path("${sid}__mask_gm.nii.gz"), + path("${sid}__mask_csf.nii.gz"), emit: masks + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + fast -t 1 -n $params.number_of_tissues\ + -H 0.1 -I 4 -l 20.0 -g -o anat.nii.gz $anat + scil_image_math.py convert anat_seg_2.nii.gz 
${sid}__mask_wm.nii.gz --data_type uint8 + scil_image_math.py convert anat_seg_1.nii.gz ${sid}__mask_gm.nii.gz --data_type uint8 + scil_image_math.py convert anat_seg_0.nii.gz ${sid}__mask_csf.nii.gz --data_type uint8 + mv anat_pve_2.nii.gz ${sid}__map_wm.nii.gz + mv anat_pve_1.nii.gz ${sid}__map_gm.nii.gz + mv anat_pve_0.nii.gz ${sid}__map_csf.nii.gz + """ +} + +process ATROPOS_SEG { + cpus 1 + memory { 8.GB * task.attempt } + time { 2.hour * task.attempt } + + input: + tuple val(sid), path(anat), path(mask) + output: + tuple val(sid), path("${sid}__map_wm.nii.gz"), emit: wm_map + tuple val(sid), path("${sid}__map_gm.nii.gz"), emit: gm_map + tuple val(sid), path("${sid}__map_csf.nii.gz"), emit: csf_map + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + antsAtroposN4.sh -d 3 -a $anat -x $mask -c 3 \ + -o ${sid}__ -m $params.atropos_m -n $params.atropos_n \ + -b $params.atropos_formulation + mv ${sid}__SegmentationPosteriors3.nii.gz ${sid}__map_wm.nii.gz + mv ${sid}__SegmentationPosteriors2.nii.gz ${sid}__map_gm.nii.gz + mv ${sid}__SegmentationPosteriors1.nii.gz ${sid}__map_csf.nii.gz + """ +} + process GENERATE_MASKS { - label "GENERATE_MASKS" cpus 1 + memory { 4.GB * task.attempt } + time { 1.hour * task.attempt } input: - tuple val(sid), path(brain_mask), path(wm_mask), path(fa) + tuple val(sid), path(wm_mask), path(fa) output: - tuple val(sid), path("${sid}__wm_mask_final.nii.gz"), - path("${sid}__brain_mask.nii.gz"), emit: masks + tuple val(sid), path("${sid}__seeding_mask.nii.gz"), + path("${sid}__tracking_mask.nii.gz"), emit: masks + tuple val(sid), path("${sid}__fa_mask.nii.gz") script: """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - mrthreshold $fa ${sid}__fa_mask.nii.gz -abs $params.fa_seeding_mask_thr -nthreads 1 -force + bet2 $fa fa_bet -m -f 0.16 + scil_image_math.py erosion fa_bet_mask.nii.gz $params.erosion 
fa_bet_mask.nii.gz -f + mrcalc fa_bet.nii.gz fa_bet_mask.nii.gz -mult fa_eroded.nii.gz + mrthreshold fa_eroded.nii.gz ${sid}__fa_mask.nii.gz -abs $params.local_fa_seeding_mask_thr -nthreads 1 -force scil_image_math.py union ${sid}__fa_mask.nii.gz $wm_mask\ - wm_mask_temp.nii.gz --data_type uint8 -f - scil_image_math.py intersection wm_mask_temp.nii.gz $brain_mask\ - ${sid}__wm_mask_final.nii.gz --data_type uint8 -f - scil_image_math.py convert $brain_mask ${sid}__brain_mask.nii.gz\ - --data_type uint8 -f + ${sid}__seeding_mask.nii.gz --data_type uint8 -f + cp ${sid}__seeding_mask.nii.gz ${sid}__tracking_mask.nii.gz """ } +process LOCAL_TRACKING_MASK { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(wm), path(fa) + output: + tuple val(sid), path("${sid}__local_tracking_mask.nii.gz"), emit: tracking_mask + when: + params.run_local_tracking + + script: + if (params.local_tracking_mask_type == "wm") + """ + mv $wm ${sid}__local_tracking_mask.nii.gz + """ + else if (params.local_tracking_mask_type == "fa") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + mrcalc $fa $params.local_fa_tracking_mask_thr -ge ${sid}__local_tracking_mask.nii.gz\ + -datatype uint8 + """ +} + +process LOCAL_SEEDING_MASK { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(wm), path(fa) + output: + tuple val(sid), path("${sid}__local_seeding_mask.nii.gz"), emit: seeding_mask + when: + params.run_local_tracking + + script: + if (params.local_seeding_mask_type == "wm") + """ + mv $wm ${sid}__local_seeding_mask.nii.gz + """ + else if (params.local_seeding_mask_type == "fa") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + mrcalc $fa $params.local_fa_seeding_mask_thr -ge ${sid}__local_seeding_mask.nii.gz\ + -datatype uint8 + """ +} + process 
LOCAL_TRACKING { - label "TRACKING" cpus 2 + memory { 16.GB * task.attempt } + time { 12.hour * task.attempt } input: - tuple val(sid), path(fodf), path(wm_mask), path(brain_mask) + tuple val(sid), path(fodf), path(seeding_mask), path(tracking_mask) output: tuple val(sid), path("${sid}__local_tracking.trk"), emit: tractogram + when: + params.run_local_tracking + script: - if (params.use_brain_mask_as_tracking_mask) + compress = params.local_compress_streamlines ? '--compress ' + params.local_compress_value : '' """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 export OPENBLAS_NUM_THREADS=1 - scil_compute_local_tracking.py $fodf $wm_mask $brain_mask\ - tmp.trk --algo $params.algo --$params.seeding $params.nb_seeds\ - --seed $params.tracking_seed --step $params.step_size --theta $params.theta\ - --sfthres $params.sfthres --min_length $params.min_len\ - --max_length $params.max_len --sh_basis $params.sh_fitting_basis\ - --compress $params.compress_value + scil_compute_local_tracking.py $fodf $seeding_mask $tracking_mask\ + tmp.trk --algo $params.local_algo --$params.local_seeding $params.local_nbr_seeds\ + --seed $params.local_tracking_seed --step $params.local_step_size --theta $params.local_theta\ + --sfthres $params.local_sfthres --min_length $params.local_min_len\ + --max_length $params.local_max_len $compress --sh_basis $params.basis scil_remove_invalid_streamlines.py tmp.trk\ ${sid}__local_tracking.trk --remove_single_point """ - else +} + +process PFT_SEEDING_MASK { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(wm), path(fa), path(interface_mask) + output: + tuple val(sid), path("${sid}__pft_seeding_mask.nii.gz"), emit: seeding_mask + when: + params.run_pft_tracking + + script: + if (params.pft_seeding_mask_type == "wm") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_image_math.py union $wm 
$interface_mask ${sid}__pft_seeding_mask.nii.gz\ + --data_type uint8 + """ + else if (params.pft_seeding_mask_type == "interface") + """ + mv $interface_mask ${sid}__pft_seeding_mask.nii.gz + """ + else if (params.pft_seeding_mask_type == "fa") + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + mrcalc $fa $params.pft_fa_seeding_mask_thr -ge ${sid}__pft_seeding_mask.nii.gz\ + -datatype uint8 + """ +} + +process PFT_TRACKING_MASK { + cpus 1 + memory { 2.GB * task.attempt } + time { 1.hour * task.attempt } + + input: + tuple val(sid), path(wm), path(gm), path(csf) + output: + tuple val(sid), path("${sid}__map_include.nii.gz"), path("${sid}__map_exclude.nii.gz"), emit: tracking_maps + tuple val(sid), path("${sid}__interface.nii.gz"), emit: interface_map + when: + params.run_pft_tracking + + script: + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_maps_for_particle_filter_tracking.py $wm $gm $csf\ + --include ${sid}__map_include.nii.gz\ + --exclude ${sid}__map_exclude.nii.gz\ + --interface ${sid}__interface.nii.gz -f + """ +} + +process PFT_TRACKING { + cpus 2 + memory { 31.GB * task.attempt } + time { 24.hour * task.attempt } + + input: + tuple val(sid), path(fodf), path(include), path(exclude), path(seed) + + output: + tuple val(sid), path("${sid}__pft_tracking.trk"), emit: tractogram + when: + params.run_pft_tracking + + script: + compress = params.pft_compress_streamlines ? 
'--compress ' + params.pft_compress_value : '' """ - scil_compute_local_tracking.py $fodf $wm_mask $wm_mask\ - tmp.trk --algo $params.algo --$params.seeding $params.nb_seeds\ - --seed $params.tracking_seed --step $params.step_size --theta $params.theta\ - --sfthres $params.sfthres --min_length $params.min_len\ - --max_length $params.max_len --sh_basis $params.sh_fitting_basis\ - --compress $params.compress_value + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + scil_compute_pft.py $fodf $seed $include $exclude\ + tmp.trk\ + --algo $params.pft_algo --$params.pft_seeding $params.pft_nbr_seeds\ + --seed $params.pft_random_seed --step $params.pft_step_size --theta $params.pft_theta\ + --sfthres $params.pft_sfthres --sfthres_init $params.pft_sfthres_init\ + --min_length $params.pft_min_len --max_length $params.pft_max_len\ + --particles $params.pft_particles --back $params.pft_back\ + --forward $params.pft_front $compress --sh_basis $params.basis scil_remove_invalid_streamlines.py tmp.trk\ - ${sid}__local_tracking.trk --remove_single_point + ${sid}__pft_tracking.trk --remove_single_point """ -} \ No newline at end of file +} diff --git a/modules/tracking/workflows/preprocessing.nf b/modules/tracking/workflows/preprocessing.nf index 26e3c5b..b418179 100644 --- a/modules/tracking/workflows/preprocessing.nf +++ b/modules/tracking/workflows/preprocessing.nf @@ -4,92 +4,166 @@ nextflow.enable.dsl=2 include { BET_DWI; + SYNTHSTRIP; BET_T2; + BET_T1; DENOISING; + DENOISE_T1; TOPUP; EDDY_TOPUP; N4; + N4_T1; CROP_DWI; CROP_ANAT; RESAMPLE_ANAT; + RESAMPLE_T1; NORMALIZE; RESAMPLE_DWI; - EXTRACT_B0 + EXTRACT_B0; + DWI_MASK } from '../processes/preprocess.nf' -workflow PREPROCESSING { +workflow DWI { take: dwi_channel rev_channel - anat_channel - brain_mask_channel - wm_mask_channel main: - BET_DWI(dwi_channel) - DENOISING(BET_DWI.out) + // ** Denoising ** // + denoising_channel = dwi_channel + .map{[it[0], it[1]]} + 
DENOISING(denoising_channel) + // ** Topup ** // topup_channel = dwi_channel .map{[it[0], it[2], it[3]]} - .combine(DENOISING.out, by: 0) + .combine(DENOISING.out.denoised_dwi, by: 0) .combine(rev_channel, by: 0) .map{ sid, bvals, bvecs, dwi, rev -> tuple(sid, dwi, bvals, bvecs, rev)} - TOPUP(topup_channel) + // ** Eddy ** // eddy_channel = dwi_channel .map{[it[0], it[2], it[3]]} .combine(DENOISING.out, by: 0) .combine(TOPUP.out.topup_result, by: 0) - .map{ sid, bvals, bvecs, dwi, corrected_b0s, field, movpar -> tuple(sid, dwi, bvals, bvecs, corrected_b0s, field, movpar)} - + .map{ sid, bvals, bvecs, dwi, corrected_b0s, field, movpar -> tuple(sid, dwi, bvals, bvecs, corrected_b0s, + field, movpar)} EDDY_TOPUP(eddy_channel) - n4_channel = EDDY_TOPUP.out.dwi_bval_bvec - .combine(EDDY_TOPUP.out.b0_mask, by: 0) + // ** Bet ** // + if ( params.run_synthbet ) { + if ( params.weights ) { + weights_ch = Channel.fromPath(params.weights) + } else { + weights_ch = [] + } + synthstrip_channel = EDDY_TOPUP.out.dwi_bval_bvec + .map{ [it[0], it[1], it[2]] } + .combine(weights_ch) + .map{ it[0..2] + [it[3] ?: []] } + SYNTHSTRIP(synthstrip_channel) + dwi = SYNTHSTRIP.out.bet_dwi + mask = SYNTHSTRIP.out.bet_mask + } else { + BET_DWI(EDDY_TOPUP.out.dwi_bval_bvec) + dwi = BET_DWI.out.bet_dwi + mask = BET_DWI.out.bet_mask + } + // ** N4 ** // + n4_channel = dwi.combine(EDDY_TOPUP.out.dwi_bval_bvec, by: 0) + .map{ [it[0], it[1], it[3], it[4]] } + .combine(mask, by: 0) N4(n4_channel) + // ** Crop ** // + if ( params.skip_dwi_preprocessing ) { + DWI_MASK(dwi_channel) + crop_channel = dwi_channel.map{ [it[0], it[1]] } + .combine(DWI_MASK.out.dwi_mask, by: 0) + CROP_DWI(crop_channel) + } else { dwi_crop_channel = N4.out - .join(EDDY_TOPUP.out.b0_mask) - + .combine(EDDY_TOPUP.out.b0_mask, by: 0) CROP_DWI(dwi_crop_channel) - - anat_crop_channel = anat_channel - - if (params.run_bet_t2w) { - BET_T2(anat_channel) - anat_crop_channel = BET_T2.out.bet_t2 } - anat_crop_channel = anat_crop_channel 
- .combine(brain_mask_channel, by: 0) - .combine(wm_mask_channel, by:0) - - CROP_ANAT(anat_crop_channel) - RESAMPLE_ANAT(CROP_ANAT.out.cropped_t2w_and_mask) - + // ** Normalization ** // normalize_channel = CROP_DWI.out.dwi .combine(EDDY_TOPUP.out.dwi_bval_bvec.map{[it[0], it[2], it[3]]}, by: 0) .combine(CROP_DWI.out.mask, by: 0) - NORMALIZE(normalize_channel) + // ** Resampling ** // + if ( params.skip_dwi_preprocessing ) { + resample_channel = CROP_DWI.out.dwi + .combine(CROP_DWI.out.mask, by: 0) + RESAMPLE_DWI(resample_channel) + } else { resample_dwi_channel = NORMALIZE.out.dwi_normalized .combine(CROP_DWI.out.mask, by: 0) - RESAMPLE_DWI(resample_dwi_channel) + } + // ** Extracting b0 ** // + if ( params.skip_dwi_preprocessing ) { + extract_b0_channel = RESAMPLE_DWI.out.dwi_resampled + .combine(dwi_channel.map{ [it[0], it[2], it[3]] }, by: 0) + EXTRACT_B0(extract_b0_channel) + } else { extract_b0_channel = EDDY_TOPUP.out.dwi_bval_bvec .map{[it[0], it[2], it[3]]} .combine(RESAMPLE_DWI.out.dwi_resampled, by: 0) .map{ sid, bval, bvec, dwi -> tuple(sid, dwi, bval, bvec)} - EXTRACT_B0(extract_b0_channel) + } emit: dwi_bval_bvec = extract_b0_channel b0_and_mask = EXTRACT_B0.out.b0_and_mask - t2w_and_mask = RESAMPLE_ANAT.out.t2w_and_mask -} \ No newline at end of file +} + +workflow ANAT { + take: + anat_channel + + main: + if ( ! 
params.infant_config ) { + // ** Denoising ** // + DENOISE_T1(anat_channel) + + // ** N4 ** // + N4_T1(DENOISE_T1.out.t1_denoised) + } + + // ** Resampling ** // + if ( params.infant_config ) { + // ** Resample if -profile infant ** // + RESAMPLE_ANAT(anat_channel) + } else { + RESAMPLE_T1(N4_T1.out.t1_n4) + } + + // ** Bet ** // + if ( params.infant_config ) { + // ** Bet if -profile infant ** // + BET_T2(RESAMPLE_ANAT.out.t2w_and_mask.map{ [it[0], it[1]] }) + } else { + BET_T1(RESAMPLE_T1.out.t1_resampled) + } + + // ** Crop ** // + if ( params.infant_config ) { + crop_channel = BET_T2.out.t2_bet + .combine(RESAMPLE_ANAT.out.t2w_and_mask.map{ [it[0], it[2]] }, by: 0) + CROP_ANAT(crop_channel) + } + else { + CROP_ANAT(BET_T1.out.t1_and_mask_bet) + } + + emit: + anat_and_mask = CROP_ANAT.out.cropped_anat_and_mask +} diff --git a/modules/tracking/workflows/registration.nf b/modules/tracking/workflows/registration.nf index 79c0177..1e060ec 100644 --- a/modules/tracking/workflows/registration.nf +++ b/modules/tracking/workflows/registration.nf @@ -3,24 +3,40 @@ nextflow.enable.dsl=2 include { - REGISTER_ANAT + REGISTER_T1; + REGISTER_T2 } from '../processes/registration_processes.nf' workflow REGISTRATION { take: - dwi_channel - t2w_and_mask - fa_channel + fa_md_channel + anat_and_mask + b0_channel main: + + // ** If -profile infant is selected, will do registration from t2w on MD. ** // + t2_reg_channel = fa_md_channel + .map{ [it[0], it[2]] } + .combine(anat_and_mask, by: 0) + REGISTER_T2(t2_reg_channel) - register_channel = dwi_channel - .map{[it[0], it[1], it[2]]} - .combine(t2w_and_mask, by: 0) - .combine(fa_channel, by: 0) + // ** Classical registration from t1w to b0/FA. ** // + t1_reg_channel = fa_md_channel + .map{ [it[0], it[1]] } + .combine(anat_and_mask, by: 0) + .combine(b0_channel, by: 0) + REGISTER_T1(t1_reg_channel) - REGISTER_ANAT(register_channel) + // ** Organising channel for output. 
** // + if ( params.infant_config ) { + warped_anat = REGISTER_T2.out.warped_anat + transfos = REGISTER_T2.out.transfos + } else { + warped_anat = REGISTER_T1.out.t1_warped + transfos = REGISTER_T1.out.transfos + } emit: - warped_anat = REGISTER_ANAT.out.warped_anat - transfos = REGISTER_ANAT.out.transfos + warped_anat = warped_anat + transfos = transfos } \ No newline at end of file diff --git a/modules/tracking/workflows/tracking.nf b/modules/tracking/workflows/tracking.nf index 890564e..9dfd2c2 100644 --- a/modules/tracking/workflows/tracking.nf +++ b/modules/tracking/workflows/tracking.nf @@ -3,25 +3,75 @@ nextflow.enable.dsl=2 include { + SEGMENT_TISSUES; GENERATE_MASKS; - LOCAL_TRACKING + LOCAL_TRACKING_MASK; + LOCAL_SEEDING_MASK; + LOCAL_TRACKING; + PFT_SEEDING_MASK; + PFT_TRACKING_MASK; + PFT_TRACKING } from '../processes/tracking_processes.nf' workflow TRACKING { take: - brain_wm_mask_channel + anat_and_mask_channel fodf_channel fa_channel main: - masks_channel = brain_wm_mask_channel - .combine(fa_channel, by: 0) - GENERATE_MASKS(masks_channel) + if ( params.infant_config ) { + // ** Creating masks channel and generating seeding and tracking masks. ** // + masks_channel = anat_and_mask_channel + .map{ [it[0], it[2]] } + .combine(fa_channel, by: 0) + GENERATE_MASKS(masks_channel) + + // ** Performing local tracking. 
** // + tracking_channel = fodf_channel + .combine(GENERATE_MASKS.out.masks, by: 0) + LOCAL_TRACKING(tracking_channel) + } else { + // ** Segmenting tissues using fslfast ** // + anat_channel = anat_and_mask_channel.map{ [it[0], it[1]] } + SEGMENT_TISSUES(anat_channel) + + // ** If --run_local_tracking is set, this will be run ** // + // ** Generating seeding and tracking mask ** // + local_masks_channel = SEGMENT_TISSUES.out.masks + .map{ [it[0], it[1]] } + .combine(fa_channel, by: 0) + LOCAL_TRACKING_MASK(local_masks_channel) + LOCAL_SEEDING_MASK(local_masks_channel) + + // ** Performing local tracking ** // + local_tracking_channel = fodf_channel + .combine(LOCAL_SEEDING_MASK.out.seeding_mask, by: 0) + .combine(LOCAL_TRACKING_MASK.out.tracking_mask, by: 0) + LOCAL_TRACKING(local_tracking_channel) + + // ** If --run_pft_tracking is set, this will be run ** // + // ** Creating PFT masks. ** // + PFT_TRACKING_MASK(SEGMENT_TISSUES.out.maps) + pft_masks_channel = SEGMENT_TISSUES.out.masks + .map{ [it[0], it[1]] } + .combine(fa_channel, by: 0) + .combine(PFT_TRACKING_MASK.out.interface_map, by: 0) + PFT_SEEDING_MASK(pft_masks_channel) + + // ** Performing PFT tracking ** // + pft_tracking_channel = fodf_channel + .combine(PFT_TRACKING_MASK.out.tracking_maps, by: 0) + .combine(PFT_SEEDING_MASK.out.seeding_mask, by: 0) + PFT_TRACKING(pft_tracking_channel) + } + + if ( params.run_local_tracking ) { + out_channel = LOCAL_TRACKING.out.tractogram + } else { + out_channel = PFT_TRACKING.out.tractogram + } - tracking_channel = fodf_channel - .combine(GENERATE_MASKS.out.masks, by: 0) - - LOCAL_TRACKING(tracking_channel) emit: - trk = LOCAL_TRACKING.out.tractogram + trk = out_channel } \ No newline at end of file diff --git a/nextflow.config b/nextflow.config index 381d70a..02be61d 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,5 +1,5 @@ process { - publishDir = {"./Results_Infant_Tracking/$sid/$task.process"} + publishDir = 
{"${params.output_dir}/$sid/${task.process.replaceAll(':', '-')}"} scratch = true errorStrategy = { task.attempt <= 3 ? 'retry' : 'ignore' } maxRetries = 3 @@ -17,17 +17,28 @@ params { // ** TRACKING PARAMS ** // - // Global Options + //** Global Options **// b0_thr = 10 dwi_shell_tolerance = 20 + skip_dwi_preprocessing = false + template_t1 = "/human-data/mni_152_sym_09c/t1" - // BET DWI Options - initial_bet_f = 0.5 - final_bet_f = 0.35 + //** BET DWI Options **// + initial_bet_f = 0.16 + final_bet_f = 0.16 - // BET T2 Options - run_bet_t2w = false - bet_t2w_f = 0.16 + //** SYNTHSTRIP Options **// + run_synthbet = false + shells = "0 1500" + shell_thr = 50 + gpu = false + border = false + nocsf = false + weights = false + + //** BET ANAT Options **// + run_bet_anat = false + bet_anat_f = 0.16 // EDDY and TOPUP Options topup_config = "b02b0.cnf" @@ -39,11 +50,11 @@ params { use_slice_drop_correction = true // NORMALIZE Options - fa_mask_threshold = 0.10 + fa_mask_threshold = 0.4 // RESAMPLE_ANAT Options - t2w_resolution = 1 - t2w_interpolation = "lin" + anat_resolution = 1 + anat_interpolation = "lin" mask_interpolation = "nn" // RESAMPLE_DWI Options @@ -52,6 +63,7 @@ params { mask_dwi_interpolation = "nn" // EXTRACT_DTI_SHELLS Options + dti_shells = false max_dti_shell_value = 1200 // SH_FITTING_SHELL Options @@ -60,9 +72,11 @@ params { sh_fitting_basis = "descoteaux07" // FODF Options + fodf_shells = false min_fodf_shell_value = 700 + fodf_metrics_a_factor = 2.0 max_fa_in_ventricle = 0.1 - min_md_in_ventricle = 0.00185 + min_md_in_ventricle = 0.003 relative_threshold = 0.1 basis = "descoteaux07" sh_order = 8 @@ -74,25 +88,60 @@ params { min_nvox = 300 roi_radius = 20 set_frf = true - manual_frf = "12,7,7" - - // Seeding and tracking Options - fa_seeding_mask_thr = 0.1 - algo = "prob" - nb_seeds = 10 - seeding = "npv" - step_size = 0.5 - theta = 20 - sfthres = 0.1 - min_len = 10 - max_len = 200 - tracking_seed = 0 - use_brain_mask_as_tracking_mask = false - 
compress_value = 0.2 + manual_frf = "15,4,4" + + // ** Segment Tissues Options ** // + number_of_tissues = 3 + + //** PFT Seeding and Tracking Options **// + run_pft_tracking = true + pft_compress_streamlines = true + + pft_seeding_mask_type = "wm" + pft_fa_seeding_mask_thr = 0.1 + + pft_algo = "prob" + pft_nbr_seeds = 10 + pft_seeding = "npv" + pft_step_size = 0.5 + pft_theta = 20 + pft_sfthres = 0.1 + pft_sfthres_init = 0.5 + pft_min_len = 20 + pft_max_len = 200 + pft_particles = 15 + pft_back = 2 + pft_front = 1 + pft_compress_value = 0.2 + pft_random_seed = 0 + + //** Local Seeding and Tracking Options **// + run_local_tracking = false + local_compress_streamlines = true + + local_fa_tracking_mask_thr = 0.1 + local_tracking_mask_type = "wm" + local_fa_seeding_mask_thr = 0.1 + local_seeding_mask_type = "wm" + + local_algo = "prob" + local_nbr_seeds = 10 + local_seeding = "npv" + local_step_size = 0.5 + local_theta = 20 + local_sfthres = 0.1 + local_sfthres_init = 0.5 + local_min_len = 20 + local_max_len = 200 + local_tracking_seed = 0 + local_compress_value = 0.2 + local_erosion = 0 // Processes per tasks processes_denoise_dwi = 4 - processes_eddy = 1 + processes_denoise_t1 = 4 + processes_bet_t1 = 4 + processes_eddy = 4 processes_registration = 4 processes_fodf = 4 @@ -102,35 +151,81 @@ params { no_pruning = false no_remove_loops = false no_remove_outliers = false - min_length = 10 + min_length = 20 max_length = 200 loop_max_angle = 330 - outlier_threshold = 0.4 + outlier_threshold = 0.5 - // COMMIT Options + //** COMPUTE_PRIORS Options **// + compute_priors = false + fa_min_priors = 0.7 + fa_max_priors = 0.1 + md_min_priors = 0.002 + roi_radius_priors = 20 + multishell = false + + //** COMMIT Options **// + run_commit = true + use_commit2 = true + use_both_commit = false + commit_on_trk = false + b_thr = 50 nbr_dir = 500 + ball_stick = true para_diff = "1.7E-3" + perp_diff = "0.51E-3" iso_diff = "2.0E-3" // Processes per tasks processes_commit = 8 
processes_afd_fixel = 4 processes_connectivity = 4 - params.commit_memory_limit = '6.GB' - - // Output Directory - output_dir = false + params.commit_memory_limit = '24.GB' // Profiles Options + run_freesurfer = false run_tracking = false run_connectomics = false + infant_config = false + template_config = false + priors = false - Mean_FRF_Publish_Dir = "./Results_Infant_Tracking/Mean_FRF" + // Template Options // + references = "./references/" + + Mean_FRF_Publish_Dir = "./Results_ChildBrainFlow/Mean_FRF" + Pop_Avg_Publish_Dir = "./Results_ChildBrainFlow/Pop_Avg" + + // ** FreeSurfer Options ** // + recon_all = false + recon_surf = true + use_freesurfer_atlas = false + use_brainnetome_atlas = false + use_brainnetome_child_atlas = true + use_glasser_atlas = false + use_schaefer_100_atlas = false + use_schaefer_200_atlas = false + use_schaefer_400_atlas = false + use_lausanne_1_atlas = false + use_lausanne_2_atlas = false + use_lausanne_3_atlas = false + use_lausanne_4_atlas = false + use_lausanne_5_atlas = false + use_dilated_labels = false + nb_threads = 4 + atlas_utils_folder = "/FS_BN_GL_SF_utils/" + compute_FS_BN_GL_SF = false + compute_BN_child = true + compute_lausanne_multiscale = false + compute_lobes = false + + // ** Output Options ** // + output_dir = "./Results_ChildBrainFlow/" } -if(params.output_dir) { - process.publishDir = {"$params.output_dir/$sid/$task.process"} +if ( params.output_dir ) { params.Mean_FRF_Publish_Dir = "${params.output_dir}/Mean_FRF" + params.Pop_Avg_Publish_Dir = "${params.output_dir}/Pop_Avg" } if(params.processes) { @@ -151,7 +246,7 @@ singularity.autoMounts = true profiles { no_symlink { process{ - publishDir = [path: {"./Results_Infant_Tracking/$sid/$task.process"}, mode: 'copy'] + publishDir = [path: {"${params.output_dir}/$sid/${task.process.replaceAll(':', '-')}"}, mode: 'copy'] } } @@ -159,6 +254,27 @@ profiles { process.scratch="/tmp" } + hcp { + process { + executor = 'slurm' + errorStrategy = 'retry' + maxRetries = 1 + 
} + executor { + pollInterval = '180 sec' + queueGlobalStatus = true + queueStatInterval = '3 min' + submitRateLimit = '100/1min' + maxForks = 1000 + queueSize = 1000 + exitReadTimeout = '600 sec' + } + } + + freesurfer { + params.run_freesurfer = true + } + tracking { params.run_tracking = true } @@ -167,4 +283,61 @@ profiles { params.run_connectomics = true } -} \ No newline at end of file + + priors { + params.priors = true + params.run_commit = true + params.compute_priors = true + params.set_frf = false + params.mean_frf = true + } + + infant { + + params.infant_config = true + + //** BET DWI Options **// + params.initial_bet_f = 0.5 + params.final_bet_f = 0.35 + + //** SYNTHSTRIP Options **// + params.run_synthbet = true + + //** NORMALIZE Options **// + params.fa_mask_threshold = 0.10 + + //** FODF Options **// + params.max_fa_in_ventricle = 0.1 + params.min_md_in_ventricle = 0.00185 + + //** FRF Options **// + params.manual_frf = "12,5,5" + + //** LOCAL TRACKING **// + params.run_pft_tracking = false + params.run_local_tracking = true + params.erosion = 6 + params.local_min_len = 15 + params.local_fa_seeding_mask_thr = 0.1 + + // ** DECOMPOSE Options ** // + min_length = 10 + outlier_threshold = 0.4 + + // ** COMMIT Options ** // + params.run_commit = true + params.use_commit2 = true + // params.commit_on_trk = true + params.para_diff = "1.2E-3" + params.iso_diff = "2.0E-3" + + } + + template { + params.template_config = true + + //** BET ANAT Options **// + params.bet_anat_f = 0.1 + + } +}