General-purpose Snakemake workflow for diffusion-based subcortical parcellation

public public 1yr ago 0 bookmarks

Snakemake workflow: diffparc-smk

General-purpose Snakemake workflow for diffusion-based subcortical parcellation

This workflow is customized for the basal forebrain, run on HCP1200 7T diffusion data.

Uses HCP-MMP cortical parcellation.

Code Snippets

13
shell: 'fslmaths {input.lh} -max {input.rh} {output.lh_rh} &> {log}'
23
24
shell:
    'cp {input} {output}'
35
36
shell:
    'c3d {input} -dilate 1 3x3x3vox -o {output}'
56
57
shell:
    'antsApplyTransforms -d 3 --interpolation NearestNeighbor -i {input.seed} -o {output} -r {input.ref}  -t {input.invwarp} &> {log}'
73
74
75
shell:
    'fslmaths {input.dwi} -bin {output.mask} &&'
    'mri_convert {output.mask} -vs {params.seed_resolution} {params.seed_resolution} {params.seed_resolution} {output.mask_res} -rt nearest &> {log}'
90
91
shell:
    'reg_resample -flo {input.targets} -res {output.targets_res} -ref {input.mask_res} -NN 0  &> {log}'
139
140
141
142
143
run:
    f = open(output.target_txt,'w')
    for s in params.target_seg:
        f.write(f'{s}\n')
    f.close()
167
168
169
170
171
shell:
    'mkdir -p {output.probtrack_dir} && singularity exec -e --nv {params.container} '
    'probtrackx2_gpu --samples={params.bedpost_merged}  --mask={input.mask} --seed={input.seed_res} '
    '--targetmasks={input.target_txt} --seedref={input.seed_res} --nsamples={params.nsamples} '
    '--dir={output.probtrack_dir} {params.probtrack_opts} -V 2  &> {log}'
192
193
shell:
    'mkdir -p {output} && ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 parallel  --jobs {threads} antsApplyTransforms -d 3 --interpolation Linear -i {{1}} -o {{2}}  -r {input.ref} -t {input.warp}  &> {log} :::  {params.in_connmap_3d} :::+ {params.out_connmap_3d}' 
209
script: '../scripts/save_connmap_template_npz.py'
220
script: '../scripts/gather_connmap_group.py'
235
script: '../scripts/spectral_clustering.py'
248
249
shell:
    '{input.shell_script} {input.seg} {output.seg} {wildcards.k} &> {log}'
26
27
28
shell: 
    'mkdir -p {params.out_folder} && tar --extract --file={input.tar} {params.file_in_tar} && '
    'mv {params.file_in_tar} {output.filename}'
47
shell: 'FS_LICENSE={params.license} mris_convert {input} {output} &> {log}'
57
shell: 'FS_LICENSE={params.license} mri_convert {input} {output} &> {log}'
69
shell: 'FS_LICENSE={params.license} mri_info {input.t1} --tkr2scanner > {output.tkr2scanner} 2> {log}'
81
shell: 'wb_command -surface-apply-affine {input.surf} {input.tkr2scanner} {output.surf} &> {log}'
94
shell: 'wb_command -surface-average {output.midthickness} -surf {input.white} -surf {input.pial} &> {log}'
111
shell: 'wb_command -surface-resample {input.surf} {input.current_sphere} {input.new_sphere} {params.method} {output.surf} &> {log}'
130
131
132
133
shell: 
    'wb_command -label-resample {input.label} {input.current_sphere} {input.new_sphere}'
    ' {params.method} {output.label}'
    ' -area-surfs {input.current_surf} {input.new_surf} &> {log}'
150
151
152
153
shell:
    'wb_command -label-to-volume-mapping {input.label} {input.surf} {input.vol_ref} {output.label_vol}'
    ' -ribbon-constrained {input.white_surf} {input.pial_surf}'
    ' -greedy &> {log}'
172
173
174
shell:
    'wb_command -label-to-volume-mapping {input.label} {input.surf} {input.vol_ref} {output.label_vol}'
    ' -nearest-vertex {params.nearest_vertex} &> {log}'
15
16
shell:
    'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 parallel  --jobs {threads} antsApplyTransforms -d 3 --interpolation NearestNeighbor -i {{1}} -o {{2}}  -r {input.ref}  -t {input.invwarp} &> {log} :::  {input.cluster_k} :::+ {output.cluster_k}' 
33
34
35
shell:
    'fslmaths {input.dwi} -bin {output.mask} &&'
    'mri_convert {output.mask} -vs {params.seed_resolution} {params.seed_resolution} {params.seed_resolution} {output.mask_res} -rt nearest &> {log}'
 98
 99
100
101
102
103
shell:
    'mkdir -p {output.probtrack_dir} && '
    '{params.extract_seed_cmd} && singularity exec -e --nv {params.container} '
    'probtrackx2_gpu --samples={params.bedpost_merged}  --mask={input.mask} --seed={output.probtrack_dir}/in_seed.nii.gz '
    '--seedref={output.probtrack_dir}/in_seed.nii.gz --nsamples={params.nsamples} '
    '--dir={output.probtrack_dir} {params.probtrack_opts} -V 2  &> {log}'
118
119
shell:
    'fslmerge -t {output.tractmaps_4d} {input.tractmaps} &> {log}'
138
139
shell:
    'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1  antsApplyTransforms -d 3 --interpolation Linear -i {input.tractmap} -o {output.tractmap}  -r {input.ref} -t {input.warp}  &> {log}'
153
154
shell:
    'fslmerge -t {output.tractmaps_4d} {input.tractmaps} &> {log}'
170
171
shell:
    'AverageImages 4 {output} 0 {input}'
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
import numpy as np

# Stack each subject's (nvoxels, ntargets) connectivity matrix into a single
# (nsubjects, nvoxels, ntargets) group array for downstream clustering.

# The first subject's npz supplies the shared mask geometry and matrix shape.
first = np.load(snakemake.input.connmap_npz[0])
affine = first['affine']
mask = first['mask']
nvoxels, ntargets = first['conn'].shape

subject_files = snakemake.input.connmap_npz
conn_group = np.zeros([len(subject_files), nvoxels, ntargets])

for idx, npz_file in enumerate(subject_files):
    conn_group[idx, :, :] = np.load(npz_file)['conn']

# Persist group matrix together with mask and affine so labels can later be
# mapped back into voxel space.
np.savez(snakemake.output.connmap_group_npz,
         conn_group=conn_group, mask=mask, affine=affine)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
import nibabel as nib
import numpy as np

# Build one subject's (nvoxels, ntargets) connectivity matrix: each column is
# the set of in-mask voxel values from one target's tract-density map.
mask_nib = nib.load(snakemake.input.mask)
mask_vol = mask_nib.get_fdata()
mask_indices = mask_vol > 0  # boolean selector for seed voxels

nvoxels = int(np.count_nonzero(mask_indices))
ntargets = len(snakemake.params.connmap_3d)
conn = np.zeros((nvoxels, ntargets))
for i, conn_file in enumerate(snakemake.params.connmap_3d):
    vol = nib.load(conn_file).get_fdata()
    # boolean fancy indexing already yields a 1-D array, so no transpose needed
    conn[:, i] = vol[mask_indices]
# Save matrix plus mask volume and affine so voxel positions can be recovered.
np.savez(snakemake.output.connmap_npz, conn=conn, mask=mask_vol, affine=mask_nib.affine)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
import numpy as np
import nibabel as nib
from sklearn.cluster import SpectralClustering  # hoisted: was re-imported every loop iteration


def save_label_nii(labels, mask, affine, out_nifti):
    """Scatter 1-D cluster labels back into the seed mask and save as NIfTI."""
    labels_vol = np.zeros(mask.shape)
    labels_vol[mask > 0] = labels+1 #add a 1 so label 0 is diff from bgnd
    labels_nib = nib.Nifti1Image(labels_vol, affine)
    nib.save(labels_nib, out_nifti)


data = np.load(snakemake.input.connmap_group_npz)
cluster_range = range(2, snakemake.params.max_k+1)
out_nii_list = snakemake.output

conn_group = data['conn_group']
mask = data['mask']
affine = data['affine']

# Concatenate subjects along the feature axis:
# (nsubjects, nvoxels, ntargets) -> (nvoxels, ntargets*nsubjects)
conn_group_m = np.moveaxis(conn_group, 0, 2)
conn_concat = conn_group_m.reshape(
    [conn_group_m.shape[0], conn_group_m.shape[1]*conn_group_m.shape[2]])

# Run spectral clustering for each k and write one labelled NIfTI per k.
# random_state=0 keeps the discretization step reproducible across runs.
for i, k in enumerate(cluster_range):
    clustering = SpectralClustering(
        n_clusters=k, assign_labels="discretize",
        random_state=0, affinity='cosine').fit(conn_concat)
    print(f'i={i}, k={k},saving {out_nii_list[i]}')
    save_label_nii(clustering.labels_, mask, affine, out_nii_list[i])
ShowHide 28 more snippets with no or duplicated tags.

Login to post a comment if you would like to share your experience with this workflow.

Do you know this workflow well? If so, you can request seller status , and start supporting this workflow.

Free

Created: 1yr ago
Updated: 1yr ago
Maintainers: public
URL: https://github.com/akhanf/diffparc-smk
Name: diffparc-smk
Version: 1
Badge:
workflow icon

Insert copied code into your website to add a link to this workflow.

Downloaded: 0
Copyright: Public Domain
License: None
  • Future updates

Related Workflows

cellranger-snakemake-gke
snakemake workflow to run cellranger on a given bucket using gke.
A Snakemake workflow for running cellranger on a given bucket using Google Kubernetes Engine. The usage of this workflow ...