Snakemake workflow for pre-processing dwi data to re-train using b1000 hcp data

public public 1yr ago 0 bookmarks

SUPERSEDED BY https://github.com/khanlab/hippunfold-train-b500

prep_retraining_dwi_hippunfold

Snakemake workflow for pre-processing dwi data to re-train using b1000 hcp data

Code Snippets

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import json
import sys

import nibabel as nib
import numpy as np

# Average the dwi volumes belonging to one shell (b-value) into a single 3D
# image, using the shell->volume mapping produced by get_shells_from_bvals.py.

# Load the shell->volume index mapping (JSON written by the shells rule).
with open(snakemake.input.shells) as f:
    shells_dict = json.load(f)

# Shell b-value to average over (Snakemake param; may arrive as int or str).
bval = snakemake.params.bval

# Input dwi image (4D, or 3D for a lone b0 acquisition).
dwi_nib = nib.load(snakemake.input.dwi)
print(dwi_nib.shape)

# JSON object keys are always strings, so coerce the b-value before lookup;
# an integer bval would otherwise raise KeyError.
indices = shells_dict['shell_to_vol'][str(bval)]

if len(dwi_nib.shape) == 3 and len(indices) == 1 and indices[0] == 0:
    # 3D volume (e.g. b0 only): take the single volume as-is.
    avg_shell = dwi_nib.get_fdata()[:, :, :]
elif len(dwi_nib.shape) == 4:
    # 4D: select this shell's volumes and average over the volume axis.
    avg_shell = np.mean(dwi_nib.get_fdata()[:, :, :, indices], axis=3)
else:
    # Neither case applies: the indices do not match the image dimensionality.
    print('unable to get map indices to get avg shell')
    # Exit non-zero so Snakemake registers the failure (bare sys.exit()
    # would exit 0 and silently mark the job successful).
    sys.exit(1)

# Save the averaged shell, preserving the source image's affine.
avg_shell_nii = nib.Nifti1Image(avg_shell, affine=dwi_nib.affine)
avg_shell_nii.to_filename(snakemake.output[0])
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
import json

import numpy as np

# Detect the diffusion shells present in a bvals file and write a JSON with:
#   shells:        the shell b-values, rounded to the nearest 100
#   vol_to_shell:  per-volume shell assignment (one entry per dwi volume)
#   shell_to_vol:  per-shell list of volume indices

# Load bvals (one value per dwi volume).
bvals = np.loadtxt(snakemake.input[0])

# Single b-value (e.g. a reverse phase-encode b0): build the mapping directly.
if bvals.size == 1:

    out_dict = dict()
    shells = [np.around(bvals, -2).astype('int').tolist()]
    out_dict['shells'] = shells
    out_dict['vol_to_shell'] = shells
    out_dict['shell_to_vol'] = {str(shells[0]): [0]}  # point to index 0

# Otherwise try to find shells via histogramming.
else:

    # Histogram the bvals with bin counts from 10 to 95.  The occupied bin
    # centers are shell candidates: we want the highest number of bins that
    # does not split up any shell, so that the centers can then be used as
    # the shell b-values.
    candidate_shells = []
    for nbins in np.arange(10, 100, 5):
        counts, bin_edges = np.histogram(bvals, bins=nbins)
        bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2
        candidate_shells.append(bin_centers[np.where(counts > 0)])

    # Number of shells produced by each bin-width choice.
    nshells = [len(s) for s in candidate_shells]

    print('nshells')
    print(nshells)

    # Use the highest bin count that still yields the minimal number of
    # shells.  NOTE the explicit asarray: comparing a plain Python list to a
    # scalar with == is not guaranteed to be elementwise.
    min_nshells = np.min(nshells)
    possible_shells = np.where(np.asarray(nshells) == min_nshells)[0]
    chosen_shells = candidate_shells[possible_shells[-1]]

    # Round shell b-values to the nearest 100.
    chosen_shells = np.around(chosen_shells, -2).astype('int')

    print('chosen_shells')
    print(chosen_shells)

    # Assign each volume to its nearest shell (minimum |bval - shell|),
    # via broadcast-style tiling of both vectors.
    rep_shells = np.tile(chosen_shells, [len(bvals), 1])
    rep_bvals = np.tile(bvals, [len(chosen_shells), 1]).T

    print(rep_shells)
    print(rep_bvals)

    # Absolute difference between each bval and each shell; pick the closest.
    diff = np.abs(rep_bvals - rep_shells)
    shell_ind = chosen_shells[np.argmin(diff, 1)]

    # Invert the mapping: for each shell, the list of volume indices.
    shell_to_vol = dict()
    for shell in chosen_shells.tolist():
        shell_to_vol[shell] = np.where(shell_ind == int(shell))[0].tolist()

    out_dict = dict()
    out_dict['shells'] = chosen_shells.tolist()
    out_dict['vol_to_shell'] = shell_ind.tolist()
    out_dict['shell_to_vol'] = shell_to_vol


# Write shells and their volume indices to JSON.
with open(snakemake.output[0], 'w') as outfile:
    json.dump(out_dict, outfile, indent=4)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
import matplotlib

# Select the non-interactive Agg backend BEFORE pyplot is imported; setting
# it after the pyplot import (as the original did) may have no effect, and
# this script must run headless inside the workflow.
matplotlib.use('Agg')

import matplotlib.pyplot as plt  # noqa: E402
from nilearn import plotting  # noqa: E402

# QC visualization of a segmentation overlaid on its background image:
# an interactive HTML viewer plus a static PNG snapshot.

# Interactive HTML overlay (thresholded, non-symmetric viridis colormap).
html_view = plotting.view_img(stat_map_img=snakemake.input.seg, bg_img=snakemake.input.img,
                              opacity=0.5, cmap='viridis', dim=-1, threshold=0.5,
                              symmetric_cmap=False, title='sub-{subject}'.format(**snakemake.wildcards))

html_view.save_as_html(snakemake.output.html)


# Static orthogonal-view PNG of the ROI overlay.
display = plotting.plot_roi(roi_img=snakemake.input.seg, bg_img=snakemake.input.img, display_mode='ortho')
display.savefig(snakemake.output.png)
display.close()
88
shell: 'singularity run {input.container}  {input.bids} {params.out_root} participant --participant_label={params.participant_label}' 
96
shell: 'cp {input} {output}'
102
shell: 'cp {input} {output}'
108
shell: 'cp {input} {output}'
114
shell: 'cp {input} {output}'
120
shell: 'cp {input} {output}'
126
shell: 'cp {input} {output}'
132
shell: 'cp {input} {output}'
139
shell: 'cp {input} {output}'
146
147
script:
    'scripts/get_shells_from_bvals.py'
161
162
script:
    'scripts/get_shell_avg.py'
174
175
shell:
    'antsApplyTransforms -d 3 --interpolation BSpline -i {input.in_img} -o {output.out_img}  -r {input.ref} -t {input.xfm}'
183
184
shell:
    'c3d -verbose {input} -flip x -o {output}'
193
194
shell:
    'c3d -verbose {input} -flip x -o {output}'
211
shell: 'dtifit --data={input.dwi} --out={params.out_prefix} --mask={input.mask} --bvecs={input.bvec} --bvals={input.bval} --gradnonlin={input.grad_dev} --verbose'
224
script: 'scripts/vis_qc_dseg.py'
233
shell: 'cp {input} {output}'
244
shell: 'cp {input.img} {output.img} && fslcpgeom {input.lbl} {output.img}'
249
shell: 'tar -cvf {output} {input}'
ShowHide 18 more snippets with no or duplicated tags.

Login to post a comment if you would like to share your experience with this workflow.

Do you know this workflow well? If so, you can request seller status , and start supporting this workflow.

Free

Created: 1yr ago
Updated: 1yr ago
Maintainers: public
URL: https://github.com/akhanf/prep_retraining_dwi_hippunfold_smk
Name: prep_retraining_dwi_hippunfold_smk
Version: 1
Badge:
workflow icon

Insert copied code into your website to add a link to this workflow.

Downloaded: 0
Copyright: Public Domain
License: None
  • Future updates

Related Workflows

cellranger-snakemake-gke
snakemake workflow to run cellranger on a given bucket using gke.
A Snakemake workflow for running cellranger on a given bucket using Google Kubernetes Engine. The usage of this workflow ...