# Source: diff view of /fmri_fsl_1stLvl.py (revision 000000 .. d4b612)
#!/usr/bin/env python
# %%
"""
Created on Wed Dec  4 14:29:06 2019

@author: Or Duek
1st level analysis using FSL output
In this one we smooth using SUSAN, which takes longer.
"""
# %%
from __future__ import print_function
from __future__ import division

from builtins import str
from builtins import range

import os  # system functions

import nipype.interfaces.io as nio  # Data i/o
import nipype.interfaces.fsl as fsl  # fsl
import nipype.interfaces.utility as util  # utility
import nipype.pipeline.engine as pe  # pypeline engine
import nipype.algorithms.modelgen as model  # model generation
#import nipype.algorithms.rapidart as ra  # artifact detection
#from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth
from nipype.interfaces.utility import Function

"""
The output file format for FSL routines is being set to compressed NIFTI.
"""

fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# --- Analysis parameters ---------------------------------------------------
data_dir = '/gpfs/gibbs/pi/levy_ifat/Or/Chadi_Data/CPT/BIDS/derivatives/'

removeTR = 4  # number of initial volumes discarded from each run
fwhm = 4      # SUSAN smoothing kernel, mm
tr = 3        # repetition time, seconds

output_dir = '/gpfs/gibbs/pi/levy_ifat/Or/Chadi_Data/results/'
# %% Methods
def _bids2nipypeinfo(in_file, events_file, regressors_file, removeTR = 4,
                     regressors_names=None,
                     motion_columns=None,
                     decimals=3, amplitude=1.0):
    """Build nipype subject_info (a Bunch) and a motion .par file.

    Parameters
    ----------
    in_file : str
        Path to the functional run fed to the model.
    events_file : str
        Comma-separated events file with ``trial_type``, ``onset``,
        ``duration`` (and optionally ``amplitudes``) columns.
    regressors_file : str
        Whitespace-separated confounds file (fMRIPrep style).
    removeTR : int
        Number of initial volumes discarded from the scan; the same number
        of rows is dropped from the confound regressors.
    regressors_names : list of str, optional
        Confound columns passed as model regressors. Defaults to every
        non-motion column of the confounds file.
    motion_columns : list of str, optional
        Columns written to the realignment (.par) file. Defaults to the six
        fMRIPrep rigid-body parameters (trans/rot x xyz).
    decimals : int
        Unused; kept for interface compatibility.
    amplitude : float
        Event amplitude used when the events file has no ``amplitudes``.

    Returns
    -------
    (list of Bunch, str)
        One-element list with the run information, and the path of the
        written motion-parameter file.
    """
    from pathlib import Path
    import numpy as np
    import pandas as pd
    from nipype.interfaces.base.support import Bunch

    # Process the events file
    events = pd.read_csv(events_file, sep=',')

    bunch_fields = ['onsets', 'durations', 'amplitudes']

    if not motion_columns:
        from itertools import product
        motion_columns = ['_'.join(v) for v in product(('trans', 'rot'), 'xyz')]

    out_motion = Path('motion.par').resolve()

    regress_data = pd.read_csv(regressors_file, sep=r'\s+')
    # Drop the confound rows that correspond to the removed volumes.
    np.savetxt(out_motion, regress_data[motion_columns].values[removeTR:,], '%g')
    if regressors_names is None:
        regressors_names = sorted(set(regress_data.columns) - set(motion_columns))

    if regressors_names:
        bunch_fields += ['regressor_names']
        bunch_fields += ['regressors']

    runinfo = Bunch(
        scans=in_file,
        conditions=list(set(events.trial_type.values)),
        **{k: [] for k in bunch_fields})

    for condition in runinfo.conditions:
        # BUGFIX: use exact equality instead of `str.match(condition)`.
        # `str.match` performs a regex match anchored only at the start, so a
        # condition such as 'trauma1' would also select rows of a condition
        # named 'trauma10', and regex metacharacters in trial_type values
        # would be misinterpreted. Conditions come from the set of
        # trial_type values, so exact comparison is the intended selection.
        event = events[events.trial_type == condition]

        # NOTE(review): onsets are shifted by `removeTR` (a count of volumes),
        # but BIDS onsets are in seconds — if so this should be removeTR * TR.
        # Preserving the original offset; confirm against the events file.
        runinfo.onsets.append(np.round(event.onset.values-removeTR, 3).tolist()) # align onsets after removing X TRs from the scan
        runinfo.durations.append(np.round(event.duration.values, 3).tolist())
        if 'amplitudes' in events.columns:
            runinfo.amplitudes.append(np.round(event.amplitudes.values, 3).tolist())
        else:
            runinfo.amplitudes.append([amplitude] * len(event))

    if 'regressor_names' in bunch_fields:
        runinfo.regressor_names = regressors_names
        # Cut the first `removeTR` rows to match the shortened functional run.
        runinfo.regressors = regress_data[regressors_names].fillna(0.0).values[removeTR:,].T.tolist()

    return [runinfo], str(out_motion)
# %%
# Subject IDs to iterate over (one first-level model per subject).
# BUGFIX: '2022' appeared twice in the original list, which would run that
# subject's workflow twice under the same iterable.
subject_list = ['2001', '2002', '2004', '2005', '2008', '2010', '2012', '2013',
                '2015', '2017', '2021', '2022', '2023', '2024', '2025', '2026',
                '2027', '2028', '2032', '2033', '2034', '2036', '2037', '2038',
                '2039', '2042', '2043', '2044', '2045', '2047', '2048', '2050',
                '2051', '2052', '2053', '2054', '2055', '2056', '2058', '2059',
                '2062', '2063', '2064']
# # Map field names to individual subject runs.
# Identity node that feeds one subject_id at a time into the workflow.
infosource = pe.Node(
    util.IdentityInterface(fields=['subject_id']),
    name="infosource",
)
infosource.iterables = [('subject_id', subject_list)]
# SelectFiles - to grab the data (an alternative to DataGrabber).
# Per-subject fMRIPrep outputs are keyed by subject_id; the events file is
# shared across subjects.
templates = {
    'func': data_dir + '/fmriprep/sub-{subject_id}/func/sub-{subject_id}_task-imagery_space-MNI152NLin2009cAsym_res-2_desc-preproc_bold.nii.gz',
    'mask': data_dir + '/fmriprep/sub-{subject_id}/func/sub-{subject_id}_task-imagery_space-MNI152NLin2009cAsym_res-2_desc-brain_mask.nii.gz',
    'regressors': data_dir + '/fmriprep/sub-{subject_id}/func/sub-{subject_id}_task-imagery_desc-confounds_timeseries.tsv',
    'events': '/home/oad4/CPT_ML/CPT_event_file.csv',
}
selectfiles = pe.Node(nio.SelectFiles(templates), name="selectfiles")
# %%

# Function node: extract motion parameters from the confounds file and build
# the subject_info structure consumed by SpecifyModel.
runinfo = pe.Node(
    util.Function(
        input_names=['in_file', 'events_file', 'regressors_file',
                     'regressors_names', 'removeTR'],
        output_names=['info', 'realign_file'],
        function=_bids2nipypeinfo,
    ),
    name='runinfo',
)
runinfo.inputs.removeTR = removeTR

# Confound columns taken from the fMRIPrep confounds file.
runinfo.inputs.regressors_names = (
    ['dvars', 'framewise_displacement']
    + ['a_comp_cor_%02d' % i for i in range(6)]
    + ['cosine%02d' % i for i in range(4)]
)
# %%
# Drop the first `removeTR` volumes from each functional run.
skip = pe.Node(interface=fsl.ExtractROI(), name='skip')
skip.inputs.t_min = removeTR
skip.inputs.t_size = -1  # keep every volume after t_min
# %%
# SUSAN noise-reducing smoothing.
susan = pe.Node(interface=fsl.SUSAN(), name='susan')  # create_susan_smooth()
susan.inputs.fwhm = fwhm
susan.inputs.brightness_threshold = 1000.0
# %%
# First-level workflow container.
modelfit = pe.Workflow(name='modelfit', base_dir=output_dir)

# Translate run info into an FSL-compatible session/model specification.
modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
modelspec.inputs.input_units = 'secs'
modelspec.inputs.time_repetition = tr
modelspec.inputs.high_pass_filter_cutoff = 120
## Building contrasts
level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
# T-contrasts as [name, stat type, condition names, weights].
# NOTE(review): the third contrast is labelled 'stop_trauma' but weights the
# 'go_trauma1' condition — confirm which one is intended.
contrasts = [
    ['trauma>neutral', 'T', ['trauma1', 'neutral1'], [1, -1]],
    ['trauma>think_trauma', 'T', ['trauma1', 'think_trauma1'], [1, -1]],
    ['trauma>stop_trauma', 'T', ['trauma1', 'go_trauma1'], [1, -1]],
]
cont1, cont2, cont3 = contrasts
level1design.inputs.interscan_interval = tr
level1design.inputs.bases = {'dgamma': {'derivs': False}}
level1design.inputs.contrasts = contrasts
level1design.inputs.model_serial_correlations = True

# Compile the FSF/EV files into FEAT design matrices.
modelgen = pe.Node(interface=fsl.FEATModel(), name='modelgen')

# Restrict estimation to brain voxels.
mask = pe.Node(interface=fsl.maths.ApplyMask(), name='mask')

# FILM GLS estimation (prewhitened GLM) on the masked, smoothed data.
modelestimate = pe.Node(
    interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5, threshold=1000),
    name='modelestimate',
)
# %%
# Wire the workflow: select files -> drop TRs -> smooth -> specify model
# -> design -> mask -> estimate.
modelfit.connect([
    (infosource, selectfiles, [('subject_id', 'subject_id')]),
    (selectfiles, runinfo, [('events', 'events_file'),
                            ('regressors', 'regressors_file')]),
    (selectfiles, skip, [('func', 'in_file')]),
    (skip, susan, [('roi_file', 'in_file')]),
    (susan, runinfo, [('smoothed_file', 'in_file')]),
    (susan, modelspec, [('smoothed_file', 'functional_runs')]),
    (runinfo, modelspec, [('info', 'subject_info'),
                          ('realign_file', 'realignment_parameters')]),
    (modelspec, level1design, [('session_info', 'session_info')]),
    (level1design, modelgen, [('fsf_files', 'fsf_file'),
                              ('ev_files', 'ev_files')]),
    # (susan, changeTosrting, [('outputnode.smoothed_files', 'arr')]),
    (susan, mask, [('smoothed_file', 'in_file')]),
    (selectfiles, mask, [('mask', 'mask_file')]),
    (mask, modelestimate, [('out_file', 'in_file')]),
    (modelgen, modelestimate, [('design_file', 'design_file'),
                               ('con_file', 'tcon_file'),
                               ('fcon_file', 'fcon_file')]),
])
# %%
# Execute with the MultiProc plugin, 5 parallel processes.
modelfit.run('MultiProc', plugin_args={'n_procs': 5})