-
Notifications
You must be signed in to change notification settings - Fork 0
/
FPVS_SLURM_sweep.py
executable file
·273 lines (236 loc) · 11.5 KB
/
FPVS_SLURM_sweep.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
#!/imaging/local/software/miniconda/envs/mne0.20/bin/python
"""
==========================================
Submit sbatch jobs for FPVS Frequency Sweep
analysis
SLURM, Python 3
==========================================
OH, modified October 2019
modified by Federica M for more subjects (ERP drive, MEG/FPVS/Scripts_Federica),
then re-adapted by OH Jan 2020
"""
import subprocess
from os import path as op
from importlib import reload
# import study parameters
import config_sweep as config
reload(config)  # re-read in case config changed in an interactive session
print(__doc__)
# shell wrapper that launches a python script inside an sbatch job (Python 3)
fname_wrap = op.join('/', 'home', 'olaf', 'MEG', 'FPVS', 'MNE-Python',
                     'Python2SLURM.sh')
# indices of subjects to process (from study parameters)
subjs = config.do_subjs
# List of processing stages to submit to SLURM.  Each dict describes one job:
#   'N':    job name (the subject index is prepended per subject)
#   'Py':   python script in dir_py to run (without the .py extension)
#   'Ss':   subject indices to process ([99] = grand-average "subject")
#   'mem':  memory request for the sbatch job
#   'dep':  job name of the preceding stage this one depends on ('' = none)
#   'node': (optional) node constraint string passed to sbatch
# Stages not currently needed are kept commented out for easy re-activation.
job_list = [
    # # Neuromag Maxfilter
    # {'N': 'F_MF',  # job name
    #  'Py': 'FPVS_Maxfilter_sweep',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '16G',  # memory for qsub process
    #  'dep': '',  # name of preceding process (optional)
    #  'node': '--constraint=maxfilter'},  # node constraint for MF, just picked one
    # # fix EEG electrode positions in fiff-files
    # # NOTE: Can get "Permission denied"; should be run separately
    # {'N': 'F_FE',  # job name
    #  'Py': 'FPVS_fix_electrodes_sweep',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '1G',  # memory for qsub process
    #  'dep': ''},  # name of preceding process (optional)
    # ### Pre-processing
    # ### Filter raw data
    # {'N': 'F_FR',  # job name
    #  'Py': 'FPVS_filter_raw_sweep',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '16G',  # memory for qsub process
    #  'dep': ''},  # name of preceding process (optional)
    # {'N': 'F_Cov',  # job name
    #  'Py': 'FPVS_make_covmat',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '4G',  # memory for qsub process
    #  'dep': 'F_FR'},
    # # ### Compute ICA
    # {'N': 'F_CICA',  # job name
    #  'Py': 'FPVS_Compute_ICA_sweep',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '96G',  # memory for qsub process
    #  'dep': 'F_FR'},  # name of preceding process (optional)
    # ### Apply ICA (change ica_suff in config_sweep.py if necessary)
    # {'N': 'F_AICA',  # job name
    #  'Py': 'FPVS_Apply_ICA_sweep',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_CICA'},  # name of preceding process (optional)
    # ## Get sweeps from raw data and average (change ica_suff in config_sweep.py if necessary)
    # {'N': 'F_GS',  # job name
    #  'Py': 'FPVS_get_sweeps',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '8G',  # memory for qsub process
    #  'dep': 'F_AICA'},
    # ### Evoked analysis
    # # Get epochs from sweeps for ERP analysis
    # # lots of epochs, needs enough memory
    # {'N': 'F_EPO',  # job name
    #  'Py': 'FPVS_epoch_sweeps',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '32G',  # memory for qsub process
    #  'dep': ''},
    # ### Average epochs
    # {'N': 'F_EVO',  # job name
    #  'Py': 'FPVS_average_epochs',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_EPO'},
    # ### Plot evoked curves and topographies
    # {'N': 'F_PlEVO',  # job name
    #  'Py': 'FPVS_plot_evoked',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_EVO'},
    # ### Source estimation for evoked data
    # {'N': 'F_MNEEVO',  # job name
    #  'Py': 'FPVS_source_estimation_evoked',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': ''},
    # ### Source Estimation
    # ### Create Source Spaces
    # {'N': 'F_SS',  # job name
    #  'Py': 'FPVS_make_SourceSpace',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': ''},  # name of preceding process (optional)
    # ### Create BEM surfaces and model
    # {'N': 'F_BEM',  # job name
    #  'Py': 'FPVS_make_BEM',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_SS'},  # name of preceding process (optional)
    # # ### Create Forward Solution
    # {'N': 'F_Fwd',  # job name
    #  'Py': 'FPVS_ForwardSolution',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_BEM'},
    # ### Create Inverse Operator
    # {'N': 'F_Inv',  # job name
    #  'Py': 'FPVS_InverseOperator',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_Fwd'},
    # ### Compute Sensitivity Maps
    # {'N': 'F_SM',  # job name
    #  'Py': 'FPVS_SensitivityMaps',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_Inv'},
    # ### Grand-average and plot source estimates (should be run separately)
    # {'N': 'F_GMESTC',  # job name
    #  'Py': 'FPVS_average_evoked_STCs',  # Python script
    #  'Ss': [99],  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': ''},
    # ### Grand-average and plot evoked data (should be run separately)
    # {'N': 'F_GMEvo',  # job name
    #  'Py': 'FPVS_grand_average_evoked',  # Python script
    #  'Ss': [99],  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_MNEEVO'},
    # ### Compute PSDs for averaged sweeps and plot (change ica_suff in config_sweep.py if necessary)
    # {'N': 'F_P_C',  # job name
    #  'Py': 'FPVS_PSD_sweep_compute',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '32G',  # memory for qsub process
    #  'dep': ''},
    # ### Plot PSD results
    # {'N': 'F_P_P',  # job name
    #  'Py': 'FPVS_PSD_sweep_plot',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '8G',  # memory for qsub process
    #  'dep': 'F_P_C'},
    # ### Morph source estimates before averaging
    # {'N': 'F_Mph',  # job name
    #  'Py': 'FPVS_MorphSTC',  # Python script
    #  'Ss': subjs,  # subject indices
    #  'mem': '2G',  # memory for qsub process
    #  'dep': 'F_P_C'},
    # ### Compute Grand-Mean (only for 1 "subject")
    # # cannot be dependent on previous scripts, because they would
    # # have to complete for all participants
    {'N': 'F_GM',  # job name
     'Py': 'FPVS_GrandAverage_PSDs',  # Python script
     'Ss': [99],  # subject indices (99 = grand-average "subject")
     'mem': '1G',  # memory for sbatch process
     'dep': ''},
    # ### Plot Grand-Mean (only for 1 "subject")
    # {'N': 'F_GMP',  # job name
    #  'Py': 'FPVS_GrandAverage_Plot',  # Python script
    #  'Ss': [99],  # subject indices
    #  'mem': '1G',  # memory for qsub process
    #  'dep': ''},
]
### Other processing steps (examples kept for reference, not in job_list)
# ### compute band-limited time courses using Hilbert transform
# {'N': 'SR_TFH',  # job name
#  'Py': 'SR_TFR_Hilbert',  # Python script
#  'Ss': subjs,  # subject indices
#  'mem': '4G',  # memory for qsub process
#  'dep': 'SR_FR'},  # name of preceding process (optional)
# # ### Filter raw data FM-> generating also txt event file
# # {'N': 'F_FR',  # job name
# #  'Py': 'FPVS_filter_raw_sweep_txtfile',  # Python script
# #  'Ss': subjs,  # subject indices
# #  'mem': '16G',  # memory for qsub process
# #  'dep': 'F_FR'},  # name of preceding process (optional)
# directory where the analysis python scripts are
dir_py = op.join('/', 'home', 'olaf', 'MEG', 'FPVS', 'MNE-Python')
# directory for sbatch stdout/stderr output files
dir_sbatch = op.join('/', 'home', 'olaf', 'MEG', 'FPVS', 'MNE-Python',
                     'sbatch_out')
# SLURM Job IDs of submitted jobs, keyed by (job name, subject index),
# so later jobs can declare --dependency on earlier ones
Job_IDs = {}
# Submit one sbatch job per (job, subject) combination.  The Job ID that
# sbatch reports is remembered so later jobs can depend on earlier ones.
for job in job_list:

    for Ss in job['Ss']:

        Ss = str(Ss)  # turn into string for filenames etc.

        N = Ss + job['N']  # job name with subject index prepended
        Py = op.join(dir_py, job['Py'])
        Cf = ''  # config file not necessary for FPVS
        mem = job['mem']

        # files for sbatch stdout/stderr output
        file_out = op.join(dir_sbatch,
                           job['N'] + '_' + Cf + '-%s.out' % Ss)
        file_err = op.join(dir_sbatch,
                           job['N'] + '_' + Cf + '-%s.err' % Ss)

        # if job depends on a previous job, look up that job's SLURM ID
        # (stored below) and produce the --dependency option
        if job.get('dep', ''):
            job_id = Job_IDs[Ss + job['dep'], Ss]
            dep_str = '--dependency=afterok:%s' % job_id
        else:
            dep_str = ''

        # optional node constraint (e.g. Maxfilter licence node)
        node_str = job.get('node', '')

        # optional extra variables passed to the python script
        var_str = job.get('var', '')

        # sbatch command string to be executed (implicit concatenation
        # avoids the embedded space runs a backslash continuation creates)
        sbatch_cmd = ('sbatch -o %s -e %s '
                      '--export=pycmd="%s.py %s",subj_idx=%s,var=%s '
                      '--mem=%s -t 1-00:00:00 %s -J %s %s %s'
                      % (file_out, file_err, Py, Cf, Ss, var_str, mem,
                         node_str, N, dep_str, fname_wrap))

        print('\n%s\n' % sbatch_cmd)

        # execute sbatch command; shell=True is required because the
        # command is a single string (all inputs come from the trusted
        # job_list above, so no injection risk here)
        proc = subprocess.Popen(sbatch_cmd, stdout=subprocess.PIPE,
                                shell=True)
        (out, err) = proc.communicate()

        # fail early with a clear message if submission did not succeed;
        # otherwise the parse below would raise an opaque Index/ValueError
        if proc.returncode != 0:
            raise RuntimeError('sbatch failed for job %s (exit code %s)'
                               % (N, proc.returncode))

        # sbatch prints "Submitted batch job <ID>"; keep <ID> (as string)
        # for the dependency lookups of later jobs
        Job_IDs[N, Ss] = str(int(out.split()[-1]))