-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathtest_osa_scripts.py
350 lines (290 loc) · 11.2 KB
/
test_osa_scripts.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
import datetime
import os
import subprocess as sp
from pathlib import Path
from textwrap import dedent
import pytest
import yaml
from osa.configs import options
from osa.scripts.closer import is_sequencer_successful, is_finished_check
# Console-script entry points provided by OSA; each one is smoke-tested
# below (see test_all_help) and several get dedicated tests.
ALL_SCRIPTS = [
    "sequencer",
    "closer",
    "copy_datacheck",
    "datasequence",
    "calibration_pipeline",
    "show_run_summary",
    "provprocess",
    "simulate_processing",
]
# Module-wide OSA options shared by every test: the simulated observation
# night (2020-01-17), telescope, production IDs and analysis directory.
options.date = "2020_01_17"
options.tel_id = "LST1"
options.prod_id = "v0.1.0"
options.dl1_prod_id = "tailcut84"
options.directory = "test_osa/test_files0/running_analysis/20200117/v0.1.0/"
def remove_provlog():
    """Delete a prov.log left over from a previous run, if one exists."""
    provlog = Path("prov.log")
    if provlog.is_file():
        provlog.unlink()
def run_program(*args):
    """Run *args* as a subprocess and return the CompletedProcess.

    stdout and stderr are captured together as text.

    Raises
    ------
    ValueError
        If the program exits with a non-zero return code; the message
        includes the captured output to ease debugging.
    """
    # check must be False here: with check=True, sp.run raises
    # CalledProcessError itself and the informative ValueError below
    # becomes unreachable dead code.
    result = sp.run(args, stdout=sp.PIPE, stderr=sp.STDOUT, encoding="utf-8", check=False)
    if result.returncode != 0:
        raise ValueError(
            f"Running {args[0]} failed with return code {result.returncode}"
            f", output: \n {result.stdout}"
        )
    return result
@pytest.mark.parametrize("script", ALL_SCRIPTS)
def test_all_help(script):
    """Test for all scripts if at least the help works."""
    # Smoke test: every OSA entry point must answer `--help` without
    # failing (run_program raises on a non-zero exit status).
    run_program(script, "--help")
def test_simulate_processing(drs4_time_calibration_files, run_summary_file, r0_data):
    """simulate_processing must write provenance logs, JSON graphs and PDFs."""
    # Fixture sanity: inputs for the simulated night must be in place.
    for file in drs4_time_calibration_files:
        assert file.exists()
    for r0_file in r0_data:
        assert r0_file.exists()
    assert run_summary_file.exists()
    # Start clean: a stale prov.log would mix provenance from older runs.
    remove_provlog()
    rc = run_program("simulate_processing", "-p", "--force")
    assert rc.returncode == 0
    # Expected provenance products for run 01807 at DL1 and DL2 level.
    prov_dl1_path = Path("./test_osa/test_files0/DL1/20200117/v0.1.0/tailcut84/log")
    prov_dl2_path = Path("./test_osa/test_files0/DL2/20200117/v0.1.0/tailcut84_model1/log")
    prov_file_dl1 = prov_dl1_path / "calibration_to_dl1_01807_prov.log"
    prov_file_dl2 = prov_dl2_path / "calibration_to_dl2_01807_prov.log"
    json_file_dl1 = prov_dl1_path / "calibration_to_dl1_01807_prov.json"
    json_file_dl2 = prov_dl2_path / "calibration_to_dl2_01807_prov.json"
    pdf_file_dl1 = prov_dl1_path / "calibration_to_dl1_01807_prov.pdf"
    pdf_file_dl2 = prov_dl2_path / "calibration_to_dl2_01807_prov.pdf"
    assert prov_file_dl1.exists()
    assert prov_file_dl2.exists()
    assert pdf_file_dl1.exists()
    assert pdf_file_dl2.exists()
    # The JSON provenance graphs must contain the expected number of
    # nodes and relations (JSON is a YAML subset, hence yaml.safe_load).
    with open(json_file_dl1) as file:
        dl1 = yaml.safe_load(file)
        assert len(dl1["entity"]) == 15
        assert len(dl1["activity"]) == 4
        assert len(dl1["used"]) == 12
        assert len(dl1["wasGeneratedBy"]) == 7
    with open(json_file_dl2) as file:
        dl2 = yaml.safe_load(file)
        assert len(dl2["entity"]) == 24
        assert len(dl2["activity"]) == 6
        assert len(dl2["used"]) == 20
        assert len(dl2["wasGeneratedBy"]) == 12
    # Re-running on top of the existing prov.log must still succeed...
    rc = run_program("simulate_processing", "-p")
    assert rc.returncode == 0
    # ...and so must a fresh run after the log has been removed.
    remove_provlog()
    rc = run_program("simulate_processing", "-p")
    assert rc.returncode == 0
def test_simulated_sequencer(
    drs4_time_calibration_files,
    run_summary_file,
    run_catalog,
    r0_data
):
    """Sequencer in simulation mode prints the expected sequence table."""
    # Fixture sanity: inputs for the simulated night must be in place.
    assert run_summary_file.exists()
    assert run_catalog.exists()
    for r0_file in r0_data:
        assert r0_file.exists()
    for file in drs4_time_calibration_files:
        assert file.exists()
    rc = run_program(
        "sequencer", "-c", "cfg/sequencer.cfg", "-d", "2020_01_17", "-s", "-t", "LST1"
    )
    assert rc.returncode == 0
    # The header embeds the current UTC time truncated to the minute; build
    # the same timestamp here to compare the full stdout verbatim.
    # NOTE(review): could be flaky if the minute rolls over between the
    # sequencer call above and this line — confirm acceptable.
    now = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M")
    assert rc.stdout == dedent(
        f"""\
================================== Starting sequencer.py at {now} UTC for LST, Telescope: LST1, Night: 2020_01_17 ==================================
Tel Seq Parent Type Run Subruns Source Action Tries JobID State CPU_time Exit DL1% MUONS% DL1AB% DATACHECK% DL2%
LST1 0 None PEDCALIB 1805 5 None None None None None None None None None None None None
LST1 1 0 DATA 1807 11 Crab None None None None None None 0 0 0 0 0
LST1 2 0 DATA 1808 9 MadeUpSource None None None None None None 0 0 0 0 0
""")
def test_sequencer(sequence_file_list):
for sequence_file in sequence_file_list:
assert sequence_file.exists()
def test_autocloser(running_analysis_dir):
    """Run the autocloser in test mode and check its outputs."""
    output = run_program(
        "python",
        "osa/scripts/autocloser.py",
        "--config",
        "cfg/sequencer.cfg",
        "--date",
        "2020_01_17",
        "--test",
        "LST1",
    )
    # The analysis directory must survive the autocloser run.
    assert os.path.exists(running_analysis_dir)
    # The last token printed by the script is the "Exit" state column.
    assert output.stdout.split()[-1] == "Exit"
    incidences_file = (
        "./test_osa/test_files0/running_analysis/20200117/v0.1.0/"
        "AutoCloser_Incidences_tmp.txt"
    )
    assert os.path.exists(incidences_file)
def test_closer(r0_dir, running_analysis_dir, test_observed_data):
    """closer moves data products to their final tree and flags the night."""
    # First assure that the end of night flag is not set and remove it otherwise
    night_finished_flag = Path(
        "./test_osa/test_files0/OSA/Closer/20200117/v0.1.0/NightFinished.txt"
    )
    if night_finished_flag.exists():
        night_finished_flag.unlink()
    # Fixture sanity: input directories and observed-data files must exist.
    assert r0_dir.exists()
    assert running_analysis_dir.exists()
    for obs_file in test_observed_data:
        assert obs_file.exists()
    run_program(
        "closer", "-c", "cfg/sequencer.cfg", "-y", "-v", "-t", "-d", "2020_01_17", "LST1"
    )
    conda_env_export = running_analysis_dir / "log" / "conda_env.yml"
    closed_seq_file = running_analysis_dir / "sequence_LST1_01805.closed"
    # Check that files have been moved to their final destinations
    assert os.path.exists(
        "./test_osa/test_files0/DL1/20200117/v0.1.0/muons_LST-1.Run01808.0011.fits"
    )
    assert os.path.exists(
        "./test_osa/test_files0/DL1/20200117/v0.1.0/tailcut84/dl1_LST-1.Run01808.0011.h5"
    )
    assert os.path.exists(
        "./test_osa/test_files0/DL1/20200117/v0.1.0/tailcut84/"
        "datacheck_dl1_LST-1.Run01808.0011.h5"
    )
    assert os.path.exists(
        "./test_osa/test_files0/DL2/20200117/v0.1.0/tailcut84_model1/"
        "dl2_LST-1.Run01808.0011.h5"
    )
    # Assert that the link to dl1 and muons files have been created
    assert os.path.islink(
        "./test_osa/test_files0/running_analysis/20200117/"
        "v0.1.0/muons_LST-1.Run01808.0011.fits"
    )
    assert os.path.islink(
        "./test_osa/test_files0/running_analysis/20200117/"
        "v0.1.0/dl1_LST-1.Run01808.0011.h5"
    )
    # Closing must also set the night flag, export the conda environment
    # and mark the calibration sequence as closed.
    assert night_finished_flag.exists()
    assert conda_env_export.exists()
    assert closed_seq_file.exists()
def test_datasequence(running_analysis_dir):
    """A simulated datasequence invocation must exit cleanly."""
    options.directory = running_analysis_dir
    cmd = [
        "datasequence",
        "--config",
        "cfg/sequencer.cfg",
        "--date=2020_01_17",
        "--simulate",
        "--prod-id=v0.1.0",
        "--drs4-pedestal-file=drs4_pedestal.Run00001.0000.fits",
        "--pedcal-file=calibration.Run00002.0000.hdf5",
        "--time-calib-file=time_calibration.Run00002.0000.hdf5",
        "--drive-file=drive_log_20200117.txt",
        "--run-summary=RunSummary_20200117.ecsv",
        "00003.0000",
        "LST1",
    ]
    result = run_program(*cmd)
    assert result.returncode == 0
def test_calibration_pipeline(running_analysis_dir):
    """A simulated calibration_pipeline invocation must exit cleanly."""
    options.directory = running_analysis_dir
    cmd = [
        "calibration_pipeline",
        "--config",
        "cfg/sequencer.cfg",
        "--date=2020_01_17",
        "--simulate",
        "--prod-id=v0.1.0",
        "--drs4-pedestal-run=01805",
        "--pedcal-run=01806",
        "LST1",
    ]
    result = run_program(*cmd)
    assert result.returncode == 0
def test_is_sequencer_successful(run_summary, running_analysis_dir):
    """A fully finished run summary must be reported as successful."""
    options.directory = running_analysis_dir
    # Enable test mode so is_finished_check inspects the simulated
    # sequence files rather than querying the batch system.
    options.test = True
    seq_tuple = is_finished_check(run_summary)
    options.test = False
    assert is_sequencer_successful(seq_tuple) is True
def test_drs4_pedestal_cmd(base_test_dir):
    """DRS4 pedestal command line is assembled with the expected options."""
    from osa.scripts.calibration_pipeline import drs4_pedestal_command
    assert drs4_pedestal_command(drs4_pedestal_run_id="01804") == [
        "onsite_create_drs4_pedestal_file",
        "--run_number=01804",
        f"--base_dir={base_test_dir}",
        "--no-progress",
    ]
def test_calibration_file_cmd(base_test_dir):
    """Calibration-file command line is assembled with the expected options."""
    from osa.scripts.calibration_pipeline import calibration_file_command
    assert calibration_file_command(pedestal_run_id="01804", pedcal_run_id="01805") == [
        "onsite_create_calibration_file",
        "--pedestal_run=01804",
        "--run_number=01805",
        f"--base_dir={base_test_dir}",
        "--filters=52",
    ]
def test_look_for_datacheck_files(
    drs4_check_plot,
    calibration_check_plot,
    daily_datacheck_dl1_files,
    datacheck_dl1_files
):
    """Every datacheck product must exist and be selected for copying."""
    expected = [drs4_check_plot, calibration_check_plot]
    expected.extend(daily_datacheck_dl1_files)
    expected.extend(datacheck_dl1_files)
    for path in expected:
        assert path.exists()
    from osa.scripts.copy_datacheck import look_for_datacheck_files
    files_to_copy = look_for_datacheck_files("20200117")
    for path in expected:
        assert path in files_to_copy
def test_daily_longterm_cmd():
    """The daily long-term datacheck sbatch command is built correctly."""
    from osa.scripts.closer import daily_longterm_cmd
    cmd = daily_longterm_cmd(parent_job_ids=["12345", "54321"])
    assert cmd == [
        "sbatch",
        "-D",
        options.directory,
        "-o",
        "log/longterm_daily_%j.log",
        "--dependency=afterok:12345,54321",
        "/fefs/aswg/software/virtual_env/ctasoft/cta-lstchain/lstchain/scripts/longterm_dl1_check.py",
        "--input-dir=test_osa/test_files0/DL1/20200117/v0.1.0/tailcut84",
        "--output-file=test_osa/test_files0/OSA/DL1DataCheck_LongTerm/v0.1.0/20200117/DL1_datacheck_20200117.h5",
        "--muons-dir=test_osa/test_files0/DL1/20200117/v0.1.0",
        "--batch",
    ]
def test_observation_finished():
    """Check if observation is finished for `options.date=2020_01_17`."""
    from osa.scripts.closer import observation_finished
    # Noon several days after the night: observation must be over.
    assert observation_finished(date=datetime.datetime(2020, 1, 21, 12, 0, 0)) is True
    # Early morning of the night itself: still observing.
    assert observation_finished(date=datetime.datetime(2020, 1, 17, 5, 0, 0)) is False
def test_no_runs_found():
    """Sequencer must exit cleanly and report when a date has no runs."""
    result = sp.run(
        ["sequencer", "-s", "-d", "2015_01_01", "LST1"],
        capture_output=True,
        text=True,
    )
    assert result.returncode == 0
    final_line = result.stderr.splitlines()[-1]
    assert "No runs found for this date. Nothing to do. Exiting." in final_line