Commit cc9348e

v2.0.8 merged

areeda committed Dec 4, 2023
2 parents 6167626 + d2411f9

Showing 4 changed files with 21 additions and 15 deletions.
omicron/cli/merge_with_gaps.py (2 additions, 2 deletions)

@@ -181,7 +181,7 @@ def valid_file(path, uint_bug):
             os.remove(path)
         else:
             ret = True
-    logger.debug(f'valid_file: {ret} {path.name} ({ntrig}), took {time.time()-vf_strt:.2f}')
+    logger.debug(f'valid_file: {ret} {path.name} ({ntrig}), took {time.time() - vf_strt:.2f}')
     return ret


@@ -203,7 +203,7 @@ def main():
     parser.add_argument('--no-gzip', action='store_true', default=False,
                         help='Do not compress the ligolw xml files')
     parser.add_argument('--uint-bug', default=False, action='store_true',
-                        help='Fix problem XML files created by old version of Omicron beforew merging.')
+                        help='Fix problem XML files created by old version of Omicron before merging.')
     parser.add_argument('--file-list', help='File with list of input file paths, one per line')
     parser.add_argument('infiles', nargs='*', help='List of paths to files to merge or copy')

omicron/cli/process.py (17 additions, 11 deletions)

@@ -271,10 +271,13 @@ def create_parser():
         '-N',
         '--max-channels-per-job',
         type=int,
-        default=10,
+        default=20,
         help='maximum number of channels to process in a single '
              'condor job (default: %(default)s)',
     )
+    procg.add_argument('--max-online-lookback', type=int, default=1200,
+                       help='With no immediately previous run, or one that was long ago this is the max time of an '
+                            'online job. Default: %(default)d')
     # max concurrent omicron jobs
     procg.add_argument('--max-concurrent', default=10, type=int,
                        help='Max omicron jobs at one time [%(default)s]')
@@ -331,7 +334,7 @@ def create_parser():
     )
     condorg.add_argument(
         '--condor-accounting-group',
-        default='ligo.prod.o3.detchar.transient.omicron',
+        default='ligo.prod.o4.detchar.transient.omicron',
         help='accounting_group for condor submission on the LIGO '
              'Data Grid (default: %(default)s)',
     )
@@ -345,7 +348,7 @@ def create_parser():
     )
     condorg.add_argument(
         '--condor-request-disk',
-        default='1G',
+        default='50G',
         help='Required LIGO argument: local disk use (default: %(default)s)',
     )
     condorg.add_argument(
@@ -370,7 +373,7 @@ def create_parser():
         '--dagman-option',
         action='append',
         type=str,
-        default=['force'],
+        default=['force', '-import_env'],
         metavar="\"opt | opt=value\"",
         help="Extra options to pass to condor_submit_dag as "
              "\"-{opt} [{value}]\". "
@@ -726,11 +729,13 @@ def main(args=None):

     segfile = str(rundir / "segments.txt")
     keepfiles.append(segfile)
+    max_lookback = args.max_online_lookback
 
     if newdag and online:
         # get limit of available data (allowing for padding)
         end = data.get_latest_data_gps(ifo, frametype) - padding
-
+        now = tconvert()
+        earliest_online = now - max_lookback
         try:  # start from where we got to last time
             last_run_segment = segments.get_last_run_segment(segfile)
             start = last_run_segment[1]
@@ -739,15 +744,16 @@ def main(args=None):
logger.debug("No online segment record, starting with "
"%s seconds" % chunkdur)
start = end - chunkdur + padding
else: # process the last 4000 seconds (arbitrarily)
logger.debug("No online segment record, starting with "
"4000 seconds")
start = end - 4000
else: # process the last requested seconds (arbitrarily)
logger.debug(f"No online segment record, starting with {max_lookback} seconds ago, {earliest_online}")
start = end - max_lookback
else:
logger.debug(f"Online segment record recovered: {last_run_segment[0]} - {last_run_segment[1]}")
elif online:
start, end = segments.get_last_run_segment(segfile)
logger.debug(f"Online segment record recovered: {start} - {end}")
if end - start > max_lookback:
start = end - max_lookback
else:
start, end = args.gps
start = int(start)
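
Read together with the new --max-online-lookback option, the effect is that an online run never starts more than the lookback window behind the end of available data, whether the previous-run record is missing or merely stale. A minimal sketch of the clamp (clamp_online_start is an illustrative name, not a function in this repository):

    def clamp_online_start(start, end, max_lookback=1200):
        """Cap how far back an online job may reach from the data end."""
        if end - start > max_lookback:
            start = end - max_lookback
        return start

    # a last-run record from an hour ago is pulled up to the final 1200 s
    print(clamp_online_start(1384000000, 1384003600))  # 1384002400
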
@@ -861,7 +867,7 @@ def main(args=None):
     # segment, or long enough to process safely)
     if truncate and abs(lastseg) < chunkdur * 2:
         logger.info(
-            "The final segment is too short, " f'Minimum length is {int(chunkdur*2)} '
+            "The final segment is too short, " f'Minimum length is {int(chunkdur * 2)} '
             "but ends at the limit of "
             "available data, presumably this is an active segment. It "
             "will be removed so that it can be processed properly later",
@@ -1454,7 +1460,7 @@ def main(args=None):
     clean_tempfiles(tempfiles)
 
     # and exit
-    logger.info(f"--- Processing complete. Elapsed: {time.time()-prog_start} seconds ----------------")
+    logger.info(f"--- Processing complete. Elapsed: {time.time() - prog_start} seconds ----------------")
 
 
 if __name__ == "__main__":
omicron/cli/status.py (1 addition, 1 deletion)

@@ -862,4 +862,4 @@ def print_nagios_json(code, message, outfile, tag='status', **extras):
 if __name__ == "__main__":
     main()
     if logger:
-        logger.info(f'Run time: {(time.time()-prog_start):.1f} seconds')
+        logger.info(f'Run time: {(time.time() - prog_start):.1f} seconds')
omicron/data.py (1 addition, 1 deletion)

@@ -190,7 +190,7 @@ def find_frames(obs, frametype, start, end, on_gaps='warn', **kwargs):
     if on_gaps != 'ignore':
         seglist = SegmentList(map(file_segment, cache)).coalesce()
         missing = (SegmentList([Segment(start, end)]) - seglist).coalesce()
-        msg = "Missing frames:\n{}".format('\n'.join(map(lambda s: f'[{s[0]}, {s[1]}) -> {s[1]-s[0]}s', missing)))
+        msg = "Missing frames:\n{}".format('\n'.join(map(lambda s: f'[{s[0]}, {s[1]}) -> {s[1] - s[0]}s', missing)))
         if missing and on_gaps == 'warn':
             warnings.warn(msg)
         elif missing:
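
The gap check above is plain segment arithmetic: coalesce the spans covered by the frame cache, subtract them from the requested span, and report whatever is left. A self-contained illustration with hypothetical times, assuming the Segment/SegmentList here are gwpy's (the diff does not show the imports):

    from gwpy.segments import Segment, SegmentList

    # frames cover [0, 100) and [150, 300) of a [0, 300) request
    seglist = SegmentList([Segment(0, 100), Segment(150, 300)]).coalesce()
    missing = (SegmentList([Segment(0, 300)]) - seglist).coalesce()
    msg = "Missing frames:\n{}".format(
        '\n'.join(map(lambda s: f'[{s[0]}, {s[1]}) -> {s[1] - s[0]}s', missing)))
    print(msg)
    # Missing frames:
    # [100, 150) -> 50s
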
