From fdcbb3f042a7db802d1c8ada837617a380e5100a Mon Sep 17 00:00:00 2001 From: chervias Date: Fri, 27 Sep 2024 16:51:30 -0700 Subject: [PATCH 01/39] Refactor of the demod mapmaking --- sotodlib/mapmaking/demod_mapmaker.py | 104 ++++++++++++++++++++------- 1 file changed, 79 insertions(+), 25 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index f98ba900a..c4fa7d24d 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -5,12 +5,13 @@ you accumulate observations into the div and rhs maps. For examples how to use look at docstring of DemodMapmaker. """ -__all__ = ['DemodMapmaker','DemodSignal','DemodSignalMap'] +__all__ = ['DemodMapmaker','DemodSignal','DemodSignalMap','make_atomic_map'] import numpy as np from pixell import enmap, utils, tilemap, bunch import so3g.proj from .. import coords +from .. import site_pipeline from .utilities import recentering_to_quat_lonlat, evaluate_recentering, MultiZipper, unarr, safe_invert_div from .noise_model import NmatWhite @@ -43,7 +44,7 @@ def __init__(self, signals=[], noise_model=None, dtype=np.float32, verbose=False rather than from obs.dsT, obs.demodQ, obs.demodU Example usage :: - signal_map = mapmaking.DemodSignalMap(shape, wcs, comm) + signal_map = mapmaking.DemodSignalMap(shape, wcs) signals = [signal_map] mapmaker = mapmaking.DemodMapmaker(signals, noise_model=noise_model) @@ -140,7 +141,7 @@ def from_work(self, x): return x def write (self, prefix, tag, x): pass class DemodSignalMap(DemodSignal): - def __init__(self, shape, wcs, comm, comps="TQU", name="sky", ofmt="{name}", output=True, + def __init__(self, shape, wcs, comps="TQU", name="sky", ofmt="{name}", output=True, ext="fits", dtype=np.float32, sys=None, recenter=None, tile_shape=(500,500), tiled=False, Nsplits=1, singlestream=False): """ Signal describing a non-distributed sky map. 
Signal describing a sky map in the coordinate @@ -153,8 +154,6 @@ def __init__(self, shape, wcs, comm, comps="TQU", name="sky", ofmt="{name}", out Shape of the output map geometry wcs : wcs WCS of the output map geometry - comm : MPI.comm - MPI communicator comps : str, optional Components to map name : str, optional @@ -186,7 +185,6 @@ def __init__(self, shape, wcs, comm, comps="TQU", name="sky", ofmt="{name}", out """ DemodSignal.__init__(self, name, ofmt, output, ext) - self.comm = comm self.comps = comps self.sys = sys self.recenter = recenter @@ -289,34 +287,90 @@ def prepare(self): if self.ready: return if self.tiled: self.geo_work = self.rhs.geometry - self.rhs = tilemap.redistribute(self.rhs, self.comm) - self.div = tilemap.redistribute(self.div, self.comm) - self.hits = tilemap.redistribute(self.hits,self.comm) - else: - if self.comm is not None: - self.rhs = utils.allreduce(self.rhs, self.comm) - self.div = utils.allreduce(self.div, self.comm) - self.hits = utils.allreduce(self.hits,self.comm) self.ready = True @property def ncomp(self): return len(self.comps) - def to_work(self, map): - if self.tiled: return tilemap.redistribute(map, self.comm, self.geo_work.active) - else: return map.copy() - - def from_work(self, map): - if self.tiled: return tilemap.redistribute(map, self.comm, self.rhs.geometry.active) - else: return utils.allreduce(map, self.comm) - def write(self, prefix, tag, m): if not self.output: return oname = self.ofmt.format(name=self.name) oname = "%s%s_%s.%s" % (prefix, oname, tag, self.ext) if self.tiled: - tilemap.write_map(oname, m, self.comm) + tilemap.write_map(oname, m) else: - if self.comm is None or self.comm.rank == 0: - enmap.write_map(oname, m) + enmap.write_map(oname, m) return oname + +def make_atomic_map(context, obslist, shape, wcs, noise_model, comps="TQU", + t0=0, dtype_tod=np.float32, dtype_map=np.float64, L=None, + tag="", verbose=0, preprocess_config=None, + split_labels=None, det_in_out=False, + det_left_right=False, det_upper_lower=False, + site='so_sat3', recenter=None, outs=None, singlestream=False): + pre = "" if tag is None else tag + " " + L.info(pre + "Initializing equation system") + + # Set up our mapmaking equation + if split_labels==None: + # this is the case where we did not request any splits at all + signal_map = DemodSignalMap(shape, wcs, comps=comps, + dtype=dtype_map, tiled=False, + ofmt="", singlestream=singlestream, + recenter=recenter ) + else: + # this is the case where we asked for at least 2 splits (1 split set). 
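+        # (Each entry of split_labels gets its own rhs/div/hits map inside
+        # DemodSignalMap, indexed by n_split when the outputs are collected
+        # at the end of this function.)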
+ # We count how many split we'll make, we need to define the Nsplits + # maps inside the DemodSignalMap + Nsplits = len(split_labels) + signal_map = DemodSignalMap(shape, wcs, comps=comps, + dtype=dtype_map, tiled=False, + ofmt="", Nsplits=Nsplits, + singlestream=singlestream, + recenter=recenter) + signals = [signal_map] + mapmaker = DemodMapmaker(signals, noise_model=noise_model, + dtype=dtype_tod, + verbose=verbose>0, + singlestream=singlestream) + L.info(pre + "Building RHS") + # And feed it with our observations + for oi in range(len(obslist)): + obs_id, detset, band = obslist[oi][:3] + name = "%s:%s:%s" % (obs_id, detset, band) + error, output, obs = site_pipeline.preprocess_tod.preproc_or_load_group(obs_id, configs=preprocess_config, + dets={'wafer_slot':detset, 'wafer.bandpass':band}, + logger=L, context=context, overwrite=False) + + obs.wrap("weather", np.full(1, "toco")) + obs.wrap("site", np.full(1, site)) + obs.flags.wrap('glitch_flags', obs.preprocess.turnaround_flags.turnarounds + + obs.preprocess.jumps_2pi.jump_flag + obs.preprocess.glitches.glitch_flags, ) + + if error not in [None,'load_success']: + L.info('tod %s:%s:%s failed in the prepoc database'%(obs_id,detset,band)) + continue + outs.append(error) + outs.append(output) + # And add it to the mapmaker + if split_labels==None: + # this is the case of no splits + mapmaker.add_obs(name, obs) + else: + # this is the case of having splits. We need to pass the split_labels + # at least. If we have detector splits fixed in time, then we pass the + # masks in det_split_masks. Otherwise, det_split_masks will be None. + mapmaker.add_obs(name, obs, split_labels=split_labels) + L.info('Done with tod %s:%s:%s'%(obs_id,detset,band)) + + for signal in signals: + signal.prepare() + L.info(pre + "Writing F+B outputs") + wmap = [] + weights = [] + for n_split in range(signal_map.Nsplits): + wmap.append( signal_map.rhs[n_split] ) + div = np.diagonal(signal_map.div[n_split], axis1=0, axis2=1) + div = np.moveaxis(div, -1, 0) # this moves the last axis to the 0th position + weights.append(div) + return bunch.Bunch(wmap=wmap, weights=weights, signal=signal_map, t0=t0 ) \ No newline at end of file From c51b5a023984caaf261be3a14f7b8158aae7d348 Mon Sep 17 00:00:00 2001 From: chervias Date: Sat, 28 Sep 2024 10:45:05 -0700 Subject: [PATCH 02/39] Docstring for make_atomic_map --- sotodlib/mapmaking/demod_mapmaker.py | 68 ++++++++++++++++++++++++---- 1 file changed, 60 insertions(+), 8 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index c4fa7d24d..a02668457 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -165,7 +165,7 @@ def __init__(self, shape, wcs, comps="TQU", name="sky", ofmt="{name}", output=Tr ext : str, optional The extension used for the files. dtype : numpy.dtype - The data type to use for the time-ordered data. + The data type to use for the maps. sys : str or None, optional The coordinate system to map. 
Defaults to equatorial recenter : str or None @@ -302,15 +302,67 @@ def write(self, prefix, tag, m): enmap.write_map(oname, m) return oname -def make_atomic_map(context, obslist, shape, wcs, noise_model, comps="TQU", - t0=0, dtype_tod=np.float32, dtype_map=np.float64, L=None, - tag="", verbose=0, preprocess_config=None, - split_labels=None, det_in_out=False, - det_left_right=False, det_upper_lower=False, +def make_atomic_map(context, obslist, shape, wcs, noise_model, L, + preprocess_config, comps="TQU", t0=0, + dtype_tod=np.float32, dtype_map=np.float32, + tag="", verbose=0, split_labels=None, site='so_sat3', recenter=None, outs=None, singlestream=False): + """ + Initialize a FilterBin Mapmaker for demodulated data + + Arguments + --------- + context : sotodlib.core.Context + Context object used to load obs from. + obslist : dict + The obslist which is the output of the + mapmaking.obs_grouping.build_obslists, contains the information of the + single or multiple obs to map. + shape : tuple + Shape of the geometry to use for mapping. + wcs : dict + WCS kernel of the geometry to use for mapping. + noise_model : sotodlib.mapmaking.Nmat + Noise model to pass to DemodMapmaker. + L : logger + Logger for printing on the screen. + preprocess_config : dict + Dictionary with the config yaml file for the preprocess database. + comps : str, optional + Which components to map, only TQU supported for now. + t0 : int, optional + Ctime to use as the label in the map files. + dtype_tod : numpy.dtype, optional + The data type to use for the time-ordered data. Only tested + with float32. + dtype_map : numpy.dtype, optional + The data type to use for the maps. + tag : str, optional + Prefix tag for the logger. + verbose : bool, optional + split_labels : list or None, optional + A list of strings with the splits requested. If None then no splits + were asked for, i.e. we will produce one map. + site : str, optional + Plataform name for the pointing matrix. + recenter : str or None + String to make object-centered maps, such as Moon/Sun/Planet centered maps. + Look at sotodlib.mapmaking.parse_recentering for details. + outs : list, optional + List to accumulate the outputs from preproc_or_load_group. This is + necessary if you end up preprocessing an obs here and want to save + to the database so you won't have to re-process again. + singlestream : Bool + If True, do not perform demodulated filter+bin mapmaking but + rather regular filter+bin mapmaking, i.e. map from obs.signal + rather than from obs.dsT, obs.demodQ, obs.demodU. 
+ + Example usage :: + """ + pre = "" if tag is None else tag + " " L.info(pre + "Initializing equation system") - + # Set up our mapmaking equation if split_labels==None: # this is the case where we did not request any splits at all @@ -373,4 +425,4 @@ def make_atomic_map(context, obslist, shape, wcs, noise_model, comps="TQU", div = np.diagonal(signal_map.div[n_split], axis1=0, axis2=1) div = np.moveaxis(div, -1, 0) # this moves the last axis to the 0th position weights.append(div) - return bunch.Bunch(wmap=wmap, weights=weights, signal=signal_map, t0=t0 ) \ No newline at end of file + return bunch.Bunch(wmap=wmap, weights=weights, signal=signal_map, t0=t0) \ No newline at end of file From bd74d406e1e6617e1ad461b78a8350c8bd7c58f2 Mon Sep 17 00:00:00 2001 From: chervias Date: Sat, 28 Sep 2024 10:48:51 -0700 Subject: [PATCH 03/39] Newline at end of file --- sotodlib/mapmaking/demod_mapmaker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index a02668457..1157c6270 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -425,4 +425,4 @@ def make_atomic_map(context, obslist, shape, wcs, noise_model, L, div = np.diagonal(signal_map.div[n_split], axis1=0, axis2=1) div = np.moveaxis(div, -1, 0) # this moves the last axis to the 0th position weights.append(div) - return bunch.Bunch(wmap=wmap, weights=weights, signal=signal_map, t0=t0) \ No newline at end of file + return bunch.Bunch(wmap=wmap, weights=weights, signal=signal_map, t0=t0) From 372d450c22f7c941747be96ed479666b2c1e4844 Mon Sep 17 00:00:00 2001 From: chervias Date: Sat, 28 Sep 2024 11:04:25 -0700 Subject: [PATCH 04/39] Handling of failed preprocessings --- sotodlib/mapmaking/demod_mapmaker.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index 1157c6270..54543ab4f 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -302,13 +302,13 @@ def write(self, prefix, tag, m): enmap.write_map(oname, m) return oname -def make_atomic_map(context, obslist, shape, wcs, noise_model, L, +def make_demod_map(context, obslist, shape, wcs, noise_model, L, preprocess_config, comps="TQU", t0=0, dtype_tod=np.float32, dtype_map=np.float32, tag="", verbose=0, split_labels=None, site='so_sat3', recenter=None, outs=None, singlestream=False): """ - Initialize a FilterBin Mapmaker for demodulated data + Make a demodulated map from the list of observations in obslist. Arguments --------- @@ -357,8 +357,7 @@ def make_atomic_map(context, obslist, shape, wcs, noise_model, L, rather regular filter+bin mapmaking, i.e. map from obs.signal rather than from obs.dsT, obs.demodQ, obs.demodU. 
- Example usage :: - """ + """ pre = "" if tag is None else tag + " " L.info(pre + "Initializing equation system") @@ -387,23 +386,22 @@ def make_atomic_map(context, obslist, shape, wcs, noise_model, L, singlestream=singlestream) L.info(pre + "Building RHS") # And feed it with our observations + nobs_kept = 0 for oi in range(len(obslist)): obs_id, detset, band = obslist[oi][:3] name = "%s:%s:%s" % (obs_id, detset, band) error, output, obs = site_pipeline.preprocess_tod.preproc_or_load_group(obs_id, configs=preprocess_config, dets={'wafer_slot':detset, 'wafer.bandpass':band}, logger=L, context=context, overwrite=False) - + outs.append(error) + outs.append(output) + if error not in [None,'load_success']: + L.info('tod %s:%s:%s failed in the prepoc database'%(obs_id,detset,band)) + continue obs.wrap("weather", np.full(1, "toco")) obs.wrap("site", np.full(1, site)) obs.flags.wrap('glitch_flags', obs.preprocess.turnaround_flags.turnarounds + obs.preprocess.jumps_2pi.jump_flag + obs.preprocess.glitches.glitch_flags, ) - - if error not in [None,'load_success']: - L.info('tod %s:%s:%s failed in the prepoc database'%(obs_id,detset,band)) - continue - outs.append(error) - outs.append(output) # And add it to the mapmaker if split_labels==None: # this is the case of no splits @@ -414,6 +412,10 @@ def make_atomic_map(context, obslist, shape, wcs, noise_model, L, # masks in det_split_masks. Otherwise, det_split_masks will be None. mapmaker.add_obs(name, obs, split_labels=split_labels) L.info('Done with tod %s:%s:%s'%(obs_id,detset,band)) + nobs_kept += 1 + # if we skip all the obs + if nobs_kept == 0: + return None for signal in signals: signal.prepare() From 2cca6d83dc4ce65d4980a976159ff284d2c45725 Mon Sep 17 00:00:00 2001 From: chervias Date: Sat, 28 Sep 2024 11:13:07 -0700 Subject: [PATCH 05/39] Fixing a bug --- sotodlib/mapmaking/demod_mapmaker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index 54543ab4f..86f856064 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -5,7 +5,7 @@ you accumulate observations into the div and rhs maps. For examples how to use look at docstring of DemodMapmaker. 
""" -__all__ = ['DemodMapmaker','DemodSignal','DemodSignalMap','make_atomic_map'] +__all__ = ['DemodMapmaker','DemodSignal','DemodSignalMap','make_demod_map'] import numpy as np from pixell import enmap, utils, tilemap, bunch import so3g.proj From 7e06d85a16626437eb5fcbd9b1c7b0d9f6e41685 Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 30 Sep 2024 10:49:20 -0700 Subject: [PATCH 06/39] Fix bug --- sotodlib/mapmaking/demod_mapmaker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index 86f856064..d851c290c 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -393,8 +393,8 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, error, output, obs = site_pipeline.preprocess_tod.preproc_or_load_group(obs_id, configs=preprocess_config, dets={'wafer_slot':detset, 'wafer.bandpass':band}, logger=L, context=context, overwrite=False) - outs.append(error) - outs.append(output) + if outs is not None: + outs.append((error,output)) if error not in [None,'load_success']: L.info('tod %s:%s:%s failed in the prepoc database'%(obs_id,detset,band)) continue From 4d996a010ae5266031aff9988f66f1f4af401f72 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Mon, 30 Sep 2024 14:09:58 -0700 Subject: [PATCH 07/39] updates for 20240819 satp3 catchup job --- sotodlib/preprocess/pcore.py | 14 ++++ sotodlib/preprocess/processes.py | 57 +++++++++++---- sotodlib/tod_ops/fft_ops.py | 5 +- sotodlib/tod_ops/flags.py | 41 +++++++---- sotodlib/tod_ops/t2pleakage.py | 118 +++++++++++++++++++++++++++++++ 5 files changed, 208 insertions(+), 27 deletions(-) diff --git a/sotodlib/preprocess/pcore.py b/sotodlib/preprocess/pcore.py index 58168e715..5957fea71 100644 --- a/sotodlib/preprocess/pcore.py +++ b/sotodlib/preprocess/pcore.py @@ -282,6 +282,20 @@ def _expand(new, full, wrap_valid=True): nidx.append(slice(None)) oidx = tuple(oidx) nidx = tuple(nidx) + print('k,v',k,v) + try: + print(len(v)) + print('new_dets',new.dets.count) + print('new_samps',new.samps.count) + except: + print('none') + print('full_dets',full.dets.count) + print('full_samps',full.samps.count) + print('out', out) + print('oidx', oidx) + print('nidx', nidx) + + if isinstance(out[k], RangesMatrix): assert new._assignments[k][-1] == 'samps' out[k] = _ranges_matrix_match( out[k], v, oidx, nidx) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 97c8a38cd..c392ab279 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -14,6 +14,7 @@ from .pcore import _Preprocess, _FracFlaggedMixIn from .. import flag_utils +from ..utils.profile import cprofile class FFTTrim(_Preprocess): """Trim the AxisManager to optimize for faster FFTs later in the pipeline. @@ -21,7 +22,8 @@ class FFTTrim(_Preprocess): .. 
autofunction:: sotodlib.tod_ops.fft_trim """ - name = "fft_trim" + name = "fft_trim" + #@cprofile(name) def process(self, aman, proc_aman): tod_ops.fft_trim(aman, **self.process_cfgs) @@ -37,6 +39,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def process(self, aman, proc_aman): tod_ops.detrend_tod(aman, signal_name=self.signal, **self.process_cfgs) @@ -51,6 +54,7 @@ class DetBiasFlags(_FracFlaggedMixIn, _Preprocess): name = "det_bias_flags" _influx_field = "det_bias_flags_frac" + #@cprofile(name) def calc_and_save(self, aman, proc_aman): dbc_aman = tod_ops.flags.get_det_bias_flags(aman, merge=False, full_output=True, **self.calc_cfgs) @@ -98,7 +102,7 @@ class Trends(_FracFlaggedMixIn, _Preprocess): signal: "signal" # optional calc: max_trend: 2.5 - n_pieces: 10 + t_piece: 100 save: True select: kind: "any" @@ -112,7 +116,8 @@ def __init__(self, step_cfgs): self.signal = step_cfgs.get('signal', 'signal') super().__init__(step_cfgs) - + + #@cprofile(name) def calc_and_save(self, aman, proc_aman): _, trend_aman = tod_ops.flags.get_trending_flags( aman, merge=False, full_output=True, @@ -170,6 +175,7 @@ class GlitchDetection(_FracFlaggedMixIn, _Preprocess): name = "glitches" _influx_field = "glitch_flags_frac" + #@cprofile(name) def calc_and_save(self, aman, proc_aman): _, glitch_aman = tod_ops.flags.get_glitch_flags(aman, merge=False, full_output=True, **self.calc_cfgs @@ -219,6 +225,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def process(self, aman, proc_aman): field = self.process_cfgs['jumps_aman'] aman[self.signal] = tod_ops.jumps.jumpfix_subtract_heights( @@ -257,6 +264,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def calc_and_save(self, aman, proc_aman): function = self.calc_cfgs.get("function", "find_jumps") cfgs = self.calc_cfgs.get('jump_configs', {}) @@ -321,7 +329,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - + #@cprofile(name) def process(self, aman, proc_aman): freqs, Pxx = tod_ops.fft_ops.calc_psd(aman, signal=aman[self.signal], **self.process_cfgs) @@ -333,6 +341,7 @@ def process(self, aman, proc_aman): fft_aman.wrap("Pxx", Pxx, [(0,"dets"), (1,"nusamps")]) aman.wrap(self.wrap, fft_aman) + #@cprofile(name) def calc_and_save(self, aman, proc_aman): self.save(proc_aman, aman[self.wrap]) @@ -371,6 +380,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def calc_and_save(self, aman, proc_aman): if self.psd not in aman: raise ValueError("PSD is not saved in AxisManager") @@ -457,7 +467,8 @@ def __init__(self, step_cfgs): self.signal = step_cfgs.get('signal', 'signal') super().__init__(step_cfgs) - + + #@cprofile(name) def process(self, aman, proc_aman): if self.process_cfgs["kind"] == "single_value": if self.process_cfgs.get("divide", False): @@ -498,6 +509,7 @@ class EstimateHWPSS(_Preprocess): _influx_field = "hwpss_coeffs" _influx_percentiles = [0, 50, 75, 90, 95, 100] + #@cprofile(name) def calc_and_save(self, aman, proc_aman): hwpss_stats = hwp.get_hwpss(aman, **self.calc_cfgs) self.save(proc_aman, hwpss_stats) @@ -520,7 +532,7 @@ def plot(self, aman, proc_aman, filename): plot_4f_2f_counts(aman, filename=filename.replace('{name}', f'{ufm}_4f_2f_counts')) plot_hwpss_fit_status(aman, proc_aman[self.calc_cfgs["hwpss_stats_name"]], filename=filename.replace('{name}', f'{ufm}_hwpss_stats')) - @classmethod + #@classmethod def gen_metric(cls, meta, proc_aman): """ Generate a QA metric for the coefficients of the HWPSS 
fit. Coefficient percentiles and mean are recorded for every mode and detset. @@ -612,6 +624,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def process(self, aman, proc_aman): if not(proc_aman[self.hwpss_stats] is None): modes = [int(m[1:]) for m in proc_aman[self.hwpss_stats].modes.vals[::2]] @@ -632,6 +645,7 @@ class Apodize(_Preprocess): """ name = "apodize" + #@cprofile(name) def process(self, aman, proc_aman): tod_ops.apodize.apodize_cosine(aman, **self.process_cfgs) @@ -642,6 +656,7 @@ class Demodulate(_Preprocess): """ name = "demodulate" + #@cprofile(name) def process(self, aman, proc_aman): hwp.demod_tod(aman, **self.process_cfgs["demod_cfgs"]) if self.process_cfgs.get("trim_samps"): @@ -673,7 +688,8 @@ class EstimateAzSS(_Preprocess): .. autofunction:: sotodlib.tod_ops.azss.get_azss """ name = "estimate_azss" - + + #@cprofile(name) def calc_and_save(self, aman, proc_aman): calc_aman, _ = tod_ops.azss.get_azss(aman, **self.calc_cfgs) self.save(proc_aman, calc_aman) @@ -709,6 +725,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def process(self, aman, proc_aman): tod_ops.gapfill.fill_glitches( aman, signal=aman[self.signal], @@ -725,6 +742,7 @@ class FlagTurnarounds(_Preprocess): """ name = 'flag_turnarounds' + #@cprofile(name) def calc_and_save(self, aman, proc_aman): if self.calc_cfgs is None: self.calc_cfgs = {} @@ -751,7 +769,8 @@ def save(self, proc_aman, turn_aman): return if self.save_cfgs: proc_aman.wrap("turnaround_flags", turn_aman) - + + #@cprofile(name) def process(self, aman, proc_aman): tod_ops.flags.get_turnaround_flags(aman, **self.process_cfgs) @@ -763,6 +782,7 @@ class SubPolyf(_Preprocess): """ name = 'sub_polyf' + #@cprofile(name) def process(self, aman, proc_aman): tod_ops.sub_polyf.subscan_polyfilter(aman, **self.process_cfgs) @@ -774,6 +794,7 @@ class SSOFootprint(_Preprocess): """ name = 'sso_footprint' + #@cprofile(name) def calc_and_save(self, aman, proc_aman): if self.calc_cfgs.get("source_list", None): ssos = self.calc_cfgs["source_list"] @@ -831,6 +852,7 @@ class DarkDets(_Preprocess): """ name = "dark_dets" + #@cprofile(name) def calc_and_save(self, aman, proc_aman): mskdarks = tod_ops.flags.get_dark_dets(aman, merge=False) @@ -875,7 +897,8 @@ class SourceFlags(_Preprocess): .. 
autofunction:: sotodlib.tod_ops.flags.get_source_flags """ name = "source_flags" - + + #@cprofile(name) def calc_and_save(self, aman, proc_aman): center_on = self.calc_cfgs.get('center_on', 'planet') # Get source from tags @@ -933,6 +956,7 @@ class HWPAngleModel(_Preprocess): """ name = "hwp_angle_model" + #@cprofile(name) def process(self, aman, proc_aman): if (not 'hwp_angle' in aman._fields) and ('hwp_angle' in proc_aman._fields): aman.wrap('hwp_angle', proc_aman['hwp_angle']['hwp_angle'], @@ -940,6 +964,7 @@ def process(self, aman, proc_aman): else: return + #@cprofile(name) def calc_and_save(self, aman, proc_aman): hwp_angle_model.apply_hwp_angle_model(aman, **self.calc_cfgs) hwp_angle_aman = core.AxisManager(aman.samps) @@ -989,6 +1014,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def process(self, aman, proc_aman): filt_function = self.process_cfgs.get( "filt_function", @@ -1045,6 +1071,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + #@cprofile(name) def calc_and_save(self, aman, proc_aman): bands = np.unique(aman.det_info.wafer.bandpass) bands = bands[bands != 'NC'] @@ -1089,6 +1116,7 @@ class PTPFlags(_Preprocess): """ name = "ptp_flags" + #@cprofile(name) def calc_and_save(self, aman, proc_aman): mskptps = tod_ops.flags.get_ptp_flags(aman, **self.calc_cfgs) @@ -1129,6 +1157,7 @@ class InvVarFlags(_Preprocess): """ name = "inv_var_flags" + #@cprofile(name) def calc_and_save(self, aman, proc_aman): mskptps = tod_ops.flags.get_inv_var_flags(aman, **self.calc_cfgs) @@ -1170,12 +1199,13 @@ class EstimateT2P(_Preprocess): trans_width: 0.1 save: True - .. autofunction:: sotodlib.tod_ops.t2pleakage.get_t2p_coeffs + .. autofunction:: sotodlib.tod_ops.t2pleakage.get_corr """ name = "estimate_t2p" + #@cprofile(name) def calc_and_save(self, aman, proc_aman): - t2p_aman = tod_ops.t2pleakage.get_t2p_coeffs(aman, **self.calc_cfgs) + t2p_aman = tod_ops.t2pleakage.get_corr(aman, **self.calc_cfgs) self.save(proc_aman, t2p_aman) def save(self, proc_aman, t2p_aman): @@ -1194,12 +1224,13 @@ class SubtractT2P(_Preprocess): Q_sig_name: 'demodQ' U_sig_name: 'demodU' - .. autofunction:: sotodlib.tod_ops.t2pleakage.subtract_t2p + .. 
autofunction:: sotodlib.tod_ops.t2pleakage.subtract_leakage """ name = "subtract_t2p" + #@cprofile(name) def process(self, aman, proc_aman): - tod_ops.t2pleakage.subtract_t2p(aman, proc_aman['t2p'], + tod_ops.t2pleakage.subtract_leakage(aman, proc_aman['t2p'], **self.process_cfgs) _Preprocess.register(SubtractT2P) diff --git a/sotodlib/tod_ops/fft_ops.py b/sotodlib/tod_ops/fft_ops.py index 1dc06eeaa..4f173e8e6 100644 --- a/sotodlib/tod_ops/fft_ops.py +++ b/sotodlib/tod_ops/fft_ops.py @@ -1,5 +1,6 @@ """FFTs and related operations """ +import sys import numdifftools as ndt import numpy as np import pyfftw @@ -151,6 +152,7 @@ def build_rfft_object(n_det, n, direction="FFTW_FORWARD", **kwargs): a = pyfftw.empty_aligned((n_det, n), dtype="float32") b = pyfftw.empty_aligned((n_det, (n + 2) // 2), dtype="complex64") + if direction == "FFTW_FORWARD": t_fun = pyfftw.FFTW(a, b, direction=direction, **fftargs) elif direction == "FFTW_BACKWARD": @@ -426,7 +428,8 @@ def fit_noise_model( pfit = np.polyfit(np.log10(f[f < lowf]), np.log10(p[f < lowf]), 1) fidx = np.argmin(np.abs(10 ** np.polyval(pfit, np.log10(f)) - wnest)) p0 = [f[fidx], wnest, -pfit[0]] - res = minimize(neglnlike, p0, args=(f, p), method="Nelder-Mead") + bounds = [(0, None), (sys.float_info.min, None), (None, None)] + res = minimize(neglnlike, p0, args=(f, p), bounds=bounds, method="Nelder-Mead") try: Hfun = ndt.Hessian(lambda params: neglnlike(params, f, p), full_output=True) hessian_ndt, _ = Hfun(res["x"]) diff --git a/sotodlib/tod_ops/flags.py b/sotodlib/tod_ops/flags.py index 3cac9eeec..992eafde9 100644 --- a/sotodlib/tod_ops/flags.py +++ b/sotodlib/tod_ops/flags.py @@ -16,7 +16,7 @@ from . import fourier_filter def get_det_bias_flags(aman, detcal=None, rfrac_range=(0.1, 0.7), - psat_range=(0, 15), rn_range=None, si_nan=False, + psat_range=None, rn_range=None, si_nan=False, merge=True, overwrite=True, name='det_bias_flags', full_output=False): """ @@ -73,15 +73,17 @@ def get_det_bias_flags(aman, detcal=None, rfrac_range=(0.1, 0.7), ranges = [detcal.bg >= 0, detcal.r_tes > 0, detcal.r_frac >= rfrac_range[0], - detcal.r_frac <= rfrac_range[1], - detcal.p_sat*1e12 >= psat_range[0], - detcal.p_sat*1e12 <= psat_range[1]] + detcal.r_frac <= rfrac_range[1] + ] + if psat_range is not None: + ranges.append(detcal.p_sat*1e12 >= psat_range[0]) + ranges.append(detcal.p_sat*1e12 <= psat_range[1]) if rn_range is not None: ranges.append(detcal.r_n >= rn_range[0]) ranges.append(detcal.r_n <= rn_range[1]) if si_nan: ranges.append(np.isnan(detcal.s_i) == False) - + msk = ~(np.all(ranges, axis=0)) # Expand mask to ndets x nsamps RangesMatrix if 'samps' in aman: @@ -108,16 +110,22 @@ def get_det_bias_flags(aman, detcal=None, rfrac_range=(0.1, 0.7), ranges = [detcal.bg >= 0, detcal.r_tes > 0, detcal.r_frac >= rfrac_range[0], - detcal.r_frac <= rfrac_range[1], - detcal.p_sat*1e12 >= psat_range[0], - detcal.p_sat*1e12 <= psat_range[1]] + detcal.r_frac <= rfrac_range[1] + ] + if psat_range is not None: + ranges.append(detcal.p_sat*1e12 >= psat_range[0]) + ranges.append(detcal.p_sat*1e12 <= psat_range[1]) + for range in ranges: msk = ~(np.all([range], axis=0)) msks.append(RangesMatrix([Ranges.ones_like(x) if Y else Ranges.zeros_like(x) for Y in msk])) - msk_names = ['bg', 'r_tes', 'r_frac_gt', 'r_frac_lt', 'p_sat_gt', 'p_sat_lt'] - + msk_names = ['bg', 'r_tes', 'r_frac_gt', 'r_frac_lt'] + + if psat_range is not None: + msk_names.extend(['p_sat_gt', 'p_sat_lt']) + for i, msk in enumerate(msks): if 'samps' in aman: msk_aman.wrap(f'{msk_names[i]}_flags', msk, 
[(0, 'dets'), (1, 'samps')]) @@ -401,7 +409,7 @@ def get_glitch_flags(aman, def get_trending_flags(aman, max_trend=1.2, - n_pieces=1, + t_piece=500, max_samples=500, signal=None, timestamps=None, @@ -420,8 +428,8 @@ def get_trending_flags(aman, The tod max_trend : float Slope at which detectors are unlocked. The default is for use with phase units. - n_pieces : int - Number of pieces to cut the timestream in to to look for trends. + t_piece : float + Duration in seconds of each pieces to cut the timestream in to to look for trends max_samples : int Maximum samples to compute the slope with. signal : array @@ -460,6 +468,13 @@ def get_trending_flags(aman, slopes = np.zeros((len(signal), 0)) cut = np.zeros((len(signal), 0), dtype=bool) samp_edges = [0] + + # Get sampling rate + fs = 1 / np.nanmedian(np.diff(timestamps)) + n_samples_per_piece = int(t_piece * fs) + # How many pieces can timestamps be divided into + n_pieces = len(timestamps) // n_samples_per_piece + for t, s in zip( np.array_split(timestamps, n_pieces), np.array_split(signal, n_pieces, 1) ): diff --git a/sotodlib/tod_ops/t2pleakage.py b/sotodlib/tod_ops/t2pleakage.py index 71377a3af..af21ad21d 100644 --- a/sotodlib/tod_ops/t2pleakage.py +++ b/sotodlib/tod_ops/t2pleakage.py @@ -5,6 +5,7 @@ from sotodlib.tod_ops import filters, apodize from sotodlib.tod_ops.fft_ops import calc_psd, calc_wn from scipy.odr import ODR, Model, RealData +from lmfit import Model as LmfitModel def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', wn_demod=None, @@ -158,3 +159,120 @@ def subtract_t2p(aman, t2p_aman, T_signal=None): aman.demodQ -= np.multiply(T_signal.T, t2p_aman.coeffsQ).T aman.demodU -= np.multiply(T_signal.T, t2p_aman.coeffsU).T +def leakage_model(dT, AQ, AU, lamQ, lamU): + return AQ + lamQ * dT + 1.j * (AU + lamU * dT) + +def get_corr(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', + mask=None, ds_factor=100, subtract_sig=False, merge_stats=True, + t2p_stats_name='t2p_stats'): + """ + Compute the leakage coefficients from temperature (T) to polarization (Q and U). + Optionally subtract this leakage and return axismanager of coefficients. + + Parameters + ---------- + aman : AxisManager + AxisManager object containing the TOD data. + T_sig_name : str + Name of the temperature signal in `aman`. Default is 'dsT'. + Q_sig_name : str + Name of the Q polarization signal in `aman`. Default is 'demodQ'. + U_sig_name : str + Name of the U polarization signal in `aman`. Default is 'demodU'. + subtract_sig : bool + Whether to subtract the calculated leakage from the polarization signals. Default is False. + merge_stats : bool + Whether to merge the calculated statistics back into `aman`. Default is True. + t2p_stats_name : str + Name under which to wrap the output AxisManager containing statistics. Default is 't2p_stats'. + + Returns + ------- + out_aman : AxisManager + An AxisManager containing leakage coefficients. + """ + + if mask is None: + mask = np.ones_like(aman.dsT, dtype='bool') + + A_Q_array = [] + A_U_array = [] + A_P_array = [] + lambda_Q_array = [] + lambda_U_array = [] + lambda_P_array = [] + + for di, det in enumerate(aman.dets.vals): + x = aman[T_sig_name][di][mask[di]][::ds_factor] + yQ = aman[Q_sig_name][di][mask[di]][::ds_factor] + yU = aman[U_sig_name][di][mask[di]][::ds_factor] + + model = LmfitModel(leakage_model, independent_vars=['dT']) + params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), + lamQ=0., lamU=0.) 
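+        # The fit treats Q and U jointly: leakage_model returns
+        # AQ + lamQ*dT + 1j*(AU + lamU*dT), so fitting it to yQ + 1j*yU
+        # estimates the per-detector offsets (AQ, AU) and the T-to-P leakage
+        # coefficients (lamQ, lamU) in a single complex-valued regression.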
+        result = model.fit(yQ + 1j * yU, params, dT=x)
+        A_Q = result.params['AQ'].value
+        A_U = result.params['AU'].value
+        A_P = np.sqrt(A_Q**2 + A_U**2)
+        lambda_Q = result.params['lamQ'].value
+        lambda_U = result.params['lamU'].value
+        lambda_P = np.sqrt(lambda_Q**2 + lambda_U**2)
+
+        A_Q_array.append(A_Q)
+        A_U_array.append(A_U)
+        A_P_array.append(A_P)
+        lambda_Q_array.append(lambda_Q)
+        lambda_U_array.append(lambda_U)
+        lambda_P_array.append(lambda_P)
+
+    A_Q_array = np.array(A_Q_array)
+    A_U_array = np.array(A_U_array)
+    A_P_array = np.array(A_P_array)
+
+    lambda_Q_array = np.array(lambda_Q_array)
+    lambda_U_array = np.array(lambda_U_array)
+    lambda_P_array = np.array(lambda_P_array)
+
+    out_aman = core.AxisManager(aman.dets, aman.samps)
+    out_aman.wrap('AQ', A_Q_array, [(0, 'dets')])
+    out_aman.wrap('AU', A_U_array, [(0, 'dets')])
+
+    out_aman.wrap('lamQ', lambda_Q_array, [(0, 'dets')])
+    out_aman.wrap('lamU', lambda_U_array, [(0, 'dets')])
+
+    if subtract_sig:
+        subtract_leakage(aman, out_aman)
+    if merge_stats:
+        aman.wrap(t2p_stats_name, out_aman)
+
+    return out_aman
+
+def subtract_leakage(aman, t2p_aman, T_signal=None, mask=None, ds_factor=100, nperseg=600*200):
+    """
+    Subtract T to P leakage.
+
+    Parameters
+    ----------
+    aman : AxisManager
+        The tod.
+    t2p_aman : AxisManager
+        Axis manager with Q and U leakage coefficients.
+        Q coeffs are in fields ``lamQ`` and ``AQ`` and U coeffs are in fields
+        ``lamU`` and ``AU``.
+    T_signal : array
+        Temperature signal to scale and subtract from Q/U.
+        Default is ``aman['dsT']``.
+
+    """
+
+    if T_signal is None:
+        T_signal = aman['dsT']
+
+    aman.demodQ -= (T_signal * t2p_aman.lamQ[:, np.newaxis] + t2p_aman.AQ[:, np.newaxis])
+    aman.demodU -= (T_signal * t2p_aman.lamU[:,
np.newaxis] + t2p_aman.AU[:, np.newaxis]) - '''freq, Pxx_demodQ_new = calc_psd(aman, signal=aman.demodQ, nperseg=nperseg, merge=False) + freq, Pxx_demodQ_new = calc_psd(aman, signal=aman.demodQ, nperseg=nperseg, merge=False) freq, Pxx_demodU_new = calc_psd(aman, signal=aman.demodU, nperseg=nperseg, merge=False) aman.Pxx_demodQ = Pxx_demodQ_new aman.Pxx_demodU = Pxx_demodU_new - ''' \ No newline at end of file + \ No newline at end of file From b0100e040734d5385e420e1b9497c0bde7d9880a Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Tue, 1 Oct 2024 10:18:13 -0700 Subject: [PATCH 09/39] add update to site_pipeline --- sotodlib/site_pipeline/preprocess_tod.py | 63 +++++++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/preprocess_tod.py b/sotodlib/site_pipeline/preprocess_tod.py index 92c21d9e7..e0b468ad3 100644 --- a/sotodlib/site_pipeline/preprocess_tod.py +++ b/sotodlib/site_pipeline/preprocess_tod.py @@ -16,8 +16,31 @@ import sotodlib.site_pipeline.util as sp_util from sotodlib.preprocess import _Preprocess, Pipeline, processes +import cProfile +import mtprof +import pstats +import io + logger = sp_util.init_logger("preprocess") +def profile_function(func, profile_path, *args, **kwargs): + + local_vars = None + + def wrapper_func(): + nonlocal local_vars # Ensure `result` from the outer scope is modified + # Store the function result in a local variable accessible through locals() + local_vars = func(*args, **kwargs) + + # Ensure profile_path is a valid file path + try: + cProfile.runctx('wrapper_func()', globals(), locals(), filename=profile_path) + logger.info(f"Profile saved to {profile_path}") + except Exception as e: + logger.error(f"Error during profiling: {e}") + + return local_vars + def dummy_preproc(obs_id, group_list, logger, configs, overwrite, run_parallel): """ @@ -692,13 +715,43 @@ def main( dest_file = basename + '_' + str(nfile).zfill(3) + '.h5' logger.info(f'Starting dest_file set to {dest_file}') + + """Run write_block obs-ids in parallel at once and write all to the sqlite db.""" + profile_files = [] # Run write_block obs-ids in parallel at once then write all to the sqlite db. 
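+    # Each obs_id is profiled into its own .prof file via profile_function;
+    # the per-obs profiles collected in profile_files are merged into a
+    # single combined_profile.prof with pstats.Stats at the end of main().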
- with ProcessPoolExecutor(nproc) as exe: + '''with ProcessPoolExecutor(nproc) as exe: futures = [exe.submit(preprocess_tod, obs_id=r[0]['obs_id'], group_list=r[1], verbosity=verbosity, configs=swap_archive(configs, f'temp/{r[0]["obs_id"]}.h5'), overwrite=overwrite, run_parallel=True) for r in run_list] + ''' + + output_dir = os.path.dirname(configs['archive']['index']) + logger.info(f'using profiling directory {output_dir}') + + profile_files = [] + + with ProcessPoolExecutor(nproc) as exe: + futures = [] + for r in run_list: + # Generate a profile file in the specified directory + profile_path = os.path.join(output_dir, f'{r[0]["obs_id"]}.prof') + profile_files.append(profile_path) + + future = exe.submit( + profile_function, + preprocess_tod, + profile_path, + obs_id=r[0]['obs_id'], + group_list=r[1], + verbosity=verbosity, + configs=swap_archive(configs, f'temp/{r[0]["obs_id"]}.h5'), + overwrite=overwrite, + run_parallel=True + ) + futures.append(future) + for future in as_completed(futures): logger.info('New future as_completed result') try: @@ -744,6 +797,14 @@ def main( f = open(errlog, 'a') f.write(f'\n{time.time()}, {err}, {db_datasets[0]}\n{db_datasets[1]}\n') f.close() + # Combine the profile results into a single profile + combined_profile_path = os.path.join(output_dir, 'combined_profile.prof') + combined_stats = pstats.Stats(profile_files[0]) + for profile_file in profile_files[1:]: + combined_stats.add(profile_file) + + # Save the combined stats to a file + combined_stats.dump_stats(combined_profile_path) if __name__ == '__main__': sp_util.main_launcher(main, get_parser) From 5d120a3194af1ab6f19867c709c294bb3bd7b217 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 1 Oct 2024 14:31:28 -0700 Subject: [PATCH 10/39] Restoring the MPI communicator capabilities --- sotodlib/mapmaking/demod_mapmaker.py | 36 +++++++++++++++++++--------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index d851c290c..d0c36f58c 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -7,7 +7,7 @@ """ __all__ = ['DemodMapmaker','DemodSignal','DemodSignalMap','make_demod_map'] import numpy as np -from pixell import enmap, utils, tilemap, bunch +from pixell import enmap, utils, tilemap, bunch, mpi import so3g.proj from .. import coords @@ -44,7 +44,7 @@ def __init__(self, signals=[], noise_model=None, dtype=np.float32, verbose=False rather than from obs.dsT, obs.demodQ, obs.demodU Example usage :: - signal_map = mapmaking.DemodSignalMap(shape, wcs) + signal_map = mapmaking.DemodSignalMap(shape, wcs, comm) signals = [signal_map] mapmaker = mapmaking.DemodMapmaker(signals, noise_model=noise_model) @@ -141,7 +141,7 @@ def from_work(self, x): return x def write (self, prefix, tag, x): pass class DemodSignalMap(DemodSignal): - def __init__(self, shape, wcs, comps="TQU", name="sky", ofmt="{name}", output=True, + def __init__(self, shape, wcs, comm, comps="TQU", name="sky", ofmt="{name}", output=True, ext="fits", dtype=np.float32, sys=None, recenter=None, tile_shape=(500,500), tiled=False, Nsplits=1, singlestream=False): """ Signal describing a non-distributed sky map. 
Signal describing a sky map in the coordinate @@ -154,6 +154,8 @@ def __init__(self, shape, wcs, comps="TQU", name="sky", ofmt="{name}", output=Tr Shape of the output map geometry wcs : wcs WCS of the output map geometry + comm : MPI.comm + MPI communicator comps : str, optional Components to map name : str, optional @@ -185,6 +187,7 @@ def __init__(self, shape, wcs, comps="TQU", name="sky", ofmt="{name}", output=Tr """ DemodSignal.__init__(self, name, ofmt, output, ext) + self.comm = comm self.comps = comps self.sys = sys self.recenter = recenter @@ -287,6 +290,14 @@ def prepare(self): if self.ready: return if self.tiled: self.geo_work = self.rhs.geometry + self.rhs = tilemap.redistribute(self.rhs, self.comm) + self.div = tilemap.redistribute(self.div, self.comm) + self.hits = tilemap.redistribute(self.hits,self.comm) + else: + if self.comm is not None: + self.rhs = utils.allreduce(self.rhs, self.comm) + self.div = utils.allreduce(self.div, self.comm) + self.hits = utils.allreduce(self.hits,self.comm) self.ready = True @property @@ -297,13 +308,14 @@ def write(self, prefix, tag, m): oname = self.ofmt.format(name=self.name) oname = "%s%s_%s.%s" % (prefix, oname, tag, self.ext) if self.tiled: - tilemap.write_map(oname, m) + tilemap.write_map(oname, m, self.comm) else: - enmap.write_map(oname, m) + if self.comm is None or self.comm.rank == 0: + enmap.write_map(oname, m) return oname def make_demod_map(context, obslist, shape, wcs, noise_model, L, - preprocess_config, comps="TQU", t0=0, + preprocess_config, comm=mpi.COMM_WORLD, comps="TQU", t0=0, dtype_tod=np.float32, dtype_map=np.float32, tag="", verbose=0, split_labels=None, site='so_sat3', recenter=None, outs=None, singlestream=False): @@ -360,12 +372,12 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, """ pre = "" if tag is None else tag + " " - L.info(pre + "Initializing equation system") + if comm.rank == 0: L.info(pre + "Initializing equation system") # Set up our mapmaking equation if split_labels==None: # this is the case where we did not request any splits at all - signal_map = DemodSignalMap(shape, wcs, comps=comps, + signal_map = DemodSignalMap(shape, wcs, comm, comps=comps, dtype=dtype_map, tiled=False, ofmt="", singlestream=singlestream, recenter=recenter ) @@ -374,7 +386,7 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, # We count how many split we'll make, we need to define the Nsplits # maps inside the DemodSignalMap Nsplits = len(split_labels) - signal_map = DemodSignalMap(shape, wcs, comps=comps, + signal_map = DemodSignalMap(shape, wcs, comm, comps=comps, dtype=dtype_map, tiled=False, ofmt="", Nsplits=Nsplits, singlestream=singlestream, @@ -384,7 +396,8 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, dtype=dtype_tod, verbose=verbose>0, singlestream=singlestream) - L.info(pre + "Building RHS") + + if comm.rank == 0: L.info(pre + "Building RHS") # And feed it with our observations nobs_kept = 0 for oi in range(len(obslist)): @@ -413,13 +426,14 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, mapmaker.add_obs(name, obs, split_labels=split_labels) L.info('Done with tod %s:%s:%s'%(obs_id,detset,band)) nobs_kept += 1 + nobs_kept = comm.allreduce(nobs_kept) # if we skip all the obs if nobs_kept == 0: return None for signal in signals: signal.prepare() - L.info(pre + "Writing F+B outputs") + if comm.rank == 0: L.info(pre + "Writing F+B outputs") wmap = [] weights = [] for n_split in range(signal_map.Nsplits): From 4f15b0887f088f5014524feb716a7a6daf9ff4b7 
Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 2 Oct 2024 12:34:29 -0700 Subject: [PATCH 11/39] remove profiling calls --- sotodlib/preprocess/processes.py | 43 +++------------- sotodlib/site_pipeline/preprocess_tod.py | 63 +----------------------- 2 files changed, 7 insertions(+), 99 deletions(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index c392ab279..8f437fe5b 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -14,8 +14,6 @@ from .pcore import _Preprocess, _FracFlaggedMixIn from .. import flag_utils -from ..utils.profile import cprofile - class FFTTrim(_Preprocess): """Trim the AxisManager to optimize for faster FFTs later in the pipeline. All processing configs go to `fft_trim` @@ -23,7 +21,6 @@ class FFTTrim(_Preprocess): .. autofunction:: sotodlib.tod_ops.fft_trim """ name = "fft_trim" - #@cprofile(name) def process(self, aman, proc_aman): tod_ops.fft_trim(aman, **self.process_cfgs) @@ -39,7 +36,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): tod_ops.detrend_tod(aman, signal_name=self.signal, **self.process_cfgs) @@ -54,7 +50,6 @@ class DetBiasFlags(_FracFlaggedMixIn, _Preprocess): name = "det_bias_flags" _influx_field = "det_bias_flags_frac" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): dbc_aman = tod_ops.flags.get_det_bias_flags(aman, merge=False, full_output=True, **self.calc_cfgs) @@ -117,7 +112,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def calc_and_save(self, aman, proc_aman): _, trend_aman = tod_ops.flags.get_trending_flags( aman, merge=False, full_output=True, @@ -175,7 +169,6 @@ class GlitchDetection(_FracFlaggedMixIn, _Preprocess): name = "glitches" _influx_field = "glitch_flags_frac" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): _, glitch_aman = tod_ops.flags.get_glitch_flags(aman, merge=False, full_output=True, **self.calc_cfgs @@ -225,7 +218,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): field = self.process_cfgs['jumps_aman'] aman[self.signal] = tod_ops.jumps.jumpfix_subtract_heights( @@ -264,7 +256,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def calc_and_save(self, aman, proc_aman): function = self.calc_cfgs.get("function", "find_jumps") cfgs = self.calc_cfgs.get('jump_configs', {}) @@ -329,7 +320,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): freqs, Pxx = tod_ops.fft_ops.calc_psd(aman, signal=aman[self.signal], **self.process_cfgs) @@ -341,7 +331,6 @@ def process(self, aman, proc_aman): fft_aman.wrap("Pxx", Pxx, [(0,"dets"), (1,"nusamps")]) aman.wrap(self.wrap, fft_aman) - #@cprofile(name) def calc_and_save(self, aman, proc_aman): self.save(proc_aman, aman[self.wrap]) @@ -380,12 +369,11 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def calc_and_save(self, aman, proc_aman): if self.psd not in aman: raise ValueError("PSD is not saved in AxisManager") psd = aman[self.psd] - + if self.calc_cfgs is None: self.calc_cfgs = {} @@ -468,7 +456,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): if self.process_cfgs["kind"] == "single_value": if self.process_cfgs.get("divide", False): @@ -509,7 +496,6 @@ class EstimateHWPSS(_Preprocess): _influx_field = "hwpss_coeffs" 
_influx_percentiles = [0, 50, 75, 90, 95, 100] - #@cprofile(name) def calc_and_save(self, aman, proc_aman): hwpss_stats = hwp.get_hwpss(aman, **self.calc_cfgs) self.save(proc_aman, hwpss_stats) @@ -624,7 +610,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): if not(proc_aman[self.hwpss_stats] is None): modes = [int(m[1:]) for m in proc_aman[self.hwpss_stats].modes.vals[::2]] @@ -645,7 +630,6 @@ class Apodize(_Preprocess): """ name = "apodize" - #@cprofile(name) def process(self, aman, proc_aman): tod_ops.apodize.apodize_cosine(aman, **self.process_cfgs) @@ -656,7 +640,6 @@ class Demodulate(_Preprocess): """ name = "demodulate" - #@cprofile(name) def process(self, aman, proc_aman): hwp.demod_tod(aman, **self.process_cfgs["demod_cfgs"]) if self.process_cfgs.get("trim_samps"): @@ -689,7 +672,6 @@ class EstimateAzSS(_Preprocess): """ name = "estimate_azss" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): calc_aman, _ = tod_ops.azss.get_azss(aman, **self.calc_cfgs) self.save(proc_aman, calc_aman) @@ -713,6 +695,8 @@ class GlitchFill(_Preprocess): nbuf: 10 use_pca: False modes: 1 + in_place: True + wrap_name: None .. autofunction:: sotodlib.tod_ops.gapfill.fill_glitches """ @@ -725,7 +709,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): tod_ops.gapfill.fill_glitches( aman, signal=aman[self.signal], @@ -742,7 +725,6 @@ class FlagTurnarounds(_Preprocess): """ name = 'flag_turnarounds' - #@cprofile(name) def calc_and_save(self, aman, proc_aman): if self.calc_cfgs is None: self.calc_cfgs = {} @@ -770,7 +752,6 @@ def save(self, proc_aman, turn_aman): if self.save_cfgs: proc_aman.wrap("turnaround_flags", turn_aman) - #@cprofile(name) def process(self, aman, proc_aman): tod_ops.flags.get_turnaround_flags(aman, **self.process_cfgs) @@ -782,7 +763,6 @@ class SubPolyf(_Preprocess): """ name = 'sub_polyf' - #@cprofile(name) def process(self, aman, proc_aman): tod_ops.sub_polyf.subscan_polyfilter(aman, **self.process_cfgs) @@ -794,7 +774,6 @@ class SSOFootprint(_Preprocess): """ name = 'sso_footprint' - #@cprofile(name) def calc_and_save(self, aman, proc_aman): if self.calc_cfgs.get("source_list", None): ssos = self.calc_cfgs["source_list"] @@ -852,7 +831,6 @@ class DarkDets(_Preprocess): """ name = "dark_dets" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): mskdarks = tod_ops.flags.get_dark_dets(aman, merge=False) @@ -898,7 +876,6 @@ class SourceFlags(_Preprocess): """ name = "source_flags" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): center_on = self.calc_cfgs.get('center_on', 'planet') # Get source from tags @@ -956,7 +933,6 @@ class HWPAngleModel(_Preprocess): """ name = "hwp_angle_model" - #@cprofile(name) def process(self, aman, proc_aman): if (not 'hwp_angle' in aman._fields) and ('hwp_angle' in proc_aman._fields): aman.wrap('hwp_angle', proc_aman['hwp_angle']['hwp_angle'], @@ -964,7 +940,6 @@ def process(self, aman, proc_aman): else: return - #@cprofile(name) def calc_and_save(self, aman, proc_aman): hwp_angle_model.apply_hwp_angle_model(aman, **self.calc_cfgs) hwp_angle_aman = core.AxisManager(aman.samps) @@ -1014,7 +989,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def process(self, aman, proc_aman): filt_function = self.process_cfgs.get( "filt_function", @@ -1071,7 +1045,6 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) - #@cprofile(name) def calc_and_save(self, aman, 
proc_aman): bands = np.unique(aman.det_info.wafer.bandpass) bands = bands[bands != 'NC'] @@ -1116,7 +1089,6 @@ class PTPFlags(_Preprocess): """ name = "ptp_flags" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): mskptps = tod_ops.flags.get_ptp_flags(aman, **self.calc_cfgs) @@ -1157,7 +1129,6 @@ class InvVarFlags(_Preprocess): """ name = "inv_var_flags" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): mskptps = tod_ops.flags.get_inv_var_flags(aman, **self.calc_cfgs) @@ -1199,13 +1170,12 @@ class EstimateT2P(_Preprocess): trans_width: 0.1 save: True - .. autofunction:: sotodlib.tod_ops.t2pleakage.get_corr + .. autofunction:: sotodlib.tod_ops.t2pleakage.get_t2p_coeffs """ name = "estimate_t2p" - #@cprofile(name) def calc_and_save(self, aman, proc_aman): - t2p_aman = tod_ops.t2pleakage.get_corr(aman, **self.calc_cfgs) + t2p_aman = tod_ops.t2pleakage.get_t2p_coeffs(aman, **self.calc_cfgs) self.save(proc_aman, t2p_aman) def save(self, proc_aman, t2p_aman): @@ -1228,9 +1198,8 @@ class SubtractT2P(_Preprocess): """ name = "subtract_t2p" - #@cprofile(name) def process(self, aman, proc_aman): - tod_ops.t2pleakage.subtract_leakage(aman, proc_aman['t2p'], + tod_ops.t2pleakage.subtract_t2p(aman, proc_aman['t2p'], **self.process_cfgs) _Preprocess.register(SubtractT2P) diff --git a/sotodlib/site_pipeline/preprocess_tod.py b/sotodlib/site_pipeline/preprocess_tod.py index e0b468ad3..92c21d9e7 100644 --- a/sotodlib/site_pipeline/preprocess_tod.py +++ b/sotodlib/site_pipeline/preprocess_tod.py @@ -16,31 +16,8 @@ import sotodlib.site_pipeline.util as sp_util from sotodlib.preprocess import _Preprocess, Pipeline, processes -import cProfile -import mtprof -import pstats -import io - logger = sp_util.init_logger("preprocess") -def profile_function(func, profile_path, *args, **kwargs): - - local_vars = None - - def wrapper_func(): - nonlocal local_vars # Ensure `result` from the outer scope is modified - # Store the function result in a local variable accessible through locals() - local_vars = func(*args, **kwargs) - - # Ensure profile_path is a valid file path - try: - cProfile.runctx('wrapper_func()', globals(), locals(), filename=profile_path) - logger.info(f"Profile saved to {profile_path}") - except Exception as e: - logger.error(f"Error during profiling: {e}") - - return local_vars - def dummy_preproc(obs_id, group_list, logger, configs, overwrite, run_parallel): """ @@ -715,43 +692,13 @@ def main( dest_file = basename + '_' + str(nfile).zfill(3) + '.h5' logger.info(f'Starting dest_file set to {dest_file}') - - """Run write_block obs-ids in parallel at once and write all to the sqlite db.""" - profile_files = [] # Run write_block obs-ids in parallel at once then write all to the sqlite db. 
- '''with ProcessPoolExecutor(nproc) as exe: + with ProcessPoolExecutor(nproc) as exe: futures = [exe.submit(preprocess_tod, obs_id=r[0]['obs_id'], group_list=r[1], verbosity=verbosity, configs=swap_archive(configs, f'temp/{r[0]["obs_id"]}.h5'), overwrite=overwrite, run_parallel=True) for r in run_list] - ''' - - output_dir = os.path.dirname(configs['archive']['index']) - logger.info(f'using profiling directory {output_dir}') - - profile_files = [] - - with ProcessPoolExecutor(nproc) as exe: - futures = [] - for r in run_list: - # Generate a profile file in the specified directory - profile_path = os.path.join(output_dir, f'{r[0]["obs_id"]}.prof') - profile_files.append(profile_path) - - future = exe.submit( - profile_function, - preprocess_tod, - profile_path, - obs_id=r[0]['obs_id'], - group_list=r[1], - verbosity=verbosity, - configs=swap_archive(configs, f'temp/{r[0]["obs_id"]}.h5'), - overwrite=overwrite, - run_parallel=True - ) - futures.append(future) - for future in as_completed(futures): logger.info('New future as_completed result') try: @@ -797,14 +744,6 @@ def main( f = open(errlog, 'a') f.write(f'\n{time.time()}, {err}, {db_datasets[0]}\n{db_datasets[1]}\n') f.close() - # Combine the profile results into a single profile - combined_profile_path = os.path.join(output_dir, 'combined_profile.prof') - combined_stats = pstats.Stats(profile_files[0]) - for profile_file in profile_files[1:]: - combined_stats.add(profile_file) - - # Save the combined stats to a file - combined_stats.dump_stats(combined_profile_path) if __name__ == '__main__': sp_util.main_launcher(main, get_parser) From d2de9406fcc45f7cc6c15b3bc20897caffe508fa Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 2 Oct 2024 12:35:58 -0700 Subject: [PATCH 12/39] adjust fill_glitches to address #740 --- sotodlib/tod_ops/gapfill.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/sotodlib/tod_ops/gapfill.py b/sotodlib/tod_ops/gapfill.py index ce863eb9f..f50053017 100644 --- a/sotodlib/tod_ops/gapfill.py +++ b/sotodlib/tod_ops/gapfill.py @@ -385,7 +385,7 @@ def get_contaminated_ranges(good_flags, bad_flags): def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, - glitch_flags=None, wrap=True): + glitch_flags=None, in_place=True, wrap_name=None): """ This function fills pre-computed glitches provided by the caller in time-ordered data using either a polynomial (default) or PCA-based @@ -418,9 +418,15 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, """ # Process Args if signal is None: - sig = np.copy(aman.signal) + if in_place: + sig = aman.signal + else: + sig = np.copy(aman.signal) else: - sig = np.copy(signal) + if in_place: + sig = signal + else: + sig = np.copy(signal) if glitch_flags is None: glitch_flags = aman.flags.glitches @@ -438,9 +444,9 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, 'detectors') modes = aman.dets.count # fill with poly fill before PCA - gaps = get_gap_fill(aman, nbuf=nbuf, flags=glitch_flags, - signal=np.float32(sig)) - sig = gaps.swap(aman, signal=sig) + #gaps = get_gap_fill(aman, nbuf=nbuf, flags=glitch_flags, + # signal=np.float32(sig)) + #sig = gaps.swap(aman, signal=sig) # PCA fill mod = pca.get_pca_model(tod=aman, n_modes=modes, signal=sig) @@ -448,15 +454,9 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, sig = gfill.swap(aman, signal=sig) # Wrap and Return - if isinstance(wrap, str): - if wrap in aman._assignments: - aman.move(wrap, 
None) - aman.wrap(wrap, sig, [(0, 'dets'), (1, 'samps')]) - return sig - elif wrap: - if 'gap_filled' in aman._assignments: - aman.move('gap_filled', None) - aman.wrap('gap_filled', sig, [(0, 'dets'), (1, 'samps')]) - return sig - else: - return sig + if wrap_name is not None: + if wrap_name in aman._assignments: + aman.move(wrap_name, None) + aman.wrap(wrap_name, sig, [(0, 'dets'), (1, 'samps')]) + + return sig From c8bbd939ba07782452ab0ccef750e07c8c214966 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 2 Oct 2024 12:37:10 -0700 Subject: [PATCH 13/39] add error message for empty iir_params to partially address #969 --- sotodlib/tod_ops/filters.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sotodlib/tod_ops/filters.py b/sotodlib/tod_ops/filters.py index 7b66ef33a..a6fb5c613 100644 --- a/sotodlib/tod_ops/filters.py +++ b/sotodlib/tod_ops/filters.py @@ -510,6 +510,9 @@ def iir_filter(freqs, tod, b=None, a=None, fscale=1., iir_params=None, sub_iir_params['fscale'] != _fscale,])): raise ValueError('iir parameters are not uniform.') iir_params = sub_iir_params + # check if iir_params from axis manager are None + if iir_params['a'] is None or iir_params['b'] is None: + raise ValueError('axis manager iir parameters are empty') try: a = iir_params['a'] b = iir_params['b'] From 6a45b758cc0d91acf5bbaf8472289ef6e6c8b6d0 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 2 Oct 2024 12:37:58 -0700 Subject: [PATCH 14/39] adjustment to new t2p leakage model handling --- sotodlib/tod_ops/t2pleakage.py | 221 +++++---------------------------- 1 file changed, 29 insertions(+), 192 deletions(-) diff --git a/sotodlib/tod_ops/t2pleakage.py b/sotodlib/tod_ops/t2pleakage.py index 742323f81..32f9f810a 100644 --- a/sotodlib/tod_ops/t2pleakage.py +++ b/sotodlib/tod_ops/t2pleakage.py @@ -4,165 +4,12 @@ from sotodlib import core from sotodlib.tod_ops import filters, apodize from sotodlib.tod_ops.fft_ops import calc_psd, calc_wn -from scipy.odr import ODR, Model, RealData -from lmfit import Model as LmfitModel - -def get_t2p_coeffs(aman, - T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', wn_demod=None, - f_lpf_cutoff=2.0, flag_name=None, - subtract_sig=False, merge_stats=True, t2p_stats_name='t2p_stats'): - """ - Apply a lowpass filter to the temperature and polarization signals, apodize them, - and compute the leakage coefficients from temperature (T) to polarization (Q and U). - Optionally subtract this leakage and return axismanager of coefficients with their - statistical uncertainties and reduced chi-squared values for the fit. - - Parameters - ---------- - aman : AxisManager - AxisManager object containing the TOD data. - T_sig_name : str - Name of the temperature signal in `aman`. Default is 'dsT'. - Q_sig_name : str - Name of the Q polarization signal in `aman`. Default is 'demodQ'. - U_sig_name : str - Name of the U polarization signal in `aman`. Default is 'demodU'. - wn_demod : float or None - Precomputed white noise level for demodulated signals. If None, it will be calculated. - f_lpf_cutoff: float - Cutoff frequency of low pass filter in demodulation. Used for error bar estimation by - combination with wn_demod. Default is 2.0. - flag_name : str or None - Name of the flag field in `aman` to use for masking data. If None, no masking is applied. - subtract_sig : bool - Whether to subtract the calculated leakage from the polarization signals. Default is False. - merge_stats : bool - Whether to merge the calculated statistics back into `aman`. Default is True. 
- t2p_stats_name : str - Name under which to wrap the output AxisManager containing statistics. Default is 't2p_stats'. - - Returns - ------- - out_aman : AxisManager - An AxisManager containing leakage coefficients, their errors, and reduced chi-squared statistics. - """ - # get white noise level of demod for error estimation - if wn_demod is None: - freqs, Pxx_demod = calc_psd(aman, signal=aman[Q_sig_name], merge=False) - wn_demod = calc_wn(aman, pxx=Pxx_demod, freqs=freqs, low_f=0.5, high_f=1.5) - - # integrate the white noise level over frequencies to get error bar of each point - sigma_demod = wn_demod * np.sqrt(f_lpf_cutoff) - sigma_T = sigma_demod/np.sqrt(2) - - # get downsampled data - ds_factor = int(np.mean(1/np.diff(aman.timestamps)) / (f_lpf_cutoff) ) - ds_slice = slice(None, None, ds_factor) - ts_ds = aman.timestamps[ds_slice] - T_ds = aman[T_sig_name][:, ds_slice] - Q_ds = aman[Q_sig_name][:, ds_slice] - U_ds = aman[U_sig_name][:, ds_slice] - - if flag_name is None: - mask_ds = np.ones_like(T_ds, dtype=bool) - elif flag_name in aman.flags._fields.keys(): - mask_ds = ~aman.flags[flag_name].mask()[:, ds_slice] - else: - raise ValueError('flag_name should be in aman.flags') - - def linear_model(params, x): - return params[0] * x + params[1] - - coeffsQ = np.zeros(aman.dets.count) - errorsQ = np.zeros(aman.dets.count) - redchi2sQ = np.zeros(aman.dets.count) - coeffsU = np.zeros(aman.dets.count) - errorsU = np.zeros(aman.dets.count) - redchi2sU = np.zeros(aman.dets.count) - - for di, det in enumerate(aman.dets.vals): - mask_ds_det = mask_ds[di] - ts_ds_det = ts_ds[mask_ds_det] - T_ds_det = T_ds[di, mask_ds_det] - Q_ds_det = Q_ds[di, mask_ds_det] - U_ds_det = U_ds[di, mask_ds_det] - - # fitting for Q - try: - model = Model(linear_model) - data = RealData(x=T_ds_det, - y=Q_ds_det, - sx=np.ones_like(T_ds_det) * sigma_T[di], - sy=np.ones_like(T_ds_det) * sigma_demod[di]) - odr = ODR(data, model, beta0=[np.mean(Q_ds_det), 1e-3]) - output = odr.run() - coeffsQ[di] = output.beta[0] - errorsQ[di] = output.sd_beta[0] - redchi2sQ[di] = output.sum_square / (len(T_ds_det) - 2) - except: - coeffsQ[di] = np.nan - errorsQ[di] = np.nan - redchi2sQ[di] = np.nan - - # fitting for U - try: - model = Model(linear_model) - data = RealData(x=T_ds_det, - y=U_ds_det, - sx=np.ones_like(T_ds_det) * sigma_T[di], - sy=np.ones_like(T_ds_det) * sigma_demod[di]) - odr = ODR(data, model, beta0=[np.mean(U_ds_det), 1e-3]) - output = odr.run() - coeffsU[di] = output.beta[0] - errorsU[di] = output.sd_beta[0] - redchi2sU[di] = output.sum_square / (len(T_ds_det) - 2) - except: - coeffsU[di] = np.nan - errorsU[di] = np.nan - redchi2sU[di] = np.nan - - - out_aman = core.AxisManager(aman.dets, aman.samps) - out_aman.wrap('coeffsQ', coeffsQ, [(0, 'dets')]) - out_aman.wrap('errorsQ', errorsQ, [(0, 'dets')]) - out_aman.wrap('redchi2sQ', redchi2sQ, [(0, 'dets')]) - - out_aman.wrap('coeffsU', coeffsU, [(0, 'dets')]) - out_aman.wrap('errorsU', errorsU, [(0, 'dets')]) - out_aman.wrap('redchi2sU', redchi2sU, [(0, 'dets')]) - - if subtract_sig: - subtract_t2p(aman, out_aman) - if merge_stats: - aman.wrap(t2p_stats_name, out_aman) - - return out_aman - -def subtract_t2p(aman, t2p_aman, T_signal=None): - """ - Subtract T to P leakage. - - Parameters - ---------- - aman : AxisManager - The tod. - t2p_aman : AxisManager - Axis manager with Q and U leakage coeffients. - Q coeff in field ``coeffsQ`` and U coeff in field ``coeffsU``. - T_signal : array - Temperature signal to scale and subtract from Q/U. - Default is ``aman['dsT']``. 
- - """ - if T_signal is None: - T_signal = aman['dsT'] - aman.demodQ -= np.multiply(T_signal.T, t2p_aman.coeffsQ).T - aman.demodU -= np.multiply(T_signal.T, t2p_aman.coeffsU).T +from lmfit import Model def leakage_model(dT, AQ, AU, lamQ, lamU): return AQ + lamQ * dT + 1.j * (AU + lamU * dT) -def get_corr(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', +def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', mask=None, ds_factor=100, subtract_sig=False, merge_stats=True, t2p_stats_name='t2p_stats'): """ @@ -195,50 +42,46 @@ def get_corr(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', if mask is None: mask = np.ones_like(aman.dsT, dtype='bool') - A_Q_array = [] - A_U_array = [] - A_P_array = [] - lambda_Q_array = [] - lambda_U_array = [] - lambda_P_array = [] + A_Q_array = np.empty([aman.dets.count]) + A_U_array = np.empty_like(A_Q_array) + A_P_array = np.empty_like(A_Q_array) + lambda_Q_array = np.empty_like(A_Q_array) + lambda_U_array = np.empty_like(A_Q_array) + lambda_P_array = np.empty_like(A_Q_array) + redchi2s_array = np.empty_like(A_Q_array) for di, det in enumerate(aman.dets.vals): x = aman[T_sig_name][di][mask[di]][::ds_factor] yQ = aman[Q_sig_name][di][mask[di]][::ds_factor] yU = aman[U_sig_name][di][mask[di]][::ds_factor] - model = LmfitModel(leakage_model, independent_vars=['dT']) + model = Model(leakage_model, independent_vars=['dT']) params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), lamQ=0., lamU=0.) result = model.fit(yQ + 1j * yU, params, dT=x) - A_Q = result.params['AQ'].value - A_U = result.params['AU'].value - A_P = np.sqrt(A_Q**2 + A_U**2) - lambda_Q = result.params['lamQ'].value - lambda_U = result.params['lamU'].value - lambda_P = np.sqrt(lambda_Q**2 + lambda_U**2) - - A_Q_array.append(A_Q) - A_U_array.append(A_U) - A_P_array.append(A_P) - lambda_Q_array.append(lambda_Q) - lambda_U_array.append(lambda_U) - lambda_P_array.append(lambda_P) - - A_Q_array = np.array(A_Q_array) - A_U_array = np.array(A_U_array) - A_P_array = np.array(A_P_array) - - lambda_Q_array = np.array(lambda_Q_array) - lambda_U_array = np.array(lambda_U_array) - lambda_P_array = np.array(lambda_P_array) + if result.success: + A_Q_array[di] = result.params['AQ'].value + A_U_array[di] = result.params['AU'].value + A_P_array[di] = np.sqrt(result.params['AQ'].value**2 + result.params['AU'].value**2) + lambda_Q_array[di] = result.params['lamQ'].value + lambda_U_array[di] = result.params['lamU'].value + lambda_P_array[di] = np.sqrt(result.params['lamQ'].value**2 + result.params['lamU'].value**2) + redchi2s_array[di] = result.redchi + else: + A_Q_array[di] = np.nan + A_U_array[di] = np.nan + A_P_array[di] = np.nan + lambda_Q_array[di] = np.nan + lambda_U_array[di] = np.nan + lambda_P_array[di] = np.nan + redchi2s_array[di] = np.nan out_aman = core.AxisManager(aman.dets, aman.samps) out_aman.wrap('AQ', A_Q_array, [(0, 'dets')]) out_aman.wrap('AU', A_U_array, [(0, 'dets')]) - out_aman.wrap('lamQ', lambda_Q_array, [(0, 'dets')]) out_aman.wrap('lamU', lambda_U_array, [(0, 'dets')]) + out_aman.wrap('redchi2s', redchi2s_array, [(0, 'dets')]) if subtract_sig: subtract_t2p(aman, out_aman) @@ -247,7 +90,7 @@ def get_corr(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', return out_aman -def subtract_leakage(aman, t2p_aman, T_signal=None, mask=None, ds_factor=100, nperseg=600*200): +def subtract_t2p(aman, t2p_aman, T_signal=None): """ Subtract T to P leakage. 
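For orientation, the complex-valued fit that the new `get_t2p_coeffs` performs above can be exercised on its own. A minimal sketch with synthetic data (all numbers are made up; only `lmfit.Model` and `numpy` are assumed) ::

    import numpy as np
    from lmfit import Model

    def leakage_model(dT, AQ, AU, lamQ, lamU):
        return AQ + lamQ * dT + 1.j * (AU + lamU * dT)

    rng = np.random.default_rng(0)
    dT = rng.standard_normal(500)                              # stand-in for downsampled dsT
    yQ = 0.01 + 3e-3 * dT + 1e-4 * rng.standard_normal(500)    # fake demodQ with lamQ ~ 3e-3
    yU = -0.02 + 1e-3 * dT + 1e-4 * rng.standard_normal(500)   # fake demodU with lamU ~ 1e-3

    model = Model(leakage_model, independent_vars=['dT'])
    params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), lamQ=0., lamU=0.)
    result = model.fit(yQ + 1.j * yU, params, dT=dT)           # lmfit handles complex residuals
    print(result.params['lamQ'].value, result.params['lamU'].value)

Fitting Q and U jointly as the real and imaginary parts of one complex series is what lets a single `leakage_model` evaluation recover both lambda coefficients at once.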
@@ -269,10 +112,4 @@ def subtract_leakage(aman, t2p_aman, T_signal=None, mask=None, ds_factor=100, np T_signal = aman['dsT'] aman.demodQ -= (T_signal * t2p_aman.lamQ[:, np.newaxis] + t2p_aman.AQ[:, np.newaxis]) - aman.demodU -= (T_signal * t2p_aman.lamU[:, np.newaxis] + t2p_aman.AU[:, np.newaxis]) - - freq, Pxx_demodQ_new = calc_psd(aman, signal=aman.demodQ, nperseg=nperseg, merge=False) - freq, Pxx_demodU_new = calc_psd(aman, signal=aman.demodU, nperseg=nperseg, merge=False) - aman.Pxx_demodQ = Pxx_demodQ_new - aman.Pxx_demodU = Pxx_demodU_new - \ No newline at end of file + aman.demodU -= (T_signal * t2p_aman.lamU[:, np.newaxis] + t2p_aman.AU[:, np.newaxis]) \ No newline at end of file From 413d293f057f542e5c9584e861f32f3e22774df9 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 2 Oct 2024 13:09:25 -0700 Subject: [PATCH 15/39] remove repeated calculation get_gap_fill --- sotodlib/tod_ops/gapfill.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/sotodlib/tod_ops/gapfill.py b/sotodlib/tod_ops/gapfill.py index f50053017..2dc1b9464 100644 --- a/sotodlib/tod_ops/gapfill.py +++ b/sotodlib/tod_ops/gapfill.py @@ -443,10 +443,6 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, f'{aman.dets.count}, setting modes = number of ' + 'detectors') modes = aman.dets.count - # fill with poly fill before PCA - #gaps = get_gap_fill(aman, nbuf=nbuf, flags=glitch_flags, - # signal=np.float32(sig)) - #sig = gaps.swap(aman, signal=sig) # PCA fill mod = pca.get_pca_model(tod=aman, n_modes=modes, signal=sig) From feafb55b56a278882007032c6969502ccce82b8d Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 2 Oct 2024 21:33:28 -0700 Subject: [PATCH 16/39] add lmfit to conf.py --- docs/conf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 1a8e0ceb4..488999549 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -102,7 +102,8 @@ 'quaternionarray', 'yaml', 'toml', 'sqlite3','tqdm', 'skyfield', 'h5py', 'pyfftw', 'scipy', 'toast', 'pixell', 'scikit', 'skimage', 'numdifftools', - 'traitlets', 'ephem', 'influxdb', 'megham', 'detmap'): + 'traitlets', 'ephem', 'influxdb', 'megham', 'detmap', + 'lmfit'): try: foo = import_module(missing) except ImportError: From a3e0f79e2cb1db8718aec2495ce34cec6ad2aee5 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Thu, 3 Oct 2024 06:34:36 -0700 Subject: [PATCH 17/39] add lmfit to setup.py --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index da5c93be5..c77134877 100644 --- a/setup.py +++ b/setup.py @@ -64,6 +64,7 @@ 'pyfftw', 'numdifftools', 'psycopg2-binary', + 'lmfit', ] setup_opts["extras_require"] = { "site_pipeline": [ From eaf607608e217f6c1744295ca3b21126c0f8bea3 Mon Sep 17 00:00:00 2001 From: chervias Date: Fri, 4 Oct 2024 10:44:42 -0700 Subject: [PATCH 18/39] breaking up into smaller functions, the writing of files added to make_demod_map --- sotodlib/mapmaking/demod_mapmaker.py | 150 +++++++++++++++++---------- 1 file changed, 96 insertions(+), 54 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index d0c36f58c..869b3e7d1 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -6,8 +6,8 @@ how to use look at docstring of DemodMapmaker. 
""" __all__ = ['DemodMapmaker','DemodSignal','DemodSignalMap','make_demod_map'] -import numpy as np -from pixell import enmap, utils, tilemap, bunch, mpi +import numpy as np, os +from pixell import enmap, utils as putils, tilemap, bunch, mpi import so3g.proj from .. import coords @@ -295,9 +295,9 @@ def prepare(self): self.hits = tilemap.redistribute(self.hits,self.comm) else: if self.comm is not None: - self.rhs = utils.allreduce(self.rhs, self.comm) - self.div = utils.allreduce(self.div, self.comm) - self.hits = utils.allreduce(self.hits,self.comm) + self.rhs = putils.allreduce(self.rhs, self.comm) + self.div = putils.allreduce(self.div, self.comm) + self.hits = putils.allreduce(self.hits,self.comm) self.ready = True @property @@ -314,11 +314,72 @@ def write(self, prefix, tag, m): enmap.write_map(oname, m) return oname -def make_demod_map(context, obslist, shape, wcs, noise_model, L, - preprocess_config, comm=mpi.COMM_WORLD, comps="TQU", t0=0, +def setup_demod_map(shape, wcs, noise_model, comm=mpi.COMM_WORLD, + comps='TQU', split_labels=None, singlestream=False, + dtype_tod=np.float32, dtype_map=np.float32, + recenter=None, verbose=0): + """ + Setup the classes for demod mapmaking and return + a DemodMapmmaker object + """ + if split_labels==None: + # this is the case where we did not request any splits at all + signal_map = DemodSignalMap(shape, wcs, comm, comps=comps, + dtype=dtype_map, tiled=False, + ofmt="", singlestream=singlestream, + recenter=recenter ) + else: + # this is the case where we asked for at least 2 splits (1 split set). + # We count how many split we'll make, we need to define the Nsplits + # maps inside the DemodSignalMap + Nsplits = len(split_labels) + signal_map = DemodSignalMap(shape, wcs, comm, comps=comps, + dtype=dtype_map, tiled=False, + ofmt="", Nsplits=Nsplits, + singlestream=singlestream, + recenter=recenter) + signals = [signal_map] + mapmaker = DemodMapmaker(signals, noise_model=noise_model, + dtype=dtype_tod, + verbose=verbose>0, + singlestream=singlestream) + return mapmaker + +def write_demod_maps(prefix, data, split_labels=None): + """ + Write maps from data into files + """ + if split_labels==None: + # we have no splits, so we save index 0 of the lists + data.signal.write(prefix, "full_wmap", data.wmap[0]) + data.signal.write(prefix, "full_weights", data.weights[0]) + data.signal.write(prefix, "full_hits", data.signal.hits) + else: + # we have splits + Nsplits = len(split_labels) + for n_split in range(Nsplits): + data.signal.write(prefix, "%s_wmap"%split_labels[n_split], + data.wmap[n_split]) + data.signal.write(prefix, "%s_weights"%split_labels[n_split], + data.weights[n_split]) + data.signal.write(prefix, "%s_hits"%split_labels[n_split], + data.signal.hits[n_split]) + +def write_demod_info(oname, info, split_labels=None): + putils.mkdir(os.path.dirname(oname)) + if split_labels==None: + bunch.write(oname+'_full_info.hdf', info[0]) + else: + # we have splits + Nsplits = len(split_labels) + for n_split in range(Nsplits): + bunch.write(oname+'_%s_info.hdf'%split_labels[n_split], info[n_split]) + +def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, + preprocess_config, prefix, comm=mpi.COMM_WORLD, comps="TQU", t0=0, dtype_tod=np.float32, dtype_map=np.float32, tag="", verbose=0, split_labels=None, - site='so_sat3', recenter=None, outs=None, singlestream=False): + site='so_sat3', recenter=None, singlestream=False): """ Make a demodulated map from the list of observations in obslist. 
@@ -338,8 +399,12 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, Noise model to pass to DemodMapmaker. L : logger Logger for printing on the screen. + info : list + Information for the database, will be written as a .hdf file. preprocess_config : dict Dictionary with the config yaml file for the preprocess database. + prefix : str + Prefix for the output files comps : str, optional Which components to map, only TQU supported for now. t0 : int, optional @@ -360,10 +425,6 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, recenter : str or None String to make object-centered maps, such as Moon/Sun/Planet centered maps. Look at sotodlib.mapmaking.parse_recentering for details. - outs : list, optional - List to accumulate the outputs from preproc_or_load_group. This is - necessary if you end up preprocessing an obs here and want to save - to the database so you won't have to re-process again. singlestream : Bool If True, do not perform demodulated filter+bin mapmaking but rather regular filter+bin mapmaking, i.e. map from obs.signal @@ -373,41 +434,25 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, pre = "" if tag is None else tag + " " if comm.rank == 0: L.info(pre + "Initializing equation system") - - # Set up our mapmaking equation - if split_labels==None: - # this is the case where we did not request any splits at all - signal_map = DemodSignalMap(shape, wcs, comm, comps=comps, - dtype=dtype_map, tiled=False, - ofmt="", singlestream=singlestream, - recenter=recenter ) - else: - # this is the case where we asked for at least 2 splits (1 split set). - # We count how many split we'll make, we need to define the Nsplits - # maps inside the DemodSignalMap - Nsplits = len(split_labels) - signal_map = DemodSignalMap(shape, wcs, comm, comps=comps, - dtype=dtype_map, tiled=False, - ofmt="", Nsplits=Nsplits, - singlestream=singlestream, - recenter=recenter) - signals = [signal_map] - mapmaker = DemodMapmaker(signals, noise_model=noise_model, - dtype=dtype_tod, - verbose=verbose>0, - singlestream=singlestream) + mapmaker = setup_demod_map(shape, wcs, noise_model, comm=comm, + comps=comps, split_labels=split_labels, + singlestream=singlestream, dtype_tod=dtype_tod, + dtype_map=dtype_map, recenter=recenter, verbose=verbose) if comm.rank == 0: L.info(pre + "Building RHS") # And feed it with our observations nobs_kept = 0 + errors = [] ; outputs = []; # PENDING: do an allreduce of these. 
+    # not needed for atomic maps, but needed for
+    # depth-1 maps
     for oi in range(len(obslist)):
         obs_id, detset, band = obslist[oi][:3]
         name = "%s:%s:%s" % (obs_id, detset, band)
-        error, output, obs = site_pipeline.preprocess_tod.preproc_or_load_group(obs_id, configs=preprocess_config,
+        error, output, obs = site_pipeline.preprocess_tod.preproc_or_load_group(obs_id,
+                        configs=preprocess_config,
                         dets={'wafer_slot':detset, 'wafer.bandpass':band},
                         logger=L, context=context, overwrite=False)
-        if outs is not None:
-            outs.append((error,output))
+        errors.append(error) ; outputs.append(output) ;
         if error not in [None,'load_success']:
             L.info('tod %s:%s:%s failed in the preproc database'%(obs_id,detset,band))
             continue
@@ -415,30 +460,27 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L,
         obs.wrap("site", np.full(1, site))
         obs.flags.wrap('glitch_flags', obs.preprocess.turnaround_flags.turnarounds + obs.preprocess.jumps_2pi.jump_flag + obs.preprocess.glitches.glitch_flags, )
-        # And add it to the mapmaker
-        if split_labels==None:
-            # this is the case of no splits
-            mapmaker.add_obs(name, obs)
-        else:
-            # this is the case of having splits. We need to pass the split_labels
-            # at least. If we have detector splits fixed in time, then we pass the
-            # masks in det_split_masks. Otherwise, det_split_masks will be None.
-            mapmaker.add_obs(name, obs, split_labels=split_labels)
+        mapmaker.add_obs(name, obs, split_labels=split_labels)
         L.info('Done with tod %s:%s:%s'%(obs_id,detset,band))
         nobs_kept += 1
     nobs_kept = comm.allreduce(nobs_kept)
-    # if we skip all the obs
+    # if we skip all the obs then we return error and output
     if nobs_kept == 0:
-        return None
+        return errors, outputs
 
-    for signal in signals:
+    for signal in mapmaker.signals:
         signal.prepare()
     if comm.rank == 0: L.info(pre + "Writing F+B outputs")
     wmap = []
     weights = []
-    for n_split in range(signal_map.Nsplits):
-        wmap.append( signal_map.rhs[n_split] )
-        div = np.diagonal(signal_map.div[n_split], axis1=0, axis2=1)
+    # mapmaker.signals[0] is signal_map
+    for n_split in range(mapmaker.signals[0].Nsplits):
+        wmap.append( mapmaker.signals[0].rhs[n_split] )
+        div = np.diagonal(mapmaker.signals[0].div[n_split], axis1=0, axis2=1)
         div = np.moveaxis(div, -1, 0) # this moves the last axis to the 0th position
         weights.append(div)
-    return bunch.Bunch(wmap=wmap, weights=weights, signal=signal_map, t0=t0)
+    mapdata = bunch.Bunch(wmap=wmap, weights=weights, signal=mapmaker.signals[0], t0=t0)
+    # output to files
+    write_demod_maps(prefix, mapdata, split_labels=split_labels, )
+    write_demod_info(prefix, info, split_labels=split_labels )
+    return errors, outputs
\ No newline at end of file

From 8e5efa0339204956efff4724449261574cc97f24 Mon Sep 17 00:00:00 2001
From: chervias
Date: Fri, 4 Oct 2024 12:10:58 -0700
Subject: [PATCH 19/39] updating the docstring

---
 sotodlib/mapmaking/demod_mapmaker.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py
index 869b3e7d1..60fe813d5 100644
--- a/sotodlib/mapmaking/demod_mapmaker.py
+++ b/sotodlib/mapmaking/demod_mapmaker.py
@@ -430,6 +430,12 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info,
         rather regular filter+bin mapmaking, i.e. map from obs.signal
         rather than from obs.dsT, obs.demodQ, obs.demodU.
 
+    Returns
+    -------
+    errors : list
+        List of errors from preprocess database. To be used in cleanup_mandb.
+    outputs : list
+        List of outputs from preprocess database.
To be used in cleanup_mandb. """ pre = "" if tag is None else tag + " " @@ -483,4 +489,4 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, # output to files write_demod_maps(prefix, mapdata, split_labels=split_labels, ) write_demod_info(prefix, info, split_labels=split_labels ) - return errors, outputs \ No newline at end of file + return errors, outputs From 851d9598a9f3f05c98e2c0aed1149befb6e30242 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 10 Oct 2024 07:30:31 -0700 Subject: [PATCH 20/39] support for multiprocessing and loading the context inside the function --- sotodlib/mapmaking/demod_mapmaker.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index 60fe813d5..eeafe8eac 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -10,6 +10,7 @@ from pixell import enmap, utils as putils, tilemap, bunch, mpi import so3g.proj +from .. import core from .. import coords from .. import site_pipeline from .utilities import recentering_to_quat_lonlat, evaluate_recentering, MultiZipper, unarr, safe_invert_div @@ -155,7 +156,7 @@ def __init__(self, shape, wcs, comm, comps="TQU", name="sky", ofmt="{name}", out wcs : wcs WCS of the output map geometry comm : MPI.comm - MPI communicator + MPI communicator comps : str, optional Components to map name : str, optional @@ -385,8 +386,8 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, Arguments --------- - context : sotodlib.core.Context - Context object used to load obs from. + context : str + File path to context used to load obs from. obslist : dict The obslist which is the output of the mapmaking.obs_grouping.build_obslists, contains the information of the @@ -437,7 +438,7 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, outputs : list List of outputs from preprocess database. To be used in cleanup_mandb. 
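As a usage sketch of the signature above (the paths, the obslist entry, and the info bunch are placeholders; at this point in the series the logger is still a positional argument) ::

    import yaml
    from pixell import enmap, bunch, mpi
    from sotodlib import mapmaking
    import sotodlib.site_pipeline.util as util

    L = util.init_logger('demod_mapmaker')                       # any logger works
    shape, wcs = enmap.read_map_geometry('kernel.fits')          # placeholder WCS kernel
    noise_model = mapmaking.NmatWhite()
    preprocess_config = yaml.safe_load(open('preprocess.yaml'))  # placeholder config
    obslist = [('obs_1722126466_satp1_1111111', 'ws3', 'f150', 0)]
    info = [bunch.Bunch(obs_id=b'obs_1722126466_satp1_1111111')] # one entry per split; just 'full' here
    errors, outputs = mapmaking.make_demod_map(
        'context.yaml', obslist, shape, wcs, noise_model, L, info,
        preprocess_config, 'output/atomic_1722126466_ws3_f150',
        comm=mpi.FAKE_WORLD, split_labels=None)

The errors/outputs pair is then handed to `preprocess_tod.cleanup_mandb`, which is what the site pipeline script below does after each future completes.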
""" - + context = core.Context(context) pre = "" if tag is None else tag + " " if comm.rank == 0: L.info(pre + "Initializing equation system") mapmaker = setup_demod_map(shape, wcs, noise_model, comm=comm, From 3a1fc196221a1e2686a773a9d4d640f02994322b Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 10 Oct 2024 13:19:05 -0700 Subject: [PATCH 21/39] Site pipeline script added --- sotodlib/site_pipeline/cli.py | 2 + .../make_atomic_filterbin_map.py | 518 ++++++++++++++++++ 2 files changed, 520 insertions(+) create mode 100644 sotodlib/site_pipeline/make_atomic_filterbin_map.py diff --git a/sotodlib/site_pipeline/cli.py b/sotodlib/site_pipeline/cli.py index a11ca65f1..c1afc86fe 100644 --- a/sotodlib/site_pipeline/cli.py +++ b/sotodlib/site_pipeline/cli.py @@ -43,6 +43,7 @@ def main(obs_id=None, config_file=None, logger=None): analyze_bright_ptsrc, check_book, make_det_info_wafer, + make_atomic_filterbin_map, make_ml_map, make_source_flags, make_uncal_beam_map, @@ -61,6 +62,7 @@ def main(obs_id=None, config_file=None, logger=None): 'analyze-bright-ptsrc': analyze_bright_ptsrc, 'check-book': check_book, 'make-det-info-wafer': make_det_info_wafer, + 'make-atomic-filterbin-map': make_atomic_filterbin_map, 'make-ml-map': make_ml_map, 'make-source-flags': make_source_flags, 'make-uncal-beam-map': make_uncal_beam_map, diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py new file mode 100644 index 000000000..33085a63e --- /dev/null +++ b/sotodlib/site_pipeline/make_atomic_filterbin_map.py @@ -0,0 +1,518 @@ +from argparse import ArgumentParser +import numpy as np, sys, time, warnings, os, so3g, logging, yaml, itertools, multiprocessing, traceback +from sotodlib import coords, mapmaking +from sotodlib.core import Context, metadata as metadata_core, FlagManager, AxisManager, OffsetAxis +from sotodlib.io import metadata, hk_utils +from sotodlib.site_pipeline import preprocess_tod as pt +from pixell import enmap, utils as putils, fft, bunch, wcsutils, tilemap, colors, memory, mpi +from concurrent.futures import ProcessPoolExecutor, as_completed +import sotodlib.site_pipeline.util as util + +defaults = {"query": "1", + "odir": "./output", + "preprocess_config": None, + "comps": "TQU", + "mode": "per_obs", + "nproc": 1, + "ntod": None, + "tods": None, + "nset": None, + "wafer": None, + "freq": None, + "center_at": None, + "site": 'so_sat3', + "max_dets": None, # not implemented yet + "verbose": 0, + "quiet": 0, + "tiled": 0, # not implemented yet + "singlestream": False, + "only_hits": False, + "det_in_out": False, + "det_left_right":False, + "det_upper_lower":False, + "scan_left_right":False, + "window":0.0, # not implemented yet + "dtype_tod": np.float32, + "dtype_map": np.float64, + "atomic_db": "atomic_maps.db", + "fixed_time": None, + "mindur": None, + "l2_data_path": "/global/cfs/cdirs/sobs/untracked/data/site/hk", + } + +def get_parser(parser=None): + if parser is None: + parser = ArgumentParser() + parser.add_argument("--config-file", type=str, default=None, + help="Path to mapmaker config.yaml file") + + parser.add_argument("--context", + help='context file') + parser.add_argument("--query", + help='query, can be a file (list of obs_id) or selection string') + parser.add_argument("--area", + help='wcs kernel') + parser.add_argument("--odir", + help='output directory') + parser.add_argument("--preprocess_config", type=str, + help='file with the config file to run the preprocessing pipeline') + parser.add_argument("--mode", type=str, 
) + parser.add_argument("--nproc", type=int, help='Number of procs in the multiprocessing pool') + parser.add_argument("--comps", type=str,) + parser.add_argument("--singlestream", action="store_true") + parser.add_argument("--only_hits", action="store_true") # this will work only when we don't request splits, since I want to avoid loading the signal + + # detector position splits (fixed in time) + parser.add_argument("--det_in_out", action="store_true") + parser.add_argument("--det_left_right", action="store_true") + parser.add_argument("--det_upper_lower", action="store_true") + + # time samples splits + parser.add_argument("--scan_left_right", action="store_true") + + parser.add_argument("--ntod", type=int, ) + parser.add_argument("--tods", type=str, ) + parser.add_argument("--nset", type=int, ) + parser.add_argument("--wafer", type=str, + help="Detector set to map with") + parser.add_argument("--freq", type=str, + help="Frequency band to map with") + parser.add_argument("--max-dets", type=int, ) + parser.add_argument("--fixed_ftime", type=int, ) + parser.add_argument("--mindur", type=int, ) + parser.add_argument("--site", type=str, ) + parser.add_argument("--verbose", action="count", ) + parser.add_argument("--quiet", action="count", ) + parser.add_argument("--window", type=float, ) + parser.add_argument("--dtype_tod", ) + parser.add_argument("--dtype_map", ) + parser.add_argument("--atomic_db", + help='name of the atomic map database, will be saved where make_filterbin_map is being run') + parser.add_argument("--l2_data_path", + help='Path to level-2 data') + return parser + +def _get_config(config_file): + return yaml.safe_load(open(config_file,'r')) + +def get_ra_ref(obs, site='so_sat3'): + # pass an AxisManager of the observation, and return two + # ra_ref @ dec=-40 deg. + # + #t = [obs.obs_info.start_time, obs.obs_info.start_time, obs.obs_info.stop_time, obs.obs_info.stop_time] + t_start = obs.obs_info.start_time + t_stop = obs.obs_info.stop_time + az = np.arange((obs.obs_info.az_center-0.5*obs.obs_info.az_throw)*putils.degree, + (obs.obs_info.az_center+0.5*obs.obs_info.az_throw)*putils.degree, 0.5*putils.degree) + el = obs.obs_info.el_center*putils.degree + + csl = so3g.proj.CelestialSightLine.az_el(t_start*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') + ra_, dec_ = csl.coords().transpose()[:2] + ra_ref_start = np.interp(-40*putils.degree, dec_, ra_) + + csl = so3g.proj.CelestialSightLine.az_el(t_stop*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') + ra_, dec_ = csl.coords().transpose()[:2] + ra_ref_stop = np.interp(-40*putils.degree, dec_, ra_) + return ra_ref_start, ra_ref_stop + +def find_footprint(context, tod, ref_wcs, return_pixboxes=False, pad=1): + # Measure the pixel bounds of each observation relative to our + # reference wcs + pixboxes = [] + my_shape, my_wcs = coords.get_footprint(tod, ref_wcs) + my_pixbox = enmap.pixbox_of(ref_wcs, my_shape, my_wcs) + pixboxes.append(my_pixbox) + if len(pixboxes) == 0: raise DataMissing("No usable obs to estimate footprint from") + pixboxes = np.array(pixboxes) + # Handle sky wrapping. This assumes cylindrical coordinates with sky-wrapping + # in the x-direction, and that there's an integer number of pixels around + # the sky. Could be done more generally, but would be much more involved, + # and this should be good enough. 
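To make the wrapping arithmetic concrete, here is a toy example of the `rewind` step with made-up pixel boxes (assuming 0.5 deg pixels, i.e. nphi = 720) ::

    import numpy as np
    from pixell import utils as putils

    nphi = 720                            # pixels in a full 360 deg of RA
    x0 = np.array([10, 700, -15])         # left edges of three per-obs pixel boxes
    print(putils.rewind(x0, ref=x0[0], period=nphi))
    # 10 stays 10; 700 is rewound to 700 - 720 = -20; -15 stays -15,
    # so all boxes now sit on the same branch as the reference edge.

After this step, taking the min/max over the rewound boxes gives a contiguous union footprint even when the observations straddle the RA wrap.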
+ nphi = putils.nint(np.abs(360/ref_wcs.wcs.cdelt[0])) + widths = pixboxes[:,1,0]-pixboxes[:,0,0] + pixboxes[:,0,0] = putils.rewind(pixboxes[:,0,0], + ref=pixboxes[0,0,0], + period=nphi) + pixboxes[:,1,0] = pixboxes[:,0,0] + widths + # It's now safe to find the total pixel bounding box + union_pixbox = np.array([np.min(pixboxes[:,0],0)-pad,np.max(pixboxes[:,1],0) + +pad]) + # Use this to construct the output geometry + shape = union_pixbox[1]-union_pixbox[0] + wcs = ref_wcs.deepcopy() + wcs.wcs.crpix -= union_pixbox[0,::-1] + if return_pixboxes: return shape, wcs, pixboxes + else: return shape, wcs + +class DataMissing(Exception): pass + +def get_pwv(obs, data_dir): + try: + pwv_info = hk_utils.get_detcosamp_hkaman(obs, alias=['pwv'], + fields = ['site.env-radiometer-class.feeds.pwvs.pwv',], + data_dir = data_dir) + pwv_all = pwv_info['env-radiometer-class']['env-radiometer-class'][0] + pwv = np.nanmedian(pwv_all) + except (KeyError, ValueError): + pwv = 0.0 + return pwv + +def read_tods(context, obslist, + dtype_tod=np.float32, only_hits=False, site='so_sat3', + l2_data='/global/cfs/cdirs/sobs/untracked/data/site/hk'): + """ + context : str + Path to context file + """ + context = Context(context) + # this function will run on multiprocessing and can be returned in any random order + # we will also return the obslist to keep track of the order + my_obslist = [] ; my_tods = [] ; my_ra_ref = [] ; pwvs = [] + inds = range(len(obslist)) + ind = 0 + obs_id, detset, band, obs_ind = obslist[ind] + meta = context.get_meta(obs_id, dets={"wafer_slot":detset, "wafer.bandpass":band},) + tod = context.get_obs(meta, no_signal=True) + #tod = context.get_obs(obs_id, dets={"wafer_slot":detset, + # "wafer.bandpass":band}, + # no_signal=True) + to_remove = [] + for field in tod._fields: + if field!='obs_info' and field!='flags' and field!='signal' and field!='focal_plane' and field!='timestamps' and field!='boresight': to_remove.append(field) + for field in to_remove: + tod.move(field, None) + if only_hits==False: + ra_ref_start, ra_ref_stop = get_ra_ref(tod) + my_ra_ref.append((ra_ref_start/putils.degree, + ra_ref_stop/putils.degree)) + else: + my_ra_ref.append(None) + tod.flags.wrap('glitch_flags', so3g.proj.RangesMatrix.zeros(tod.shape[:2]), + [(0, 'dets'), (1, 'samps')]) + my_tods.append(tod) + + tod_temp = tod.restrict('dets', meta.dets.vals[:1], in_place=False) + pwvs.append(get_pwv(tod_temp, data_dir=l2_data)) + del tod_temp + return obslist, my_tods, my_ra_ref, pwvs + +class ColoredFormatter(logging.Formatter): + def __init__(self, msg, colors={'DEBUG':colors.reset, + 'INFO':colors.lgreen, + 'WARNING':colors.lbrown, + 'ERROR':colors.lred, + 'CRITICAL':colors.lpurple}): + logging.Formatter.__init__(self, msg) + self.colors = colors + def format(self, record): + try: + col = self.colors[record.levelname] + except KeyError: + col = colors.reset + return col + logging.Formatter.format(self, record) + colors.reset + +class LogInfoFilter(logging.Filter): + def __init__(self, rank=0): + self.rank = rank + try: + # Try to get actual time since task start if possible + import os, psutil + p = psutil.Process(os.getpid()) + self.t0 = p.create_time() + except ImportError: + # Otherwise measure from creation of this filter + self.t0 = time.time() + def filter(self, record): + record.rank = self.rank + record.wtime = time.time()-self.t0 + record.wmins = record.wtime/60. + record.whours= record.wmins/60. 
+ record.mem = memory.current()/1024.**3 + record.resmem= memory.resident()/1024.**3 + record.memmax= memory.max()/1024.**3 + return record + +def handle_empty(prefix, tag, e, L): + # This happens if we ended up with no valid tods for some reason + L.info("%s Skipped: %s" % (tag, str(e))) + putils.mkdir(os.path.dirname(prefix)) + with open(prefix + ".empty", "w") as ofile: ofile.write("\n") + +def make_demod_map_dummy(context): + return None + +def main(config_file=None, defaults=defaults, **args): + cfg = dict(defaults) + # Update the default dict with values provided from a config.yaml file + if config_file is not None: + cfg_from_file = _get_config(config_file) + cfg.update({k: v for k, v in cfg_from_file.items() if v is not None}) + else: + print("No config file provided, assuming default values") + # Merge flags from config file and defaults with any passed through CLI + cfg.update({k: v for k, v in args.items() if v is not None}) + # Certain fields are required. Check if they are all supplied here + required_fields = ['context','area'] + for req in required_fields: + if req not in cfg.keys(): + raise KeyError("{} is a required argument. Please supply it in a config file or via the command line".format(req)) + args = cfg + warnings.simplefilter('ignore') + + comm = mpi.FAKE_WORLD # Fake communicator since we won't use MPI + + verbose = args['verbose'] - args['quiet'] + shape, wcs = enmap.read_map_geometry(args['area']) + wcs = wcsutils.WCS(wcs.to_header()) + + noise_model = mapmaking.NmatWhite() + ncomp = len(args['comps']) + meta_only = False + putils.mkdir(args['odir']) + + recenter = None + if args['center_at']: + recenter = mapmaking.parse_recentering(args['center_at']) + + # Set up logging. + L = logging.getLogger(__name__) + L.setLevel(logging.INFO) + ch = logging.StreamHandler() + ch.setLevel(logging.INFO) + ch.setFormatter(ColoredFormatter(" %(wmins)7.2f %(mem)5.2f %(memmax)5.2f %(message)s")) + ch.addFilter(LogInfoFilter()) + L.addHandler(ch) + + if args['preprocess_config'] is not None: + preprocess_config = yaml.safe_load(open(args['preprocess_config'],'r')) + outs = [] + errlog = os.path.join(os.path.dirname(preprocess_config['archive']['index']), + 'errlog.txt') + else: + preprocess_config = None + outs = None + errlog = None + + multiprocessing.set_start_method('spawn') + + context = Context(args['context']) + # obslists is a dict, obskeys is a list, periods is an array, only rank 0 + # will do this and broadcast to others. 
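For orientation, the grouping products returned here can be pictured as follows; this is a sketch inferred from how they are indexed in this script, not an exact spec ::

    # obslists : dict of obs groups; each key ends in (..., wafer, band), and each
    #   value is a list of (obs_id, wafer, band, period_index) tuples, e.g.
    #   [('obs_1722126466_satp1_1111111', 'ws3', 'f150', 0)]
    # obskeys : list of the dict keys; periods[pid] holds the (start, stop) ctimes.
    obs_id, detset, band, pid = obslists[obskeys[0]][0]
    t_start, t_stop = periods[pid]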
+ obslists, obskeys, periods, \ + obs_infos = mapmaking.build_obslists(context, + args['query'], + mode=args['mode'], + nset=args['nset'], + wafer=args['wafer'], + freq=args['freq'], + ntod=args['ntod'], + tods=args['tods'], + fixed_time=args['fixed_time'], + mindur=args['mindur']) + L.info('Done with build_obslists') + + tags = [] + cwd = os.getcwd() + + split_labels = [] + if args['det_in_out']: + split_labels.append('det_in');split_labels.append('det_out') + if args['det_left_right']: + split_labels.append('det_left');split_labels.append('det_right') + if args['det_upper_lower']: + split_labels.append('det_upper');split_labels.append('det_lower') + if args['scan_left_right']: + split_labels.append('scan_left');split_labels.append('scan_right') + if not split_labels: + split_labels = None + + obslists_arr = [item for key, item in obslists.items()] + + my_oblists=[]; my_tods = []; my_ra_ref=[]; pwvs=[] + L.info('Starting with read_tods') + with ProcessPoolExecutor(args['nproc']) as exe: + futures = [exe.submit(read_tods, args['context'], obslist, + dtype_tod=args['dtype_tod'], + only_hits=args['only_hits'], + l2_data=args['l2_data_path']) for obslist in obslists_arr] + for future in as_completed(futures): + #L.info('New future as_completed result') + try: + my_obslist_here, my_tods_here, my_ra_ref_here, pwvs_here = future.result() + my_oblists.append(my_obslist_here) + my_tods.append(my_tods_here) + my_ra_ref.append(my_ra_ref_here) + pwvs.append(pwvs_here) + except Exception as e: + errmsg = f'{type(e)}: {e}' + tb = ''.join(traceback.format_tb(e.__traceback__)) + L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") + f = open(errlog, 'a') + f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') + f.close() + continue + futures.remove(future) + # flatten the list of lists + my_oblists = list(itertools.chain.from_iterable(my_oblists)) + my_tods = list(itertools.chain.from_iterable(my_tods)) + my_ra_ref = list(itertools.chain.from_iterable(my_ra_ref)) + pwvs = list(itertools.chain.from_iterable(pwvs)) + + # we will do the profile and footprint here, and then allgather the + # subshapes and subwcs.This way we don't have to communicate the + # massive arrays such as timestamps + subshapes = [] ; subwcses = [] + for obs in my_tods: + if recenter is None: + subshape, subwcs = find_footprint(context, obs, wcs,) + subshapes.append(subshape) ; subwcses.append(subwcs) + else: + subshape = shape; subwcs = wcs + subshapes.append(subshape) ; subwcses.append(subwcs) + # subshape and subwcs are in the order given by my_oblists + + run_list = [] + for oi in range(len(my_tods)): + # we will need to build the obskey from my_oblists + #obs_1722126466_satp1_1111111', 'ws3', 'f150', 0) + pid = my_oblists[oi][3]; + detset = my_oblists[oi][1]; + band = my_oblists[oi][2]; + #obskey = (pid, detset, band) + obslist = [my_oblists[oi]] + t = putils.floor(periods[pid,0]) + t5 = ("%05d" % t)[:5] + prefix = "%s/%s/atomic_%010d_%s_%s" % (args['odir'], t5, t, detset, band) + + subshape = subshapes[oi] + subwcs = subwcses[oi] + + tag = "%5d/%d" % (oi+1, len(obskeys)) + putils.mkdir(os.path.dirname(prefix)) + meta_done = os.path.isfile(prefix + "_full_info.hdf") + maps_done = os.path.isfile(prefix + ".empty") or ( + os.path.isfile(prefix + "_full_map.fits") and + os.path.isfile(prefix + "_full_ivar.fits") and + os.path.isfile(prefix + "_full_hits.fits") + ) + #L.info("%s Proc period %4d dset %s:%s @%.0f dur %5.2f h with %2d obs" % (tag, pid, detset, band, t, (periods[pid,1]-periods[pid,0])/3600, len(obslist))) + 
+ my_ra_ref_atomic = [my_ra_ref[oi]] + pwv_atomic = [pwvs[oi]] + # Save file for data base of atomic maps. We will write an individual file, + # another script will loop over those files and write into sqlite data base + if not args['only_hits']: + info = [] + if split_labels is None: + # this means the mapmaker was run without any splits requested + info.append(bunch.Bunch(pid=pid, + obs_id=obslist[0][0].encode(), + telescope=obs_infos[obslist[0][3]].telescope.encode(), + freq_channel=band.encode(), + wafer=detset.encode(), + ctime=int(t), + split_label='full'.encode(), + split_detail='full'.encode(), + prefix_path=str(cwd+'/'+prefix+'_full').encode(), + elevation=obs_infos[obslist[0][3]].el_center, + azimuth=obs_infos[obslist[0][3]].az_center, + RA_ref_start=my_ra_ref_atomic[0][0], + RA_ref_stop=my_ra_ref_atomic[0][1], + pwv=pwv_atomic + )) + else: + # splits were requested and we loop over them + for split_label in split_labels: + info.append(bunch.Bunch(pid=pid, + obs_id=obslist[0][0].encode(), + telescope=obs_infos[obslist[0][3]].telescope.encode(), + freq_channel=band.encode(), + wafer=detset.encode(), + ctime=int(t), + split_label=split_label.encode(), + split_detail=''.encode(), + prefix_path=str(cwd+'/'+prefix+'_%s'%split_label).encode(), + elevation=obs_infos[obslist[0][3]].el_center, + azimuth=obs_infos[obslist[0][3]].az_center, + RA_ref_start=my_ra_ref_atomic[0][0], + RA_ref_stop=my_ra_ref_atomic[0][1], + pwv=pwv_atomic + )) + # inputs that are unique per atomic map go into run_list + run_list.append([obslist, subshape, subwcs, info, prefix, t]) + # Done with creating run_list + with ProcessPoolExecutor(args['nproc']) as exe: + futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], r[1], r[2], + noise_model, L, r[3], preprocess_config, r[4], + comm = comm, t0=r[5], tag=tag, recenter=recenter, + dtype_map=args['dtype_map'], + dtype_tod=args['dtype_tod'], + comps=args['comps'], + verbose=args['verbose'], + split_labels=split_labels, + singlestream=args['singlestream'], + site=args['site']) for r in run_list] + for future in as_completed(futures): + L.info('New future as_completed result') + try: + errors, outputs = future.result() + except Exception as e: + errmsg = f'{type(e)}: {e}' + tb = ''.join(traceback.format_tb(e.__traceback__)) + L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") + f = open(errlog, 'a') + f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') + f.close() + continue + futures.remove(future) + if preprocess_config is not None: + for ii in range(len(errors)): + pt.cleanup_mandb(errors[ii], outputs[ii], preprocess_config, L) + print("Done") + return True + +if __name__ == '__main__': + util.main_launcher(main, get_parser) + +""" +if False: + # We open the data base for checking if we have maps already, + # if we do we will not run them again. 
+ if os.path.isfile('./'+args['atomic_db']) and not args['only_hits']: + conn = sqlite3.connect('./'+args['atomic_db']) # open the connector, in reading mode only + cursor = conn.cursor() + keys_to_remove = [] + # Now we have obslists and splits ready, we look through the database + # to remove the maps we already have from it + for key, value in obslists.items(): + if split_labels == None: + # we want to run only full maps + query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="full"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1] ) + res = cursor.execute(query_) + matches = res.fetchall() + if len(matches)>0: + # this means the map (key,value) is already in the data base, + # so we have to remove it to not run it again + # it seems that removing the maps from the obskeys is enough. + keys_to_remove.append(key) + else: + # we are asking for splits + missing_split = False + for split_label in split_labels: + query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="%s"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1], split_label ) + res = cursor.execute(query_) + matches = res.fetchall() + if len(matches)==0: + # this means one of the requested splits is missing + # in the data base + missing_split = True + break + if missing_split == False: + # this means we have all the splits we requested for the + # particular obs_id/telescope/freq/wafer + keys_to_remove.append(key) + for key in keys_to_remove: + obskeys.remove(key) + del obslists[key] + conn.close() # I close since I only wanted to read +""" \ No newline at end of file From 0ffb6f452df8f476282a5a697fcafa92951b3131 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 10 Oct 2024 13:32:40 -0700 Subject: [PATCH 22/39] Restoring the check in the atomic maps database --- .../make_atomic_filterbin_map.py | 86 +++++++++---------- 1 file changed, 41 insertions(+), 45 deletions(-) diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py index 33085a63e..d020e403c 100644 --- a/sotodlib/site_pipeline/make_atomic_filterbin_map.py +++ b/sotodlib/site_pipeline/make_atomic_filterbin_map.py @@ -326,8 +326,47 @@ def main(config_file=None, defaults=defaults, **args): if not split_labels: split_labels = None - obslists_arr = [item for key, item in obslists.items()] + # We open the data base for checking if we have maps already, + # if we do we will not run them again. + if os.path.isfile('./'+args['atomic_db']) and not args['only_hits']: + conn = sqlite3.connect('./'+args['atomic_db']) # open the connector, in reading mode only + cursor = conn.cursor() + keys_to_remove = [] + # Now we have obslists and splits ready, we look through the database + # to remove the maps we already have from it + for key, value in obslists.items(): + if split_labels == None: + # we want to run only full maps + query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="full"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1] ) + res = cursor.execute(query_) + matches = res.fetchall() + if len(matches)>0: + # this means the map (key,value) is already in the data base, + # so we have to remove it to not run it again + # it seems that removing the maps from the obskeys is enough. 
+ keys_to_remove.append(key) + else: + # we are asking for splits + missing_split = False + for split_label in split_labels: + query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="%s"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1], split_label ) + res = cursor.execute(query_) + matches = res.fetchall() + if len(matches)==0: + # this means one of the requested splits is missing + # in the data base + missing_split = True + break + if missing_split == False: + # this means we have all the splits we requested for the + # particular obs_id/telescope/freq/wafer + keys_to_remove.append(key) + for key in keys_to_remove: + obskeys.remove(key) + del obslists[key] + conn.close() # I close since I only wanted to read + obslists_arr = [item for key, item in obslists.items()] my_oblists=[]; my_tods = []; my_ra_ref=[]; pwvs=[] L.info('Starting with read_tods') with ProcessPoolExecutor(args['nproc']) as exe: @@ -472,47 +511,4 @@ def main(config_file=None, defaults=defaults, **args): return True if __name__ == '__main__': - util.main_launcher(main, get_parser) - -""" -if False: - # We open the data base for checking if we have maps already, - # if we do we will not run them again. - if os.path.isfile('./'+args['atomic_db']) and not args['only_hits']: - conn = sqlite3.connect('./'+args['atomic_db']) # open the connector, in reading mode only - cursor = conn.cursor() - keys_to_remove = [] - # Now we have obslists and splits ready, we look through the database - # to remove the maps we already have from it - for key, value in obslists.items(): - if split_labels == None: - # we want to run only full maps - query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="full"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1] ) - res = cursor.execute(query_) - matches = res.fetchall() - if len(matches)>0: - # this means the map (key,value) is already in the data base, - # so we have to remove it to not run it again - # it seems that removing the maps from the obskeys is enough. 
- keys_to_remove.append(key) - else: - # we are asking for splits - missing_split = False - for split_label in split_labels: - query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="%s"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1], split_label ) - res = cursor.execute(query_) - matches = res.fetchall() - if len(matches)==0: - # this means one of the requested splits is missing - # in the data base - missing_split = True - break - if missing_split == False: - # this means we have all the splits we requested for the - # particular obs_id/telescope/freq/wafer - keys_to_remove.append(key) - for key in keys_to_remove: - obskeys.remove(key) - del obslists[key] - conn.close() # I close since I only wanted to read -""" \ No newline at end of file + util.main_launcher(main, get_parser) \ No newline at end of file From cf1593ed1f70e49b32dd89268beae5608d334606 Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 14 Oct 2024 13:33:07 -0700 Subject: [PATCH 23/39] Making logger to work under multiprocessing pools --- sotodlib/mapmaking/demod_mapmaker.py | 10 ++++++---- sotodlib/site_pipeline/make_atomic_filterbin_map.py | 3 ++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index eeafe8eac..c1230fd58 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -376,10 +376,10 @@ def write_demod_info(oname, info, split_labels=None): for n_split in range(Nsplits): bunch.write(oname+'_%s_info.hdf'%split_labels[n_split], info[n_split]) -def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, +def make_demod_map(context, obslist, shape, wcs, noise_model, info, preprocess_config, prefix, comm=mpi.COMM_WORLD, comps="TQU", t0=0, dtype_tod=np.float32, dtype_map=np.float32, - tag="", verbose=0, split_labels=None, + tag="", verbose=0, split_labels=None, L=None, site='so_sat3', recenter=None, singlestream=False): """ Make a demodulated map from the list of observations in obslist. @@ -398,8 +398,6 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, WCS kernel of the geometry to use for mapping. noise_model : sotodlib.mapmaking.Nmat Noise model to pass to DemodMapmaker. - L : logger - Logger for printing on the screen. info : list Information for the database, will be written as a .hdf file. preprocess_config : dict @@ -421,6 +419,8 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, split_labels : list or None, optional A list of strings with the splits requested. If None then no splits were asked for, i.e. we will produce one map. + L : logger, optional + Logger for printing on the screen. site : str, optional Plataform name for the pointing matrix. recenter : str or None @@ -439,6 +439,8 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, L, info, List of outputs from preprocess database. To be used in cleanup_mandb. 
""" context = core.Context(context) + if L is None: + L = site_pipeline.util.init_logger("Demod filterbin mapmaking") pre = "" if tag is None else tag + " " if comm.rank == 0: L.info(pre + "Initializing equation system") mapmaker = setup_demod_map(shape, wcs, noise_model, comm=comm, diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py index d020e403c..f995bed18 100644 --- a/sotodlib/site_pipeline/make_atomic_filterbin_map.py +++ b/sotodlib/site_pipeline/make_atomic_filterbin_map.py @@ -396,6 +396,7 @@ def main(config_file=None, defaults=defaults, **args): my_tods = list(itertools.chain.from_iterable(my_tods)) my_ra_ref = list(itertools.chain.from_iterable(my_ra_ref)) pwvs = list(itertools.chain.from_iterable(pwvs)) + L.info('Starting with read_tods') # we will do the profile and footprint here, and then allgather the # subshapes and subwcs.This way we don't have to communicate the @@ -482,7 +483,7 @@ def main(config_file=None, defaults=defaults, **args): # Done with creating run_list with ProcessPoolExecutor(args['nproc']) as exe: futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], r[1], r[2], - noise_model, L, r[3], preprocess_config, r[4], + noise_model, r[3], preprocess_config, r[4], comm = comm, t0=r[5], tag=tag, recenter=recenter, dtype_map=args['dtype_map'], dtype_tod=args['dtype_tod'], From 6535ecc67dead1f2b84b7da55cf6f5cb068c21d1 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Mon, 21 Oct 2024 08:19:59 -0700 Subject: [PATCH 24/39] updates from satp1 site pipeline job --- sotodlib/tod_ops/flags.py | 4 +-- sotodlib/tod_ops/gapfill.py | 8 +++-- sotodlib/tod_ops/t2pleakage.py | 55 ++++++++++++++++++++++++---------- tests/test_flags.py | 2 +- tests/test_t2pleakage.py | 4 +-- tests/test_tod_ops.py | 12 ++++---- 6 files changed, 55 insertions(+), 30 deletions(-) diff --git a/sotodlib/tod_ops/flags.py b/sotodlib/tod_ops/flags.py index b4bb6d4d5..fef8b4c1e 100644 --- a/sotodlib/tod_ops/flags.py +++ b/sotodlib/tod_ops/flags.py @@ -464,7 +464,7 @@ def get_trending_flags(aman, if timestamps is None: timestamps = aman.timestamps assert len(timestamps) == signal.shape[1] - + # This helps with floating point precision # Not modifying inplace since we don't want to touch aman.timestamps timestamps = timestamps - timestamps[0] @@ -474,7 +474,7 @@ def get_trending_flags(aman, samp_edges = [0] # Get sampling rate - fs = 1 / np.nanmedian(np.diff(timestamps)) + fs = 1. / np.nanmedian(np.diff(timestamps)) n_samples_per_piece = int(t_piece * fs) # How many pieces can timestamps be divided into n_pieces = len(timestamps) // n_samples_per_piece diff --git a/sotodlib/tod_ops/gapfill.py b/sotodlib/tod_ops/gapfill.py index 2dc1b9464..241bc99a7 100644 --- a/sotodlib/tod_ops/gapfill.py +++ b/sotodlib/tod_ops/gapfill.py @@ -407,9 +407,11 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, glitch_flags : RangesMatrix or None RangesMatrix containing flags to use for gap filling. If None then uses ``aman.flags.glitches``. - wrap : bool or str - If True wraps new field called ``gap_filled``, if False returns the - gap filled array, if a string wraps new field with provided name. + in_place : bool + If False it makes a copy of signal before gap filling and returns + the copy. + wrap_name : bool or str + If not None, wrap the gap filled data into tod with this name. 
Returns ------- diff --git a/sotodlib/tod_ops/t2pleakage.py b/sotodlib/tod_ops/t2pleakage.py index 32f9f810a..37898be96 100644 --- a/sotodlib/tod_ops/t2pleakage.py +++ b/sotodlib/tod_ops/t2pleakage.py @@ -10,8 +10,8 @@ def leakage_model(dT, AQ, AU, lamQ, lamU): return AQ + lamQ * dT + 1.j * (AU + lamU * dT) def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', - mask=None, ds_factor=100, subtract_sig=False, merge_stats=True, - t2p_stats_name='t2p_stats'): + wn_demod=None, f_lpf_cutoff=2.0, mask=None, ds_factor=100, + subtract_sig=False, merge_stats=True, t2p_stats_name='t2p_stats'): """ Compute the leakage coefficients from temperature (T) to polarization (Q and U). Optionally subtract this leakage and return axismanager of coefficients. @@ -37,17 +37,30 @@ def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demo ------- out_aman : AxisManager An AxisManager containing leakage coefficients. - """ + """ + + # get white noise level of demod for error estimation + if wn_demod is None: + freqs, Pxx_demod = calc_psd(aman, signal=aman[Q_sig_name], merge=False) + wn_demod = calc_wn(aman, pxx=Pxx_demod, freqs=freqs, low_f=0.5, high_f=1.5) + + # integrate the white noise level over frequencies to get error bar of each point + sigma_demod = wn_demod * np.sqrt(f_lpf_cutoff) + sigma_T = sigma_demod/np.sqrt(2) if mask is None: - mask = np.ones_like(aman.dsT, dtype='bool') + mask = np.ones_like(aman[T_sig_name], dtype='bool') A_Q_array = np.empty([aman.dets.count]) A_U_array = np.empty_like(A_Q_array) - A_P_array = np.empty_like(A_Q_array) lambda_Q_array = np.empty_like(A_Q_array) lambda_U_array = np.empty_like(A_Q_array) - lambda_P_array = np.empty_like(A_Q_array) + + A_Q_error = np.empty_like(A_Q_array) + A_U_error = np.empty_like(A_Q_array) + lambda_Q_error = np.empty_like(A_Q_array) + lambda_U_error = np.empty_like(A_Q_array) + redchi2s_array = np.empty_like(A_Q_array) for di, det in enumerate(aman.dets.vals): @@ -55,25 +68,30 @@ def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demo yQ = aman[Q_sig_name][di][mask[di]][::ds_factor] yU = aman[U_sig_name][di][mask[di]][::ds_factor] - model = Model(leakage_model, independent_vars=['dT']) - params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), - lamQ=0., lamU=0.) - result = model.fit(yQ + 1j * yU, params, dT=x) - if result.success: + try: + model = Model(leakage_model, independent_vars=['dT'], weights=np.ones_like(x)/sigma_demod[di]) + params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), lamQ=0., lamU=0.) 
+ result = model.fit(yQ + 1j * yU, params, dT=x) A_Q_array[di] = result.params['AQ'].value A_U_array[di] = result.params['AU'].value - A_P_array[di] = np.sqrt(result.params['AQ'].value**2 + result.params['AU'].value**2) lambda_Q_array[di] = result.params['lamQ'].value lambda_U_array[di] = result.params['lamU'].value - lambda_P_array[di] = np.sqrt(result.params['lamQ'].value**2 + result.params['lamU'].value**2) + + A_Q_error[di] = result.params['AQ'].stderr + A_U_error[di] = result.params['AU'].stderr + lambda_Q_error[di] = result.params['lamQ'].stderr + lambda_U_error[di] = result.params['lamU'].stderr redchi2s_array[di] = result.redchi - else: + except: A_Q_array[di] = np.nan A_U_array[di] = np.nan - A_P_array[di] = np.nan lambda_Q_array[di] = np.nan lambda_U_array[di] = np.nan - lambda_P_array[di] = np.nan + + A_Q_error[di] = np.nan + A_U_error[di] = np.nan + lambda_Q_array[di] = np.nan + lambda_U_error[di] = np.nan redchi2s_array[di] = np.nan out_aman = core.AxisManager(aman.dets, aman.samps) @@ -81,6 +99,11 @@ def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demo out_aman.wrap('AU', A_U_array, [(0, 'dets')]) out_aman.wrap('lamQ', lambda_Q_array, [(0, 'dets')]) out_aman.wrap('lamU', lambda_U_array, [(0, 'dets')]) + + out_aman.wrap('AQ_error', A_Q_error, [(0, 'dets')]) + out_aman.wrap('AU_error', A_U_error, [(0, 'dets')]) + out_aman.wrap('lamQ_error', lambda_Q_error, [(0, 'dets')]) + out_aman.wrap('lamU_error', lambda_U_error, [(0, 'dets')]) out_aman.wrap('redchi2s', redchi2s_array, [(0, 'dets')]) if subtract_sig: diff --git a/tests/test_flags.py b/tests/test_flags.py index b241ca4a5..ed0b16c39 100644 --- a/tests/test_flags.py +++ b/tests/test_flags.py @@ -25,7 +25,7 @@ def test_trending(self): self.assertTrue(np.array_equal(cut.ranges[1].ranges(), [[0, 1000]])) self.assertEqual(len(cut.ranges[0].ranges()), 0) - cut = flags.get_trending_flags(aman, max_trend=0.5, n_pieces=3) + cut = flags.get_trending_flags(aman, max_trend=0.5, t_piece=333) self.assertTupleEqual(cut.shape, (2, 1000)) self.assertTrue(np.array_equal(cut.ranges[1].ranges(), [[0, 1000]])) self.assertEqual(len(cut.ranges[0].ranges()), 0) diff --git a/tests/test_t2pleakage.py b/tests/test_t2pleakage.py index 4a720a9de..fd0a91f45 100644 --- a/tests/test_t2pleakage.py +++ b/tests/test_t2pleakage.py @@ -59,8 +59,8 @@ def test_leakage_subtraction(self): hwp.demod_tod(tod) oman = t2pleakage.get_t2p_coeffs(tod) - self.assertTrue(np.all(np.isclose(oman.coeffsQ, t2q, atol=oman.errorsQ*5, rtol=0))) - self.assertTrue(np.all(np.isclose(oman.coeffsU, t2u, atol=oman.errorsU*5, rtol=0))) + self.assertTrue(np.all(np.isclose(oman.lamQ, t2q, atol=oman.lamQ_error*5, rtol=0))) + self.assertTrue(np.all(np.isclose(oman.lamU, t2u, atol=oman.lamU_error*5, rtol=0))) if __name__ == '__main__': unittest.main() \ No newline at end of file diff --git a/tests/test_tod_ops.py b/tests/test_tod_ops.py index c2bd93754..6a588f330 100644 --- a/tests/test_tod_ops.py +++ b/tests/test_tod_ops.py @@ -169,21 +169,21 @@ def test_fillglitches(self): ts = np.arange(0, 1*60, 1/200) aman = get_glitchy_tod(ts, ndets=100) # test poly fill - up, mg = False, False + in_place, up, mg = False, False, None glitch_filled = tod_ops.gapfill.fill_glitches(aman, use_pca=up, - wrap=mg) + in_place=in_place, wrap_name=mg) self.assertTrue(np.max(np.abs(glitch_filled-aman.inputsignal)) < 1e-3) # test pca fill - up, mg = True, False + in_place, up, mg = False, True, None glitch_filled = tod_ops.gapfill.fill_glitches(aman, use_pca=up, - wrap=mg) + 
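[Editor's note] The test change from n_pieces to t_piece above follows the new get_trending_flags interface, where the piece count is derived from the sampling rate; a small sketch of that arithmetic, with an assumed 1 Hz cadence:

    import numpy as np

    timestamps = np.arange(1000, dtype=float)       # 1000 samples at 1 Hz (assumed)
    t_piece = 333.0                                 # seconds per piece
    fs = 1. / np.nanmedian(np.diff(timestamps))
    n_samples_per_piece = int(t_piece * fs)
    n_pieces = len(timestamps) // n_samples_per_piece
    print(n_pieces)                                 # 3, reproducing the old n_pieces=3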
in_place=in_place, wrap_name=mg) print(np.max(np.abs(glitch_filled-aman.inputsignal))) # test wrap new field - up, mg = False, True + in_place, up, mg = False, False, "gap_filled" glitch_filled = tod_ops.gapfill.fill_glitches(aman, use_pca=up, - wrap=mg) + in_place=in_place, wrap_name=mg) self.assertTrue('gap_filled' in aman._assignments) class FilterTest(unittest.TestCase): From 2b24dbf552a18ab4a1b2628d209837fcab5fd74a Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Mon, 21 Oct 2024 08:39:47 -0700 Subject: [PATCH 25/39] remove lingering profile call --- sotodlib/preprocess/processes.py | 67 ++++++-------------------------- 1 file changed, 11 insertions(+), 56 deletions(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 377047302..28aa51aba 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -1078,23 +1078,15 @@ class PCARelCal(_Preprocess): Example configuration file entry:: - name: 'pca_relcal' - signal: 'lpf_sig' - pca_run: 'run1' - calc: - xfac: 2 - yfac: 1.5 - calc_good_medianw: True + signal: 'hwpss_remove' + calc: True save: True - plot: - plot_ds_factor: 20 - + See :ref:`pca-background` for more details on the method. """ name = 'pca_relcal' def __init__(self, step_cfgs): self.signal = step_cfgs.get('signal', 'signal') - self.run = step_cfgs.get('pca_run', 'run1') - self.run_name = f'{self.signal}_{self.run}' super().__init__(step_cfgs) @@ -1102,64 +1094,27 @@ def calc_and_save(self, aman, proc_aman): bands = np.unique(aman.det_info.wafer.bandpass) bands = bands[bands != 'NC'] rc_aman = core.AxisManager(aman.dets, aman.samps) - pca_det_mask = np.full(aman.dets.count, False, dtype=bool) relcal = np.zeros(aman.dets.count) - pca_weight0 = np.zeros(aman.dets.count) for band in bands: m0 = aman.det_info.wafer.bandpass == band - rc_aman.wrap(f'{band}_idx', m0, [(0, 'dets')]) + rc_aman.wrap(f'{band}_mask', m0, [(0, 'dets')]) band_aman = aman.restrict('dets', aman.dets.vals[m0], in_place=False) - pca_out = tod_ops.pca.get_pca(band_aman,signal=band_aman[self.signal]) pca_signal = tod_ops.pca.get_pca_model(band_aman, pca_out, signal=band_aman[self.signal]) - result_aman = tod_ops.pca.pca_cuts_and_cal(band_aman, pca_signal, **self.calc_cfgs) - - pca_det_mask[m0] = np.logical_or(pca_det_mask[m0], result_aman['pca_det_mask']) - relcal[m0] = result_aman['relcal'] - pca_weight0[m0] = result_aman['pca_weight0'] - rc_aman.wrap(f'{band}_pca_mode0', result_aman['pca_mode0'], [(0, 'samps')]) - rc_aman.wrap(f'{band}_xbounds', result_aman['xbounds']) - rc_aman.wrap(f'{band}_ybounds', result_aman['ybounds']) - rc_aman.wrap(f'{band}_median', result_aman['median']) - - rc_aman.wrap('pca_det_mask', pca_det_mask, [(0, 'dets')]) - rc_aman.wrap('relcal', relcal, [(0, 'dets')]) - rc_aman.wrap('pca_weight0', pca_weight0, [(0, 'dets')]) + med = np.median(pca_signal.weights[:,0]) + relcal[m0] = med/pca_signal.weights[:,0] + rc_aman.wrap(f'{band}_median', med) + rc_aman.wrap(f'{band}_pca_mode0', pca_signal.modes[0], [(0, 'samps')]) + rc_aman.wrap('relcal', relcal, [(0,'dets')]) self.save(proc_aman, rc_aman) - def save(self, proc_aman, pca_aman): + def save(self, proc_aman, rc_aman): if self.save_cfgs is None: return if self.save_cfgs: - proc_aman.wrap(self.run_name, pca_aman) - - def select(self, meta, proc_aman=None): - if self.select_cfgs is None: - return meta - if proc_aman is None: - proc_aman = meta.preprocess - keep = ~proc_aman[self.run_name]['pca_det_mask'] - meta.restrict("dets", meta.dets.vals[keep]) - return meta - - def 
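[Editor's note] The simplified calc_and_save above reduces the relative calibration to the first PCA mode's per-detector coupling, normalized to the band median; a toy numpy version of that step (weights are hypothetical):

    import numpy as np

    weights0 = np.array([0.9, 1.0, 1.1, 2.0])   # hypothetical mode-0 weights per det
    relcal = np.median(weights0) / weights0     # unity at the median detector

    rng = np.random.default_rng(0)
    mode0 = rng.standard_normal(100)
    signal = weights0[:, None] * mode0          # dets couple to one common mode
    calibrated = signal * relcal[:, None]       # equal common-mode amplitude per det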
plot(self, aman, proc_aman, filename): - if self.plot_cfgs is None: - return - if self.plot_cfgs: - from .preprocess_plot import plot_pcabounds - filename = filename.replace('{ctime}', f'{str(aman.timestamps[0])[:5]}') - filename = filename.replace('{obsid}', aman.obs_info.obs_id) - det = aman.dets.vals[0] - ufm = det.split('_')[2] - - bands = np.unique(aman.det_info.wafer.bandpass) - bands = bands[bands != 'NC'] - for band in bands: - pca_aman = aman.restrict('dets', aman.dets.vals[proc_aman[self.run_name][f'{band}_idx']], in_place=False) - band_aman = proc_aman[self.run_name].restrict('dets', aman.dets.vals[proc_aman[self.run_name][f'{band}_idx']], in_place=False) - plot_pcabounds(pca_aman, band_aman, filename=filename.replace('{name}', f'{ufm}_{band}_pca'), signal=self.signal, band=band, plot_ds_factor=self.plot_cfgs.get('plot_ds_factor', 20)) + proc_aman.wrap(self.name, rc_aman) class PTPFlags(_Preprocess): """Find detectors with anomalous peak-to-peak signal. From 9ea21961525d255320fe8d7632a113eae107603e Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Mon, 21 Oct 2024 09:11:00 -0700 Subject: [PATCH 26/39] fix processes.py --- sotodlib/preprocess/processes.py | 89 ++++++++++++++++++++++++-------- 1 file changed, 68 insertions(+), 21 deletions(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 28aa51aba..545b56f41 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -14,13 +14,14 @@ from .pcore import _Preprocess, _FracFlaggedMixIn from .. import flag_utils + class FFTTrim(_Preprocess): """Trim the AxisManager to optimize for faster FFTs later in the pipeline. All processing configs go to `fft_trim` .. autofunction:: sotodlib.tod_ops.fft_trim """ - name = "fft_trim" + name = "fft_trim" def process(self, aman, proc_aman): tod_ops.fft_trim(aman, **self.process_cfgs) @@ -97,7 +98,7 @@ class Trends(_FracFlaggedMixIn, _Preprocess): signal: "signal" # optional calc: max_trend: 2.5 - t_piece: 100 + t_pieces: 100 save: True plot: True select: @@ -112,7 +113,7 @@ def __init__(self, step_cfgs): self.signal = step_cfgs.get('signal', 'signal') super().__init__(step_cfgs) - + def calc_and_save(self, aman, proc_aman): _, trend_aman = tod_ops.flags.get_trending_flags( aman, merge=False, full_output=True, @@ -365,6 +366,7 @@ def __init__(self, step_cfgs): super().__init__(step_cfgs) + def process(self, aman, proc_aman): freqs, Pxx = tod_ops.fft_ops.calc_psd(aman, signal=aman[self.signal], **self.process_cfgs) @@ -418,7 +420,7 @@ def calc_and_save(self, aman, proc_aman): if self.psd not in aman: raise ValueError("PSD is not saved in AxisManager") psd = aman[self.psd] - + if self.calc_cfgs is None: self.calc_cfgs = {} @@ -500,7 +502,7 @@ def __init__(self, step_cfgs): self.signal = step_cfgs.get('signal', 'signal') super().__init__(step_cfgs) - + def process(self, aman, proc_aman): if self.process_cfgs["kind"] == "single_value": if self.process_cfgs.get("divide", False): @@ -563,7 +565,7 @@ def plot(self, aman, proc_aman, filename): plot_4f_2f_counts(aman, filename=filename.replace('{name}', f'{ufm}_4f_2f_counts')) plot_hwpss_fit_status(aman, proc_aman[self.calc_cfgs["hwpss_stats_name"]], filename=filename.replace('{name}', f'{ufm}_hwpss_stats')) - #@classmethod + @classmethod def gen_metric(cls, meta, proc_aman): """ Generate a QA metric for the coefficients of the HWPSS fit. Coefficient percentiles and mean are recorded for every mode and detset. @@ -716,7 +718,7 @@ class EstimateAzSS(_Preprocess): .. 
autofunction:: sotodlib.tod_ops.azss.get_azss """ name = "estimate_azss" - + def calc_and_save(self, aman, proc_aman): calc_aman, _ = tod_ops.azss.get_azss(aman, **self.calc_cfgs) self.save(proc_aman, calc_aman) @@ -796,7 +798,7 @@ def save(self, proc_aman, turn_aman): return if self.save_cfgs: proc_aman.wrap("turnaround_flags", turn_aman) - + def process(self, aman, proc_aman): tod_ops.flags.get_turnaround_flags(aman, **self.process_cfgs) @@ -920,7 +922,7 @@ class SourceFlags(_Preprocess): .. autofunction:: sotodlib.tod_ops.flags.get_source_flags """ name = "source_flags" - + def calc_and_save(self, aman, proc_aman): center_on = self.calc_cfgs.get('center_on', 'planet') # Get source from tags @@ -1078,15 +1080,23 @@ class PCARelCal(_Preprocess): Example configuration file entry:: - name: 'pca_relcal' - signal: 'hwpss_remove' - calc: True + signal: 'lpf_sig' + pca_run: 'run1' + calc: + xfac: 2 + yfac: 1.5 + calc_good_medianw: True save: True - + plot: + plot_ds_factor: 20 + See :ref:`pca-background` for more details on the method. """ name = 'pca_relcal' def __init__(self, step_cfgs): self.signal = step_cfgs.get('signal', 'signal') + self.run = step_cfgs.get('pca_run', 'run1') + self.run_name = f'{self.signal}_{self.run}' super().__init__(step_cfgs) @@ -1094,27 +1104,64 @@ def calc_and_save(self, aman, proc_aman): bands = np.unique(aman.det_info.wafer.bandpass) bands = bands[bands != 'NC'] rc_aman = core.AxisManager(aman.dets, aman.samps) + pca_det_mask = np.full(aman.dets.count, False, dtype=bool) relcal = np.zeros(aman.dets.count) + pca_weight0 = np.zeros(aman.dets.count) for band in bands: m0 = aman.det_info.wafer.bandpass == band - rc_aman.wrap(f'{band}_mask', m0, [(0, 'dets')]) + rc_aman.wrap(f'{band}_idx', m0, [(0, 'dets')]) band_aman = aman.restrict('dets', aman.dets.vals[m0], in_place=False) + pca_out = tod_ops.pca.get_pca(band_aman,signal=band_aman[self.signal]) pca_signal = tod_ops.pca.get_pca_model(band_aman, pca_out, signal=band_aman[self.signal]) - med = np.median(pca_signal.weights[:,0]) - relcal[m0] = med/pca_signal.weights[:,0] + result_aman = tod_ops.pca.pca_cuts_and_cal(band_aman, pca_signal, **self.calc_cfgs) + + pca_det_mask[m0] = np.logical_or(pca_det_mask[m0], result_aman['pca_det_mask']) + relcal[m0] = result_aman['relcal'] + pca_weight0[m0] = result_aman['pca_weight0'] + rc_aman.wrap(f'{band}_pca_mode0', result_aman['pca_mode0'], [(0, 'samps')]) + rc_aman.wrap(f'{band}_xbounds', result_aman['xbounds']) + rc_aman.wrap(f'{band}_ybounds', result_aman['ybounds']) + rc_aman.wrap(f'{band}_median', result_aman['median']) + + rc_aman.wrap('pca_det_mask', pca_det_mask, [(0, 'dets')]) + rc_aman.wrap('relcal', relcal, [(0, 'dets')]) + rc_aman.wrap('pca_weight0', pca_weight0, [(0, 'dets')]) - rc_aman.wrap(f'{band}_median', med) - rc_aman.wrap(f'{band}_pca_mode0', pca_signal.modes[0], [(0, 'samps')]) - rc_aman.wrap('relcal', relcal, [(0,'dets')]) self.save(proc_aman, rc_aman) - def save(self, proc_aman, rc_aman): + def save(self, proc_aman, pca_aman): if self.save_cfgs is None: return if self.save_cfgs: - proc_aman.wrap(self.name, rc_aman) + proc_aman.wrap(self.run_name, pca_aman) + + def select(self, meta, proc_aman=None): + if self.select_cfgs is None: + return meta + if proc_aman is None: + proc_aman = meta.preprocess + keep = ~proc_aman[self.run_name]['pca_det_mask'] + meta.restrict("dets", meta.dets.vals[keep]) + return meta + + def plot(self, aman, proc_aman, filename): + if self.plot_cfgs is None: + return + if self.plot_cfgs: + from .preprocess_plot import plot_pcabounds 
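[Editor's note] Outside the preprocess framework, the restored per-band relcal flow above can be driven directly; a hedged sketch, assuming an AxisManager `aman` with det_info.wafer.bandpass and a low-pass-filtered field `lpf_sig`:

    from sotodlib import tod_ops

    band = 'f150'                                    # hypothetical bandpass label
    m0 = aman.det_info.wafer.bandpass == band
    band_aman = aman.restrict('dets', aman.dets.vals[m0], in_place=False)

    pca_out = tod_ops.pca.get_pca(band_aman, signal=band_aman['lpf_sig'])
    pca_signal = tod_ops.pca.get_pca_model(band_aman, pca_out,
                                           signal=band_aman['lpf_sig'])
    result = tod_ops.pca.pca_cuts_and_cal(band_aman, pca_signal,
                                          xfac=2, yfac=1.5,
                                          calc_good_medianw=True)
    keep = ~result['pca_det_mask']                   # dets passing the PCA cuts
    relcal = result['relcal']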
+ filename = filename.replace('{ctime}', f'{str(aman.timestamps[0])[:5]}') + filename = filename.replace('{obsid}', aman.obs_info.obs_id) + det = aman.dets.vals[0] + ufm = det.split('_')[2] + + bands = np.unique(aman.det_info.wafer.bandpass) + bands = bands[bands != 'NC'] + for band in bands: + pca_aman = aman.restrict('dets', aman.dets.vals[proc_aman[self.run_name][f'{band}_idx']], in_place=False) + band_aman = proc_aman[self.run_name].restrict('dets', aman.dets.vals[proc_aman[self.run_name][f'{band}_idx']], in_place=False) + plot_pcabounds(pca_aman, band_aman, filename=filename.replace('{name}', f'{ufm}_{band}_pca'), signal=self.signal, band=band, plot_ds_factor=self.plot_cfgs.get('plot_ds_factor', 20)) class PTPFlags(_Preprocess): """Find detectors with anomalous peak-to-peak signal. @@ -1239,7 +1286,7 @@ class SubtractT2P(_Preprocess): Q_sig_name: 'demodQ' U_sig_name: 'demodU' - .. autofunction:: sotodlib.tod_ops.t2pleakage.subtract_leakage + .. autofunction:: sotodlib.tod_ops.t2pleakage.subtract_t2p """ name = "subtract_t2p" From fddb0be873e8ed336a0c1d8a50370fb83a00f51b Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 28 Oct 2024 14:39:06 -0700 Subject: [PATCH 27/39] Support for healpix added to make_demod_map and the script --- sotodlib/mapmaking/demod_mapmaker.py | 77 +++++++++++------- .../make_atomic_filterbin_map.py | 78 +++++++++++++------ 2 files changed, 103 insertions(+), 52 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index f7abb8801..0707d655a 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -379,30 +379,48 @@ def write(self, prefix, tag, m): return oname -def setup_demod_map(shape, wcs, noise_model, comm=mpi.COMM_WORLD, - comps='TQU', split_labels=None, singlestream=False, - dtype_tod=np.float32, dtype_map=np.float32, - recenter=None, verbose=0): +def setup_demod_map(noise_model, shape=None, wcs=None, nside=None, + comm=mpi.COMM_WORLD, comps='TQU', split_labels=None, + singlestream=False, dtype_tod=np.float32, + dtype_map=np.float32, recenter=None, verbose=0): """ Setup the classes for demod mapmaking and return a DemodMapmmaker object """ - if split_labels==None: - # this is the case where we did not request any splits at all - signal_map = DemodSignalMap.for_rectpix(shape, wcs, comm, comps=comps, - dtype=dtype_map, tiled=False, - ofmt="", singlestream=singlestream, - recenter=recenter ) - else: - # this is the case where we asked for at least 2 splits (1 split set). - # We count how many split we'll make, we need to define the Nsplits - # maps inside the DemodSignalMap - Nsplits = len(split_labels) - signal_map = DemodSignalMap.for_rectpix(shape, wcs, comm, comps=comps, - dtype=dtype_map, tiled=False, - ofmt="", Nsplits=Nsplits, - singlestream=singlestream, - recenter=recenter) + if shape is not None and wcs is not None: + if split_labels==None: + # this is the case where we did not request any splits at all + signal_map = DemodSignalMap.for_rectpix(shape, wcs, comm, comps=comps, + dtype=dtype_map, tiled=False, + ofmt="", singlestream=singlestream, + recenter=recenter ) + else: + # this is the case where we asked for at least 2 splits (1 split set). 
+ # We count how many split we'll make, we need to define the Nsplits + # maps inside the DemodSignalMap + Nsplits = len(split_labels) + signal_map = DemodSignalMap.for_rectpix(shape, wcs, comm, comps=comps, + dtype=dtype_map, tiled=False, + ofmt="", Nsplits=Nsplits, + singlestream=singlestream, + recenter=recenter) + elif nside is not None: + if split_labels==None: + # this is the case where we did not request any splits at all + signal_map = DemodSignalMap.for_healpix(nside, comps=comps, + dtype=dtype_map, + ofmt="", singlestream=singlestream, + ext="fits.gz") + else: + # this is the case where we asked for at least 2 splits (1 split set). + # We count how many split we'll make, we need to define the Nsplits + # maps inside the DemodSignalMap + Nsplits = len(split_labels) + signal_map = DemodSignalMap.for_healpix(nside, comps=comps, + dtype=dtype_map, + ofmt="", Nsplits=Nsplits, + singlestream=singlestream, + ext="fits.gz") signals = [signal_map] mapmaker = DemodMapmaker(signals, noise_model=noise_model, dtype=dtype_tod, @@ -440,8 +458,9 @@ def write_demod_info(oname, info, split_labels=None): for n_split in range(Nsplits): bunch.write(oname+'_%s_info.hdf'%split_labels[n_split], info[n_split]) -def make_demod_map(context, obslist, shape, wcs, noise_model, info, - preprocess_config, prefix, comm=mpi.COMM_WORLD, comps="TQU", t0=0, +def make_demod_map(context, obslist, noise_model, info, + preprocess_config, prefix, shape=None, wcs=None, + nside=None, comm=mpi.COMM_WORLD, comps="TQU", t0=0, dtype_tod=np.float32, dtype_map=np.float32, tag="", verbose=0, split_labels=None, L=None, site='so_sat3', recenter=None, singlestream=False): @@ -456,10 +475,6 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, info, The obslist which is the output of the mapmaking.obs_grouping.build_obslists, contains the information of the single or multiple obs to map. - shape : tuple - Shape of the geometry to use for mapping. - wcs : dict - WCS kernel of the geometry to use for mapping. noise_model : sotodlib.mapmaking.Nmat Noise model to pass to DemodMapmaker. info : list @@ -468,6 +483,12 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, info, Dictionary with the config yaml file for the preprocess database. prefix : str Prefix for the output files + shape : tuple, optional + Shape of the geometry to use for mapping. + wcs : dict, optional + WCS kernel of the geometry to use for mapping. + nside : int, optional + Nside for healpix pixelization comps : str, optional Which components to map, only TQU supported for now. 
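[Editor's note] With the geometry demoted to keyword arguments above, one entry point serves both pixelizations; a hedged usage sketch, assuming `context`, `obslist`, `info`, `preprocess_config` and `prefix` are prepared as in the site-pipeline script below (the kernel filename is hypothetical):

    from pixell import enmap
    from sotodlib import mapmaking

    noise_model = mapmaking.NmatWhite()

    # rectangular pixels: geometry comes from a WCS kernel file
    shape, wcs = enmap.read_map_geometry('car_5arcmin_kernel.fits')
    mapmaking.make_demod_map(context, obslist, noise_model, info,
                             preprocess_config, prefix,
                             shape=shape, wcs=wcs, comps='TQU')

    # HEALPix pixels: only nside is needed
    mapmaking.make_demod_map(context, obslist, noise_model, info,
                             preprocess_config, prefix,
                             nside=512, comps='TQU')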
t0 : int, optional @@ -507,8 +528,8 @@ def make_demod_map(context, obslist, shape, wcs, noise_model, info, L = site_pipeline.util.init_logger("Demod filterbin mapmaking") pre = "" if tag is None else tag + " " if comm.rank == 0: L.info(pre + "Initializing equation system") - mapmaker = setup_demod_map(shape, wcs, noise_model, comm=comm, - comps=comps, split_labels=split_labels, + mapmaker = setup_demod_map(noise_model, shape=shape, wcs=wcs, nside=nside, + comm=comm, comps=comps, split_labels=split_labels, singlestream=singlestream, dtype_tod=dtype_tod, dtype_map=dtype_map, recenter=recenter, verbose=verbose) diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py index f995bed18..2ebfc9d87 100644 --- a/sotodlib/site_pipeline/make_atomic_filterbin_map.py +++ b/sotodlib/site_pipeline/make_atomic_filterbin_map.py @@ -8,7 +8,9 @@ from concurrent.futures import ProcessPoolExecutor, as_completed import sotodlib.site_pipeline.util as util -defaults = {"query": "1", +defaults = {"area": None, + "nside": None, + "query": "1", "odir": "./output", "preprocess_config": None, "comps": "TQU", @@ -46,12 +48,14 @@ def get_parser(parser=None): parser.add_argument("--config-file", type=str, default=None, help="Path to mapmaker config.yaml file") - parser.add_argument("--context", - help='context file') - parser.add_argument("--query", - help='query, can be a file (list of obs_id) or selection string') + parser.add_argument("--context", type=str, + help='context file') parser.add_argument("--area", help='wcs kernel') + parser.add_argument("--nside", + help='Nside if you want map in HEALPIX') + parser.add_argument("--query", + help='query, can be a file (list of obs_id) or selection string') parser.add_argument("--odir", help='output directory') parser.add_argument("--preprocess_config", type=str, @@ -262,8 +266,14 @@ def main(config_file=None, defaults=defaults, **args): comm = mpi.FAKE_WORLD # Fake communicator since we won't use MPI verbose = args['verbose'] - args['quiet'] - shape, wcs = enmap.read_map_geometry(args['area']) - wcs = wcsutils.WCS(wcs.to_header()) + if args['area'] is not None: + shape, wcs = enmap.read_map_geometry(args['area']) + wcs = wcsutils.WCS(wcs.to_header()) + elif args['nside'] is not None: + nside = int(args['nside']) + else: + print('Neither rectangular area or nside specified, exiting.') + exit(1) noise_model = mapmaking.NmatWhite() ncomp = len(args['comps']) @@ -396,20 +406,21 @@ def main(config_file=None, defaults=defaults, **args): my_tods = list(itertools.chain.from_iterable(my_tods)) my_ra_ref = list(itertools.chain.from_iterable(my_ra_ref)) pwvs = list(itertools.chain.from_iterable(pwvs)) - L.info('Starting with read_tods') + L.info('Done with read_tods') - # we will do the profile and footprint here, and then allgather the - # subshapes and subwcs.This way we don't have to communicate the - # massive arrays such as timestamps - subshapes = [] ; subwcses = [] - for obs in my_tods: - if recenter is None: - subshape, subwcs = find_footprint(context, obs, wcs,) - subshapes.append(subshape) ; subwcses.append(subwcs) - else: - subshape = shape; subwcs = wcs - subshapes.append(subshape) ; subwcses.append(subwcs) - # subshape and subwcs are in the order given by my_oblists + if args['area'] is not None: + # we will do the profile and footprint here, and then allgather the + # subshapes and subwcs.This way we don't have to communicate the + # massive arrays such as timestamps + subshapes = [] ; subwcses = [] + for obs 
in my_tods: + if recenter is None: + subshape, subwcs = find_footprint(context, obs, wcs,) + subshapes.append(subshape) ; subwcses.append(subwcs) + else: + subshape = shape; subwcs = wcs + subshapes.append(subshape) ; subwcses.append(subwcs) + # subshape and subwcs are in the order given by my_oblists run_list = [] for oi in range(len(my_tods)): @@ -424,8 +435,9 @@ def main(config_file=None, defaults=defaults, **args): t5 = ("%05d" % t)[:5] prefix = "%s/%s/atomic_%010d_%s_%s" % (args['odir'], t5, t, detset, band) - subshape = subshapes[oi] - subwcs = subwcses[oi] + if args['area'] is not None: + subshape = subshapes[oi] + subwcs = subwcses[oi] tag = "%5d/%d" % (oi+1, len(obskeys)) putils.mkdir(os.path.dirname(prefix)) @@ -479,11 +491,17 @@ def main(config_file=None, defaults=defaults, **args): pwv=pwv_atomic )) # inputs that are unique per atomic map go into run_list - run_list.append([obslist, subshape, subwcs, info, prefix, t]) + if args['area'] is not None: + run_list.append([obslist, subshape, subwcs, info, prefix, t]) + elif args['nside'] is not None: + run_list.append([obslist, info, prefix, t]) # Done with creating run_list + with ProcessPoolExecutor(args['nproc']) as exe: - futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], r[1], r[2], + if args['area'] is not None: + futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], noise_model, r[3], preprocess_config, r[4], + shape=r[1], wcs=r[2], comm = comm, t0=r[5], tag=tag, recenter=recenter, dtype_map=args['dtype_map'], dtype_tod=args['dtype_tod'], @@ -492,6 +510,18 @@ def main(config_file=None, defaults=defaults, **args): split_labels=split_labels, singlestream=args['singlestream'], site=args['site']) for r in run_list] + elif args['nside'] is not None: + futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], + noise_model, r[1], preprocess_config, r[2], + nside = nside, + comm = comm, t0=r[3], tag=tag, recenter=recenter, + dtype_map=args['dtype_map'], + dtype_tod=args['dtype_tod'], + comps=args['comps'], + verbose=args['verbose'], + split_labels=split_labels, + singlestream=args['singlestream'], + site=args['site']) for r in run_list] for future in as_completed(futures): L.info('New future as_completed result') try: From 63e00d5aa560ee037a50aa2de806d23465e96c96 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 29 Oct 2024 11:49:20 -0700 Subject: [PATCH 28/39] Implementing nside_tile=auto in make_demod_map --- sotodlib/mapmaking/demod_mapmaker.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index 0707d655a..0b99f92f3 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -407,20 +407,20 @@ def setup_demod_map(noise_model, shape=None, wcs=None, nside=None, elif nside is not None: if split_labels==None: # this is the case where we did not request any splits at all - signal_map = DemodSignalMap.for_healpix(nside, comps=comps, - dtype=dtype_map, - ofmt="", singlestream=singlestream, - ext="fits.gz") + signal_map = DemodSignalMap.for_healpix(nside, nside_tile='auto', + comps=comps, dtype=dtype_map, + ofmt="", singlestream=singlestream, + ext="fits.gz") else: # this is the case where we asked for at least 2 splits (1 split set). 
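[Editor's note] The submit/as_completed pattern above decouples per-atomic-map work from result handling; a self-contained toy version so the control flow is visible:

    from concurrent.futures import ProcessPoolExecutor, as_completed

    def make_one_map(t0):
        # stand-in for mapmaking.make_demod_map on one obslist
        if t0 % 3 == 0:
            raise RuntimeError('no valid dets')
        return 'atomic_%010d' % t0

    if __name__ == '__main__':
        with ProcessPoolExecutor(2) as exe:
            futures = [exe.submit(make_one_map, t) for t in range(1, 7)]
            for future in as_completed(futures):
                try:
                    print('wrote', future.result())
                except Exception as e:
                    print('skipped:', e)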
# We count how many split we'll make, we need to define the Nsplits
+            # maps inside the DemodSignalMap
+            Nsplits = len(split_labels)
+            signal_map = DemodSignalMap.for_healpix(nside, nside_tile='auto',
+                                        comps=comps, dtype=dtype_map,
+                                        ofmt="", Nsplits=Nsplits,
+                                        singlestream=singlestream,
+                                        ext="fits.gz")
 
     signals    = [signal_map]
     mapmaker   = DemodMapmaker(signals, noise_model=noise_model,
                                dtype=dtype_tod,

From bc97351aff77efcd665891ea7d1de34b152bc103 Mon Sep 17 00:00:00 2001
From: Michael McCrackan
Date: Wed, 30 Oct 2024 08:40:24 -0700
Subject: [PATCH 29/39] adjustments to t2p, flags, gapfill

---
 sotodlib/preprocess/processes.py |   5 +-
 sotodlib/tod_ops/flags.py        |   1 +
 sotodlib/tod_ops/gapfill.py      |  12 +-
 sotodlib/tod_ops/t2pleakage.py   | 277 ++++++++++++++++++++++++++-----
 tests/test_t2pleakage.py         |  24 ++-
 5 files changed, 266 insertions(+), 53 deletions(-)

diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py
index 545b56f41..55199bea7 100644
--- a/sotodlib/preprocess/processes.py
+++ b/sotodlib/preprocess/processes.py
@@ -98,7 +98,7 @@ class Trends(_FracFlaggedMixIn, _Preprocess):
         signal: "signal" # optional
         calc:
           max_trend: 2.5
-          t_pieces: 100
+          t_piece: 100
         save: True
         plot: True
         select:
@@ -743,7 +743,7 @@ class GlitchFill(_Preprocess):
             use_pca: False
             modes: 1
             in_place: True
-            wrap_name: None
+            wrap: None
 
     .. autofunction:: sotodlib.tod_ops.gapfill.fill_glitches
     """
@@ -1255,6 +1255,7 @@ class EstimateT2P(_Preprocess):
             T_sig_name: 'dsT'
             Q_sig_name: 'demodQ'
             U_sig_name: 'demodU'
+            joint_fit: True
             trim_samps: 2000
             lpf_cfgs:
                 type: 'sine2'
diff --git a/sotodlib/tod_ops/flags.py b/sotodlib/tod_ops/flags.py
index fef8b4c1e..4f0088eb7 100644
--- a/sotodlib/tod_ops/flags.py
+++ b/sotodlib/tod_ops/flags.py
@@ -34,6 +34,7 @@ def get_det_bias_flags(aman, detcal=None, rfrac_range=(0.1, 0.7),
     psat_range : Tuple
         Tuple (lower_bound, upper_bound) for P_SAT from IV analysis.
         P_SAT in the IV analysis is the bias power at 90% Rn in pW.
+        If None, no flags are applied from P_SAT.
     rn_range : Tuple
         Tuple (lower_bound, upper_bound) for r_n det selection.
     si_nan : bool
diff --git a/sotodlib/tod_ops/gapfill.py b/sotodlib/tod_ops/gapfill.py
index 241bc99a7..aa527eefb 100644
--- a/sotodlib/tod_ops/gapfill.py
+++ b/sotodlib/tod_ops/gapfill.py
@@ -385,7 +385,7 @@ def get_contaminated_ranges(good_flags, bad_flags):
 
 
 def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None,
-                  glitch_flags=None, in_place=True, wrap_name=None):
+                  glitch_flags=None, in_place=True, wrap=None):
     """
     This function fills pre-computed glitches provided by the caller in
     time-ordered data using either a polynomial (default) or PCA-based
@@ -410,7 +410,7 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None,
     in_place : bool
         If False it makes a copy of signal before gap filling and returns
         the copy.
-    wrap_name : bool or str
+    wrap : str or None
        If not None, wrap the gap filled data into tod with this name.
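[Editor's note] Following the rename, a minimal usage sketch (assuming an AxisManager `tod` with flags.glitches already computed); the test updates later in this series exercise the same combinations:

    from sotodlib import tod_ops

    filled = tod_ops.gapfill.fill_glitches(tod, nbuf=10, use_pca=False, modes=3,
                                           in_place=False, wrap='gap_filled')
    # in_place=False returns a gap-filled copy and leaves tod.signal untouched;
    # wrap='gap_filled' additionally stores the result as tod.gap_filled.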
Returns @@ -452,9 +452,9 @@ def fill_glitches(aman, nbuf=10, use_pca=False, modes=3, signal=None, sig = gfill.swap(aman, signal=sig) # Wrap and Return - if wrap_name is not None: - if wrap_name in aman._assignments: - aman.move(wrap_name, None) - aman.wrap(wrap_name, sig, [(0, 'dets'), (1, 'samps')]) + if wrap is not None: + if wrap in aman._assignments: + aman.move(wrap, None) + aman.wrap(wrap, sig, [(0, 'dets'), (1, 'samps')]) return sig diff --git a/sotodlib/tod_ops/t2pleakage.py b/sotodlib/tod_ops/t2pleakage.py index 37898be96..26116b9ae 100644 --- a/sotodlib/tod_ops/t2pleakage.py +++ b/sotodlib/tod_ops/t2pleakage.py @@ -4,17 +4,17 @@ from sotodlib import core from sotodlib.tod_ops import filters, apodize from sotodlib.tod_ops.fft_ops import calc_psd, calc_wn -from lmfit import Model - -def leakage_model(dT, AQ, AU, lamQ, lamU): - return AQ + lamQ * dT + 1.j * (AU + lamU * dT) +from scipy.odr import ODR, Model, RealData +from lmfit import Model as LmfitModel -def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', - wn_demod=None, f_lpf_cutoff=2.0, mask=None, ds_factor=100, - subtract_sig=False, merge_stats=True, t2p_stats_name='t2p_stats'): + +def t2p_fit(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', + sigma_T=None, sigma_demod=None, flag_name=None, ds_factor=100): """ - Compute the leakage coefficients from temperature (T) to polarization (Q and U). - Optionally subtract this leakage and return axismanager of coefficients. + + Compute the leakage coefficients from temperature (T) to polarization (Q and U) + individually. Return an axismanager of coefficients with their + statistical uncertainties and reduced chi-squared values for the fit. Parameters ---------- @@ -26,51 +26,167 @@ def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demo Name of the Q polarization signal in `aman`. Default is 'demodQ'. U_sig_name : str Name of the U polarization signal in `aman`. Default is 'demodU'. - subtract_sig : bool - Whether to subtract the calculated leakage from the polarization signals. Default is False. - merge_stats : bool - Whether to merge the calculated statistics back into `aman`. Default is True. - t2p_stats_name : str - Name under which to wrap the output AxisManager containing statistics. Default is 't2p_stats'. + flag_name : str or None + Name of the flag field in `aman` to use for masking data. If None, + no masking is applied. + sigma_T : array-like + Array of standard deviations for the signal TOD used fitting. + If None, all are set to unity. + sigma_demod : array-like or None + Array of standard deviations for the Q and U TODs used as weights + while fitting. If None, all are set to unity. + ds_factor : float + Factor by which to downsample the TODs prior to fitting. Default is 100. Returns ------- out_aman : AxisManager - An AxisManager containing leakage coefficients. + An AxisManager containing leakage coefficients, their errors, and reduced + chi-squared statistics. 
""" - - # get white noise level of demod for error estimation - if wn_demod is None: - freqs, Pxx_demod = calc_psd(aman, signal=aman[Q_sig_name], merge=False) - wn_demod = calc_wn(aman, pxx=Pxx_demod, freqs=freqs, low_f=0.5, high_f=1.5) - - # integrate the white noise level over frequencies to get error bar of each point - sigma_demod = wn_demod * np.sqrt(f_lpf_cutoff) - sigma_T = sigma_demod/np.sqrt(2) - - if mask is None: + + def leakage_model(params, x): + return params[0] * x + params[1] + + if sigma_T is None: + sigma_T = np.ones_like(aman.dets.count) + + if sigma_demod is None: + sigma_demod = np.ones_like(aman.dets.count) + + if flag_name is None: mask = np.ones_like(aman[T_sig_name], dtype='bool') - + elif flag_name in aman.flags._fields.keys(): + mask = ~aman.flags[flag_name].mask() + else: + raise ValueError('flag_name should be in aman.flags') + + coeffsQ = np.zeros(aman.dets.count) + errorsQ = np.zeros(aman.dets.count) + redchi2sQ = np.zeros(aman.dets.count) + coeffsU = np.zeros(aman.dets.count) + errorsU = np.zeros(aman.dets.count) + redchi2sU = np.zeros(aman.dets.count) + + for di, det in enumerate(aman.dets.vals): + T_ds_det = aman[T_sig_name][di][mask[di]][::ds_factor] + Q_ds_det = aman[Q_sig_name][di][mask[di]][::ds_factor] + U_ds_det = aman[U_sig_name][di][mask[di]][::ds_factor] + + # fitting for Q + try: + model = Model(leakage_model) + data = RealData(x=T_ds_det, + y=Q_ds_det, + sx=np.ones_like(T_ds_det) * sigma_T[di], + sy=np.ones_like(T_ds_det) * sigma_demod[di]) + odr = ODR(data, model, beta0=[np.mean(Q_ds_det), 1e-3]) + output = odr.run() + coeffsQ[di] = output.beta[0] + errorsQ[di] = output.sd_beta[0] + redchi2sQ[di] = output.sum_square / (len(T_ds_det) - 2) + except: + coeffsQ[di] = np.nan + errorsQ[di] = np.nan + redchi2sQ[di] = np.nan + + # fitting for U + try: + model = Model(leakage_model) + data = RealData(x=T_ds_det, + y=U_ds_det, + sx=np.ones_like(T_ds_det) * sigma_T[di], + sy=np.ones_like(T_ds_det) * sigma_demod[di]) + odr = ODR(data, model, beta0=[np.mean(U_ds_det), 1e-3]) + output = odr.run() + coeffsU[di] = output.beta[0] + errorsU[di] = output.sd_beta[0] + redchi2sU[di] = output.sum_square / (len(T_ds_det) - 2) + except: + coeffsU[di] = np.nan + errorsU[di] = np.nan + redchi2sU[di] = np.nan + + out_aman = core.AxisManager(aman.dets, aman.samps) + out_aman.wrap('coeffsQ', coeffsQ, [(0, 'dets')]) + out_aman.wrap('errorsQ', errorsQ, [(0, 'dets')]) + out_aman.wrap('redchi2sQ', redchi2sQ, [(0, 'dets')]) + + out_aman.wrap('coeffsU', coeffsU, [(0, 'dets')]) + out_aman.wrap('errorsU', errorsU, [(0, 'dets')]) + out_aman.wrap('redchi2sU', redchi2sU, [(0, 'dets')]) + + return out_aman + +def t2p_joint_fit(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', + sigma_demod=None, flag_name=None, ds_factor=100): + """ + Compute the leakage coefficients from temperature (T) to polarization (Q and U) + by performing a joint fit of both simultaneously. Return an axismanager of the + coefficients with their statistical uncertainties and reduced chi-squared values + for the fit. + + Parameters + ---------- + aman : AxisManager + AxisManager object containing the TOD data. + T_sig_name : str + Name of the temperature signal in `aman`. Default is 'dsT'. + Q_sig_name : str + Name of the Q polarization signal in `aman`. Default is 'demodQ'. + U_sig_name : str + Name of the U polarization signal in `aman`. Default is 'demodU'. + flag_name : str or None + Name of the flag field in `aman` to use for masking data. If None, + no masking is applied. 
+ sigma_demod : array-like or None + Array of standard deviations for the Q and U TOD used as weights + while fitting. If None, all are set to unity. + ds_factor : float + Factor by which to downsample the TODs prior to fitting. Default is 100. + + Returns + ------- + out_aman : AxisManager + An AxisManager containing leakage coefficients, their errors, and reduced + chi-squared statistics. + """ + + def leakage_model(dT, AQ, AU, lamQ, lamU): + return AQ + lamQ * dT + 1.j * (AU + lamU * dT) + + if sigma_demod is None: + sigma_demod = np.ones_like(aman.dets.count) + + if flag_name is None: + mask = np.ones_like(aman[T_sig_name], dtype='bool') + elif flag_name in aman.flags._fields.keys(): + mask = ~aman.flags[flag_name].mask() + else: + raise ValueError('flag_name should be in aman.flags') + A_Q_array = np.empty([aman.dets.count]) A_U_array = np.empty_like(A_Q_array) lambda_Q_array = np.empty_like(A_Q_array) lambda_U_array = np.empty_like(A_Q_array) - + A_Q_error = np.empty_like(A_Q_array) A_U_error = np.empty_like(A_Q_array) lambda_Q_error = np.empty_like(A_Q_array) lambda_U_error = np.empty_like(A_Q_array) redchi2s_array = np.empty_like(A_Q_array) - + for di, det in enumerate(aman.dets.vals): x = aman[T_sig_name][di][mask[di]][::ds_factor] yQ = aman[Q_sig_name][di][mask[di]][::ds_factor] yU = aman[U_sig_name][di][mask[di]][::ds_factor] - + try: - model = Model(leakage_model, independent_vars=['dT'], weights=np.ones_like(x)/sigma_demod[di]) - params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), lamQ=0., lamU=0.) + model = LmfitModel(leakage_model, independent_vars=['dT'], + weights=np.ones_like(x)/sigma_demod[di]) + params = model.make_params(AQ=np.median(yQ), AU=np.median(yU), + lamQ=0., lamU=0.) result = model.fit(yQ + 1j * yU, params, dT=x) A_Q_array[di] = result.params['AQ'].value A_U_array[di] = result.params['AU'].value @@ -93,24 +209,92 @@ def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demo lambda_Q_array[di] = np.nan lambda_U_error[di] = np.nan redchi2s_array[di] = np.nan - + out_aman = core.AxisManager(aman.dets, aman.samps) out_aman.wrap('AQ', A_Q_array, [(0, 'dets')]) out_aman.wrap('AU', A_U_array, [(0, 'dets')]) out_aman.wrap('lamQ', lambda_Q_array, [(0, 'dets')]) out_aman.wrap('lamU', lambda_U_array, [(0, 'dets')]) - + out_aman.wrap('AQ_error', A_Q_error, [(0, 'dets')]) out_aman.wrap('AU_error', A_U_error, [(0, 'dets')]) out_aman.wrap('lamQ_error', lambda_Q_error, [(0, 'dets')]) out_aman.wrap('lamU_error', lambda_U_error, [(0, 'dets')]) out_aman.wrap('redchi2s', redchi2s_array, [(0, 'dets')]) - + + return out_aman + +def get_t2p_coeffs(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', + joint_fit=True, wn_demod=None, f_lpf_cutoff=2.0, flag_name=None, + ds_factor=100, subtract_sig=False, merge_stats=True, + t2p_stats_name='t2p_stats'): + """ + Compute the leakage coefficients from temperature (T) to polarization (Q and U) by + either a joint fit of both or individually. Optionally subtract this leakage. Return + an axismanager of the coefficients with their statistical uncertainties and reduced + chi-squared values for the fit. + + Parameters + ---------- + aman : AxisManager + AxisManager object containing the TOD data. + T_sig_name : str + Name of the temperature signal in `aman`. Default is 'dsT'. + Q_sig_name : str + Name of the Q polarization signal in `aman`. Default is 'demodQ'. + U_sig_name : str + Name of the U polarization signal in `aman`. Default is 'demodU'. 
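[Editor's note] For the joint fit above, lmfit handles the complex-valued model directly; a synthetic sketch (note that in lmfit the weights argument belongs to Model.fit(), not the Model constructor):

    import numpy as np
    from lmfit import Model

    def leakage_model(dT, AQ, AU, lamQ, lamU):
        return AQ + lamQ * dT + 1.j * (AU + lamU * dT)

    rng = np.random.default_rng(1)
    dT = rng.standard_normal(500)
    y = leakage_model(dT, 0.1, -0.05, 1e-3, 4e-3)
    y = y + 1e-4 * (rng.standard_normal(500) + 1.j * rng.standard_normal(500))

    model = Model(leakage_model, independent_vars=['dT'])
    params = model.make_params(AQ=np.median(y.real), AU=np.median(y.imag),
                               lamQ=0., lamU=0.)
    result = model.fit(y, params, dT=dT, weights=np.ones_like(dT) / 1e-4)
    print(result.params['lamQ'].value, result.params['lamQ'].stderr)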
+ joint_fit : bool + Whether to fit Q and U leakage coefficients as parameters in a single model or + fit independently. Default is True. + wn_demod : float or None + Precomputed white noise level for demodulated signals. If None, it will be calculated. + f_lpf_cutoff : float + Cutoff frequency of low pass filter in demodulation. Used for error bar estimation by + combination with wn_demod. Default is 2.0. + flag_name : str + Name of the flag field in `aman` to use for masking data. If None, no masking is applied. + ds_factor: float or None + Factor by which to downsample the TODs prior to fitting. If None, the low pass filter + frequency is used to estimate the factor. + subtract_sig : bool + Whether to subtract the calculated leakage from the polarization signals. Default is False. + merge_stats : bool + Whether to merge the calculated statistics back into `aman`. Default is True. + t2p_stats_name : str + Name under which to wrap the output AxisManager containing statistics. Default is 't2p_stats'. + + Returns + ------- + out_aman : AxisManager + An AxisManager containing leakage coefficients, their errors, and reduced + chi-squared statistics. + """ + + # get white noise level of demod for error estimation + if wn_demod is None: + freqs, Pxx_demod = calc_psd(aman, signal=aman[Q_sig_name], merge=False) + wn_demod = calc_wn(aman, pxx=Pxx_demod, freqs=freqs, low_f=0.5, high_f=1.5) + + # integrate the white noise level over frequencies to get error bar of each point + sigma_demod = wn_demod * np.sqrt(f_lpf_cutoff) + sigma_T = sigma_demod / np.sqrt(2) + + if ds_factor is None: + ds_factor = int(np.mean(1. / np.diff(aman.timestamps)) / (f_lpf_cutoff)) + + if joint_fit: + out_aman = t2p_joint_fit(aman, T_sig_name, Q_sig_name, U_sig_name, + sigma_demod, flag_name, ds_factor) + else: + out_aman = t2p_fit(aman, T_sig_name, Q_sig_name, U_sig_name, + sigma_T, sigma_demod, flag_name, ds_factor) + if subtract_sig: subtract_t2p(aman, out_aman) if merge_stats: aman.wrap(t2p_stats_name, out_aman) - + return out_aman def subtract_t2p(aman, t2p_aman, T_signal=None): @@ -123,16 +307,23 @@ def subtract_t2p(aman, t2p_aman, T_signal=None): The tod. t2p_aman : AxisManager Axis manager with Q and U leakage coeffients. - Q coeffs are in fields ``lamQ`` and ``AQ`` and U coeffs are in fields - ``lamU`` and ``AU``. + If joint fitting was used in get_t2p_coeffs, Q coeffs are in + fields ``lamQ`` and ``AQ`` and U coeffs are in ``lamU`` and + ``AU``. Otherwise Q coeff is in field ``coeffsQ`` and U coeff + in ``coeffsU``. T_signal : array Temperature signal to scale and subtract from Q/U. Default is ``aman['dsT']``. 
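[Editor's note] A quick check of the ds_factor default above: at a SAT-like 200 Hz cadence (assumed here), a 2 Hz low-pass cutoff keeps every 100th sample, so neighbouring retained points are roughly independent:

    import numpy as np

    timestamps = np.arange(0, 60, 1/200.)   # 200 Hz for one minute
    f_lpf_cutoff = 2.0
    ds_factor = int(np.mean(1. / np.diff(timestamps)) / f_lpf_cutoff)
    print(ds_factor)                        # 100 (up to float rounding)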
- """ - + if T_signal is None: T_signal = aman['dsT'] - aman.demodQ -= (T_signal * t2p_aman.lamQ[:, np.newaxis] + t2p_aman.AQ[:, np.newaxis]) - aman.demodU -= (T_signal * t2p_aman.lamU[:, np.newaxis] + t2p_aman.AU[:, np.newaxis]) \ No newline at end of file + if 'AQ' in t2p_aman._assignments: + aman.demodQ -= (T_signal * t2p_aman.lamQ[:, np.newaxis] + t2p_aman.AQ[:, np.newaxis]) + aman.demodU -= (T_signal * t2p_aman.lamU[:, np.newaxis] + t2p_aman.AU[:, np.newaxis]) + elif 'coeffsQ' in t2p_aman._assignments: + aman.demodQ -= np.multiply(T_signal.T, t2p_aman.coeffsQ).T + aman.demodU -= np.multiply(T_signal.T, t2p_aman.coeffsU).T + else: + raise ValueError('no leakage coefficients found in axis manager') \ No newline at end of file diff --git a/tests/test_t2pleakage.py b/tests/test_t2pleakage.py index fd0a91f45..81d0b0ae5 100644 --- a/tests/test_t2pleakage.py +++ b/tests/test_t2pleakage.py @@ -42,7 +42,7 @@ def quick_tod(n_det, n_samp, dt=.005, fknee=0.5, fhwp=2.0, t2q=1e-3, t2u=4e-3): class LeakageSubtractionTest(unittest.TestCase): "Test the temperature-to-polarization leakage subtaction" - def test_leakage_subtraction(self): + def test_leakage_subtraction_joint(self): #prepare tod n_det = 10 n_samp = 200*600 @@ -58,9 +58,29 @@ def test_leakage_subtraction(self): hwp.demod_tod(tod) oman = t2pleakage.get_t2p_coeffs(tod) - + self.assertTrue(np.all(np.isclose(oman.lamQ, t2q, atol=oman.lamQ_error*5, rtol=0))) self.assertTrue(np.all(np.isclose(oman.lamU, t2u, atol=oman.lamU_error*5, rtol=0))) + + def test_leakage_subtraction(self): + #prepare tod + n_det = 10 + n_samp = 200*600 + dt=.005 + fknee=0.5 + fhwp=2.0 + t2q=1e-3 + t2u=4e-3 + + tod = quick_tod(n_det=n_det, n_samp=n_samp, + dt=dt, fknee=fknee, fhwp=fhwp, + t2q=t2q, t2u=t2u) + + hwp.demod_tod(tod) + oman = t2pleakage.get_t2p_coeffs(tod, joint_fit=False) + + self.assertTrue(np.all(np.isclose(oman.coeffsQ, t2q, atol=oman.errorsQ*5, rtol=0))) + self.assertTrue(np.all(np.isclose(oman.coeffsU, t2u, atol=oman.errorsU*5, rtol=0))) if __name__ == '__main__': unittest.main() \ No newline at end of file From 78627d948f49e7a9fbec2c8307f5cc461ee20348 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 30 Oct 2024 08:46:13 -0700 Subject: [PATCH 30/39] fix docstring --- sotodlib/tod_ops/t2pleakage.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sotodlib/tod_ops/t2pleakage.py b/sotodlib/tod_ops/t2pleakage.py index 26116b9ae..10cdf821f 100644 --- a/sotodlib/tod_ops/t2pleakage.py +++ b/sotodlib/tod_ops/t2pleakage.py @@ -26,15 +26,15 @@ def t2p_fit(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demodU', Name of the Q polarization signal in `aman`. Default is 'demodQ'. U_sig_name : str Name of the U polarization signal in `aman`. Default is 'demodU'. - flag_name : str or None - Name of the flag field in `aman` to use for masking data. If None, - no masking is applied. sigma_T : array-like Array of standard deviations for the signal TOD used fitting. If None, all are set to unity. sigma_demod : array-like or None Array of standard deviations for the Q and U TODs used as weights while fitting. If None, all are set to unity. + flag_name : str or None + Name of the flag field in `aman` to use for masking data. If None, + no masking is applied. ds_factor : float Factor by which to downsample the TODs prior to fitting. Default is 100. @@ -136,12 +136,12 @@ def t2p_joint_fit(aman, T_sig_name='dsT', Q_sig_name='demodQ', U_sig_name='demod Name of the Q polarization signal in `aman`. Default is 'demodQ'. 
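[Editor's note] The two subtraction branches above are the same detector-wise scaling written two ways; a tiny numpy check:

    import numpy as np

    T = np.arange(6.).reshape(2, 3)          # (n_det, n_samp) temperature TOD
    lam = np.array([1e-3, 4e-3])             # per-detector leakage coefficients

    a = T * lam[:, np.newaxis]               # joint-fit branch style
    b = np.multiply(T.T, lam).T              # per-component branch style
    assert np.allclose(a, b)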
U_sig_name : str Name of the U polarization signal in `aman`. Default is 'demodU'. - flag_name : str or None - Name of the flag field in `aman` to use for masking data. If None, - no masking is applied. sigma_demod : array-like or None Array of standard deviations for the Q and U TOD used as weights while fitting. If None, all are set to unity. + flag_name : str or None + Name of the flag field in `aman` to use for masking data. If None, + no masking is applied. ds_factor : float Factor by which to downsample the TODs prior to fitting. Default is 100. From 873021aa9eae5e5f985f05b62d52112ce717f57e Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 30 Oct 2024 08:49:02 -0700 Subject: [PATCH 31/39] update glitch fill test --- tests/test_tod_ops.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_tod_ops.py b/tests/test_tod_ops.py index 6a588f330..7687b9bc8 100644 --- a/tests/test_tod_ops.py +++ b/tests/test_tod_ops.py @@ -171,19 +171,19 @@ def test_fillglitches(self): # test poly fill in_place, up, mg = False, False, None glitch_filled = tod_ops.gapfill.fill_glitches(aman, use_pca=up, - in_place=in_place, wrap_name=mg) + in_place=in_place, wrap=mg) self.assertTrue(np.max(np.abs(glitch_filled-aman.inputsignal)) < 1e-3) # test pca fill in_place, up, mg = False, True, None glitch_filled = tod_ops.gapfill.fill_glitches(aman, use_pca=up, - in_place=in_place, wrap_name=mg) + in_place=in_place, wrap=mg) print(np.max(np.abs(glitch_filled-aman.inputsignal))) # test wrap new field in_place, up, mg = False, False, "gap_filled" glitch_filled = tod_ops.gapfill.fill_glitches(aman, use_pca=up, - in_place=in_place, wrap_name=mg) + in_place=in_place, wrap=mg) self.assertTrue('gap_filled' in aman._assignments) class FilterTest(unittest.TestCase): From 0ffbdafadac9ec4c8371e36770b4748cd744b16e Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Thu, 31 Oct 2024 08:52:30 -0700 Subject: [PATCH 32/39] update pca for compatibility --- sotodlib/preprocess/processes.py | 6 ++- sotodlib/tod_ops/pca.py | 71 ++++++++++++++++++++++++++++++-- 2 files changed, 73 insertions(+), 4 deletions(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 55199bea7..d9df285b5 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -1115,7 +1115,10 @@ def calc_and_save(self, aman, proc_aman): pca_out = tod_ops.pca.get_pca(band_aman,signal=band_aman[self.signal]) pca_signal = tod_ops.pca.get_pca_model(band_aman, pca_out, signal=band_aman[self.signal]) - result_aman = tod_ops.pca.pca_cuts_and_cal(band_aman, pca_signal, **self.calc_cfgs) + if isinstance(self.calc_cfgs, bool): + result_aman = tod_ops.pca.pca_cuts_and_cal(band_aman, pca_signal) + else: + result_aman = tod_ops.pca.pca_cuts_and_cal(band_aman, pca_signal, **self.calc_cfgs) pca_det_mask[m0] = np.logical_or(pca_det_mask[m0], result_aman['pca_det_mask']) relcal[m0] = result_aman['relcal'] @@ -1163,6 +1166,7 @@ def plot(self, aman, proc_aman, filename): band_aman = proc_aman[self.run_name].restrict('dets', aman.dets.vals[proc_aman[self.run_name][f'{band}_idx']], in_place=False) plot_pcabounds(pca_aman, band_aman, filename=filename.replace('{name}', f'{ufm}_{band}_pca'), signal=self.signal, band=band, plot_ds_factor=self.plot_cfgs.get('plot_ds_factor', 20)) + class PTPFlags(_Preprocess): """Find detectors with anomalous peak-to-peak signal. 
diff --git a/sotodlib/tod_ops/pca.py b/sotodlib/tod_ops/pca.py index 2ac6c1c2d..dd20e74d6 100644 --- a/sotodlib/tod_ops/pca.py +++ b/sotodlib/tod_ops/pca.py @@ -1,5 +1,8 @@ from sotodlib import core import numpy as np +import logging + +logger = logging.getLogger(__name__) # Note to future developers with a need for speed: there are two # obvious places where OpenMP acceleration in a C++ routine would be @@ -73,7 +76,7 @@ def get_pca_model(tod=None, pca=None, n_modes=None, signal=None, return output -def get_pca(tod=None, cov=None, signal=None, wrap=None): +def get_pca(tod=None, cov=None, signal=None, wrap=None, mask=None): """Compute a PCA decomposition of the kind useful for signal analysis. A symmetric non-negative matrix cov of shape(n_dets, n_dets) can be decomposed into matrix R (same shape) and vector E (length @@ -93,6 +96,10 @@ def get_pca(tod=None, cov=None, signal=None, wrap=None): tod.signal. wrap: string; if set then the returned result is also stored in tod under this name. + mask: If specifed, a boolean array to select which dets are to + be considered in the PCA decomp. This is achieved by + modifying the cov to make the non-considered dets + independent and low significance. Returns: AxisManager with axes 'dets' and 'eigen' (of the same length), @@ -105,18 +112,26 @@ def get_pca(tod=None, cov=None, signal=None, wrap=None): if signal is None: signal = tod.signal cov = np.cov(signal) + + if mask is not None: + var_min = min(np.diag(cov)[mask]) + for i in (~mask).nonzero()[0]: + cov[i,:] = 0 + cov[:,i] = 0 + cov[i,i] = var_min * 1e-2 + dets = tod.dets mode_axis = core.IndexAxis('eigen', dets.count) output = core.AxisManager(dets, mode_axis) output.wrap('cov', cov, [(0, dets.name), (1, dets.name)]) - # Note eig will sometimes return complex eigenvalues. - E, R = np.linalg.eig(cov) # eigh nans sometimes... + E, R = np.linalg.eigh(cov) E[np.isnan(E)] = 0. E, R = E.real, R.real idx = np.argsort(-E) + output.wrap('E', E[idx], [(0, mode_axis.name)]) output.wrap('R', R[:, idx], [(0, dets.name), (1, mode_axis.name)]) if not(wrap is None): @@ -300,3 +315,53 @@ def pca_cuts_and_cal(tod, pca_aman, xfac=2, yfac=1.5, calc_good_medianw=False): pca_relcal.wrap('median', medianw) return pca_relcal + + +def get_common_mode( + tod, + signal='signal', + method='median', + wrap=None, + weights=None, +): + """Returns common mode timestream between detectors. + This uses method 'median' or 'average' across detectors as opposed to a principle + component analysis to get the common mode. + + Arguments + --------- + tod: axis manager + signal: str, optional + The name of the signal to estimate common mode or ndarray with shape of + (n_dets x n_samps). Defaults to 'signal'. + method: str + method of common mode estimation. 'median' or 'average'. + wrap: str or None. + If not None, wrap the common mode into tod with this name. + weights: array with dets axis + If not None, estimate common mode by taking average with this weights. 
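[Editor's note] A synthetic illustration of the new mask handling in get_pca above: the flagged detector's covariance rows and columns are zeroed and its variance set well below the rest, so it sinks to the bottom of the (now eigh-based) eigenvalue ordering instead of being dropped from the axis:

    import numpy as np

    rng = np.random.default_rng(2)
    sig = rng.standard_normal((4, 2000))
    sig += rng.standard_normal(2000)        # shared common mode
    cov = np.cov(sig)

    mask = np.array([True, True, True, False])
    var_min = min(np.diag(cov)[mask])
    for i in (~mask).nonzero()[0]:
        cov[i, :] = 0
        cov[:, i] = 0
        cov[i, i] = var_min * 1e-2

    E, R = np.linalg.eigh(cov)              # symmetric solver, real output
    print(E[np.argsort(-E)])                # masked det holds only the tiny last eigenvalue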
+ + Returns + ------- + common mode timestream + + """ + if isinstance(signal, str): + signal = tod[signal] + elif isinstance(signal, np.ndarray): + if np.shape(signal) != (tod.dets.count, tod.samps.count): + raise ValueError("When passing signal as ndarray shape must match (n_dets x n_samps).") + else: + raise TypeError("signal must be str, or ndarray") + + if method == 'median': + if weights is not None: + logger.warning('weights will be ignored because median method is chosen') + common_mode = np.median(signal, axis=0) + elif method == 'average': + common_mode = np.average(signal, axis=0, weights=weights) + else: + raise ValueError("method flag must be median or average") + if wrap is not None: + tod.wrap(wrap, common_mode, [(0, 'samps')]) + return common_mode \ No newline at end of file From 15017a96746060be118adaae884a467baf9c1715 Mon Sep 17 00:00:00 2001 From: chervias Date: Fri, 1 Nov 2024 01:13:15 -0700 Subject: [PATCH 33/39] newline fix --- sotodlib/mapmaking/demod_mapmaker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py index 0b99f92f3..9c04ac310 100644 --- a/sotodlib/mapmaking/demod_mapmaker.py +++ b/sotodlib/mapmaking/demod_mapmaker.py @@ -673,4 +673,4 @@ def project_all_single(pmap, Nd, det_weights, comps, wrapper=lambda x:x): pmap.to_weights(dest=div, comps=comps, det_weights=det_weights) div = wrapper(div) hits = wrapper(pmap.to_map(signal=np.ones_like(Nd))) - return rhs, div, hits \ No newline at end of file + return rhs, div, hits From 0dea922075b1477522e056c5904465d9fd023a5a Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 7 Nov 2024 00:54:42 -0800 Subject: [PATCH 34/39] removing the script, which will be merged in a further commit --- sotodlib/site_pipeline/cli.py | 2 - .../make_atomic_filterbin_map.py | 545 ------------------ 2 files changed, 547 deletions(-) delete mode 100644 sotodlib/site_pipeline/make_atomic_filterbin_map.py diff --git a/sotodlib/site_pipeline/cli.py b/sotodlib/site_pipeline/cli.py index c1afc86fe..a11ca65f1 100644 --- a/sotodlib/site_pipeline/cli.py +++ b/sotodlib/site_pipeline/cli.py @@ -43,7 +43,6 @@ def main(obs_id=None, config_file=None, logger=None): analyze_bright_ptsrc, check_book, make_det_info_wafer, - make_atomic_filterbin_map, make_ml_map, make_source_flags, make_uncal_beam_map, @@ -62,7 +61,6 @@ def main(obs_id=None, config_file=None, logger=None): 'analyze-bright-ptsrc': analyze_bright_ptsrc, 'check-book': check_book, 'make-det-info-wafer': make_det_info_wafer, - 'make-atomic-filterbin-map': make_atomic_filterbin_map, 'make-ml-map': make_ml_map, 'make-source-flags': make_source_flags, 'make-uncal-beam-map': make_uncal_beam_map, diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py deleted file mode 100644 index 2ebfc9d87..000000000 --- a/sotodlib/site_pipeline/make_atomic_filterbin_map.py +++ /dev/null @@ -1,545 +0,0 @@ -from argparse import ArgumentParser -import numpy as np, sys, time, warnings, os, so3g, logging, yaml, itertools, multiprocessing, traceback -from sotodlib import coords, mapmaking -from sotodlib.core import Context, metadata as metadata_core, FlagManager, AxisManager, OffsetAxis -from sotodlib.io import metadata, hk_utils -from sotodlib.site_pipeline import preprocess_tod as pt -from pixell import enmap, utils as putils, fft, bunch, wcsutils, tilemap, colors, memory, mpi -from concurrent.futures import ProcessPoolExecutor, as_completed -import 
sotodlib.site_pipeline.util as util - -defaults = {"area": None, - "nside": None, - "query": "1", - "odir": "./output", - "preprocess_config": None, - "comps": "TQU", - "mode": "per_obs", - "nproc": 1, - "ntod": None, - "tods": None, - "nset": None, - "wafer": None, - "freq": None, - "center_at": None, - "site": 'so_sat3', - "max_dets": None, # not implemented yet - "verbose": 0, - "quiet": 0, - "tiled": 0, # not implemented yet - "singlestream": False, - "only_hits": False, - "det_in_out": False, - "det_left_right":False, - "det_upper_lower":False, - "scan_left_right":False, - "window":0.0, # not implemented yet - "dtype_tod": np.float32, - "dtype_map": np.float64, - "atomic_db": "atomic_maps.db", - "fixed_time": None, - "mindur": None, - "l2_data_path": "/global/cfs/cdirs/sobs/untracked/data/site/hk", - } - -def get_parser(parser=None): - if parser is None: - parser = ArgumentParser() - parser.add_argument("--config-file", type=str, default=None, - help="Path to mapmaker config.yaml file") - - parser.add_argument("--context", type=str, - help='context file') - parser.add_argument("--area", - help='wcs kernel') - parser.add_argument("--nside", - help='Nside if you want map in HEALPIX') - parser.add_argument("--query", - help='query, can be a file (list of obs_id) or selection string') - parser.add_argument("--odir", - help='output directory') - parser.add_argument("--preprocess_config", type=str, - help='file with the config file to run the preprocessing pipeline') - parser.add_argument("--mode", type=str, ) - parser.add_argument("--nproc", type=int, help='Number of procs in the multiprocessing pool') - parser.add_argument("--comps", type=str,) - parser.add_argument("--singlestream", action="store_true") - parser.add_argument("--only_hits", action="store_true") # this will work only when we don't request splits, since I want to avoid loading the signal - - # detector position splits (fixed in time) - parser.add_argument("--det_in_out", action="store_true") - parser.add_argument("--det_left_right", action="store_true") - parser.add_argument("--det_upper_lower", action="store_true") - - # time samples splits - parser.add_argument("--scan_left_right", action="store_true") - - parser.add_argument("--ntod", type=int, ) - parser.add_argument("--tods", type=str, ) - parser.add_argument("--nset", type=int, ) - parser.add_argument("--wafer", type=str, - help="Detector set to map with") - parser.add_argument("--freq", type=str, - help="Frequency band to map with") - parser.add_argument("--max-dets", type=int, ) - parser.add_argument("--fixed_ftime", type=int, ) - parser.add_argument("--mindur", type=int, ) - parser.add_argument("--site", type=str, ) - parser.add_argument("--verbose", action="count", ) - parser.add_argument("--quiet", action="count", ) - parser.add_argument("--window", type=float, ) - parser.add_argument("--dtype_tod", ) - parser.add_argument("--dtype_map", ) - parser.add_argument("--atomic_db", - help='name of the atomic map database, will be saved where make_filterbin_map is being run') - parser.add_argument("--l2_data_path", - help='Path to level-2 data') - return parser - -def _get_config(config_file): - return yaml.safe_load(open(config_file,'r')) - -def get_ra_ref(obs, site='so_sat3'): - # pass an AxisManager of the observation, and return two - # ra_ref @ dec=-40 deg. 
- # - #t = [obs.obs_info.start_time, obs.obs_info.start_time, obs.obs_info.stop_time, obs.obs_info.stop_time] - t_start = obs.obs_info.start_time - t_stop = obs.obs_info.stop_time - az = np.arange((obs.obs_info.az_center-0.5*obs.obs_info.az_throw)*putils.degree, - (obs.obs_info.az_center+0.5*obs.obs_info.az_throw)*putils.degree, 0.5*putils.degree) - el = obs.obs_info.el_center*putils.degree - - csl = so3g.proj.CelestialSightLine.az_el(t_start*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') - ra_, dec_ = csl.coords().transpose()[:2] - ra_ref_start = np.interp(-40*putils.degree, dec_, ra_) - - csl = so3g.proj.CelestialSightLine.az_el(t_stop*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') - ra_, dec_ = csl.coords().transpose()[:2] - ra_ref_stop = np.interp(-40*putils.degree, dec_, ra_) - return ra_ref_start, ra_ref_stop - -def find_footprint(context, tod, ref_wcs, return_pixboxes=False, pad=1): - # Measure the pixel bounds of each observation relative to our - # reference wcs - pixboxes = [] - my_shape, my_wcs = coords.get_footprint(tod, ref_wcs) - my_pixbox = enmap.pixbox_of(ref_wcs, my_shape, my_wcs) - pixboxes.append(my_pixbox) - if len(pixboxes) == 0: raise DataMissing("No usable obs to estimate footprint from") - pixboxes = np.array(pixboxes) - # Handle sky wrapping. This assumes cylindrical coordinates with sky-wrapping - # in the x-direction, and that there's an integer number of pixels around - # the sky. Could be done more generally, but would be much more involved, - # and this should be good enough. - nphi = putils.nint(np.abs(360/ref_wcs.wcs.cdelt[0])) - widths = pixboxes[:,1,0]-pixboxes[:,0,0] - pixboxes[:,0,0] = putils.rewind(pixboxes[:,0,0], - ref=pixboxes[0,0,0], - period=nphi) - pixboxes[:,1,0] = pixboxes[:,0,0] + widths - # It's now safe to find the total pixel bounding box - union_pixbox = np.array([np.min(pixboxes[:,0],0)-pad,np.max(pixboxes[:,1],0) - +pad]) - # Use this to construct the output geometry - shape = union_pixbox[1]-union_pixbox[0] - wcs = ref_wcs.deepcopy() - wcs.wcs.crpix -= union_pixbox[0,::-1] - if return_pixboxes: return shape, wcs, pixboxes - else: return shape, wcs - -class DataMissing(Exception): pass - -def get_pwv(obs, data_dir): - try: - pwv_info = hk_utils.get_detcosamp_hkaman(obs, alias=['pwv'], - fields = ['site.env-radiometer-class.feeds.pwvs.pwv',], - data_dir = data_dir) - pwv_all = pwv_info['env-radiometer-class']['env-radiometer-class'][0] - pwv = np.nanmedian(pwv_all) - except (KeyError, ValueError): - pwv = 0.0 - return pwv - -def read_tods(context, obslist, - dtype_tod=np.float32, only_hits=False, site='so_sat3', - l2_data='/global/cfs/cdirs/sobs/untracked/data/site/hk'): - """ - context : str - Path to context file - """ - context = Context(context) - # this function will run on multiprocessing and can be returned in any random order - # we will also return the obslist to keep track of the order - my_obslist = [] ; my_tods = [] ; my_ra_ref = [] ; pwvs = [] - inds = range(len(obslist)) - ind = 0 - obs_id, detset, band, obs_ind = obslist[ind] - meta = context.get_meta(obs_id, dets={"wafer_slot":detset, "wafer.bandpass":band},) - tod = context.get_obs(meta, no_signal=True) - #tod = context.get_obs(obs_id, dets={"wafer_slot":detset, - # "wafer.bandpass":band}, - # no_signal=True) - to_remove = [] - for field in tod._fields: - if field!='obs_info' and field!='flags' and field!='signal' and field!='focal_plane' and field!='timestamps' and field!='boresight': to_remove.append(field) - for field in 
to_remove: - tod.move(field, None) - if only_hits==False: - ra_ref_start, ra_ref_stop = get_ra_ref(tod) - my_ra_ref.append((ra_ref_start/putils.degree, - ra_ref_stop/putils.degree)) - else: - my_ra_ref.append(None) - tod.flags.wrap('glitch_flags', so3g.proj.RangesMatrix.zeros(tod.shape[:2]), - [(0, 'dets'), (1, 'samps')]) - my_tods.append(tod) - - tod_temp = tod.restrict('dets', meta.dets.vals[:1], in_place=False) - pwvs.append(get_pwv(tod_temp, data_dir=l2_data)) - del tod_temp - return obslist, my_tods, my_ra_ref, pwvs - -class ColoredFormatter(logging.Formatter): - def __init__(self, msg, colors={'DEBUG':colors.reset, - 'INFO':colors.lgreen, - 'WARNING':colors.lbrown, - 'ERROR':colors.lred, - 'CRITICAL':colors.lpurple}): - logging.Formatter.__init__(self, msg) - self.colors = colors - def format(self, record): - try: - col = self.colors[record.levelname] - except KeyError: - col = colors.reset - return col + logging.Formatter.format(self, record) + colors.reset - -class LogInfoFilter(logging.Filter): - def __init__(self, rank=0): - self.rank = rank - try: - # Try to get actual time since task start if possible - import os, psutil - p = psutil.Process(os.getpid()) - self.t0 = p.create_time() - except ImportError: - # Otherwise measure from creation of this filter - self.t0 = time.time() - def filter(self, record): - record.rank = self.rank - record.wtime = time.time()-self.t0 - record.wmins = record.wtime/60. - record.whours= record.wmins/60. - record.mem = memory.current()/1024.**3 - record.resmem= memory.resident()/1024.**3 - record.memmax= memory.max()/1024.**3 - return record - -def handle_empty(prefix, tag, e, L): - # This happens if we ended up with no valid tods for some reason - L.info("%s Skipped: %s" % (tag, str(e))) - putils.mkdir(os.path.dirname(prefix)) - with open(prefix + ".empty", "w") as ofile: ofile.write("\n") - -def make_demod_map_dummy(context): - return None - -def main(config_file=None, defaults=defaults, **args): - cfg = dict(defaults) - # Update the default dict with values provided from a config.yaml file - if config_file is not None: - cfg_from_file = _get_config(config_file) - cfg.update({k: v for k, v in cfg_from_file.items() if v is not None}) - else: - print("No config file provided, assuming default values") - # Merge flags from config file and defaults with any passed through CLI - cfg.update({k: v for k, v in args.items() if v is not None}) - # Certain fields are required. Check if they are all supplied here - required_fields = ['context','area'] - for req in required_fields: - if req not in cfg.keys(): - raise KeyError("{} is a required argument. Please supply it in a config file or via the command line".format(req)) - args = cfg - warnings.simplefilter('ignore') - - comm = mpi.FAKE_WORLD # Fake communicator since we won't use MPI - - verbose = args['verbose'] - args['quiet'] - if args['area'] is not None: - shape, wcs = enmap.read_map_geometry(args['area']) - wcs = wcsutils.WCS(wcs.to_header()) - elif args['nside'] is not None: - nside = int(args['nside']) - else: - print('Neither rectangular area or nside specified, exiting.') - exit(1) - - noise_model = mapmaking.NmatWhite() - ncomp = len(args['comps']) - meta_only = False - putils.mkdir(args['odir']) - - recenter = None - if args['center_at']: - recenter = mapmaking.parse_recentering(args['center_at']) - - # Set up logging. 
- L = logging.getLogger(__name__) - L.setLevel(logging.INFO) - ch = logging.StreamHandler() - ch.setLevel(logging.INFO) - ch.setFormatter(ColoredFormatter(" %(wmins)7.2f %(mem)5.2f %(memmax)5.2f %(message)s")) - ch.addFilter(LogInfoFilter()) - L.addHandler(ch) - - if args['preprocess_config'] is not None: - preprocess_config = yaml.safe_load(open(args['preprocess_config'],'r')) - outs = [] - errlog = os.path.join(os.path.dirname(preprocess_config['archive']['index']), - 'errlog.txt') - else: - preprocess_config = None - outs = None - errlog = None - - multiprocessing.set_start_method('spawn') - - context = Context(args['context']) - # obslists is a dict, obskeys is a list, periods is an array, only rank 0 - # will do this and broadcast to others. - obslists, obskeys, periods, \ - obs_infos = mapmaking.build_obslists(context, - args['query'], - mode=args['mode'], - nset=args['nset'], - wafer=args['wafer'], - freq=args['freq'], - ntod=args['ntod'], - tods=args['tods'], - fixed_time=args['fixed_time'], - mindur=args['mindur']) - L.info('Done with build_obslists') - - tags = [] - cwd = os.getcwd() - - split_labels = [] - if args['det_in_out']: - split_labels.append('det_in');split_labels.append('det_out') - if args['det_left_right']: - split_labels.append('det_left');split_labels.append('det_right') - if args['det_upper_lower']: - split_labels.append('det_upper');split_labels.append('det_lower') - if args['scan_left_right']: - split_labels.append('scan_left');split_labels.append('scan_right') - if not split_labels: - split_labels = None - - # We open the data base for checking if we have maps already, - # if we do we will not run them again. - if os.path.isfile('./'+args['atomic_db']) and not args['only_hits']: - conn = sqlite3.connect('./'+args['atomic_db']) # open the connector, in reading mode only - cursor = conn.cursor() - keys_to_remove = [] - # Now we have obslists and splits ready, we look through the database - # to remove the maps we already have from it - for key, value in obslists.items(): - if split_labels == None: - # we want to run only full maps - query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="full"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1] ) - res = cursor.execute(query_) - matches = res.fetchall() - if len(matches)>0: - # this means the map (key,value) is already in the data base, - # so we have to remove it to not run it again - # it seems that removing the maps from the obskeys is enough. 
- keys_to_remove.append(key) - else: - # we are asking for splits - missing_split = False - for split_label in split_labels: - query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="%s"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1], split_label ) - res = cursor.execute(query_) - matches = res.fetchall() - if len(matches)==0: - # this means one of the requested splits is missing - # in the data base - missing_split = True - break - if missing_split == False: - # this means we have all the splits we requested for the - # particular obs_id/telescope/freq/wafer - keys_to_remove.append(key) - for key in keys_to_remove: - obskeys.remove(key) - del obslists[key] - conn.close() # I close since I only wanted to read - - obslists_arr = [item for key, item in obslists.items()] - my_oblists=[]; my_tods = []; my_ra_ref=[]; pwvs=[] - L.info('Starting with read_tods') - with ProcessPoolExecutor(args['nproc']) as exe: - futures = [exe.submit(read_tods, args['context'], obslist, - dtype_tod=args['dtype_tod'], - only_hits=args['only_hits'], - l2_data=args['l2_data_path']) for obslist in obslists_arr] - for future in as_completed(futures): - #L.info('New future as_completed result') - try: - my_obslist_here, my_tods_here, my_ra_ref_here, pwvs_here = future.result() - my_oblists.append(my_obslist_here) - my_tods.append(my_tods_here) - my_ra_ref.append(my_ra_ref_here) - pwvs.append(pwvs_here) - except Exception as e: - errmsg = f'{type(e)}: {e}' - tb = ''.join(traceback.format_tb(e.__traceback__)) - L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") - f = open(errlog, 'a') - f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') - f.close() - continue - futures.remove(future) - # flatten the list of lists - my_oblists = list(itertools.chain.from_iterable(my_oblists)) - my_tods = list(itertools.chain.from_iterable(my_tods)) - my_ra_ref = list(itertools.chain.from_iterable(my_ra_ref)) - pwvs = list(itertools.chain.from_iterable(pwvs)) - L.info('Done with read_tods') - - if args['area'] is not None: - # we will do the profile and footprint here, and then allgather the - # subshapes and subwcs.This way we don't have to communicate the - # massive arrays such as timestamps - subshapes = [] ; subwcses = [] - for obs in my_tods: - if recenter is None: - subshape, subwcs = find_footprint(context, obs, wcs,) - subshapes.append(subshape) ; subwcses.append(subwcs) - else: - subshape = shape; subwcs = wcs - subshapes.append(subshape) ; subwcses.append(subwcs) - # subshape and subwcs are in the order given by my_oblists - - run_list = [] - for oi in range(len(my_tods)): - # we will need to build the obskey from my_oblists - #obs_1722126466_satp1_1111111', 'ws3', 'f150', 0) - pid = my_oblists[oi][3]; - detset = my_oblists[oi][1]; - band = my_oblists[oi][2]; - #obskey = (pid, detset, band) - obslist = [my_oblists[oi]] - t = putils.floor(periods[pid,0]) - t5 = ("%05d" % t)[:5] - prefix = "%s/%s/atomic_%010d_%s_%s" % (args['odir'], t5, t, detset, band) - - if args['area'] is not None: - subshape = subshapes[oi] - subwcs = subwcses[oi] - - tag = "%5d/%d" % (oi+1, len(obskeys)) - putils.mkdir(os.path.dirname(prefix)) - meta_done = os.path.isfile(prefix + "_full_info.hdf") - maps_done = os.path.isfile(prefix + ".empty") or ( - os.path.isfile(prefix + "_full_map.fits") and - os.path.isfile(prefix + "_full_ivar.fits") and - os.path.isfile(prefix + "_full_hits.fits") - ) - #L.info("%s Proc period %4d dset %s:%s @%.0f dur %5.2f h with 
%2d obs" % (tag, pid, detset, band, t, (periods[pid,1]-periods[pid,0])/3600, len(obslist))) - - my_ra_ref_atomic = [my_ra_ref[oi]] - pwv_atomic = [pwvs[oi]] - # Save file for data base of atomic maps. We will write an individual file, - # another script will loop over those files and write into sqlite data base - if not args['only_hits']: - info = [] - if split_labels is None: - # this means the mapmaker was run without any splits requested - info.append(bunch.Bunch(pid=pid, - obs_id=obslist[0][0].encode(), - telescope=obs_infos[obslist[0][3]].telescope.encode(), - freq_channel=band.encode(), - wafer=detset.encode(), - ctime=int(t), - split_label='full'.encode(), - split_detail='full'.encode(), - prefix_path=str(cwd+'/'+prefix+'_full').encode(), - elevation=obs_infos[obslist[0][3]].el_center, - azimuth=obs_infos[obslist[0][3]].az_center, - RA_ref_start=my_ra_ref_atomic[0][0], - RA_ref_stop=my_ra_ref_atomic[0][1], - pwv=pwv_atomic - )) - else: - # splits were requested and we loop over them - for split_label in split_labels: - info.append(bunch.Bunch(pid=pid, - obs_id=obslist[0][0].encode(), - telescope=obs_infos[obslist[0][3]].telescope.encode(), - freq_channel=band.encode(), - wafer=detset.encode(), - ctime=int(t), - split_label=split_label.encode(), - split_detail=''.encode(), - prefix_path=str(cwd+'/'+prefix+'_%s'%split_label).encode(), - elevation=obs_infos[obslist[0][3]].el_center, - azimuth=obs_infos[obslist[0][3]].az_center, - RA_ref_start=my_ra_ref_atomic[0][0], - RA_ref_stop=my_ra_ref_atomic[0][1], - pwv=pwv_atomic - )) - # inputs that are unique per atomic map go into run_list - if args['area'] is not None: - run_list.append([obslist, subshape, subwcs, info, prefix, t]) - elif args['nside'] is not None: - run_list.append([obslist, info, prefix, t]) - # Done with creating run_list - - with ProcessPoolExecutor(args['nproc']) as exe: - if args['area'] is not None: - futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], - noise_model, r[3], preprocess_config, r[4], - shape=r[1], wcs=r[2], - comm = comm, t0=r[5], tag=tag, recenter=recenter, - dtype_map=args['dtype_map'], - dtype_tod=args['dtype_tod'], - comps=args['comps'], - verbose=args['verbose'], - split_labels=split_labels, - singlestream=args['singlestream'], - site=args['site']) for r in run_list] - elif args['nside'] is not None: - futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], - noise_model, r[1], preprocess_config, r[2], - nside = nside, - comm = comm, t0=r[3], tag=tag, recenter=recenter, - dtype_map=args['dtype_map'], - dtype_tod=args['dtype_tod'], - comps=args['comps'], - verbose=args['verbose'], - split_labels=split_labels, - singlestream=args['singlestream'], - site=args['site']) for r in run_list] - for future in as_completed(futures): - L.info('New future as_completed result') - try: - errors, outputs = future.result() - except Exception as e: - errmsg = f'{type(e)}: {e}' - tb = ''.join(traceback.format_tb(e.__traceback__)) - L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") - f = open(errlog, 'a') - f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') - f.close() - continue - futures.remove(future) - if preprocess_config is not None: - for ii in range(len(errors)): - pt.cleanup_mandb(errors[ii], outputs[ii], preprocess_config, L) - print("Done") - return True - -if __name__ == '__main__': - util.main_launcher(main, get_parser) \ No newline at end of file From 59c6d58532acf56db20399fd52109ee728c24b94 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 7 Nov 2024 
01:42:45 -0800
Subject: [PATCH 35/39] Fixing bug where sys was not imported

---
 sotodlib/mapmaking/obs_grouping.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sotodlib/mapmaking/obs_grouping.py b/sotodlib/mapmaking/obs_grouping.py
index 92fc68ff5..8670e6a93 100644
--- a/sotodlib/mapmaking/obs_grouping.py
+++ b/sotodlib/mapmaking/obs_grouping.py
@@ -16,7 +16,7 @@
 """
 __all__ = ['build_obslists']
-import numpy as np
+import numpy as np, sys
 from pixell import utils
 from scipy import ndimage

From a6e488b97f9d838448404ee2ef1282d053e2bdbe Mon Sep 17 00:00:00 2001
From: chervias
Date: Fri, 8 Nov 2024 01:06:21 -0800
Subject: [PATCH 36/39] Adding exception handling in obs_grouping

---
 sotodlib/mapmaking/obs_grouping.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/sotodlib/mapmaking/obs_grouping.py b/sotodlib/mapmaking/obs_grouping.py
index 8670e6a93..ac8ee329c 100644
--- a/sotodlib/mapmaking/obs_grouping.py
+++ b/sotodlib/mapmaking/obs_grouping.py
@@ -15,13 +15,17 @@
 depth-1 maps, etc.
 """
-__all__ = ['build_obslists']
-import numpy as np, sys
+__all__ = ['build_obslists','NoTODFound']
+import numpy as np
 from pixell import utils
 from scipy import ndimage
 
 from .utilities import get_ids
 
+class NoTODFound(Exception):
+    def __init__(self, msg):
+        super().__init__(msg)
+
 def build_obslists(context, query, mode=None, nset=None, wafer=None,
                    freq=None, ntod=None, tods=None, fixed_time=None, mindur=None, ):
     """
@@ -78,8 +82,7 @@ def build_obslists(context, query, mode=None, nset=None, wafer=None,
     if tods: ids = np.array(eval("ids"+tods)).reshape(-1)
     if ntod: ids = ids[:ntod]
     if len(ids) == 0:
-        print("No tods found!")
-        sys.exit(1)
+        raise NoTODFound("No tods found!")
     # Extract info about the selected ids
     obs_infos = context.obsdb.query("obs_id in (%s)" % ",".join(["'%s'" % id for id in ids]))
     obs_infos = obs_infos.asarray().view(np.recarray)
@@ -100,8 +103,7 @@ def build_obslists(context, query, mode=None, nset=None, wafer=None,
         periods = split_periods(periods, 24*3600)
         # If fixed_time was not set,
         #then we do 24 hrs by default and it will be the same as depth_1
     else:
-        print("Invalid mode!")
-        sys.exit(1)
+        raise NoTODFound("Invalid mode!")
     # We will make one map per period-detset-band
     obslists = build_period_obslists(obs_infos, periods, context, nset=nset,

From 5c4686cc7ca80314ccb72a54ff328eddff9031f3 Mon Sep 17 00:00:00 2001
From: chervias
Date: Fri, 8 Nov 2024 01:12:20 -0800
Subject: [PATCH 37/39] Various fixes in reply to review

---
 .../make_atomic_filterbin_map.py              | 556 ++++++++++++++++++
 1 file changed, 556 insertions(+)
 create mode 100644 sotodlib/site_pipeline/make_atomic_filterbin_map.py

diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py
new file mode 100644
index 000000000..61dcc6548
--- /dev/null
+++ b/sotodlib/site_pipeline/make_atomic_filterbin_map.py
@@ -0,0 +1,556 @@
+from argparse import ArgumentParser
+import numpy as np, sys, time, warnings, os, so3g, logging, yaml, itertools, multiprocessing, traceback, sqlite3
+from sotodlib import coords, mapmaking
+from sotodlib.core import Context, metadata as metadata_core, FlagManager, AxisManager, OffsetAxis
+from sotodlib.io import metadata, hk_utils
+from sotodlib.site_pipeline import preprocess_tod as pt
+from pixell import enmap, utils as putils, fft, bunch, wcsutils, tilemap, colors, memory, mpi
+from concurrent.futures import ProcessPoolExecutor, as_completed
+import sotodlib.site_pipeline.util as util
+
+defaults = {"area": None, 
+ "nside": None, + "query": "type == 'obs' and subtype == 'cmb'", + "odir": "./output", + "preprocess_config": None, + "update-delay": None, + "comps": "TQU", + "mode": "per_obs", + "nproc": 1, + "ntod": None, + "tods": None, + "nset": None, + "wafer": None, + "freq": None, + "center_at": None, + "site": 'so_sat3', + "max_dets": None, # not implemented yet + "verbose": 0, + "quiet": 0, + "tiled": 0, # not implemented yet + "singlestream": False, + "only_hits": False, + "det_in_out": False, + "det_left_right":False, + "det_upper_lower":False, + "scan_left_right":False, + "window":0.0, # not implemented yet + "dtype_tod": np.float32, + "dtype_map": np.float64, + "atomic_db": "atomic_maps.db", + "fixed_time": None, + "mindur": None, + "l2_data_path": "/global/cfs/cdirs/sobs/untracked/data/site/hk", + } + +def get_parser(parser=None): + if parser is None: + parser = ArgumentParser() + parser.add_argument("--config-file", type=str, default=None, + help="Path to mapmaker config.yaml file") + + parser.add_argument("--context", type=str, + help='context file') + parser.add_argument("--area", + help='wcs kernel') + parser.add_argument("--nside", + help='Nside if you want map in HEALPIX') + parser.add_argument("--query", + help='query, can be a file (list of obs_id) or selection string') + parser.add_argument("--odir", + help='output directory') + parser.add_argument("--preprocess_config", type=str, + help='file with the config file to run the preprocessing pipeline') + parser.add_argument('--update-delay', type=int, + help="Number of days (unit is days) in the past to start observation list.") + parser.add_argument("--mode", type=str, ) + parser.add_argument("--nproc", type=int, help='Number of procs in the multiprocessing pool') + parser.add_argument("--comps", type=str,) + parser.add_argument("--singlestream", action="store_true") + parser.add_argument("--only_hits", action="store_true") # this will work only when we don't request splits, since I want to avoid loading the signal + + # detector position splits (fixed in time) + parser.add_argument("--det_in_out", action="store_true") + parser.add_argument("--det_left_right", action="store_true") + parser.add_argument("--det_upper_lower", action="store_true") + + # time samples splits + parser.add_argument("--scan_left_right", action="store_true") + + parser.add_argument("--ntod", type=int, ) + parser.add_argument("--tods", type=str, ) + parser.add_argument("--nset", type=int, ) + parser.add_argument("--wafer", type=str, + help="Detector set to map with") + parser.add_argument("--freq", type=str, + help="Frequency band to map with") + parser.add_argument("--max-dets", type=int, ) + parser.add_argument("--fixed_ftime", type=int, ) + parser.add_argument("--mindur", type=int, ) + parser.add_argument("--site", type=str, ) + parser.add_argument("--verbose", action="count", ) + parser.add_argument("--quiet", action="count", ) + parser.add_argument("--window", type=float, ) + parser.add_argument("--dtype_tod", ) + parser.add_argument("--dtype_map", ) + parser.add_argument("--atomic_db", + help='name of the atomic map database, will be saved where make_filterbin_map is being run') + parser.add_argument("--l2_data_path", + help='Path to level-2 data') + return parser + +def _get_config(config_file): + return yaml.safe_load(open(config_file,'r')) + +def get_ra_ref(obs, site='so_sat3'): + # pass an AxisManager of the observation, and return two + # ra_ref @ dec=-40 deg. 
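In other words, get_ra_ref sweeps the azimuth throw at the start and stop times, transforms to celestial coordinates, and reads off the RA where each sweep crosses the reference declination. That last step is a plain table lookup; a standalone toy version of it (synthetic dec/RA samples, not real pointing; np.interp expects the dec samples to be increasing) ::

    import numpy as np

    # Synthetic sweep: declination rising through the reference, RA varying slowly.
    dec = np.linspace(-0.9, -0.5, 50)     # radians, monotonically increasing
    ra  = np.linspace(1.0, 1.2, 50)       # radians
    dec_ref = np.deg2rad(-40.0)
    ra_ref = np.interp(dec_ref, dec, ra)  # RA where the sweep crosses dec = -40 deg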
+ # + #t = [obs.obs_info.start_time, obs.obs_info.start_time, obs.obs_info.stop_time, obs.obs_info.stop_time] + t_start = obs.obs_info.start_time + t_stop = obs.obs_info.stop_time + az = np.arange((obs.obs_info.az_center-0.5*obs.obs_info.az_throw)*putils.degree, + (obs.obs_info.az_center+0.5*obs.obs_info.az_throw)*putils.degree, 0.5*putils.degree) + el = obs.obs_info.el_center*putils.degree + + csl = so3g.proj.CelestialSightLine.az_el(t_start*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') + ra_, dec_ = csl.coords().transpose()[:2] + ra_ref_start = np.interp(-40*putils.degree, dec_, ra_) + + csl = so3g.proj.CelestialSightLine.az_el(t_stop*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') + ra_, dec_ = csl.coords().transpose()[:2] + ra_ref_stop = np.interp(-40*putils.degree, dec_, ra_) + return ra_ref_start, ra_ref_stop + +def find_footprint(context, tod, ref_wcs, return_pixboxes=False, pad=1): + # Measure the pixel bounds of each observation relative to our + # reference wcs + pixboxes = [] + my_shape, my_wcs = coords.get_footprint(tod, ref_wcs) + my_pixbox = enmap.pixbox_of(ref_wcs, my_shape, my_wcs) + pixboxes.append(my_pixbox) + if len(pixboxes) == 0: raise DataMissing("No usable obs to estimate footprint from") + pixboxes = np.array(pixboxes) + # Handle sky wrapping. This assumes cylindrical coordinates with sky-wrapping + # in the x-direction, and that there's an integer number of pixels around + # the sky. Could be done more generally, but would be much more involved, + # and this should be good enough. + nphi = putils.nint(np.abs(360/ref_wcs.wcs.cdelt[0])) + widths = pixboxes[:,1,0]-pixboxes[:,0,0] + pixboxes[:,0,0] = putils.rewind(pixboxes[:,0,0], + ref=pixboxes[0,0,0], + period=nphi) + pixboxes[:,1,0] = pixboxes[:,0,0] + widths + # It's now safe to find the total pixel bounding box + union_pixbox = np.array([np.min(pixboxes[:,0],0)-pad,np.max(pixboxes[:,1],0) + +pad]) + # Use this to construct the output geometry + shape = union_pixbox[1]-union_pixbox[0] + wcs = ref_wcs.deepcopy() + wcs.wcs.crpix -= union_pixbox[0,::-1] + if return_pixboxes: return shape, wcs, pixboxes + else: return shape, wcs + +class DataMissing(Exception): pass + +def get_pwv(obs, data_dir): + try: + pwv_info = hk_utils.get_detcosamp_hkaman(obs, alias=['pwv'], + fields = ['site.env-radiometer-class.feeds.pwvs.pwv',], + data_dir = data_dir) + pwv_all = pwv_info['env-radiometer-class']['env-radiometer-class'][0] + pwv = np.nanmedian(pwv_all) + except (KeyError, ValueError): + pwv = 0.0 + return pwv + +def read_tods(context, obslist, + dtype_tod=np.float32, only_hits=False, site='so_sat3', + l2_data='/global/cfs/cdirs/sobs/untracked/data/site/hk'): + """ + context : str + Path to context file + """ + context = Context(context) + # this function will run on multiprocessing and can be returned in any random order + # we will also return the obslist to keep track of the order + my_obslist = [] ; my_tods = [] ; my_ra_ref = [] ; pwvs = [] + inds = range(len(obslist)) + ind = 0 + obs_id, detset, band, obs_ind = obslist[ind] + meta = context.get_meta(obs_id, dets={"wafer_slot":detset, "wafer.bandpass":band},) + tod = context.get_obs(meta, no_signal=True) + #tod = context.get_obs(obs_id, dets={"wafer_slot":detset, + # "wafer.bandpass":band}, + # no_signal=True) + to_remove = [] + for field in tod._fields: + if field!='obs_info' and field!='flags' and field!='signal' and field!='focal_plane' and field!='timestamps' and field!='boresight': to_remove.append(field) + for field in 
to_remove: + tod.move(field, None) + if only_hits==False: + ra_ref_start, ra_ref_stop = get_ra_ref(tod) + my_ra_ref.append((ra_ref_start/putils.degree, + ra_ref_stop/putils.degree)) + else: + my_ra_ref.append(None) + tod.flags.wrap('glitch_flags', so3g.proj.RangesMatrix.zeros(tod.shape[:2]), + [(0, 'dets'), (1, 'samps')]) + my_tods.append(tod) + + tod_temp = tod.restrict('dets', meta.dets.vals[:1], in_place=False) + pwvs.append(get_pwv(tod_temp, data_dir=l2_data)) + del tod_temp + return obslist, my_tods, my_ra_ref, pwvs + +class ColoredFormatter(logging.Formatter): + def __init__(self, msg, colors={'DEBUG':colors.reset, + 'INFO':colors.lgreen, + 'WARNING':colors.lbrown, + 'ERROR':colors.lred, + 'CRITICAL':colors.lpurple}): + logging.Formatter.__init__(self, msg) + self.colors = colors + def format(self, record): + try: + col = self.colors[record.levelname] + except KeyError: + col = colors.reset + return col + logging.Formatter.format(self, record) + colors.reset + +class LogInfoFilter(logging.Filter): + def __init__(self, rank=0): + self.rank = rank + try: + # Try to get actual time since task start if possible + import os, psutil + p = psutil.Process(os.getpid()) + self.t0 = p.create_time() + except ImportError: + # Otherwise measure from creation of this filter + self.t0 = time.time() + def filter(self, record): + record.rank = self.rank + record.wtime = time.time()-self.t0 + record.wmins = record.wtime/60. + record.whours= record.wmins/60. + record.mem = memory.current()/1024.**3 + record.resmem= memory.resident()/1024.**3 + record.memmax= memory.max()/1024.**3 + return record + +def handle_empty(prefix, tag, e, L): + # This happens if we ended up with no valid tods for some reason + L.info("%s Skipped: %s" % (tag, str(e))) + putils.mkdir(os.path.dirname(prefix)) + with open(prefix + ".empty", "w") as ofile: ofile.write("\n") + +def make_demod_map_dummy(context): + return None + +def main(config_file=None, defaults=defaults, **args): + cfg = dict(defaults) + # Update the default dict with values provided from a config.yaml file + if config_file is not None: + cfg_from_file = _get_config(config_file) + cfg.update({k: v for k, v in cfg_from_file.items() if v is not None}) + else: + print("No config file provided, assuming default values") + # Merge flags from config file and defaults with any passed through CLI + cfg.update({k: v for k, v in args.items() if v is not None}) + # Certain fields are required. Check if they are all supplied here + required_fields = ['context','area'] + for req in required_fields: + if req not in cfg.keys(): + raise KeyError("{} is a required argument. Please supply it in a config file or via the command line".format(req)) + args = cfg + warnings.simplefilter('ignore') + + comm = mpi.FAKE_WORLD # Fake communicator since we won't use MPI + + verbose = args['verbose'] - args['quiet'] + if args['area'] is not None: + shape, wcs = enmap.read_map_geometry(args['area']) + wcs = wcsutils.WCS(wcs.to_header()) + elif args['nside'] is not None: + nside = int(args['nside']) + else: + print('Neither rectangular area or nside specified, exiting.') + exit(1) + + noise_model = mapmaking.NmatWhite() + ncomp = len(args['comps']) + meta_only = False + putils.mkdir(args['odir']) + + recenter = None + if args['center_at']: + recenter = mapmaking.parse_recentering(args['center_at']) + + # Set up logging. 
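The option handling near the top of main() above layers three sources: hard-coded defaults, then the YAML config file, then CLI flags, where None consistently means "not provided". A minimal self-contained sketch of that merge order (hypothetical keys; PyYAML is the only dependency) ::

    import yaml

    defaults = {"odir": "./output", "comps": "TQU", "nproc": 1}

    def merge_config(defaults, config_path=None, cli_args=None):
        # Later layers win, but only where a value was actually supplied.
        cfg = dict(defaults)
        if config_path is not None:
            with open(config_path) as f:
                from_file = yaml.safe_load(f) or {}
            cfg.update({k: v for k, v in from_file.items() if v is not None})
        cfg.update({k: v for k, v in (cli_args or {}).items() if v is not None})
        return cfg

    # CLI flags that were not passed arrive as None and do not clobber earlier layers:
    print(merge_config(defaults, cli_args={"nproc": 8, "comps": None}))
    # {'odir': './output', 'comps': 'TQU', 'nproc': 8}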
+ L = logging.getLogger(__name__) + L.setLevel(logging.INFO) + ch = logging.StreamHandler() + ch.setLevel(logging.INFO) + ch.setFormatter(ColoredFormatter(" %(wmins)7.2f %(mem)5.2f %(memmax)5.2f %(message)s")) + ch.addFilter(LogInfoFilter()) + L.addHandler(ch) + + if args['preprocess_config'] is not None: + preprocess_config = yaml.safe_load(open(args['preprocess_config'],'r')) + outs = [] + errlog = os.path.join(os.path.dirname(preprocess_config['archive']['index']), + 'errlog.txt') + else: + preprocess_config = None + outs = None + errlog = None + + multiprocessing.set_start_method('spawn') + + if (args['update_delay'] is not None): + min_ctime = int(time.time()) - args['update_delay']*86400 + args['query'] += f" and timestamp>={min_ctime}" + + context = Context(args['context']) + # obslists is a dict, obskeys is a list, periods is an array, only rank 0 + # will do this and broadcast to others. + try: + obslists, obskeys, periods, \ + obs_infos = mapmaking.build_obslists(context, + args['query'], + mode=args['mode'], + nset=args['nset'], + wafer=args['wafer'], + freq=args['freq'], + ntod=args['ntod'], + tods=args['tods'], + fixed_time=args['fixed_time'], + mindur=args['mindur']) + except mapmaking.NoTODFound as err: + print(err) + exit(1) + L.info(f'Done with build_obslists, running {len(obslists)} maps') + + tags = [] + cwd = os.getcwd() + + split_labels = [] + if args['det_in_out']: + split_labels.append('det_in');split_labels.append('det_out') + if args['det_left_right']: + split_labels.append('det_left');split_labels.append('det_right') + if args['det_upper_lower']: + split_labels.append('det_upper');split_labels.append('det_lower') + if args['scan_left_right']: + split_labels.append('scan_left');split_labels.append('scan_right') + if not split_labels: + split_labels = None + + # We open the data base for checking if we have maps already, + # if we do we will not run them again. + if os.path.isfile('./'+args['atomic_db']) and not args['only_hits']: + conn = sqlite3.connect('./'+args['atomic_db']) # open the connector, in reading mode only + cursor = conn.cursor() + keys_to_remove = [] + # Now we have obslists and splits ready, we look through the database + # to remove the maps we already have from it + for key, value in obslists.items(): + if split_labels == None: + # we want to run only full maps + query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="full"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1] ) + res = cursor.execute(query_) + matches = res.fetchall() + if len(matches)>0: + # this means the map (key,value) is already in the data base, + # so we have to remove it to not run it again + # it seems that removing the maps from the obskeys is enough. 
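The existence checks in this loop build their SQL by string interpolation; sqlite3's parameter binding does the same lookup more robustly and avoids quoting problems. A behaviour-equivalent sketch against the same atomic table and columns ::

    import sqlite3

    def atomic_map_exists(cursor, obs_id, telescope, freq_channel, wafer,
                          split_label='full'):
        # Parameterized form of the existence check used in this loop.
        res = cursor.execute(
            'SELECT 1 FROM atomic WHERE obs_id=? AND telescope=? '
            'AND freq_channel=? AND wafer=? AND split_label=?',
            (obs_id, telescope, freq_channel, wafer, split_label))
        return res.fetchone() is not None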
+ keys_to_remove.append(key) + else: + # we are asking for splits + missing_split = False + for split_label in split_labels: + query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="%s"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1], split_label ) + res = cursor.execute(query_) + matches = res.fetchall() + if len(matches)==0: + # this means one of the requested splits is missing + # in the data base + missing_split = True + break + if missing_split == False: + # this means we have all the splits we requested for the + # particular obs_id/telescope/freq/wafer + keys_to_remove.append(key) + for key in keys_to_remove: + obskeys.remove(key) + del obslists[key] + conn.close() # I close since I only wanted to read + + obslists_arr = [item for key, item in obslists.items()] + my_oblists=[]; my_tods = []; my_ra_ref=[]; pwvs=[] + L.info('Starting with read_tods') + with ProcessPoolExecutor(args['nproc']) as exe: + futures = [exe.submit(read_tods, args['context'], obslist, + dtype_tod=args['dtype_tod'], + only_hits=args['only_hits'], + l2_data=args['l2_data_path']) for obslist in obslists_arr] + for future in as_completed(futures): + #L.info('New future as_completed result') + try: + my_obslist_here, my_tods_here, my_ra_ref_here, pwvs_here = future.result() + my_oblists.append(my_obslist_here) + my_tods.append(my_tods_here) + my_ra_ref.append(my_ra_ref_here) + pwvs.append(pwvs_here) + except Exception as e: + errmsg = f'{type(e)}: {e}' + tb = ''.join(traceback.format_tb(e.__traceback__)) + L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") + f = open(errlog, 'a') + f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') + f.close() + continue + futures.remove(future) + # flatten the list of lists + my_oblists = list(itertools.chain.from_iterable(my_oblists)) + my_tods = list(itertools.chain.from_iterable(my_tods)) + my_ra_ref = list(itertools.chain.from_iterable(my_ra_ref)) + pwvs = list(itertools.chain.from_iterable(pwvs)) + L.info('Done with read_tods') + + if args['area'] is not None: + # we will do the profile and footprint here, and then allgather the + # subshapes and subwcs.This way we don't have to communicate the + # massive arrays such as timestamps + subshapes = [] ; subwcses = [] + for obs in my_tods: + if recenter is None: + subshape, subwcs = find_footprint(context, obs, wcs,) + subshapes.append(subshape) ; subwcses.append(subwcs) + else: + subshape = shape; subwcs = wcs + subshapes.append(subshape) ; subwcses.append(subwcs) + # subshape and subwcs are in the order given by my_oblists + + run_list = [] + for oi in range(len(my_tods)): + # we will need to build the obskey from my_oblists + #obs_1722126466_satp1_1111111', 'ws3', 'f150', 0) + pid = my_oblists[oi][3]; + detset = my_oblists[oi][1]; + band = my_oblists[oi][2]; + #obskey = (pid, detset, band) + obslist = [my_oblists[oi]] + t = putils.floor(periods[pid,0]) + t5 = ("%05d" % t)[:5] + prefix = "%s/%s/atomic_%010d_%s_%s" % (args['odir'], t5, t, detset, band) + + if args['area'] is not None: + subshape = subshapes[oi] + subwcs = subwcses[oi] + + tag = "%5d/%d" % (oi+1, len(obskeys)) + putils.mkdir(os.path.dirname(prefix)) + meta_done = os.path.isfile(prefix + "_full_info.hdf") + maps_done = os.path.isfile(prefix + ".empty") or ( + os.path.isfile(prefix + "_full_map.fits") and + os.path.isfile(prefix + "_full_ivar.fits") and + os.path.isfile(prefix + "_full_hits.fits") + ) + #L.info("%s Proc period %4d dset %s:%s @%.0f dur %5.2f h with 
%2d obs" % (tag, pid, detset, band, t, (periods[pid,1]-periods[pid,0])/3600, len(obslist))) + + my_ra_ref_atomic = [my_ra_ref[oi]] + pwv_atomic = [pwvs[oi]] + # Save file for data base of atomic maps. We will write an individual file, + # another script will loop over those files and write into sqlite data base + if not args['only_hits']: + info = [] + if split_labels is None: + # this means the mapmaker was run without any splits requested + info.append(bunch.Bunch(pid=pid, + obs_id=obslist[0][0].encode(), + telescope=obs_infos[obslist[0][3]].telescope.encode(), + freq_channel=band.encode(), + wafer=detset.encode(), + ctime=int(t), + split_label='full'.encode(), + split_detail='full'.encode(), + prefix_path=str(cwd+'/'+prefix+'_full').encode(), + elevation=obs_infos[obslist[0][3]].el_center, + azimuth=obs_infos[obslist[0][3]].az_center, + RA_ref_start=my_ra_ref_atomic[0][0], + RA_ref_stop=my_ra_ref_atomic[0][1], + pwv=pwv_atomic + )) + else: + # splits were requested and we loop over them + for split_label in split_labels: + info.append(bunch.Bunch(pid=pid, + obs_id=obslist[0][0].encode(), + telescope=obs_infos[obslist[0][3]].telescope.encode(), + freq_channel=band.encode(), + wafer=detset.encode(), + ctime=int(t), + split_label=split_label.encode(), + split_detail=''.encode(), + prefix_path=str(cwd+'/'+prefix+'_%s'%split_label).encode(), + elevation=obs_infos[obslist[0][3]].el_center, + azimuth=obs_infos[obslist[0][3]].az_center, + RA_ref_start=my_ra_ref_atomic[0][0], + RA_ref_stop=my_ra_ref_atomic[0][1], + pwv=pwv_atomic + )) + # inputs that are unique per atomic map go into run_list + if args['area'] is not None: + run_list.append([obslist, subshape, subwcs, info, prefix, t]) + elif args['nside'] is not None: + run_list.append([obslist, info, prefix, t]) + # Done with creating run_list + + with ProcessPoolExecutor(args['nproc']) as exe: + if args['area'] is not None: + futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], + noise_model, r[3], preprocess_config, r[4], + shape=r[1], wcs=r[2], + comm = comm, t0=r[5], tag=tag, recenter=recenter, + dtype_map=args['dtype_map'], + dtype_tod=args['dtype_tod'], + comps=args['comps'], + verbose=args['verbose'], + split_labels=split_labels, + singlestream=args['singlestream'], + site=args['site']) for r in run_list] + elif args['nside'] is not None: + futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], + noise_model, r[1], preprocess_config, r[2], + nside = nside, + comm = comm, t0=r[3], tag=tag, recenter=recenter, + dtype_map=args['dtype_map'], + dtype_tod=args['dtype_tod'], + comps=args['comps'], + verbose=args['verbose'], + split_labels=split_labels, + singlestream=args['singlestream'], + site=args['site']) for r in run_list] + for future in as_completed(futures): + L.info('New future as_completed result') + try: + errors, outputs = future.result() + except Exception as e: + errmsg = f'{type(e)}: {e}' + tb = ''.join(traceback.format_tb(e.__traceback__)) + L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") + f = open(errlog, 'a') + f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') + f.close() + continue + futures.remove(future) + if preprocess_config is not None: + for ii in range(len(errors)): + pt.cleanup_mandb(errors[ii], outputs[ii], preprocess_config, L) + print("Done") + return True + +if __name__ == '__main__': + util.main_launcher(main, get_parser) \ No newline at end of file From dbeb889d35c2e082e56c5c324e643c2c153359d2 Mon Sep 17 00:00:00 2001 From: chervias Date: Fri, 8 Nov 2024 
01:15:36 -0800 Subject: [PATCH 38/39] removing script which was added by mistake --- .../make_atomic_filterbin_map.py | 556 ------------------ 1 file changed, 556 deletions(-) delete mode 100644 sotodlib/site_pipeline/make_atomic_filterbin_map.py diff --git a/sotodlib/site_pipeline/make_atomic_filterbin_map.py b/sotodlib/site_pipeline/make_atomic_filterbin_map.py deleted file mode 100644 index 61dcc6548..000000000 --- a/sotodlib/site_pipeline/make_atomic_filterbin_map.py +++ /dev/null @@ -1,556 +0,0 @@ -from argparse import ArgumentParser -import numpy as np, sys, time, warnings, os, so3g, logging, yaml, itertools, multiprocessing, traceback -from sotodlib import coords, mapmaking -from sotodlib.core import Context, metadata as metadata_core, FlagManager, AxisManager, OffsetAxis -from sotodlib.io import metadata, hk_utils -from sotodlib.site_pipeline import preprocess_tod as pt -from pixell import enmap, utils as putils, fft, bunch, wcsutils, tilemap, colors, memory, mpi -from concurrent.futures import ProcessPoolExecutor, as_completed -import sotodlib.site_pipeline.util as util - -defaults = {"area": None, - "nside": None, - "query": "type == 'obs' and subtype == 'cmb'", - "odir": "./output", - "preprocess_config": None, - "update-delay": None, - "comps": "TQU", - "mode": "per_obs", - "nproc": 1, - "ntod": None, - "tods": None, - "nset": None, - "wafer": None, - "freq": None, - "center_at": None, - "site": 'so_sat3', - "max_dets": None, # not implemented yet - "verbose": 0, - "quiet": 0, - "tiled": 0, # not implemented yet - "singlestream": False, - "only_hits": False, - "det_in_out": False, - "det_left_right":False, - "det_upper_lower":False, - "scan_left_right":False, - "window":0.0, # not implemented yet - "dtype_tod": np.float32, - "dtype_map": np.float64, - "atomic_db": "atomic_maps.db", - "fixed_time": None, - "mindur": None, - "l2_data_path": "/global/cfs/cdirs/sobs/untracked/data/site/hk", - } - -def get_parser(parser=None): - if parser is None: - parser = ArgumentParser() - parser.add_argument("--config-file", type=str, default=None, - help="Path to mapmaker config.yaml file") - - parser.add_argument("--context", type=str, - help='context file') - parser.add_argument("--area", - help='wcs kernel') - parser.add_argument("--nside", - help='Nside if you want map in HEALPIX') - parser.add_argument("--query", - help='query, can be a file (list of obs_id) or selection string') - parser.add_argument("--odir", - help='output directory') - parser.add_argument("--preprocess_config", type=str, - help='file with the config file to run the preprocessing pipeline') - parser.add_argument('--update-delay', type=int, - help="Number of days (unit is days) in the past to start observation list.") - parser.add_argument("--mode", type=str, ) - parser.add_argument("--nproc", type=int, help='Number of procs in the multiprocessing pool') - parser.add_argument("--comps", type=str,) - parser.add_argument("--singlestream", action="store_true") - parser.add_argument("--only_hits", action="store_true") # this will work only when we don't request splits, since I want to avoid loading the signal - - # detector position splits (fixed in time) - parser.add_argument("--det_in_out", action="store_true") - parser.add_argument("--det_left_right", action="store_true") - parser.add_argument("--det_upper_lower", action="store_true") - - # time samples splits - parser.add_argument("--scan_left_right", action="store_true") - - parser.add_argument("--ntod", type=int, ) - parser.add_argument("--tods", type=str, ) - 
parser.add_argument("--nset", type=int, ) - parser.add_argument("--wafer", type=str, - help="Detector set to map with") - parser.add_argument("--freq", type=str, - help="Frequency band to map with") - parser.add_argument("--max-dets", type=int, ) - parser.add_argument("--fixed_ftime", type=int, ) - parser.add_argument("--mindur", type=int, ) - parser.add_argument("--site", type=str, ) - parser.add_argument("--verbose", action="count", ) - parser.add_argument("--quiet", action="count", ) - parser.add_argument("--window", type=float, ) - parser.add_argument("--dtype_tod", ) - parser.add_argument("--dtype_map", ) - parser.add_argument("--atomic_db", - help='name of the atomic map database, will be saved where make_filterbin_map is being run') - parser.add_argument("--l2_data_path", - help='Path to level-2 data') - return parser - -def _get_config(config_file): - return yaml.safe_load(open(config_file,'r')) - -def get_ra_ref(obs, site='so_sat3'): - # pass an AxisManager of the observation, and return two - # ra_ref @ dec=-40 deg. - # - #t = [obs.obs_info.start_time, obs.obs_info.start_time, obs.obs_info.stop_time, obs.obs_info.stop_time] - t_start = obs.obs_info.start_time - t_stop = obs.obs_info.stop_time - az = np.arange((obs.obs_info.az_center-0.5*obs.obs_info.az_throw)*putils.degree, - (obs.obs_info.az_center+0.5*obs.obs_info.az_throw)*putils.degree, 0.5*putils.degree) - el = obs.obs_info.el_center*putils.degree - - csl = so3g.proj.CelestialSightLine.az_el(t_start*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') - ra_, dec_ = csl.coords().transpose()[:2] - ra_ref_start = np.interp(-40*putils.degree, dec_, ra_) - - csl = so3g.proj.CelestialSightLine.az_el(t_stop*np.ones(len(az)), az, el*np.ones(len(az)), site=site, weather='toco') - ra_, dec_ = csl.coords().transpose()[:2] - ra_ref_stop = np.interp(-40*putils.degree, dec_, ra_) - return ra_ref_start, ra_ref_stop - -def find_footprint(context, tod, ref_wcs, return_pixboxes=False, pad=1): - # Measure the pixel bounds of each observation relative to our - # reference wcs - pixboxes = [] - my_shape, my_wcs = coords.get_footprint(tod, ref_wcs) - my_pixbox = enmap.pixbox_of(ref_wcs, my_shape, my_wcs) - pixboxes.append(my_pixbox) - if len(pixboxes) == 0: raise DataMissing("No usable obs to estimate footprint from") - pixboxes = np.array(pixboxes) - # Handle sky wrapping. This assumes cylindrical coordinates with sky-wrapping - # in the x-direction, and that there's an integer number of pixels around - # the sky. Could be done more generally, but would be much more involved, - # and this should be good enough. 
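Concretely, the rewind step below maps each box's x start into the same 360 degree branch as the first box, so footprints that straddle the wrap become directly comparable. A small numeric illustration with toy values (nphi is the number of pixels in a full turn of RA) ::

    import numpy as np
    from pixell import utils

    nphi = 3600                      # toy value: 0.1 deg pixels around the sky
    starts = np.array([100, 3650])   # second box has wrapped past the sky edge
    rewound = utils.rewind(starts, ref=starts[0], period=nphi)
    # rewound is [100, 50]: 3650 is recognized as 50 plus one full wrap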
- nphi = putils.nint(np.abs(360/ref_wcs.wcs.cdelt[0])) - widths = pixboxes[:,1,0]-pixboxes[:,0,0] - pixboxes[:,0,0] = putils.rewind(pixboxes[:,0,0], - ref=pixboxes[0,0,0], - period=nphi) - pixboxes[:,1,0] = pixboxes[:,0,0] + widths - # It's now safe to find the total pixel bounding box - union_pixbox = np.array([np.min(pixboxes[:,0],0)-pad,np.max(pixboxes[:,1],0) - +pad]) - # Use this to construct the output geometry - shape = union_pixbox[1]-union_pixbox[0] - wcs = ref_wcs.deepcopy() - wcs.wcs.crpix -= union_pixbox[0,::-1] - if return_pixboxes: return shape, wcs, pixboxes - else: return shape, wcs - -class DataMissing(Exception): pass - -def get_pwv(obs, data_dir): - try: - pwv_info = hk_utils.get_detcosamp_hkaman(obs, alias=['pwv'], - fields = ['site.env-radiometer-class.feeds.pwvs.pwv',], - data_dir = data_dir) - pwv_all = pwv_info['env-radiometer-class']['env-radiometer-class'][0] - pwv = np.nanmedian(pwv_all) - except (KeyError, ValueError): - pwv = 0.0 - return pwv - -def read_tods(context, obslist, - dtype_tod=np.float32, only_hits=False, site='so_sat3', - l2_data='/global/cfs/cdirs/sobs/untracked/data/site/hk'): - """ - context : str - Path to context file - """ - context = Context(context) - # this function will run on multiprocessing and can be returned in any random order - # we will also return the obslist to keep track of the order - my_obslist = [] ; my_tods = [] ; my_ra_ref = [] ; pwvs = [] - inds = range(len(obslist)) - ind = 0 - obs_id, detset, band, obs_ind = obslist[ind] - meta = context.get_meta(obs_id, dets={"wafer_slot":detset, "wafer.bandpass":band},) - tod = context.get_obs(meta, no_signal=True) - #tod = context.get_obs(obs_id, dets={"wafer_slot":detset, - # "wafer.bandpass":band}, - # no_signal=True) - to_remove = [] - for field in tod._fields: - if field!='obs_info' and field!='flags' and field!='signal' and field!='focal_plane' and field!='timestamps' and field!='boresight': to_remove.append(field) - for field in to_remove: - tod.move(field, None) - if only_hits==False: - ra_ref_start, ra_ref_stop = get_ra_ref(tod) - my_ra_ref.append((ra_ref_start/putils.degree, - ra_ref_stop/putils.degree)) - else: - my_ra_ref.append(None) - tod.flags.wrap('glitch_flags', so3g.proj.RangesMatrix.zeros(tod.shape[:2]), - [(0, 'dets'), (1, 'samps')]) - my_tods.append(tod) - - tod_temp = tod.restrict('dets', meta.dets.vals[:1], in_place=False) - pwvs.append(get_pwv(tod_temp, data_dir=l2_data)) - del tod_temp - return obslist, my_tods, my_ra_ref, pwvs - -class ColoredFormatter(logging.Formatter): - def __init__(self, msg, colors={'DEBUG':colors.reset, - 'INFO':colors.lgreen, - 'WARNING':colors.lbrown, - 'ERROR':colors.lred, - 'CRITICAL':colors.lpurple}): - logging.Formatter.__init__(self, msg) - self.colors = colors - def format(self, record): - try: - col = self.colors[record.levelname] - except KeyError: - col = colors.reset - return col + logging.Formatter.format(self, record) + colors.reset - -class LogInfoFilter(logging.Filter): - def __init__(self, rank=0): - self.rank = rank - try: - # Try to get actual time since task start if possible - import os, psutil - p = psutil.Process(os.getpid()) - self.t0 = p.create_time() - except ImportError: - # Otherwise measure from creation of this filter - self.t0 = time.time() - def filter(self, record): - record.rank = self.rank - record.wtime = time.time()-self.t0 - record.wmins = record.wtime/60. - record.whours= record.wmins/60. 
- record.mem = memory.current()/1024.**3 - record.resmem= memory.resident()/1024.**3 - record.memmax= memory.max()/1024.**3 - return record - -def handle_empty(prefix, tag, e, L): - # This happens if we ended up with no valid tods for some reason - L.info("%s Skipped: %s" % (tag, str(e))) - putils.mkdir(os.path.dirname(prefix)) - with open(prefix + ".empty", "w") as ofile: ofile.write("\n") - -def make_demod_map_dummy(context): - return None - -def main(config_file=None, defaults=defaults, **args): - cfg = dict(defaults) - # Update the default dict with values provided from a config.yaml file - if config_file is not None: - cfg_from_file = _get_config(config_file) - cfg.update({k: v for k, v in cfg_from_file.items() if v is not None}) - else: - print("No config file provided, assuming default values") - # Merge flags from config file and defaults with any passed through CLI - cfg.update({k: v for k, v in args.items() if v is not None}) - # Certain fields are required. Check if they are all supplied here - required_fields = ['context','area'] - for req in required_fields: - if req not in cfg.keys(): - raise KeyError("{} is a required argument. Please supply it in a config file or via the command line".format(req)) - args = cfg - warnings.simplefilter('ignore') - - comm = mpi.FAKE_WORLD # Fake communicator since we won't use MPI - - verbose = args['verbose'] - args['quiet'] - if args['area'] is not None: - shape, wcs = enmap.read_map_geometry(args['area']) - wcs = wcsutils.WCS(wcs.to_header()) - elif args['nside'] is not None: - nside = int(args['nside']) - else: - print('Neither rectangular area or nside specified, exiting.') - exit(1) - - noise_model = mapmaking.NmatWhite() - ncomp = len(args['comps']) - meta_only = False - putils.mkdir(args['odir']) - - recenter = None - if args['center_at']: - recenter = mapmaking.parse_recentering(args['center_at']) - - # Set up logging. - L = logging.getLogger(__name__) - L.setLevel(logging.INFO) - ch = logging.StreamHandler() - ch.setLevel(logging.INFO) - ch.setFormatter(ColoredFormatter(" %(wmins)7.2f %(mem)5.2f %(memmax)5.2f %(message)s")) - ch.addFilter(LogInfoFilter()) - L.addHandler(ch) - - if args['preprocess_config'] is not None: - preprocess_config = yaml.safe_load(open(args['preprocess_config'],'r')) - outs = [] - errlog = os.path.join(os.path.dirname(preprocess_config['archive']['index']), - 'errlog.txt') - else: - preprocess_config = None - outs = None - errlog = None - - multiprocessing.set_start_method('spawn') - - if (args['update_delay'] is not None): - min_ctime = int(time.time()) - args['update_delay']*86400 - args['query'] += f" and timestamp>={min_ctime}" - - context = Context(args['context']) - # obslists is a dict, obskeys is a list, periods is an array, only rank 0 - # will do this and broadcast to others. 
- try: - obslists, obskeys, periods, \ - obs_infos = mapmaking.build_obslists(context, - args['query'], - mode=args['mode'], - nset=args['nset'], - wafer=args['wafer'], - freq=args['freq'], - ntod=args['ntod'], - tods=args['tods'], - fixed_time=args['fixed_time'], - mindur=args['mindur']) - except mapmaking.NoTODFound as err: - print(err) - exit(1) - L.info(f'Done with build_obslists, running {len(obslists)} maps') - - tags = [] - cwd = os.getcwd() - - split_labels = [] - if args['det_in_out']: - split_labels.append('det_in');split_labels.append('det_out') - if args['det_left_right']: - split_labels.append('det_left');split_labels.append('det_right') - if args['det_upper_lower']: - split_labels.append('det_upper');split_labels.append('det_lower') - if args['scan_left_right']: - split_labels.append('scan_left');split_labels.append('scan_right') - if not split_labels: - split_labels = None - - # We open the data base for checking if we have maps already, - # if we do we will not run them again. - if os.path.isfile('./'+args['atomic_db']) and not args['only_hits']: - conn = sqlite3.connect('./'+args['atomic_db']) # open the connector, in reading mode only - cursor = conn.cursor() - keys_to_remove = [] - # Now we have obslists and splits ready, we look through the database - # to remove the maps we already have from it - for key, value in obslists.items(): - if split_labels == None: - # we want to run only full maps - query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="full"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1] ) - res = cursor.execute(query_) - matches = res.fetchall() - if len(matches)>0: - # this means the map (key,value) is already in the data base, - # so we have to remove it to not run it again - # it seems that removing the maps from the obskeys is enough. 
- keys_to_remove.append(key) - else: - # we are asking for splits - missing_split = False - for split_label in split_labels: - query_ = 'SELECT * from atomic where obs_id="%s" and telescope="%s" and freq_channel="%s" and wafer="%s" and split_label="%s"'%(value[0][0], obs_infos[value[0][3]].telescope, key[2], key[1], split_label ) - res = cursor.execute(query_) - matches = res.fetchall() - if len(matches)==0: - # this means one of the requested splits is missing - # in the data base - missing_split = True - break - if missing_split == False: - # this means we have all the splits we requested for the - # particular obs_id/telescope/freq/wafer - keys_to_remove.append(key) - for key in keys_to_remove: - obskeys.remove(key) - del obslists[key] - conn.close() # I close since I only wanted to read - - obslists_arr = [item for key, item in obslists.items()] - my_oblists=[]; my_tods = []; my_ra_ref=[]; pwvs=[] - L.info('Starting with read_tods') - with ProcessPoolExecutor(args['nproc']) as exe: - futures = [exe.submit(read_tods, args['context'], obslist, - dtype_tod=args['dtype_tod'], - only_hits=args['only_hits'], - l2_data=args['l2_data_path']) for obslist in obslists_arr] - for future in as_completed(futures): - #L.info('New future as_completed result') - try: - my_obslist_here, my_tods_here, my_ra_ref_here, pwvs_here = future.result() - my_oblists.append(my_obslist_here) - my_tods.append(my_tods_here) - my_ra_ref.append(my_ra_ref_here) - pwvs.append(pwvs_here) - except Exception as e: - errmsg = f'{type(e)}: {e}' - tb = ''.join(traceback.format_tb(e.__traceback__)) - L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") - f = open(errlog, 'a') - f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') - f.close() - continue - futures.remove(future) - # flatten the list of lists - my_oblists = list(itertools.chain.from_iterable(my_oblists)) - my_tods = list(itertools.chain.from_iterable(my_tods)) - my_ra_ref = list(itertools.chain.from_iterable(my_ra_ref)) - pwvs = list(itertools.chain.from_iterable(pwvs)) - L.info('Done with read_tods') - - if args['area'] is not None: - # we will do the profile and footprint here, and then allgather the - # subshapes and subwcs.This way we don't have to communicate the - # massive arrays such as timestamps - subshapes = [] ; subwcses = [] - for obs in my_tods: - if recenter is None: - subshape, subwcs = find_footprint(context, obs, wcs,) - subshapes.append(subshape) ; subwcses.append(subwcs) - else: - subshape = shape; subwcs = wcs - subshapes.append(subshape) ; subwcses.append(subwcs) - # subshape and subwcs are in the order given by my_oblists - - run_list = [] - for oi in range(len(my_tods)): - # we will need to build the obskey from my_oblists - #obs_1722126466_satp1_1111111', 'ws3', 'f150', 0) - pid = my_oblists[oi][3]; - detset = my_oblists[oi][1]; - band = my_oblists[oi][2]; - #obskey = (pid, detset, band) - obslist = [my_oblists[oi]] - t = putils.floor(periods[pid,0]) - t5 = ("%05d" % t)[:5] - prefix = "%s/%s/atomic_%010d_%s_%s" % (args['odir'], t5, t, detset, band) - - if args['area'] is not None: - subshape = subshapes[oi] - subwcs = subwcses[oi] - - tag = "%5d/%d" % (oi+1, len(obskeys)) - putils.mkdir(os.path.dirname(prefix)) - meta_done = os.path.isfile(prefix + "_full_info.hdf") - maps_done = os.path.isfile(prefix + ".empty") or ( - os.path.isfile(prefix + "_full_map.fits") and - os.path.isfile(prefix + "_full_ivar.fits") and - os.path.isfile(prefix + "_full_hits.fits") - ) - #L.info("%s Proc period %4d dset %s:%s @%.0f dur %5.2f h with 
%2d obs" % (tag, pid, detset, band, t, (periods[pid,1]-periods[pid,0])/3600, len(obslist))) - - my_ra_ref_atomic = [my_ra_ref[oi]] - pwv_atomic = [pwvs[oi]] - # Save file for data base of atomic maps. We will write an individual file, - # another script will loop over those files and write into sqlite data base - if not args['only_hits']: - info = [] - if split_labels is None: - # this means the mapmaker was run without any splits requested - info.append(bunch.Bunch(pid=pid, - obs_id=obslist[0][0].encode(), - telescope=obs_infos[obslist[0][3]].telescope.encode(), - freq_channel=band.encode(), - wafer=detset.encode(), - ctime=int(t), - split_label='full'.encode(), - split_detail='full'.encode(), - prefix_path=str(cwd+'/'+prefix+'_full').encode(), - elevation=obs_infos[obslist[0][3]].el_center, - azimuth=obs_infos[obslist[0][3]].az_center, - RA_ref_start=my_ra_ref_atomic[0][0], - RA_ref_stop=my_ra_ref_atomic[0][1], - pwv=pwv_atomic - )) - else: - # splits were requested and we loop over them - for split_label in split_labels: - info.append(bunch.Bunch(pid=pid, - obs_id=obslist[0][0].encode(), - telescope=obs_infos[obslist[0][3]].telescope.encode(), - freq_channel=band.encode(), - wafer=detset.encode(), - ctime=int(t), - split_label=split_label.encode(), - split_detail=''.encode(), - prefix_path=str(cwd+'/'+prefix+'_%s'%split_label).encode(), - elevation=obs_infos[obslist[0][3]].el_center, - azimuth=obs_infos[obslist[0][3]].az_center, - RA_ref_start=my_ra_ref_atomic[0][0], - RA_ref_stop=my_ra_ref_atomic[0][1], - pwv=pwv_atomic - )) - # inputs that are unique per atomic map go into run_list - if args['area'] is not None: - run_list.append([obslist, subshape, subwcs, info, prefix, t]) - elif args['nside'] is not None: - run_list.append([obslist, info, prefix, t]) - # Done with creating run_list - - with ProcessPoolExecutor(args['nproc']) as exe: - if args['area'] is not None: - futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], - noise_model, r[3], preprocess_config, r[4], - shape=r[1], wcs=r[2], - comm = comm, t0=r[5], tag=tag, recenter=recenter, - dtype_map=args['dtype_map'], - dtype_tod=args['dtype_tod'], - comps=args['comps'], - verbose=args['verbose'], - split_labels=split_labels, - singlestream=args['singlestream'], - site=args['site']) for r in run_list] - elif args['nside'] is not None: - futures = [exe.submit(mapmaking.make_demod_map, args['context'], r[0], - noise_model, r[1], preprocess_config, r[2], - nside = nside, - comm = comm, t0=r[3], tag=tag, recenter=recenter, - dtype_map=args['dtype_map'], - dtype_tod=args['dtype_tod'], - comps=args['comps'], - verbose=args['verbose'], - split_labels=split_labels, - singlestream=args['singlestream'], - site=args['site']) for r in run_list] - for future in as_completed(futures): - L.info('New future as_completed result') - try: - errors, outputs = future.result() - except Exception as e: - errmsg = f'{type(e)}: {e}' - tb = ''.join(traceback.format_tb(e.__traceback__)) - L.info(f"ERROR: future.result()\n{errmsg}\n{tb}") - f = open(errlog, 'a') - f.write(f'\n{time.time()}, future.result() error\n{errmsg}\n{tb}\n') - f.close() - continue - futures.remove(future) - if preprocess_config is not None: - for ii in range(len(errors)): - pt.cleanup_mandb(errors[ii], outputs[ii], preprocess_config, L) - print("Done") - return True - -if __name__ == '__main__': - util.main_launcher(main, get_parser) \ No newline at end of file From 6a4627ed5789d401c6a8b2105f29463d9ee11a8e Mon Sep 17 00:00:00 2001 From: chervias Date: Fri, 8 Nov 2024 
From 6a4627ed5789d401c6a8b2105f29463d9ee11a8e Mon Sep 17 00:00:00 2001
From: chervias
Date: Fri, 8 Nov 2024 01:22:17 -0800
Subject: [PATCH 39/39] Various fixes

---
 sotodlib/mapmaking/demod_mapmaker.py | 120 +++++++++++++--------------
 1 file changed, 60 insertions(+), 60 deletions(-)

diff --git a/sotodlib/mapmaking/demod_mapmaker.py b/sotodlib/mapmaking/demod_mapmaker.py
index 9c04ac310..54e5b0b35 100644
--- a/sotodlib/mapmaking/demod_mapmaker.py
+++ b/sotodlib/mapmaking/demod_mapmaker.py
@@ -12,7 +12,6 @@
 from .. import core
 from .. import coords
-from .. import site_pipeline
 from .utilities import recentering_to_quat_lonlat, evaluate_recentering, MultiZipper, unarr, safe_invert_div
 from .utilities import import_optional, get_flags
 from .noise_model import NmatWhite
@@ -384,8 +383,8 @@ def setup_demod_map(noise_model, shape=None, wcs=None, nside=None,
                     singlestream=False, dtype_tod=np.float32,
                     dtype_map=np.float32, recenter=None, verbose=0):
     """
-        Setup the classes for demod mapmaking and return
-        a DemodMapmmaker object
+    Setup the classes for demod mapmaking and return
+    a DemodMapmaker object
     """
     if shape is not None and wcs is not None:
         if split_labels==None:
@@ -430,7 +429,7 @@ def write_demod_maps(prefix, data, split_labels=None):
     """
-        Write maps from data into files
+    Write maps from data into files
     """
     if split_labels==None:
         # we have no splits, so we save index 0 of the lists
@@ -465,64 +464,65 @@ def make_demod_map(context, obslist, noise_model, info,
                    tag="", verbose=0, split_labels=None, L=None,
                    site='so_sat3', recenter=None, singlestream=False):
     """
-        Make a demodulated map from the list of observations in obslist.
+    Make a demodulated map from the list of observations in obslist.

-        Arguments
-        ---------
-        context : str
-            File path to context used to load obs from.
-        obslist : dict
-            The obslist which is the output of the
-            mapmaking.obs_grouping.build_obslists, contains the information of the
-            single or multiple obs to map.
-        noise_model : sotodlib.mapmaking.Nmat
-            Noise model to pass to DemodMapmaker.
-        info : list
-            Information for the database, will be written as a .hdf file.
-        preprocess_config : dict
-            Dictionary with the config yaml file for the preprocess database.
-        prefix : str
-            Prefix for the output files
-        shape : tuple, optional
-            Shape of the geometry to use for mapping.
-        wcs : dict, optional
-            WCS kernel of the geometry to use for mapping.
-        nside : int, optional
-            Nside for healpix pixelization
-        comps : str, optional
-            Which components to map, only TQU supported for now.
-        t0 : int, optional
-            Ctime to use as the label in the map files.
-        dtype_tod : numpy.dtype, optional
-            The data type to use for the time-ordered data. Only tested
-            with float32.
-        dtype_map : numpy.dtype, optional
-            The data type to use for the maps.
-        tag : str, optional
-            Prefix tag for the logger.
-        verbose : bool, optional
-        split_labels : list or None, optional
-            A list of strings with the splits requested. If None then no splits
-            were asked for, i.e. we will produce one map.
-        L : logger, optional
-            Logger for printing on the screen.
-        site : str, optional
-            Plataform name for the pointing matrix.
-        recenter : str or None
-            String to make object-centered maps, such as Moon/Sun/Planet centered maps.
-            Look at sotodlib.mapmaking.parse_recentering for details.
-        singlestream : Bool
-            If True, do not perform demodulated filter+bin mapmaking but
-            rather regular filter+bin mapmaking, i.e. map from obs.signal
-            rather than from obs.dsT, obs.demodQ, obs.demodU.
-
-        Returns
-        -------
-        errors : list
-            List of errors from preprocess database. To be used in cleanup_mandb.
-        outputs : list
-            List of outputs from preprocess database. To be used in cleanup_mandb.
+    Arguments
+    ---------
+    context : str
+        File path to context used to load obs from.
+    obslist : dict
+        The obslist which is the output of the
+        mapmaking.obs_grouping.build_obslists, contains the information of the
+        single or multiple obs to map.
+    noise_model : sotodlib.mapmaking.Nmat
+        Noise model to pass to DemodMapmaker.
+    info : list
+        Information for the database, will be written as a .hdf file.
+    preprocess_config : dict
+        Dictionary with the config yaml file for the preprocess database.
+    prefix : str
+        Prefix for the output files
+    shape : tuple, optional
+        Shape of the geometry to use for mapping.
+    wcs : dict, optional
+        WCS kernel of the geometry to use for mapping.
+    nside : int, optional
+        Nside for healpix pixelization
+    comps : str, optional
+        Which components to map, only TQU supported for now.
+    t0 : int, optional
+        Ctime to use as the label in the map files.
+    dtype_tod : numpy.dtype, optional
+        The data type to use for the time-ordered data. Only tested
+        with float32.
+    dtype_map : numpy.dtype, optional
+        The data type to use for the maps.
+    tag : str, optional
+        Prefix tag for the logger.
+    verbose : bool, optional
+    split_labels : list or None, optional
+        A list of strings with the splits requested. If None then no splits
+        were asked for, i.e. we will produce one map.
+    L : logger, optional
+        Logger for printing on the screen.
+    site : str, optional
+        Platform name for the pointing matrix.
+    recenter : str or None
+        String to make object-centered maps, such as Moon/Sun/Planet centered maps.
+        Look at sotodlib.mapmaking.parse_recentering for details.
+    singlestream : Bool
+        If True, do not perform demodulated filter+bin mapmaking but
+        rather regular filter+bin mapmaking, i.e. map from obs.signal
+        rather than from obs.dsT, obs.demodQ, obs.demodU.
+
+    Returns
+    -------
+    errors : list
+        List of errors from preprocess database. To be used in cleanup_mandb.
+    outputs : list
+        List of outputs from preprocess database. To be used in cleanup_mandb.
     """
+    from .. import site_pipeline
     context = core.Context(context)
     if L is None:
         L = site_pipeline.util.init_logger("Demod filterbin mapmaking")
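
To make the docstring concrete, a single atomic map could be produced roughly
as follows. This is a sketch only: the context path, output prefix, obslist and
info contents, and geometry are placeholder values, and the keyword set follows
the docstring above rather than a verified call signature:

import numpy as np
from pixell import enmap, bunch
from sotodlib import mapmaking

# Placeholder inputs: in the pipeline, obslist comes from
# mapmaking.build_obslists and info from the database bookkeeping
# shown in the removed driver script above.
obslist = [('obs_1722126466_satp1_1111111', 'ws3', 'f150', 0)]
info = [bunch.Bunch(obs_id=b'obs_1722126466_satp1_1111111',
                    wafer=b'ws3', freq_channel=b'f150')]
# A small CAR geometry around the origin, 0.5 arcmin resolution.
shape, wcs = enmap.geometry(pos=np.deg2rad([[-1, -1], [1, 1]]),
                            res=np.deg2rad(0.5/60), proj='car')
noise_model = mapmaking.NmatWhite()

errors, outputs = mapmaking.make_demod_map(
    'context.yaml', obslist, noise_model, info,
    None,                                  # preprocess_config: none here
    'output/atomic_1722126466_ws3_f150',   # output file prefix
    shape=shape, wcs=wcs, comps='TQU', t0=1722126466,
    dtype_tod=np.float32, dtype_map=np.float64,
    split_labels=None, site='so_sat3', singlestream=False)

With a preprocess_config set, the returned errors and outputs would then be
passed to cleanup_mandb, as in the removed driver script.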