Commit

Merge branch 'main' into lstbin-cal
tyler-a-cox authored Jan 12, 2024
2 parents fb9e3b4 + d45df27 commit 5cff5f0
Showing 23 changed files with 2,790 additions and 992 deletions.
13 changes: 9 additions & 4 deletions .github/workflows/ci.yml
@@ -14,7 +14,7 @@ jobs:
PYTHON: ${{ matrix.python-version }}
OS: ${{ matrix.os }}
runs-on: ${{ matrix.os }}

strategy:
matrix:
os: [ubuntu-latest, macos-latest]
@@ -40,7 +40,12 @@ jobs:
run: |
pytest -n auto --pyargs hera_cal --cov=hera_cal --cov-config=./.coveragerc --cov-report xml:./coverage.xml --durations=15
-      - name: Upload Coverage (Ubuntu)
+      - name: Upload coverage report
        if: matrix.os == 'ubuntu-latest' && success()
-        run: |
-          bash <(curl -s https://codecov.io/bash) -t ${{ secrets.CODECOV_TOKEN }}
+        uses: codecov/[email protected]
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          file: ./coverage.xml
+          flags: unittests
+          name: codecov-umbrella
+          fail_ci_if_error: true
6 changes: 6 additions & 0 deletions hera_cal/data/example_filter_params.yaml
@@ -0,0 +1,6 @@
filter_centers:
(0, 1): 0.1234
(0, 2): 0.173
filter_half_widths:
(0, 1): 0.05
(0, 2): 0.08
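
The keys of this new YAML file are antenna pairs serialized as strings. A minimal sketch of reading it back into tuple-keyed dictionaries, assuming PyYAML is available and using ast.literal_eval to recover the tuples (hera_cal's own reader of these parameters may differ):

    import ast
    import yaml  # assumes PyYAML is installed

    with open("hera_cal/data/example_filter_params.yaml") as f:
        params = yaml.safe_load(f)

    # YAML parses keys like "(0, 1)" as plain strings; convert them back
    # to antenna-pair tuples for use as dictionary keys.
    filter_centers = {ast.literal_eval(k): v for k, v in params["filter_centers"].items()}
    filter_half_widths = {ast.literal_eval(k): v for k, v in params["filter_half_widths"].items()}

    assert filter_centers[(0, 1)] == 0.1234
    assert filter_half_widths[(0, 2)] == 0.08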
105 changes: 78 additions & 27 deletions hera_cal/datacontainer.py
Expand Up @@ -5,11 +5,13 @@
import numpy as np
from collections import OrderedDict as odict
import copy
import warnings

from typing import Sequence
from .utils import conj_pol, comply_pol, make_bl, comply_bl, reverse_bl
from .red_groups import RedundantGroups, Baseline, AntPair


class DataContainer:
"""Dictionary-like object that abstracts away the pol/ant pair ordering of data
dictionaries and the polarization case (i.e. 'nn' vs. 'NN'). Keys are in
@@ -68,8 +70,7 @@ def __init__(self, data):
setattr(self, attr, getattr(data, attr))
else:
setattr(self, attr, None)



@property
def dtype(self):
"""The dtype of the underlying data."""
@@ -81,7 +82,6 @@ def dtype(self):
else:
return None


def antpairs(self, pol=None):
'''Return a set of antenna pairs (with a specific pol or more generally).'''
if pol is None:
@@ -185,7 +185,7 @@ def __delitem__(self, key):
@property
def shape(self) -> tuple[int]:
return self[next(iter(self.keys()))].shape

def concatenate(self, D, axis=0):
'''Concatenates D, a DataContainer or a list of DCs, with self along an axis'''
# check type of D
@@ -514,26 +514,81 @@ def select_or_expand_times(self, new_times, in_place=True, skip_bda_check=False)
if not in_place:
return dc

def select_freqs(
self,
freqs: np.ndarray | None = None,
channels: np.ndarray | slice | None = None,
in_place: bool = True
):
"""Update the object with a subset of frequencies (which may be repeated).
While typically this will be used to down-select frequencies, one can
'expand' the frequencies by duplicating channels.
Parameters
----------
freqs : np.ndarray, optional
Frequencies to select. If given, all frequencies must be in the datacontainer.
channels : np.ndarray, slice, optional
Channels to select. If given, all channels must be in the datacontainer.
Only one of freqs or channels can be given.
in_place : bool, optional
If True, modify the object in place. Otherwise, return a modified copy.
Even if `in_place` is True, the object is still returned for convenience.
Returns
-------
DataContainer
The modified object. If `in_place` is True, this is the same object.
"""
obj = self if in_place else copy.deepcopy(self)
if freqs is None and channels is None:
return obj
elif freqs is not None and channels is not None:
raise ValueError('Cannot specify both freqs and channels.')

if freqs is not None:
if obj.freqs is None:
raise ValueError('Cannot select frequencies if self.freqs is None.')

if not np.all([fq in obj.freqs for fq in freqs]):
raise ValueError('All freqs must be in self.freqs.')
channels = np.searchsorted(obj.freqs, freqs)

if obj.freqs is None:
warnings.warn("It is impossible to automatically detect which axis is frequency. Trying last axis.")
axis = -1
else:
axis = obj[next(iter(obj.keys()))].shape.index(len(obj.freqs))
for bl in obj:
obj[bl] = obj[bl].take(channels, axis=axis)

# update metadata
if obj.freqs is not None:
obj.freqs = obj.freqs[channels]

return obj
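
A minimal usage sketch for the new select_freqs method (the baseline key, array shape, and hand-attached freqs metadata are illustrative, not part of this diff):

    import numpy as np
    from hera_cal.datacontainer import DataContainer

    # Toy container: one baseline with 3 times and 8 frequency channels.
    dc = DataContainer({(0, 1, 'nn'): np.arange(24.0).reshape(3, 8)})
    dc.freqs = np.linspace(100e6, 107e6, 8)  # frequency metadata, attached by hand here

    # Down-select by channel index, in place; the frequency axis is found
    # by matching len(dc.freqs) against the data shape.
    dc.select_freqs(channels=np.array([2, 5]))
    assert dc[(0, 1, 'nn')].shape == (3, 2)
    assert len(dc.freqs) == 2

    # Or select by frequency value on a copy, leaving dc untouched.
    dc2 = dc.select_freqs(freqs=dc.freqs[:1], in_place=False)
    assert dc2[(0, 1, 'nn')].shape == (3, 1)

If dc.freqs were None, selecting by channels would emit the new warning and fall back to taking along the last axis.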


class RedDataContainer(DataContainer):
'''Structure for containing redundant visibilities that can be accessed by any
one of the redundant baseline keys (or their conjugate).'''

def __init__(
-        self,
-        data: DataContainer | dict[Baseline, np.ndarray],
-        reds: RedundantGroups | Sequence[Sequence[Baseline | AntPair]] | None=None,
-        antpos: dict[int, np.ndarray] | None=None,
-        bl_error_tol: float=1.0
-    ):
+        self,
+        data: DataContainer | dict[Baseline, np.ndarray],
+        reds: RedundantGroups | Sequence[Sequence[Baseline | AntPair]] | None = None,
+        antpos: dict[int, np.ndarray] | None = None,
+        bl_error_tol: float = 1.0
+    ):
'''Creates a RedDataContainer.
Parameters
----------
data : DataContainer or dictionary of visibilities, just as one would pass into DataContainer().
Will error if multiple baselines are part of the same redundant group.
reds : :class:`RedundantGroups` object, or list of lists of redundant baseline tuples, e.g. (ind1, ind2, pol).
These are the redundant groups of baselines. If not provided, will try to
infer them from antpos.
antpos: dictionary of antenna positions in the form {ant_index: np.array([x, y, z])}.
Will error if one tries to provide both reds and antpos. If neither is provided,
@@ -545,10 +600,10 @@ def __init__(
Attributes
----------
reds
    A :class:`RedundantGroups` object that contains the redundant groups for
    the entire array, and methods to manipulate them.
'''
if reds is not None and antpos is not None:
raise ValueError('Can only provide reds or antpos, not both.')
@@ -568,25 +623,24 @@ def __init__(
)
else:
raise ValueError('Must provide reds, antpos, or have antpos available at data.antpos')

if not isinstance(reds, RedundantGroups):
reds = RedundantGroups(red_list=reds, antpos=self.antpos)

self.build_red_keys(reds)


def build_red_keys(self, reds: RedundantGroups | list[list[Baseline]]):
'''Build the dictionaries that map baselines to redundant keys.
Arguments:
reds: list of lists of redundant baseline tuples, e.g. (ind1, ind2, pol).
'''

if isinstance(reds, RedundantGroups):
self.reds = reds
else:
self.reds = RedundantGroups(red_list=reds, antpos=getattr(self, 'antpos', None))

self._reds_keyed_on_data = self.reds.keyed_on_bls(bls=self.bls())

# delete unused data to avoid leaking memory
@@ -600,24 +654,23 @@ def build_red_keys(self, reds: RedundantGroups | list[list[Baseline]]):
raise ValueError(
'RedDataContainer can only be constructed with (at most) one baseline per group, '
f'but {bl} is redundant with {redkeys[ubl]}.'
)
else:
redkeys[ubl] = bl


def get_ubl_key(self, bl):
'''Returns the blkey used to internally denote the data stored.
If this bl is in a redundant group present in the data, this will return the
blkey that exists in the data. Otherwise, it will return the array-wide blkey
representing this group.
'''
return self._reds_keyed_on_data.get_ubl_key(bl)

def get_red(self, key):
'''Returns the list of baselines in the array redundant with this key.
Note: this is not just baselines existing in the data itself, but in the
entire array.
'''
return self.reds[key]
@@ -642,8 +695,6 @@ def __setitem__(self, key, value):

super().__setitem__(ubl_key, value)


def __contains__(self, key):
'''Returns true if the baseline redundant with the key is in the data.'''
return (key in self.reds) and (super().__contains__(self.get_ubl_key(key)))
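
A minimal construction sketch for RedDataContainer (the three-antenna east-west layout and toy visibilities are invented for illustration):

    import numpy as np
    from hera_cal.datacontainer import RedDataContainer

    # Hypothetical antenna positions: (0, 1) and (1, 2) are redundant baselines.
    antpos = {0: np.array([0.0, 0.0, 0.0]),
              1: np.array([14.6, 0.0, 0.0]),
              2: np.array([29.2, 0.0, 0.0])}

    # At most one baseline per redundant group may appear in the data.
    data = {(0, 1, 'nn'): np.ones((2, 4), dtype=complex)}
    rdc = RedDataContainer(data, antpos=antpos)

    # The redundant key (1, 2, 'nn') resolves to the stored (0, 1, 'nn') data.
    assert (1, 2, 'nn') in rdc
    assert rdc.get_ubl_key((1, 2, 'nn')) == (0, 1, 'nn')
    np.testing.assert_array_equal(rdc[(1, 2, 'nn')], rdc[(0, 1, 'nn')])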

