From cc1c8edc6d5718ab343f48594200bf42b992e5d4 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Wed, 11 Sep 2024 09:48:49 -0400
Subject: [PATCH 01/12] Add a new generator with the correct parameters
---
src/kbmod/trajectory_generator.py | 125 +++++++++++++++++++++++++++++
src/kbmod/work_unit.py | 18 +++++
tests/test_trajectory_generator.py | 28 ++++++-
tests/test_work_unit.py | 9 +++
4 files changed, 178 insertions(+), 2 deletions(-)
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index 604874770..e0b05ba67 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -329,6 +329,131 @@ def __init__(self, v_arr, ang_arr, average_angle, *args, **kwargs):
super().__init__(v_arr[2], v_arr[0], v_arr[1], ang_arr[2], ang_min, ang_max, *args, **kwargs)
+class EclipticSearch(TrajectoryGenerator):
+ """Search a grid defined by velocities and angles relative to the ecliptic.
+
+ Attributes
+ ----------
+ ecliptic_angle : `float`
+ The angle of the ecliptic in the image (in radians).
+ vel_steps : `int`
+ The number of velocity steps.
+ min_vel : `float`
+ The minimum velocity magnitude (in pixels per day)
+ max_vel : `float`
+ The maximum velocity magnitude (in pixels per day)
+ ang_steps : `int`
+ The number of angle steps.
+ min_ang_offset : `float`
+ The minimum offset of the search angle relative from the ecliptic_angle (in radians).
+ max_ang_offset : `float`
+ The maximum offset of the search angle relative from the ecliptic_angle (in radians).
+ min_ang : `float`
+ The minimum search angle in the image (ecliptic_angle + min_ang_offset) in radians.
+ max_ang : `float`
+ The maximum search angle in the image (ecliptic_angle + max_ang_offset) in radians.
+ """
+
+ def __init__(
+ self,
+ ecliptic_angle,
+ vel_steps,
+ min_vel,
+ max_vel,
+ ang_steps,
+ min_ang_offset,
+ max_ang_offset,
+ angle_units="radians",
+ inclusive_bounds=True,
+ *args,
+ **kwargs,
+ ):
+ """Create a class KBMODV1Search.
+
+ Parameters
+ ----------
+ vel_steps : `int`
+ The number of velocity steps.
+ min_vel : `float`
+ The minimum velocity magnitude (in pixels per day)
+ max_vel : `float`
+ The maximum velocity magnitude (in pixels per day)
+ ang_steps : `int`
+ The number of angle steps.
+ min_ang_offset : `float`
+ The minimum offset of the search angle relative from the ecliptic_angle
+ (in the units defined in ``angle_units``).
+ max_ang_offset : `float`
+ The maximum offset of the search angle relative from the ecliptic_angle
+ (in the units defined in ``angle_units``).
+ angle_units : `str`
+ The units for the angle. Must be one of radians or degrees.
+ Default: 'radians'
+ """
+ super().__init__(*args, **kwargs)
+
+ if vel_steps < 1 or ang_steps < 1:
+ raise ValueError("EclipticSearch requires at least 1 step in each dimension")
+ if max_vel < min_vel:
+ raise ValueError("Invalid EclipticSearch bounds.")
+
+ self.vel_steps = vel_steps
+ self.min_vel = min_vel
+ self.max_vel = max_vel
+ self.vel_stepsize = (self.max_vel - self.min_vel) / float(self.vel_steps - 1)
+
+ # Convert the angles into radians.
+ if angle_units[:3] == "rad":
+ self.ecliptic_angle = ecliptic_angle
+ self.min_ang_offset = min_ang_offset
+ self.max_ang_offset = max_ang_offset
+ elif angle_units[:3] == "deg":
+ deg_to_rad = math.pi / 180.0
+ self.ecliptic_angle = deg_to_rad * ecliptic_angle
+ self.min_ang_offset = deg_to_rad * min_ang_offset
+ self.max_ang_offset = deg_to_rad * max_ang_offset
+ else:
+ raise ValueError(f"Unknown angular units {angle_units}")
+
+ self.ang_steps = ang_steps
+ self.min_ang = self.ecliptic_angle + self.min_ang_offset
+ self.max_ang = self.ecliptic_angle + self.max_ang_offset
+ self.ang_stepsize = (self.max_ang - self.min_ang) / float(self.ang_steps - 1)
+
+ def __repr__(self):
+ return (
+ "EclipticSearch:"
+ f" v=[{self.min_vel}, {self.max_vel}], {self.vel_steps}"
+ f" a=[{self.min_ang}, {self.max_ang}], {self.ang_steps}"
+ )
+
+ def __str__(self):
+ return f"""EclipticSearch:
+ Vel: [{self.min_vel}, {self.max_vel}] in {self.vel_steps} steps.
+ Ang:
+ Ecliptic = {self.ecliptic_angle}
+ Offsets = {self.min_ang_offset} to {self.max_ang_offset}
+ [{self.min_ang}, {self.max_ang}] in {self.ang_steps} steps."""
+
+ def generate(self, *args, **kwargs):
+ """Produces a single candidate trajectory to test.
+
+ Returns
+ -------
+ candidate : `Trajectory`
+ A ``Trajectory`` to test at each pixel.
+ """
+ for ang_i in range(self.ang_steps):
+ for vel_i in range(self.vel_steps):
+ curr_ang = self.min_ang + ang_i * self.ang_stepsize
+ curr_vel = self.min_vel + vel_i * self.vel_stepsize
+
+ vx = math.cos(curr_ang) * curr_vel
+ vy = math.sin(curr_ang) * curr_vel
+
+ yield Trajectory(vx=vx, vy=vy)
+
+
class RandomVelocitySearch(TrajectoryGenerator):
"""Search a grid defined by min/max bounds on pixel velocities."""
diff --git a/src/kbmod/work_unit.py b/src/kbmod/work_unit.py
index a5d1dec5b..4f17a8af6 100644
--- a/src/kbmod/work_unit.py
+++ b/src/kbmod/work_unit.py
@@ -16,6 +16,7 @@
from kbmod.search import ImageStack, LayeredImage, PSF, RawImage, Logging
from kbmod.wcs_utils import (
append_wcs_to_hdu_header,
+ calc_ecliptic_angle,
extract_wcs_from_hdu_header,
wcs_fits_equal,
wcs_from_dict,
@@ -204,6 +205,23 @@ def get_wcs(self, img_num):
return per_img
+ def compute_ecliptic_angle(self):
+ """Return the ecliptic angle (in radians in pixel space) derived from the
+ images and WCS.
+
+ Returns
+ -------
+ ecliptic_angle : `float` or `None`
+ The computed ecliptic_angle or ``None`` if data is missing.
+ """
+
+ wcs = self.get_wcs(0)
+ if wcs is None or self.im_stack is None:
+ logger.warning(f"A valid wcs and ImageStack is needed to compute the ecliptic angle.")
+ return None
+ center_pixel = (self.im_stack.get_width() / 2, self.im_stack.get_height() / 2)
+ return calc_ecliptic_angle(wcs, center_pixel)
+
def get_all_obstimes(self):
"""Return a list of the observation times."""
if self._obstimes is not None:
diff --git a/tests/test_trajectory_generator.py b/tests/test_trajectory_generator.py
index ee8fcb59f..10779d7d8 100644
--- a/tests/test_trajectory_generator.py
+++ b/tests/test_trajectory_generator.py
@@ -4,6 +4,7 @@
from kbmod.trajectory_generator import (
KBMODV1Search,
KBMODV1SearchConfig,
+ EclipticSearch,
SingleVelocitySearch,
RandomVelocitySearch,
VelocityGridSearch,
@@ -44,7 +45,7 @@ def test_VelocityGridSearch(self):
self.assertRaises(ValueError, VelocityGridSearch, 3, 2.0, 0.0, 3, -0.25, 0.25)
def test_KBMODV1Search(self):
- # Note that KBMOD v1's search will never include the upper bound of angle or velocity.
+ # Note that KBMOD v1's legacy search will never include the upper bound of angle or velocity.
gen = KBMODV1Search(3, 0.0, 3.0, 2, -0.25, 0.25)
expected_x = [0.0, 0.9689, 1.9378, 0.0, 1.0, 2.0]
expected_y = [0.0, -0.247, -0.4948, 0.0, 0.0, 0.0]
@@ -62,12 +63,35 @@ def test_KBMODV1Search(self):
self.assertAlmostEqual(tbl["vx"][i], expected_x[i], delta=0.001)
self.assertAlmostEqual(tbl["vy"][i], expected_y[i], delta=0.001)
- # Test invalid number of steps.
+ # Test invalid number of steps and ranges.
self.assertRaises(ValueError, KBMODV1Search, 3, 0.0, 3.0, 0, -0.25, 0.25)
self.assertRaises(ValueError, KBMODV1Search, 0, 0.0, 3.0, 2, -0.25, 0.25)
self.assertRaises(ValueError, KBMODV1Search, 3, 0.0, 3.0, 2, 0.25, -0.25)
self.assertRaises(ValueError, KBMODV1Search, 3, 3.5, 3.0, 2, -0.25, 0.25)
+ def test_EclipticSearch(self):
+ gen = EclipticSearch(0.0, 3, 0.0, 2.0, 3, -45.0, 45.0, angle_units="degrees")
+ expected_x = [0.0, 0.707107, 1.41421, 0.0, 1.0, 2.0, 0.0, 0.707107, 1.41421]
+ expected_y = [0.0, -0.707107, -1.41421, 0.0, 0.0, 0.0, 0.0, 0.707107, 1.41421]
+
+ trjs = [trj for trj in gen]
+ self.assertEqual(len(trjs), 9)
+ for i in range(9):
+ self.assertAlmostEqual(trjs[i].vx, expected_x[i], delta=0.001)
+ self.assertAlmostEqual(trjs[i].vy, expected_y[i], delta=0.001)
+
+ # Test that we get the correct results if we dump to a table.
+ tbl = gen.to_table()
+ self.assertEqual(len(tbl), 9)
+ for i in range(9):
+ self.assertAlmostEqual(tbl["vx"][i], expected_x[i], delta=0.001)
+ self.assertAlmostEqual(tbl["vy"][i], expected_y[i], delta=0.001)
+
+ # Test invalid number of steps and ranges.
+ self.assertRaises(ValueError, EclipticSearch, 0.0, 3, 0.0, 3.0, 0, -0.25, 0.25)
+ self.assertRaises(ValueError, EclipticSearch, 0.0, 0, 0.0, 3.0, 2, -0.25, 0.25)
+ self.assertRaises(ValueError, EclipticSearch, 0.0, 3, 3.5, 3.0, 2, -0.25, 0.25)
+
def test_KBMODV1SearchConfig(self):
# Note that KBMOD v1's search will never include the upper bound of angle or velocity.
gen = KBMODV1SearchConfig([0.0, 3.0, 3], [0.25, 0.25, 2], 0.0)
diff --git a/tests/test_work_unit.py b/tests/test_work_unit.py
index b22e300ad..1bee6ba92 100644
--- a/tests/test_work_unit.py
+++ b/tests/test_work_unit.py
@@ -383,6 +383,15 @@ def test_save_and_load_fits_global_wcs(self):
self.assertIsNotNone(work2.get_wcs(i))
self.assertTrue(wcs_fits_equal(work2.get_wcs(i), self.wcs))
+ def test_get_ecliptic_angle(self):
+ """Check that we can compute an ecliptic angle."""
+ work = WorkUnit(self.im_stack, self.config, self.wcs, None)
+ self.assertAlmostEqual(work.compute_ecliptic_angle(), -0.381541020495931)
+
+ # If we do not have a WCS, we get None for the ecliptic angle.
+ work2 = WorkUnit(self.im_stack, self.config, None, None)
+ self.assertIsNone(work2.compute_ecliptic_angle())
+
def test_to_from_yaml(self):
# Create WorkUnit with only global WCS.
work = WorkUnit(self.im_stack, self.config, self.wcs, None)
From 23ef026c25e414e6cb52cd94653d084d97f8a2ac Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Wed, 11 Sep 2024 10:58:33 -0400
Subject: [PATCH 02/12] Allow TrajectoryGenerators to take optional keyword
 arguments
---
src/kbmod/trajectory_generator.py | 68 +++++++++++++++++++-----------
tests/test_trajectory_generator.py | 27 +++++++++++-
2 files changed, 70 insertions(+), 25 deletions(-)
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index e0b05ba67..4ecc2d7c0 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -1,4 +1,5 @@
import abc
+import copy
import logging
import math
import random
@@ -9,7 +10,7 @@
from kbmod.search import Trajectory
-def create_trajectory_generator(config):
+def create_trajectory_generator(config, **kwargs):
"""Create a TrajectoryGenerator object given a dictionary
of configuration options. The generator class is specified by
the 'name' entry, which must exist and match the class name of one
@@ -53,7 +54,12 @@ def create_trajectory_generator(config):
logger = logging.getLogger(__name__)
logger.info(f"Creating trajectory generator of type {name}")
- return TrajectoryGenerator.generators[name](**config)
+ # Add any keyword arguments to the params, overriding current values.
+ params = copy.deepcopy(config)
+ params.update(kwargs)
+ logger.debug(str(params))
+
+ return TrajectoryGenerator.generators[name](**params)
class TrajectoryGenerator(abc.ABC):
@@ -67,7 +73,7 @@ class TrajectoryGenerator(abc.ABC):
generators = {} # A mapping of class name to class object for subclasses.
- def __init__(self, *args, **kwargs):
+ def __init__(self, **kwargs):
pass
def __init_subclass__(cls, **kwargs):
@@ -131,7 +137,7 @@ def to_table(self):
class SingleVelocitySearch(TrajectoryGenerator):
"""Search a single velocity from each pixel."""
- def __init__(self, vx, vy, *args, **kwargs):
+ def __init__(self, vx, vy, **kwargs):
"""Create a class SingleVelocitySearch.
Parameters
@@ -141,7 +147,7 @@ def __init__(self, vx, vy, *args, **kwargs):
vy : `float`
The velocity in y pixels (pixels per day).
"""
- super().__init__(*args, **kwargs)
+ super().__init__(**kwargs)
self.vx = vx
self.vy = vy
@@ -165,7 +171,7 @@ def generate(self, *args, **kwargs):
class VelocityGridSearch(TrajectoryGenerator):
"""Search a grid defined by steps in velocity space."""
- def __init__(self, vx_steps, min_vx, max_vx, vy_steps, min_vy, max_vy, *args, **kwargs):
+ def __init__(self, vx_steps, min_vx, max_vx, vy_steps, min_vy, max_vy, **kwargs):
"""Create a class VelocityGridSearch.
Parameters
@@ -183,7 +189,7 @@ def __init__(self, vx_steps, min_vx, max_vx, vy_steps, min_vy, max_vy, *args, **
max_vy : `float`
The maximum velocity in the y-dimension (pixels per day).
"""
- super().__init__(*args, **kwargs)
+ super().__init__(**kwargs)
if vx_steps < 2 or vy_steps < 2:
raise ValueError("VelocityGridSearch requires at least 2 steps in each dimension")
@@ -232,7 +238,7 @@ def generate(self, *args, **kwargs):
class KBMODV1Search(TrajectoryGenerator):
"""Search a grid defined by velocities and angles."""
- def __init__(self, vel_steps, min_vel, max_vel, ang_steps, min_ang, max_ang, *args, **kwargs):
+ def __init__(self, vel_steps, min_vel, max_vel, ang_steps, min_ang, max_ang, **kwargs):
"""Create a class KBMODV1Search.
Parameters
@@ -250,7 +256,7 @@ def __init__(self, vel_steps, min_vel, max_vel, ang_steps, min_ang, max_ang, *ar
max_ang : `float`
The maximum angle (in radians)
"""
- super().__init__(*args, **kwargs)
+ super().__init__(**kwargs)
if vel_steps < 1 or ang_steps < 1:
raise ValueError("KBMODV1Search requires at least 1 step in each dimension")
@@ -303,7 +309,7 @@ def generate(self, *args, **kwargs):
class KBMODV1SearchConfig(KBMODV1Search):
"""Search a grid defined by velocities and angles in the format of the configuration file."""
- def __init__(self, v_arr, ang_arr, average_angle, *args, **kwargs):
+ def __init__(self, v_arr, ang_arr, average_angle, **kwargs):
"""Create a class KBMODV1SearchConfig.
Parameters
@@ -326,7 +332,7 @@ def __init__(self, v_arr, ang_arr, average_angle, *args, **kwargs):
ang_min = average_angle - ang_arr[0]
ang_max = average_angle + ang_arr[1]
- super().__init__(v_arr[2], v_arr[0], v_arr[1], ang_arr[2], ang_min, ang_max, *args, **kwargs)
+ super().__init__(v_arr[2], v_arr[0], v_arr[1], ang_arr[2], ang_min, ang_max, **kwargs)
class EclipticSearch(TrajectoryGenerator):
@@ -335,7 +341,7 @@ class EclipticSearch(TrajectoryGenerator):
Attributes
----------
ecliptic_angle : `float`
- The angle of the ecliptic in the image (in radians).
+ The angle to use for the ecliptic in the image (in the units defined in ``angle_units``).
vel_steps : `int`
The number of velocity steps.
min_vel : `float`
@@ -356,16 +362,15 @@ class EclipticSearch(TrajectoryGenerator):
def __init__(
self,
- ecliptic_angle,
- vel_steps,
- min_vel,
- max_vel,
- ang_steps,
- min_ang_offset,
- max_ang_offset,
+ vel_steps=0,
+ min_vel=0.0,
+ max_vel=0.0,
+ ang_steps=0,
+ min_ang_offset=0.0,
+ max_ang_offset=0.0,
angle_units="radians",
- inclusive_bounds=True,
- *args,
+ force_ecliptic=None,
+ computed_ecliptic_angle=None,
**kwargs,
):
"""Create a class KBMODV1Search.
@@ -389,8 +394,23 @@ def __init__(
angle_units : `str`
The units for the angle. Must be one of radians or degrees.
Default: 'radians'
+ force_ecliptic : `float`, optional
+ An override for the ecliptic as given in the config (in the units defined in
+ ``angle_units``). This angle takes precedence over ``computed_ecliptic``.
+ computed_ecliptic_angle : `float`, optional
+ An override for the computed ecliptic from a WCS (in the units defined in
+ ``angle_units``). This parameter is ignored if ``force_ecliptic`` is given.
"""
- super().__init__(*args, **kwargs)
+ super().__init__(**kwargs)
+
+ if force_ecliptic is not None:
+ ecliptic_angle = force_ecliptic
+ elif computed_ecliptic_angle is not None:
+ ecliptic_angle = computed_ecliptic_angle
+ else:
+ logger = logging.getLogger(__name__)
+ logger.warning("No ecliptic angle provided. Using 0.0.")
+ ecliptic_angle = 0.0
if vel_steps < 1 or ang_steps < 1:
raise ValueError("EclipticSearch requires at least 1 step in each dimension")
@@ -457,7 +477,7 @@ def generate(self, *args, **kwargs):
class RandomVelocitySearch(TrajectoryGenerator):
"""Search a grid defined by min/max bounds on pixel velocities."""
- def __init__(self, min_vx, max_vx, min_vy, max_vy, max_samples=1_000_000, *args, **kwargs):
+ def __init__(self, min_vx, max_vx, min_vy, max_vy, max_samples=1_000_000, **kwargs):
"""Create a class KBMODV1Search.
Parameters
@@ -474,7 +494,7 @@ def __init__(self, min_vx, max_vx, min_vy, max_vy, max_samples=1_000_000, *args,
The maximum number of samples to generate. Used to avoid
infinite loops in KBMOD code.
"""
- super().__init__(*args, **kwargs)
+ super().__init__(**kwargs)
if max_vx < min_vx or max_vy < min_vy:
raise ValueError("Invalid RandomVelocitySearch bounds.")
if max_samples <= 0:
diff --git a/tests/test_trajectory_generator.py b/tests/test_trajectory_generator.py
index 10779d7d8..cbd707fd7 100644
--- a/tests/test_trajectory_generator.py
+++ b/tests/test_trajectory_generator.py
@@ -1,3 +1,4 @@
+import numpy as np
import unittest
from kbmod.configuration import SearchConfiguration
@@ -70,7 +71,7 @@ def test_KBMODV1Search(self):
self.assertRaises(ValueError, KBMODV1Search, 3, 3.5, 3.0, 2, -0.25, 0.25)
def test_EclipticSearch(self):
- gen = EclipticSearch(0.0, 3, 0.0, 2.0, 3, -45.0, 45.0, angle_units="degrees")
+ gen = EclipticSearch(3, 0.0, 2.0, 3, -45.0, 45.0, angle_units="degrees", ecliptic_angle=0.0)
expected_x = [0.0, 0.707107, 1.41421, 0.0, 1.0, 2.0, 0.0, 0.707107, 1.41421]
expected_y = [0.0, -0.707107, -1.41421, 0.0, 0.0, 0.0, 0.0, 0.707107, 1.41421]
@@ -151,6 +152,30 @@ def test_create_trajectory_generator(self):
self.assertEqual(gen2.vx, 1)
self.assertEqual(gen2.vy, 2)
+ # Test we can create a trajectory generator with optional keyword parameters.
+ config3 = {
+ "name": "EclipticSearch",
+ "vel_steps": 2,
+ "min_vel": 0.0,
+ "max_vel": 1.0,
+ "ang_steps": 2,
+ "min_ang_offset": 0.0,
+ "max_ang_offset": 45.0,
+ "angle_units": "degrees",
+ "force_ecliptic": None,
+ }
+ gen3 = create_trajectory_generator(config3, computed_ecliptic_angle=45.0)
+ self.assertTrue(type(gen3) is EclipticSearch)
+ self.assertAlmostEqual(gen3.ecliptic_angle, np.pi / 4.0)
+ self.assertEqual(gen3.min_ang, np.pi / 4.0)
+ self.assertEqual(gen3.max_ang, np.pi / 2.0)
+
+ config3["force_ecliptic"] = 0.0
+ gen4 = create_trajectory_generator(config3, computed_ecliptic_angle=45.0)
+ self.assertAlmostEqual(gen4.ecliptic_angle, 0.0)
+ self.assertEqual(gen4.min_ang, 0.0)
+ self.assertEqual(gen4.max_ang, np.pi / 4.0)
+
def test_create_trajectory_generator_config(self):
config = SearchConfiguration()
config.set("ang_arr", [0.5, 0.5, 30])
From fe4a8054cb06b23738dfe8c9f21d7019bad0e16e Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Thu, 12 Sep 2024 09:56:08 -0400
Subject: [PATCH 03/12] Include changes from clustering fix
---
src/kbmod/configuration.py | 3 +-
src/kbmod/filters/clustering_filters.py | 125 ++++++------------------
src/kbmod/run_search.py | 24 +----
tests/test_clustering_filters.py | 91 +++++------------
tests/test_regression_test.py | 2 +-
5 files changed, 59 insertions(+), 186 deletions(-)
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index e5755eccc..fed0a87de 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -24,13 +24,14 @@ def __init__(self):
"center_thresh": 0.00,
"chunk_size": 500000,
"clip_negative": False,
+ "cluster_eps": 20.0,
"cluster_type": "all",
+ "cluster_v_scale": 1.0,
"coadds": [],
"debug": False,
"do_clustering": True,
"do_mask": True,
"do_stamp_filter": True,
- "eps": 0.03,
"encode_num_bytes": -1,
"generator_config": None,
"gpu_filter": False,
diff --git a/src/kbmod/filters/clustering_filters.py b/src/kbmod/filters/clustering_filters.py
index 450c34f00..cc6f5360e 100644
--- a/src/kbmod/filters/clustering_filters.py
+++ b/src/kbmod/filters/clustering_filters.py
@@ -11,17 +11,17 @@ class DBSCANFilter:
"""Cluster the candidates using DBSCAN and only keep a
single representative trajectory from each cluster."""
- def __init__(self, eps, **kwargs):
+ def __init__(self, cluster_eps, **kwargs):
"""Create a DBSCANFilter.
Parameters
----------
- eps : `float`
+ cluster_eps : `float`
The clustering threshold.
"""
- self.eps = eps
+ self.cluster_eps = cluster_eps
self.cluster_type = ""
- self.cluster_args = dict(eps=self.eps, min_samples=1, n_jobs=-1)
+ self.cluster_args = dict(eps=self.cluster_eps, min_samples=1, n_jobs=-1)
def get_filter_name(self):
"""Get the name of the filter.
@@ -31,7 +31,7 @@ def get_filter_name(self):
str
The filter name.
"""
- return f"DBSCAN_{self.cluster_type} eps={self.eps}"
+ return f"DBSCAN_{self.cluster_type} eps={self.cluster_eps}"
def _build_clustering_data(self, result_data):
"""Build the specific data set for this clustering approach.
@@ -82,37 +82,18 @@ def keep_indices(self, result_data):
class ClusterPredictionFilter(DBSCANFilter):
"""Cluster the candidates using their positions at specific times."""
- def __init__(self, eps, pred_times=[0.0], height=1.0, width=1.0, scaled=True, **kwargs):
+ def __init__(self, cluster_eps, pred_times=[0.0], **kwargs):
"""Create a DBSCANFilter.
Parameters
----------
- eps : `float`
+ cluster_eps : `float`
The clustering threshold.
pred_times : `list`
The times a which to prediction the positions.
Default = [0.0] (starting position only)
- height : `int`
- The size of the image height (in pixels) for scaling.
- Default: 1 (no scaling)
- width : `int`
- The size of the image width (in pixels) for scaling.
- Default: 1 (no scaling)
- scaled : `bool`
- Scale the positions to [0, 1] based on ``width`` and ``height``. This impacts
- how ``eps`` is interpreted by DBSCAN. If scaling is turned on ``eps``
- approximates the percentage of each dimension between points. If scaling is
- turned off ``eps`` is a distance in pixels.
"""
- super().__init__(eps, **kwargs)
- if scaled:
- if height <= 0.0 or width <= 0:
- raise ValueError(f"Invalid scaling parameters y={height} by x={width}")
- self.height = height
- self.width = width
- else:
- self.height = 1.0
- self.width = 1.0
+ super().__init__(cluster_eps, **kwargs)
# Confirm we have at least one prediction time.
if len(pred_times) == 0:
@@ -120,7 +101,7 @@ def __init__(self, eps, pred_times=[0.0], height=1.0, width=1.0, scaled=True, **
self.times = pred_times
# Set up the clustering algorithm's name.
- self.cluster_type = f"position (scaled={scaled}) t={self.times}"
+ self.cluster_type = f"position t={self.times}"
def _build_clustering_data(self, result_data):
"""Build the specific data set for this clustering approach.
@@ -145,49 +126,30 @@ def _build_clustering_data(self, result_data):
# the division by height and width will be no-ops.
coords = []
for t in self.times:
- coords.append((x_arr + t * vx_arr) / self.width)
- coords.append((y_arr + t * vy_arr) / self.height)
+ coords.append(x_arr + t * vx_arr)
+ coords.append(y_arr + t * vy_arr)
return np.array(coords)
-class ClusterPosAngVelFilter(DBSCANFilter):
- """Cluster the candidates using their scaled starting position, trajectory
- angles, and trajectory velocity magnitude.
- """
+class ClusterPosVelFilter(DBSCANFilter):
+ """Cluster the candidates using their starting position and velocities."""
- def __init__(self, eps, height, width, vel_lims, ang_lims, **kwargs):
+ def __init__(self, cluster_eps, cluster_v_scale=1.0, **kwargs):
"""Create a DBSCANFilter.
Parameters
----------
- eps : `float`
- The clustering threshold.
- height : `int`
- The size of the image height (in pixels) for scaling.
- width : `int`
- The size of the image width (in pixels) for scaling.
- vel_lims : `list`
- The velocity limits of the search such that v_lim[1] - v_lim[0]
- is the range of velocities searched. Used for scaling.
- ang_lims : `list`
- The angle limits of the search such that ang_lim[1] - ang_lim[0]
- is the range of velocities searched.
+ cluster_eps : `float`
+ The clustering threshold in pixels.
+ cluster_v_scale : `float`
+ The relative scaling of velocity differences compared to position
+ differences. Default: 1.0 (no difference).
"""
- super().__init__(eps, **kwargs)
- if height <= 0.0 or width <= 0:
- raise ValueError(f"Invalid scaling parameters y={height} by x={width}")
- if len(vel_lims) < 2:
- raise ValueError(f"Invalid velocity magnitude scaling parameters {vel_lims}")
- if len(ang_lims) < 2:
- raise ValueError(f"Invalid velocity angle scaling parameters {ang_lims}")
- self.height = height
- self.width = width
-
- self.v_scale = (vel_lims[1] - vel_lims[0]) if vel_lims[1] != vel_lims[0] else 1.0
- self.a_scale = (ang_lims[1] - ang_lims[0]) if ang_lims[1] != ang_lims[0] else 1.0
- self.min_v = vel_lims[0]
- self.min_a = ang_lims[0]
-
+ super().__init__(cluster_eps, **kwargs)
+ if cluster_v_scale <= 0.0:
+ # Avoid divide by zero.
+ cluster_v_scale = 1e-12
+ self.cluster_v_scale = cluster_v_scale
self.cluster_type = "all"
def _build_clustering_data(self, result_data):
@@ -207,19 +169,9 @@ def _build_clustering_data(self, result_data):
# Create arrays of each the trajectories information.
x_arr = np.array(result_data["x"])
y_arr = np.array(result_data["y"])
- vx_arr = np.array(result_data["vx"])
- vy_arr = np.array(result_data["vy"])
- vel_arr = np.sqrt(np.square(vx_arr) + np.square(vy_arr))
- ang_arr = np.arctan2(vy_arr, vx_arr)
-
- # Scale the values. We always do this because distances and velocities
- # are not directly comparable.
- scaled_x = x_arr / self.width
- scaled_y = y_arr / self.height
- scaled_vel = (vel_arr - self.min_v) / self.v_scale
- scaled_ang = (ang_arr - self.min_a) / self.a_scale
-
- return np.array([scaled_x, scaled_y, scaled_vel, scaled_ang])
+ vx_arr = np.array(result_data["vx"]) * self.cluster_v_scale
+ vy_arr = np.array(result_data["vy"]) * self.cluster_v_scale
+ return np.array([x_arr, y_arr, vx_arr, vy_arr])
def apply_clustering(result_data, cluster_params):
@@ -232,7 +184,7 @@ def apply_clustering(result_data, cluster_params):
the filtering.
cluster_params : dict
Contains values concerning the image and search settings including:
- cluster_type, eps, height, width, scaled, vel_lims, ang_lims, and times.
+ cluster_type, cluster_eps, times, and cluster_v_scale (optional).
Raises
------
@@ -240,7 +192,7 @@ def apply_clustering(result_data, cluster_params):
Raises a TypeError if ``result_data`` is of an unsupported type.
"""
if "cluster_type" not in cluster_params:
- raise ValueError("Missing cluster_type parameter")
+ raise KeyError("Missing cluster_type parameter")
cluster_type = cluster_params["cluster_type"]
# Skip clustering if there is nothing to cluster.
@@ -249,36 +201,23 @@ def apply_clustering(result_data, cluster_params):
return
# Get the times used for prediction clustering.
+ if not "times" in cluster_params:
+ raise KeyError("Missing times parameter in the clustering parameters.")
all_times = np.sort(cluster_params["times"])
zeroed_times = np.array(all_times) - all_times[0]
# Do the clustering and the filtering.
if cluster_type == "all" or cluster_type == "pos_vel":
- filt = ClusterPosAngVelFilter(**cluster_params)
+ filt = ClusterPosVelFilter(**cluster_params)
elif cluster_type == "position" or cluster_type == "start_position":
cluster_params["pred_times"] = [0.0]
- cluster_params["scaled"] = True
- filt = ClusterPredictionFilter(**cluster_params)
- elif cluster_type == "position_unscaled" or cluster_type == "start_position_unscaled":
- cluster_params["pred_times"] = [0.0]
- cluster_params["scaled"] = False
filt = ClusterPredictionFilter(**cluster_params)
elif cluster_type == "mid_position":
cluster_params["pred_times"] = [np.median(zeroed_times)]
- cluster_params["scaled"] = True
- filt = ClusterPredictionFilter(**cluster_params)
- elif cluster_type == "mid_position_unscaled":
- cluster_params["pred_times"] = [np.median(zeroed_times)]
- cluster_params["scaled"] = False
filt = ClusterPredictionFilter(**cluster_params)
elif cluster_type == "start_end_position":
cluster_params["pred_times"] = [0.0, zeroed_times[-1]]
filt = ClusterPredictionFilter(**cluster_params)
- cluster_params["scaled"] = True
- elif cluster_type == "start_end_position_unscaled":
- cluster_params["pred_times"] = [0.0, zeroed_times[-1]]
- filt = ClusterPredictionFilter(**cluster_params)
- cluster_params["scaled"] = False
else:
raise ValueError(f"Unknown clustering type: {cluster_type}")
logger.info(f"Clustering {len(result_data)} results using {filt.get_filter_name()}")
diff --git a/src/kbmod/run_search.py b/src/kbmod/run_search.py
index 31478eb62..9f262c5ca 100644
--- a/src/kbmod/run_search.py
+++ b/src/kbmod/run_search.py
@@ -26,23 +26,6 @@ class SearchRunner:
def __init__(self):
pass
- def get_angle_limits(self, config):
- """Compute the angle limits based on the configuration information.
-
- Parameters
- ----------
- config : `SearchConfiguration`
- The configuration parameters
-
- Returns
- -------
- res : `list`
- A list with the minimum and maximum angle to search (in pixel space).
- """
- ang_min = config["average_angle"] - config["ang_arr"][0]
- ang_max = config["average_angle"] + config["ang_arr"][1]
- return [ang_min, ang_max]
-
def load_and_filter_results(self, search, config):
"""This function loads results that are output by the gpu grid search.
Results are loaded in chunks and evaluated to see if the minimum
@@ -249,13 +232,10 @@ def run_search(self, config, stack, trj_generator=None):
cluster_timer = kb.DebugTimer("clustering", logger)
mjds = [stack.get_obstime(t) for t in range(stack.img_count())]
cluster_params = {
- "ang_lims": self.get_angle_limits(config),
"cluster_type": config["cluster_type"],
- "eps": config["eps"],
+ "cluster_eps": config["cluster_eps"],
+ "cluster_v_scale": config["cluster_v_scale"],
"times": np.array(mjds),
- "vel_lims": config["v_arr"],
- "width": stack.get_width(),
- "height": stack.get_height(),
}
apply_clustering(keep, cluster_params)
cluster_timer.stop()
diff --git a/tests/test_clustering_filters.py b/tests/test_clustering_filters.py
index 2a77efa29..dfdce7ae1 100644
--- a/tests/test_clustering_filters.py
+++ b/tests/test_clustering_filters.py
@@ -40,30 +40,19 @@ def test_dbscan_position_results(self):
)
# Standard-ish params collapses to 2 clusters.
- f1 = ClusterPredictionFilter(eps=0.025, pred_times=[0.0], height=100, width=100)
+ f1 = ClusterPredictionFilter(cluster_eps=5.0, pred_times=[0.0])
self.assertEqual(f1.keep_indices(rs), [0, 3])
# Small eps is 4 clusters.
- f2 = ClusterPredictionFilter(eps=0.000015, pred_times=[0.0], height=100, width=100)
+ f2 = ClusterPredictionFilter(cluster_eps=0.000015, pred_times=[0.0])
self.assertEqual(f2.keep_indices(rs), [0, 3, 4, 5])
# Large scale means 1 cluster
- f3 = ClusterPredictionFilter(eps=0.025, pred_times=[0.0], height=1000, width=1000)
+ f3 = ClusterPredictionFilter(cluster_eps=5000.0, pred_times=[0.0])
self.assertEqual(f3.keep_indices(rs), [0])
- # If we do not scale everything at the same starting point is marked its own cluster at low eps
- # (0.025 pixels max distance).
- f4 = ClusterPredictionFilter(eps=0.025, pred_times=[0.0], height=100, width=100, scaled=False)
- self.assertEqual(f4.keep_indices(rs), [0, 3, 4, 5])
-
- # We regain the clustering power with a higher eps (3 pixels max distance).
- f5 = ClusterPredictionFilter(eps=3.0, pred_times=[0.0], height=100, width=100, scaled=False)
- self.assertEqual(f5.keep_indices(rs), [0, 3])
-
# Catch invalid parameters
- self.assertRaises(ValueError, ClusterPredictionFilter, eps=0.025, width=0)
- self.assertRaises(ValueError, ClusterPredictionFilter, eps=0.025, height=0)
- self.assertRaises(ValueError, ClusterPredictionFilter, eps=0.025, pred_times=[])
+ self.assertRaises(ValueError, ClusterPredictionFilter, cluster_eps=0.025, pred_times=[])
def test_dbscan_all_results(self):
"""Test clustering based on the median position of the trajectory."""
@@ -79,36 +68,19 @@ def test_dbscan_all_results(self):
)
# Start with 5 clusters
- f1 = ClusterPosAngVelFilter(eps=0.025, height=100, width=100, vel_lims=[0, 100], ang_lims=[0, 1.5])
+ f1 = ClusterPosVelFilter(cluster_eps=5.0)
self.assertEqual(f1.keep_indices(rs), [0, 1, 3, 4, 5])
# Larger eps is 3 clusters.
- f2 = ClusterPosAngVelFilter(eps=0.25, height=100, width=100, vel_lims=[0, 100], ang_lims=[0, 1.5])
+ f2 = ClusterPosVelFilter(cluster_eps=20.0)
self.assertEqual(f2.keep_indices(rs), [0, 1, 3])
- # Larger scale is 3 clusters.
- f3 = ClusterPosAngVelFilter(eps=0.025, height=100, width=100, vel_lims=[0, 5000], ang_lims=[0, 1.5])
- self.assertEqual(f3.keep_indices(rs), [0, 1, 3])
+ # Adding the scaling factor increases or reduces the impact of velocity.
+ f3 = ClusterPosVelFilter(cluster_eps=5.0, cluster_v_scale=5.0)
+ self.assertEqual(f3.keep_indices(rs), [0, 1, 3, 4, 5])
- # Catch invalid parameters
- self.assertRaises(
- ValueError,
- ClusterPosAngVelFilter,
- eps=0.025,
- height=100,
- width=100,
- vel_lims=[100],
- ang_lims=[0, 1.5],
- )
- self.assertRaises(
- ValueError,
- ClusterPosAngVelFilter,
- eps=0.025,
- height=100,
- width=100,
- vel_lims=[0, 5000],
- ang_lims=[1.5],
- )
+ f4 = ClusterPosVelFilter(cluster_eps=5.0, cluster_v_scale=1e-9)
+ self.assertEqual(f4.keep_indices(rs), [0, 3])
def test_dbscan_mid_pos(self):
rs = self._make_result_data(
@@ -124,21 +96,17 @@ def test_dbscan_mid_pos(self):
)
# Use the median time.
- f1 = ClusterPredictionFilter(eps=0.1, pred_times=[0.95], height=20, width=20)
+ f1 = ClusterPredictionFilter(cluster_eps=2.0, pred_times=[0.95])
self.assertEqual(f1.keep_indices(rs), [0, 1, 3, 6])
# Use different times.
- f2 = ClusterPredictionFilter(eps=0.1, pred_times=[10.0], height=20, width=20)
+ f2 = ClusterPredictionFilter(cluster_eps=2.0, pred_times=[10.0])
self.assertEqual(f2.keep_indices(rs), [0, 1, 3, 4, 5, 6])
# Use different times again.
- f3 = ClusterPredictionFilter(eps=0.1, pred_times=[0.001], height=20, width=20)
+ f3 = ClusterPredictionFilter(cluster_eps=2.0, pred_times=[0.001])
self.assertEqual(f3.keep_indices(rs), [0, 3, 5])
- # Try without scaling
- f4 = ClusterPredictionFilter(eps=0.1, pred_times=[10.95], height=20, width=20, scaled=False)
- self.assertEqual(f4.keep_indices(rs), [0, 1, 2, 3, 4, 5, 6])
-
def test_dbscan_start_end_pos(self):
rs = self._make_result_data(
[
@@ -156,18 +124,15 @@ def test_dbscan_start_end_pos(self):
]
)
- f1 = ClusterPredictionFilter(eps=3.0, pred_times=[10, 11.9], scaled=False)
+ f1 = ClusterPredictionFilter(cluster_eps=3.0, pred_times=[10, 11.9])
self.assertEqual(f1.keep_indices(rs), [0, 1, 4, 5, 8])
def test_clustering_results(self):
cluster_params = {
- "ang_lims": [0.0, 1.5],
"cluster_type": "all",
- "eps": 0.03,
+ "cluster_eps": 5.0,
+ "cluster_v_scale": 1.0,
"times": self.times,
- "width": 100,
- "height": 100,
- "vel_lims": [5.0, 40.0],
}
results = self._make_result_data(
@@ -182,6 +147,11 @@ def test_clustering_results(self):
apply_clustering(results, cluster_params)
self.assertEqual(len(results), 4)
+ # If we remove the weighting on velocity, then we drop to three clusters.
+ cluster_params["cluster_v_scale"] = 1e-16
+ apply_clustering(results, cluster_params)
+ self.assertEqual(len(results), 3)
+
# Try clustering with only positions.
results2 = self._make_result_data(
[
@@ -196,27 +166,10 @@ def test_clustering_results(self):
apply_clustering(results2, cluster_params)
self.assertEqual(len(results2), 3)
- # Try clustering with start and end positions
- results3 = self._make_result_data(
- [
- [10, 11, 1, 2],
- [10, 11, 10, 20],
- [40, 5, -1, 2],
- [5, 0, 1, 2],
- [5, 1, 1, 2],
- ]
- )
- cluster_params["cluster_type"] = "start_end_position"
- apply_clustering(results3, cluster_params)
- self.assertEqual(len(results3), 4)
-
# Try invalid or missing cluster_type.
cluster_params["cluster_type"] = "invalid"
self.assertRaises(ValueError, apply_clustering, results2, cluster_params)
- del cluster_params["cluster_type"]
- self.assertRaises(ValueError, apply_clustering, results2, cluster_params)
-
if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_regression_test.py b/tests/test_regression_test.py
index e8ac76de1..521a54e8c 100644
--- a/tests/test_regression_test.py
+++ b/tests/test_regression_test.py
@@ -271,7 +271,7 @@ def perform_search(im_stack, res_filename, default_psf):
"peak_offset": [3.0, 3.0],
"chunk_size": 1000000,
"stamp_type": "cpp_median",
- "eps": 0.03,
+ "cluster_eps": 20.0,
"gpu_filter": True,
"clip_negative": True,
"x_pixel_buffer": 10,
From 613fc23a7f0cac177d20638be0a6c3f24b3f00df Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Fri, 13 Sep 2024 11:11:53 -0400
Subject: [PATCH 04/12] Make the configuration change
---
benchmarks/bench_filter_cluster.py | 123 -----------
docs/source/user_manual/results_filtering.rst | 18 +-
docs/source/user_manual/search_params.rst | 36 ++--
docs/source/user_manual/search_space.rst | 53 ++++-
notebooks/KBMOD_Demo.ipynb | 19 +-
notebooks/create_fake_data.ipynb | 14 +-
src/kbmod/configuration.py | 15 +-
src/kbmod/fake_data/demo_helper.py | 14 +-
src/kbmod/run_search.py | 21 +-
src/kbmod/trajectory_generator.py | 27 ++-
src/kbmod/work_unit.py | 200 +++++++++++++++++-
tests/test_configuration.py | 14 +-
tests/test_regression_test.py | 33 ++-
tests/test_trajectory_generator.py | 25 ++-
14 files changed, 372 insertions(+), 240 deletions(-)
delete mode 100644 benchmarks/bench_filter_cluster.py
diff --git a/benchmarks/bench_filter_cluster.py b/benchmarks/bench_filter_cluster.py
deleted file mode 100644
index 2d0a6fe09..000000000
--- a/benchmarks/bench_filter_cluster.py
+++ /dev/null
@@ -1,123 +0,0 @@
-import timeit
-import numpy as np
-
-from kbmod.filters.clustering_filters import *
-from kbmod.result_list import ResultList, ResultRow
-from kbmod.search import *
-
-
-def _make_data(objs, times):
- """Create a ResultList for the given objects.
-
- Parameters
- ----------
- obj : list of lists
- A list where each element specifies a Trajectory
- as [x, y, xv, yv].
-
- Returns
- -------
- ResultList
- """
- num_times = len(times)
- rs = ResultList(times, track_filtered=True)
- for x in objs:
- t = Trajectory()
- t.x = x[0]
- t.y = x[1]
- t.vx = x[2]
- t.vy = x[3]
- t.lh = 100.0
-
- row = ResultRow(t, num_times)
- rs.append_result(row)
- return rs
-
-
-def run_index_benchmark(filter, rs):
- tmr = timeit.Timer(stmt="filter.keep_indices(rs)", globals=locals())
- res_time = np.mean(tmr.repeat(repeat=10, number=20))
- return res_time
-
-
-def run_all_benchmarks():
- times = [(10.0 + 0.1 * float(i)) for i in range(20)]
- rs1 = _make_data(
- [
- [10, 11, 1, 2],
- [10, 11, 1000, -1000],
- [10, 11, 0.0, 0.0],
- [25, 24, 1.0, 1.0],
- [25, 26, 10.0, 10.0],
- [10, 12, 5, 5],
- ],
- times,
- )
-
- print("Method | Time")
- print("-" * 40)
-
- f1 = DBSCANFilter("position", 0.025, 100, 100, [0, 50], [0, 1.5], times)
-
- res_time = run_index_benchmark(f1, rs1)
- print(f"position - 2 clusters | {res_time:10.7f}")
-
- f2 = DBSCANFilter("position", 0.025, 100, 100, [0, 50], [0, 1.5], times)
-
- res_time = run_index_benchmark(f2, rs1)
- print(f"position - 4 clusters | {res_time:10.7f}")
-
- f3 = DBSCANFilter("position", 0.025, 1000, 1000, [0, 50], [0, 1.5], times)
-
- res_time = run_index_benchmark(f3, rs1)
- print(f"position - 1 cluster | {res_time:10.7f}")
-
- rs2 = _make_data(
- [
- [10, 11, 1, 2],
- [10, 11, 1000, -1000],
- [10, 11, 1.0, 2.1],
- [55, 54, 1.0, 1.0],
- [55, 56, 10.0, 10.0],
- [10, 12, 4.1, 8],
- ],
- times,
- )
-
- f4 = DBSCANFilter("all", 0.025, 100, 100, [0, 100], [0, 1.5], times)
-
- res_time = run_index_benchmark(f4, rs2)
- print(f"all - 5 clusters | {res_time:10.7f}")
-
- # Larger eps is 3 clusters.
- f5 = DBSCANFilter("all", 0.25, 100, 100, [0, 100], [0, 1.5], times)
-
- res_time = run_index_benchmark(f5, rs2)
- print(f"all - 3 clusters | {res_time:10.7f}")
-
- # Larger scale is 3 clusters.
- f6 = DBSCANFilter("all", 0.025, 100, 100, [0, 5000], [0, 1.5], times)
-
- res_time = run_index_benchmark(f6, rs2)
- print(f"all (larger) - 3 clusters | {res_time:10.7f}")
-
- rs3 = _make_data(
- [
- [10, 11, 1, 2],
- [10, 11, 2, 5],
- [10, 11, 1.01, 1.99],
- [21, 23, 1, 2],
- [21, 23, -10, -10],
- [5, 10, 6, 1],
- [5, 10, 1, 2],
- ],
- times,
- )
-
- f7 = DBSCANFilter("mid_position", 0.1, 20, 20, [0, 100], [0, 1.5], times)
- res_time = run_index_benchmark(f7, rs3)
- print(f"mid_position | {res_time:10.7f}")
-
-
-if __name__ == "__main__":
- run_all_benchmarks()
diff --git a/docs/source/user_manual/results_filtering.rst b/docs/source/user_manual/results_filtering.rst
index 5171af502..85ec1874f 100644
--- a/docs/source/user_manual/results_filtering.rst
+++ b/docs/source/user_manual/results_filtering.rst
@@ -54,25 +54,23 @@ In the extreme case imagine a bright object centered at (10, 15) and moving at (
But how do we tell which trajectories are "close"? If we only look at the pixel locations at a given time point (event t=0), we might combined two trajectories with very different velocities that happen to pass near the same pixel at that time. Even if this is not likely for real objects, we might merge a real object with a noisy false detection.
-The `scikit-learn `_ ``DBSCAN`` algorithm performs clustering the trajectories. The algorithm can cluster the results based on a combination of position, velocity, and angle as specified by the parameter cluster_type, which can take on the values of:
+The `scikit-learn `_ ``DBSCAN`` algorithm performs clustering of the trajectories. The algorithm can cluster the results based on a combination of position and velocity as specified by the parameter ``cluster_type``, which can take on the values of:
-* ``all`` - Use scaled x position, scaled y position, scaled velocity, and scaled angle as coordinates for clustering.
-* ``position`` - Use only the trajectory's scaled (x, y) position at the first timestep for clustering.
-* ``position_unscaled`` - Use only trajctory's (x, y) position at the first timestep for clustering.
-* ``mid_position`` - Use the (scaled) predicted position at the median time as coordinates for clustering.
-* ``mid_position_unscaled`` - Use the predicted position at the median time as coordinates for clustering.
-* ``start_end_position`` - Use the (scaled) predicted positions at the start and end times as coordinates for clustering.
-* ``start_end_position_unscaled`` - Use the predicted positions at the start and end times as coordinates for clustering.
+* ``all`` or ``pos_vel`` - Use a trajectory's position at the first time stamp (x, y) and velocity (vx, vy) for filtering
+* ``position`` or ``start_position`` - Use only the trajectory's (x, y) position at the first timestep for clustering.
+* ``mid_position`` - Use the predicted position at the median time as coordinates for clustering.
+* ``start_end_position`` - Use the predicted positions at the start and end times as coordinates for clustering.
Most of the clustering approaches rely on predicted positions at different times. For example midpoint-based clustering will encode each trajectory `(x0, y0, xv, yv)` as a 2-dimensional point `(x0 + tm * xv, y0 + tm + yv)` where `tm` is the median time. Thus trajectories only need to be close at time=`tm` to be merged into a single trajectory. In contrast the start and eng based clustering will encode the same trajectory as a 4-dimensional point (x0, y0, x0 + te * xv, y0 + te + yv)` where `te` is the last time. Thus the points will need to be close at both time=0.0 and time=`te` to be merged into a single result.
-Each of the positional based clusterings have both a scaled and unscaled version. This impacts how DBSCAN interprets distances. In the scaled version all values are divided by the width of the corresponding dimension to normalize the values. This maps points within the image to [0, 1], so an `eps` value of 0.01 might make sense. In contrast the unscaled versions do not perform normalization. The distances between two trajectories is measured in pixels. Here an `eps` value of 10 (for 10 pixels) might be better.
+The way DBSCAN computes distances between the trajectories depends on the encoding used. For positional encodings, such as ``position``, ``mid_position``, and ``start_end_position``, the distance is measured directly in pixels. The ``all`` encoding behaves somewhat similarly. However, since it combines positions and velocities (or change in pixels per day), they are not actually in the same space.
Relevant clustering parameters include:
* ``cluster_type`` - The types of predicted values to use when determining which trajectories should be clustered together, including position, velocity, and angles (if ``do_clustering = True``). Must be one of all, position, or mid_position.
* ``do_clustering`` - Cluster the resulting trajectories to remove duplicates.
-* ``eps`` - The distance threshold used by DBSCAN.
+* ``cluster_eps`` - The distance threshold used by DBSCAN.
+* ``cluster_v_scale`` - The relative scale between velocity differences and positional differences in ``all``.
See Also
________
diff --git a/docs/source/user_manual/search_params.rst b/docs/source/user_manual/search_params.rst
index 9c27042f5..1077bd3a1 100644
--- a/docs/source/user_manual/search_params.rst
+++ b/docs/source/user_manual/search_params.rst
@@ -8,14 +8,6 @@ This document serves to provide a quick overview of the existing parameters and
+------------------------+-----------------------------+----------------------------------------+
| **Parameter** | **Default Value** | **Interpretation** |
+------------------------+-----------------------------+----------------------------------------+
-| ``ang_arr`` | [np.pi/15, np.pi/15, 128] | Minimum, maximum and number of angles |
-| | | to search through (in radians) |
-+------------------------+-----------------------------+----------------------------------------+
-| ``average_angle`` | None | Overrides the ecliptic angle |
-| | | calculation and instead centers the |
-| | | average search around average_angle |
-| | | (in radians). |
-+------------------------+-----------------------------+----------------------------------------+
| ``center_thresh`` | 0.00 | The minimum fraction of total flux |
| | | within a stamp that must be contained |
| | | in the central pixel |
@@ -28,15 +20,20 @@ This document serves to provide a quick overview of the existing parameters and
| | | remove all negative values prior to |
| | | computing the percentiles. |
+------------------------+-----------------------------+----------------------------------------+
+| ``cluster_eps``  | 20.0 | The threshold to use for clustering |
+| | | similar results. |
++------------------------+-----------------------------+----------------------------------------+
| ``cluster_type`` | all | Types of predicted values to use when |
| | | determining trajectories to clustered |
-| | | together, including position, velocity,|
-| | | and angles (if do_clustering = True). |
+| | | together, including position and |
+| | | velocities (if do_clustering = True). |
| | | Options include: ``all``, ``position``,|
-| | | ``position_unscaled``, ``mid_position``|
-| | | ``mid_position_unscaled``, |
-| | | ``start_end_position``, or |
-| | | ``start_end_position_unscaled``. |
+| | | ``mid_position``, and |
+| | | ``start_end_position`` |
++------------------------+-----------------------------+----------------------------------------+
+| ``cluster_v_scale`` | 1.0 | The weight of differences in velocity |
+| | | relative to differences in distances |
+| | | during clustering. |
+------------------------+-----------------------------+----------------------------------------+
| ``coadds`` | [] | A list of additional coadds to create. |
| | | These are not used in filtering, but |
@@ -56,10 +53,6 @@ This document serves to provide a quick overview of the existing parameters and
| ``do_stamp_filter`` | True | Apply post-search filtering on the |
| | | image stamps. |
+------------------------+-----------------------------+----------------------------------------+
-| ``eps`` | 0.03 | The epsilon value to use in DBSCAN |
-| | | clustering (if ``cluster_type=DBSCAN`` |
-| | | and ``do_clustering=True``). |
-+------------------------+-----------------------------+----------------------------------------+
| ``encode_num_bytes`` | -1 | The number of bytes to use to encode |
| | | ``psi`` and ``phi`` images on GPU. By |
| | | default a ``float`` encoding is used. |
@@ -148,17 +141,12 @@ This document serves to provide a quick overview of the existing parameters and
| | | if: |
| | | * ``sum`` - (default) Per pixel sum |
| | | * ``median`` - A per pixel median |
-| | | * ``mean`` - A per pixel mean |\
+| | | * ``mean`` - A per pixel mean |
+------------------------+-----------------------------+----------------------------------------+
| ``track_filtered`` | False | A Boolean indicating whether to track |
| | | the filtered trajectories. Warning |
| | | can use a lot of memory. |
+------------------------+-----------------------------+----------------------------------------+
-| ``v_arr`` | [92.0, 526.0, 256] | Minimum, maximum and number of |
-| | | velocities to search through. The |
-| | | minimum and maximum velocities are |
-| | | specified in pixels per day. |
-+------------------------+-----------------------------+----------------------------------------+
| ``x_pixel_bounds`` | None | A length two list giving the starting |
| | | and ending x pixels to use for the |
| | | search. `None` uses the image bounds. |
diff --git a/docs/source/user_manual/search_space.rst b/docs/source/user_manual/search_space.rst
index 939f1a1cd..a39b139b1 100644
--- a/docs/source/user_manual/search_space.rst
+++ b/docs/source/user_manual/search_space.rst
@@ -33,7 +33,8 @@ Choosing Velocities
Perhaps the most complex aspect of the KBMOD algorithm is how it defines the grid of search velocities. KBMOD allows you to define custom search strategies to best match the data. These include:
* ``SingleVelocitySearch`` - A single predefined x and y velocity
* ``VelocityGridSearch`` - An evenly spaced grid of x and y velocities
-* ``KBMODV1SearchConfig`` - An evenly spaced grid of velocity magnitudes and angles (this was the only option in v1.1 and before)
+* ``EclipticSearch`` - An evenly spaced grid of velocity magnitudes and angles (using the current parameterization).
+* ``KBMODV1SearchConfig`` - An evenly spaced grid of velocity magnitudes and angles (using the legacy parameterization).
* ``RandomVelocitySearch`` - Randomly sampled x and y velocities
Additional search strategies can be defined by overriding the ``TrajectoryGenerator`` class in trajectory_generator.py.
@@ -77,6 +78,56 @@ The ``VelocityGridSearch`` strategy searches a uniform grid of x and y velocitie
| ``max_vy`` | The maximum velocity in the y-dimension (pixels per day). |
+------------------------+-----------------------------------------------------------+
+EclipticSearch
+--------------
+
+The grid is defined by two sets of parameters: a sampling of absolute velocities in pixels per day and a sampling of the velocities' angles in degrees or radians. Each sampling consists of values defining the range and number of sampling steps.
+
+Given the linear sampling for both velocities and angles, the full set of candidate trajectories is computed as::
+
+
+ for (int a = 0; a < angleSteps; ++a) {
+ for (int v = 0; v < velocitySteps; ++v) {
+ searchList[a * velocitySteps + v].xVel = cos(angles[a]) * velocities[v];
+ searchList[a * velocitySteps + v].yVel = sin(angles[a]) * velocities[v];
+ }
+ }
+
+where ``angles`` contains the list of angles to test and ``velocities`` contains the list of velocities.
+
+The list of velocities is created from the given bounds (``min_vel`` and ``max_vel``) and number of steps (``vel_steps``). The range is inclusive of both bounds.
+
+Each angle in the list is computed as an **offset** from the ecliptic angle. KBMOD uses the following ordering for extracting the ecliptic.
+1. If ``force_ecliptic`` is provided (is not ``None``) in the generator’s configuration that value is used directly.
+2. If the first image has a WCS, the ecliptic is estimated from that WCS.
+3. A default ecliptic of 0.0 is used.
+The list of angles spans ``[ecliptic + min_ang_offset, ecliptic + max_ang_offset]`` inclusive of both bounds. Angles can be specified in degrees or radians (as noted by the ``angle_units`` parameter) but must be consistent among all angles.
+
+
++------------------------+-----------------------------------------------------+
+| **Parameter** | **Interpretation** |
++------------------------+-----------------------------------------------------+
+| ``ang_steps`` | The number of angle steps. |
++------------------------+-----------------------------------------------------+
+| ``angle_units`` | The units to use for angles. |
+| | Either 'degrees' or 'radians' |
++------------------------+-----------------------------------------------------+
+| ``min_ang_offset``  | The minimum angular offset from the ecliptic |
+| | (in either radians or degrees). |
++------------------------+-----------------------------------------------------+
+| ``max_ang_offset``  | The maximum angular offset from the ecliptic |
+| | (in either radians or degrees). |
++------------------------+-----------------------------------------------------+
+| ``force_ecliptic``  | The given value of the ecliptic angle |
+| | (in either radians or degrees). |
++------------------------+-----------------------------------------------------+
+| ``vel_steps``  | The number of velocity steps. |
++------------------------+-----------------------------------------------------+
+| ``min_vel``  | The minimum velocity magnitude (in pixels per day). |
++------------------------+-----------------------------------------------------+
+| ``max_vel``  | The maximum velocity magnitude (in pixels per day). |
++------------------------+-----------------------------------------------------+
+
KBMODV1SearchConfig
-------------------
diff --git a/notebooks/KBMOD_Demo.ipynb b/notebooks/KBMOD_Demo.ipynb
index c21cdea39..9507b54b1 100644
--- a/notebooks/KBMOD_Demo.ipynb
+++ b/notebooks/KBMOD_Demo.ipynb
@@ -122,9 +122,19 @@
"ang_arr = [ang_below, ang_above, ang_steps]\n",
"\n",
"input_parameters = {\n",
- " # Grid search parameters\n",
- " \"v_arr\": v_arr,\n",
- " \"ang_arr\": ang_arr,\n",
+ " # Use search parameters (including a force ecliptic angle of 0.0)\n",
+ " # to match what we know is in the demo data.\n",
+ " \"generator_config\": {\n",
+ " \"name\": \"EclipticSearch\",\n",
+ " \"vel_steps\": 21,\n",
+ " \"min_vel\": 0.0,\n",
+ " \"max_vel\": 20.0,\n",
+ " \"ang_steps\": 11,\n",
+ " \"min_ang_offset\": -0.5,\n",
+ " \"max_ang_offset\": 0.5,\n",
+ " \"angle_units\": \"radians\",\n",
+ " \"force_ecliptic\": 0.0,\n",
+ " },\n",
" # Output parameters\n",
" \"res_filepath\": res_filepath,\n",
" \"output_suffix\": results_suffix,\n",
@@ -142,9 +152,6 @@
" # Some basic stamp filtering limits.\n",
" \"mom_lims\": [37.5, 37.5, 1.5, 1.0, 1.0],\n",
" \"peak_offset\": [3.0, 3.0],\n",
- " # Override the ecliptic angle for the demo data since we\n",
- " # know the true angle in pixel space.\n",
- " \"average_angle\": 0.0,\n",
"}\n",
"config = SearchConfiguration.from_dict(input_parameters)"
]
diff --git a/notebooks/create_fake_data.ipynb b/notebooks/create_fake_data.ipynb
index 489789c42..4faa3f0fc 100644
--- a/notebooks/create_fake_data.ipynb
+++ b/notebooks/create_fake_data.ipynb
@@ -149,9 +149,17 @@
"\n",
"settings = {\n",
" # Override the search data to match the known object.\n",
- " \"average_angle\": 0.0,\n",
- " \"v_arr\": [0, 20, 20],\n",
- " \"ang_arr\": [0.5, 0.5, 10],\n",
+ " \"generator_config\": {\n",
+ " \"name\": \"EclipticSearch\",\n",
+ " \"vel_steps\": 21,\n",
+ " \"min_vel\": 0.0,\n",
+ " \"max_vel\": 20.0,\n",
+ " \"ang_steps\": 11,\n",
+ " \"min_ang_offset\": -0.5,\n",
+ " \"max_ang_offset\": 0.5,\n",
+ " \"angle_units\": \"radians\",\n",
+ " \"force_ecliptic\": 0.0,\n",
+ " },\n",
" # Loosen the other filtering parameters.\n",
" \"clip_negative\": True,\n",
" \"sigmaG_lims\": [15, 60],\n",
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index 7ce545589..1c66b02cb 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -19,8 +19,6 @@ def __init__(self):
self._required_params = set()
self._params = {
- "ang_arr": [math.pi / 15, math.pi / 15, 128],
- "average_angle": None,
"center_thresh": 0.00,
"chunk_size": 500000,
"clip_negative": False,
@@ -33,7 +31,17 @@ def __init__(self):
"do_mask": True,
"do_stamp_filter": True,
"encode_num_bytes": -1,
- "generator_config": None,
+ "generator_config": {
+ "name": "EclipticSearch",
+ "vel_steps": 257,
+ "min_vel": 92.0,
+ "max_vel": 526.0,
+ "ang_steps": 129,
+ "min_ang_offset": -math.pi / 15,
+ "max_ang_offset": math.pi / 15,
+ "angle_units": "radians",
+ "force_ecliptic": None,
+ },
"gpu_filter": False,
"ind_output_files": True,
"im_filepath": None,
@@ -52,7 +60,6 @@ def __init__(self):
"stamp_radius": 10,
"stamp_type": "sum",
"track_filtered": False,
- "v_arr": [92.0, 526.0, 256],
"x_pixel_bounds": None,
"x_pixel_buffer": None,
"y_pixel_bounds": None,
diff --git a/src/kbmod/fake_data/demo_helper.py b/src/kbmod/fake_data/demo_helper.py
index 62c7f9fc5..5ca323cd0 100644
--- a/src/kbmod/fake_data/demo_helper.py
+++ b/src/kbmod/fake_data/demo_helper.py
@@ -38,9 +38,17 @@ def make_demo_data(filename=None):
# Create configuraiton settings that match the object inserted.
settings = {
# Override the search data to match the known object.
- "average_angle": 0.0,
- "v_arr": [0, 20, 20],
- "ang_arr": [0.5, 0.5, 10],
+ "generator_config": {
+ "name": "EclipticSearch",
+ "vel_steps": 21,
+ "min_vel": 0.0,
+ "max_vel": 20.0,
+ "ang_steps": 11,
+ "min_ang_offset": -0.5,
+ "max_ang_offset": 0.5,
+ "angle_units": "radians",
+ "force_ecliptic": 0.0,
+ },
# Loosen the other filtering parameters.
"clip_negative": True,
"sigmaG_lims": [15, 60],
diff --git a/src/kbmod/run_search.py b/src/kbmod/run_search.py
index b5bbd6111..68c54b917 100644
--- a/src/kbmod/run_search.py
+++ b/src/kbmod/run_search.py
@@ -11,8 +11,7 @@
from .filters.stamp_filters import append_all_stamps, append_coadds, get_coadds_and_filter_results
from .results import Results
-from .trajectory_generator import create_trajectory_generator, KBMODV1SearchConfig
-from .wcs_utils import calc_ecliptic_angle
+from .trajectory_generator import create_trajectory_generator
from .work_unit import WorkUnit
@@ -188,7 +187,7 @@ def do_gpu_search(self, config, stack, trj_generator):
keep = self.load_and_filter_results(search, config)
return keep
- def run_search(self, config, stack, trj_generator=None):
+ def run_search(self, config, stack, trj_generator=None, computed_ecliptic=None):
"""This function serves as the highest-level python interface for starting
a KBMOD search given an ImageStack and SearchConfiguration.
@@ -201,6 +200,10 @@ def run_search(self, config, stack, trj_generator=None):
trj_generator : `TrajectoryGenerator`, optional
The object to generate the candidate trajectories for each pixel.
If None uses the default KBMODv1 grid search
+ computed_ecliptic : `float`, optional
+ The computed ecliptic angle in the data from a WCS (if present).
+ Uses ``None`` if the information needed to compute the angle is not
+ available.
Returns
-------
@@ -219,7 +222,7 @@ def run_search(self, config, stack, trj_generator=None):
# Perform the actual search.
if trj_generator is None:
- trj_generator = create_trajectory_generator(config)
+ trj_generator = create_trajectory_generator(config, computed_ecliptic_angle=computed_ecliptic)
keep = self.do_gpu_search(config, stack, trj_generator)
if config["do_stamp_filter"]:
@@ -288,14 +291,8 @@ def run_search_from_work_unit(self, work):
keep : `Results`
The results.
"""
- # Set the average angle if it is not set.
- if work.config["average_angle"] is None:
- center_pixel = (work.im_stack.get_width() / 2, work.im_stack.get_height() / 2)
- if work.get_wcs(0) is not None:
- work.config.set("average_angle", calc_ecliptic_angle(work.get_wcs(0), center_pixel))
- else:
- logger.warning("Average angle not set and no WCS provided. Setting average_angle=0.0")
- work.config.set("average_angle", 0.0)
+ # If there is a WCS compute the ecliptic angle from it.
+ computed_ecliptic = work.compute_ecliptic_angle()
# Run the search.
return self.run_search(work.config, work.im_stack)
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index 4ecc2d7c0..c162a981a 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -34,16 +34,8 @@ def create_trajectory_generator(config, **kwargs):
# Check if we are dealing with a top level configuration.
if type(config) is SearchConfiguration:
if config["generator_config"] is None:
- # We are dealing with a legacy configuration file.
- gen = KBMODV1SearchConfig(
- v_arr=config["v_arr"],
- ang_arr=config["ang_arr"],
- average_angle=config["average_angle"],
- )
- return gen
- else:
- # We are dealing with a top level configuration.
- config = config["generator_config"]
+ raise ValueError("Missing generator_config parameter.")
+ config = config["generator_config"]
if "name" not in config:
raise KeyError("The trajectory generator configuration must contain a name field.")
@@ -307,9 +299,9 @@ def generate(self, *args, **kwargs):
class KBMODV1SearchConfig(KBMODV1Search):
- """Search a grid defined by velocities and angles in the format of the configuration file."""
+ """Search a grid defined by velocities and angles in the format of the legacy configuration file."""
- def __init__(self, v_arr, ang_arr, average_angle, **kwargs):
+ def __init__(self, v_arr, ang_arr, average_angle=None, computed_ecliptic_angle=None, **kwargs):
"""Create a class KBMODV1SearchConfig.
Parameters
@@ -320,15 +312,22 @@ def __init__(self, v_arr, ang_arr, average_angle, **kwargs):
ang_arr : `list`
A triplet of the minimum angle offset (in radians), and the maximum angle offset
(in radians), and the number of angles to try.
- average_angle : `float`
+ average_angle : `float`, optional
The central angle to search around. Should align with the ecliptic in most cases.
+ computed_ecliptic_angle : `float`, optional
+ An override for the computed ecliptic from a WCS (in the units defined in
+ ``angle_units``). This parameter is ignored if ``force_ecliptic`` is given.
"""
if len(v_arr) != 3:
raise ValueError("KBMODV1SearchConfig requires v_arr to be length 3")
if len(ang_arr) != 3:
raise ValueError("KBMODV1SearchConfig requires ang_arr to be length 3")
if average_angle is None:
- raise ValueError("KBMODV1SearchConfig requires a valid average_angle.")
+ if computed_ecliptic_angle is None:
+ raise ValueError(
+ "KBMODV1SearchConfig requires a valid average_angle or computed_ecliptic_angle."
+ )
+ average_angle = computed_ecliptic_angle
ang_min = average_angle - ang_arr[0]
ang_max = average_angle + ang_arr[1]
diff --git a/src/kbmod/work_unit.py b/src/kbmod/work_unit.py
index bf9e932bb..4f17a8af6 100644
--- a/src/kbmod/work_unit.py
+++ b/src/kbmod/work_unit.py
@@ -223,10 +223,7 @@ def compute_ecliptic_angle(self):
return calc_ecliptic_angle(wcs, center_pixel)
def get_all_obstimes(self):
- """Return a list of the observation times.
- If the `WorkUnit` was lazily loaded, then the obstimes
- have already been preloaded. Otherwise, grab them from
- the `ImageStack."""
+ """Return a list of the observation times."""
if self._obstimes is not None:
return self._obstimes
@@ -363,6 +360,153 @@ def from_fits(cls, filename, show_progress=None):
)
return result
+ @classmethod
+ def from_dict(cls, workunit_dict):
+ """Create a WorkUnit from a combined dictionary.
+
+ Parameters
+ ----------
+ workunit_dict : `dict`
+ The dictionary of information.
+
+ Returns
+ -------
+ `WorkUnit`
+
+ Raises
+ ------
+ Raises a ``ValueError`` for any invalid parameters.
+ """
+ num_images = workunit_dict["num_images"]
+ logger.debug(f"Creating WorkUnit from dictionary with {num_images} images.")
+
+ width = workunit_dict["width"]
+ height = workunit_dict["height"]
+ if width <= 0 or height <= 0:
+ raise ValueError(f"Illegal image dimensions width={width}, height={height}")
+
+ # Load the configuration supporting both dictionary and SearchConfiguration.
+ if type(workunit_dict["config"]) is dict:
+ config = SearchConfiguration.from_dict(workunit_dict["config"])
+ elif type(workunit_dict["config"]) is SearchConfiguration:
+ config = workunit_dict["config"]
+ else:
+ raise ValueError("Unrecognized type for WorkUnit config parameter.")
+
+ # Load the global WCS if one exists.
+ if "wcs" in workunit_dict:
+ if type(workunit_dict["wcs"]) is dict:
+ global_wcs = wcs_from_dict(workunit_dict["wcs"])
+ else:
+ global_wcs = workunit_dict["wcs"]
+ else:
+ global_wcs = None
+
+ constituent_images = workunit_dict["constituent_images"]
+ heliocentric_distance = workunit_dict["heliocentric_distance"]
+ geocentric_distances = workunit_dict["geocentric_distances"]
+ reprojected = workunit_dict["reprojected"]
+ per_image_indices = workunit_dict["per_image_indices"]
+
+ imgs = []
+ per_image_wcs = []
+ per_image_ebd_wcs = []
+ for i in range(num_images):
+ obs_time = workunit_dict["times"][i]
+
+ if type(workunit_dict["sci_imgs"][i]) is RawImage:
+ sci_img = workunit_dict["sci_imgs"][i]
+ else:
+ sci_arr = np.array(workunit_dict["sci_imgs"][i], dtype=np.float32).reshape(height, width)
+ sci_img = RawImage(img=sci_arr, obs_time=obs_time)
+
+ if type(workunit_dict["var_imgs"][i]) is RawImage:
+ var_img = workunit_dict["var_imgs"][i]
+ else:
+ var_arr = np.array(workunit_dict["var_imgs"][i], dtype=np.float32).reshape(height, width)
+ var_img = RawImage(img=var_arr, obs_time=obs_time)
+
+ # Masks are optional.
+ if workunit_dict["msk_imgs"][i] is None:
+ msk_arr = np.zeros(height, width)
+ msk_img = RawImage(img=msk_arr, obs_time=obs_time)
+ elif type(workunit_dict["msk_imgs"][i]) is RawImage:
+ msk_img = workunit_dict["msk_imgs"][i]
+ else:
+ msk_arr = np.array(workunit_dict["msk_imgs"][i], dtype=np.float32).reshape(height, width)
+ msk_img = RawImage(img=msk_arr, obs_time=obs_time)
+
+ # PSFs are optional.
+ if workunit_dict["psfs"][i] is None:
+ p = PSF()
+ elif type(workunit_dict["psfs"][i]) is PSF:
+ p = workunit_dict["psfs"][i]
+ else:
+ p = PSF(np.array(workunit_dict["psfs"][i], dtype=np.float32))
+
+ imgs.append(LayeredImage(sci_img, var_img, msk_img, p))
+
+ n_constituents = len(constituent_images)
+ for i in range(n_constituents):
+ # Read a per_image_wcs if one exists.
+ current_wcs = workunit_dict["per_image_wcs"][i]
+ if type(current_wcs) is dict:
+ current_wcs = wcs_from_dict(current_wcs)
+ per_image_wcs.append(current_wcs)
+
+ current_ebd = workunit_dict["per_image_ebd_wcs"][i]
+ if type(current_ebd) is dict:
+ current_ebd = wcs_from_dict(current_ebd)
+ per_image_ebd_wcs.append(current_ebd)
+
+ im_stack = ImageStack(imgs)
+ return WorkUnit(
+ im_stack=im_stack,
+ config=config,
+ wcs=global_wcs,
+ constituent_images=constituent_images,
+ per_image_wcs=per_image_wcs,
+ per_image_ebd_wcs=per_image_ebd_wcs,
+ heliocentric_distance=heliocentric_distance,
+ geocentric_distances=geocentric_distances,
+ reprojected=reprojected,
+ per_image_indices=per_image_indices,
+ )
+
+ @classmethod
+ def from_yaml(cls, work_unit, strict=False):
+ """Load a configuration from a YAML string.
+
+ Parameters
+ ----------
+ work_unit : `str` or `_io.TextIOWrapper`
+ The serialized YAML data.
+ strict : `bool`
+ Raise an error if the file is not a WorkUnit.
+
+ Returns
+ -------
+ result : `WorkUnit` or `None`
+ Returns the extracted WorkUnit. If the file did not contain a WorkUnit and
+ strict=False the function will return None.
+
+ Raises
+ ------
+ Raises a ``ValueError`` for any invalid parameters.
+ """
+ yaml_dict = safe_load(work_unit)
+
+ # Check if this a WorkUnit yaml file by checking it has the required fields.
+ required_fields = ["config", "height", "num_images", "sci_imgs", "times", "var_imgs", "width"]
+ for name in required_fields:
+ if name not in yaml_dict:
+ if strict:
+ raise ValueError(f"Missing required field {name}")
+ else:
+ return None
+
+ return WorkUnit.from_dict(yaml_dict)
+
def to_fits(self, filename, overwrite=False):
"""Write the WorkUnit to a single FITS file.
@@ -678,6 +822,54 @@ def append_all_wcs(self, hdul):
ebd_hdu.name = f"EBD_{i}"
hdul.append(ebd_hdu)
+ def to_yaml(self):
+ """Serialize the WorkUnit as a YAML string.
+
+ Returns
+ -------
+ result : `str`
+ The serialized YAML string.
+ """
+ workunit_dict = {
+ "num_images": self.im_stack.img_count(),
+ "width": self.im_stack.get_width(),
+ "height": self.im_stack.get_height(),
+ "config": self.config._params,
+ "wcs": wcs_to_dict(self.wcs),
+ # Per image data
+ "times": [],
+ "sci_imgs": [],
+ "var_imgs": [],
+ "msk_imgs": [],
+ "psfs": [],
+ "constituent_images": self.constituent_images,
+ "per_image_wcs": [],
+ "per_image_ebd_wcs": [],
+ "heliocentric_distance": self.heliocentric_distance,
+ "geocentric_distances": self.geocentric_distances,
+ "reprojected": self.reprojected,
+ "per_image_indices": self._per_image_indices,
+ }
+
+ # Fill in the per-image data.
+ for i in range(self.im_stack.img_count()):
+ layered = self.im_stack.get_single_image(i)
+ workunit_dict["times"].append(layered.get_obstime())
+ p = layered.get_psf()
+
+ workunit_dict["sci_imgs"].append(layered.get_science().image.tolist())
+ workunit_dict["var_imgs"].append(layered.get_variance().image.tolist())
+ workunit_dict["msk_imgs"].append(layered.get_mask().image.tolist())
+
+ psf_array = np.array(p.get_kernel()).reshape((p.get_dim(), p.get_dim()))
+ workunit_dict["psfs"].append(psf_array.tolist())
+
+ for i in range(len(self._per_image_wcs)):
+ workunit_dict["per_image_wcs"].append(wcs_to_dict(self._per_image_wcs[i]))
+ workunit_dict["per_image_ebd_wcs"].append(wcs_to_dict(self._per_image_ebd_wcs[i]))
+
+ return dump(workunit_dict)
+
def image_positions_to_original_icrs(
self, image_indices, positions, input_format="xy", output_format="xy", filter_in_frame=True
):
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index 9a29969a5..3c7aeb18c 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -73,7 +73,8 @@ def test_to_hdu(self):
"cluster_type": None,
"do_clustering": False,
"res_filepath": "There",
- "ang_arr": [1.0, 2.0, 3.0],
+ "generator_config": {"name": "test_gen", "p1": 1.0, "p2": 2.0},
+ "basic_array": [1.0, 2.0, 3.0],
}
config = SearchConfiguration.from_dict(d)
hdu = config.to_hdu()
@@ -82,7 +83,8 @@ def test_to_hdu(self):
self.assertEqual(hdu.data["num_obs"][0], "5\n...")
self.assertEqual(hdu.data["cluster_type"][0], "null\n...")
self.assertEqual(hdu.data["res_filepath"][0], "There\n...")
- self.assertEqual(hdu.data["ang_arr"][0], "[1.0, 2.0, 3.0]")
+ self.assertEqual(hdu.data["generator_config"][0], "{name: test_gen, p1: 1.0, p2: 2.0}")
+ self.assertEqual(hdu.data["basic_array"][0], "[1.0, 2.0, 3.0]")
def test_to_yaml(self):
d = {
@@ -91,7 +93,7 @@ def test_to_yaml(self):
"cluster_type": None,
"do_clustering": False,
"res_filepath": "There",
- "ang_arr": [1.0, 2.0, 3.0],
+ "generator_config": {"name": "test_gen", "p1": 1.0, "p2": 2.0},
}
config = SearchConfiguration.from_dict(d)
yaml_str = config.to_yaml()
@@ -101,9 +103,9 @@ def test_to_yaml(self):
self.assertEqual(yaml_dict["num_obs"], 5)
self.assertEqual(yaml_dict["cluster_type"], None)
self.assertEqual(yaml_dict["res_filepath"], "There")
- self.assertEqual(yaml_dict["ang_arr"][0], 1.0)
- self.assertEqual(yaml_dict["ang_arr"][1], 2.0)
- self.assertEqual(yaml_dict["ang_arr"][2], 3.0)
+ self.assertEqual(yaml_dict["generator_config"]["name"], "test_gen")
+ self.assertEqual(yaml_dict["generator_config"]["p1"], 1.0)
+ self.assertEqual(yaml_dict["generator_config"]["p2"], 2.0)
def test_save_and_load_yaml(self):
config = SearchConfiguration()
diff --git a/tests/test_regression_test.py b/tests/test_regression_test.py
index 521a54e8c..883dd7a03 100644
--- a/tests/test_regression_test.py
+++ b/tests/test_regression_test.py
@@ -238,32 +238,25 @@ def perform_search(im_stack, res_filename, default_psf):
The default PSF value to use when nothing is provided
in the PSF file.
"""
- v_min = 92.0 # Pixels/day
- v_max = 550.0
-
- # Manually set the average angle that will work with the (manually specified) tracks.
- average_angle = 1.1901106654050821
-
- # Offset by PI for prograde orbits in lori allen data
- ang_below = -np.pi + np.pi / 10.0 # Angle below ecliptic
- ang_above = np.pi + np.pi / 10.0 # Angle above ecliptic
- v_steps = 51
- ang_steps = 25
-
- v_arr = [v_min, v_max, v_steps]
- ang_arr = [ang_below, ang_above, ang_steps]
- num_obs = 15
-
input_parameters = {
"im_filepath": "./",
"res_filepath": None,
"result_filename": res_filename,
"psf_val": default_psf,
"output_suffix": "",
- "v_arr": v_arr,
- "average_angle": average_angle,
- "ang_arr": ang_arr,
- "num_obs": num_obs,
+ "generator_config": {
+ "name": "EclipticSearch",
+ "vel_steps": 52,
+ "min_vel": 92.0,
+ "max_vel": 550.0,
+ "ang_steps": 26,
+ # Offset by PI for prograde orbits in lori allen data
+ "min_ang_offset": np.pi - np.pi / 10.0,
+ "max_ang_offset": np.pi + np.pi / 10.0,
+ "angle_units": "radians",
+ "force_ecliptic": 1.1901106654050821,
+ },
+ "num_obs": 15,
"do_mask": True,
"lh_level": 25.0,
"sigmaG_lims": [25, 75],
diff --git a/tests/test_trajectory_generator.py b/tests/test_trajectory_generator.py
index cbd707fd7..e66706732 100644
--- a/tests/test_trajectory_generator.py
+++ b/tests/test_trajectory_generator.py
@@ -176,13 +176,21 @@ def test_create_trajectory_generator(self):
self.assertEqual(gen4.min_ang, 0.0)
self.assertEqual(gen4.max_ang, np.pi / 4.0)
+ # Fail with no name or a bad name.
+ self.assertRaises(KeyError, create_trajectory_generator, {})
+ self.assertRaises(KeyError, create_trajectory_generator, {"name": "Invalid_generator"})
+
def test_create_trajectory_generator_config(self):
config = SearchConfiguration()
- config.set("ang_arr", [0.5, 0.5, 30])
- config.set("average_angle", 0.0)
- config.set("v_arr", [0.0, 10.0, 100])
+ generator_config = {
+ "name": "KBMODV1SearchConfig",
+ "ang_arr": [0.5, 0.5, 30],
+ "average_angle": 0.0,
+ "v_arr": [0.0, 10.0, 100],
+ }
+ config.set("generator_config", generator_config)
- # Without a "generator_config" option, we fall back on the legacy generator.
+ # We process the legacy configuration correctly.
gen1 = create_trajectory_generator(config)
self.assertTrue(type(gen1) is KBMODV1SearchConfig)
self.assertEqual(gen1.vel_steps, 100)
@@ -192,12 +200,9 @@ def test_create_trajectory_generator_config(self):
self.assertEqual(gen1.min_ang, -0.5)
self.assertEqual(gen1.max_ang, 0.5)
- # Add a generator configuration.
- config.set("generator_config", {"name": "SingleVelocitySearch", "vx": 1, "vy": 2})
- gen2 = create_trajectory_generator(config)
- self.assertTrue(type(gen2) is SingleVelocitySearch)
- self.assertEqual(gen2.vx, 1)
- self.assertEqual(gen2.vy, 2)
+ # Fail if no generator configuration is provided.
+ config.set("generator_config", None)
+ self.assertRaises(ValueError, create_trajectory_generator, config)
if __name__ == "__main__":
From be971720ae3b243f6eecd8e80f0d5b4e154169a2 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Fri, 13 Sep 2024 12:36:50 -0400
Subject: [PATCH 05/12] Update clustering_filters.py
---
src/kbmod/filters/clustering_filters.py | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/src/kbmod/filters/clustering_filters.py b/src/kbmod/filters/clustering_filters.py
index cc6f5360e..db7015889 100644
--- a/src/kbmod/filters/clustering_filters.py
+++ b/src/kbmod/filters/clustering_filters.py
@@ -146,9 +146,8 @@ def __init__(self, cluster_eps, cluster_v_scale=1.0, **kwargs):
differences. Default: 1.0 (no difference).
"""
super().__init__(cluster_eps, **kwargs)
- if cluster_v_scale <= 0.0:
- # Avoid divide by zero.
- cluster_v_scale = 1e-12
+ if cluster_v_scale < 0.0:
+ raise ValueError("cluster_v_scale cannot be negative.")
self.cluster_v_scale = cluster_v_scale
self.cluster_type = "all"
From f2bf533a01b921b7eb0af2c3f9d9aacc8d82af79 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Fri, 13 Sep 2024 14:24:06 -0400
Subject: [PATCH 06/12] Fix bad merge
---
src/kbmod/work_unit.py | 200 +----------------------------------------
1 file changed, 4 insertions(+), 196 deletions(-)
diff --git a/src/kbmod/work_unit.py b/src/kbmod/work_unit.py
index 4f17a8af6..bf9e932bb 100644
--- a/src/kbmod/work_unit.py
+++ b/src/kbmod/work_unit.py
@@ -223,7 +223,10 @@ def compute_ecliptic_angle(self):
return calc_ecliptic_angle(wcs, center_pixel)
def get_all_obstimes(self):
- """Return a list of the observation times."""
+ """Return a list of the observation times.
+ If the `WorkUnit` was lazily loaded, then the obstimes
+ have already been preloaded. Otherwise, grab them from
+ the `ImageStack."""
if self._obstimes is not None:
return self._obstimes
@@ -360,153 +363,6 @@ def from_fits(cls, filename, show_progress=None):
)
return result
- @classmethod
- def from_dict(cls, workunit_dict):
- """Create a WorkUnit from a combined dictionary.
-
- Parameters
- ----------
- workunit_dict : `dict`
- The dictionary of information.
-
- Returns
- -------
- `WorkUnit`
-
- Raises
- ------
- Raises a ``ValueError`` for any invalid parameters.
- """
- num_images = workunit_dict["num_images"]
- logger.debug(f"Creating WorkUnit from dictionary with {num_images} images.")
-
- width = workunit_dict["width"]
- height = workunit_dict["height"]
- if width <= 0 or height <= 0:
- raise ValueError(f"Illegal image dimensions width={width}, height={height}")
-
- # Load the configuration supporting both dictionary and SearchConfiguration.
- if type(workunit_dict["config"]) is dict:
- config = SearchConfiguration.from_dict(workunit_dict["config"])
- elif type(workunit_dict["config"]) is SearchConfiguration:
- config = workunit_dict["config"]
- else:
- raise ValueError("Unrecognized type for WorkUnit config parameter.")
-
- # Load the global WCS if one exists.
- if "wcs" in workunit_dict:
- if type(workunit_dict["wcs"]) is dict:
- global_wcs = wcs_from_dict(workunit_dict["wcs"])
- else:
- global_wcs = workunit_dict["wcs"]
- else:
- global_wcs = None
-
- constituent_images = workunit_dict["constituent_images"]
- heliocentric_distance = workunit_dict["heliocentric_distance"]
- geocentric_distances = workunit_dict["geocentric_distances"]
- reprojected = workunit_dict["reprojected"]
- per_image_indices = workunit_dict["per_image_indices"]
-
- imgs = []
- per_image_wcs = []
- per_image_ebd_wcs = []
- for i in range(num_images):
- obs_time = workunit_dict["times"][i]
-
- if type(workunit_dict["sci_imgs"][i]) is RawImage:
- sci_img = workunit_dict["sci_imgs"][i]
- else:
- sci_arr = np.array(workunit_dict["sci_imgs"][i], dtype=np.float32).reshape(height, width)
- sci_img = RawImage(img=sci_arr, obs_time=obs_time)
-
- if type(workunit_dict["var_imgs"][i]) is RawImage:
- var_img = workunit_dict["var_imgs"][i]
- else:
- var_arr = np.array(workunit_dict["var_imgs"][i], dtype=np.float32).reshape(height, width)
- var_img = RawImage(img=var_arr, obs_time=obs_time)
-
- # Masks are optional.
- if workunit_dict["msk_imgs"][i] is None:
- msk_arr = np.zeros(height, width)
- msk_img = RawImage(img=msk_arr, obs_time=obs_time)
- elif type(workunit_dict["msk_imgs"][i]) is RawImage:
- msk_img = workunit_dict["msk_imgs"][i]
- else:
- msk_arr = np.array(workunit_dict["msk_imgs"][i], dtype=np.float32).reshape(height, width)
- msk_img = RawImage(img=msk_arr, obs_time=obs_time)
-
- # PSFs are optional.
- if workunit_dict["psfs"][i] is None:
- p = PSF()
- elif type(workunit_dict["psfs"][i]) is PSF:
- p = workunit_dict["psfs"][i]
- else:
- p = PSF(np.array(workunit_dict["psfs"][i], dtype=np.float32))
-
- imgs.append(LayeredImage(sci_img, var_img, msk_img, p))
-
- n_constituents = len(constituent_images)
- for i in range(n_constituents):
- # Read a per_image_wcs if one exists.
- current_wcs = workunit_dict["per_image_wcs"][i]
- if type(current_wcs) is dict:
- current_wcs = wcs_from_dict(current_wcs)
- per_image_wcs.append(current_wcs)
-
- current_ebd = workunit_dict["per_image_ebd_wcs"][i]
- if type(current_ebd) is dict:
- current_ebd = wcs_from_dict(current_ebd)
- per_image_ebd_wcs.append(current_ebd)
-
- im_stack = ImageStack(imgs)
- return WorkUnit(
- im_stack=im_stack,
- config=config,
- wcs=global_wcs,
- constituent_images=constituent_images,
- per_image_wcs=per_image_wcs,
- per_image_ebd_wcs=per_image_ebd_wcs,
- heliocentric_distance=heliocentric_distance,
- geocentric_distances=geocentric_distances,
- reprojected=reprojected,
- per_image_indices=per_image_indices,
- )
-
- @classmethod
- def from_yaml(cls, work_unit, strict=False):
- """Load a configuration from a YAML string.
-
- Parameters
- ----------
- work_unit : `str` or `_io.TextIOWrapper`
- The serialized YAML data.
- strict : `bool`
- Raise an error if the file is not a WorkUnit.
-
- Returns
- -------
- result : `WorkUnit` or `None`
- Returns the extracted WorkUnit. If the file did not contain a WorkUnit and
- strict=False the function will return None.
-
- Raises
- ------
- Raises a ``ValueError`` for any invalid parameters.
- """
- yaml_dict = safe_load(work_unit)
-
- # Check if this a WorkUnit yaml file by checking it has the required fields.
- required_fields = ["config", "height", "num_images", "sci_imgs", "times", "var_imgs", "width"]
- for name in required_fields:
- if name not in yaml_dict:
- if strict:
- raise ValueError(f"Missing required field {name}")
- else:
- return None
-
- return WorkUnit.from_dict(yaml_dict)
-
def to_fits(self, filename, overwrite=False):
"""Write the WorkUnit to a single FITS file.
@@ -822,54 +678,6 @@ def append_all_wcs(self, hdul):
ebd_hdu.name = f"EBD_{i}"
hdul.append(ebd_hdu)
- def to_yaml(self):
- """Serialize the WorkUnit as a YAML string.
-
- Returns
- -------
- result : `str`
- The serialized YAML string.
- """
- workunit_dict = {
- "num_images": self.im_stack.img_count(),
- "width": self.im_stack.get_width(),
- "height": self.im_stack.get_height(),
- "config": self.config._params,
- "wcs": wcs_to_dict(self.wcs),
- # Per image data
- "times": [],
- "sci_imgs": [],
- "var_imgs": [],
- "msk_imgs": [],
- "psfs": [],
- "constituent_images": self.constituent_images,
- "per_image_wcs": [],
- "per_image_ebd_wcs": [],
- "heliocentric_distance": self.heliocentric_distance,
- "geocentric_distances": self.geocentric_distances,
- "reprojected": self.reprojected,
- "per_image_indices": self._per_image_indices,
- }
-
- # Fill in the per-image data.
- for i in range(self.im_stack.img_count()):
- layered = self.im_stack.get_single_image(i)
- workunit_dict["times"].append(layered.get_obstime())
- p = layered.get_psf()
-
- workunit_dict["sci_imgs"].append(layered.get_science().image.tolist())
- workunit_dict["var_imgs"].append(layered.get_variance().image.tolist())
- workunit_dict["msk_imgs"].append(layered.get_mask().image.tolist())
-
- psf_array = np.array(p.get_kernel()).reshape((p.get_dim(), p.get_dim()))
- workunit_dict["psfs"].append(psf_array.tolist())
-
- for i in range(len(self._per_image_wcs)):
- workunit_dict["per_image_wcs"].append(wcs_to_dict(self._per_image_wcs[i]))
- workunit_dict["per_image_ebd_wcs"].append(wcs_to_dict(self._per_image_ebd_wcs[i]))
-
- return dump(workunit_dict)
-
def image_positions_to_original_icrs(
self, image_indices, positions, input_format="xy", output_format="xy", filter_in_frame=True
):
From bffaa7345cc7062300dcae2d83eacf8c03224095 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Mon, 16 Sep 2024 15:27:59 -0400
Subject: [PATCH 07/12] Address PR comments
---
docs/source/user_manual/search_space.rst | 31 +++----
notebooks/KBMOD_Demo.ipynb | 10 +--
notebooks/create_fake_data.ipynb | 10 +--
src/kbmod/configuration.py | 12 +--
src/kbmod/fake_data/demo_helper.py | 12 +--
src/kbmod/trajectory_generator.py | 109 +++++++++--------------
tests/test_configuration.py | 18 ++--
tests/test_regression_test.py | 12 +--
tests/test_trajectory_generator.py | 28 +++---
9 files changed, 97 insertions(+), 145 deletions(-)
diff --git a/docs/source/user_manual/search_space.rst b/docs/source/user_manual/search_space.rst
index a39b139b1..4c14d96d9 100644
--- a/docs/source/user_manual/search_space.rst
+++ b/docs/source/user_manual/search_space.rst
@@ -78,8 +78,8 @@ The ``VelocityGridSearch`` strategy searches a uniform grid of x and y velocitie
| ``max_vy`` | The maximum velocity in the y-dimension (pixels per day). |
+------------------------+-----------------------------------------------------------+
-EclipticSearch
---------------
+EclipticCenteredSearch
+----------------------
The grid is defined by two sets of parameters: a sampling of absolute velocities in pixels per day and a sampling of the velocities' angles in degrees or radians. Each sampling consists of values defining the range and number of sampling steps.
@@ -95,37 +95,32 @@ Given the linear sampling for both velocities and angles, the full set of candid
where ``angles`` contains the list of angles to test and ``velocities`` contains the list of velocities.
-The list of velocities is created from the given bounds (``min_vel`` and ``max_vel``) and number of steps (``vel_steps``). The range is inclusive of both bounds.
+The list of velocities is created from the given bounds list ``velocities=[min_vel, max_vel, vel_steps]``. The range is inclusive of both bounds.
Each angle in the list is computed as an **offset** from the ecliptic angle. KBMOD uses the following ordering for extracting the ecliptic.
-1. If ``force_ecliptic`` is provided (is not ``None``) in the generator’s configuration that value is used directly.
+1. If ``given_ecliptic`` is provided (is not ``None``) in the generator’s configuration that value is used directly.
2. If the first image has a WCS, the ecliptic is estimated from that WCS.
3. A default ecliptic of 0.0 is used.
-The list of angles spans ``[ecliptic + min_ang_offset, ecliptic + max_ang_offset]`` inclusive of both bounds. Angles can be specified in degrees or radians (as noted by the ``angle_units`` parameter) but must be consistent among all angles.
+The list of angles used is defined from the list ``angles=[min_offset, max_offset, angle_steps]`` and will span ``[ecliptic + min_offset, ecliptic + max_offset]`` inclusive of both bounds. Angles can be specified in degrees or radians (as noted by the ``angle_units`` parameter) but must be consistent among all angles.
+------------------------+-----------------------------------------------------+
| **Parameter** | **Interpretation** |
+------------------------+-----------------------------------------------------+
-| ``ang_steps`` | The number of angle steps. |
+| ``angles`` | A length 3 tuple with the minimum angle offset, |
+| | the maximum offset, and the number of angles to |
+| | to search through (angles specified in either |
+| | radians or degrees) |
+------------------------+-----------------------------------------------------+
| ``angle_units`` | The units to use for angles. |
| | Either 'degrees' or 'radians' |
+------------------------+-----------------------------------------------------+
-| ``min_ang_offset `` | The minimum angular offset from the ecliptic |
-| | (in either radians or degrees). |
-+------------------------+-----------------------------------------------------+
-| ``max_ang_offset `` | The maximum angular offset from the ecliptic |
+| ``given_ecliptic `` | The given value of the ecliptic angle |
| | (in either radians or degrees). |
+------------------------+-----------------------------------------------------+
-| ``force_ecliptic `` | The given value of the ecliptic angle |
-| | (in either radians or degrees). |
-+------------------------+-----------------------------------------------------+
-| ``vel_steps `` | The number of velocity steps. |
-+------------------------+-----------------------------------------------------+
-| ``min_vel `` | The minimum velocity magnitude (in pixels per day). |
-+------------------------+-----------------------------------------------------+
-| ``max_vel `` | The maximum velocity magnitude (in pixels per day). |
+| ``velocities `` | A length 3 tuple with the minimum velocity (in |
+| | pixels per day), the maximum velocity (in pixels |
+| | per day), and number of velocities to test. |
+------------------------+-----------------------------------------------------+
diff --git a/notebooks/KBMOD_Demo.ipynb b/notebooks/KBMOD_Demo.ipynb
index 9507b54b1..5fa253287 100644
--- a/notebooks/KBMOD_Demo.ipynb
+++ b/notebooks/KBMOD_Demo.ipynb
@@ -125,13 +125,9 @@
" # Use search parameters (including a force ecliptic angle of 0.0)\n",
" # to match what we know is in the demo data.\n",
" \"generator_config\": {\n",
- " \"name\": \"EclipticSearch\",\n",
- " \"vel_steps\": 21,\n",
- " \"min_vel\": 0.0,\n",
- " \"max_vel\": 20.0,\n",
- " \"ang_steps\": 11,\n",
- " \"min_ang_offset\": -0.5,\n",
- " \"max_ang_offset\": 0.5,\n",
+ " \"name\": \"EclipticCenteredSearch\",\n",
+ " \"angles\": [-0.5, 0.5, 11],\n",
+ " \"velocities\": [0.0, 20.0, 21],\n",
" \"angle_units\": \"radians\",\n",
" \"force_ecliptic\": 0.0,\n",
" },\n",
diff --git a/notebooks/create_fake_data.ipynb b/notebooks/create_fake_data.ipynb
index 4faa3f0fc..86886acbc 100644
--- a/notebooks/create_fake_data.ipynb
+++ b/notebooks/create_fake_data.ipynb
@@ -150,13 +150,9 @@
"settings = {\n",
" # Override the search data to match the known object.\n",
" \"generator_config\": {\n",
- " \"name\": \"EclipticSearch\",\n",
- " \"vel_steps\": 21,\n",
- " \"min_vel\": 0.0,\n",
- " \"max_vel\": 20.0,\n",
- " \"ang_steps\": 11,\n",
- " \"min_ang_offset\": -0.5,\n",
- " \"max_ang_offset\": 0.5,\n",
+ " \"name\": \"EclipticCenteredSearch\",\n",
+ " \"angles\": [-0.5, 0.5, 11],\n",
+ " \"velocities\": [0.0, 20.0, 21],\n",
" \"angle_units\": \"radians\",\n",
" \"force_ecliptic\": 0.0,\n",
" },\n",
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index 1c66b02cb..94c951659 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -32,15 +32,11 @@ def __init__(self):
"do_stamp_filter": True,
"encode_num_bytes": -1,
"generator_config": {
- "name": "EclipticSearch",
- "vel_steps": 257,
- "min_vel": 92.0,
- "max_vel": 526.0,
- "ang_steps": 129,
- "min_ang_offset": -math.pi / 15,
- "max_ang_offset": math.pi / 15,
+ "name": "EclipticCenteredSearch",
+ "velocities": [92.0, 526.0, 257],
+ "angles": [-math.pi / 15, math.pi / 15, 129],
"angle_units": "radians",
- "force_ecliptic": None,
+ "given_ecliptic": None,
},
"gpu_filter": False,
"ind_output_files": True,
diff --git a/src/kbmod/fake_data/demo_helper.py b/src/kbmod/fake_data/demo_helper.py
index 5ca323cd0..5af7e4ce7 100644
--- a/src/kbmod/fake_data/demo_helper.py
+++ b/src/kbmod/fake_data/demo_helper.py
@@ -39,15 +39,11 @@ def make_demo_data(filename=None):
settings = {
# Override the search data to match the known object.
"generator_config": {
- "name": "EclipticSearch",
- "vel_steps": 21,
- "min_vel": 0.0,
- "max_vel": 20.0,
- "ang_steps": 11,
- "min_ang_offset": -0.5,
- "max_ang_offset": 0.5,
+ "name": "EclipticCenteredSearch",
+ "velocities": [0, 20.0, 21],
+ "angles": [-0.5, 0.5, 11],
"angle_units": "radians",
- "force_ecliptic": 0.0,
+ "given_ecliptic": 0.0,
},
# Loosen the other filtering parameters.
"clip_negative": True,
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index c162a981a..388198f57 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -334,25 +334,19 @@ def __init__(self, v_arr, ang_arr, average_angle=None, computed_ecliptic_angle=N
super().__init__(v_arr[2], v_arr[0], v_arr[1], ang_arr[2], ang_min, ang_max, **kwargs)
-class EclipticSearch(TrajectoryGenerator):
+class EclipticCenteredSearch(TrajectoryGenerator):
"""Search a grid defined by velocities and angles relative to the ecliptic.
Attributes
----------
ecliptic_angle : `float`
The angle to use for the ecliptic in the image (in the units defined in ``angle_units``).
- vel_steps : `int`
- The number of velocity steps.
- min_vel : `float`
- The minimum velocity magnitude (in pixels per day)
- max_vel : `float`
- The maximum velocity magnitude (in pixels per day)
- ang_steps : `int`
- The number of angle steps.
- min_ang_offset : `float`
- The minimum offset of the search angle relative from the ecliptic_angle (in radians).
- max_ang_offset : `float`
- The maximum offset of the search angle relative from the ecliptic_angle (in radians).
+ velocities : `list`
+ A triplet of the minimum velocity to use (in pixels per day), and the maximum velocity
+ magnitude (in pixels per day), and the number of velocity steps to try.
+ angles : `list`
+ A triplet of the minimum angle offset (in radians), and the maximum angle offset (in radians),
+ and the number of angles to try.
min_ang : `float`
The minimum search angle in the image (ecliptic_angle + min_ang_offset) in radians.
max_ang : `float`
@@ -361,39 +355,27 @@ class EclipticSearch(TrajectoryGenerator):
def __init__(
self,
- vel_steps=0,
- min_vel=0.0,
- max_vel=0.0,
- ang_steps=0,
- min_ang_offset=0.0,
- max_ang_offset=0.0,
+ velocities=[0.0, 0.0, 0],
+ angles=[0.0, 0.0, 0],
angle_units="radians",
- force_ecliptic=None,
+ given_ecliptic=None,
computed_ecliptic_angle=None,
**kwargs,
):
- """Create a class KBMODV1Search.
+ """Create a class EclipticCenteredSearch.
Parameters
----------
- vel_steps : `int`
- The number of velocity steps.
- min_vel : `float`
- The minimum velocity magnitude (in pixels per day)
- max_vel : `float`
- The maximum velocity magnitude (in pixels per day)
- ang_steps : `int`
- The number of angle steps.
- min_ang_offset : `float`
- The minimum offset of the search angle relative from the ecliptic_angle
- (in the units defined in ``angle_units``).
- max_ang_offset : `float`
- The maximum offset of the search angle relative from the ecliptic_angle
- (in the units defined in ``angle_units``).
+ velocities : `list`
+ A triplet of the minimum velocity to use (in pixels per day), and the maximum velocity
+ magnitude (in pixels per day), and the number of velocity steps to try.
+ angles : `list`
+ A triplet of the minimum angle offset (in the units defined in ``angle_units``), the maximum
+ angle offset (in the units defined in ``angle_units``), and the number of angles to try.
angle_units : `str`
The units for the angle. Must be one of radians or degrees.
Default: 'radians'
- force_ecliptic : `float`, optional
+ given_ecliptic : `float`, optional
An override for the ecliptic as given in the config (in the units defined in
+ ``angle_units``). This angle takes precedence over ``computed_ecliptic_angle``.
computed_ecliptic_angle : `float`, optional
@@ -402,8 +384,8 @@ def __init__(
"""
super().__init__(**kwargs)
- if force_ecliptic is not None:
- ecliptic_angle = force_ecliptic
+ if given_ecliptic is not None:
+ ecliptic_angle = given_ecliptic
elif computed_ecliptic_angle is not None:
ecliptic_angle = computed_ecliptic_angle
else:
@@ -411,48 +393,43 @@ def __init__(
logger.warning("No ecliptic angle provided. Using 0.0.")
ecliptic_angle = 0.0
- if vel_steps < 1 or ang_steps < 1:
- raise ValueError("EclipticSearch requires at least 1 step in each dimension")
- if max_vel < min_vel:
- raise ValueError("Invalid EclipticSearch bounds.")
+ if velocities[2] < 1 or angles[2] < 1:
+ raise ValueError("EclipticCenteredSearch requires at least 1 step in each dimension")
+ if velocities[1] < velocities[0]:
+ raise ValueError(f"Invalid EclipticCenteredSearch bounds: {velocities}")
- self.vel_steps = vel_steps
- self.min_vel = min_vel
- self.max_vel = max_vel
- self.vel_stepsize = (self.max_vel - self.min_vel) / float(self.vel_steps - 1)
+ self.velocities = velocities
+ self.vel_stepsize = (velocities[1] - velocities[0]) / float(velocities[2] - 1)
# Convert the angles into radians.
- if angle_units[:3] == "rad":
- self.ecliptic_angle = ecliptic_angle
- self.min_ang_offset = min_ang_offset
- self.max_ang_offset = max_ang_offset
- elif angle_units[:3] == "deg":
+ self.angles = angles
+ self.ecliptic_angle = ecliptic_angle
+ if angle_units[:3] == "deg":
deg_to_rad = math.pi / 180.0
- self.ecliptic_angle = deg_to_rad * ecliptic_angle
- self.min_ang_offset = deg_to_rad * min_ang_offset
- self.max_ang_offset = deg_to_rad * max_ang_offset
- else:
+ self.ecliptic_angle = deg_to_rad * self.ecliptic_angle
+ self.angles[0] = deg_to_rad * self.angles[0]
+ self.angles[1] = deg_to_rad * self.angles[1]
+ elif angle_units[:3] != "rad":
raise ValueError(f"Unknown angular units {angle_units}")
- self.ang_steps = ang_steps
- self.min_ang = self.ecliptic_angle + self.min_ang_offset
- self.max_ang = self.ecliptic_angle + self.max_ang_offset
- self.ang_stepsize = (self.max_ang - self.min_ang) / float(self.ang_steps - 1)
+ self.min_ang = self.ecliptic_angle + self.angles[0]
+ self.max_ang = self.ecliptic_angle + self.angles[1]
+ self.ang_stepsize = (self.max_ang - self.min_ang) / float(self.angles[2] - 1)
def __repr__(self):
return (
-            "EclipticSearch:"
-            f" v=[{self.min_vel}, {self.max_vel}], {self.vel_steps}"
-            f" a=[{self.min_ang}, {self.max_ang}], {self.ang_steps}"
+            "EclipticCenteredSearch:"
+            f" v=[{self.velocities[0]}, {self.velocities[1]}], {self.velocities[2]}"
+            f" a=[{self.min_ang}, {self.max_ang}], {self.angles[2]}"
)
def __str__(self):
-        return f"""EclipticSearch:
+        return f"""EclipticCenteredSearch:
- Vel: [{self.min_vel}, {self.max_vel}] in {self.vel_steps} steps.
+ Vel: [{self.velocities[0]}, {self.velocities[1]}] in {self.velocities[2]} steps.
Ang:
Ecliptic = {self.ecliptic_angle}
- Offsets = {self.min_ang_offset} to {self.max_ang_offset}
- [{self.min_ang}, {self.max_ang}] in {self.ang_steps} steps."""
+ Offsets = {self.angles[0]} to {self.angles[1]}
+ [{self.min_ang}, {self.max_ang}] in {self.angles[2]} steps."""
def generate(self, *args, **kwargs):
"""Produces a single candidate trajectory to test.
@@ -462,10 +439,10 @@ def generate(self, *args, **kwargs):
candidate : `Trajectory`
A ``Trajectory`` to test at each pixel.
"""
- for ang_i in range(self.ang_steps):
- for vel_i in range(self.vel_steps):
+ for ang_i in range(self.angles[2]):
+ for vel_i in range(self.velocities[2]):
curr_ang = self.min_ang + ang_i * self.ang_stepsize
- curr_vel = self.min_vel + vel_i * self.vel_stepsize
+ curr_vel = self.velocities[0] + vel_i * self.vel_stepsize
vx = math.cos(curr_ang) * curr_vel
vy = math.sin(curr_ang) * curr_vel
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index 3c7aeb18c..e93a49f91 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -52,11 +52,10 @@ def test_from_hdu(self):
["Here3"],
["7"],
["null"],
- [
- "[1, 2]",
- ],
+ ["[1, 2]"],
+ ["{name: test_gen, p1: [1.0, 2.0], p2: 2.0}"],
],
- names=("im_filepath", "num_obs", "cluster_type", "ang_arr"),
+ names=("im_filepath", "num_obs", "cluster_type", "ang_arr", "generator_config"),
)
hdu = fits.table_to_hdu(t)
@@ -64,6 +63,9 @@ def test_from_hdu(self):
self.assertEqual(config["im_filepath"], "Here3")
self.assertEqual(config["num_obs"], 7)
self.assertEqual(config["ang_arr"], [1, 2])
+ self.assertEqual(config["generator_config"]["name"], "test_gen")
+ self.assertEqual(config["generator_config"]["p1"], [1.0, 2.0])
+ self.assertEqual(config["generator_config"]["p2"], 2.0)
self.assertIsNone(config["cluster_type"])
def test_to_hdu(self):
@@ -73,7 +75,7 @@ def test_to_hdu(self):
"cluster_type": None,
"do_clustering": False,
"res_filepath": "There",
- "generator_config": {"name": "test_gen", "p1": 1.0, "p2": 2.0},
+ "generator_config": {"name": "test_gen", "p1": [1.0, 2.0], "p2": 2.0},
"basic_array": [1.0, 2.0, 3.0],
}
config = SearchConfiguration.from_dict(d)
@@ -83,7 +85,7 @@ def test_to_hdu(self):
self.assertEqual(hdu.data["num_obs"][0], "5\n...")
self.assertEqual(hdu.data["cluster_type"][0], "null\n...")
self.assertEqual(hdu.data["res_filepath"][0], "There\n...")
- self.assertEqual(hdu.data["generator_config"][0], "{name: test_gen, p1: 1.0, p2: 2.0}")
+ self.assertEqual(hdu.data["generator_config"][0], "{name: test_gen, p1: [1.0, 2.0], p2: 2.0}")
self.assertEqual(hdu.data["basic_array"][0], "[1.0, 2.0, 3.0]")
def test_to_yaml(self):
@@ -93,7 +95,7 @@ def test_to_yaml(self):
"cluster_type": None,
"do_clustering": False,
"res_filepath": "There",
- "generator_config": {"name": "test_gen", "p1": 1.0, "p2": 2.0},
+ "generator_config": {"name": "test_gen", "p1": [1.0, 2.0], "p2": 2.0},
}
config = SearchConfiguration.from_dict(d)
yaml_str = config.to_yaml()
@@ -104,7 +106,7 @@ def test_to_yaml(self):
self.assertEqual(yaml_dict["cluster_type"], None)
self.assertEqual(yaml_dict["res_filepath"], "There")
self.assertEqual(yaml_dict["generator_config"]["name"], "test_gen")
- self.assertEqual(yaml_dict["generator_config"]["p1"], 1.0)
+ self.assertEqual(yaml_dict["generator_config"]["p1"], [1.0, 2.0])
self.assertEqual(yaml_dict["generator_config"]["p2"], 2.0)
def test_save_and_load_yaml(self):
diff --git a/tests/test_regression_test.py b/tests/test_regression_test.py
index 883dd7a03..8fe864eac 100644
--- a/tests/test_regression_test.py
+++ b/tests/test_regression_test.py
@@ -245,16 +245,12 @@ def perform_search(im_stack, res_filename, default_psf):
"psf_val": default_psf,
"output_suffix": "",
"generator_config": {
- "name": "EclipticSearch",
- "vel_steps": 52,
- "min_vel": 92.0,
- "max_vel": 550.0,
- "ang_steps": 26,
+ "name": "EclipticCenteredSearch",
# Offset by PI for prograde orbits in lori allen data
- "min_ang_offset": np.pi - np.pi / 10.0,
- "max_ang_offset": np.pi + np.pi / 10.0,
+ "angles": [np.pi - np.pi / 10.0, np.pi + np.pi / 10.0, 26],
+ "velocities": [92.0, 550.0, 52],
"angle_units": "radians",
- "force_ecliptic": 1.1901106654050821,
+ "given_ecliptic": 1.1901106654050821,
},
"num_obs": 15,
"do_mask": True,
diff --git a/tests/test_trajectory_generator.py b/tests/test_trajectory_generator.py
index e66706732..34b16d6c7 100644
--- a/tests/test_trajectory_generator.py
+++ b/tests/test_trajectory_generator.py
@@ -5,7 +5,7 @@
from kbmod.trajectory_generator import (
KBMODV1Search,
KBMODV1SearchConfig,
- EclipticSearch,
+ EclipticCenteredSearch,
SingleVelocitySearch,
RandomVelocitySearch,
VelocityGridSearch,
@@ -70,8 +70,10 @@ def test_KBMODV1Search(self):
self.assertRaises(ValueError, KBMODV1Search, 3, 0.0, 3.0, 2, 0.25, -0.25)
self.assertRaises(ValueError, KBMODV1Search, 3, 3.5, 3.0, 2, -0.25, 0.25)
- def test_EclipticSearch(self):
- gen = EclipticSearch(3, 0.0, 2.0, 3, -45.0, 45.0, angle_units="degrees", ecliptic_angle=0.0)
+ def test_EclipticCenteredSearch(self):
+ gen = EclipticCenteredSearch(
+ [0.0, 2.0, 3], [-45.0, 45.0, 3], angle_units="degrees", given_ecliptic=0.0
+ )
expected_x = [0.0, 0.707107, 1.41421, 0.0, 1.0, 2.0, 0.0, 0.707107, 1.41421]
expected_y = [0.0, -0.707107, -1.41421, 0.0, 0.0, 0.0, 0.0, 0.707107, 1.41421]
@@ -89,9 +91,9 @@ def test_EclipticSearch(self):
self.assertAlmostEqual(tbl["vy"][i], expected_y[i], delta=0.001)
# Test invalid number of steps and ranges.
- self.assertRaises(ValueError, EclipticSearch, 0.0, 3, 0.0, 3.0, 0, -0.25, 0.25)
- self.assertRaises(ValueError, EclipticSearch, 0.0, 0, 0.0, 3.0, 2, -0.25, 0.25)
- self.assertRaises(ValueError, EclipticSearch, 0.0, 3, 3.5, 3.0, 2, -0.25, 0.25)
+ self.assertRaises(ValueError, EclipticCenteredSearch, [0.0, 3.0, 3], [-0.25, 0.25, 0])
+ self.assertRaises(ValueError, EclipticCenteredSearch, [0.0, 3.0, 0], [-0.25, 0.25, 2])
+ self.assertRaises(ValueError, EclipticCenteredSearch, [3.5, 3.0, 3], [-0.25, 0.25, 2])
def test_KBMODV1SearchConfig(self):
# Note that KBMOD v1's search will never include the upper bound of angle or velocity.
@@ -154,23 +156,19 @@ def test_create_trajectory_generator(self):
# Test we can create a trajectory generator with optional keyword parameters.
config3 = {
- "name": "EclipticSearch",
- "vel_steps": 2,
- "min_vel": 0.0,
- "max_vel": 1.0,
- "ang_steps": 2,
- "min_ang_offset": 0.0,
- "max_ang_offset": 45.0,
+ "name": "EclipticCenteredSearch",
+ "angles": [0.0, 45.0, 2],
+ "velocities": [0.0, 1.0, 2],
"angle_units": "degrees",
"force_ecliptic": None,
}
gen3 = create_trajectory_generator(config3, computed_ecliptic_angle=45.0)
- self.assertTrue(type(gen3) is EclipticSearch)
+ self.assertTrue(type(gen3) is EclipticCenteredSearch)
self.assertAlmostEqual(gen3.ecliptic_angle, np.pi / 4.0)
self.assertEqual(gen3.min_ang, np.pi / 4.0)
self.assertEqual(gen3.max_ang, np.pi / 2.0)
- config3["force_ecliptic"] = 0.0
+ config3["given_ecliptic"] = 0.0
gen4 = create_trajectory_generator(config3, computed_ecliptic_angle=45.0)
self.assertAlmostEqual(gen4.ecliptic_angle, 0.0)
self.assertEqual(gen4.min_ang, 0.0)
From 592e139900a6d90a2d61273a616f7a2ac1df76d3 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Mon, 16 Sep 2024 15:34:57 -0400
Subject: [PATCH 08/12] Documentation updates
---
docs/source/user_manual/search_space.rst | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/docs/source/user_manual/search_space.rst b/docs/source/user_manual/search_space.rst
index 4c14d96d9..132f236ea 100644
--- a/docs/source/user_manual/search_space.rst
+++ b/docs/source/user_manual/search_space.rst
@@ -33,7 +33,7 @@ Choosing Velocities
Perhaps the most complex aspect of the KBMOD algorithm is how it defines the grid of search velocities. KBMOD allows you to define custom search strategies to best match the data. These include:
* ``SingleVelocitySearch`` - A single predefined x and y velocity
* ``VelocityGridSearch`` - An evenly spaced grid of x and y velocities
-* ``EclipticSearch`` - An evenly spaced grid of velocity magnitudes and angles (using a current parameterization).
+* ``EclipticCenteredSearch`` - An evenly spaced grid of velocity magnitudes and angles (using a current parameterization) centered on a given or computed ecliptic angle.
* ``KBMODV1SearchConfig`` - An evenly spaced grid of velocity magnitudes and angles (using the legacy parameterization).
* ``RandomVelocitySearch`` - Randomly sampled x and y velocities
Additional search strategies can be defined by overriding the ``TrajectoryGenerator`` class in trajectory_generator.py.
@@ -88,12 +88,12 @@ Given the linear sampling for both velocities and angles, the full set of candid
for (int a = 0; a < angleSteps; ++a) {
for (int v = 0; v < velocitySteps; ++v) {
- searchList[a * velocitySteps + v].xVel = cos(angles[a]) * velocities[v];
- searchList[a * velocitySteps + v].yVel = sin(angles[a]) * velocities[v];
+ searchList[a * velocitySteps + v].xVel = cos(sampled_angles[a]) * sampled_velocities[v];
+ searchList[a * velocitySteps + v].yVel = sin(sampled_angles[a]) * sampled_velocities[v];
}
}
-where ``angles`` contains the list of angles to test and ``velocities`` contains the list of velocities.
+where ``sampled_angles`` contains the list of angles to test and ``sampled_velocities`` contains the list of velocities.
The list of velocities is created from the given bounds list ``velocities=[min_vel, max_vel, vel_steps]``. The range is inclusive of both bounds.
@@ -101,13 +101,13 @@ Each angle in the list is computed as an **offset** from the ecliptic angle. KBM
1. If ``given_ecliptic`` is provided (is not ``None``) in the generator’s configuration that value is used directly.
2. If the first image has a WCS, the ecliptic is estimated from that WCS.
3. A default ecliptic of 0.0 is used.
-The list of angles used is defined from the list ``angles=[min_offset, max_offset, angle_steps]`` and will span ``[ecliptic + min_offset, ecliptic + max_offset]`` inclusive of both bounds. Angles can be specified in degrees or radians (as noted by the ``angle_units`` parameter) but must be consistent among all angles.
+The angles used are defined from the list ``angles=[min_offset, max_offset, angle_steps]`` and will span ``[ecliptic + min_offset, ecliptic + max_offset]`` inclusive of both bounds. Angles can be specified in degrees or radians (as noted by the ``angle_units`` parameter) but must be consistent among all angles.
+------------------------+-----------------------------------------------------+
| **Parameter** | **Interpretation** |
+------------------------+-----------------------------------------------------+
-| ``angles`` | A length 3 tuple with the minimum angle offset, |
+| ``angles`` | A length 3 list with the minimum angle offset, |
| | the maximum offset, and the number of angles to |
| | to search through (angles specified in either |
| | radians or degrees) |
@@ -118,7 +118,7 @@ The list of angles used is defined from the list ``angles=[min_offset, max_offse
| ``given_ecliptic `` | The given value of the ecliptic angle |
| | (in either radians or degrees). |
+------------------------+-----------------------------------------------------+
-| ``velocities `` | A length 3 tuple with the minimum velocity (in |
+| ``velocities `` | A length 3 list with the minimum velocity (in |
| | pixels per day), the maximum velocity (in pixels |
| | per day), and number of velocities to test. |
+------------------------+-----------------------------------------------------+
From c87c1ef2a46a2868c78968c98d881731fba7189d Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Tue, 17 Sep 2024 09:43:17 -0400
Subject: [PATCH 09/12] TrajectoryGenerators take a WorkUnit for extra
information
---
src/kbmod/run_search.py | 13 +++-----
src/kbmod/trajectory_generator.py | 50 +++++++++++++++++-------------
tests/test_trajectory_generator.py | 35 ++++++++++++++++-----
3 files changed, 61 insertions(+), 37 deletions(-)
diff --git a/src/kbmod/run_search.py b/src/kbmod/run_search.py
index 68c54b917..b7f2cd65e 100644
--- a/src/kbmod/run_search.py
+++ b/src/kbmod/run_search.py
@@ -187,7 +187,7 @@ def do_gpu_search(self, config, stack, trj_generator):
keep = self.load_and_filter_results(search, config)
return keep
- def run_search(self, config, stack, trj_generator=None, computed_ecliptic=None):
+ def run_search(self, config, stack, trj_generator=None):
"""This function serves as the highest-level python interface for starting
a KBMOD search given an ImageStack and SearchConfiguration.
@@ -199,11 +199,7 @@ def run_search(self, config, stack, trj_generator=None, computed_ecliptic=None):
The stack before the masks have been applied. Modified in-place.
trj_generator : `TrajectoryGenerator`, optional
The object to generate the candidate trajectories for each pixel.
- If None uses the default KBMODv1 grid search
- computed_ecliptic : `float`, optional
- The computed ecliptic angle in the data from a WCS (if present).
- Uses ``None`` if the information needed to compute the angle is not
- available.
+ If None uses the default EclipticCenteredSearch
Returns
-------
@@ -222,7 +218,7 @@ def run_search(self, config, stack, trj_generator=None, computed_ecliptic=None):
# Perform the actual search.
if trj_generator is None:
- trj_generator = create_trajectory_generator(config, computed_ecliptic_angle=computed_ecliptic)
+ trj_generator = create_trajectory_generator(config, work_unit=None)
keep = self.do_gpu_search(config, stack, trj_generator)
if config["do_stamp_filter"]:
@@ -291,8 +287,7 @@ def run_search_from_work_unit(self, work):
keep : `Results`
The results.
"""
- # If there is a WCS compute the ecliptic angle from it.
- computed_ecliptic = work.compute_ecliptic_angle()
+ trj_generator = create_trajectory_generator(work.config, work_unit=work)
# Run the search.
-        return self.run_search(work.config, work.im_stack)
+        return self.run_search(work.config, work.im_stack, trj_generator=trj_generator)
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index 388198f57..c949b2e92 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -10,7 +10,7 @@
from kbmod.search import Trajectory
-def create_trajectory_generator(config, **kwargs):
+def create_trajectory_generator(config, work_unit=None, **kwargs):
"""Create a TrajectoryGenerator object given a dictionary
of configuration options. The generator class is specified by
the 'name' entry, which must exist and match the class name of one
@@ -20,6 +20,9 @@ def create_trajectory_generator(config, **kwargs):
----------
config : `dict` or `SearchConfiguration`
The dictionary of generator parameters.
+ work_unit : `WorkUnit`, optional
+ A WorkUnit to provide additional information about the data that
+ can be used to derive parameters that depend on the input.
Returns
-------
@@ -51,7 +54,7 @@ def create_trajectory_generator(config, **kwargs):
params.update(kwargs)
logger.debug(str(params))
- return TrajectoryGenerator.generators[name](**params)
+ return TrajectoryGenerator.generators[name](**params, work_unit=work_unit)
class TrajectoryGenerator(abc.ABC):
@@ -65,7 +68,7 @@ class TrajectoryGenerator(abc.ABC):
generators = {} # A mapping of class name to class object for subclasses.
- def __init__(self, **kwargs):
+ def __init__(self, work_unit=None, **kwargs):
pass
def __init_subclass__(cls, **kwargs):
@@ -301,7 +304,7 @@ def generate(self, *args, **kwargs):
class KBMODV1SearchConfig(KBMODV1Search):
"""Search a grid defined by velocities and angles in the format of the legacy configuration file."""
- def __init__(self, v_arr, ang_arr, average_angle=None, computed_ecliptic_angle=None, **kwargs):
+ def __init__(self, v_arr, ang_arr, average_angle=None, work_unit=None, **kwargs):
"""Create a class KBMODV1SearchConfig.
Parameters
@@ -314,20 +317,20 @@ def __init__(self, v_arr, ang_arr, average_angle=None, computed_ecliptic_angle=N
(in radians), and the number of angles to try.
average_angle : `float`, optional
The central angle to search around. Should align with the ecliptic in most cases.
- computed_ecliptic_angle : `float`, optional
- An override for the computed ecliptic from a WCS (in the units defined in
- ``angle_units``). This parameter is ignored if ``force_ecliptic`` is given.
+ work_unit : `WorkUnit`, optional
+ A WorkUnit to provide additional information about the data that
+ can be used to derive parameters that depend on the input.
"""
if len(v_arr) != 3:
raise ValueError("KBMODV1SearchConfig requires v_arr to be length 3")
if len(ang_arr) != 3:
raise ValueError("KBMODV1SearchConfig requires ang_arr to be length 3")
if average_angle is None:
- if computed_ecliptic_angle is None:
+ if work_unit is None:
raise ValueError(
- "KBMODV1SearchConfig requires a valid average_angle or computed_ecliptic_angle."
+ "KBMODV1SearchConfig requires a valid average_angle or a WorkUnit with a WCS."
)
- average_angle = computed_ecliptic_angle
+ average_angle = work_unit.compute_ecliptic_angle()
ang_min = average_angle - ang_arr[0]
ang_max = average_angle + ang_arr[1]
@@ -359,7 +362,7 @@ def __init__(
angles=[0.0, 0.0, 0],
angle_units="radians",
given_ecliptic=None,
- computed_ecliptic_angle=None,
+ work_unit=None,
**kwargs,
):
"""Create a class EclipticCenteredSearch.
@@ -378,16 +381,23 @@ def __init__(
given_ecliptic : `float`, optional
An override for the ecliptic as given in the config (in the units defined in
``angle_units``). This angle takes precedence over ``computed_ecliptic``.
- computed_ecliptic_angle : `float`, optional
- An override for the computed ecliptic from a WCS (in the units defined in
- ``angle_units``). This parameter is ignored if ``force_ecliptic`` is given.
+ work_unit : `WorkUnit`, optional
+ A WorkUnit to provide additional information about the data that
+ can be used to derive parameters that depend on the input.
"""
super().__init__(**kwargs)
if given_ecliptic is not None:
- ecliptic_angle = given_ecliptic
- elif computed_ecliptic_angle is not None:
- ecliptic_angle = computed_ecliptic_angle
+ if angle_units[:3] == "deg":
+ ecliptic_angle = given_ecliptic * (math.pi / 180.0)
+ elif angle_units[:3] == "rad":
+ ecliptic_angle = given_ecliptic
+ else:
+ raise ValueError(f"Unknown angular units {angle_units}")
+ elif work_unit is not None:
+ # compute_ecliptic_angle() always produces radians.
+ ecliptic_angle = work_unit.compute_ecliptic_angle()
+ print(f"Using WU = {ecliptic_angle}")
else:
logger = logging.getLogger(__name__)
logger.warning("No ecliptic angle provided. Using 0.0.")
@@ -405,10 +415,8 @@ def __init__(
self.angles = angles
self.ecliptic_angle = ecliptic_angle
if angle_units[:3] == "deg":
- deg_to_rad = math.pi / 180.0
- self.ecliptic_angle = deg_to_rad * self.ecliptic_angle
- self.angles[0] = deg_to_rad * self.angles[0]
- self.angles[1] = deg_to_rad * self.angles[1]
+ self.angles[0] = (math.pi / 180.0) * self.angles[0]
+ self.angles[1] = (math.pi / 180.0) * self.angles[1]
elif angle_units[:3] != "rad":
raise ValueError(f"Unknown angular units {angle_units}")
diff --git a/tests/test_trajectory_generator.py b/tests/test_trajectory_generator.py
index 34b16d6c7..b20e13815 100644
--- a/tests/test_trajectory_generator.py
+++ b/tests/test_trajectory_generator.py
@@ -1,7 +1,10 @@
import numpy as np
import unittest
+from astropy.wcs import WCS
+
from kbmod.configuration import SearchConfiguration
+from kbmod.fake_data.fake_data_creator import FakeDataSet
from kbmod.trajectory_generator import (
KBMODV1Search,
KBMODV1SearchConfig,
@@ -11,6 +14,7 @@
VelocityGridSearch,
create_trajectory_generator,
)
+from kbmod.work_unit import WorkUnit
class test_trajectory_generator(unittest.TestCase):
@@ -97,7 +101,7 @@ def test_EclipticCenteredSearch(self):
def test_KBMODV1SearchConfig(self):
# Note that KBMOD v1's search will never include the upper bound of angle or velocity.
- gen = KBMODV1SearchConfig([0.0, 3.0, 3], [0.25, 0.25, 2], 0.0)
+ gen = KBMODV1SearchConfig([0.0, 3.0, 3], [0.25, 0.25, 2], average_angle=0.0)
expected_x = [0.0, 0.9689, 1.9378, 0.0, 1.0, 2.0]
expected_y = [0.0, -0.247, -0.4948, 0.0, 0.0, 0.0]
@@ -154,22 +158,39 @@ def test_create_trajectory_generator(self):
self.assertEqual(gen2.vx, 1)
self.assertEqual(gen2.vy, 2)
+ # Create a fake work unit with one image and a WCS with a non-zero ecliptic angle.
+ fake_wcs = WCS(naxis=2)
+ fake_wcs.wcs.crpix = [0.0, 0.0]
+ fake_wcs.wcs.cdelt = np.array([-0.1, 0.1])
+ fake_wcs.wcs.crval = [0, -90]
+ fake_wcs.wcs.ctype = ["RA---TAN-SIP", "DEC--TAN-SIP"]
+ fake_wcs.wcs.crota = np.array([0.0, 0.0])
+
+ fake_data = FakeDataSet(10, 10, [0.0])
+ base_config = SearchConfiguration()
+ fake_work = WorkUnit(im_stack=fake_data.stack, config=base_config, wcs=fake_wcs)
+ fake_ecliptic = fake_work.compute_ecliptic_angle()
+ self.assertGreater(fake_ecliptic, 1.0)
+
# Test we can create a trajectory generator with optional keyword parameters.
config3 = {
"name": "EclipticCenteredSearch",
"angles": [0.0, 45.0, 2],
"velocities": [0.0, 1.0, 2],
"angle_units": "degrees",
- "force_ecliptic": None,
+ "given_ecliptic": None,
}
- gen3 = create_trajectory_generator(config3, computed_ecliptic_angle=45.0)
+
+ # Without a given ecliptic, we use the WCS.
+ gen3 = create_trajectory_generator(config3, work_unit=fake_work)
self.assertTrue(type(gen3) is EclipticCenteredSearch)
- self.assertAlmostEqual(gen3.ecliptic_angle, np.pi / 4.0)
- self.assertEqual(gen3.min_ang, np.pi / 4.0)
- self.assertEqual(gen3.max_ang, np.pi / 2.0)
+ self.assertAlmostEqual(gen3.ecliptic_angle, fake_ecliptic)
+ self.assertAlmostEqual(gen3.min_ang, fake_ecliptic)
+ self.assertAlmostEqual(gen3.max_ang, fake_ecliptic + np.pi / 4.0)
+ # The given_ecliptic has priority over the fake WCS.
config3["given_ecliptic"] = 0.0
- gen4 = create_trajectory_generator(config3, computed_ecliptic_angle=45.0)
+ gen4 = create_trajectory_generator(config3, work_unit=fake_work)
self.assertAlmostEqual(gen4.ecliptic_angle, 0.0)
self.assertEqual(gen4.min_ang, 0.0)
self.assertEqual(gen4.max_ang, np.pi / 4.0)
From 0a681869dbdcd0acce64c442aac1ad1584fd8b19 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Tue, 17 Sep 2024 11:57:24 -0400
Subject: [PATCH 10/12] Address PR comment
---
src/kbmod/trajectory_generator.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index c949b2e92..867e7ba69 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -10,6 +10,9 @@
from kbmod.search import Trajectory
+logger = logging.getLogger(__name__)
+
+
def create_trajectory_generator(config, work_unit=None, **kwargs):
"""Create a TrajectoryGenerator object given a dictionary
of configuration options. The generator class is specified by
@@ -46,7 +49,6 @@ def create_trajectory_generator(config, work_unit=None, **kwargs):
name = config["name"]
if name not in TrajectoryGenerator.generators:
-        raise KeyError("Trajectory generator {name} is undefined.")
+        raise KeyError(f"Trajectory generator {name} is undefined.")
- logger = logging.getLogger(__name__)
logger.info(f"Creating trajectory generator of type {name}")
# Add any keyword arguments to the params, overriding current values.
@@ -399,7 +401,6 @@ def __init__(
ecliptic_angle = work_unit.compute_ecliptic_angle()
print(f"Using WU = {ecliptic_angle}")
else:
- logger = logging.getLogger(__name__)
logger.warning("No ecliptic angle provided. Using 0.0.")
ecliptic_angle = 0.0
From f3c49084b9d0bb87f7bf666264aee4aa308efd28 Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Tue, 17 Sep 2024 12:33:50 -0400
Subject: [PATCH 11/12] Use the astropy unit parsing.
---
docs/source/user_manual/search_space.rst | 35 ++++++++++-----------
notebooks/KBMOD_Demo.ipynb | 2 +-
notebooks/create_fake_data.ipynb | 2 +-
src/kbmod/configuration.py | 2 +-
src/kbmod/fake_data/demo_helper.py | 2 +-
src/kbmod/trajectory_generator.py | 40 +++++++++++-------------
tests/test_regression_test.py | 2 +-
tests/test_trajectory_generator.py | 4 +--
8 files changed, 43 insertions(+), 46 deletions(-)
diff --git a/docs/source/user_manual/search_space.rst b/docs/source/user_manual/search_space.rst
index 132f236ea..537c5f2c7 100644
--- a/docs/source/user_manual/search_space.rst
+++ b/docs/source/user_manual/search_space.rst
@@ -104,24 +104,23 @@ Each angle in the list is computed as an **offset** from the ecliptic angle. KBM
The angles used are defined from the list ``angles=[min_offset, max_offset, angle_steps]`` and will span ``[ecliptic + min_offset, ecliptic + max_offset]`` inclusive of both bounds. Angles can be specified in degrees or radians (as noted by the ``angle_units`` parameter) but must be consistent among all angles.
-+------------------------+-----------------------------------------------------+
-| **Parameter** | **Interpretation** |
-+------------------------+-----------------------------------------------------+
-| ``angles`` | A length 3 list with the minimum angle offset, |
-| | the maximum offset, and the number of angles to |
-| | to search through (angles specified in either |
-| | radians or degrees) |
-+------------------------+-----------------------------------------------------+
-| ``angle_units`` | The units to use for angles. |
-| | Either 'degrees' or 'radians' |
-+------------------------+-----------------------------------------------------+
-| ``given_ecliptic `` | The given value of the ecliptic angle |
-| | (in either radians or degrees). |
-+------------------------+-----------------------------------------------------+
-| ``velocities `` | A length 3 list with the minimum velocity (in |
-| | pixels per day), the maximum velocity (in pixels |
-| | per day), and number of velocities to test. |
-+------------------------+-----------------------------------------------------+
++------------------------+------------------------------------------------------+
+| **Parameter** | **Interpretation** |
++------------------------+------------------------------------------------------+
+| ``angles`` | A length 3 list with the minimum angle offset, |
+| | the maximum offset, and the number of angles to |
+| | search through (angles specified in units given    |
+| | by ``angle_units``). |
++------------------------+------------------------------------------------------+
+| ``angle_units`` | The units to use for angles, such as "rad" or "deg". |
++------------------------+------------------------------------------------------+
+| ``given_ecliptic``  | The given value of the ecliptic angle (specified in |
+| | units given by ``angle_units``). |
++------------------------+------------------------------------------------------+
+| ``velocities``  | A length 3 list with the minimum velocity (in |
+| | pixels per day), the maximum velocity (in pixels |
+| | per day), and number of velocities to test. |
++------------------------+------------------------------------------------------+
KBMODV1SearchConfig
diff --git a/notebooks/KBMOD_Demo.ipynb b/notebooks/KBMOD_Demo.ipynb
index 5fa253287..6797f9917 100644
--- a/notebooks/KBMOD_Demo.ipynb
+++ b/notebooks/KBMOD_Demo.ipynb
@@ -128,7 +128,7 @@
" \"name\": \"EclipticCenteredSearch\",\n",
" \"angles\": [-0.5, 0.5, 11],\n",
" \"velocities\": [0.0, 20.0, 21],\n",
- " \"angle_units\": \"radians\",\n",
+ " \"angle_units\": \"radian\",\n",
-    "    \"force_ecliptic\": 0.0,\n",
+    "    \"given_ecliptic\": 0.0,\n",
" },\n",
" # Output parameters\n",
diff --git a/notebooks/create_fake_data.ipynb b/notebooks/create_fake_data.ipynb
index 86886acbc..3835c31ae 100644
--- a/notebooks/create_fake_data.ipynb
+++ b/notebooks/create_fake_data.ipynb
@@ -153,7 +153,7 @@
" \"name\": \"EclipticCenteredSearch\",\n",
" \"angles\": [-0.5, 0.5, 11],\n",
" \"velocities\": [0.0, 20.0, 21],\n",
- " \"angle_units\": \"radians\",\n",
+ " \"angle_units\": \"radian\",\n",
-    "    \"force_ecliptic\": 0.0,\n",
+    "    \"given_ecliptic\": 0.0,\n",
" },\n",
" # Loosen the other filtering parameters.\n",
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index 94c951659..c66b400e8 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -35,7 +35,7 @@ def __init__(self):
"name": "EclipticCenteredSearch",
-            "velocity": [92.0, 526.0, 257],
+            "velocities": [92.0, 526.0, 257],
"angles": [-math.pi / 15, math.pi / 15, 129],
- "angle_units": "radians",
+ "angle_units": "radian",
"given_ecliptic": None,
},
"gpu_filter": False,
diff --git a/src/kbmod/fake_data/demo_helper.py b/src/kbmod/fake_data/demo_helper.py
index 5af7e4ce7..57ff09d8c 100644
--- a/src/kbmod/fake_data/demo_helper.py
+++ b/src/kbmod/fake_data/demo_helper.py
@@ -42,7 +42,7 @@ def make_demo_data(filename=None):
"name": "EclipticCenteredSearch",
"velocities": [0, 20.0, 21],
"angles": [-0.5, 0.5, 11],
- "angle_units": "radians",
+ "angle_units": "radian",
"given_ecliptic": 0.0,
},
# Loosen the other filtering parameters.
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index 867e7ba69..6e3986505 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -4,6 +4,8 @@
import math
import random
+import astropy.units as u
+
from astropy.table import Table
from kbmod.configuration import SearchConfiguration
@@ -362,7 +364,7 @@ def __init__(
self,
velocities=[0.0, 0.0, 0],
angles=[0.0, 0.0, 0],
- angle_units="radians",
+ angle_units="radian",
given_ecliptic=None,
work_unit=None,
**kwargs,
@@ -378,8 +380,8 @@ def __init__(
A triplet of the minimum angle offset (in the units defined in ``angle_units``), the maximum
angle offset (in the units defined in ``angle_units``), and the number of angles to try.
angle_units : `str`
- The units for the angle. Must be one of radians or degrees.
- Default: 'radians'
+ The units for the angle.
+ Default: 'radian'
given_ecliptic : `float`, optional
An override for the ecliptic as given in the config (in the units defined in
``angle_units``). This angle takes precedence over ``computed_ecliptic``.
@@ -388,22 +390,21 @@ def __init__(
can be used to derive parameters that depend on the input.
"""
super().__init__(**kwargs)
+ ang_units = u.Unit(angle_units)
if given_ecliptic is not None:
- if angle_units[:3] == "deg":
- ecliptic_angle = given_ecliptic * (math.pi / 180.0)
- elif angle_units[:3] == "rad":
- ecliptic_angle = given_ecliptic
- else:
- raise ValueError(f"Unknown angular units {angle_units}")
+ self.ecliptic_angle = (given_ecliptic * ang_units).to(u.rad).value
elif work_unit is not None:
# compute_ecliptic_angle() always produces radians.
- ecliptic_angle = work_unit.compute_ecliptic_angle()
- print(f"Using WU = {ecliptic_angle}")
+ self.ecliptic_angle = work_unit.compute_ecliptic_angle()
else:
logger.warning("No ecliptic angle provided. Using 0.0.")
- ecliptic_angle = 0.0
+ self.ecliptic_angle = 0.0
+ if len(angles) != 3:
+ raise ValueError("Invalid angles parameter. Expected a length 3 list.")
+ if len(velocities) != 3:
+ raise ValueError("Invalid velocity parameter. Expected a length 3 list.")
if velocities[2] < 1 or angles[2] < 1:
raise ValueError("EclipticCenteredSearch requires at least 1 step in each dimension")
if velocities[1] < velocities[0]:
@@ -412,15 +413,12 @@ def __init__(
self.velocities = velocities
self.vel_stepsize = (velocities[1] - velocities[0]) / float(velocities[2] - 1)
- # Convert the angles into radians.
- self.angles = angles
- self.ecliptic_angle = ecliptic_angle
- if angle_units[:3] == "deg":
- self.angles[0] = (math.pi / 180.0) * self.angles[0]
- self.angles[1] = (math.pi / 180.0) * self.angles[1]
- elif angle_units[:3] != "rad":
- raise ValueError(f"Unknown angular units {angle_units}")
-
+ # Compute the angle bounds and step size in radians.
+ self.angles = [
+ (angles[0] * ang_units).to(u.rad).value,
+ (angles[1] * ang_units).to(u.rad).value,
+ angles[2],
+ ]
self.min_ang = self.ecliptic_angle + self.angles[0]
self.max_ang = self.ecliptic_angle + self.angles[1]
self.ang_stepsize = (self.max_ang - self.min_ang) / float(self.angles[2] - 1)
diff --git a/tests/test_regression_test.py b/tests/test_regression_test.py
index 8fe864eac..7b1159bf1 100644
--- a/tests/test_regression_test.py
+++ b/tests/test_regression_test.py
@@ -249,7 +249,7 @@ def perform_search(im_stack, res_filename, default_psf):
# Offset by PI for prograde orbits in lori allen data
"angles": [np.pi - np.pi / 10.0, np.pi + np.pi / 10.0, 26],
"velocities": [92.0, 550.0, 52],
- "angle_units": "radians",
+ "angle_units": "radian",
"given_ecliptic": 1.1901106654050821,
},
"num_obs": 15,
diff --git a/tests/test_trajectory_generator.py b/tests/test_trajectory_generator.py
index b20e13815..f08fbd9e1 100644
--- a/tests/test_trajectory_generator.py
+++ b/tests/test_trajectory_generator.py
@@ -76,7 +76,7 @@ def test_KBMODV1Search(self):
def test_EclipticCenteredSearch(self):
gen = EclipticCenteredSearch(
- [0.0, 2.0, 3], [-45.0, 45.0, 3], angle_units="degrees", given_ecliptic=0.0
+ [0.0, 2.0, 3], [-45.0, 45.0, 3], angle_units="degree", given_ecliptic=0.0
)
expected_x = [0.0, 0.707107, 1.41421, 0.0, 1.0, 2.0, 0.0, 0.707107, 1.41421]
expected_y = [0.0, -0.707107, -1.41421, 0.0, 0.0, 0.0, 0.0, 0.707107, 1.41421]
@@ -177,7 +177,7 @@ def test_create_trajectory_generator(self):
"name": "EclipticCenteredSearch",
"angles": [0.0, 45.0, 2],
"velocities": [0.0, 1.0, 2],
- "angle_units": "degrees",
+ "angle_units": "degree",
"given_ecliptic": None,
}
From 7abf218628b3e7131d45d0d3260ac2afcbe7db7e Mon Sep 17 00:00:00 2001
From: Jeremy Kubica <104161096+jeremykubica@users.noreply.github.com>
Date: Tue, 17 Sep 2024 12:55:25 -0400
Subject: [PATCH 12/12] Add velocity units
---
docs/source/user_manual/search_space.rst | 6 ++++--
src/kbmod/configuration.py | 1 +
src/kbmod/trajectory_generator.py | 11 ++++++++++-
3 files changed, 15 insertions(+), 3 deletions(-)
diff --git a/docs/source/user_manual/search_space.rst b/docs/source/user_manual/search_space.rst
index 537c5f2c7..218aee193 100644
--- a/docs/source/user_manual/search_space.rst
+++ b/docs/source/user_manual/search_space.rst
@@ -114,13 +114,15 @@ The angles used are defined from the list ``angles=[min_offset, max_offset, angl
+------------------------+------------------------------------------------------+
| ``angle_units`` | The units to use for angles, such as "rad" or "deg". |
+------------------------+------------------------------------------------------+
-| ``given_ecliptic `` | The given value of the ecliptic angle (specified in |
+| ``given_ecliptic`` | The given value of the ecliptic angle (specified in |
| | units given by ``angle_units``). |
+------------------------+------------------------------------------------------+
-| ``velocities `` | A length 3 list with the minimum velocity (in |
+| ``velocities`` | A length 3 list with the minimum velocity (in |
| | pixels per day), the maximum velocity (in pixels |
| | per day), and number of velocities to test. |
+------------------------+------------------------------------------------------+
+| ``velocity_units`` | The units to use for velocities (e.g. "pix / d") |
++------------------------+------------------------------------------------------+
KBMODV1SearchConfig
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index c66b400e8..b678593c5 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -36,6 +36,7 @@ def __init__(self):
"velocity": [92.0, 526.0, 257],
"angles": [-math.pi / 15, math.pi / 15, 129],
"angle_units": "radian",
+ "velocity_units": "pix / d",
"given_ecliptic": None,
},
"gpu_filter": False,
diff --git a/src/kbmod/trajectory_generator.py b/src/kbmod/trajectory_generator.py
index 6e3986505..9b0ede75e 100644
--- a/src/kbmod/trajectory_generator.py
+++ b/src/kbmod/trajectory_generator.py
@@ -365,6 +365,7 @@ def __init__(
velocities=[0.0, 0.0, 0],
angles=[0.0, 0.0, 0],
angle_units="radian",
+ velocity_units="pix / d",
given_ecliptic=None,
work_unit=None,
**kwargs,
@@ -382,6 +383,9 @@ def __init__(
angle_units : `str`
The units for the angle.
Default: 'radian'
+ velocity_units : `str`
+ The units for the velocity.
+ Default: 'pix / d'
given_ecliptic : `float`, optional
An override for the ecliptic as given in the config (in the units defined in
``angle_units``). This angle takes precedence over ``computed_ecliptic``.
@@ -391,6 +395,7 @@ def __init__(
"""
super().__init__(**kwargs)
ang_units = u.Unit(angle_units)
+ vel_units = u.Unit(velocity_units)
if given_ecliptic is not None:
self.ecliptic_angle = (given_ecliptic * ang_units).to(u.rad).value
@@ -410,7 +415,11 @@ def __init__(
if velocities[1] < velocities[0]:
raise ValueError(f"Invalid EclipticCenteredSearch bounds: {velocities}")
- self.velocities = velocities
+ self.velocities = [
+ (velocities[0] * vel_units).to(u.pixel / u.day).value,
+ (velocities[1] * vel_units).to(u.pixel / u.day).value,
+ velocities[2],
+ ]
self.vel_stepsize = (velocities[1] - velocities[0]) / float(velocities[2] - 1)
# Compute the angle bounds and step size in radians.