diff --git a/docs/source/user_manual/output_files.rst b/docs/source/user_manual/output_files.rst
index db9a46454..bf73a1dcd 100644
--- a/docs/source/user_manual/output_files.rst
+++ b/docs/source/user_manual/output_files.rst
@@ -1,7 +1,7 @@
 Output Files
 ============
 
-KBMOD outputs a range of information about the discovered trajectories. Depending on the search configuration parameters this data can be output as a single combined file and/or individual files.
+KBMOD outputs a range of information about the discovered trajectories.
 
 Results Table
 -------------
@@ -17,16 +17,7 @@ By default the "all_stamps" column is dropped to save space. This can disabled (
 
 See the notebooks (especially the KBMOD analysis notebook) for examples of how to work with these results.
 
-Individual Files
+Legacy Text File
 ----------------
 
-If the ``res_filepath`` configuration option is provided and ``ind_output_files`` configuration option is set to ``True``, the code will produce a few individual output files are useful on their own. Each filename includes a user defined suffix, allowing user to easily save and compare files from different runs. Below we use SUFFIX to indicate the user-defined suffix.
-
-The main file that most users will want to access is ``results_SUFFIX.txt``. This file contains one line for each trajectory with the trajectory information (x pixel start, y pixel start, x velocity, y velocity), the number of observations seen, the estimated flux, and the estimated likelihood.
-
-The full list of output files is:
-
-* ``all_stamps_SUFFIX.npy`` - All of the postage stamp images for each found trajectory. This is a size ``N`` x ``T`` numpy array where ``N`` is the number of results and ``T`` is the number of time steps.
-* ``config_SUFFIX.yml`` - A full dump of the configuration parameters in YAML format.
-* ``filter_stats_SUFFIX.csv`` - A CSV mapping each filtering label to the number of results removed at that stage.
-* ``results_SUFFIX.txt`` - The main results file including the found trajectories, their likelihoods, and fluxes.
+If the ``legacy_filename`` configuration option is provided, KBMOD will output the minimal result information (the Trajectory details) to a text file that can be read by numpy. This file includes the found trajectories, their likelihoods, and fluxes.
diff --git a/docs/source/user_manual/search_params.rst b/docs/source/user_manual/search_params.rst
index fd640aeb3..32696b6ca 100644
--- a/docs/source/user_manual/search_params.rst
+++ b/docs/source/user_manual/search_params.rst
@@ -83,8 +83,9 @@ This document serves to provide a quick overview of the existing parameters and
 |                        |                             | directory with multiple FITS files     |
 |                        |                             | (one for each exposure).               |
 +------------------------+-----------------------------+----------------------------------------+
-| ``ind_output_files``   | True                        | Output results to a series of          |
-|                        |                             | individual files.                      |
+| ``legacy_filename``    | None                        | The full path and file name for the    |
+|                        |                             | legacy text file of results. If        |
+|                        |                             | ``None``, this file is not written.    |
 +------------------------+-----------------------------+----------------------------------------+
 | ``lh_level``           | 10.0                        | The minimum computed likelihood for an |
 |                        |                             | object to be accepted.                 |
@@ -107,9 +108,6 @@ This document serves to provide a quick overview of the existing parameters and
 |                        |                             | observations for the object to be      |
 |                        |                             | accepted.                              |
 +------------------------+-----------------------------+----------------------------------------+
-| ``output_suffix``      | search                      | Suffix appended to output filenames.   |
-|                        |                             | See :ref:`Output Files` for more.      |
-+------------------------+-----------------------------+----------------------------------------+
 | ``peak_offset``        | [2.0, 2.0]                  | How far, in pixels, the brightest pixel|
 |                        |                             | in the stamp can be from the central   |
 |                        |                             | pixel in each direction ``[x,y]``.     |
@@ -121,9 +119,6 @@ This document serves to provide a quick overview of the existing parameters and
 | ``repeated_flag_keys`` | default_repeated_flag_keys  | The flags used when creating the global|
 |                        |                             | mask. See :ref:`Masking`.              |
 +------------------------+-----------------------------+----------------------------------------+
-| ``res_filepath``       | None                        | The path of the directory in which to  |
-|                        |                             | store the individual results files.    |
-+------------------------+-----------------------------+----------------------------------------+
 | ``result_filename``    | None                        | Full filename and path for a single    |
 |                        |                             | tabular result saves as ecsv.          |
 |                        |                             | Can be use used in addition to         |
@@ -149,7 +144,7 @@ This document serves to provide a quick overview of the existing parameters and
 |                        |                             | if:                                    |
 |                        |                             | * ``sum`` - (default) Per pixel sum    |
 |                        |                             | * ``median`` - A per pixel median      |
-|                        |                             | * ``mean`` - A per pixel mean          |\
+|                        |                             | * ``mean`` - A per pixel mean          |
 +------------------------+-----------------------------+----------------------------------------+
 | ``track_filtered``     | False                       | A Boolean indicating whether to track  |
 |                        |                             | the filtered trajectories. Warning     |
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index 7ce545589..c79533a69 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -35,16 +35,14 @@ def __init__(self):
             "encode_num_bytes": -1,
             "generator_config": None,
             "gpu_filter": False,
-            "ind_output_files": True,
             "im_filepath": None,
+            "legacy_filename": None,
             "lh_level": 10.0,
             "max_lh": 1000.0,
             "mom_lims": [35.5, 35.5, 2.0, 0.3, 0.3],
             "num_obs": 10,
-            "output_suffix": "search",
             "peak_offset": [2.0, 2.0],
             "psf_val": 1.4,
-            "res_filepath": None,
             "result_filename": None,
             "results_per_pixel": 8,
             "save_all_stamps": False,
diff --git a/src/kbmod/run_search.py b/src/kbmod/run_search.py
index b5bbd6111..7e4e14830 100644
--- a/src/kbmod/run_search.py
+++ b/src/kbmod/run_search.py
@@ -249,24 +249,15 @@ def run_search(self, config, stack, trj_generator=None):
         if config["save_all_stamps"]:
             append_all_stamps(keep, stack, config["stamp_radius"])
 
-        # Save the results and the configuration information used.
         logger.info(f"Found {len(keep)} potential trajectories.")
 
-        if config["res_filepath"] is not None and config["ind_output_files"]:
-            trj_filename = os.path.join(config["res_filepath"], f"results_{config['output_suffix']}.txt")
-            keep.write_trajectory_file(trj_filename)
-            config_filename = os.path.join(config["res_filepath"], f"config_{config['output_suffix']}.yml")
-            config.to_file(config_filename, overwrite=True)
-
-            stats_filename = os.path.join(
-                config["res_filepath"], f"filter_stats_{config['output_suffix']}.csv"
-            )
-            keep.write_filtered_stats(stats_filename)
-
-            if "all_stamps" in keep.colnames:
-                keep.write_column("all_stamps", f"all_stamps_{config['output_suffix']}.npy")
+        # Save the results as an ecsv file and/or a legacy text file.
+        if config["legacy_filename"] is not None:
+            logger.info(f"Saving legacy results to {config['legacy_filename']}")
+            keep.write_trajectory_file(config["legacy_filename"])
 
         if config["result_filename"] is not None:
+            logger.info(f"Saving results table to {config['result_filename']}")
             if not config["save_all_stamps"]:
                 keep.write_table(config["result_filename"], cols_to_drop=["all_stamps"])
             else:
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index 9a29969a5..155ac54dc 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -72,7 +72,7 @@ def test_to_hdu(self):
             "num_obs": 5,
             "cluster_type": None,
             "do_clustering": False,
-            "res_filepath": "There",
+            "legacy_filename": "There",
             "ang_arr": [1.0, 2.0, 3.0],
         }
         config = SearchConfiguration.from_dict(d)
@@ -81,7 +81,7 @@
         self.assertEqual(hdu.data["im_filepath"][0], "Here2\n...")
         self.assertEqual(hdu.data["num_obs"][0], "5\n...")
         self.assertEqual(hdu.data["cluster_type"][0], "null\n...")
-        self.assertEqual(hdu.data["res_filepath"][0], "There\n...")
+        self.assertEqual(hdu.data["legacy_filename"][0], "There\n...")
         self.assertEqual(hdu.data["ang_arr"][0], "[1.0, 2.0, 3.0]")
@@ -90,7 +90,7 @@ def test_to_yaml(self):
         d = {
             "im_filepath": "Here2",
             "num_obs": 5,
             "cluster_type": None,
             "do_clustering": False,
-            "res_filepath": "There",
+            "legacy_filename": "There",
             "ang_arr": [1.0, 2.0, 3.0],
         }
         config = SearchConfiguration.from_dict(d)
@@ -100,7 +100,7 @@
         self.assertEqual(yaml_dict["im_filepath"], "Here2")
         self.assertEqual(yaml_dict["num_obs"], 5)
         self.assertEqual(yaml_dict["cluster_type"], None)
-        self.assertEqual(yaml_dict["res_filepath"], "There")
+        self.assertEqual(yaml_dict["legacy_filename"], "There")
         self.assertEqual(yaml_dict["ang_arr"][0], 1.0)
         self.assertEqual(yaml_dict["ang_arr"][1], 2.0)
         self.assertEqual(yaml_dict["ang_arr"][2], 3.0)
@@ -111,7 +111,7 @@ def test_save_and_load_yaml(self):
 
         # Overwrite some defaults.
         config.set("im_filepath", "Here")
-        config.set("output_suffix", "txt")
+        config.set("lh_level", 25.0)
 
         with tempfile.TemporaryDirectory() as dir_name:
             file_path = os.path.join(dir_name, "tmp_config_data.yaml")
@@ -132,8 +132,7 @@
 
         self.assertEqual(len(config2._params), num_defaults)
         self.assertEqual(config2["im_filepath"], "Here")
-        self.assertEqual(config2["res_filepath"], None)
-        self.assertEqual(config2["output_suffix"], "txt")
+        self.assertEqual(config2["lh_level"], 25.0)
 
     def test_save_and_load_fits(self):
         config = SearchConfiguration()
@@ -141,7 +140,7 @@
 
         # Overwrite some defaults.
         config.set("im_filepath", "Here2")
-        config.set("output_suffix", "csv")
+        config.set("lh_level", 25.0)
 
         with tempfile.TemporaryDirectory() as dir_name:
             file_path = os.path.join(dir_name, "test.fits")
@@ -163,10 +162,10 @@
 
         self.assertEqual(len(config2._params), num_defaults)
         self.assertEqual(config2["im_filepath"], "Here2")
-        self.assertEqual(config2["output_suffix"], "csv")
+        self.assertEqual(config2["lh_level"], 25.0)
 
-        # Check that we correctly parse dictionaries and Nones.
-        self.assertIsNone(config2["res_filepath"])
+        # Check that we correctly parse Nones.
+        self.assertIsNone(config2["legacy_filename"])
 
 
 if __name__ == "__main__":
diff --git a/tests/test_regression_test.py b/tests/test_regression_test.py
index 521a54e8c..b13e59bd8 100644
--- a/tests/test_regression_test.py
+++ b/tests/test_regression_test.py
@@ -256,10 +256,8 @@ def perform_search(im_stack, res_filename, default_psf):
 
     input_parameters = {
         "im_filepath": "./",
-        "res_filepath": None,
        "result_filename": res_filename,
         "psf_val": default_psf,
-        "output_suffix": "",
         "v_arr": v_arr,
         "average_angle": average_angle,
         "ang_arr": ang_arr,
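As a usage sketch (not part of the patch itself), the snippet below shows how the two remaining outputs might be requested and read back after this change. The file paths are hypothetical; per the documentation changes above, ``result_filename`` is written as a single ecsv table and ``legacy_filename`` as a plain text file that numpy can read:

    # Sketch only: the file paths below are hypothetical, not part of this patch.
    import numpy as np
    from astropy.table import Table

    # The two output-related options that remain after this change
    # ("res_filepath", "ind_output_files", and "output_suffix" are removed).
    input_parameters = {
        "im_filepath": "./",
        "result_filename": "/tmp/kbmod_results.ecsv",  # single tabular result (ecsv)
        "legacy_filename": "/tmp/kbmod_results.txt",   # optional numpy-readable text file
    }

    # After a run, the ecsv table can be read back with astropy ...
    results = Table.read("/tmp/kbmod_results.ecsv", format="ascii.ecsv")

    # ... and the legacy text file with numpy, loaded here as raw string fields
    # because the exact column layout depends on the KBMOD version.
    legacy = np.loadtxt("/tmp/kbmod_results.txt", dtype=str, ndmin=2)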