From 6ebaa9f783ece8e9b6712ed798b050caa018ed71 Mon Sep 17 00:00:00 2001
From: DinoBektesevic
Date: Thu, 29 Feb 2024 10:13:59 -0800
Subject: [PATCH] Revert "Merge pull request #456 from dirac-institute/proposals/logging"

This reverts commit 3be19c72155e0a9b96867fad5d1bf96432abfaf7, reversing
changes made to a87c1113f7eca45c9c38eb6cd4d5a1a340a3a256.
---
 src/kbmod/__init__.py                       |  57 +----
 src/kbmod/analysis/create_stamps.py         |   7 +-
 src/kbmod/analysis_utils.py                 |  21 +-
 src/kbmod/configuration.py                  |   9 +-
 src/kbmod/data_interface.py                 |  40 +--
 src/kbmod/fake_data/fake_data_creator.py    |   3 +-
 src/kbmod/filters/stamp_filters.py          |  27 +-
 src/kbmod/run_search.py                     |  36 +--
 .../kbmod/search/.clang-format              |   0
 src/kbmod/search/bindings.cpp               |   2 -
 src/kbmod/search/debug_timer.cpp            |  38 ++-
 src/kbmod/search/debug_timer.h              |  10 +-
 src/kbmod/search/logging.h                  | 236 ------------------
 src/kbmod/search/pydocs/stack_search_docs.h |   1 +
 src/kbmod/search/stack_search.cpp           |  36 +--
 src/kbmod/search/stack_search.h             |   2 -
 src/kbmod/trajectory_explorer.py            |   8 +-
 src/kbmod/work_unit.py                      |   8 +-
 tests/test_debug_timer.py                   |   2 +-
 19 files changed, 113 insertions(+), 430 deletions(-)
 rename .clang-format => src/kbmod/search/.clang-format (100%)
 delete mode 100644 src/kbmod/search/logging.h

diff --git a/src/kbmod/__init__.py b/src/kbmod/__init__.py
index 67ff02942..544674a00 100644
--- a/src/kbmod/__init__.py
+++ b/src/kbmod/__init__.py
@@ -1,18 +1,11 @@
 import warnings
 
 try:
-    from ._version import version as __version__  # noqa: F401
+    from ._version import version as __version__
 except ImportError:
     warnings.warn("Unable to determine the package version. " "This is likely a broken installation.")
 
-import os
-import time
-import logging as _logging
-from logging import config as _config
-
-# Import the rest of the package
-from kbmod.search import Logging
-from . import (  # noqa: F401
+from . import (
     analysis,
     analysis_utils,
     data_interface,
@@ -26,49 +19,3 @@
 from .search import PSF, RawImage, LayeredImage, ImageStack, StackSearch
 from .standardizers import Standardizer, StandardizerConfig
 from .image_collection import ImageCollection
-
-
-# there are ways for this to go to a file, but is it worth it?
-# Then we have to roll a whole logging.config_from_shared_config thing
-_SHARED_LOGGING_CONFIG = {
-    "level": os.environ.get("KB_LOG_LEVEL", "WARNING"),
-    "format": "[%(asctime)s %(levelname)s %(name)s] %(message)s",
-    "datefmt": "%Y-%m-%dT%H:%M:%SZ",
-    "converter": "gmtime",
-}
-
-# Declare our own root logger, so that we don't start printing DEBUG
-# messages from every package we import
-__PY_LOGGING_CONFIG = {
-    "version": 1.0,
-    "formatters": {
-        "standard": {
-            "format": _SHARED_LOGGING_CONFIG["format"],
-        },
-    },
-    "handlers": {
-        "default": {
-            "level": _SHARED_LOGGING_CONFIG["level"],
-            "formatter": "standard",
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stderr",
-        }
-    },
-    "loggers": {
-        "kbmod": {
-            "handlers": ["default"],
-            "level": _SHARED_LOGGING_CONFIG["level"],
-        }
-    },
-}
-
-# The timezone converter can not be configured via the config submodule for
-# some reason, only directly. Must be configured after loading the dictConfig
-_config.dictConfig(__PY_LOGGING_CONFIG)
-if _SHARED_LOGGING_CONFIG["converter"] == "gmtime":
-    _logging.Formatter.converter = time.gmtime
-else:
-    _logging.Formatter.converter = time.localtime
-
-# Configure the CPP logging wrapper with the same setup
-Logging().setConfig(_SHARED_LOGGING_CONFIG)
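For reference, a minimal standalone sketch of the Python-side logging bootstrap this hunk deletes, using only the standard library (the kbmod-specific `Logging().setConfig(...)` bridge to C++ is omitted, and the hard-coded values stand in for the environment lookup):

    import logging
    import time
    from logging import config

    SHARED = {
        "level": "WARNING",
        "format": "[%(asctime)s %(levelname)s %(name)s] %(message)s",
        "datefmt": "%Y-%m-%dT%H:%M:%SZ",
        "converter": "gmtime",
    }

    config.dictConfig({
        "version": 1,
        "formatters": {"standard": {"format": SHARED["format"], "datefmt": SHARED["datefmt"]}},
        "handlers": {
            "default": {
                "level": SHARED["level"],
                "formatter": "standard",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stderr",
            }
        },
        "loggers": {"kbmod": {"handlers": ["default"], "level": SHARED["level"]}},
    })

    # As the deleted comment notes, the timezone converter cannot be set
    # through dictConfig; it must be assigned on the Formatter class directly.
    logging.Formatter.converter = time.gmtime if SHARED["converter"] == "gmtime" else time.localtime

    logging.getLogger("kbmod").warning("timestamps are rendered in UTC")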
diff --git a/src/kbmod/analysis/create_stamps.py b/src/kbmod/analysis/create_stamps.py
index 17863c54a..851bac468 100644
--- a/src/kbmod/analysis/create_stamps.py
+++ b/src/kbmod/analysis/create_stamps.py
@@ -6,13 +6,9 @@
 import numpy as np
 from astropy.io import fits
 
-from kbmod.search import Logging
 from kbmod.file_utils import *
 
-logger = Logging.getLogger(__name__)
-
-
 class CreateStamps(object):
     def __init__(self):
         return
@@ -121,7 +117,8 @@ def max_value_stamp_filter(self, stamps, center_thresh, verbose=True):
             An np array of stamp indices to keep.
         """
         keep_stamps = np.where(np.max(stamps, axis=1) > center_thresh)[0]
-        logger.info(f"Center filtering keeps {len(keep_stamps)} out of {len(stamps)} stamps.")
+        if verbose:
+            print("Center filtering keeps %i out of %i stamps." % (len(keep_stamps), len(stamps)))
         return keep_stamps
 
     def load_results(self, res_filename):
diff --git a/src/kbmod/analysis_utils.py b/src/kbmod/analysis_utils.py
index 28df6cb69..a779d64aa 100644
--- a/src/kbmod/analysis_utils.py
+++ b/src/kbmod/analysis_utils.py
@@ -14,9 +14,6 @@
 from .result_list import ResultList, ResultRow
 
-logger = kb.Logging.getLogger(__name__)
-
-
 class PostProcess:
     """This class manages the post-processing utilities used to filter out and
     otherwise remove false positives from the KBMOD search. This includes,
@@ -84,13 +81,17 @@ def load_and_filter_results(
         else:
             stats_filter = CombinedStatsFilter(min_obs=self.num_obs)
 
-        logger.info("Retrieving Results")
+        print("---------------------------------------")
+        print("Retrieving Results")
+        print("---------------------------------------")
         while likelihood_limit is False:
-            logger.info("Getting results...")
+            print("Getting results...")
             results = search.get_results(res_num, chunk_size)
-            logger.info("Chunk Start = %i" % res_num)
-            logger.info("Chunk Max Likelihood = %.2f" % results[0].lh)
-            logger.info("Chunk Min. Likelihood = %.2f" % results[-1].lh)
+            print("---------------------------------------")
+            print("Chunk Start = %i" % res_num)
+            print("Chunk Max Likelihood = %.2f" % results[0].lh)
+            print("Chunk Min. Likelihood = %.2f" % results[-1].lh)
+            print("---------------------------------------")
 
             result_batch = ResultList(self._mjds)
             for i, trj in enumerate(results):
@@ -109,7 +110,7 @@
                     total_count += 1
 
             batch_size = result_batch.num_results()
-            logger.info("Extracted batch of %i results for total of %i" % (batch_size, total_count))
+            print("Extracted batch of %i results for total of %i" % (batch_size, total_count))
             if batch_size > 0:
                 apply_clipped_sigma_g(clipper, result_batch, self.num_cores)
                 result_batch.apply_filter(stats_filter)
@@ -134,7 +135,7 @@ def apply_clustering(self, result_list, cluster_params):
         # Skip clustering if there is nothing to cluster.
         if result_list.num_results() == 0:
             return
-        logger.info("Clustering %i results" % result_list.num_results())
+        print("Clustering %i results" % result_list.num_results(), flush=True)
 
         # Do the clustering and the filtering.
         f = DBSCANFilter(
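A schematic paraphrase of the chunked retrieval loop restored above: results come back likelihood-sorted, so paging can stop once a chunk dips below the cutoff. `search`, `chunk_size`, and `lh_level` are stand-ins for the real objects and parameters used in `PostProcess.load_and_filter_results`; this is a sketch, not the actual method.

    def fetch_results(search, chunk_size=250000, lh_level=10.0):
        kept, start = [], 0
        while True:
            chunk = search.get_results(start, chunk_size)  # sorted by likelihood
            if len(chunk) == 0:
                break
            kept.extend(trj for trj in chunk if trj.lh >= lh_level)
            if chunk[-1].lh < lh_level:  # everything after this chunk is fainter
                break
            start += chunk_size
        return kept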
diff --git a/src/kbmod/configuration.py b/src/kbmod/configuration.py
index 096b4a96b..2f02ced31 100644
--- a/src/kbmod/configuration.py
+++ b/src/kbmod/configuration.py
@@ -6,10 +6,6 @@
 from pathlib import Path
 import yaml
 from yaml import dump, safe_load
-from kbmod.search import Logging
-
-
-logger = Logging.getLogger(__name__)
 
 
 class SearchConfiguration:
@@ -124,7 +120,8 @@ def set(self, param, value, strict=True):
         if param not in self._params:
             if strict:
                 raise KeyError(f"Invalid parameter: {param}")
-            logger.warning(f"Ignoring invalid parameter: {param}")
+            else:
+                print(f"Ignoring invalid parameter: {param}")
         else:
             self._params[param] = value
 
@@ -284,7 +281,7 @@ def to_file(self, filename, overwrite=False):
             Indicates whether to overwrite an existing file.
         """
         if Path(filename).is_file() and not overwrite:
-            logger.warning(f"Configuration file {filename} already exists.")
+            print(f"Warning: Configuration file {filename} already exists.")
             return
 
         with open(filename, "w") as file:
diff --git a/src/kbmod/data_interface.py b/src/kbmod/data_interface.py
index 06e68b57c..f51f7e06a 100644
--- a/src/kbmod/data_interface.py
+++ b/src/kbmod/data_interface.py
@@ -7,14 +7,16 @@
 from kbmod.configuration import SearchConfiguration
 from kbmod.file_utils import *
-from kbmod.search import ImageStack, LayeredImage, PSF, RawImage, Logging
+from kbmod.search import (
+    ImageStack,
+    LayeredImage,
+    PSF,
+    RawImage,
+)
 from kbmod.wcs_utils import append_wcs_to_hdu_header
 from kbmod.work_unit import WorkUnit, raw_image_to_hdu
 
-logger = Logging.getLogger(__name__)
-
-
 def load_deccam_layered_image(filename, psf):
     """Load a layered image from the legacy deccam format.
 
@@ -108,9 +110,6 @@ def save_deccam_layered_image(img, filename, wcs=None, overwrite=True):
     hdul.writeto(filename, overwrite=overwrite)
 
 
-logger = kb.Logging.getLogger(__name__)
-
-
 def load_input_from_individual_files(
     im_filepath,
     time_file,
@@ -147,17 +146,21 @@
     visit_times : `list`
         A list of MJD times.
     """
-    logger.info("Loading Images")
+    print("---------------------------------------")
+    print("Loading Images")
+    print("---------------------------------------")
 
     # Load a mapping from visit numbers to the visit times. This dictionary stays
     # empty if no time file is specified.
     image_time_dict = FileUtils.load_time_dictionary(time_file)
-    logger.info(f"Loaded {len(image_time_dict)} time stamps.")
+    if verbose:
+        print(f"Loaded {len(image_time_dict)} time stamps.")
 
     # Load a mapping from visit numbers to PSFs. This dictionary stays
     # empty if no time file is specified.
     image_psf_dict = FileUtils.load_psf_dictionary(psf_file)
-    logger.info(f"Loaded {len(image_psf_dict)} image PSFs stamps.")
+    if verbose:
+        print(f"Loaded {len(image_psf_dict)} image PSFs stamps.")
 
     # Retrieve the list of visits (file names) in the data directory.
     patch_visits = sorted(os.listdir(im_filepath))
@@ -169,7 +172,8 @@
     for visit_file in np.sort(patch_visits):
         # Skip non-fits files.
         if not ".fits" in visit_file:
-            logger.info(f"Skipping non-FITS file {visit_file}")
+            if verbose:
+                print(f"Skipping non-FITS file {visit_file}")
             continue
 
         # Compute the full file path for loading.
@@ -190,7 +194,8 @@
         # Skip files without a valid visit ID.
         if visit_id is None:
-            logger.warning(f"WARNING: Unable to extract visit ID for {visit_file}.")
+            if verbose:
+                print(f"WARNING: Unable to extract visit ID for {visit_file}.")
             continue
 
         # Check if the image has a specific PSF.
@@ -199,7 +204,8 @@
             psf = PSF(image_psf_dict[visit_id])
 
         # Load the image file and set its time.
-        logger.info(f"Loading file: {full_file_path}")
+        if verbose:
+            print(f"Loading file: {full_file_path}")
         img = load_deccam_layered_image(full_file_path, psf)
         time_stamp = img.get_obstime()
 
@@ -209,12 +215,14 @@
             img.set_obstime(time_stamp)
 
         if time_stamp <= 0.0:
-            logger.warning(f"WARNING: No valid timestamp provided for {visit_file}.")
+            if verbose:
+                print(f"WARNING: No valid timestamp provided for {visit_file}.")
             continue
 
         # Check if we should filter the record based on the time bounds.
         if mjd_lims is not None and (time_stamp < mjd_lims[0] or time_stamp > mjd_lims[1]):
-            logger.info(f"Pruning file {visit_file} by timestamp={time_stamp}.")
+            if verbose:
+                print(f"Pruning file {visit_file} by timestamp={time_stamp}.")
             continue
 
         # Save image, time, and WCS information.
@@ -222,7 +230,7 @@
         images.append(img)
         wcs_list.append(curr_wcs)
 
-    print(f"Loaded {len(images)} images")
+    print(f"Loaded {len(images)} images")
 
     stack = ImageStack(images)
     return (stack, wcs_list, visit_times)
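A quick illustration of the strict/non-strict behavior restored in `SearchConfiguration.set` above. The no-argument constructor and the `num_obs` parameter name are assumptions for the sketch, not verified against the full class:

    from kbmod.configuration import SearchConfiguration

    config = SearchConfiguration()
    config.set("num_obs", 10)                  # known parameter: stored
    config.set("typo_param", 1, strict=False)  # unknown parameter: printed and ignored
    try:
        config.set("typo_param", 1)            # unknown + strict (default): raises
    except KeyError as err:
        print(err)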
diff --git a/src/kbmod/fake_data/fake_data_creator.py b/src/kbmod/fake_data/fake_data_creator.py
index bc48b5dea..022d0ec63 100644
--- a/src/kbmod/fake_data/fake_data_creator.py
+++ b/src/kbmod/fake_data/fake_data_creator.py
@@ -17,7 +17,6 @@
 from kbmod.data_interface import save_deccam_layered_image
 from kbmod.file_utils import *
 from kbmod.search import *
-from kbmod.search import Logging
 from kbmod.wcs_utils import append_wcs_to_hdu_header
 from kbmod.work_unit import WorkUnit
 
@@ -280,7 +279,7 @@ def save_fake_data_to_dir(self, data_dir):
         # Make the subdirectory if needed.
         dir_path = Path(data_dir)
         if not dir_path.is_dir():
-            logger.info(f"Directory {data_dir} does not exist. Creating.")
+            print("Directory '%s' does not exist. Creating." % data_dir)
             os.mkdir(data_dir)
 
         # Save each of the image files.
diff --git a/src/kbmod/filters/stamp_filters.py b/src/kbmod/filters/stamp_filters.py
index 075139017..4354c26c5 100644
--- a/src/kbmod/filters/stamp_filters.py
+++ b/src/kbmod/filters/stamp_filters.py
@@ -18,13 +18,9 @@
     StampCreator,
     StampParameters,
     StampType,
-    Logging,
 )
 
 
-logger = Logging.getLogger(__name__)
-
-
 class BaseStampFilter(abc.ABC):
     """The base class for the various stamp filters.
 
@@ -332,12 +328,16 @@ def get_coadds_and_filter(result_list, im_stack, stamp_params, chunk_size=100000
     if type(stamp_params) is SearchConfiguration:
         stamp_params = extract_search_parameters_from_config(stamp_params)
 
-    if result_list.num_results() <= 0:
-        logger.debug("Stamp Filtering : skipping, othing to filter.")
-    else:
-        logger.debug(f"Stamp filtering {result_list.num_results()} results.")
-        logger.debug(f"Using filtering params: {stamp_params}")
-        logger.debug(f"Using chunksize = {chunk_size}")
+    if debug:
+        print("---------------------------------------")
+        print("Applying Stamp Filtering")
+        print("---------------------------------------")
+        if result_list.num_results() <= 0:
+            print("Skipping. Nothing to filter.")
+        else:
+            print(f"Stamp filtering {result_list.num_results()} results.")
+            print(stamp_params)
+            print(f"Using chunksize = {chunk_size}")
 
     # Run the stamp creation and filtering in batches of chunk_size.
     start_time = time.time()
@@ -383,9 +383,10 @@
 
     # Do the actual filtering of results
     result_list.filter_results(all_valid_inds)
-
-    logger.debug(f"Keeping {result_list.num_results()} results")
-    logger.debug("{:.2f}s elapsed".format(time.time() - start_time))
+    if debug:
+        print("Keeping %i results" % result_list.num_results(), flush=True)
+        time_elapsed = time.time() - start_time
+        print("{:.2f}s elapsed".format(time_elapsed))
 
 
 def append_all_stamps(result_list, im_stack, stamp_radius):
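A sketch of the batching pattern `get_coadds_and_filter` uses above: results are processed in fixed-size slices so stamp generation never holds every stamp in memory at once, while the surviving indices stay global across chunks. The helper and names here are illustrative, not the real API:

    def filter_in_chunks(rows, chunk_size, keep_fn):
        valid = []
        for start in range(0, len(rows), chunk_size):
            batch = rows[start : start + chunk_size]
            # Offset batch-local indices back into the global result list.
            valid.extend(start + i for i, row in enumerate(batch) if keep_fn(row))
        return valid

    print(filter_in_chunks(list(range(10)), 4, lambda r: r % 2 == 0))  # [0, 2, 4, 6, 8]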
Nothing to filter.") + else: + print(f"Stamp filtering {result_list.num_results()} results.") + print(stamp_params) + print(f"Using chunksize = {chunk_size}") # Run the stamp creation and filtering in batches of chunk_size. start_time = time.time() @@ -383,9 +383,10 @@ def get_coadds_and_filter(result_list, im_stack, stamp_params, chunk_size=100000 # Do the actual filtering of results result_list.filter_results(all_valid_inds) - - logger.debug(f"Keeping {result_list.num_results()} results") - logger.debug("{:.2f}s elapsed".format(time.time() - start_time)) + if debug: + print("Keeping %i results" % result_list.num_results(), flush=True) + time_elapsed = time.time() - start_time + print("{:.2f}s elapsed".format(time_elapsed)) def append_all_stamps(result_list, im_stack, stamp_radius): diff --git a/src/kbmod/run_search.py b/src/kbmod/run_search.py index 8584d767d..7933751ef 100644 --- a/src/kbmod/run_search.py +++ b/src/kbmod/run_search.py @@ -1,5 +1,6 @@ import os import time +import warnings import koffi import numpy as np @@ -18,9 +19,6 @@ from .work_unit import WorkUnit -logger = kb.Logging.getLogger(__name__) - - class SearchRunner: """A class to run the KBMOD grid search.""" @@ -77,12 +75,16 @@ def do_gpu_search(self, config, search, trj_generator): elif config["y_pixel_buffer"] and config["y_pixel_buffer"] > 0: search.set_start_bounds_y(-config["y_pixel_buffer"], height + config["y_pixel_buffer"]) - search_timer = kb.DebugTimer("grid search", logger) - logger.debug(f"{trj_generator}") + search_timer = kb.DebugTimer("Grid Search", debug) + if debug: + print(f"Average Angle = {config['average_angle']}") + print(f"Search Angle Limits = {ang_lim}") + print(f"Velocity Limits = {config['v_arr']}") # If we are using gpu_filtering, enable it and set the parameters. if config["gpu_filter"]: - logger.debug("Using in-line GPU sigmaG filtering methods", flush=True) + if debug: + print("Using in-line GPU sigmaG filtering methods", flush=True) coeff = SigmaGClipping.find_sigma_g_coeff( config["sigmaG_lims"][0], config["sigmaG_lims"][1], @@ -127,7 +129,7 @@ def run_search(self, config, stack, trj_generator=None): keep : ResultList The results. """ - full_timer = kb.DebugTimer("KBMOD", logger) + full_timer = kb.DebugTimer("KBMOD", config["debug"]) # Collect the MJDs. mjds = [] @@ -163,7 +165,7 @@ def run_search(self, config, stack, trj_generator=None): max_lh=config["max_lh"], ) if config["do_stamp_filter"]: - stamp_timer = kb.DebugTimer("stamp filtering", logger) + stamp_timer = kb.DebugTimer("stamp filtering", config["debug"]) get_coadds_and_filter( keep, search.get_imagestack(), @@ -173,7 +175,7 @@ def run_search(self, config, stack, trj_generator=None): stamp_timer.stop() if config["do_clustering"]: - cluster_timer = kb.DebugTimer("clustering", logger) + cluster_timer = kb.DebugTimer("clustering", config["debug"]) cluster_params = {} cluster_params["x_size"] = stack.get_width() cluster_params["y_size"] = stack.get_height() @@ -185,7 +187,7 @@ def run_search(self, config, stack, trj_generator=None): # Extract all the stamps for all time steps and append them onto the result rows. 
diff --git a/.clang-format b/src/kbmod/search/.clang-format
similarity index 100%
rename from .clang-format
rename to src/kbmod/search/.clang-format
diff --git a/src/kbmod/search/bindings.cpp b/src/kbmod/search/bindings.cpp
index 3362a32e2..5e9962d77 100644
--- a/src/kbmod/search/bindings.cpp
+++ b/src/kbmod/search/bindings.cpp
@@ -6,7 +6,6 @@
 
 namespace py = pybind11;
 
-#include "logging.h"
 #include "common.h"
 #include "geom.h"
 
@@ -28,7 +27,6 @@ PYBIND11_MODULE(search, m) {
             .value("STAMP_MEAN", search::StampType::STAMP_MEAN)
             .value("STAMP_MEDIAN", search::StampType::STAMP_MEDIAN)
             .export_values();
-    logging::logging_bindings(m);
     indexing::index_bindings(m);
     indexing::point_bindings(m);
     indexing::rectangle_bindings(m);
diff --git a/src/kbmod/search/debug_timer.cpp b/src/kbmod/search/debug_timer.cpp
index 2bbada4a4..e3606df2d 100644
--- a/src/kbmod/search/debug_timer.cpp
+++ b/src/kbmod/search/debug_timer.cpp
@@ -10,33 +10,28 @@
 
 namespace search {
 
-DebugTimer::DebugTimer(std::string message, std::string name)
-        : message_(message), logger_(logging::getLogger(name)) {
-    start();
-}
-
-DebugTimer::DebugTimer(std::string message, logging::Logger* logger) : message_(message), logger_(logger) {
-    start();
-}
-
-DebugTimer::DebugTimer(std::string message) : message_(message) {
-    std::replace(message.begin(), message.end(), ' ', '.');
-    std::string derived_name = "DebugTimer." + message;
-    logger_ = logging::getLogger(derived_name);
+DebugTimer::DebugTimer(std::string name, bool verbose) {
+    name_ = name;
+    verbose_ = verbose;
     start();
 }
 
 void DebugTimer::start() {
     running_ = true;
     t_start_ = std::chrono::system_clock::now();
-    logger_->debug("Starting " + message_ + " timer.");
+    if (verbose_) {
+        std::cout << "Starting " << name_ << "...\n" << std::flush;
+    }
 }
 
 void DebugTimer::stop() {
     t_end_ = std::chrono::system_clock::now();
     running_ = false;
-    auto t_delta = std::chrono::duration_cast<std::chrono::milliseconds>(t_end_ - t_start_);
-    logger_->debug("Finished " + message_ + " in " + std::to_string(t_delta.count() / 1000.0) + "seconds.");
+
+    if (verbose_) {
+        auto t_delta = std::chrono::duration_cast<std::chrono::milliseconds>(t_end_ - t_start_);
+        std::cout << name_ << " finished in " << t_delta.count() / 1000.0 << " seconds.\n" << std::flush;
+    }
 }
 
 double DebugTimer::read() {
@@ -49,7 +44,9 @@
     }
 
     double result = t_delta.count() / 1000.0;
-    logger_->debug("Step " + message_ + " is at " + std::to_string(result) + "seconds.");
+    if (verbose_) {
+        std::cout << name_ << " at " << result << " seconds.\n" << std::flush;
+    }
 
     return result;
 }
@@ -57,12 +54,7 @@
 static void debug_timer_binding(py::module& m) {
     using dbt = search::DebugTimer;
     py::class_<dbt>(m, "DebugTimer", pydocs::DOC_DEBUG_TIMER)
-            .def(py::init<std::string, std::string>())
-            .def(py::init<std::string, logging::Logger*>())
-            .def(py::init([](std::string message, py::object logger) {
-                std::string name = std::string(py::str(logger.attr("name")));
-                return std::unique_ptr<DebugTimer>(new DebugTimer(message, name));
-            }))
+            .def(py::init<std::string, bool>())
            .def("start", &dbt::start, pydocs::DOC_DEBUG_TIMER_start)
            .def("stop", &dbt::stop, pydocs::DOC_DEBUG_TIMER_stop)
            .def("read", &dbt::read, pydocs::DOC_DEBUG_TIMER_read);
diff --git a/src/kbmod/search/debug_timer.h b/src/kbmod/search/debug_timer.h
index 43d816d85..9eb094c2c 100644
--- a/src/kbmod/search/debug_timer.h
+++ b/src/kbmod/search/debug_timer.h
@@ -10,14 +10,10 @@
 #include
 #include
 
-#include "logging.h"
-
 namespace search {
 
 class DebugTimer {
 public:
-    DebugTimer(std::string message, std::string name);
-    DebugTimer(std::string message, logging::Logger* logger);
-    DebugTimer(std::string message);
+    DebugTimer(std::string name, bool verbose);
 
     void start();
     void stop();
@@ -29,9 +25,9 @@ class DebugTimer {
private:
    std::chrono::time_point<std::chrono::system_clock> t_start_;
    std::chrono::time_point<std::chrono::system_clock> t_end_;
+    std::string name_;
+    bool verbose_;
     bool running_;
-    logging::Logger* logger_;
-    std::string message_;
 };
 
 } /* namespace search */
diff --git a/src/kbmod/search/logging.h b/src/kbmod/search/logging.h
deleted file mode 100644
index 193d26c3d..000000000
--- a/src/kbmod/search/logging.h
+++ /dev/null
@@ -1,236 +0,0 @@
-#ifndef KBMOD_LOGGER
-#define KBMOD_LOGGER
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-/*
- * The Logging class is a singleton that keeps a reference to all created
- * Loggers. The Loggers define the log format and IO method (stdout, file etc.).
- * Logging keeps references to Loggers in a registry. This registry is exposed
- * via the `getLogger` method, which doubles as a factory function for Loggers
- * This is modeled after Python's logging module. When `getLogger` is called from
- * Python (via the pybind11 bindings) it creates a new Python-side Logger object
- * and registers its reference. When called C++ side it creates a C++-side
- * Logger and registers its reference. Accessing a `getLogger` using a name that
- * was already registered - returns the reference from the registry (python or
- * internal).
- *
- * The obvious pitfall is the case when a user does not route through this cls,
- * and instead registers a Python-side Logger via Python's logging module. Then
- * these Python-side Loggers are not registered in the Logging's registry the
- * KBMOD Logging will default to using the C++ std::out logger. This can lead to
- * differing output formats if the Python Logger in question is re-configured.
- */
-namespace logging {
-// Python's dict[str: str]-like typedef for readability
-typedef std::unordered_map<std::string, std::string> sdict;
-
-// translate between Python's log-levels in a way that will continue to
-// respect any user-added in-between levels that are not necessarily
-// registered in C++
-// https://docs.python.org/3/library/logging.html#logging-levels
-enum LogLevel { DEBUG = 10, INFO = 20, WARNING = 30, ERROR = 40, CRITICAL = 50 };
-
-static std::unordered_map<std::string, LogLevel> StringToLogLevel{{"DEBUG", LogLevel::DEBUG},
-                                                                  {"INFO", LogLevel::INFO},
-                                                                  {"WARNING", LogLevel::WARNING},
-                                                                  {"ERROR", LogLevel::ERROR},
-                                                                  {"CRITICAL", LogLevel::CRITICAL}};
-
-static std::unordered_map<LogLevel, std::string> LogLevelToString{{LogLevel::DEBUG, "DEBUG"},
-                                                                  {LogLevel::INFO, "INFO"},
-                                                                  {LogLevel::WARNING, "WARNING"},
-                                                                  {LogLevel::ERROR, "ERROR"},
-                                                                  {LogLevel::CRITICAL, "CRITICAL"}};
-
-// Logger is a base class that dispatches the logging mechanism (IO mostly)
-// It wraps convenience methods and other shared functionality, such as
-// string formatters, commonly used by child Loggers. Expects the following
-// config key-values to exist:
-// `level`: `string``
-//     LogLevel enum value, the minimal level that is printed.
-// `datefmt`: `string`
-//     Timestamp template usable with `std::put_time`.
-// `format`: `string`
-//     log format template, currently supports ``asctime``, ``levelname``,
-//     ``names``, and ``message``. The fields in the string are expected to
-//     be formatted as in Python ``%{field}s``
-// `converter`: `string`
-//     Time zone converter, either `gmtime` or `localtime`.
-class Logger {
-public:
-    std::string name;
-    sdict config;
-    LogLevel level_threshold;
-
-    Logger(const std::string logger_name) : name(logger_name), config(), level_threshold{LogLevel::WARNING} {}
-
-    Logger(const std::string logger_name, const sdict conf) : name(logger_name), config(conf) {
-        level_threshold = StringToLogLevel[config["level"]];
-    }
-
-    virtual ~Logger() {}
-
-    std::string fmt_time() {
-        std::time_t now = std::time(nullptr);
-        std::tm timeinfo;
-
-        if (config["converter"] == "gmtime") {
-            timeinfo = *std::gmtime(&now);
-        } else {
-            timeinfo = *std::localtime(&now);
-        }
-
-        std::ostringstream timestamp;
-        timestamp << std::put_time(&timeinfo, config["datefmt"].c_str());
-        return timestamp.str();
-    }
-
-    std::string fmt_log(const std::string level, const std::string msg) {
-        std::string logfmt = config["format"];
-
-        std::regex t("%\\(asctime\\)s");
-        logfmt = std::regex_replace(logfmt, t, fmt_time());
-
-        std::regex l("%\\(levelname\\)s");
-        logfmt = std::regex_replace(logfmt, l, level);
-
-        std::regex n("%\\(name\\)s");
-        logfmt = std::regex_replace(logfmt, n, name);
-
-        std::regex m("%\\(message\\)s");
-        logfmt = std::regex_replace(logfmt, m, msg);
-
-        return logfmt;
-    }
-
-    virtual void log(std::string level, std::string msg) = 0;
-    void debug(std::string msg) { log("DEBUG", msg); }
-    void info(std::string msg) { log("INFO", msg); }
-    void warning(std::string msg) { log("WARNING", msg); }
-    void error(std::string msg) { log("ERROR", msg); }
-    void critical(std::string msg) { log("CRITICAL", msg); }
-};
-
-// Glorified std::cout.
-class CoutLogger : public Logger {
-public:
-    CoutLogger(std::string name, sdict config) : Logger(name, config) {}
-
-    virtual void log(const std::string level, const std::string msg) {
-        if (level_threshold <= StringToLogLevel[level]) std::cout << fmt_log(level, msg) << std::endl;
-    }
-};
-
-// Wrapper around the Python-side loggers. Basically dispatches the logging
-// calls to the Python-side object. Does no formatting, IO, or other management
-// except to ensure the message is dispatched to the correct in-Python method.
-#ifdef Py_PYTHON_H
-class PyLogger : public Logger {
-private:
-    py::object pylogger;
-
-public:
-    PyLogger(py::object logger) : Logger(logger.attr("name").cast<std::string>()), pylogger(logger) {}
-
-    virtual void log(std::string level, const std::string msg) {
-        for (char& ch : level) ch = std::tolower(ch);
-        pylogger.attr(level.c_str())(msg);
-    }
-};
-#endif  // Py_PYTHON_H
-
-// Logging is a singleton keeping the registry of all registered Loggers and
-// their default configuration. Use `getLoger(name)` to get or create a new
-// logger. When called, it will check if the logger exists and return a
-// reference if it does. If it doesn't exists, and the method was called from
-// Python's, it creates a new Python-side logger and returns it. When called
-// from C++, without being able to provide a reference to a, or a name of an
-// already existing, Python logger, it creates a default logger on the C++ side.
-// This logger will share at leas the `logging.basicConfig`-uration with any
-// already existing Python loggers. By default it will create a `CoutLogger`.
-// If literally nothing exists, and KBMOD is being driven purely from C++ side
-// it will instantiate a new default logger using the default configuration
-// that matches the default Python logging configuration as closely as
-// possible:
-//     - level: "WARNING"
-//     - datefmt": "%Y-%m-%dT%H:%M:%SZ"
-//     - converter": "localtime"
-//     - format: "[%(asctime)s %(levelname)s %(name)s] %(message)s"
-class Logging {
-private:
-    sdict default_config = {{"level", "WARNING"},
-                            {"datefmt", "%Y-%m-%dT%H:%M:%SZ"},
-                            {"converter", "localtime"},
-                            {"format", "[%(asctime)s %(levelname)s %(name)s] %(message)s"}};
-    std::unordered_map<std::string, Logger*> registry;
-
-    Logging() {}
-    ~Logging() {
-        for (auto elem = registry.begin(); elem != registry.end(); elem++) delete elem->second;
-    }
-
-public:
-    // delete copy operators - it's a singleton
-    Logging(Logging& other) = delete;
-    void operator=(const Logging&) = delete;
-
-    // get the singleton instance
-    static Logging* logging() {
-        static Logging instance;
-        return &instance;
-    }
-
-    void setConfig(sdict config) { Logging::logging()->default_config = config; }
-
-    sdict getConfig() { return Logging::logging()->default_config; }
-
-    // Generic template to create any kind of new Logger instance and add it to
-    // the registry at the same time. CamelCase to match the Python `logging`
-    // module
-    template <class LoggerCls>
-    static Logger* getLogger(std::string name, sdict config = {}) {
-        Logging* instance = Logging::logging();
-
-        // if key not found use default setup
-        if (instance->registry.find(name) == instance->registry.end()) {
-            sdict tmpconf = config.size() != 0 ? config : instance->getConfig();
-            instance->registry[name] = new LoggerCls(name, tmpconf);
-        }
-        return instance->registry[name];
-    }
-
-    static Logger* getLogger(std::string name, sdict config = {}) {
-        return Logging::logging()->getLogger<CoutLogger>(name, config);
-    }
-
-    void register_logger(Logger* logger) { Logging::logging()->registry[logger->name] = logger; }
-};
-
-// Convenience method to shorten the very long signature required to invoke
-// correct functionality: logging::Logging::logging()->getLogger(name)
-// to logging::getLogger(name)
-Logger* getLogger(std::string name, sdict config = {}) { return Logging::logging()->getLogger(name, config); }
-
-#ifdef Py_PYTHON_H
-static void logging_bindings(py::module& m) {
-    py::class_<Logging, std::unique_ptr<Logging, py::nodelete>>(m, "Logging")
-            .def(py::init([]() { return std::unique_ptr<Logging, py::nodelete>(Logging::logging()); }))
-            .def("setConfig", &Logging::setConfig)
-            .def_static("getLogger", [](py::str name) -> py::object {
-                py::module_ logging = py::module_::import("logging");
-                py::object pylogger = logging.attr("getLogger")(name);
-                Logging::logging()->register_logger(new PyLogger(pylogger));
-                return pylogger;
-            });
-}
-#endif /* Py_PYTHON_H */
-}  // namespace logging
-#endif  // KBMOD_LOGGER
diff --git a/src/kbmod/search/pydocs/stack_search_docs.h b/src/kbmod/search/pydocs/stack_search_docs.h
index 9ddb520b3..2ccbf73c2 100644
--- a/src/kbmod/search/pydocs/stack_search_docs.h
+++ b/src/kbmod/search/pydocs/stack_search_docs.h
@@ -28,6 +28,7 @@ static const auto DOC_StackSearch_set_min_lh = R"doc(
     The minimum likelihood value for a trajectory to be returned.
     )doc";
 
+
 static const auto DOC_StackSearch_enable_gpu_sigmag_filter = R"doc(
     Enable on-GPU sigma-G filtering.
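For context, the Python-side view of the bridge deleted above: C++ and Python shared one logger registry, so both sides formatted output identically. This sketch shows how the now-removed API was called before this revert (it no longer works after the patch applies); the import path matches the `from kbmod.search import Logging` lines removed elsewhere in this patch:

    from kbmod.search import Logging

    # Creates a stdlib logger and registers a PyLogger wrapper on the C++ side,
    # so C++ log calls under this name are routed to Python's handlers.
    logger = Logging.getLogger("kbmod.example")
    logger.warning("visible through the shared registry")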
diff --git a/src/kbmod/search/stack_search.cpp b/src/kbmod/search/stack_search.cpp
index dfb65bd8d..8d7076775 100644
--- a/src/kbmod/search/stack_search.cpp
+++ b/src/kbmod/search/stack_search.cpp
@@ -9,12 +9,6 @@ extern "C" void evaluateTrajectory(PsiPhiArrayMeta psi_phi_meta, void* psi_phi_v
                                    SearchParameters params, Trajectory* candidate);
 #endif
 
-// This logger is often used in this module so we might as well declare it
-// global, but this would generally be a one-liner like:
-//     logging::getLogger("kbmod.search.run_search") -> level(msg)
-// I'd imaging...
-auto rs_logger = logging::getLogger("kbmod.search.run_search");
-
 StackSearch::StackSearch(ImageStack& imstack) : stack(imstack) {
     debug_info = false;
     psi_phi_generated = false;
@@ -107,7 +101,7 @@ void StackSearch::set_start_bounds_y(int y_min, int y_max) {
 
 void StackSearch::prepare_psi_phi() {
     if (!psi_phi_generated) {
-        DebugTimer timer = DebugTimer("preparing Psi and Phi images", rs_logger);
+        DebugTimer timer = DebugTimer("Preparing Psi and Phi images", debug_info);
         fill_psi_phi_array_from_image_stack(psi_phi_array, stack, params.encode_num_bytes, debug_info);
         timer.stop();
         psi_phi_generated = true;
@@ -156,9 +150,9 @@ Trajectory StackSearch::search_linear_trajectory(short x, short y, float vx, flo
 }
 
 void StackSearch::search(std::vector<Trajectory>& search_list, int min_observations) {
-    DebugTimer core_timer = DebugTimer("core search", rs_logger);
+    DebugTimer core_timer = DebugTimer("Running core search", debug_info);
 
-    DebugTimer psi_phi_timer = DebugTimer("creating psi/phi buffers", rs_logger);
+    DebugTimer psi_phi_timer = DebugTimer("Creating psi/phi buffers", debug_info);
     prepare_psi_phi();
     psi_phi_timer.stop();
 
@@ -166,27 +160,19 @@
     int num_search_pixels =
             ((params.x_start_max - params.x_start_min) * (params.y_start_max - params.y_start_min));
     int max_results = num_search_pixels * RESULTS_PER_PIXEL;
-
-    // staple C++
-    std::stringstream logmsg;
-    logmsg << "Searching X=[" << params.x_start_min << ", " << params.x_start_max << "] "
-           << "Y=[" << params.y_start_min << ", " << params.y_start_max << "]\n"
-           << "Allocating space for " << max_results << " results.";
-    rs_logger->info(logmsg.str());
-
+    if (debug_info) {
+        std::cout << "Searching X=[" << params.x_start_min << ", " << params.x_start_max << "]"
+                  << " Y=[" << params.y_start_min << ", " << params.y_start_max << "]\n";
+        std::cout << "Allocating space for " << max_results << " results.\n";
+    }
     results = std::vector<Trajectory>(max_results);
-
-    // We need some better log messages, this clears the stream:
-    logmsg.str("");
-    logmsg << search_list.size() << " trajectories...";
-    rs_logger->info(logmsg.str());
-    // if (debug_info) std::cout << search_list.size() << " trajectories... \n" << std::flush;
+    if (debug_info) std::cout << search_list.size() << " trajectories... \n" << std::flush;
 
     // Set the minimum number of observations.
     params.min_observations = min_observations;
 
     // Do the actual search on the GPU.
-    DebugTimer search_timer = DebugTimer("search execution", rs_logger);
+    DebugTimer search_timer = DebugTimer("Running search", debug_info);
 #ifdef HAVE_CUDA
     deviceSearchFilter(psi_phi_array, params, search_list.size(), search_list.data(), max_results,
                        results.data());
@@ -195,7 +181,7 @@
 #endif
     search_timer.stop();
 
-    DebugTimer sort_timer = DebugTimer("Sorting results", rs_logger);
+    DebugTimer sort_timer = DebugTimer("Sorting results", debug_info);
     sort_results();
     sort_timer.stop();
     core_timer.stop();
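A back-of-envelope check on the result buffer sized in the hunk above: candidates are kept per starting pixel, so the allocation scales with the area of the search window. `RESULTS_PER_PIXEL` is a compile-time constant defined elsewhere in the library; the value used here is assumed for illustration only:

    x_min, x_max = 0, 4096
    y_min, y_max = 0, 4096
    RESULTS_PER_PIXEL = 8  # assumed for illustration

    max_results = (x_max - x_min) * (y_max - y_min) * RESULTS_PER_PIXEL
    print(f"Allocating space for {max_results} results.")  # 134217728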
diff --git a/src/kbmod/search/stack_search.h b/src/kbmod/search/stack_search.h
index 3324c17f2..20b36d013 100644
--- a/src/kbmod/search/stack_search.h
+++ b/src/kbmod/search/stack_search.h
@@ -6,13 +6,11 @@
 #include
 #include
 #include
-#include <sstream>  // formatting log msgs
 #include
 #include
 #include
 #include
 
-#include "logging.h"
 #include "common.h"
 #include "debug_timer.h"
 #include "geom.h"
diff --git a/src/kbmod/trajectory_explorer.py b/src/kbmod/trajectory_explorer.py
index b50e281a7..c1c73ad6d 100644
--- a/src/kbmod/trajectory_explorer.py
+++ b/src/kbmod/trajectory_explorer.py
@@ -4,10 +4,7 @@
 from kbmod.filters.sigma_g_filter import apply_single_clipped_sigma_g, SigmaGClipping
 from kbmod.masking import apply_mask_operations
 from kbmod.result_list import ResultRow
-from kbmod.search import StackSearch, StampCreator, Logging
-
-
-logger = Logging.getLogger(__name__)
+from kbmod.search import StackSearch, StampCreator
 
 
 class TrajectoryExplorer:
@@ -59,7 +56,8 @@ def initialize_data(self):
             # set the parameters.
             if self.config["encode_num_bytes"] > 0:
                 self.search.enable_gpu_encoding(self.config["encode_num_bytes"])
-                logger.debug(f"Setting encoding = {self.config['encode_num_bytes']}")
+                if self.debug:
+                    print(f"Setting encoding = {self.config['encode_num_bytes']}")
 
         # Allocate the search structure.
         self.search = StackSearch(self.im_stack)
diff --git a/src/kbmod/work_unit.py b/src/kbmod/work_unit.py
index 2e8936a45..01e828df5 100644
--- a/src/kbmod/work_unit.py
+++ b/src/kbmod/work_unit.py
@@ -10,7 +10,7 @@
 from yaml import dump, safe_load
 
 from kbmod.configuration import SearchConfiguration
-from kbmod.search import ImageStack, LayeredImage, PSF, RawImage, Logging
+from kbmod.search import ImageStack, LayeredImage, PSF, RawImage
 from kbmod.wcs_utils import (
     append_wcs_to_hdu_header,
     extract_wcs_from_hdu_header,
@@ -20,9 +20,6 @@
 )
 
 
-logger = Logging.getLogger(__name__)
-
-
 class WorkUnit:
     """The work unit is a storage and I/O class for all of the data needed
     for a full run of KBMOD, including the: the search parameters,
@@ -348,8 +345,7 @@ def to_fits(self, filename, overwrite=False):
             Indicates whether to overwrite an existing file.
         """
         if Path(filename).is_file() and not overwrite:
-            # are you sure you did not want to raise an error here?
-            logger.error(f"Warning: WorkUnit file {filename} already exists.")
+            print(f"Warning: WorkUnit file {filename} already exists.")
             return
 
         # Set up the initial HDU list, including the primary header
diff --git a/tests/test_debug_timer.py b/tests/test_debug_timer.py
index a66770301..41b3d831d 100644
--- a/tests/test_debug_timer.py
+++ b/tests/test_debug_timer.py
@@ -6,7 +6,7 @@ class test_debug_timer(unittest.TestCase):
     def test_create(self):
-        my_timer = DebugTimer("hi")
+        my_timer = DebugTimer("hi", False)
 
         time1 = my_timer.read()
 
         # We use sleep (100ms) because we are only interested in