Revert "Merge pull request #456 from dirac-institute/proposals/logging"
This reverts commit 3be19c7, reversing
changes made to a87c111.
DinoBektesevic committed Feb 29, 2024
1 parent 46de496 commit 6ebaa9f
Showing 19 changed files with 113 additions and 430 deletions.
57 changes: 2 additions & 55 deletions src/kbmod/__init__.py
@@ -1,18 +1,11 @@
 import warnings
 
 try:
-    from ._version import version as __version__  # noqa: F401
+    from ._version import version as __version__
 except ImportError:
     warnings.warn("Unable to determine the package version. " "This is likely a broken installation.")
 
-import os
-import time
-import logging as _logging
-from logging import config as _config
-
-# Import the rest of the package
-from kbmod.search import Logging
-from . import (  # noqa: F401
+from . import (
     analysis,
     analysis_utils,
     data_interface,
@@ -26,49 +19,3 @@
 from .search import PSF, RawImage, LayeredImage, ImageStack, StackSearch
 from .standardizers import Standardizer, StandardizerConfig
 from .image_collection import ImageCollection
-
-
-# there are ways for this to go to a file, but is it worth it?
-# Then we have to roll a whole logging.config_from_shared_config thing
-_SHARED_LOGGING_CONFIG = {
-    "level": os.environ.get("KB_LOG_LEVEL", "WARNING"),
-    "format": "[%(asctime)s %(levelname)s %(name)s] %(message)s",
-    "datefmt": "%Y-%m-%dT%H:%M:%SZ",
-    "converter": "gmtime",
-}
-
-# Declare our own root logger, so that we don't start printing DEBUG
-# messages from every package we import
-__PY_LOGGING_CONFIG = {
-    "version": 1.0,
-    "formatters": {
-        "standard": {
-            "format": _SHARED_LOGGING_CONFIG["format"],
-        },
-    },
-    "handlers": {
-        "default": {
-            "level": _SHARED_LOGGING_CONFIG["level"],
-            "formatter": "standard",
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stderr",
-        }
-    },
-    "loggers": {
-        "kbmod": {
-            "handlers": ["default"],
-            "level": _SHARED_LOGGING_CONFIG["level"],
-        }
-    },
-}
-
-# The timezone converter can not be configured via the config submodule for
-# some reason, only directly. Must be configured after loading the dictConfig
-_config.dictConfig(__PY_LOGGING_CONFIG)
-if _SHARED_LOGGING_CONFIG["converter"] == "gmtime":
-    _logging.Formatter.converter = time.gmtime
-else:
-    _logging.Formatter.converter = time.localtime
-
-# Configure the CPP logging wrapper with the same setup
-Logging().setConfig(_SHARED_LOGGING_CONFIG)
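
For context, the block removed above follows a standard Python logging pattern: build a dict-based configuration for a single package-level logger (so imported third-party packages do not start emitting DEBUG messages), load it with logging.config.dictConfig, and then set the formatter's time converter to time.gmtime so timestamps render in UTC. Below is a minimal, standalone sketch of that pattern using only the standard library; the package name "mypackage" is a placeholder, and the C++ Logging wrapper from kbmod.search is deliberately omitted.

import logging
import logging.config
import os
import time

# Configure a stderr handler only for "mypackage"; other packages' loggers are untouched.
LOG_CONFIG = {
    "version": 1,
    "formatters": {
        "standard": {"format": "[%(asctime)s %(levelname)s %(name)s] %(message)s"},
    },
    "handlers": {
        "default": {
            "level": os.environ.get("KB_LOG_LEVEL", "WARNING"),
            "formatter": "standard",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        }
    },
    "loggers": {
        "mypackage": {
            "handlers": ["default"],
            "level": os.environ.get("KB_LOG_LEVEL", "WARNING"),
        }
    },
}

logging.config.dictConfig(LOG_CONFIG)

# The converter is a class attribute and cannot be set through dictConfig,
# so it is assigned directly after the config is loaded (UTC timestamps).
logging.Formatter.converter = time.gmtime

logging.getLogger("mypackage").warning("logging configured")
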
7 changes: 2 additions & 5 deletions src/kbmod/analysis/create_stamps.py
@@ -6,13 +6,9 @@
 import numpy as np
 from astropy.io import fits
 
-from kbmod.search import Logging
 from kbmod.file_utils import *
 
 
-logger = Logging.getLogger(__name__)
-
-
 class CreateStamps(object):
     def __init__(self):
         return
@@ -121,7 +117,8 @@ def max_value_stamp_filter(self, stamps, center_thresh, verbose=True):
             An np array of stamp indices to keep.
         """
         keep_stamps = np.where(np.max(stamps, axis=1) > center_thresh)[0]
-        logger.info(f"Center filtering keeps {len(keep_stamps)} out of {len(stamps)} stamps.")
+        if verbose:
+            print("Center filtering keeps %i out of %i stamps." % (len(keep_stamps), len(stamps)))
         return keep_stamps
 
     def load_results(self, res_filename):
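
As a side note, the center-filtering line restored above is easy to check in isolation: each row of stamps is one flattened stamp, and a stamp is kept only when its brightest pixel exceeds center_thresh. A toy sketch with made-up values:

import numpy as np

# Each row is a flattened stamp; keep it only if its peak pixel beats the threshold.
stamps = np.array([
    [0.1, 0.9, 0.2],  # peak 0.9 -> kept
    [0.1, 0.2, 0.3],  # peak 0.3 -> dropped
    [0.8, 0.1, 0.0],  # peak 0.8 -> kept
])
center_thresh = 0.5

keep_stamps = np.where(np.max(stamps, axis=1) > center_thresh)[0]
print(keep_stamps)  # [0 2]
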
21 changes: 11 additions & 10 deletions src/kbmod/analysis_utils.py
@@ -14,9 +14,6 @@
 from .result_list import ResultList, ResultRow
 
 
-logger = kb.Logging.getLogger(__name__)
-
-
 class PostProcess:
     """This class manages the post-processing utilities used to filter out and
     otherwise remove false positives from the KBMOD search. This includes,
@@ -84,13 +81,17 @@ def load_and_filter_results(
         else:
             stats_filter = CombinedStatsFilter(min_obs=self.num_obs)
 
-        logger.info("Retrieving Results")
+        print("---------------------------------------")
+        print("Retrieving Results")
+        print("---------------------------------------")
         while likelihood_limit is False:
-            logger.info("Getting results...")
+            print("Getting results...")
             results = search.get_results(res_num, chunk_size)
-            logger.info("Chunk Start = %i" % res_num)
-            logger.info("Chunk Max Likelihood = %.2f" % results[0].lh)
-            logger.info("Chunk Min. Likelihood = %.2f" % results[-1].lh)
+            print("---------------------------------------")
+            print("Chunk Start = %i" % res_num)
+            print("Chunk Max Likelihood = %.2f" % results[0].lh)
+            print("Chunk Min. Likelihood = %.2f" % results[-1].lh)
+            print("---------------------------------------")
 
             result_batch = ResultList(self._mjds)
             for i, trj in enumerate(results):
@@ -109,7 +110,7 @@ def load_and_filter_results(
                     total_count += 1
 
             batch_size = result_batch.num_results()
-            logger.info("Extracted batch of %i results for total of %i" % (batch_size, total_count))
+            print("Extracted batch of %i results for total of %i" % (batch_size, total_count))
             if batch_size > 0:
                 apply_clipped_sigma_g(clipper, result_batch, self.num_cores)
                 result_batch.apply_filter(stats_filter)
@@ -134,7 +135,7 @@ def apply_clustering(self, result_list, cluster_params):
         # Skip clustering if there is nothing to cluster.
         if result_list.num_results() == 0:
             return
-        logger.info("Clustering %i results" % result_list.num_results())
+        print("Clustering %i results" % result_list.num_results(), flush=True)
 
         # Do the clustering and the filtering.
         f = DBSCANFilter(
9 changes: 3 additions & 6 deletions src/kbmod/configuration.py
@@ -6,10 +6,6 @@
 from pathlib import Path
 import yaml
 from yaml import dump, safe_load
-from kbmod.search import Logging
 
 
-logger = Logging.getLogger(__name__)
-
-
 class SearchConfiguration:
@@ -124,7 +120,8 @@ def set(self, param, value, strict=True):
         if param not in self._params:
             if strict:
                 raise KeyError(f"Invalid parameter: {param}")
-            logger.warning(f"Ignoring invalid parameter: {param}")
+            else:
+                print(f"Ignoring invalid parameter: {param}")
         else:
             self._params[param] = value
 
@@ -284,7 +281,7 @@ def to_file(self, filename, overwrite=False):
             Indicates whether to overwrite an existing file.
         """
         if Path(filename).is_file() and not overwrite:
-            logger.warning(f"Configuration file {filename} already exists.")
+            print(f"Warning: Configuration file {filename} already exists.")
             return
 
         with open(filename, "w") as file:
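
The set and to_file changes above read most clearly from the caller's side: with strict=True (the default) an unknown parameter raises KeyError, with strict=False it now prints a warning and is ignored, and to_file refuses to clobber an existing file unless overwrite=True. A rough usage sketch follows; the no-argument constructor and the "num_obs" parameter name are assumptions made for illustration, not verified against the rest of the file.

from kbmod.configuration import SearchConfiguration

config = SearchConfiguration()  # assumed default constructor

# Known parameter ("num_obs" is assumed to exist in the defaults): the value is stored.
config.set("num_obs", 10)

# Unknown parameter with strict=False: prints "Ignoring invalid parameter: ..." and is dropped.
config.set("not_a_real_param", 1, strict=False)

# Writes YAML; skipped with a warning if the file already exists and overwrite is False.
config.to_file("search_config.yaml", overwrite=True)
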
40 changes: 24 additions & 16 deletions src/kbmod/data_interface.py
@@ -7,14 +7,16 @@
 
 from kbmod.configuration import SearchConfiguration
 from kbmod.file_utils import *
-from kbmod.search import ImageStack, LayeredImage, PSF, RawImage, Logging
+from kbmod.search import (
+    ImageStack,
+    LayeredImage,
+    PSF,
+    RawImage,
+)
 from kbmod.wcs_utils import append_wcs_to_hdu_header
 from kbmod.work_unit import WorkUnit, raw_image_to_hdu
 
 
-logger = Logging.getLogger(__name__)
-
-
 def load_deccam_layered_image(filename, psf):
     """Load a layered image from the legacy deccam format.
 
@@ -108,9 +110,6 @@ def save_deccam_layered_image(img, filename, wcs=None, overwrite=True):
     hdul.writeto(filename, overwrite=overwrite)
 
 
-logger = kb.Logging.getLogger(__name__)
-
-
 def load_input_from_individual_files(
     im_filepath,
     time_file,
@@ -147,17 +146,21 @@ def load_input_from_individual_files(
     visit_times : `list`
         A list of MJD times.
     """
-    logger.info("Loading Images")
+    print("---------------------------------------")
+    print("Loading Images")
+    print("---------------------------------------")
 
     # Load a mapping from visit numbers to the visit times. This dictionary stays
     # empty if no time file is specified.
     image_time_dict = FileUtils.load_time_dictionary(time_file)
-    logger.info(f"Loaded {len(image_time_dict)} time stamps.")
+    if verbose:
+        print(f"Loaded {len(image_time_dict)} time stamps.")
 
     # Load a mapping from visit numbers to PSFs. This dictionary stays
    # empty if no time file is specified.
     image_psf_dict = FileUtils.load_psf_dictionary(psf_file)
-    logger.info(f"Loaded {len(image_psf_dict)} image PSFs stamps.")
+    if verbose:
+        print(f"Loaded {len(image_psf_dict)} image PSFs stamps.")
 
     # Retrieve the list of visits (file names) in the data directory.
     patch_visits = sorted(os.listdir(im_filepath))
@@ -169,7 +172,8 @@
     for visit_file in np.sort(patch_visits):
         # Skip non-fits files.
         if not ".fits" in visit_file:
-            logger.info(f"Skipping non-FITS file {visit_file}")
+            if verbose:
+                print(f"Skipping non-FITS file {visit_file}")
             continue
 
         # Compute the full file path for loading.
@@ -190,7 +194,8 @@
 
         # Skip files without a valid visit ID.
         if visit_id is None:
-            logger.warning(f"WARNING: Unable to extract visit ID for {visit_file}.")
+            if verbose:
+                print(f"WARNING: Unable to extract visit ID for {visit_file}.")
             continue
 
         # Check if the image has a specific PSF.
@@ -199,7 +204,8 @@
             psf = PSF(image_psf_dict[visit_id])
 
         # Load the image file and set its time.
-        logger.info(f"Loading file: {full_file_path}")
+        if verbose:
+            print(f"Loading file: {full_file_path}")
         img = load_deccam_layered_image(full_file_path, psf)
         time_stamp = img.get_obstime()
 
@@ -209,20 +215,22 @@
             img.set_obstime(time_stamp)
 
         if time_stamp <= 0.0:
-            logger.warning(f"WARNING: No valid timestamp provided for {visit_file}.")
+            if verbose:
+                print(f"WARNING: No valid timestamp provided for {visit_file}.")
             continue
 
         # Check if we should filter the record based on the time bounds.
         if mjd_lims is not None and (time_stamp < mjd_lims[0] or time_stamp > mjd_lims[1]):
-            logger.info(f"Pruning file {visit_file} by timestamp={time_stamp}.")
+            if verbose:
+                print(f"Pruning file {visit_file} by timestamp={time_stamp}.")
             continue
 
         # Save image, time, and WCS information.
         visit_times.append(time_stamp)
         images.append(img)
         wcs_list.append(curr_wcs)
 
-    logger.info(f"Loaded {len(images)} images")
+    print(f"Loaded {len(images)} images")
     stack = ImageStack(images)
 
     return (stack, wcs_list, visit_times)
3 changes: 1 addition & 2 deletions src/kbmod/fake_data/fake_data_creator.py
@@ -17,7 +17,6 @@
 from kbmod.data_interface import save_deccam_layered_image
 from kbmod.file_utils import *
 from kbmod.search import *
-from kbmod.search import Logging
 from kbmod.wcs_utils import append_wcs_to_hdu_header
 from kbmod.work_unit import WorkUnit
 
@@ -280,7 +279,7 @@ def save_fake_data_to_dir(self, data_dir):
         # Make the subdirectory if needed.
         dir_path = Path(data_dir)
         if not dir_path.is_dir():
-            logger.info(f"Directory {data_dir} does not exist. Creating.")
+            print("Directory '%s' does not exist. Creating." % data_dir)
             os.mkdir(data_dir)
 
         # Save each of the image files.
27 changes: 14 additions & 13 deletions src/kbmod/filters/stamp_filters.py
@@ -18,13 +18,9 @@
     StampCreator,
     StampParameters,
     StampType,
-    Logging,
 )
 
 
-logger = Logging.getLogger(__name__)
-
-
 class BaseStampFilter(abc.ABC):
     """The base class for the various stamp filters.
 
if type(stamp_params) is SearchConfiguration:
stamp_params = extract_search_parameters_from_config(stamp_params)

if result_list.num_results() <= 0:
logger.debug("Stamp Filtering : skipping, othing to filter.")
else:
logger.debug(f"Stamp filtering {result_list.num_results()} results.")
logger.debug(f"Using filtering params: {stamp_params}")
logger.debug(f"Using chunksize = {chunk_size}")
if debug:
print("---------------------------------------")
print("Applying Stamp Filtering")
print("---------------------------------------")
if result_list.num_results() <= 0:
print("Skipping. Nothing to filter.")
else:
print(f"Stamp filtering {result_list.num_results()} results.")
print(stamp_params)
print(f"Using chunksize = {chunk_size}")

# Run the stamp creation and filtering in batches of chunk_size.
start_time = time.time()
Expand Down Expand Up @@ -383,9 +383,10 @@ def get_coadds_and_filter(result_list, im_stack, stamp_params, chunk_size=100000

# Do the actual filtering of results
result_list.filter_results(all_valid_inds)

logger.debug(f"Keeping {result_list.num_results()} results")
logger.debug("{:.2f}s elapsed".format(time.time() - start_time))
if debug:
print("Keeping %i results" % result_list.num_results(), flush=True)
time_elapsed = time.time() - start_time
print("{:.2f}s elapsed".format(time_elapsed))


def append_all_stamps(result_list, im_stack, stamp_radius):
Expand Down
The remaining 12 changed files are not shown here.
