Skip to content

Commit

Permalink
Merge pull request #404 from lsst/tickets/OPSIM-1144
Browse files Browse the repository at this point in the history
Tickets/opsim 1144
  • Loading branch information
yoachim authored Apr 8, 2024
2 parents 44aec55 + f78b626 commit 2189dfe
Show file tree
Hide file tree
Showing 3 changed files with 31 additions and 19 deletions.
32 changes: 18 additions & 14 deletions rubin_sim/maf/maf_contrib/periodic_star_modulation_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,20 +187,24 @@ def run(self, data_slice, slice_point=None):
noise = np.random.randn(true_lc.size) * dmag
# Suppress warnings about failing on covariance
fit_obj = PeriodicStar(t_subrun["filter"])
with warnings.catch_warnings():
warnings.simplefilter("ignore")
# If it fails to converge,
# save values that should fail later
try:
parm_vals, pcov = curve_fit(
fit_obj,
t_subrun["time"],
true_lc + noise,
p0=true_params,
sigma=dmag,
)
except RuntimeError:
parm_vals = true_params * 0 + np.inf
# check if we have enough points
if np.size(true_params) >= np.size(fit_obj):
parm_vals = true_params * 0 + np.inf
else:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
# If it fails to converge,
# save values that should fail later
try:
parm_vals, pcov = curve_fit(
fit_obj,
t_subrun["time"],
true_lc + noise,
p0=true_params,
sigma=dmag,
)
except RuntimeError:
parm_vals = true_params * 0 + np.inf
fits[i, :] = parm_vals

# Throw out any magnitude fits if there are no
Expand Down
2 changes: 1 addition & 1 deletion rubin_sim/maf/maf_contrib/xrb_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ def __init__(
night_col="night",
pts_needed=2,
pts_early=2,
t_early=2,
t_early=7,
mjd0=None,
output_lc=False,
badval=-666,
Expand Down
16 changes: 12 additions & 4 deletions rubin_sim/maf/run_comparison/gather_summaries.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ def combine_result_dbs(run_dirs, dbfilename="resultsDb_sqlite.db"):
run_names.append(dirname_to_runname(dname))

# query to grab all the summary stats
sql_q = "SELECT metrics.metric_name, metrics.metric_info_label, "
sql_q += "metrics.slicer_name, summarystats.summary_name, "
sql_q += "summarystats.summary_value "
sql_q = "SELECT summarystats.summary_value, "
sql_q += "metrics.metric_name, metrics.metric_info_label, "
sql_q += "metrics.slicer_name, summarystats.summary_name "
sql_q += "FROM summarystats INNER JOIN metrics ON metrics.metric_id=summarystats.metric_id"

rows = []
Expand Down Expand Up @@ -71,6 +71,13 @@ def combine_result_dbs(run_dirs, dbfilename="resultsDb_sqlite.db"):
columns=col_names,
index=[row_name],
)

# Can have duplicate columns if MAF was run multiple times.
# Remove duplicates:
# https://stackoverflow.com/questions/14984119/
# python-pandas-remove-duplicate-columns
row = row.loc[:, ~row.columns.duplicated()].copy()

rows.append(row)
# Create final large DataFrame to hold everything
all_cols = np.unique(np.concatenate([r.columns.values for r in rows]))
Expand All @@ -83,7 +90,8 @@ def combine_result_dbs(run_dirs, dbfilename="resultsDb_sqlite.db"):

# Put each row into the final DataFrame
for row_name, row in zip(run_names, rows):
result_df.loc[row_name][row.columns] = np.ravel(row.values)
result_df.loc[row_name, row.columns] = np.ravel(row.values)

return result_df


Expand Down

0 comments on commit 2189dfe

Please sign in to comment.