Skip to content

Commit

Permalink
Merge pull request #163 from jarq6c/test-metrics
Browse files Browse the repository at this point in the history
Metrics: Default to numpy scalars for computation
  • Loading branch information
jarq6c authored Dec 9, 2021
2 parents 4930ced + b054d23 commit 4eb6f87
Show file tree
Hide file tree
Showing 3 changed files with 227 additions and 7 deletions.
2 changes: 1 addition & 1 deletion python/metrics/src/hydrotools/metrics/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "1.0.3"
__version__ = "1.1.3"
67 changes: 65 additions & 2 deletions python/metrics/src/hydrotools/metrics/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
Functions
---------
- compute_contingency_table
- convert_mapping_values
- probability_of_detection
- probability_of_false_detection
- probability_of_false_alarm
Expand All @@ -27,7 +28,7 @@
import numpy as np
import numpy.typing as npt
import pandas as pd
from typing import Union
from typing import Union, Mapping, MutableMapping

def mean_squared_error(
y_true: npt.ArrayLike,
Expand Down Expand Up @@ -84,7 +85,7 @@ def nash_sutcliffe_efficiency(
Returns
-------
score: float
NashSutcliffe model efficiency coefficient
Nash-Sutcliffe model efficiency coefficient
References
----------
Expand Down Expand Up @@ -153,6 +154,36 @@ def compute_contingency_table(
true_negative_key : ctab.loc[False, False]
})

def convert_mapping_values(
    mapping: Mapping[str, npt.DTypeLike],
    converter: np.dtype = np.float64
) -> MutableMapping:
    """Return a copy of mapping with every value passed through converter.
    Primarily used to coerce contingency-table entries to a consistent
    numeric type before computing metrics.
        
    Parameters
    ----------
    mapping: dict-like, required
        Input mapping with string keys and values coercible to a numpy
        data type.
    converter: numpy.dtype, optional, default numpy.float64
        Data type or callable applied to each value to produce the
        converted result.
        
    Returns
    -------
    converted_mapping: dict-like, same type as mapping
        New mapping of the same concrete type with converted values.
        
    """
    # Build a plain dict of converted values, then rebuild the original
    # mapping type from it so callers get back what they passed in
    # (e.g. dict, pandas.Series).
    converted = {label: converter(raw) for label, raw in dict(mapping).items()}
    return type(mapping)(converted)

def probability_of_detection(
contingency_table: Union[dict, pd.DataFrame, pd.Series],
true_positive_key: str = 'true_positive',
Expand All @@ -177,6 +208,10 @@ def probability_of_detection(
Probability of detection.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
a = contingency_table[true_positive_key]
c = contingency_table[false_negative_key]
return a / (a+c)
Expand Down Expand Up @@ -205,6 +240,10 @@ def probability_of_false_detection(
Probability of false detection.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
b = contingency_table[false_positive_key]
d = contingency_table[true_negative_key]
return b / (b+d)
Expand Down Expand Up @@ -233,6 +272,10 @@ def probability_of_false_alarm(
Probability of false alarm.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
b = contingency_table[false_positive_key]
a = contingency_table[true_positive_key]
return b / (b+a)
Expand Down Expand Up @@ -264,6 +307,10 @@ def threat_score(
Threat score.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
a = contingency_table[true_positive_key]
b = contingency_table[false_positive_key]
c = contingency_table[false_negative_key]
Expand Down Expand Up @@ -296,6 +343,10 @@ def frequency_bias(
Frequency bias.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
a = contingency_table[true_positive_key]
b = contingency_table[false_positive_key]
c = contingency_table[false_negative_key]
Expand Down Expand Up @@ -331,6 +382,10 @@ def percent_correct(
Percent correct.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
a = contingency_table[true_positive_key]
b = contingency_table[false_positive_key]
c = contingency_table[false_negative_key]
Expand Down Expand Up @@ -365,6 +420,10 @@ def base_chance(
Base chance to hit by chance.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
a = contingency_table[true_positive_key]
b = contingency_table[false_positive_key]
c = contingency_table[false_negative_key]
Expand Down Expand Up @@ -401,6 +460,10 @@ def equitable_threat_score(
Equitable threat score.
"""
# Convert values to numpy scalars
contingency_table = convert_mapping_values(contingency_table)

# Compute
a_r = base_chance(contingency_table,
true_positive_key=true_positive_key,
false_positive_key=false_positive_key,
Expand Down
Loading

0 comments on commit 4eb6f87

Please sign in to comment.