[DLMED] update according to comments
Nic-Ma committed Jul 23, 2020
1 parent b79ca92 commit 68cfbd3
Showing 10 changed files with 28 additions and 33 deletions.
5 changes: 2 additions & 3 deletions monai/handlers/mean_dice.py
@@ -48,9 +48,8 @@ def __init__(
a combination of argmax and to_onehot. Defaults to False.
sigmoid: whether to add sigmoid function to the output prediction before computing Dice.
Defaults to False.
- other_act: if don't want to use `sigmoid`, use other callable function to execute
- other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.tanh(x)`.
+ other_act: callable function to replace `sigmoid` as activation layer if needed, Defaults to ``None``.
+ for example: `other_act = torch.tanh`.
logit_thresh: the threshold value to round value to 0.0 and 1.0. Defaults to None (no thresholding).
output_transform: transform the ignite.engine.state.output into [y_pred, y] pair.
device: device specification in case of distributed computation usage.
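A minimal sketch of the recommended usage, assuming the MONAI 0.2-era handler API documented above (the `output_transform` dictionary keys are hypothetical):

```python
import torch
from monai.handlers import MeanDice

# pass a plain callable instead of a lambda, as the revised docstring suggests
mean_dice = MeanDice(
    sigmoid=False,             # leave the built-in sigmoid off...
    other_act=torch.tanh,      # ...and apply tanh to the prediction instead
    output_transform=lambda output: (output["pred"], output["label"]),
)
```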
8 changes: 4 additions & 4 deletions monai/handlers/roc_auc.py
@@ -27,9 +27,8 @@ class ROCAUC(Metric):  # type: ignore # incorrectly typed due to optional_import
Args:
to_onehot_y: whether to convert `y` into the one-hot format. Defaults to False.
softmax: whether to add softmax function to `y_pred` before computation. Defaults to False.
- other_act: if don't want to use `softmax`, use other callable function to execute
- other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.log_softmax(x)`.
+ other_act: callable function to replace `softmax` as activation layer if needed, Defaults to ``None``.
+ for example: `other_act = lambda x: torch.log_softmax(x)`.
average: {``"macro"``, ``"weighted"``, ``"micro"``, ``"none"``}
Type of averaging performed if not binary classification. Defaults to ``"macro"``.
@@ -48,7 +47,8 @@ class ROCAUC(Metric):  # type: ignore # incorrectly typed due to optional_import
device: device specification in case of distributed computation usage.
Note:
- ROCAUC expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or confidence values.
+ ROCAUC expects y to be comprised of 0's and 1's.
+ y_pred must either be probability estimates or confidence values.
"""

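The ROCAUC handler follows the same pattern; a sketch under the same assumptions (the `output_transform` shown is hypothetical, and `dim=1` is spelled out rather than left implicit):

```python
import torch
from monai.handlers import ROCAUC

auc_metric = ROCAUC(
    to_onehot_y=True,
    other_act=lambda x: torch.log_softmax(x, dim=1),  # stands in for softmax=True
    output_transform=lambda output: (output["pred"], output["label"]),
)
```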
15 changes: 7 additions & 8 deletions monai/losses/dice.py
@@ -56,7 +56,7 @@ def __init__(
softmax: if True, apply a softmax function to the prediction.
other_act: if don't want to use `sigmoid` or `softmax`, use other callable function to execute
other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.tanh(x)`.
+ `other_act = torch.tanh`.
squared_pred: use squared versions of targets and predictions in the denominator or not.
jaccard: compute Jaccard Index (soft IoU) instead of dice or not.
reduction: {``"none"``, ``"mean"``, ``"sum"``}
@@ -69,10 +69,12 @@ def __init__(
Raises:
ValueError: reduction={reduction} is invalid. Valid options are: none, mean or sum.
ValueError: can only enable 1 of sigmoid, softmax and other_act.
+ ValueError: other_act must be a Callable function.
"""
super().__init__(reduction=LossReduction(reduction).value)

+ if other_act is not None and not callable(other_act):
+ raise ValueError("other_act must be a Callable function.")
if int(sigmoid) + int(softmax) + int(other_act is not None) > 1:
raise ValueError("can only enable 1 of sigmoid, softmax and other_act.")
self.include_background = include_background
@@ -105,8 +107,6 @@ def forward(self, input: torch.Tensor, target: torch.Tensor, smooth: float = 1e-
input = torch.softmax(input, 1)

if self.other_act is not None:
- if not callable(self.other_act):
- raise ValueError("other_act must be a Callable function.")
input = self.other_act(input)

if self.to_onehot_y:
@@ -220,7 +220,7 @@ def __init__(
softmax: If True, apply a softmax function to the prediction.
other_act: if don't want to use `sigmoid` or `softmax`, use other callable function to execute
other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.tanh(x)`.
+ `other_act = torch.tanh`.
squared_pred: use squared versions of targets and predictions in the denominator or not.
w_type: {``"square"``, ``"simple"``, ``"uniform"``}
Type of function to transform ground truth volume to a weight factor. Defaults to ``"square"``.
@@ -237,7 +237,8 @@ def __init__(
"""
super().__init__(reduction=LossReduction(reduction).value)

+ if other_act is not None and not callable(other_act):
+ raise ValueError("other_act must be a Callable function.")
if int(sigmoid) + int(softmax) + int(other_act is not None) > 1:
raise ValueError("can only enable 1 of sigmoid, softmax and other_act.")
self.include_background = include_background
@@ -274,8 +275,6 @@ def forward(self, input: torch.Tensor, target: torch.Tensor, smooth: float = 1e-
input = torch.softmax(input, 1)

if self.other_act is not None:
- if not callable(self.other_act):
- raise ValueError("other_act must be a Callable function.")
input = self.other_act(input)

if self.to_onehot_y:
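Hoisting the `callable` check from `forward` into `__init__` makes an invalid `other_act` fail at construction time instead of on the first forward pass. A short sketch of both behaviours, reusing the configuration and tensors from the updated case in tests/test_dice_loss.py below:

```python
import torch
from monai.losses import DiceLoss

loss_fn = DiceLoss(include_background=True, other_act=torch.tanh)
pred = torch.tensor([[[[1.0, -1.0], [-1.0, 1.0]]], [[[1.0, -1.0], [-1.0, 1.0]]]])
target = torch.tensor([[[[1.0, 1.0], [1.0, 1.0]]], [[[1.0, 0.0], [1.0, 0.0]]]])
print(loss_fn(pred, target))

# the callable check now fails fast, in the constructor rather than in forward():
try:
    DiceLoss(other_act="tanh")  # a string is not callable
except ValueError as err:
    print(err)  # "other_act must be a Callable function."
```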
7 changes: 3 additions & 4 deletions monai/losses/tversky.py
@@ -51,7 +51,7 @@ def __init__(
softmax: If True, apply a softmax function to the prediction.
other_act: if don't want to use `sigmoid` or `softmax`, use other callable function to execute
other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.tanh(x)`.
+ `other_act = torch.tanh`.
alpha: weight of false positives
beta: weight of false negatives
reduction: {``"none"``, ``"mean"``, ``"sum"``}
@@ -67,7 +67,8 @@ def __init__(
"""

super().__init__(reduction=LossReduction(reduction).value)

+ if other_act is not None and not callable(other_act):
+ raise ValueError("other_act must be a Callable function.")
if int(sigmoid) + int(softmax) + int(other_act is not None) > 1:
raise ValueError("can only enable 1 of sigmoid, softmax and other_act.")
self.include_background = include_background
@@ -100,8 +101,6 @@ def forward(self, input: torch.Tensor, target: torch.Tensor, smooth: float = 1e-
input = torch.softmax(input, 1)

if self.other_act is not None:
- if not callable(self.other_act):
- raise ValueError("other_act must be a Callable function.")
input = self.other_act(input)

if self.to_onehot_y:
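TverskyLoss gets the same constructor-time validation. A minimal sketch mirroring the updated case in tests/test_tversky_loss.py below:

```python
import torch
from monai.losses import TverskyLoss

loss_fn = TverskyLoss(include_background=True, other_act=torch.tanh)
pred = torch.tensor([[[[1.0, -1.0], [-1.0, 1.0]]], [[[1.0, -1.0], [-1.0, 1.0]]]])
target = torch.tensor([[[[1.0, 1.0], [1.0, 1.0]]], [[[1.0, 0.0], [1.0, 0.0]]]])
print(loss_fn(pred, target))
```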
13 changes: 6 additions & 7 deletions monai/metrics/meandice.py
@@ -36,12 +36,12 @@ class DiceMetric:
mutually_exclusive: if True, `y_pred` will be converted into a binary matrix using
a combination of argmax and to_onehot. Defaults to False.
sigmoid: whether to add sigmoid function to y_pred before computation. Defaults to False.
- other_act: if don't want to use `sigmoid`, use other callable function to execute
- other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.tanh(x)`.
+ other_act: callable function to replace `sigmoid` as activation layer if needed, Defaults to ``None``.
+ for example: `other_act = torch.tanh`.
logit_thresh: the threshold value used to convert (for example, after sigmoid if `sigmoid=True`)
`y_pred` into a binary matrix. Defaults to 0.5.
- reduction: {``"none"``, ``"mean"``, ``"sum"``, ``"mean_batch"``, ``"sum_batch"``, ``"mean_channel"``, ``"sum_channel"``}
+ reduction: {``"none"``, ``"mean"``, ``"sum"``, ``"mean_batch"``, ``"sum_batch"``,
+ ``"mean_channel"``, ``"sum_channel"``}
Define the mode to reduce computation result of 1 batch data. Defaults to ``"mean"``.
"""
@@ -151,9 +151,8 @@ def compute_meandice(
mutually_exclusive: if True, `y_pred` will be converted into a binary matrix using
a combination of argmax and to_onehot. Defaults to False.
sigmoid: whether to add sigmoid function to y_pred before computation. Defaults to False.
- other_act: if don't want to use `sigmoid`, use other callable function to execute
- other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.tanh(x)`.
+ other_act: callable function to replace `sigmoid` as activation layer if needed, Defaults to ``None``.
+ for example: `other_act = torch.tanh`.
logit_thresh: the threshold value used to convert (for example, after sigmoid if `sigmoid=True`)
`y_pred` into a binary matrix. Defaults to 0.5.
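A sketch of `compute_meandice` with a plain callable, assuming the signature documented above; the tensors here are toy values, and `logit_thresh=0.0` binarizes the tanh output, which lies in (-1, 1):

```python
import torch
from monai.metrics import compute_meandice

y_pred = torch.tensor([[[[1.0, -1.0], [-1.0, 1.0]]]])  # raw scores, shape (1, 1, 2, 2)
y = torch.tensor([[[[1.0, 0.0], [0.0, 1.0]]]])
dice = compute_meandice(
    y_pred, y,
    include_background=True,
    other_act=torch.tanh,  # plain callable instead of `lambda x: torch.tanh(x)`
    logit_thresh=0.0,      # tanh outputs lie in (-1, 1), so threshold at 0
)
print(dice)  # per-sample, per-class Dice scores
```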
5 changes: 2 additions & 3 deletions monai/metrics/rocauc.py
@@ -71,9 +71,8 @@ def compute_roc_auc(
example shape: [16, 1] will be converted into [16, 2] (where `2` is inferred from `y_pred`).
to_onehot_y: whether to convert `y` into the one-hot format. Defaults to False.
softmax: whether to add softmax function to `y_pred` before computation. Defaults to False.
- other_act: if don't want to use `softmax`, use other callable function to execute
- other activation layers, Defaults to ``None``. for example:
- `other_act = lambda x: torch.log_softmax(x)`.
+ other_act: callable function to replace `softmax` as activation layer if needed, Defaults to ``None``.
+ for example: `other_act = lambda x: torch.log_softmax(x)`.
average: {``"macro"``, ``"weighted"``, ``"micro"``, ``"none"``}
Type of averaging performed if not binary classification.
Defaults to ``"macro"``.
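The functional metric works the same way; a sketch assuming the `compute_roc_auc` signature documented above (this example writes `dim=1` explicitly, and `log_softmax` is monotone per row, so the ranking ROC AUC depends on is unchanged):

```python
import torch
from monai.metrics import compute_roc_auc

y_pred = torch.tensor([[0.1, 0.9], [0.3, 1.4], [0.2, 0.15], [0.5, 0.05]])  # logits
y = torch.tensor([[0], [1], [0], [1]])  # shape [4, 1], expanded by to_onehot_y
auc = compute_roc_auc(
    y_pred, y,
    to_onehot_y=True,
    other_act=lambda x: torch.log_softmax(x, dim=1),
)
print(auc)
```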
2 changes: 1 addition & 1 deletion tests/test_compute_meandice.py
@@ -143,7 +143,7 @@
"to_onehot_y": False,
"mutually_exclusive": False,
"logit_thresh": 0.0,
"other_act": lambda x: torch.tanh(x),
"other_act": torch.tanh,
},
[[0.8]],
]
2 changes: 1 addition & 1 deletion tests/test_dice_loss.py
@@ -109,7 +109,7 @@
0.470451,
],
[ # shape: (2, 1, 2, 2), (2, 1, 2, 2)
{"include_background": True, "other_act": lambda x: torch.tanh(x)},
{"include_background": True, "other_act": torch.tanh},
{
"input": torch.tensor([[[[1.0, -1.0], [-1.0, 1.0]]], [[[1.0, -1.0], [-1.0, 1.0]]]]),
"target": torch.tensor([[[[1.0, 1.0], [1.0, 1.0]]], [[[1.0, 0.0], [1.0, 0.0]]]]),
2 changes: 1 addition & 1 deletion tests/test_generalized_dice_loss.py
@@ -109,7 +109,7 @@
0.250023,
],
[ # shape: (2, 1, 2, 2), (2, 1, 2, 2)
{"include_background": True, "other_act": lambda x: torch.tanh(x)},
{"include_background": True, "other_act": torch.tanh},
{
"input": torch.tensor([[[[1.0, -1.0], [-1.0, 1.0]]], [[[1.0, -1.0], [-1.0, 1.0]]]]),
"target": torch.tensor([[[[1.0, 1.0], [1.0, 1.0]]], [[[1.0, 0.0], [1.0, 0.0]]]]),
2 changes: 1 addition & 1 deletion tests/test_tversky_loss.py
@@ -100,7 +100,7 @@
0.247366,
],
[ # shape: (2, 1, 2, 2), (2, 1, 2, 2)
{"include_background": True, "other_act": lambda x: torch.tanh(x)},
{"include_background": True, "other_act": torch.tanh},
{
"input": torch.tensor([[[[1.0, -1.0], [-1.0, 1.0]]], [[[1.0, -1.0], [-1.0, 1.0]]]]),
"target": torch.tensor([[[[1.0, 1.0], [1.0, 1.0]]], [[[1.0, 0.0], [1.0, 0.0]]]]),
