🚨Hotfix: compute precision recall on raw scores (#1973)
compute precision recall on raw scores

Signed-off-by: Ashwin Vaidya <[email protected]>
ashwinvaidya17 authored Apr 10, 2024
1 parent 2cec512 commit c60b5ce
Showing 2 changed files with 9 additions and 2 deletions.
src/anomalib/metrics/optimal_f1.py (2 additions, 1 deletion)

@@ -7,7 +7,8 @@
 
 import torch
 from torchmetrics import Metric
-from torchmetrics.classification import BinaryPrecisionRecallCurve
+
+from anomalib.metrics.precision_recall_curve import BinaryPrecisionRecallCurve
 
 logger = logging.getLogger(__name__)
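The import swap above is the whole fix: the optimal-F1 metric now builds its precision-recall curve with anomalib's own BinaryPrecisionRecallCurve instead of the torchmetrics one. torchmetrics treats floating-point predictions outside [0, 1] as logits and sigmoid-normalizes them, so the thresholds it returns (and therefore the optimal-F1 threshold) would no longer live in the raw anomaly-score domain. The sketch below is illustrative only, not part of the commit; it assumes the anomalib subclass is a drop-in replacement that skips that normalization, the labels tensor and the TorchmetricsCurve/RawScoreCurve aliases are made up for the example, and the preds values are taken from the new test further down.

# Minimal sketch (assumption-laden, not from the commit) contrasting the two curve classes.
import torch
from torchmetrics.classification import BinaryPrecisionRecallCurve as TorchmetricsCurve

from anomalib.metrics.precision_recall_curve import BinaryPrecisionRecallCurve as RawScoreCurve

preds = torch.tensor([-0.5, 0.0, 1.0, 2.0, -0.1])  # raw, unnormalized anomaly scores (from the new test)
labels = torch.tensor([0, 0, 1, 1, 0])              # hypothetical ground-truth labels for illustration

# torchmetrics: float predictions outside [0, 1] are treated as logits and passed
# through a sigmoid, so the returned thresholds come back in sigmoid space.
_, _, sigmoid_thresholds = TorchmetricsCurve()(preds, labels)

# anomalib's subclass (assumed drop-in replacement) keeps the raw scores, so the
# thresholds can be compared directly against model outputs.
_, _, raw_thresholds = RawScoreCurve()(preds, labels)

With the torchmetrics version the thresholds land in (0, 1); with the anomalib version they are the raw scores themselves, which is what the updated threshold assertions in the test below rely on.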
tests/unit/metrics/test_optimal_f1.py (7 additions, 1 deletion)

@@ -42,4 +42,10 @@ def test_optimal_f1_raw() -> None:
 
     metric.update(preds, labels)
     assert metric.compute() == 1.0
-    assert metric.threshold == 0.5
+    assert metric.threshold == 0.0
+
+    metric.reset()
+    preds = torch.tensor([-0.5, 0.0, 1.0, 2.0, -0.1])
+    metric.update(preds, labels)
+    assert metric.compute() == torch.tensor(1.0)
+    assert metric.threshold == -0.1
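A hedged reading of the updated assertions: with the raw-score curve the reported threshold is the raw anomaly score itself, so the expected value drops from 0.5 (consistent with torchmetrics sigmoid-normalizing a raw score of 0.0, since sigmoid(0.0) = 0.5) to 0.0, and a negative raw score such as -0.1 can now be the optimal threshold.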
