[96354c]: / tests / metrics / test_evaluation_metrics.py

Download this file

93 lines (68 with data), 2.6 kB

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import pytest
import numpy as np
from src.metrics import evaluation_metrics as metrics
@pytest.fixture(scope="function")
def volume():
    """Return a 4x4x4 integer volume with values in [0, 4).

    Seeded so the suite is reproducible run-to-run; the previous unseeded
    np.random.randint call produced a different volume on every execution,
    which makes failures impossible to replay.
    """
    rng = np.random.default_rng(seed=0)
    return rng.integers(4, size=(4, 4, 4))
@pytest.fixture(scope="function")
def roi():
    """A 4x4x4 region-of-interest mask selecting every voxel (all ones)."""
    return np.full(shape=(4, 4, 4), fill_value=1.0)
def test_confusion_matrix_correct_output(volume, roi):
    """Comparing a volume against itself yields only true results.

    Every non-zero voxel must count as a true positive, every zero voxel
    as a true negative, with no false positives or false negatives.
    """
    tp, fp, tn, fn = metrics.get_confusion_matrix(volume, volume, roi)
    positives = np.count_nonzero(volume)
    negatives = volume.size - positives
    assert tp == positives
    assert tn == negatives
    assert fp == 0
    assert fn == 0
def test_confusion_matrix_assertation_error_size_mismatch(volume, roi):
    """get_confusion_matrix must raise when the reference shape differs.

    The mismatched reference is built *outside* the raises-block so the
    test only passes when get_confusion_matrix itself raises — previously
    a failure in the setup line would have satisfied pytest.raises too.
    """
    ref = np.random.randint(4, size=(2, 2, 4))
    with pytest.raises(Exception):
        metrics.get_confusion_matrix(volume, ref, roi)
def test_hausdorff_distance_correct_output(volume):
    """The Hausdorff distance between a volume and itself is zero."""
    assert metrics.hausdorff(volume, volume) == 0
def test_perfect_dice_score():
    """All true positives with no FP/FN gives a perfect Dice score of 1."""
    assert metrics.dice(tp=75, fp=0, fn=0) == 1
def test_50_percent_dice_score():
    """Dice = 2*tp / (2*tp + fp + fn) = 60 / 120 = 0.5 for these counts."""
    score = metrics.dice(tp=30, fp=40, fn=20)
    assert score == 0.5
def test_dice_zerodivicion():
    """Dice must return 0 — not raise ZeroDivisionError — on all-zero counts."""
    score = metrics.dice(tp=0, fp=0, fn=0)
    assert score == 0
def test_precision_correct():
    """Zero false positives gives precision 1."""
    assert metrics.precision(tp=10, fp=0) == 1
def test_precision_50_percent():
    """Equal TP and FP counts give precision tp / (tp + fp) = 0.5."""
    assert metrics.precision(tp=10, fp=10) == 0.5
def test_precision_zerodivicion():
    """Precision must return 0 — not raise — when tp + fp == 0."""
    assert metrics.precision(tp=0, fp=0) == 0
def test_recall_correct():
    """Zero false negatives gives recall 1."""
    assert metrics.recall(tp=10, fn=0) == 1
def test_recall_50_percent():
    """Equal TP and FN counts give recall tp / (tp + fn) = 0.5."""
    assert metrics.recall(tp=10, fn=10) == 0.5
def test_recall_zerodivicion():
    """Recall must return 0 — not raise — when tp + fn == 0."""
    result = metrics.recall(tp=0, fn=0)
    assert result == 0
def test_acc_correct():
    """No misclassifications gives accuracy 1."""
    acc = metrics.accuracy(tp=10, fp=0, tn=10, fn=0)
    assert acc == 1
def test_acc_50_percent():
    """Half correct, half wrong: (tp + tn) / total = 20 / 40 = 0.5."""
    acc = metrics.accuracy(tp=10, fp=10, tn=10, fn=10)
    assert acc == 0.5
def test_acc_zerodivicion():
    """Accuracy must return 0 — not raise — when all four counts are zero."""
    acc = metrics.accuracy(tp=0, fp=0, tn=0, fn=0)
    assert acc == 0
def test_fscore_correct():
    """Perfect precision and recall give an F-score of 1."""
    score = metrics.fscore(tp=10, fp=0, tn=10, fn=0)
    assert score == 1
def test_fscore_50_percent():
    """Precision and recall both 0.5 yield an F-score of 0.5."""
    score = metrics.fscore(tp=10, fp=10, tn=10, fn=10)
    assert score == 0.5
def test_fscore_zerodivicion():
    """F-score must return 0 — not raise — when there are no true positives."""
    score = metrics.fscore(tp=0, fp=10, tn=0, fn=10)
    assert score == 0
def test_fscore_zero_beta_raises_exeception():
    """A beta of 0 is invalid and must trigger fscore's assertion."""
    with pytest.raises(AssertionError):
        metrics.fscore(tp=10, fp=10, tn=10, fn=10, beta=0)