@@ -13,14 +13,15 @@ def test_metrics(self):
 
         precision = keras_metrics.precision()
         recall = keras_metrics.recall()
+        f1 = keras_metrics.f1_score()
 
         model = keras.models.Sequential()
         model.add(keras.layers.Dense(1, activation="sigmoid", input_dim=2))
         model.add(keras.layers.Dense(1, activation="softmax"))
 
         model.compile(optimizer="sgd",
                       loss="binary_crossentropy",
-                      metrics=[tp, fp, fn, precision, recall])
+                      metrics=[tp, fp, fn, precision, recall, f1])
 
         samples = 1000
         x = numpy.random.random((samples, 2))
@@ -35,12 +36,18 @@ def test_metrics(self):
 
         precision = metrics[3]
         recall = metrics[4]
+        f1 = metrics[5]
 
         expected_precision = tp_val / (tp_val + fp_val)
         expected_recall = tp_val / (tp_val + fn_val)
 
+        f1_divident = (expected_precision * expected_recall)
+        f1_divisor = (expected_precision + expected_recall)
+        expected_f1 = (2 * f1_divident / f1_divisor)
+
         self.assertAlmostEqual(expected_precision, precision, delta=0.05)
         self.assertAlmostEqual(expected_recall, recall, delta=0.05)
+        self.assertAlmostEqual(expected_f1, f1, delta=0.05)
 
 
 if __name__ == "__main__":
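Note on the new expectation: expected_f1 above is simply the harmonic mean of precision and recall, 2 * P * R / (P + R). A minimal standalone sketch of the same arithmetic, using made-up confusion counts for illustration (not values from this test):

tp_val, fp_val, fn_val = 40, 10, 20                 # hypothetical counts
precision = tp_val / (tp_val + fp_val)              # 0.8
recall = tp_val / (tp_val + fn_val)                 # ~0.667
f1 = 2 * precision * recall / (precision + recall)  # ~0.727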