|
a |
|
b/metrics.py |
|
|
1 |
# -*- coding: utf-8 -*- |
|
|
2 |
from __future__ import absolute_import, division, print_function |
|
|
3 |
|
|
|
4 |
import keras |
|
|
5 |
import numpy as np |
|
|
6 |
from sklearn.metrics import * |
|
|
7 |
|
|
|
8 |
|
|
|
9 |
class SKLearnMetrics(keras.callbacks.Callback):
    """Keras callback that records sklearn classification metrics after each epoch.

    Metrics are computed on ``self.validation_data`` and appended to
    per-metric history lists (``confusion``, ``precision``, ``recall``,
    ``f1s``, ``kappa``, ``auc``) — one entry per epoch.

    NOTE: does not work when training with generators, because Keras only
    populates ``self.validation_data`` when validation data is passed as
    in-memory arrays.
    """

    def on_train_begin(self, logs=None):
        """Reset all metric history lists at the start of training.

        ``logs`` is accepted for Keras callback API compatibility and ignored.
        (The original used a mutable ``{}`` default here — ``None`` avoids the
        shared-mutable-default pitfall.)
        """
        self.confusion = []  # confusion matrices, one per epoch
        self.precision = []
        self.recall = []
        self.f1s = []
        self.kappa = []
        self.auc = []

    def on_epoch_end(self, epoch, logs=None):
        """Compute validation metrics for this epoch and append them to the history lists.

        Assumes a binary classification setup: predicted scores are rounded at
        0.5 to obtain hard class labels — TODO confirm against the model's
        output layer.
        """
        # Predict once and derive both scores and hard labels from the same
        # array (the original called ``model.predict`` twice on identical input).
        score = np.asarray(self.model.predict(self.validation_data[0]))
        predict = np.round(score)
        target = self.validation_data[1]

        # AUC needs the continuous scores; the remaining metrics use hard labels.
        self.auc.append(roc_auc_score(target, score))
        self.confusion.append(confusion_matrix(target, predict))
        self.precision.append(precision_score(target, predict))
        self.recall.append(recall_score(target, predict))
        self.f1s.append(f1_score(target, predict))
        self.kappa.append(cohen_kappa_score(target, predict))
        return