# GP/MLP/mlp.py

from scipy.stats import pearsonr
import pandas as pd
import numpy as np

from keras import backend as K
from keras import metrics
from keras import regularizers
from keras.models import Sequential, load_model
from keras.layers import Dense, Activation, Dropout
from keras.activations import relu, elu, linear, softmax, tanh, softplus
from keras.callbacks import EarlyStopping, Callback
from keras.wrappers.scikit_learn import KerasRegressor
from keras.optimizers import Adam, Nadam, sgd, Adadelta, RMSprop
from keras.losses import mean_squared_error, categorical_crossentropy, logcosh
from keras.utils.np_utils import to_categorical

import talos as ta
import wrangle as wr
from talos.metrics.keras_metrics import fmeasure_acc
from talos import live
from talos.model import lr_normalizer, early_stopper, hidden_layers
import os

from CNN.cnn import acc_pearson_r

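# What acc_pearson_r is assumed to compute: a Pearson correlation between
# targets and predictions, written with Keras backend ops. This sketch is
# illustrative only; the models below use the version imported from CNN.cnn.
def _acc_pearson_r_sketch(y_true, y_pred):
    xm = y_true - K.mean(y_true)
    ym = y_pred - K.mean(y_pred)
    r_num = K.sum(xm * ym)
    r_den = K.sqrt(K.sum(K.square(xm)) * K.sum(K.square(ym))) + K.epsilon()
    return r_num / r_den
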
def mlp_main(x, y, x_val, y_val, params):

    model_mlp = Sequential()
    nSNP = x.shape[1]
    # number of output columns; a 1-D target has no second axis
    try:
        out_c = y.shape[1]
    except IndexError:
        out_c = 1

    model_mlp.add(Dense(params['first_neuron'], input_dim=nSNP,
                        activation=params['activation'],
                        kernel_initializer='normal',
                        kernel_regularizer=regularizers.l2(params['reg1'])))
    model_mlp.add(Dropout(params['dropout_1']))

    if params['hidden_layers'] != 0:
        # the number of hidden layers is itself a tunable hyperparameter
        for _ in range(params['hidden_layers']):
            model_mlp.add(Dense(params['hidden_neurons'],
                                activation=params['activation'],
                                kernel_regularizer=regularizers.l2(params['reg1'])))
            model_mlp.add(Dropout(params['dropout_2']))

    model_mlp.add(Dense(out_c, activation=params['last_activation'],
                        kernel_regularizer=regularizers.l2(params['reg2'])))

    # map optimizer names to classes so lr_normalizer can rescale the learning rate
    if params['optimizer'] == 'Adam':
        params['optimizer'] = Adam
    elif params['optimizer'] == 'Nadam':
        params['optimizer'] = Nadam
    elif params['optimizer'] == 'sgd':
        params['optimizer'] = sgd

    model_mlp.compile(loss=mean_squared_error,
                      optimizer=params['optimizer'](lr=lr_normalizer(params['lr'], params['optimizer'])),
                      metrics=[acc_pearson_r])

    # es = EarlyStopping(monitor='val_loss', mode='min', verbose=1)
    # callbacks=[live()] to watch training output; callbacks=[es] for early stopping

    # note: x_val and y_val are not passed to fit(); validation comes from
    # an internal 20% split of the training data instead
    out_mlp = model_mlp.fit(x, y, validation_split=0.2,
                            verbose=0, batch_size=params['batch_size'],
                            epochs=params['epochs'])

    return out_mlp, model_mlp

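# Usage sketch (hypothetical ranges): the params dict that mlp_main reads
# above, wrapped in a Talos scan. Key names match the lookups in mlp_main;
# the value ranges and the Scan call are illustrative only, and the Scan
# signature varies slightly across talos versions.
def _example_mlp_scan(x, y):
    p = {'first_neuron': [32, 64],
         'hidden_layers': [0, 1, 2],
         'hidden_neurons': [32],
         'activation': ['relu'],
         'last_activation': ['linear'],
         'dropout_1': [0.0, 0.2],
         'dropout_2': [0.0, 0.2],
         'reg1': [1e-4],
         'reg2': [1e-4],
         'optimizer': ['Adam'],
         'lr': [1, 2],  # rescaled per optimizer by lr_normalizer at compile time
         'batch_size': [32],
         'epochs': [50]}
    return ta.Scan(x=x, y=y, params=p, model=mlp_main)
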
def mlp_main_cat(x, y, x_val, y_val, params):

    model_mlp = Sequential()
    nSNP = x.shape[1]
    # y is expected to be one-hot encoded, so shape[1] is the number of classes
    last_layer = y.shape[1]

    model_mlp.add(Dense(params['first_neuron'], input_dim=nSNP,
                        activation=params['activation'],
                        kernel_initializer='normal',
                        activity_regularizer=regularizers.l1(params['reg1'])))
    model_mlp.add(Dropout(params['dropout_1']))

    if params['hidden_layers'] != 0:
        # the number of hidden layers is itself a tunable hyperparameter
        for _ in range(params['hidden_layers']):
            model_mlp.add(Dense(params['hidden_neurons'],
                                activation=params['activation'],
                                activity_regularizer=regularizers.l2(params['reg1'])))
            model_mlp.add(Dropout(params['dropout_2']))

    model_mlp.add(Dense(last_layer, activation='softmax'))

    # map optimizer names to classes so lr_normalizer can rescale the learning rate
    if params['optimizer'] == 'Adam':
        params['optimizer'] = Adam
    elif params['optimizer'] == 'Nadam':
        params['optimizer'] = Nadam
    elif params['optimizer'] == 'sgd':
        params['optimizer'] = sgd

    model_mlp.compile(loss='categorical_crossentropy',
                      optimizer=params['optimizer'](lr=lr_normalizer(params['lr'], params['optimizer'])),
                      metrics=['accuracy'])

    # 'accuracy' or mean_squared_error are both usable metrics here.
    # simple early stopping: if the monitored quantity is an accuracy-like
    # metric (e.g. Pearson r), use mode='max'; for a loss, use mode='min'
    # es = EarlyStopping(monitor='val_loss', mode='min', verbose=1)
    # callbacks=[live()] to watch training output; callbacks=[es] for early stopping

    # note: x_val and y_val are not passed to fit(); validation comes from
    # an internal 20% split of the training data instead
    out_mlp = model_mlp.fit(x, y, validation_split=0.2,
                            verbose=0, batch_size=params['batch_size'],
                            epochs=params['epochs'])

    return out_mlp, model_mlp
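

# Usage sketch for the categorical variant (hypothetical values): labels are
# one-hot encoded with to_categorical so that y.shape[1] gives the output
# size. Called directly (outside a Talos scan), mlp_main_cat takes scalar
# hyperparameters rather than lists.
def _example_mlp_cat(x, labels):
    y = to_categorical(labels)  # shape (n_samples, n_classes)
    p = {'first_neuron': 64, 'hidden_layers': 1, 'hidden_neurons': 32,
         'activation': 'relu', 'dropout_1': 0.2, 'dropout_2': 0.2,
         'reg1': 1e-4, 'optimizer': 'Adam', 'lr': 2,
         'batch_size': 32, 'epochs': 50}
    history, model = mlp_main_cat(x, y, x, y, p)
    return history, model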