architectures/neural_network_models.py
|
'''
About: Python script to define four MLP architectures for the classification problem.
Author: Iman Kafian-Attari
Date: 20.07.2021
Licence: MIT
Version: 0.1
=========================================================
How to use:
1. You do not need to interact with this script directly.
=========================================================
Notes:
1. This script is called from the main script.
2. It defines four MLP models of decreasing depth for the classification problem.
3. You can modify the existing MLP models or add new ones if you wish.
4. If you create new models, do not forget to call them from the main script
   (see the usage sketch at the end of this file).
=========================================================
TODO for version 0.2:
1. Add new neural network models.
=========================================================
'''
|
|
print(__doc__)

from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.losses import categorical_crossentropy
|
|
# Defining the 1st architecture
def neural_model1(num_input_nodes=281, num_label_nodes=4):
    model = Sequential()
    # First hidden layer: one neuron per input feature
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Remaining hidden layers, halving the width at each step
    model.add(BatchNormalization())
    model.add(Dense(128, activation='relu'))
    model.add(Dropout(0.2))
    model.add(BatchNormalization())
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.2))
    model.add(BatchNormalization())
    model.add(Dense(32, activation='relu'))
    model.add(Dropout(0.2))
    model.add(BatchNormalization())
    model.add(Dense(16, activation='relu'))
    model.add(Dropout(0.2))
    model.add(BatchNormalization())
    model.add(Dense(8, activation='relu'))
    model.add(Dropout(0.2))
    # Output layer: softmax activation for multi-class classification
    model.add(Dense(num_label_nodes, activation='softmax'))

    model.summary()

    return model
|
|
# Defining the 2nd architecture
def neural_model2(num_input_nodes=281, num_label_nodes=4):
    model = Sequential()
    # First hidden layer: one neuron per input feature
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Add the remaining hidden layers
    model.add(BatchNormalization())
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.2))
    model.add(BatchNormalization())
    model.add(Dense(16, activation='relu'))
    model.add(Dropout(0.2))
    # Output layer: softmax activation for multi-class classification
    model.add(Dense(num_label_nodes, activation='softmax'))

    model.summary()

    return model
|
|
# Defining the 3rd architecture
def neural_model3(num_input_nodes=281, num_label_nodes=4):
    model = Sequential()
    # First hidden layer: one neuron per input feature
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Add the remaining hidden layer
    model.add(BatchNormalization())
    model.add(Dense(32, activation='relu'))
    model.add(Dropout(0.2))
    # Output layer: softmax activation for multi-class classification
    model.add(Dense(num_label_nodes, activation='softmax'))

    model.summary()

    return model
|
|
# Defining the 4th architecture
def neural_model4(num_input_nodes=281, num_label_nodes=4):
    model = Sequential()
    # Single hidden layer: one neuron per input feature
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Output layer: softmax activation for multi-class classification
    model.add(Dense(num_label_nodes, activation='softmax'))

    model.summary()

    return model
|
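# ---------------------------------------------------------
# Usage sketch (an illustrative assumption, not part of the
# original project): a minimal example of how the main script
# might build, compile, and train one of these models. The
# random demo arrays below are hypothetical stand-ins for the
# real feature matrix and one-hot labels, and the training
# settings (batch size, patience, epochs) are placeholders.
# ---------------------------------------------------------
if __name__ == '__main__':
    import numpy as np

    # Hypothetical stand-in data: 32 samples, 281 features, 4 classes
    x_demo = np.random.rand(32, 281)
    y_demo = keras.utils.to_categorical(np.random.randint(0, 4, size=32), num_classes=4)

    model = neural_model2()
    model.compile(optimizer='adam',
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

    # Assumed early-stopping settings; tune patience for real data
    early_stop = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
    model.fit(x_demo, y_demo, epochs=3, batch_size=8,
              validation_split=0.25, callbacks=[early_stop])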