[8ba013]: / architectures / neural_network_models.py

Download this file

122 lines (99 with data), 4.2 kB

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
'''
About: Python script to define four MLP architectures for the classification problem.
Author: Iman Kafian-Attari
Date: 20.07.2021
Licence: MIT
version: 0.1
=========================================================
How to use:
1. You do not need to interact with this script directly.
=========================================================
Notes:
1. This script is called from the main script.
2. It includes various MLP models for the classification problem.
3. You can modify the existing MLP models or create new models, if you desire.
4. If you created new models, do not forget to call them from the main script.
=========================================================
TODO for version 0.2
1. Add new neural network models.
=========================================================
'''
print(__doc__)
from tensorflow import keras
from keras.models import Sequential
from tensorflow.keras.callbacks import EarlyStopping
from keras.layers import *
from keras.optimizers import *
from keras.losses import *
from tensorflow.keras.losses import categorical_crossentropy
# Defining the 1st architecture
def neural_model1(num_input_nodes=281, num_label_nodes=4):
    """Build the 1st (deepest) MLP classifier: hidden widths 128-64-32-16-8.

    Args:
        num_input_nodes: Number of input features; also the width of the
            first hidden layer.
        num_label_nodes: Number of output classes.

    Returns:
        An uncompiled ``Sequential`` model ending in a softmax output layer.
    """
    model = Sequential()
    # First hidden layer: as wide as the input feature vector.
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Remaining hidden layers: each stanza is batch-norm -> relu dense -> dropout,
    # with the width roughly halving at every step.
    for units in (128, 64, 32, 16, 8):
        model.add(BatchNormalization())
        model.add(Dense(units, activation='relu'))
        model.add(Dropout(0.2))
    # Output layer: softmax over the class labels (multi-class classification).
    model.add(Dense(num_label_nodes, activation='softmax'))
    model.summary()
    return model
# Defining the 2nd architecture
def neural_model2(num_input_nodes=281, num_label_nodes=4):
    """Build the 2nd MLP classifier: hidden widths 64-16.

    Args:
        num_input_nodes: Number of input features; also the width of the
            first hidden layer.
        num_label_nodes: Number of output classes.

    Returns:
        An uncompiled ``Sequential`` model ending in a softmax output layer.
    """
    model = Sequential()
    # First hidden layer: as wide as the input feature vector.
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Remaining hidden layers: batch-norm -> relu dense -> dropout per stanza.
    for units in (64, 16):
        model.add(BatchNormalization())
        model.add(Dense(units, activation='relu'))
        model.add(Dropout(0.2))
    # Output layer: softmax over the class labels (multi-class classification).
    model.add(Dense(num_label_nodes, activation='softmax'))
    model.summary()
    return model
# Defining the 3rd architecture
def neural_model3(num_input_nodes=281, num_label_nodes=4):
    """Build the 3rd MLP classifier: a single extra hidden layer of 32 units.

    Args:
        num_input_nodes: Number of input features; also the width of the
            first hidden layer.
        num_label_nodes: Number of output classes.

    Returns:
        An uncompiled ``Sequential`` model ending in a softmax output layer.
    """
    model = Sequential()
    # First hidden layer: as wide as the input feature vector.
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Single additional hidden stanza: batch-norm -> relu dense -> dropout.
    model.add(BatchNormalization())
    model.add(Dense(32, activation='relu'))
    model.add(Dropout(0.2))
    # Output layer: softmax over the class labels (multi-class classification).
    model.add(Dense(num_label_nodes, activation='softmax'))
    model.summary()
    return model
# Defining the 4th architecture
def neural_model4(num_input_nodes=281, num_label_nodes=4):
    """Build the 4th (shallowest) MLP classifier: one hidden layer only.

    Args:
        num_input_nodes: Number of input features; also the width of the
            sole hidden layer.
        num_label_nodes: Number of output classes.

    Returns:
        An uncompiled ``Sequential`` model ending in a softmax output layer.
    """
    model = Sequential()
    # Sole hidden layer: as wide as the input feature vector.
    model.add(Dense(num_input_nodes, activation='relu', input_shape=(num_input_nodes,)))
    model.add(Dropout(0.2))
    # Output layer: softmax over the class labels (multi-class classification).
    model.add(Dense(num_label_nodes, activation='softmax'))
    model.summary()
    return model