############################################################################################################################################
################################################# CREATION OF THE NETWORK #####################################################
############################################################################################################################################
############## =================== General Options ================= ################
[General]
networkName = liviaTest
# Saving Options
folderName = LiviaNet_Test
############## =================== CNN_Architecture ================= ################
[CNN_Architecture]
numkernelsperlayer = [25,25,25,50,50,50,75,75,75,400,200,150]
# Kernel shapes: (Note: a kernel size of 1 in a layer means that layer is fully connected)
# In this example there are 9 conv layers and 3 fully connected layers (+ classification layer)
kernelshapes = [[3, 3, 3], [3, 3, 3], [3, 3, 3],[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [1],[1],[1]]
# Intermediate layers to connect to the last conv layer (just before the first fully connected layer)
intermediateConnectedLayers = [2,5]
# The current implementation does not support pooling (to be done...)
pooling_scales = [[1,1,1],[1,1,1],[1,1,1]]
# Array size should equal the number of fully connected (FC) layers + the classification layer
dropout_Rates = [0.25,0.25,0.25,0.5]
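# (Sanity check, not parsed by the code: the 3 FC layers above plus the classification layer
# require 4 entries, which matches [0.25,0.25,0.25,0.5].)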
# Non-linear activations
# Type: 0: Linear
# 1: ReLU
# 2: PReLU
# 3: LeakyReLU
activationType = 2
# TODO. Include activation type for Softmax layer
# Number of classes: background + classes to segment
n_classes = 9
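# (Example: segmenting 8 structures against background gives n_classes = 9, as set here.)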
# ------- Weights initialization ----------- #
# There are several ways to initialize the weights, selected by the variable "weight_Initialization".
# Supported methods:
# 0: Classic
# 1: Delving (He, Kaiming, et al. "Delving deep into rectifiers: Surpassing human-level performance on imagenet classification." ICCV'15)
# 2: Load Pre-trained
# ----------
# There is also the choice of which layers will be initialized with pre-trained weights, specified in the variable
# "load weight layers". It can either be empty, meaning that all layers will be initialized with pre-trained weights
# when "weight_Initialization" is set to 2 (Load Pre-trained), or contain the indexes of the layers to initialize.
weight_Initialization_CNN = 1
weight_Initialization_FCN = 1
#load weight layers = [] # Next release
# If using pre-trained models, specify the folder that contains the weights and the indexes of those weights to use
# To ease transfer between different software (e.g. MATLAB) and between different architectures,
# the weights of each layer should be stored in a single file.
# For now, weights have to be in .npy format
weights folderName = /~yourpath/trainedWeights
# Same length as the number of conv layers
weights trained indexes = [0,1,2]
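# A minimal sketch (plain Python, outside this config) of how per-layer weights could be exported
# to the expected one-file-per-layer .npy layout; the file-name pattern and the "layer_weights"
# list are hypothetical, so adapt them to whatever the loader expects:
#   import numpy as np
#   # layer_weights: a list of numpy arrays, one per conv layer (hypothetical variable)
#   for i, W in enumerate(layer_weights):
#       np.save('/~yourpath/trainedWeights/weights_layer_%d.npy' % i, W)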
#weight_Initialization_Sec = 1
############## =================== Training Options ================= ################
[Training Parameters]
#n_epochs=20
batch_size=10
number Of Epochs = 30
number Of SubEpochs = 20
number of samples at each SubEpoch Train = 1000
# TODO. Define the learning rate change scheme
learning Rate change Type = 0
# Subvolumes (i.e. samples) sizes.
# Validation samples use the same size as testing samples
sampleSize_Train = [27,27,27]
sampleSize_Test = [35,35,35]
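# (Sanity check, assuming valid/unpadded convolutions: each 3x3x3 kernel shrinks every axis by 2,
# so the nine conv layers above remove 18 voxels per axis. A 27^3 training sample then yields a
# 9^3 prediction, and a 35^3 testing sample a 17^3 segment.)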
# Cost function values
# 0:
# 1:
costFunction = 0
SoftMax temperature = 1.0
#### ========= Regularization and learning rate ========== #####
L1 Regularization Constant = 1e-6
L2 Regularization Constant = 1e-4
# To check
# The array size has to be equal to the total number of layers (i.e. CNNs + FCs + Classification layer)
#Learning Rate = [0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001]
Learning Rate = [0.001]
# First epoch to change learning rate
First Epoch Change LR = 1
# How often (in epochs) the learning rate changes
Frequency Change LR = 2
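# (Example, assuming the first change happens at "First Epoch Change LR" and repeats every
# "Frequency Change LR" epochs: with the values above the learning rate changes at epochs 1, 3, 5, ...)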
# TODO. Add learning rate for each layer
#### ========= Momentum ========== #####
# Type of momentum
# 0: Classic
# 1: Nesterov
Momentum Type = 1
Momentum Value = 0.6
# Use normalized momentum?
momentumNormalized = 1
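# For reference, a common form of the two updates (v: velocity, g(w): gradient at w, lr: learning
# rate, m: momentum value); the exact update used by the implementation may differ:
#   classic:  v <- m*v - lr*g(w);          w <- w + v
#   nesterov: v <- m*v - lr*g(w + m*v);    w <- w + v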
#### ======== Optimizer ===== ######
# Type: 0-> SGD
# 1-> RMSProp (TODO. Check why RMSProp complains....)
Optimizer Type = 1
# In case RMSProp is chosen
Rho RMSProp = 0.9
Epsilon RMSProp = 1e-4
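# For reference, the usual RMSProp update with these two parameters (acc: running average of the
# squared gradient, g: gradient, lr: learning rate); the exact implementation may differ:
#   acc <- rho*acc + (1 - rho)*g^2
#   w   <- w - lr * g / sqrt(acc + epsilon)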
# Apply Batch normalization
# 0: False, 1: True
applyBatchNormalization = 1
BatchNormEpochs = 20
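# For reference, batch normalization rescales each activation x with batch statistics (gamma and
# beta are learned, eps avoids division by zero); the exact implementation may differ:
#   y = gamma * (x - mean) / sqrt(var + eps) + beta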
# Apply padding to images
# 0: False, 1: True
applyPadding = 1
############################################################################################################################################
################################################# TRAINING VALUES #####################################################
############################################################################################################################################
[Training Images]
imagesFolder = ../Dataset/MR/
GroundTruthFolder = ../Dataset/Label/
# The ROI folder contains the masks that define where patches are extracted and where segmentation is performed.
# ROI values should be 0 (outside the region of interest) and 1 (inside the region of interest)
ROIFolder = ../Dataset/ROI/
# If you have no ROIs
#ROIFolder = []
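# A minimal sketch (plain Python, outside this config) of building a binary ROI from an MR volume
# using nibabel; the thresholding rule and file names are assumptions, and any tool that produces
# 0/1 volumes works just as well:
#   import numpy as np
#   import nibabel as nib
#   img = nib.load('../Dataset/MR/subject1.nii')           # hypothetical file name
#   roi = (img.get_fdata() > 0).astype(np.int16)           # 1 = region of interest, 0 = background
#   nib.save(nib.Nifti1Image(roi, img.affine), '../Dataset/ROI/subject1.nii')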
# Type of images in the dataset
# 0: nifti format
# 1: matlab format
# IMPORTANT: All the volumes should have been saved under the variable name 'vol'
imageTypes = 1
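# A minimal sketch (plain Python, outside this config) of saving a volume in MATLAB format with the
# required variable name 'vol'; the file name and the "vol" array are hypothetical:
#   from scipy.io import savemat
#   savemat('../Dataset/MR/subject1.mat', {'vol': vol})    # vol: 3D numpy array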
# Indexes for training/validation images. Note that index i corresponds to position i + 1 in the folder,
# since Python starts indexing at 0
indexesForTraining = [0,1,2,3,4,5]
indexesForValidation = [6]