5-Training with Ignite and Optuna/constants.py

import optuna

MODEL_NAME = 'MyModel'  # the name of the class in models.py you want to tune
OPTIMIZERS = ['AdamW', 'RMSprop']  # the list of optimizers to sample from
MAX_EPOCHS = 100  # the maximum number of epochs to train for each hyperparameter combination
METRIC = 'roc_auc'  # the metric to optimize across trials; also used for early stopping within a trial and for pruning across trials; see tuningfunctions.Objective for the full list of possible values
MIN_LR = 3e-5  # the minimum learning rate to sample from, on a log scale
MAX_LR = 3e-3  # the maximum learning rate to sample from, on a log scale
PATIENCE = 10  # the early-stopping patience; can be set to None
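
# Note: PATIENCE presumably feeds ignite.handlers.EarlyStopping, along these
# lines (an illustrative assumption, not necessarily this repo's exact wiring):
#   handler = EarlyStopping(patience=PATIENCE, score_function=score_fn, trainer=trainer)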

SCHEDULER = False  # whether to use the learning rate scheduler
STEP = 1  # the learning rate scheduler step size; cannot be None if SCHEDULER=True
GAMMA = 0.975  # the learning rate gamma (decay factor); cannot be None if SCHEDULER=True
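
# Note: STEP and GAMMA match the signature of torch.optim.lr_scheduler.StepLR,
# which they presumably configure when SCHEDULER=True (illustrative assumption):
#   scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=STEP, gamma=GAMMA)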

PRUNER = optuna.pruners.NopPruner()  # the Optuna pruner to use across trials
NUM_TRIALS = 5  # the number of different hyperparameter combinations to try
DIRECTION = 'maximize'  # the direction to optimize the metric in, e.g. 'minimize' for 'loss', 'maximize' for 'roc_auc'
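
# Note: a study built from these settings would typically look like this
# (a sketch; the repo's actual wiring may differ):
#   study = optuna.create_study(direction=DIRECTION, pruner=PRUNER)
#   study.optimize(objective, n_trials=NUM_TRIALS)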

## MyModel-specific parameters
INITIAL_KERNEL_NUM = [4, 8, 16, 32, 64]  # the initial kernel counts to sample from
MIN_DROPOUT = 0  # the minimum dropout probability to sample from
MAX_DROPOUT = 1  # the maximum dropout probability to sample from
CONV1_KERNEL1 = [7, 21]  # the first conv1 kernel dimensions to sample from
CONV1_KERNEL2 = [1, 3]  # the second conv1 kernel dimensions to sample from
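
These constants are presumably consumed by tuningfunctions.Objective, which is not shown here. As a rough sketch, one trial's hyperparameter combination might be sampled as follows; the function and key names below are illustrative assumptions, not the repo's actual API:

import optuna

from constants import (OPTIMIZERS, MIN_LR, MAX_LR, MIN_DROPOUT, MAX_DROPOUT,
                       INITIAL_KERNEL_NUM, CONV1_KERNEL1, CONV1_KERNEL2)


def sample_hyperparameters(trial: optuna.Trial) -> dict:
    """Illustrative only: draw one hyperparameter combination for a trial."""
    return {
        # categorical choice among the configured optimizer names
        'optimizer': trial.suggest_categorical('optimizer', OPTIMIZERS),
        # learning rate drawn on a log scale between MIN_LR and MAX_LR
        'lr': trial.suggest_float('lr', MIN_LR, MAX_LR, log=True),
        # dropout probability drawn uniformly from [MIN_DROPOUT, MAX_DROPOUT]
        'dropout': trial.suggest_float('dropout', MIN_DROPOUT, MAX_DROPOUT),
        # MyModel-specific architectural choices
        'initial_kernel_num': trial.suggest_categorical('initial_kernel_num', INITIAL_KERNEL_NUM),
        'conv1_kernel1': trial.suggest_categorical('conv1_kernel1', CONV1_KERNEL1),
        'conv1_kernel2': trial.suggest_categorical('conv1_kernel2', CONV1_KERNEL2),
    }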