AICare-baselines / configs / experiments_challenge.py

# Hyperparameter configurations for baseline models on the 'challenge' dataset
# (binary outcome prediction, main metric AUPRC).
hparams = [
    # Deep learning sequence baselines
    {
        'model': 'GRU',
        'dataset': 'challenge',
        'task': 'outcome',
        'epochs': 50,
        'patience': 10,
        'batch_size': 1024,
        'learning_rate': 0.001,
        'main_metric': 'auprc',
        'demo_dim': 5,
        'lab_dim': 33,
        'hidden_dim': 128,
        'output_dim': 1
    },
    {
        'model': 'Transformer',
        'dataset': 'challenge',
        'task': 'outcome',
        'epochs': 50,
        'patience': 10,
        'batch_size': 1024,
        'learning_rate': 0.001,
        'main_metric': 'auprc',
        'demo_dim': 5,
        'lab_dim': 33,
        'hidden_dim': 128,
        'output_dim': 1
    },
    {
        'model': 'MTRHN',
        'dataset': 'challenge',
        'task': 'outcome',
        'epochs': 50,
        'patience': 10,
        'batch_size': 1024,
        'learning_rate': 0.001,
        'main_metric': 'auprc',
        'demo_dim': 5,
        'lab_dim': 33,
        'hidden_dim': 128,
        'output_dim': 1
    },
    {
        'model': 'LSTM',
        'dataset': 'challenge',
        'task': 'outcome',
        'epochs': 50,
        'patience': 10,
        'batch_size': 1024,
        'learning_rate': 0.001,
        'main_metric': 'auprc',
        'demo_dim': 5,
        'lab_dim': 33,
        'hidden_dim': 128,
        'output_dim': 1
    },
    {
        'model': 'BiLSTM',
        'dataset': 'challenge',
        'task': 'outcome',
        'epochs': 50,
        'patience': 10,
        'batch_size': 1024,
        'learning_rate': 0.001,
        'main_metric': 'auprc',
        'demo_dim': 5,
        'lab_dim': 33,
        'hidden_dim': 128,
        'output_dim': 1
    },
    # {
    #     'model': 'AICare',
    #     'dataset': 'challenge',
    #     'task': 'outcome',
    #     'epochs': 50,
    #     'patience': 10,
    #     'batch_size': 1024,
    #     'learning_rate': 0.001,
    #     'main_metric': 'auprc',
    #     'demo_dim': 5,
    #     'lab_dim': 33,
    #     'hidden_dim': 128,
    #     'output_dim': 1
    # },
    # Traditional machine learning baselines
    {
        "model": "LR",
        "dataset": "challenge",
        "task": "outcome",
        "max_depth": 5,
        "n_estimators": 10,
        "learning_rate": 0.01,
        "batch_size": 81920,
        "main_metric": "auprc",
    },
    {
        "model": "XGBoost",
        "dataset": "challenge",
        "task": "outcome",
        "max_depth": 5,
        "n_estimators": 50,
        "learning_rate": 0.1,
        "batch_size": 81920,
        "main_metric": "auprc",
    },
    {
        "model": "DT",
        "dataset": "challenge",
        "task": "outcome",
        "max_depth": 5,
        "n_estimators": 10,
        "learning_rate": 0.01,
        "batch_size": 81920,
        "main_metric": "auprc",
    },
    {
        "model": "LightGBM",
        "dataset": "challenge",
        "task": "outcome",
        "max_depth": 5,
        "n_estimators": 50,
        "learning_rate": 0.01,
        "batch_size": 81920,
        "main_metric": "auprc",
    },
]
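
For reference, below is a minimal sketch of how a config list like this is typically consumed by a run script. The `train_baseline` dispatcher is a hypothetical stand-in for the repository's actual training entry point, and the import path simply assumes the file sits at configs/experiments_challenge.py as shown above; only the keys present in the configs themselves are relied upon.

from configs.experiments_challenge import hparams

# Models whose configs carry deep-learning-specific keys (epochs, hidden_dim, ...).
DL_MODELS = {"GRU", "Transformer", "MTRHN", "LSTM", "BiLSTM"}

def train_baseline(config: dict) -> None:
    """Hypothetical dispatcher: route each config to the matching pipeline."""
    if config["model"] in DL_MODELS:
        # Deep models expose hidden_dim / learning_rate / main_metric, etc.
        print(f"[DL] {config['model']}: hidden_dim={config['hidden_dim']}, "
              f"lr={config['learning_rate']}, metric={config['main_metric']}")
    else:
        # Tree/linear models expose max_depth / n_estimators instead.
        print(f"[ML] {config['model']}: max_depth={config['max_depth']}, "
              f"n_estimators={config['n_estimators']}")

if __name__ == "__main__":
    for config in hparams:
        train_baseline(config)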