"""Hyper-parameter configurations for the `challenge` dataset baselines.

Each entry in ``hparams`` describes one experiment: a model name plus the
settings the training scripts read for that run (outcome-prediction task,
early-stopped on AUPRC).  The grid is built from shared "common" settings
plus per-model overrides so a value only has to be changed in one place.
"""

# Settings shared by every deep-learning baseline on this dataset.
_DL_COMMON = {
    'dataset': 'challenge',
    'task': 'outcome',
    'epochs': 50,
    'patience': 10,         # early-stopping patience (epochs)
    'batch_size': 1024,
    'learning_rate': 0.001,
    'main_metric': 'auprc',
    'demo_dim': 5,          # presumably demographic-feature count — confirm vs. loader
    'lab_dim': 33,          # presumably lab-feature count — confirm vs. loader
    'hidden_dim': 128,
    'output_dim': 1,        # single logit for binary outcome
}

# NOTE(review): an 'AICare' entry existed but was commented out upstream;
# append 'AICare' here to re-enable it with the same common settings.
_DL_MODELS = ['GRU', 'Transformer', 'MTRHN', 'LSTM', 'BiLSTM']

# Settings shared by the classical-ML baselines.
_ML_COMMON = {
    'dataset': 'challenge',
    'task': 'outcome',
    'max_depth': 5,
    'batch_size': 81920,
    'main_metric': 'auprc',
}

# model -> (n_estimators, learning_rate).
# NOTE(review): 'LR' and 'DT' carry tree/boosting hyper-parameters
# (max_depth / n_estimators / learning_rate) exactly as in the original
# config — presumably ignored by those trainers; confirm before pruning.
_ML_VARIANTS = {
    'LR': (10, 0.01),
    'XGBoost': (50, 0.1),
    'DT': (10, 0.01),
    'LightGBM': (50, 0.01),
}

hparams = [
    {'model': model, **_DL_COMMON} for model in _DL_MODELS
] + [
    {'model': model, **_ML_COMMON,
     'n_estimators': n_estimators, 'learning_rate': learning_rate}
    for model, (n_estimators, learning_rate) in _ML_VARIANTS.items()
]