b/chexbert/src/constants.py

NUM_EPOCHS = 8  # Number of epochs to train for
BATCH_SIZE = 12  # Change this depending on GPU memory
NUM_WORKERS = 4  # A value of 0 means the main process loads the data
LEARNING_RATE = 2e-5
LOG_EVERY = 200  # Iterations after which to log status during training
VALID_NITER = 2000  # Iterations after which to evaluate model and possibly save (if dev performance is a new max)
PRETRAIN_PATH = None  # Path to pretrained model, such as BlueBERT or BioBERT
PAD_IDX = 0  # Padding index as required by the tokenizer

# CONDITIONS is a list of all 14 medical observations
CONDITIONS = ['Enlarged Cardiomediastinum', 'Cardiomegaly', 'Lung Opacity',
              'Lung Lesion', 'Edema', 'Consolidation', 'Pneumonia', 'Atelectasis',
              'Pneumothorax', 'Pleural Effusion', 'Pleural Other', 'Fracture',
              'Support Devices', 'No Finding']
CLASS_MAPPING = {0: "Blank", 1: "Positive", 2: "Negative", 3: "Uncertain"}
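
For context, a minimal usage sketch (not part of the repository) of how CONDITIONS and CLASS_MAPPING can map a vector of per-condition class indices back to label names. The decode_predictions helper and the sample predictions list are hypothetical, and the import assumes the script runs from chexbert/src.

# Hypothetical example: decode one predicted class index per condition into label names.
from constants import CONDITIONS, CLASS_MAPPING

def decode_predictions(predictions):
    """Map one class index per condition to its label name via CLASS_MAPPING."""
    assert len(predictions) == len(CONDITIONS)
    return {cond: CLASS_MAPPING[idx] for cond, idx in zip(CONDITIONS, predictions)}

# Hypothetical model output for a single report: 14 class indices, one per condition.
predictions = [1, 2, 0, 0, 3, 0, 0, 2, 0, 1, 0, 0, 1, 0]
for condition, label in decode_predictions(predictions).items():
    print(f"{condition}: {label}")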