Diff of /configs/config_toan.yml [000000] .. [95f789]

model_params:
  model: &model CNNFinetuneModels
  model_name: &model_name densenet169
  num_classes: 6
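  # NOTE: the &model / &model_name anchors above are re-used by
  # monitoring_params.tags at the bottom of this file. CNNFinetuneModels is
  # presumably a registered wrapper that builds an ImageNet-pretrained
  # densenet169 with a 6-way classification head.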


args:
  expdir: "src"
  logdir: &logdir "./logs/rsna"
  baselogdir: "./logs/rsna"

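# opt_level O1 is an NVIDIA Apex AMP optimization level (mixed precision);
# in this Catalyst setup distributed_params is presumably forwarded to
# apex.amp.initialize.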
distributed_params:
  opt_level: O1

stages:

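  # Stage-wide defaults: checkpoint selection, LR scheduling and early
  # stopping all key off the validation "loss", and lower is better.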
  state_params:
    main_metric: &reduce_metric loss
    minimize_metric: True

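  # Six per-class weights to match num_classes: 6; the final weight of 2 is
  # presumably the "any" label added by with_any: True below, mirroring the
  # competition's weighted multi-label log loss.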
  criterion_params:
    criterion: &criterion LogLoss
    weight: [1,1,1,1,1,2]

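  # Fold 0 of a stratified k-fold split; root points at JPEGs presumably
  # pre-converted from the stage 1 training DICOMs.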
  data_params:
    batch_size: 32
    num_workers: 4
    drop_last: False

    image_size: &image_size [512, 512]
    train_csv: "./csv/stratified_kfold/train_0.csv.gz"
    valid_csv: "./csv/stratified_kfold/valid_0.csv.gz"
#    dataset_type: "RSNAMultiWindowsDataset"
    with_any: True
    root: "../stage_1_train_images_jpg_preprocessing/"
    image_type: "jpg"

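  # Stages run in the order declared: a short "warmup" stage (3 epochs,
  # AdamW at lr 1e-3), then "stage1" (20 epochs at lr 1e-4 with
  # ReduceLROnPlateau on the validation loss).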
  warmup:
    optimizer_params:
      optimizer: AdamW
      lr: 0.001

    scheduler_params:
      scheduler: MultiStepLR
      milestones: [10]
      gamma: 0.3

    state_params:
      num_epochs: 3

    callbacks_params: &callbacks_params
      loss:
        callback: CriterionCallback

      optimizer:
        callback: OptimizerCallback
        accumulation_steps: 1
      scheduler:
        callback: SchedulerCallback
        reduce_metric: *reduce_metric
      saver:
        callback: CheckpointCallback
        save_n_best: 5

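  # Note: with num_epochs: 3 the MultiStepLR milestone at epoch 10 is never
  # reached, so the LR stays at 1e-3 for the whole warmup. The
  # &callbacks_params anchor is declared but never aliased; stage1 below
  # re-declares its callbacks (and adds early stopping) instead of writing
  # `callbacks_params: *callbacks_params`.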
  stage1:

    optimizer_params:
      optimizer: AdamW
      lr: 0.0001

    scheduler_params:
      scheduler: ReduceLROnPlateau
      patience: 1
      min_lr: 0.00001
      verbose: True
#      scheduler: OneCycleLR
#      num_steps: &num_epochs 25
#      lr_range: [0.0005, 0.00001]
#      warmup_steps: 5
#      momentum_range: [0.85, 0.95]

    state_params:
      num_epochs: 20

    callbacks_params:
      loss:
        callback: CriterionCallback

      optimizer:
        callback: OptimizerCallback
        accumulation_steps: 1
      scheduler:
        callback: SchedulerCallback
        reduce_metric: *reduce_metric
      saver:
        callback: CheckpointCallback
        save_n_best: 5

      early_stopping:
        callback: EarlyStoppingCallback
        patience: 2

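# With the YAML aliases resolved, tags expands to
# [CNNFinetuneModels, densenet169, LogLoss]. project/tags are presumably
# forwarded to the experiment tracker (e.g. Weights & Biases).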
monitoring_params:
  project: "Kaggle-RSNA"
  tags: [*model, *model_name, *criterion]