b/args.py
|
|
import argparse


def get_args_parser():
    parser = argparse.ArgumentParser('EEGTransformer training and evaluation script', add_help=False)

    # Optimizer and basic training parameters
    parser.add_argument('--opt', default='adamw', type=str)
    parser.add_argument('--epochs', default=50, type=int)
    # parser.add_argument('--lr', default=1e-5, type=float)
    # parser.add_argument('--sched', default='cosine', type=str)
    parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
                        help='SGD momentum (default: 0.9)')
    parser.add_argument('--weight-decay', type=float, default=0.05,
                        help='weight decay (default: 0.05)')
    # parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR',
    #                     help='lower lr bound for cyclic schedulers that hit 0 (1e-5)')

    # Learning rate schedule parameters
    parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER',
                        help='LR scheduler (default: "cosine")')
    parser.add_argument('--lr', type=float, default=5e-6, metavar='LR',
                        help='learning rate (default: 5e-6)')
    parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct',
                        help='learning rate noise on/off epoch percentages')
    parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT',
                        help='learning rate noise limit percent (default: 0.67)')
    parser.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV',
                        help='learning rate noise std-dev (default: 1.0)')
    parser.add_argument('--warmup-lr', type=float, default=1e-6, metavar='LR',
                        help='warmup learning rate (default: 1e-6)')
    parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR',
                        help='lower lr bound for cyclic schedulers that hit 0 (1e-5)')
    parser.add_argument('--hard_code_lr', type=float, default=0.0, metavar='LR',
                        help='hard-coded learning rate (default: 0.0)')

    parser.add_argument('--decay-epochs', type=float, default=30, metavar='N',
                        help='epoch interval to decay LR')
    parser.add_argument('--warmup-epochs', type=int, default=0, metavar='N',
                        help='epochs to warmup LR, if scheduler supports')
    parser.add_argument('--cooldown-epochs', type=int, default=10, metavar='N',
                        help='epochs to cooldown LR at min_lr, after cyclic schedule ends')
    parser.add_argument('--patience-epochs', type=int, default=10, metavar='N',
                        help='patience epochs for Plateau LR scheduler (default: 10)')
    parser.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE',
                        help='LR decay rate (default: 0.1)')
    return parser
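Because the parser above is built with add_help=False, it is presumably intended to be composed into a script's own top-level parser via argparse's parents mechanism. A minimal, hypothetical usage sketch follows; the entry-point script and the final print are illustrative and not part of the repository:

import argparse

from args import get_args_parser  # assumes this file is importable as args.py

if __name__ == '__main__':
    # Wrap the shared argument definitions in a parser that does provide -h/--help.
    parser = argparse.ArgumentParser('EEGTransformer training', parents=[get_args_parser()])
    args = parser.parse_args()
    # Illustrative only: inspect a few of the parsed hyperparameters.
    print(args.opt, args.lr, args.sched, args.epochs)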