#!/usr/bin/env bash
# Launch single-node multi-GPU distributed training via torch.distributed.launch.
#
# Usage: dist_train.sh <config> <num_gpus> [extra train.py args...]
#   $1   - path to the training config file
#   $2   - number of GPUs (processes) to launch on this node
#   $3+  - forwarded verbatim to train.py via "${@:3}"
# Env:
#   PORT - master port for the rendezvous (default: 29500)

CONFIG=$1
GPUS=$2
PORT=${PORT:-29500}

# Prepend the repo root (parent of this script's directory) to PYTHONPATH so
# train.py can import the project package without installation.
# NB: "$(dirname "$0")" is quoted so paths containing spaces survive.
PYTHONPATH="$(dirname "$0")/..":$PYTHONPATH \
python -m torch.distributed.launch --nproc_per_node="$GPUS" --master_port="$PORT" \
    "$(dirname "$0")"/train.py "$CONFIG" --launcher pytorch "${@:3}"