https://github.com/modelscope/FunASR
commit 2363c63f95 ("update")
parent 996b951365
@@ -86,6 +86,22 @@ def get_parser():
         help="The master port for distributed training"
         "This value is used when dist_init_method == 'env://'",
     )
+    parser.add_argument(
+        "--dist_launcher",
+        default=None,
+        type=str_or_none,
+        choices=["slurm", "mpi", None],
+        help="The launcher type for distributed training",
+    )
+    parser.add_argument(
+        "--multiprocessing_distributed",
+        default=True,
+        type=str2bool,
+        help="Use multi-processing distributed training to launch "
+        "N processes per node, which has N GPUs. This is the "
+        "fastest way to use PyTorch for either single node or "
+        "multi node data parallel training",
+    )
     parser.add_argument(
         "--unused_parameters",
         type=str2bool,
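For context, a minimal, self-contained sketch of how the two new options could be declared and parsed. The str2bool and str_or_none converters below are assumptions standing in for the helpers the diff references (FunASR's own implementations may differ), and the sample invocation at the end is hypothetical.

import argparse

# Assumed converter: accepts "true"/"false"-style strings from the command line.
def str2bool(value: str) -> bool:
    return value.lower() in ("true", "1", "yes")

# Assumed converter: maps "none" or an empty string to Python None.
def str_or_none(value: str):
    return None if value.lower() in ("none", "") else value

parser = argparse.ArgumentParser()
parser.add_argument(
    "--dist_launcher",
    default=None,
    type=str_or_none,
    choices=["slurm", "mpi", None],
    help="The launcher type for distributed training",
)
parser.add_argument(
    "--multiprocessing_distributed",
    default=True,
    type=str2bool,
    help="Spawn one training process per GPU on each node",
)

# Hypothetical invocation: run under Slurm without per-GPU process spawning.
args = parser.parse_args(["--dist_launcher", "slurm",
                          "--multiprocessing_distributed", "false"])
print(args.dist_launcher, args.multiprocessing_distributed)  # slurm False

Taking the boolean as a string (via a str2bool-style converter) keeps "--multiprocessing_distributed false" usable from shell scripts and generated command lines, where a plain store_true flag could not express an explicit off value.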