Mirror of https://github.com/modelscope/FunASR (synced 2025-09-15 14:48:36 +08:00)
Commit 99340740f5 (parent 15868f6230): funasr2
@@ -46,7 +46,7 @@ def main(kwargs: DictConfig):
     local_rank = int(os.environ.get('LOCAL_RANK', 0))
     # Check if we are using DDP or FSDP
-    use_ddp = 'WORLD_SIZE' in os.environ and os.environ["WORLD_SIZE"] > 1
+    use_ddp = 'WORLD_SIZE' in os.environ and int(os.environ["WORLD_SIZE"]) > 1
     use_fsdp = kwargs.get("use_fsdp", None)
     if use_ddp or use_fsdp:
         dist.init_process_group(backend=kwargs.get("backend", "nccl"), init_method='env://')
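This hunk fixes a type error in the DDP/FSDP detection: values in os.environ are always strings, so os.environ["WORLD_SIZE"] > 1 compares a str against an int and raises TypeError under Python 3. Casting to int makes the comparison valid. Below is a minimal sketch of the corrected pattern, not FunASR's actual entry point; the helper name should_init_distributed is illustrative only.

# Minimal sketch of the corrected check; should_init_distributed is a
# hypothetical helper, not part of FunASR.
import os

import torch.distributed as dist


def should_init_distributed() -> bool:
    # os.environ values are strings, so cast before the numeric comparison;
    # "2" > 1 would raise TypeError: '>' not supported between 'str' and 'int'.
    return int(os.environ.get("WORLD_SIZE", "1")) > 1


if __name__ == "__main__":
    if should_init_distributed():
        # torchrun exports MASTER_ADDR/MASTER_PORT/RANK/WORLD_SIZE, which
        # init_method="env://" reads to build the process group.
        dist.init_process_group(backend="nccl", init_method="env://")
        dist.destroy_process_group()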
@@ -180,7 +180,7 @@ class Trainer:
             pbar.update(1)
             if self.local_rank == 0:
                 pbar.set_description(
-                    f"Training Epoch: {epoch + 1}/{self.max_epoch}, step {batch_idx}/{len(self.dataloader_train)} (loss: {loss.detach().float()})")
+                    f"Training Epoch: {epoch + 1}/{self.max_epoch}, step {batch_idx}/{len(self.dataloader_train)} (loss: {loss.detach().float():.3f}, {[(k, round(v.cpu().item(), 3)) for k, v in stats.items()]})")
         pbar.close()
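This hunk enriches the tqdm progress line: the loss is now printed with three decimals and the per-batch stats dict of tensor-valued metrics is appended as rounded scalars. Below is a standalone sketch of how that description string is assembled; the keys in stats are placeholders, since the real ones come from the model's forward pass in FunASR.

# Sketch of the new progress-bar line with stand-in values.
import torch
from tqdm import tqdm

loss = torch.tensor(1.23456)
stats = {"acc": torch.tensor(0.8731), "batch_size": torch.tensor(32.0)}

pbar = tqdm(total=100)
pbar.update(1)
pbar.set_description(
    # Format the scalar loss to three decimals and dump each stat as a
    # (name, rounded value) pair, mirroring the diff above.
    f"Training Epoch: 1/50, step 1/100 "
    f"(loss: {loss.detach().float():.3f}, "
    f"{[(k, round(v.cpu().item(), 3)) for k, v in stats.items()]})"
)
pbar.close()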