commit 25e28c49b7 (parent 1eb85d7d17)
Author: speech_asr
Date:   2023-04-18 16:26:51 +08:00


@@ -1,6 +1,9 @@
import logging
import os
import numpy as np
import torch
from funasr.datasets.small_datasets.dataset import ESPnetDataset
from funasr.datasets.small_datasets.preprocessor import build_preprocess
from funasr.samplers.length_batch_sampler import LengthBatchSampler
@@ -9,7 +12,7 @@ from funasr.samplers.length_batch_sampler import LengthBatchSampler
def build_dataloader(args, mode="train"):
    preprocess_fn = build_preprocess(args, train=mode == "train")
    dest_sample_rate = args.frontend_conf["fs"] if (
        args.frontend_conf is not None and "fs" in args.frontend_conf) else 16000
if mode == "train":
data_path_and_name_and_type = args.train_data_path_and_name_and_type
shape_files = args.train_shape_file
@@ -46,3 +49,18 @@ def build_dataloader(args, mode="train"):
f"[{mode}] mini-batch sizes summary: N-batch={len(bs_list)}, "
f"mean={np.mean(bs_list):.1f}, min={np.min(bs_list)}, max={np.max(bs_list)}"
)
    if args.scheduler == "tri_stage" and mode == "train":
        args.max_update = len(bs_list) * args.max_epoch
        logging.info("Max update: {}".format(args.max_update))
    if args.distributed:
        world_size = torch.distributed.get_world_size()
        rank = torch.distributed.get_rank()
        for batch in batches:
            if len(batch) < world_size:
                raise RuntimeError(
                    "The batch size must be greater than or equal to "
                    f"world_size: {len(batch)} < {world_size}"
                )
        batches = [batch[rank::world_size] for batch in batches]
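
For context on the tri_stage branch: `max_update` is the total number of optimizer updates, computed as mini-batches per epoch times epochs. A minimal sketch of that arithmetic, assuming one update per mini-batch; `bs_list` and `max_epoch` are names from the diff, the values here are hypothetical:

    # Sketch of the tri_stage max_update arithmetic (hypothetical values).
    # bs_list holds one entry per mini-batch, so len(bs_list) is the number
    # of mini-batches in one epoch; one optimizer update per mini-batch.
    bs_list = [32, 32, 30, 26]   # 4 mini-batches per epoch
    max_epoch = 50
    max_update = len(bs_list) * max_epoch
    print(max_update)            # 200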
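
The distributed branch shards each mini-batch across ranks by strided slicing, so every rank sees a disjoint, near-equal slice of the same batch. A standalone sketch of that slicing with toy data (no torch.distributed initialization; names here are illustrative):

    # Sketch of per-rank batch sharding via strided slicing, as in the diff.
    # Each rank keeps every world_size-th sample starting at its own index,
    # so the shards are disjoint and together cover the whole batch.
    world_size = 4
    batch = ["utt0", "utt1", "utt2", "utt3", "utt4", "utt5", "utt6", "utt7"]
    shards = [batch[rank::world_size] for rank in range(world_size)]
    print(shards)
    # [['utt0', 'utt4'], ['utt1', 'utt5'], ['utt2', 'utt6'], ['utt3', 'utt7']]

The `len(batch) < world_size` guard in the diff ensures every rank receives at least one sample; a rank with an empty shard would contribute nothing to the step and could desynchronize collective operations.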