Mirror of https://github.com/modelscope/FunASR (synced 2025-09-15 14:48:36 +08:00)

Commit 1eb85d7d17 ("update"), parent c2b99916f5
@@ -1,13 +1,15 @@
 import logging
 import os
 
+import torch
 from funasr.datasets.small_datasets.dataset import ESPnetDataset
 from funasr.datasets.small_datasets.preprocessor import build_preprocess
-from funasr.samplers.build_batch_sampler import build_batch_sampler
+from funasr.samplers.length_batch_sampler import LengthBatchSampler
 
 def build_dataloader(args, mode="train"):
-    preprocess_fn = build_preprocess(args, train=mode=="train")
-    dest_sample_rate = args.frontend_conf["fs"] if (args.frontend_conf is not None and "fs" in args.frontend_conf) else 16000
+    preprocess_fn = build_preprocess(args, train=mode == "train")
+    dest_sample_rate = args.frontend_conf["fs"] if (
+        args.frontend_conf is not None and "fs" in args.frontend_conf) else 16000
     if mode == "train":
         data_path_and_name_and_type = args.train_data_path_and_name_and_type
         shape_files = args.train_shape_file
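
For context, each entry in `shape_files` maps an utterance id to its length, and those lengths drive the sampler introduced in the next hunk. A minimal sketch of loading one such file, assuming the common ESPnet-style `<utt_id> <length>[,<dim>]` line format (the helper `read_shape_file` is hypothetical, not part of this commit):

# Hypothetical helper: parse a shape file into {utt_id: length}.
# The "<utt_id> <length>[,<dim>]" line format is an assumption based on
# ESPnet-style shape files, not something this commit defines.
def read_shape_file(path):
    lengths = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            utt_id, shape = line.strip().split(maxsplit=1)
            lengths[utt_id] = int(shape.split(",")[0])  # first dim = length
    return lengths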
||||||
@@ -25,15 +27,22 @@ def build_dataloader(args, mode="train"):
         utt2category_file = os.path.join(data_path_and_name_and_type[0][0].parent, "utt2category")
     else:
         utt2category_file = None
-    batch_sampler = build_batch_sampler(
-        type=args.batch_type,
-        shape_files=iter_options.shape_files,
-        fold_lengths=args.fold_length,
-        batch_size=iter_options.batch_size,
-        batch_bins=iter_options.batch_bins,
-        sort_in_batch=args.sort_in_batch,
-        sort_batch=args.sort_batch,
+    dataset_conf = args.dataset_conf
+    batch_sampler = LengthBatchSampler(
+        batch_bins=dataset_conf["batch_size"],
+        shape_files=shape_files,
+        sort_in_batch=dataset_conf["sort_in_batch"] if hasattr(dataset_conf, "sort_in_batch") else "descending",
+        sort_batch=dataset_conf["sort_batch"] if hasattr(dataset_conf, "sort_batch") else "ascending",
         drop_last=False,
-        min_batch_size=torch.distributed.get_world_size() if args.distributed else 1,
-        utt2category_file=utt2category_file,
+        padding=True,
     )
+
+    batches = list(batch_sampler)
+    bs_list = [len(batch) for batch in batches]
+
+    logging.info(f"[{mode}] dataset:\n{dataset}")
+    logging.info(f"[{mode}] Batch sampler: {batch_sampler}")
+    logging.info(
+        f"[{mode}] mini-batch sizes summary: N-batch={len(bs_list)}, "
+        f"mean={np.mean(bs_list):.1f}, min={np.min(bs_list)}, max={np.max(bs_list)}"
+    )
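
The switch from `build_batch_sampler` to `LengthBatchSampler` replaces count-based batching with length-bucketed batching: utterances are ordered by length and packed into a batch until a `batch_bins` budget is exhausted, which keeps padding waste low once `padding=True`. A minimal self-contained sketch of that packing idea, reusing the parameter names from the call above; the diff does not show the real implementation in funasr.samplers, so the logic here is illustrative only:

# Illustrative sketch of length-bucketed batching; not FunASR's actual
# LengthBatchSampler, whose implementation this diff does not show.
def length_batches(lengths, batch_bins, sort_in_batch="descending",
                   sort_batch="ascending", drop_last=False):
    # lengths: {utt_id: num_frames}, e.g. from a shape file.
    # Sorting groups similarly sized utterances, so padding each batch to
    # its longest member wastes little compute.
    items = sorted(lengths.items(), key=lambda kv: kv[1],
                   reverse=(sort_in_batch == "descending"))

    batches, current, used_bins = [], [], 0
    for utt_id, length in items:
        # Close the current batch once adding this utterance would
        # exceed the batch_bins budget.
        if current and used_bins + length > batch_bins:
            batches.append(current)
            current, used_bins = [], 0
        current.append(utt_id)
        used_bins += length
    if current and not drop_last:
        batches.append(current)

    # sort_batch orders the finished batches for the epoch (here simply by
    # batch size; the real sampler may use a different key).
    batches.sort(key=len, reverse=(sort_batch == "descending"))
    return batches


# Example: a 1000-frame budget packs the three short utterances together
# and leaves the long one in its own batch.
print(length_batches({"utt1": 300, "utt2": 350, "utt3": 280, "utt4": 900},
                     batch_bins=1000))
# -> [['utt4'], ['utt2', 'utt1', 'utt3']]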