From 0f0ab681292867606299e99e165315b9a92cd6d8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=B8=B8=E9=9B=81?=
Date: Fri, 10 Mar 2023 19:18:09 +0800
Subject: [PATCH] egs

---
 .../finetune2.py | 37 -------------------
 1 file changed, 37 deletions(-)
 delete mode 100644 egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune2.py

diff --git a/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune2.py b/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune2.py
deleted file mode 100644
index fdd2aa282..000000000
--- a/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune2.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-
-from modelscope.metainfo import Trainers
-from modelscope.trainers import build_trainer
-
-from funasr.datasets.ms_dataset import MsDataset
-from funasr.utils.modelscope_param import modelscope_args
-
-
-def modelscope_finetune(params):
-    if not os.path.exists(params.output_dir):
-        os.makedirs(params.output_dir, exist_ok=True)
-    # dataset split ["train", "validation"]
-    ds_dict = MsDataset.load(params.data_path)
-    kwargs = dict(
-        model=params.model,
-        data_dir=ds_dict,
-        dataset_type=params.dataset_type,
-        work_dir=params.output_dir,
-        batch_bins=params.batch_bins,
-        max_epoch=params.max_epoch,
-        lr=params.lr)
-    trainer = build_trainer(Trainers.speech_asr_trainer, default_args=kwargs)
-    trainer.train()
-
-
-if __name__ == '__main__':
-    params = modelscope_args(model="damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch", data_path="./data")
-    params.output_dir = "./checkpoint"  # model save path
-    params.data_path = "./example_data/"  # data path
-    params.dataset_type = "small"  # use "small" for small datasets; if the data exceeds 1000 hours, use "large"
-    params.batch_bins = 2000  # batch size: with dataset_type="small", batch_bins is counted in fbank feature frames; with dataset_type="large", it is counted in milliseconds
-    params.max_epoch = 50  # maximum number of training epochs
-    params.lr = 0.0005  # learning rate
-    params.scheduler_conf = {"warmup_steps": 30000}
-
-    modelscope_finetune(params)