From 5feca0cc1718ca2dea23aecbacf2d2218e13a036 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=98=89=E6=B8=8A?=
Date: Wed, 14 Jun 2023 23:39:48 +0800
Subject: [PATCH] support mfcca CMVN stats and neatcontextual_paraformer in
 build_asr_model; make UniASR pre/post-encoders optional

- build_asr_model.py: initialize token_list to None when no token list is
  configured; pass the CMVN stats file to the normalization layer for the
  mfcca model; add "neatcontextual_paraformer" to the paraformer model
  family.
- e2e_uni_asr.py: turn UniASR's preencoder and postencoder parameters into
  optional keyword arguments defaulting to None.
---
 funasr/build_utils/build_asr_model.py | 8 ++++++--
 funasr/models/e2e_uni_asr.py          | 4 ++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/funasr/build_utils/build_asr_model.py b/funasr/build_utils/build_asr_model.py
index 621c4d960..7483a9aa1 100644
--- a/funasr/build_utils/build_asr_model.py
+++ b/funasr/build_utils/build_asr_model.py
@@ -239,6 +239,7 @@ def build_asr_model(args):
         vocab_size = len(token_list)
         logging.info(f"Vocabulary size: {vocab_size}")
     else:
+        token_list = None
         vocab_size = None
 
     # frontend
@@ -265,7 +266,10 @@ def build_asr_model(args):
     # normalization layer
     if args.normalize is not None:
         normalize_class = normalize_choices.get_class(args.normalize)
-        normalize = normalize_class(**args.normalize_conf)
+        if args.model == "mfcca":
+            normalize = normalize_class(stats_file=args.cmvn_file, **args.normalize_conf)
+        else:
+            normalize = normalize_class(**args.normalize_conf)
     else:
         normalize = None
 
@@ -300,7 +304,7 @@ def build_asr_model(args):
             **args.model_conf,
         )
     elif args.model in ["paraformer", "paraformer_online", "paraformer_bert", "bicif_paraformer",
-                        "contextual_paraformer"]:
+                        "contextual_paraformer", "neatcontextual_paraformer"]:
         # predictor
         predictor_class = predictor_choices.get_class(args.predictor)
         predictor = predictor_class(**args.predictor_conf)
diff --git a/funasr/models/e2e_uni_asr.py b/funasr/models/e2e_uni_asr.py
index d08ea37fd..9ec3a39a0 100644
--- a/funasr/models/e2e_uni_asr.py
+++ b/funasr/models/e2e_uni_asr.py
@@ -50,9 +50,7 @@ class UniASR(FunASRModel):
         frontend: Optional[AbsFrontend],
         specaug: Optional[AbsSpecAug],
         normalize: Optional[AbsNormalize],
-        preencoder: Optional[AbsPreEncoder],
         encoder: AbsEncoder,
-        postencoder: Optional[AbsPostEncoder],
         decoder: AbsDecoder,
         ctc: CTC,
         ctc_weight: float = 0.5,
@@ -80,6 +78,8 @@ class UniASR(FunASRModel):
         loss_weight_model1: float = 0.5,
         enable_maas_finetune: bool = False,
         freeze_encoder2: bool = False,
+        preencoder: Optional[AbsPreEncoder] = None,
+        postencoder: Optional[AbsPostEncoder] = None,
         encoder1_encoder2_joint_training: bool = True,
     ):
         assert check_argument_types()
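
For context, below is a minimal, self-contained sketch of the normalization
branch this patch introduces in build_asr_model(). The helper name
build_normalize is hypothetical and not part of FunASR's API; it also assumes
the selected normalize class (e.g. a GlobalMVN-style layer) accepts a
stats_file keyword, as the mfcca branch implies.

def build_normalize(args, normalize_choices):
    """Hypothetical helper mirroring the normalization branch this patch
    adds to build_asr_model(); not part of FunASR's public API."""
    if args.normalize is None:
        return None
    normalize_class = normalize_choices.get_class(args.normalize)
    if args.model == "mfcca":
        # mfcca loads its CMVN statistics from an explicit stats file;
        # assumption: the chosen normalize class accepts a stats_file kwarg.
        return normalize_class(stats_file=args.cmvn_file, **args.normalize_conf)
    # all other models keep the original construction path
    return normalize_class(**args.normalize_conf)

Note that the e2e_uni_asr.py change moves preencoder and postencoder to the
end of UniASR's signature: any caller that previously passed them positionally
must now pass them by keyword (or rely on the new None defaults).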