diff --git a/funasr/models/e2e_uni_asr.py b/funasr/models/e2e_uni_asr.py
index ee5e2baba..0c533899f 100644
--- a/funasr/models/e2e_uni_asr.py
+++ b/funasr/models/e2e_uni_asr.py
@@ -17,10 +17,10 @@ from funasr.losses.label_smoothing_loss import (
     LabelSmoothingLoss,  # noqa: H301
 )
 from funasr.models.ctc import CTC
+from funasr.models.encoder.abs_encoder import AbsEncoder
 from funasr.models.decoder.abs_decoder import AbsDecoder
 from funasr.models.postencoder.abs_postencoder import AbsPostEncoder
 from funasr.models.preencoder.abs_preencoder import AbsPreEncoder
-from funasr.models.base_model import FunASRModel
 from funasr.torch_utils.device_funcs import force_gatherable
 from funasr.models.base_model import FunASRModel
 from funasr.modules.streaming_utils.chunk_utilis import sequence_mask
diff --git a/funasr/models/encoder/abs_encoder.py b/funasr/models/encoder/abs_encoder.py
new file mode 100644
index 000000000..034bc1f88
--- /dev/null
+++ b/funasr/models/encoder/abs_encoder.py
@@ -0,0 +1,25 @@
+from abc import ABC
+from abc import abstractmethod
+from typing import Optional
+from typing import Tuple
+
+import torch
+
+
+class AbsEncoder(torch.nn.Module, ABC):
+    """Abstract interface that all encoder implementations must satisfy."""
+
+    @abstractmethod
+    def output_size(self) -> int:
+        """Return the feature dimension of the encoder output."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def forward(
+        self,
+        xs_pad: torch.Tensor,
+        ilens: torch.Tensor,
+        prev_states: Optional[torch.Tensor] = None,
+    ) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
+        """Encode padded inputs; return (output, output lengths, states)."""
+        raise NotImplementedError