diff --git a/funasr/models/llm_asr/adaptor.py b/funasr/models/llm_asr/adaptor.py
index 1928b52d1..82edce3fb 100644
--- a/funasr/models/llm_asr/adaptor.py
+++ b/funasr/models/llm_asr/adaptor.py
@@ -90,7 +90,7 @@ class Transformer(nn.Module):
                         llm_dim,
                         kwargs.get("attention_dropout_rate", 0.0),
                     ),
-                    positionwise_layer(
+                    PositionwiseFeedForward(
                         llm_dim,
                         llm_dim // 4,
                         kwargs.get("dropout_rate", 0.0),
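
Note on the change: the old call site referenced positionwise_layer, a name that is not defined in this scope, so constructing the adaptor's encoder blocks would raise a NameError; the fix instantiates the PositionwiseFeedForward class directly. As a minimal sketch of what the corrected call builds, here is an illustrative stand-in with the ESPnet-style signature PositionwiseFeedForward(idim, hidden_units, dropout_rate) inferred from the call site. The class body and the llm_dim = 4096 default below are assumptions for illustration, not FunASR's actual implementation.

import torch
import torch.nn as nn

class PositionwiseFeedForward(nn.Module):
    # Illustrative stand-in for the ESPnet-style position-wise FFN that the
    # diff now instantiates (not FunASR's actual class).
    def __init__(self, idim: int, hidden_units: int, dropout_rate: float):
        super().__init__()
        self.w_1 = nn.Linear(idim, hidden_units)  # project to bottleneck
        self.w_2 = nn.Linear(hidden_units, idim)  # project back to model dim
        self.dropout = nn.Dropout(dropout_rate)
        self.activation = nn.ReLU()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Applied position-wise: operates on the last (feature) dimension only.
        return self.w_2(self.dropout(self.activation(self.w_1(x))))

# Mirrors the corrected call site; note the bottleneck hidden size llm_dim // 4.
llm_dim = 4096  # assumed value; the real one comes from the adaptor config
ffn = PositionwiseFeedForward(llm_dim, llm_dim // 4, dropout_rate=0.0)
out = ffn(torch.randn(2, 10, llm_dim))  # (batch, time, llm_dim) -> same shape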