Compare commits

4 Commits

Author | SHA1 | Message | Date
Yu Cao | 8964e8b87f | Merge e5cc659f40 into 4402e95b0f | 2025-08-15 16:18:21 +08:00
游雁 | 4402e95b0f | v1.2.7 | 2025-08-15 15:22:18 +08:00
游雁 | f5051c55cd | trust_remote_code | 2025-08-15 15:10:37 +08:00
Yu Cao | e5cc659f40 | fix "can not find model issue when running libtorch runtime" | 2025-05-06 11:59:35 +08:00
4 changed files with 7 additions and 13 deletions

View File

@@ -1,10 +0,0 @@
-# method2, inference from local path
-from funasr import AutoModel
-model = AutoModel(
-    model="iic/emotion2vec_base",
-    hub="ms"
-)
-res = model.export(type="onnx", quantize=False, opset_version=13, device='cpu') # fp32 onnx-gpu
-# res = model.export(type="onnx_fp16", quantize=False, opset_version=13, device='cuda') # fp16 onnx-gpu
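The deleted snippet exported iic/emotion2vec_base to ONNX with model.export. For context only, a hedged sketch of consuming such an exported graph with onnxruntime; the file path, provider, and input shape are placeholders and are not taken from this diff:

import numpy as np
import onnxruntime as ort

# Placeholder path: model.export() writes the graph into the local model
# directory; see the export_name change further down for the expected filename.
session = ort.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])
input_name = session.get_inputs()[0].name
dummy_waveform = np.random.randn(1, 16000).astype(np.float32)  # illustrative input shape
outputs = session.run(None, {input_name: dummy_waveform})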

View File

@@ -1,3 +1,4 @@
+import logging
 import os
 import json
 from omegaconf import OmegaConf, DictConfig
@@ -79,7 +80,10 @@ def download_from_ms(**kwargs):
         kwargs["jieba_usr_dict"] = os.path.join(model_or_path, "jieba_usr_dict")
     if isinstance(kwargs, DictConfig):
         kwargs = OmegaConf.to_container(kwargs, resolve=True)
-    if os.path.exists(os.path.join(model_or_path, "requirements.txt")):
+    logging.warning(f'trust_remote_code: {kwargs.get("trust_remote_code", False)}')
+    if os.path.exists(os.path.join(model_or_path, "requirements.txt")) and kwargs.get(
+        "trust_remote_code", False
+    ):
         requirements = os.path.join(model_or_path, "requirements.txt")
         print(f"Detect model requirements, begin to install it: {requirements}")
         from funasr.utils.install_model_requirements import install_requirements
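This hunk gates automatic installation of a downloaded model's requirements.txt behind the trust_remote_code flag and logs the flag's value. A minimal usage sketch, assuming the flag is forwarded from AutoModel's kwargs to download_from_ms; the model id is reused from the deleted example above purely for illustration:

from funasr import AutoModel

# Opt in explicitly: with this change, requirements.txt in the downloaded
# model directory is only installed when trust_remote_code is truthy.
model = AutoModel(
    model="iic/emotion2vec_base",
    hub="ms",
    trust_remote_code=True,
)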

View File

@@ -84,4 +84,4 @@ def export_dynamic_axes(self):
 def export_name(
     self,
 ):
-    return "model"
+    return "model.onnx"

View File

@@ -1 +1 @@
-1.2.6
+1.2.7