mirror of https://github.com/modelscope/FunASR (synced 2025-09-15 14:48:36 +08:00)

commit e7351db81b ("update export")
parent 4b388768d0
@@ -13,16 +13,16 @@ model = AutoModel(
     model="iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch",
 )
 
-res = model.export(type="onnx", quantize=False)
+res = model.export(type="torchscript", quantize=False)
 print(res)
 
 
-# method2, inference from local path
-from funasr import AutoModel
+# # method2, inference from local path
+# from funasr import AutoModel
 
-model = AutoModel(
-    model="/Users/zhifu/.cache/modelscope/hub/iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
-)
+# model = AutoModel(
+#     model="/Users/zhifu/.cache/modelscope/hub/iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
+# )
 
-res = model.export(type="onnx", quantize=False)
-print(res)
+# res = model.export(type="onnx", quantize=False)
+# print(res)
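Note on the snippet above: with the defaults introduced later in this commit (`export_name = 'model'` and the `.torchscripts` suffix written by `_torchscripts`), the TorchScript artifact should land in the printed output directory as `model.torchscripts`. A minimal, hedged loading sketch; the file name and location are assumptions derived from this diff, not a documented API:

    import torch

    # Assumed artifact name/location, based on export_name = 'model' and the
    # '.torchscripts' suffix used by _torchscripts in this commit.
    ts_model = torch.jit.load("model.torchscripts", map_location="cpu")
    ts_model.eval()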
@@ -580,12 +580,6 @@ class AutoModel:
         )
 
         with torch.no_grad():
-            if type == "onnx":
-                export_dir = export_utils.export_onnx(model=model, data_in=data_list, **kwargs)
-            else:
-                export_dir = export_utils.export_torchscripts(
-                    model=model, data_in=data_list, **kwargs
-                )
+            export_dir = export_utils.export(model=model, data_in=data_list, **kwargs)
 
         return export_dir
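This hunk moves the ONNX/TorchScript branching out of `AutoModel.export`; the single `export_utils.export(...)` call now receives `type` through `**kwargs` and dispatches internally (see the `export_utils` hunks below). The user-facing call is unchanged, as in this hedged sketch (model id taken from the docs hunk above):

    from funasr import AutoModel

    model = AutoModel(model="iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch")
    onnx_dir = model.export(type="onnx", quantize=False)        # routed to _onnx
    ts_dir = model.export(type="torchscript", quantize=False)   # routed to _torchscripts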
@@ -31,6 +31,7 @@ def export_rebuild_model(model, **kwargs):
     model.export_dynamic_axes = types.MethodType(export_dynamic_axes, model)
     model.export_name = types.MethodType(export_name, model)
 
+    model.export_name = 'model'
     return model
 
 
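After this hunk `export_name` is consumed as a plain string attribute rather than a bound method (the `_onnx` hunk below builds the file name as `model.export_name + '.onnx'`). A hedged sketch of the export hooks an export-rebuilt model is expected to carry, shown on a toy module; shapes and method bodies are illustrative only, not the FunASR implementation:

    import torch


    class ToyExportModel(torch.nn.Module):
        # String file stem: yields model.onnx / model.torchscripts in the export dir.
        export_name = "model"

        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(80, 4)

        def forward(self, feats):
            return self.linear(feats)

        def export_dummy_inputs(self):
            # The real model supplies its own dummy tensors here.
            return (torch.randn(1, 10, 80),)

        def export_dynamic_axes(self):
            return {"feats": {1: "feats_length"}}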
@@ -109,7 +109,9 @@ def export_rebuild_model(model, **kwargs):
     backbone_model.export_dynamic_axes = types.MethodType(
         export_backbone_dynamic_axes, backbone_model
     )
-    backbone_model.export_name = types.MethodType(export_backbone_name, backbone_model)
+    embedder_model.export_name = "model_eb"
+    backbone_model.export_name = "model_bb"
 
     return backbone_model, embedder_model
 
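Giving the embedder and backbone distinct string names lets both artifacts share one output directory. Combined with the suffixes added in `export_utils` (`'.onnx'` in `_onnx`, `'.torchscripts'` in `_torchscripts`), the expected file names would be as follows, as an assumption based on this diff and assuming both sub-models are returned by `model.export(**kwargs)` and iterated:

    import os

    export_dir = "./export_out"  # placeholder; resolved from output_dir / init_param
    expected = [
        os.path.join(export_dir, "model_bb.onnx"),          # backbone_model.export_name + '.onnx'
        os.path.join(export_dir, "model_eb.onnx"),          # embedder_model.export_name + '.onnx'
        os.path.join(export_dir, "model_bb.torchscripts"),  # TorchScript counterpart
    ]
    print(expected)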
@@ -192,6 +194,3 @@ def export_backbone_dynamic_axes(self):
         "pre_acoustic_embeds": {1: "feats_length1"},
     }
 
-
-def export_backbone_name(self):
-    return "model.onnx"
@@ -2,7 +2,7 @@ import os
 import torch
 
 
-def export_onnx(model, data_in=None, quantize: bool = False, opset_version: int = 14, **kwargs):
+def export(model, data_in=None, quantize: bool = False, opset_version: int = 14, type='onnx', **kwargs):
     model_scripts = model.export(**kwargs)
     export_dir = kwargs.get("output_dir", os.path.dirname(kwargs.get("init_param")))
     os.makedirs(export_dir, exist_ok=True)
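The renamed entry point keeps the old keyword defaults, so existing ONNX callers are unaffected, while `type='torchscript'` selects the new path. A hedged sketch of calling it directly; `funasr.utils.export_utils` is an assumed module path, the helper name is hypothetical, and in normal use `AutoModel.export` prepares the model and kwargs:

    def export_rebuilt_model(rebuilt_model, out_dir="./export_out", fmt="torchscript"):
        # Sketch only: rebuilt_model is expected to come from export_rebuild_model.
        from funasr.utils import export_utils  # assumed location of this hunk's file
        return export_utils.export(
            model=rebuilt_model,
            data_in=None,
            quantize=False,
            type=fmt,               # 'onnx' (default) or 'torchscript'
            output_dir=out_dir,     # otherwise falls back to dirname(init_param)
        )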
@@ -11,14 +11,20 @@ def export_onnx(model, data_in=None, quantize: bool = False, opset_version: int
         model_scripts = (model_scripts,)
     for m in model_scripts:
         m.eval()
-        _onnx(
-            m,
-            data_in=data_in,
-            quantize=quantize,
-            opset_version=opset_version,
-            export_dir=export_dir,
-            **kwargs
-        )
+        if type == 'onnx':
+            _onnx(
+                m,
+                data_in=data_in,
+                quantize=quantize,
+                opset_version=opset_version,
+                export_dir=export_dir,
+                **kwargs
+            )
+        elif type == 'torchscript':
+            _torchscripts(
+                m,
+                path=export_dir,
+            )
         print("output dir: {}".format(export_dir))
 
     return export_dir
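Per sub-model, the loop now branches on `type`: only the ONNX branch receives `quantize` and `opset_version`, while the TorchScript branch needs just an output path. A quick, hedged sanity check of an exported ONNX artifact (path and file name are assumptions based on `export_name = 'model'`; use the directory printed as "output dir: ..."):

    import onnxruntime as ort

    sess = ort.InferenceSession("./export_out/model.onnx", providers=["CPUExecutionProvider"])
    print([i.name for i in sess.get_inputs()], [o.name for o in sess.get_outputs()])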
@@ -37,7 +43,7 @@ def _onnx(
 
     verbose = kwargs.get("verbose", False)
 
-    export_name = model.export_name() if hasattr(model, "export_name") else "model.onnx"
+    export_name = model.export_name + '.onnx'
     model_path = os.path.join(export_dir, export_name)
     torch.onnx.export(
         model,
@@ -70,3 +76,15 @@ def _onnx(
             weight_type=QuantType.QUInt8,
             nodes_to_exclude=nodes_to_exclude,
         )
+
+
+def _torchscripts(model, path, device='cpu'):
+    dummy_input = model.export_dummy_inputs()
+
+    if device == 'cuda':
+        model = model.cuda()
+        dummy_input = tuple([i.cuda() for i in dummy_input])
+
+    # model_script = torch.jit.script(model)
+    model_script = torch.jit.trace(model, dummy_input)
+    model_script.save(os.path.join(path, f'{model.export_name}.torchscripts'))
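`_torchscripts` traces the module with its own `export_dummy_inputs()` and writes `<export_name>.torchscripts`; `torch.jit.script` is left commented out in favour of tracing. A minimal, self-contained sketch of the same trace/save/load pattern on a toy module (not FunASR code; names and shapes are illustrative):

    import torch

    model = torch.nn.Linear(80, 4).eval()
    dummy_input = (torch.randn(1, 10, 80),)          # stands in for export_dummy_inputs()
    scripted = torch.jit.trace(model, dummy_input)   # trace with example inputs, as _torchscripts does
    scripted.save("model.torchscripts")

    # The artifact reloads and runs without the original Python class definition.
    reloaded = torch.jit.load("model.torchscripts")
    print(reloaded(torch.randn(1, 10, 80)).shape)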