# FunASR model export script (source: https://github.com/modelscope/FunASR)
import argparse
import os
from pathlib import Path

from funasr.utils.types import str2bool

parser = argparse.ArgumentParser()
parser.add_argument('--model-name', type=str, required=True,
                    help='ModelScope model name or local model directory')
parser.add_argument('--export-dir', type=str, required=True,
                    help='directory for downloaded and exported models')
parser.add_argument('--export', type=str2bool, default=True,
                    help='whether to export the model')
parser.add_argument('--type', type=str, default='onnx',
                    help='export backend: "onnx" or "torch"')
parser.add_argument('--device', type=str, default='cpu',
                    help='export device: "cpu" or "cuda"')
parser.add_argument('--quantize', type=str2bool, default=False,
                    help='also export a quantized model')
parser.add_argument('--fallback-num', type=int, default=0,
                    help='amp fallback number')
parser.add_argument('--audio_in', type=str, default=None,
                    help='input audio: a wav file or a wav.scp list')
parser.add_argument('--model_revision', type=str, default=None,
                    help='ModelScope model revision')
parser.add_argument('--calib_num', type=int, default=200,
                    help='max number of calibration samples')
args = parser.parse_args()
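
# Example invocation (illustrative only; the script filename and model name
# are assumptions, not fixed by this file; any ModelScope ASR model name or
# local model path should work):
#   python export.py \
#       --model-name damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch \
#       --export-dir ./export --type onnx --quantize false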

model_dir = args.model_name
if not Path(args.model_name).exists():
    # Not a local path: treat it as a ModelScope model name and download it.
    from modelscope.hub.snapshot_download import snapshot_download
    try:
        model_dir = snapshot_download(args.model_name, cache_dir=args.export_dir,
                                      revision=args.model_revision)
    except Exception as e:
        raise ValueError(
            "model-name must be a ModelScope model name or a local path "
            "downloaded from ModelScope, but got {}".format(args.model_name)
        ) from e

if args.export:
    model_file = os.path.join(model_dir, 'model.onnx')
    if args.quantize:
        model_file = os.path.join(model_dir, 'model_quant.onnx')
    if not os.path.exists(model_file):
        print("{} does not exist, exporting the onnx model".format(model_file))
        from funasr.export.export_model import ModelExport
        export_model = ModelExport(
            cache_dir=args.export_dir,
            onnx=True,  # --type defaults to "onnx"; only onnx export is wired here
            device=args.device,
            quant=args.quantize,
        )
        export_model.export(model_dir)
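
# Optional sanity check, a minimal sketch assuming onnxruntime is installed
# (input names and shapes depend on the exported model):
#   import onnxruntime as ort
#   sess = ort.InferenceSession(os.path.join(model_dir, 'model.onnx'))
#   print([i.name for i in sess.get_inputs()])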