update readme

This commit is contained in:
雾聪 2023-06-29 15:16:04 +08:00
parent 95fc10961f
commit dd3e3f4c92
2 changed files with 34 additions and 17 deletions

View File

@ -32,7 +32,7 @@ int main(int argc, char* argv[]) {
false, "", "string");
TCLAP::ValueArg<std::string> model_dir(
"", MODEL_DIR,
"default: /workspace/models/asr, the asr model path, which contains model.onnx, config.yaml, am.mvn",
"default: /workspace/models/asr, the asr model path, which contains model_quant.onnx, config.yaml, am.mvn",
false, "/workspace/models/asr", "string");
TCLAP::ValueArg<std::string> model_revision(
"", "model-revision",
@ -40,12 +40,12 @@ int main(int argc, char* argv[]) {
false, "v1.2.1", "string");
TCLAP::ValueArg<std::string> quantize(
"", QUANTIZE,
"true (Default), load the model of model.onnx in model_dir. If set "
"true, load the model of model_quant.onnx in model_dir",
"true (Default), load the model of model_quant.onnx in model_dir. If set "
"false, load the model of model.onnx in model_dir",
false, "true", "string");
TCLAP::ValueArg<std::string> vad_dir(
"", VAD_DIR,
"default: /workspace/models/vad, the vad model path, which contains model.onnx, vad.yaml, vad.mvn",
"default: /workspace/models/vad, the vad model path, which contains model_quant.onnx, vad.yaml, vad.mvn",
false, "/workspace/models/vad", "string");
TCLAP::ValueArg<std::string> vad_revision(
"", "vad-revision",
@ -53,12 +53,12 @@ int main(int argc, char* argv[]) {
false, "v1.2.0", "string");
TCLAP::ValueArg<std::string> vad_quant(
"", VAD_QUANT,
"true (Default), load the model of model.onnx in vad_dir. If set "
"true, load the model of model_quant.onnx in vad_dir",
"true (Default), load the model of model_quant.onnx in vad_dir. If set "
"false, load the model of model.onnx in vad_dir",
false, "true", "string");
TCLAP::ValueArg<std::string> punc_dir(
"", PUNC_DIR,
"default: /workspace/models/punc, the punc model path, which contains model.onnx, punc.yaml",
"default: /workspace/models/punc, the punc model path, which contains model_quant.onnx, punc.yaml",
false, "/workspace/models/punc",
"string");
TCLAP::ValueArg<std::string> punc_revision(
@ -67,8 +67,8 @@ int main(int argc, char* argv[]) {
false, "v1.1.7", "string");
TCLAP::ValueArg<std::string> punc_quant(
"", PUNC_QUANT,
"true (Default), load the model of model.onnx in punc_dir. If set "
"true, load the model of model_quant.onnx in punc_dir",
"true (Default), load the model of model_quant.onnx in punc_dir. If set "
"false, load the model of model.onnx in punc_dir",
false, "true", "string");
TCLAP::ValueArg<std::string> listen_ip("", "listen-ip", "listen ip", false,

View File

@ -49,27 +49,31 @@ make
```shell
cd bin
./funasr-wss-server [--model-thread-num <int>] [--decoder-thread-num <int>]
./funasr-wss-server [--download-model-dir <string>]
[--model-thread-num <int>] [--decoder-thread-num <int>]
[--io-thread-num <int>] [--port <int>] [--listen-ip
<string>] [--punc-quant <string>] [--punc-dir <string>]
[--vad-quant <string>] [--vad-dir <string>] [--quantize
<string>] --model-dir <string> [--keyfile <string>]
[--certfile <string>] [--] [--version] [-h]
Where:
--download-model-dir <string>
Download model from Modelscope to download_model_dir
--model-dir <string>
default: /workspace/models/asr, the asr model path, which contains model.onnx, config.yaml, am.mvn
default: /workspace/models/asr, the asr model path, which contains model_quant.onnx, config.yaml, am.mvn
--quantize <string>
true (Default), load the model of model.onnx in model_dir. If set true, load the model of model_quant.onnx in model_dir
true (Default), load the model of model_quant.onnx in model_dir. If set false, load the model of model.onnx in model_dir
--vad-dir <string>
default: /workspace/models/vad, the vad model path, which contains model.onnx, vad.yaml, vad.mvn
default: /workspace/models/vad, the vad model path, which contains model_quant.onnx, vad.yaml, vad.mvn
--vad-quant <string>
true (Default), load the model of model.onnx in vad_dir. If set true, load the model of model_quant.onnx in vad_dir
true (Default), load the model of model_quant.onnx in vad_dir. If set false, load the model of model.onnx in vad_dir
--punc-dir <string>
default: /workspace/models/punc, the punc model path, which contains model.onnx, punc.yaml
default: /workspace/models/punc, the punc model path, which contains model_quant.onnx, punc.yaml
--punc-quant <string>
true (Default), load the model of model.onnx in punc_dir. If set true, load the model of model_quant.onnx in punc_dir
true (Default), load the model of model_quant.onnx in punc_dir. If set false, load the model of model.onnx in punc_dir
--decoder-thread-num <int>
number of threads for decoder, default:8
@ -83,7 +87,20 @@ Where:
default: ../../../ssl_key/server.key, path of keyfile for WSS connection
example:
./funasr-wss-server --model-dir /FunASR/funasr/runtime/onnxruntime/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch
# you can use models downloaded from modelscope or local models:
# download models from modelscope
./funasr-wss-server \
--download-model-dir /workspace/models \
--model-dir damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-onnx \
--vad-dir damo/speech_fsmn_vad_zh-cn-16k-common-onnx \
--punc-dir damo/punc_ct-transformer_zh-cn-common-vocab272727-onnx
# load models from local paths
./funasr-wss-server \
--model-dir /workspace/models/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-onnx \
--vad-dir /workspace/models/damo/speech_fsmn_vad_zh-cn-16k-common-onnx \
--punc-dir /workspace/models/damo/punc_ct-transformer_zh-cn-common-vocab272727-onnx
```
## Run websocket client test