Mirror of https://github.com/modelscope/FunASR, synced 2025-09-15 14:48:36 +08:00
paraformer batch padding

parent 0515095886
commit 28719e181a
@@ -8,7 +8,7 @@ gpu_num=2
 count=1
 gpu_inference=true # Whether to perform gpu decoding, set false for cpu decoding
 # for gpu decoding, inference_nj=ngpu*njob; for cpu decoding, inference_nj=njob
-njob=8
+njob=1
 train_cmd=utils/run.pl
 infer_cmd=utils/run.pl

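The comment above spells out how the overall decoding parallelism is derived from these knobs. Below is a minimal sketch of that derivation in shell, using the names from the config (gpu_num, njob, gpu_inference) and the _ngpu/_nj variables consumed by the inference command further down; the real run.sh computes these in a later stage and may differ in detail.

# Sketch only: derive the number of parallel decoding jobs from the knobs above.
if ${gpu_inference}; then
    _ngpu=1                      # assumed: one GPU per decoding job
    _nj=$((gpu_num * njob))      # gpu decoding: inference_nj = ngpu * njob
else
    _ngpu=0
    _nj=${njob}                  # cpu decoding: inference_nj = njob
fi
echo "launching ${_nj} decoding jobs"

With the new default njob=1 and this recipe's gpu_num=2, that gives two GPU decoding jobs, one per GPU.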
@@ -219,7 +219,7 @@ if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
         fi
         ${infer_cmd} --gpu "${_ngpu}" --max-jobs-run "${_nj}" JOB=1:"${_nj}" "${_logdir}"/asr_inference.JOB.log \
             python -m funasr.bin.asr_inference_launch \
-                --batch_size 1 \
+                --batch_size 100 \
                 --ngpu "${_ngpu}" \
                 --njob ${njob} \
                 --gpuid_list ${gpuid_list} \
@@ -8,7 +8,7 @@ gpu_num=2
 count=1
 gpu_inference=true # Whether to perform gpu decoding, set false for cpu decoding
 # for gpu decoding, inference_nj=ngpu*njob; for cpu decoding, inference_nj=njob
-njob=8
+njob=1
 train_cmd=utils/run.pl
 infer_cmd=utils/run.pl

@@ -235,7 +235,7 @@ if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
         fi
         ${infer_cmd} --gpu "${_ngpu}" --max-jobs-run "${_nj}" JOB=1:"${_nj}" "${_logdir}"/asr_inference.JOB.log \
             python -m funasr.bin.asr_inference_launch \
-                --batch_size 1 \
+                --batch_size 100 \
                 --ngpu "${_ngpu}" \
                 --njob ${njob} \
                 --gpuid_list ${gpuid_list} \
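The functional change is the same in both scripts: the decoding batch size goes from 1 to 100 and the default njob drops from 8 to 1. The commit title suggests that inference can now pad a group of utterances to a common length and decode them in one forward pass, which is presumably why fewer parallel jobs are needed per GPU. A rough, illustrative calculation (the utterance count below is an example, not taken from this recipe):

# Illustration only: with batch_size=100, a job that decodes num_utts
# utterances runs about ceil(num_utts / 100) padded batches instead of
# num_utts single-utterance passes.
num_utts=2620      # example test-set size, not from this recipe
batch_size=100
num_batches=$(( (num_utts + batch_size - 1) / batch_size ))
echo "${num_batches} padded batches instead of ${num_utts} single-utterance passes"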