fix hotword tokenize

This commit is contained in:
haoneng.lhn 2023-07-17 14:24:42 +08:00
parent bcf3b48647
commit 52b36291e1

View File

@@ -54,9 +54,9 @@ def tokenize(data,
length = len(text)
if 'hw_tag' in data:
pre_index = None
if hw_config['pre_hwlist'] is not None and hw_config['pre_prob'] > 0:
# enable preset hotword detect in sampling
pre_index = None
for hw in hw_config['pre_hwlist']:
hw = " ".join(seg_tokenize(hw, seg_dict))
_find = " ".join(text).find(hw)