update with main (#1163)

* v0.8.7

* update cmd version

* set openfst HAVE_BIN/HAVE_SCRIPT off for win32

* Fix hotword support for the new model version (#1137)

* update CMakeLists.txt

* Revert "update CMakeLists.txt"

This reverts commit 54bcd1f674.

* rm log.h for wins-websocket

* fix bug of websocket lock blocking

* update funasr-wss-server

* update model-revision by model name

* update funasr-wss-server-2pass

* Add support for the ERes2Net model in speaker-diarized (per-speaker) speech recognition.

* Update README.md (#1140)

minor fix

* automatically configure parameters such as decoder-thread-num

* update docs

* update docs

* update docs

* Support more models for speaker-diarized speech recognition

* update spk inference

* remove unused code (#1151)

* fix loss normalization for ddp training

* fix loss normalization for ddp training

* fix loss normalization for ddp training

---------

Co-authored-by: 雾聪 <wucong.lyb@alibaba-inc.com>
Co-authored-by: 夜雨飘零 <yeyupiaoling@foxmail.com>
Co-authored-by: Ikko Eltociear Ashimine <eltociear@gmail.com>
Co-authored-by: Shi Xian <40013335+R1ckShi@users.noreply.github.com>
Co-authored-by: shixian.shi <shixian.shi@alibaba-inc.com>
Co-authored-by: haoneng.lhn <haoneng.lhn@alibaba-inc.com>
zhifu gao 2023-12-11 10:10:54 +08:00 committed by GitHub
parent 5cf512419c
commit 85cabd30a9
4 changed files with 16 additions and 8 deletions

@@ -222,7 +222,8 @@ class ASRModel(FunASRModel):
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + 1).sum().type_as(batch_size)
+            batch_size = int((text_lengths + 1).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight

@@ -233,7 +233,8 @@ class NeatContextualParaformer(Paraformer):
         stats["loss"] = torch.clone(loss.detach())
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+            batch_size = int((text_lengths + self.predictor_bias).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight

@@ -255,7 +255,8 @@ class Paraformer(FunASRModel):
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+            batch_size = int((text_lengths + self.predictor_bias).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight
@@ -867,7 +868,8 @@ class ParaformerOnline(Paraformer):
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+            batch_size = int((text_lengths + self.predictor_bias).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight
@@ -1494,7 +1496,8 @@ class ParaformerBert(Paraformer):
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+            batch_size = int((text_lengths + self.predictor_bias).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight
@@ -1765,7 +1768,8 @@ class BiCifParaformer(Paraformer):
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+            batch_size = int((text_lengths + self.predictor_bias).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight
@@ -1967,7 +1971,8 @@ class ContextualParaformer(Paraformer):
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+            batch_size = int((text_lengths + self.predictor_bias).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight

@@ -442,7 +442,8 @@ class UniASR(FunASRModel):
         stats["loss"] = torch.clone(loss.detach())
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
-            batch_size = (text_lengths + 1).sum().type_as(batch_size)
+            batch_size = int((text_lengths + 1).sum())
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight
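
All of the hunks above make the same change for the "fix loss normalization for ddp training" commits: when length_normalized_loss is enabled, the weight passed to force_gatherable is computed as a plain Python int rather than a tensor built with .type_as(batch_size), so it becomes a fresh scalar tensor on the loss device and can be used to weight the loss by token count when averaging across DataParallel/DDP workers. Below is a minimal, self-contained sketch of that weighting pattern; the helper names are illustrative only and are not FunASR's API.

import torch

def loss_weight(text_lengths: torch.Tensor, predictor_bias: int = 1) -> int:
    # Token count (plus bias tokens) as a plain Python int, mirroring
    # batch_size = int((text_lengths + predictor_bias).sum()) in the diff.
    return int((text_lengths + predictor_bias).sum())

def reduce_losses(losses, weights):
    # DDP-style reduction: average per-worker losses weighted by token count.
    total_weight = sum(weights)
    return sum(w * l for w, l in zip(weights, losses)) / total_weight

# Example: two workers with different numbers of target tokens.
lengths_a = torch.tensor([7, 12, 5])
lengths_b = torch.tensor([9, 4])
w_a, w_b = loss_weight(lengths_a), loss_weight(lengths_b)   # 27 and 15
print(reduce_losses([0.9, 1.1], [w_a, w_b]))                # length-weighted mean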