remove set_all_random_seed

This commit is contained in:
志浩 2024-09-13 17:23:44 +08:00
parent 4b00bc61e9
commit 6c59692f71
3 changed files with 3 additions and 3 deletions

View File

@@ -48,7 +48,7 @@ class BASECFM(torch.nn.Module, ABC):
sample: generated mel-spectrogram
shape: (batch_size, n_feats, mel_timesteps)
"""
set_all_random_seed(0)
# set_all_random_seed(0)
z = torch.randn_like(mu) * temperature
t_span = torch.linspace(0, 1, n_timesteps + 1, device=mu.device)
if self.t_scheduler == 'cosine':

View File

@@ -2924,7 +2924,7 @@ class LLMASRXvecSlotTTS(nn.Module):
enabled=True if tts_dtype != "fp32" else False, dtype=dtype_map[tts_dtype]
):
assert llm_cur_kv_cache is not None
set_all_random_seed(rand_seed)
# set_all_random_seed(rand_seed)
# speech_tokens, mel, wav = self.generate_speech(
# response, llm_cur_kv_cache, llm_cur_kv_cache_len, dtype_map[tts_dtype]
# )

View File

@@ -1003,7 +1003,7 @@ class UCTDXvecSlotModel(UpsampleCtcTokenDiffModel):
cur_token_len = cur_token_len - token_hop_len
# forward FM model
set_all_random_seed(0)
# set_all_random_seed(0)
if cur_token_len[0] < 1:
return None, None
feat = self.fm_model.inference(