Mirror of https://github.com/modelscope/FunASR, synced 2025-09-15 14:48:36 +08:00
Dev gzf new (#1555)
* train
This commit is contained in:
parent 31350db825
commit d1b1fdd520
```diff
@@ -56,7 +56,7 @@ class EspnetStyleBatchSampler(DistributedSampler):
         self.shuffle = shuffle and is_training
         self.drop_last = drop_last

-        self.total_size = len(self.dataset)
+        # self.total_size = len(self.dataset)
         # self.num_samples = int(math.ceil(self.total_size / self.num_replicas))
         self.epoch = 0
         self.sort_size = sort_size * num_replicas
```
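For context, the pattern this class relies on is epoch-seeded shuffling: seeding a private torch.Generator with the epoch number lets every distributed rank compute the same permutation with no communication and without touching the global RNG. A minimal sketch of the idea (the standalone function and its name are hypothetical, not FunASR code):

```python
import torch

def epoch_permutation(dataset_len: int, epoch: int) -> list[int]:
    # A Generator seeded with the epoch yields a permutation that is
    # reproducible and identical on every rank, and leaves the global
    # RNG state untouched.
    g = torch.Generator()
    g.manual_seed(epoch)
    return torch.randperm(dataset_len, generator=g).tolist()

# Same epoch -> same order on every rank; a new epoch reshuffles.
print(epoch_permutation(8, 3) == epoch_permutation(8, 3))  # True
print(epoch_permutation(8, 3))
```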
```diff
@@ -71,10 +71,10 @@ class EspnetStyleBatchSampler(DistributedSampler):
             g = torch.Generator()
             g.manual_seed(self.epoch)
             random.seed(self.epoch)
-            indices = torch.randperm(self.total_size, generator=g).tolist()
+            indices = torch.randperm(len(self.dataset), generator=g).tolist()
         else:
-            indices = list(range(self.total_size))
+            indices = list(range(len(self.dataset)))

         # Sort indices by sample length
         sorted_indices = sorted(indices, key=lambda idx: self.dataset.get_source_len(idx))

```
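The `sorted_indices` line is the ESPnet-style trick: grouping samples of similar length before batching reduces padding waste. A rough sketch of the buffer-sorting idea implied by `sort_size` (the `lengths` list stands in for `dataset.get_source_len`, and all names here are hypothetical):

```python
import random

def windowed_length_sort(lengths: list[int], sort_size: int, epoch: int) -> list[int]:
    # Shuffle globally for epoch-to-epoch variety, then sort within
    # fixed-size windows so neighboring indices have similar lengths.
    rng = random.Random(epoch)
    indices = list(range(len(lengths)))
    rng.shuffle(indices)
    out = []
    for start in range(0, len(indices), sort_size):
        window = indices[start:start + sort_size]
        out.extend(sorted(window, key=lambda i: lengths[i]))
    return out

lengths = [120, 5, 300, 40, 8, 220, 90, 15]
print(windowed_length_sort(lengths, sort_size=4, epoch=0))
```

Batches cut from this sequence contain similar-length utterances, so each batch pads to roughly its own maximum rather than the global one.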
```diff
@@ -323,8 +323,8 @@ class CustomDistributedBufferDynamicBatchSampler(DistributedSampler):
         self.shuffle = shuffle and is_training
         self.drop_last = drop_last

-        self.total_size = len(self.dataset)
-        # self.num_samples = int(math.ceil(self.total_size / self.num_replicas))
+        # self.total_size = len(self.dataset)
+        self.num_samples = int(math.ceil(self.total_size / self.num_replicas))
         self.epoch = 0
         self.sort_size = sort_size * num_replicas
         self.max_token_length = kwargs.get("max_token_length", 2048)
```
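The re-enabled `num_samples` line follows the usual DistributedSampler convention: each rank draws ceil(total_size / num_replicas) indices, and the index list is padded so it splits evenly across ranks. A small self-contained illustration (not FunASR code; `rank_indices` is a hypothetical helper):

```python
import math

def rank_indices(total_size: int, num_replicas: int, rank: int) -> list[int]:
    # Every rank must yield the same number of samples, so the index
    # list wraps around to pad up to num_samples * num_replicas before
    # being strided by rank.
    num_samples = int(math.ceil(total_size / num_replicas))
    padded = list(range(total_size))
    padded += padded[: num_samples * num_replicas - total_size]
    return padded[rank::num_replicas]

# 10 samples over 3 ranks -> 4 each; indices 0 and 1 are reused as padding.
for r in range(3):
    print(r, rank_indices(10, 3, r))
```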