diff --git a/layers/tacotron2.py b/layers/tacotron2.py
index 6122c38b..871e577f 100644
--- a/layers/tacotron2.py
+++ b/layers/tacotron2.py
@@ -445,7 +445,7 @@ class Decoder(nn.Module):
         self.attention_layer.init_forward_attn_state(inputs)

         outputs, stop_tokens, alignments, t = [], [], [], 0
-        stop_flags = [True, False, False]
+        stop_flags = [False, False, False]
         stop_count = 0
         while True:
             memory = self.prenet(memory)
@@ -456,7 +456,7 @@ class Decoder(nn.Module):
             alignments += [alignment]

             stop_flags[0] = stop_flags[0] or stop_token > 0.5
-            stop_flags[1] = stop_flags[1] or (alignment[0, -2:].sum() > 0.5 and t > inputs.shape[1])
+            stop_flags[1] = stop_flags[1] or (alignment[0, -2:].sum() > 0.8 and t > inputs.shape[1])
             stop_flags[2] = t > inputs.shape[1] * 2
             if all(stop_flags):
                 stop_count += 1
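
For reference, the three flags gate termination jointly: `stop_flags[0]` tracks the stop-token head, `stop_flags[1]` tracks attention mass reaching the last two encoder steps once the decoder has run past the input length, and `stop_flags[2]` is a hard cap at twice the input length; a step only counts toward stopping (via `stop_count`, whose break condition lies outside this hunk) when all three hold at once. Below is a minimal, self-contained sketch of that flag logic with dummy tensors; `update_stop_flags` and `seq_len` are hypothetical names introduced here, and only the 0.5 / 0.8 thresholds and the `* 2` cap come from the patch.

```python
import torch

def update_stop_flags(stop_flags, stop_token, alignment, t, seq_len):
    """Update the three stopping conditions after one decoder step.

    stop_flags[0]: the stop-token head fired (sigmoid output > 0.5).
    stop_flags[1]: attention mass on the last two encoder steps exceeds 0.8
                   and the decoder has run for more than seq_len steps.
    stop_flags[2]: hard cap at twice the encoder length.
    """
    stop_flags[0] = stop_flags[0] or float(stop_token) > 0.5
    stop_flags[1] = stop_flags[1] or (
        float(alignment[0, -2:].sum()) > 0.8 and t > seq_len
    )
    stop_flags[2] = t > seq_len * 2
    return stop_flags

# Illustrative usage with dummy tensors (batch of 1, 10 encoder steps).
seq_len = 10
stop_flags = [False, False, False]
for t in range(1, 3 * seq_len):
    stop_token = torch.sigmoid(torch.randn(1))                 # fake stop-head output
    alignment = torch.softmax(torch.randn(1, seq_len), dim=1)  # fake attention weights
    stop_flags = update_stop_flags(stop_flags, stop_token, alignment, t, seq_len)
    if all(stop_flags):
        break
```

Because the first two flags are sticky once set while the third is recomputed every step, flipping the initial value of `stop_flags[0]` from `True` to `False` makes the stop-token condition a real requirement rather than a pre-satisfied one, and raising the alignment threshold from 0.5 to 0.8 makes the attention-based condition stricter.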