From 043e49f367d2e7fc951267aec88974ecf59a5a8f Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Fri, 5 Apr 2019 08:40:09 +0200
Subject: [PATCH] active windowing

---
 layers/tacotron2.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/layers/tacotron2.py b/layers/tacotron2.py
index 9dbed3fc..6b413d10 100644
--- a/layers/tacotron2.py
+++ b/layers/tacotron2.py
@@ -124,14 +124,13 @@ class Attention(nn.Module):
                          attention_dim)
         self._mask_value = -float("inf")
         self.windowing = windowing
-        if self.windowing:
-            self.win_back = 1
-            self.win_front = 3
-            self.win_idx = None
+        self.win_idx = None
         self.norm = norm

     def init_win_idx(self):
         self.win_idx = -1
+        self.win_back = 1
+        self.win_front = 3

     def get_attention(self, query, processed_inputs, attention_cat):
         processed_query = self.query_layer(query.unsqueeze(1))
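
Note (not part of the patch): the change moves the window parameters out of the
constructor and into init_win_idx(), so they are (re)set every time the window is
initialized for a new utterance rather than only when the module is built with
windowing enabled. Below is a minimal sketch of how win_idx, win_back, and
win_front are typically consumed at inference time: energies outside
[win_idx - win_back, win_idx + win_front] are masked before normalization, and the
window advances to the most attended encoder step. The function name
apply_windowing, the standalone signature, and the [batch, max_time] energy shape
are assumptions for illustration, not code from this commit.

    import torch

    def apply_windowing(attention, win_idx, win_back=1, win_front=3):
        """Mask attention energies outside the active window and advance it.

        attention: [batch, max_time] tensor of raw attention energies.
        win_idx:   index of the currently attended encoder step (-1 at start).
        """
        back_win = win_idx - win_back
        front_win = win_idx + win_front
        if back_win > 0:
            # mask energies behind the window
            attention[:, :back_win] = -float("inf")
        if front_win < attention.shape[1]:
            # mask energies ahead of the window
            attention[:, front_win:] = -float("inf")
        # move the window to the most attended encoder step
        new_win_idx = torch.argmax(attention, dim=1)[0].item()
        return attention, new_win_idx

Usage sketch: starting from win_idx = -1 (matching init_win_idx() in the patch),
only the first few encoder steps are attendable, and the window then tracks the
argmax forward step by step:

    energies = torch.randn(1, 50)
    energies, win_idx = apply_windowing(energies, win_idx=-1)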