mirror of https://github.com/coqui-ai/TTS.git
Revert attention bias setting to old
This commit is contained in:
parent eb839a7acd
commit d28bbe09fb
@@ -55,7 +55,7 @@ class LocationSensitiveAttention(nn.Module):
         self.loc_linear = nn.Linear(filters, attn_dim, bias=True)
         self.query_layer = nn.Linear(query_dim, attn_dim, bias=True)
         self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=True)
-        self.v = nn.Linear(attn_dim, 1, bias=True)
+        self.v = nn.Linear(attn_dim, 1, bias=False)
         self.processed_annots = None
         # self.init_layers()

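For context, the reverted line is the final scoring projection of this additive (Bahdanau-style) attention. Below is a minimal sketch of how these projections are typically combined; the actual forward method is not part of this diff, so the class name, the argument names query, annots, and loc_features, and the assumed tensor shapes are illustrative assumptions, not the library's code. It shows why bias=False is the usual choice on self.v: a bias on the scalar score adds the same constant at every encoder timestep and cancels under the softmax.

import torch
import torch.nn as nn


class LocationSensitiveAttentionSketch(nn.Module):
    """Sketch of additive attention with location features (assumed forward pass)."""

    def __init__(self, query_dim, annot_dim, attn_dim, filters):
        super().__init__()
        self.loc_linear = nn.Linear(filters, attn_dim, bias=True)
        self.query_layer = nn.Linear(query_dim, attn_dim, bias=True)
        self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=True)
        # The reverted setting: no bias on the final scoring projection.
        # A bias here shifts every timestep's score by the same constant,
        # which the softmax ignores, so it only adds a useless parameter.
        self.v = nn.Linear(attn_dim, 1, bias=False)

    def forward(self, query, annots, loc_features):
        # query: (B, query_dim); annots: (B, T, annot_dim);
        # loc_features: (B, T, filters), e.g. from a conv over past alignments.
        processed = torch.tanh(
            self.query_layer(query).unsqueeze(1)
            + self.annot_layer(annots)
            + self.loc_linear(loc_features)
        )
        scores = self.v(processed).squeeze(-1)      # (B, T) unnormalized scores
        alignment = torch.softmax(scores, dim=-1)   # attention weights over T
        context = torch.bmm(alignment.unsqueeze(1), annots).squeeze(1)
        return context, alignment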