diff --git a/layers/attention.py b/layers/attention.py
index 3bab1ad6..51d3542a 100644
--- a/layers/attention.py
+++ b/layers/attention.py
@@ -1,5 +1,4 @@
 import torch
-from torch.autograd import Variable
 from torch import nn
 from torch.nn import functional as F
 
diff --git a/layers/tacotron.py b/layers/tacotron.py
index 96c27328..4d23835f 100644
--- a/layers/tacotron.py
+++ b/layers/tacotron.py
@@ -1,7 +1,6 @@
 # coding: utf-8
 import torch
 from torch import nn
-
 from .attention import AttentionRNN
 from .attention import get_mask_from_lengths
 
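Context for the removals above: since PyTorch 0.4, Variable was merged into Tensor, so tensors track gradients themselves and the torch.autograd.Variable import is dead code. A minimal sketch of the old versus new idiom (the tensor shape is illustrative, not taken from this codebase):

import torch

# Pre-0.4 style, no longer needed:
#   from torch.autograd import Variable
#   x = Variable(torch.randn(4, 8), requires_grad=True)

# Since PyTorch 0.4, Tensor carries autograd state directly:
x = torch.randn(4, 8, requires_grad=True)
y = (x * 2).sum()
y.backward()          # gradients flow without any Variable wrapper
print(x.grad.shape)   # torch.Size([4, 8])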