diff --git a/datasets/LJSpeech.py b/datasets/LJSpeech.py
index 898b88bf..aea6575d 100644
--- a/datasets/LJSpeech.py
+++ b/datasets/LJSpeech.py
@@ -74,7 +74,7 @@ class LJSpeechDataset(Dataset):
 
     def get_dummy_data(self):
         r"""Get a dummy input for testing"""
-        return torch.autograd.Variable(torch.ones(16, 143)).type(torch.LongTensor)
+        return torch.ones(16, 143).type(torch.LongTensor)
 
     def collate_fn(self, batch):
         r"""
diff --git a/datasets/TWEB.py b/datasets/TWEB.py
index af070381..eeae551a 100644
--- a/datasets/TWEB.py
+++ b/datasets/TWEB.py
@@ -72,10 +72,6 @@ class TWEBDataset(Dataset):
         sample = {'text': text, 'wav': wav, 'item_idx': self.frames[idx][0]}
         return sample
 
-    def get_dummy_data(self):
-        r"""Get a dummy input for testing"""
-        return torch.autograd.Variable(torch.ones(16, 143)).type(torch.LongTensor)
-
     def collate_fn(self, batch):
         r"""
         Perform preprocessing and create a final data batch:
diff --git a/layers/attention.py b/layers/attention.py
index 3bab1ad6..51d3542a 100644
--- a/layers/attention.py
+++ b/layers/attention.py
@@ -1,5 +1,4 @@
 import torch
-from torch.autograd import Variable
 from torch import nn
 from torch.nn import functional as F
 
diff --git a/layers/custom_layers.py b/layers/custom_layers.py
index c7337e71..e7f52d7c 100644
--- a/layers/custom_layers.py
+++ b/layers/custom_layers.py
@@ -1,6 +1,5 @@
 # coding: utf-8
 import torch
-from torch.autograd import Variable
 from torch import nn
 
 
diff --git a/layers/losses.py b/layers/losses.py
index 3e8376a4..9e467ef8 100644
--- a/layers/losses.py
+++ b/layers/losses.py
@@ -1,6 +1,5 @@
 import torch
 from torch.nn import functional
-from torch.autograd import Variable
 from torch import nn
 
 
@@ -11,7 +10,6 @@ def _sequence_mask(sequence_length, max_len=None):
     batch_size = sequence_length.size(0)
     seq_range = torch.arange(0, max_len).long()
     seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len)
-    seq_range_expand = Variable(seq_range_expand)
    if sequence_length.is_cuda:
         seq_range_expand = seq_range_expand.cuda()
     seq_length_expand = (sequence_length.unsqueeze(1)
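
For reference, a minimal self-contained sketch of the migration pattern this diff applies, assuming PyTorch >= 0.4, where Variable and Tensor were merged and the Variable(...) wrapper became a no-op. The function below mirrors _sequence_mask from layers/losses.py after the change; the top-level name sequence_mask, the default for max_len, and the example lengths are illustrative, not taken from the patch.

import torch

def sequence_mask(sequence_length, max_len=None):
    # Build a mask of shape (batch, max_len) that is true inside each
    # sequence and false over the padding. No Variable() wrapper is needed:
    # torch.arange already returns a Tensor that autograd can work with.
    if max_len is None:
        max_len = int(sequence_length.max())  # assumption: default to the longest length
    batch_size = sequence_length.size(0)
    seq_range = torch.arange(0, max_len).long()
    seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len)
    if sequence_length.is_cuda:
        seq_range_expand = seq_range_expand.cuda()
    seq_length_expand = sequence_length.unsqueeze(1).expand_as(seq_range_expand)
    return seq_range_expand < seq_length_expand

# Hypothetical usage: per-item lengths for a batch of three padded sequences.
lengths = torch.tensor([3, 1, 2])
print(sequence_mask(lengths))
# tensor([[ True,  True,  True],
#         [ True, False, False],
#         [ True,  True, False]])

Where old code relied on Variable(t, requires_grad=True), the post-0.4 equivalent is to create the tensor with requires_grad=True or call t.requires_grad_() in place.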