mirror of https://github.com/coqui-ai/TTS.git
remove Variable from losses.py
This commit is contained in:
parent db50d395ed
commit c7b20d5133
losses.py
@@ -1,6 +1,5 @@
 import torch
 from torch.nn import functional
-from torch.autograd import Variable
 from torch import nn
 
 
@@ -11,7 +10,6 @@ def _sequence_mask(sequence_length, max_len=None):
     batch_size = sequence_length.size(0)
     seq_range = torch.arange(0, max_len).long()
     seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len)
-    seq_range_expand = Variable(seq_range_expand)
     if sequence_length.is_cuda:
         seq_range_expand = seq_range_expand.cuda()
     seq_length_expand = (sequence_length.unsqueeze(1)
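After this change, _sequence_mask works directly on tensors, since wrapping in torch.autograd.Variable has been unnecessary since PyTorch 0.4, when tensors and Variables were merged. Below is a minimal sketch of the resulting function, reconstructed from the diff context; the lines not shown in the hunks (the max_len default and the final comparison) are assumptions rather than the exact repository code.

import torch


def _sequence_mask(sequence_length, max_len=None):
    # Sketch of the post-commit function; max_len handling and the return
    # expression are assumed, as they fall outside the visible diff hunks.
    if max_len is None:
        max_len = sequence_length.data.max()
    batch_size = sequence_length.size(0)
    seq_range = torch.arange(0, max_len).long()
    # The expanded index range is used as a plain tensor; no Variable wrapping.
    seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len)
    if sequence_length.is_cuda:
        seq_range_expand = seq_range_expand.cuda()
    seq_length_expand = (sequence_length.unsqueeze(1)
                         .expand_as(seq_range_expand))
    # Positions before each sequence's length become True, the rest False.
    return seq_range_expand < seq_length_expand

For example, _sequence_mask(torch.tensor([2, 4]), max_len=4) yields a (2, 4) mask whose first row has its first two positions set and whose second row is fully set.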