mirror of https://github.com/coqui-ai/TTS.git
pylint fixes
This commit is contained in:
parent 3aeef5e83c
commit a6f564c8c8
@@ -7,6 +7,8 @@ from torch.nn import functional
 from TTS.tts.utils.generic_utils import sequence_mask


+# pylint: disable=abstract-method
+# relates https://github.com/pytorch/pytorch/issues/42305
 class L1LossMasked(nn.Module):
     def __init__(self, seq_len_norm):
         super().__init__()
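The two added comment lines above are the actual pylint fix: pylint can report W0223 (abstract-method) for torch.nn.Module subclasses even when they define forward, a false positive discussed in the linked PyTorch issue, so the check is disabled right above the affected class. A minimal, hypothetical sketch of the same pattern follows; the class and its forward body are illustrative only and are not code from this commit.

import torch
from torch import nn


# pylint: disable=abstract-method
# relates https://github.com/pytorch/pytorch/issues/42305
class ExampleMaskedLoss(nn.Module):
    """Hypothetical nn.Module subclass, used only to illustrate the disable comment."""

    def forward(self, x, target):
        # forward is defined here, yet older pylint versions may still warn
        # about Module's unimplemented forward without the disable above.
        return torch.nn.functional.l1_loss(x, target)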
@@ -145,9 +147,8 @@ class DifferentailSpectralLoss(nn.Module):
         target_diff = target[:, 1:] - target[:, :-1]
         if len(signature(self.loss_func).parameters) > 2:
             return self.loss_func(x_diff, target_diff, length-1)
-        else:
-            # if loss masking is not enabled
-            return self.loss_func(x_diff, target_diff)
+        # if loss masking is not enabled
+        return self.loss_func(x_diff, target_diff)


 class GuidedAttentionLoss(torch.nn.Module):
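This hunk applies pylint's no-else-return suggestion (R1705): when the if branch already returns, the else: can be dropped and its body dedented, which removes one line without changing behaviour. Below is a standalone sketch of the same control flow, with the logic pulled out into a hypothetical free function so it runs on its own; only the names loss_func, x_diff, target_diff, and length follow the diff, everything else is assumed.

from inspect import signature


def apply_loss(loss_func, x_diff, target_diff, length=None):
    """Hypothetical helper mirroring the control flow in the hunk above."""
    # Masked losses accept a third `length` argument; plain losses take only two.
    if len(signature(loss_func).parameters) > 2:
        return loss_func(x_diff, target_diff, length - 1)
    # if loss masking is not enabled
    return loss_func(x_diff, target_diff)


def unmasked_l1(x, target):
    return abs(x - target)


def masked_l1(x, target, length):
    # pretend `length` scales the comparison, standing in for real masking
    return abs(x - target) / max(length, 1)


print(apply_loss(unmasked_l1, 3.0, 1.0))          # 2.0 (two-parameter path)
print(apply_loss(masked_l1, 3.0, 1.0, length=5))  # 0.5 (masked path, length - 1 = 4)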
@@ -16,7 +16,8 @@ from TTS.vocoder.utils.distribution import (
 def stream(string, variables):
     sys.stdout.write(f"\r{string}" % variables)

-
+# pylint: disable=abstract-method
+# relates https://github.com/pytorch/pytorch/issues/42305
 class ResBlock(nn.Module):
     def __init__(self, dims):
         super().__init__()
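The stream helper shown as context in this last hunk mixes an f-string with %-style formatting: the f-string only contributes the leading carriage return, and the message itself is filled in by %, so each call rewrites the current terminal line in place as a simple progress display. A short, self-contained usage sketch; the loop is hypothetical and not part of the diff.

import sys
import time


def stream(string, variables):
    # `\r` comes from the f-string; the placeholders in `string` are filled by `%`.
    sys.stdout.write(f"\r{string}" % variables)


for step in range(1, 6):
    stream("step %d / %d", (step, 5))
    time.sleep(0.1)
print()  # move to a fresh line once the progress display is done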