Update FastPitchConfig

Eren Gölge 2022-01-25 09:29:21 +00:00
parent 1932401e8d
commit b3ed6ff6b7
1 changed file with 5 additions and 8 deletions


@@ -89,12 +89,9 @@ class FastPitchConfig(BaseTTSConfig):
         pitch_loss_alpha (float):
             Weight for the pitch predictor's loss. If set 0, disables the pitch predictor. Defaults to 1.0.
 
-        binary_loss_alpha (float):
+        binary_align_loss_alpha (float):
             Weight for the binary loss. If set 0, disables the binary loss. Defaults to 1.0.
 
-        binary_align_loss_start_step (int):
-            Start binary alignment loss after this many steps. Defaults to 20000.
-
         min_seq_len (int):
             Minimum input sequence length to be used at training.
@@ -129,12 +126,12 @@ class FastPitchConfig(BaseTTSConfig):
     duration_loss_type: str = "mse"
     use_ssim_loss: bool = True
     ssim_loss_alpha: float = 1.0
-    dur_loss_alpha: float = 1.0
     spec_loss_alpha: float = 1.0
-    pitch_loss_alpha: float = 1.0
     aligner_loss_alpha: float = 1.0
-    binary_align_loss_alpha: float = 1.0
-    binary_align_loss_start_step: int = 20000
+    pitch_loss_alpha: float = 0.1
+    dur_loss_alpha: float = 0.1
+    binary_align_loss_alpha: float = 0.1
+    binary_loss_warmup_epochs: int = 150
 
     # overrides
     min_seq_len: int = 13
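
For reference, a minimal sketch of how the updated fields might be set when constructing the config after this change. The import path and keyword-argument construction are assumed from the repository layout (the config is a Coqpit dataclass) and are not part of this diff; the values mirror the new defaults.

    # Hypothetical usage sketch; import path assumed, not shown in this diff.
    from TTS.tts.configs.fast_pitch_config import FastPitchConfig

    config = FastPitchConfig(
        pitch_loss_alpha=0.1,           # new default, was 1.0
        dur_loss_alpha=0.1,             # new default, was 1.0
        binary_align_loss_alpha=0.1,    # docstring name now matches the field; was 1.0
        binary_loss_warmup_epochs=150,  # replaces binary_align_loss_start_step (step-based start -> epoch-based warmup)
    )
    print(config.binary_loss_warmup_epochs)  # 150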