mirror of https://github.com/coqui-ai/TTS.git
use librosa 0.7.2 and fix vocoder dataset assert
This commit is contained in:
parent
83d42ba7fa
commit
c33068ad40
|
@ -3,7 +3,7 @@ tensorflow>=2.2
|
||||||
numpy>=1.16.0
|
numpy>=1.16.0
|
||||||
scipy>=0.19.0
|
scipy>=0.19.0
|
||||||
numba==0.48
|
numba==0.48
|
||||||
librosa==0.6.2
|
librosa==0.7.2
|
||||||
unidecode==0.4.20
|
unidecode==0.4.20
|
||||||
attrdict
|
attrdict
|
||||||
tensorboardX
|
tensorboardX
|
||||||
|
|
|
@ -3,7 +3,7 @@ tensorflow==2.3rc
|
||||||
numpy>=1.16.0
|
numpy>=1.16.0
|
||||||
scipy>=0.19.0
|
scipy>=0.19.0
|
||||||
numba==0.48
|
numba==0.48
|
||||||
librosa==0.6.2
|
librosa==0.7.2
|
||||||
unidecode==0.4.20
|
unidecode==0.4.20
|
||||||
attrdict
|
attrdict
|
||||||
tensorboardX
|
tensorboardX
|
||||||
|
|
2
setup.py
2
setup.py
|
@ -82,7 +82,7 @@ requirements = {
|
||||||
"numpy>=1.16.0",
|
"numpy>=1.16.0",
|
||||||
"numba==0.48",
|
"numba==0.48",
|
||||||
"scipy>=0.19.0",
|
"scipy>=0.19.0",
|
||||||
"librosa==0.6.2",
|
"librosa==0.7.2",
|
||||||
"unidecode==0.4.20",
|
"unidecode==0.4.20",
|
||||||
"attrdict",
|
"attrdict",
|
||||||
"tensorboardX",
|
"tensorboardX",
|
||||||
|
|
|
@ -59,9 +59,9 @@ def gan_dataset_case(batch_size, seq_len, hop_len, conv_pad, return_segments, us
|
||||||
audio = wav1[idx].squeeze()
|
audio = wav1[idx].squeeze()
|
||||||
feat = feat1[idx]
|
feat = feat1[idx]
|
||||||
mel = ap.melspectrogram(audio)
|
mel = ap.melspectrogram(audio)
|
||||||
# the first 2 and the last frame is skipped due to the padding
|
# the first 2 and the last 2 frames are skipped due to the padding
|
||||||
# applied in spec. computation.
|
# differences in stft
|
||||||
assert (feat - mel[:, :feat1.shape[-1]])[:, 2:-1].sum() == 0, f' [!] {(feat - mel[:, :feat1.shape[-1]])[:, 2:-1].sum()}'
|
assert (feat - mel[:, :feat1.shape[-1]])[:, 2:-2].sum() <= 0, f' [!] {(feat - mel[:, :feat1.shape[-1]])[:, 2:-2].sum()}'
|
||||||
|
|
||||||
count_iter += 1
|
count_iter += 1
|
||||||
# if count_iter == max_iter:
|
# if count_iter == max_iter:
|
||||||
|
|
Loading…
Reference in New Issue