mirror of https://github.com/coqui-ai/TTS.git
phonemizer updates for utils.text
This commit is contained in:
parent 0103ee8967
commit b9629135db
@@ -67,36 +67,36 @@ def sequence_to_phoneme(sequence):
     return result.replace('}{', ' ')
 
 
 def text2phone(text):
     '''
     Convert graphemes to phonemes.
     '''
-    try:
-        ph = epi.trans_list(text, normpunc=True)
-    except:
-        ph = None
+    seperator = phonemizer.separator.Separator(' ', '', '|')
+    #try:
+    punctuations = re.findall(pat, text)
+    ph = phonemizer.phonemize(text, separator=seperator, strip=False, njobs=1, backend='espeak', language='en-us')
+    # Replace \n with matching punctuations.
+    for punct in punctuations[:-1]:
+        ph = ph.replace(' \n', punct+'| ', 1)
+    ph = ph[:-1] + punctuations[-1]
+    #except:
+    #    ph = None
     return ph
 
 
 def phoneme_to_sequence(text, cleaner_names):
+    '''
+    TODO: This ignores punctuations
+    '''
     sequence = []
     clean_text = _clean_text(text, cleaner_names)
-    for word in clean_text.split():
-        phonemes_text = text2phone(word)
-        # print(word, ' -- ', phonemes_text)
-        if phonemes_text == None:
-            print("!! After phoneme conversion the result is None. -- {} ".format(word))
-            continue
-        sequence += _phoneme_to_sequence(phonemes_text)
-        if word[0] in _punctuations:
-            sequence.append(_phonemes_to_id[word[0]])
-        elif word[-1] in _punctuations:
-            sequence.append(_phonemes_to_id[word[-1]])
-        sequence.append(_phonemes_to_id[' '])
+    phonemes = text2phone(clean_text)
+    print(phonemes.replace('|', ''))
+    if phonemes is None:
+        print("!! After phoneme conversion the result is None. -- {} ".format(clean_text))
+    for phoneme in phonemes.split('|'):
+        sequence += _phoneme_to_sequence(phoneme)
     # Append EOS char
     sequence.append(_phonemes_to_id['~'])
     return sequence
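
A minimal, self-contained sketch of what the rewritten text2phone's punctuation handling does. Only the restoration loop mirrors the committed code; the pat regex and the phonemized string are illustrative stand-ins (the real pat is defined elsewhere in utils.text, and the string emulates phonemizer's espeak output with Separator(' ', '', '|'), where phones are joined by '|' and each sentence break appears as '\n').

import re

# Assumed stand-in for the module-level punctuation pattern `pat`.
pat = r'[.,!?]'

text = "Hello world. How are you?"
punctuations = re.findall(pat, text)        # ['.', '?']

# Illustrative stand-in for phonemizer.phonemize() output with
# Separator(' ', '', '|'): phones joined by '|', one '\n' per sentence break.
ph = "h|ə|l|oʊ| w|ɜː|l|d| \nh|aʊ| ɑːɹ| j|uː| \n"

# Re-insert the original punctuation in place of the '\n' breaks, exactly
# as the updated text2phone does, keeping the '|' separator consistent.
for punct in punctuations[:-1]:
    ph = ph.replace(' \n', punct + '| ', 1)
ph = ph[:-1] + punctuations[-1]

print(ph)   # h|ə|l|oʊ| w|ɜː|l|d|.| h|aʊ| ɑːɹ| j|uː| ?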
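
A similarly hedged sketch of the updated phoneme_to_sequence flow: the whole cleaned text is phonemized once, split on the '|' separator, each chunk is mapped to an id, and the EOS symbol is appended. PHONEMES_TO_ID and lookup_chunk are toy stand-ins for the module's _phonemes_to_id table and _phoneme_to_sequence helper, neither of which is shown in this hunk.

# Toy stand-ins for the module internals used by phoneme_to_sequence.
PHONEMES_TO_ID = {p: i for i, p in enumerate(['~', ' ', 'h', 'ə', 'l', 'oʊ'])}

def lookup_chunk(chunk):
    # One '|'-delimited chunk maps to at most one id; unknown symbols are
    # dropped, which is a simplification of the real helper.
    return [PHONEMES_TO_ID[chunk]] if chunk in PHONEMES_TO_ID else []

def phoneme_to_sequence_sketch(phonemes):
    sequence = []
    for phoneme in phonemes.split('|'):
        sequence += lookup_chunk(phoneme)
    sequence.append(PHONEMES_TO_ID['~'])    # append the EOS character
    return sequence

# 'h|ə|l|oʊ| ' stands in for text2phone("hello") output.
print(phoneme_to_sequence_sketch('h|ə|l|oʊ| '))   # -> [2, 3, 4, 5, 1, 0]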