config for tacotron2

This commit is contained in:
Eren Golge 2019-10-13 01:22:30 +02:00
parent 2dcdc14ea6
commit c1f598b5d0
2 changed files with 6 additions and 5 deletions

View File

@@ -11,8 +11,9 @@ sudo sh install.sh
python3 setup.py develop
# cp -R ${USER_DIR}/GermanData ../tmp/
# cp -R /data/ro/shared/data/keithito/LJSpeech-1.1/ ../tmp/
python3 distribute.py --config_path config.json --data_path /data/ro/shared/data/keithito/LJSpeech-1.1/
# python3 distribute.py --config_path config.json --data_path /data/ro/shared/data/keithito/LJSpeech-1.1/
# cp -R ${USER_DIR}/Mozilla_22050 ../tmp/
# python3 distribute.py --config_path config_tacotron_gst.json --data_path ../tmp/Mozilla_22050/
python3 distribute.py --config_path config.json --data_path /data/rw/home/LibriTTS/train-clean-360
# python3 distribute.py --config_path config.json --data_path /data/rw/home/LibriTTS/train-clean-360
python3 distribute.py --config_path config.json
while true; do sleep 1000000; done

View File

@@ -1,6 +1,6 @@
{
"run_name": "ljspeech",
"run_description": "Tacotron prenet fix test run - dev-memory_fix",
"run_description": "Tacotron2 ljspeech release training",
"audio":{
// Audio processing parameters
@@ -31,7 +31,7 @@
"reinit_layers": [],
"model": "Tacotron", // one of the model in models/
"model": "Tacotron2", // one of the model in models/
"grad_clip": 1, // upper limit for gradients for clipping.
"epochs": 1000, // total number of epochs to train.
"lr": 0.0001, // Initial learning rate. If Noam decay is active, maximum learning rate.
@@ -82,7 +82,7 @@
[
{
"name": "ljspeech",
"path": "/home/erogol/Data/LJSpeech-1.1/",
"path": "/data/ro/shared/data/keithito/LJSpeech-1.1/",
"meta_file_train": "metadata_train.csv",
"meta_file_val": "metadata_val.csv"
}