mirror of https://github.com/coqui-ai/TTS.git
1.8 MiB
1.8 MiB
None
<html lang="en">
<head>
</head>
</html>
In [1]:
%load_ext autoreload %autoreload 2 import os import sys import io import torch import time import numpy as np from collections import OrderedDict %pylab inline rcParams["figure.figsize"] = (16,5) sys.path.append('/home/erogol/projects/') import librosa import librosa.display from TTS.models.tacotron import Tacotron from TTS.layers import * from TTS.utils.data import * from TTS.utils.audio import AudioProcessor from TTS.utils.generic_utils import load_config from TTS.utils.text import text_to_sequence import IPython from IPython.display import Audio from utils import *
Populating the interactive namespace from numpy and matplotlib
In [2]:
def tts(model, text, CONFIG, use_cuda, ap, figures=True): t_1 = time.time() waveform, alignment, spectrogram = create_speech(model, text, CONFIG, use_cuda, ap) print(" > Run-time: {}".format(time.time() - t_1)) if figures: visualize(alignment, spectrogram, CONFIG) IPython.display.display(Audio(waveform, rate=CONFIG.sample_rate)) return alignment, spectrogram
In [3]:
# Set constants ROOT_PATH = '../result/February-13-2018_01:04AM/' MODEL_PATH = ROOT_PATH + '/best_model.pth.tar' CONFIG_PATH = ROOT_PATH + '/config.json' OUT_FOLDER = ROOT_PATH + '/test/' CONFIG = load_config(CONFIG_PATH) use_cuda = False
In [5]:
# load the model model = Tacotron(CONFIG.embedding_size, CONFIG.hidden_size, CONFIG.num_mels, CONFIG.num_freq, CONFIG.r) # load the audio processor ap = AudioProcessor(CONFIG.sample_rate, CONFIG.num_mels, CONFIG.min_level_db, CONFIG.frame_shift_ms, CONFIG.frame_length_ms, CONFIG.preemphasis, CONFIG.ref_level_db, CONFIG.num_freq, CONFIG.power, griffin_lim_iters=80) # load model state if use_cuda: cp = torch.load(MODEL_PATH) else: cp = torch.load(MODEL_PATH, map_location=lambda storage, loc: storage) # # small trick to remove DataParallel wrapper new_state_dict = OrderedDict() for k, v in cp['model'].items(): name = k[7:] # remove `module.` new_state_dict[name] = v cp['model'] = new_state_dict # load the model model.load_state_dict(cp['model']) if use_cuda: model.cuda() model.eval()
| > Embedding dim : 149
--------------------------------------------------------------------------- KeyError Traceback (most recent call last) <ipython-input-5-994bc6f7ae61> in <module>() 23 24 # load the model ---> 25 model.load_state_dict(cp['model']) 26 if use_cuda: 27 model.cuda() ~/miniconda3/envs/pytorch/lib/python3.6/site-packages/torch/nn/modules/module.py in load_state_dict(self, state_dict, strict) 488 elif strict: 489 raise KeyError('unexpected key "{}" in state_dict' --> 490 .format(name)) 491 if strict: 492 missing = set(own_state.keys()) - set(state_dict.keys()) KeyError: 'unexpected key "module.embedding.weight" in state_dict'
In [6]:
cp['model']
Out[6]:
OrderedDict([('module.embedding.weight', -3.5297e-02 -3.2110e-02 1.5772e-02 ... 1.5752e-03 8.5511e-02 -2.4540e-03 2.2812e-02 4.3733e-02 -8.5045e-02 ... 3.9608e-02 5.9179e-02 2.2359e-02 -5.6533e-02 2.8566e-01 -5.8419e-01 ... -1.9973e-01 3.0205e-01 9.3615e-02 ... ⋱ ... -2.3212e-01 1.7337e-01 -1.8613e-01 ... -2.9493e-02 -2.2340e-03 8.0515e-03 -3.9615e-01 1.3994e-01 -4.2236e-02 ... 2.7774e-01 -2.1261e-02 4.8095e-01 1.0893e-01 3.4349e-01 6.2014e-01 ... 4.3346e-01 -2.2796e-01 -2.4084e-01 [torch.FloatTensor of size 149x256]), ('module.encoder.prenet.layers.0.weight', -1.0014e-01 2.9802e-02 -2.4292e-01 ... -1.8605e-01 -7.1386e-02 -2.3602e-02 3.2112e-01 -1.7793e-02 -4.7806e-02 ... -6.9432e-02 -2.0528e-02 -9.1605e-02 1.8654e-01 6.4952e-02 9.3887e-02 ... -1.0719e-02 3.4214e-02 6.8752e-02 ... ⋱ ... 6.9887e-03 1.6763e-01 1.2888e-03 ... 7.4507e-03 -6.6574e-02 -6.5742e-04 2.4360e-01 3.5927e-02 3.5399e-02 ... 1.2186e-01 -1.2406e-02 -1.6318e-01 -1.1759e-02 -8.7780e-03 -1.9734e-01 ... -5.8825e-02 6.7379e-02 -1.0530e-02 [torch.FloatTensor of size 256x256]), ('module.encoder.prenet.layers.0.bias', -0.0704 -0.2325 -0.1103 -0.0531 -0.1821 -0.0734 -0.0968 -0.1854 -0.0928 -0.2251 -0.1243 -0.0883 -0.0765 -0.0324 -0.1123 -0.2250 -0.0198 -0.2890 -0.0232 -0.0220 0.0422 -0.0454 0.0288 -0.1473 -0.0470 -0.1018 -0.0934 -0.1430 -0.0440 -0.1701 -0.1629 -0.1001 0.0394 -0.0823 -0.0032 -0.0437 -0.1353 -0.3906 -0.1264 -0.0330 0.0433 -0.1669 -0.1263 -0.0900 -0.1593 -0.1618 -0.1133 -0.0787 -0.1686 -0.0694 -0.0861 0.0319 -0.0489 -0.0673 -0.2221 -0.1320 -0.0541 -0.4774 -0.0294 -0.0766 0.0717 -0.1841 -0.0333 -0.1523 -0.1103 -0.0361 -0.0648 -0.0957 -0.1330 -0.1408 0.0165 -0.1503 -0.1603 -0.0686 -0.0476 -0.2059 -0.1780 -0.0734 -0.1016 -0.0711 -0.1130 -0.2282 -0.0068 -0.1549 -0.1033 0.0493 -0.2593 -0.0633 -0.2569 -0.1936 -0.3189 -0.0126 -0.2992 -0.0018 -0.0711 -0.0621 -0.2238 -0.1572 -0.0731 -0.0806 -0.0980 0.0449 -0.1195 -0.1380 -0.1631 -0.0250 -0.1572 -0.0357 -0.1325 -0.0770 -0.2003 0.0232 -0.0048 -0.1999 0.0309 -0.1343 -0.2083 -0.1762 -0.1273 -0.1509 -0.1643 0.0228 -0.1819 -0.0917 0.0122 -0.0810 -0.2499 -0.1043 -0.0875 -0.0225 0.0012 -0.1237 -0.1153 -0.0151 0.0029 -0.0437 -0.3011 0.0030 -0.0078 -0.0594 -0.0669 -0.2825 -0.0541 -0.0008 -0.0352 -0.1139 -0.0350 -0.2285 -0.2661 -0.0469 -0.0023 -0.1536 -0.1614 0.0145 -0.1819 -0.0677 -0.0682 -0.0521 -0.0962 -0.0995 -0.0487 -0.0144 -0.1920 -0.1974 -0.0702 -0.0893 -0.0509 -0.0741 -0.1373 0.0637 -0.2082 -0.1559 -0.2094 -0.2431 -0.1071 -0.0244 -0.1300 -0.1789 0.0219 -0.2220 -0.0408 -0.2379 -0.2404 -0.0639 -0.0447 -0.1562 -0.0362 -0.2018 -0.0858 -0.0118 -0.0631 -0.0660 -0.0260 -0.1357 -0.3616 -0.4833 -0.0934 -0.0108 -0.0121 -0.0484 -0.2504 -0.1337 -0.1002 -0.1239 -0.0047 0.0031 -0.1129 0.0301 0.0399 -0.0143 -0.1699 -0.0369 -0.0570 -0.1132 -0.0772 -0.0208 -0.0780 -0.0719 -0.0142 0.0278 -0.0418 -0.0729 -0.0724 -0.0749 -0.0849 -0.0984 -0.1697 -0.0529 -0.3286 -0.0006 0.0464 -0.0439 -0.0135 -0.1863 -0.0453 -0.1910 -0.1649 -0.1927 -0.1597 -0.0844 -0.1204 -0.0122 -0.2126 -0.0206 -0.2664 -0.0634 -0.3220 -0.0365 -0.0187 -0.1900 -0.2600 -0.0692 -0.1204 -0.3588 -0.0812 -0.0753 [torch.FloatTensor of size 256]), ('module.encoder.prenet.layers.1.weight', -1.6126e-01 2.8914e-02 -1.0028e-01 ... -3.5420e-02 -1.3256e-01 -3.0317e-03 -8.8005e-01 -5.2990e-01 -1.5287e-01 ... 2.2524e-02 1.0137e-01 -1.0721e-01 -1.4194e+00 -4.4239e-01 -1.8868e-02 ... -2.4959e-01 -8.1351e-01 -1.4342e+00 ... ⋱ ... -9.7612e-01 -5.1730e-01 -7.7711e-01 ... -5.1752e-02 -6.2894e-01 -8.6156e-01 -2.0189e-01 -5.1400e-01 -4.7799e-01 ... -2.3133e-01 1.4875e-01 -3.8852e-01 -3.5884e-02 -1.0765e-01 -2.2468e-01 ... -1.0134e-01 -8.1144e-02 2.8081e-02 [torch.FloatTensor of size 128x256]), ('module.encoder.prenet.layers.1.bias', -0.3585 1.6995 2.3166 1.7364 0.4701 1.5738 2.1307 1.2531 1.6245 2.1560 0.9820 1.8875 2.4729 1.0907 2.2598 1.9542 1.9548 1.5077 1.0076 2.6810 2.4430 1.9737 -0.0700 1.8026 -0.4326 1.5797 1.2291 1.4399 1.8057 2.7138 0.3383 2.0052 1.9785 2.7670 2.1501 0.8639 2.3999 2.3451 1.7723 1.1212 -0.2053 1.8817 2.6431 1.9419 -0.2995 1.9662 1.1049 1.8972 1.4069 1.3095 2.8848 2.2875 2.4240 1.6281 2.4198 1.2517 1.8351 2.3133 1.7785 -0.3808 2.4419 2.2181 2.6447 2.2217 1.2486 0.3046 0.6007 0.9984 2.0237 2.5613 0.9227 0.3079 2.2933 1.0479 1.7179 1.8413 2.2759 1.6229 0.4718 1.7324 2.0497 3.0045 0.8048 1.7378 2.7005 1.3603 1.8880 0.8904 0.4747 2.2508 2.0709 1.5177 -0.3586 0.9661 1.7081 2.5825 1.9696 2.3685 2.0387 1.4500 2.4367 0.4283 1.3150 2.2226 2.4524 2.1837 0.7118 2.1082 2.2519 1.7399 1.9642 1.2885 2.7799 -0.4589 1.0244 0.5252 1.9328 1.9372 1.6940 0.9164 2.0596 0.9265 1.7577 2.0141 1.5980 2.5939 0.3658 -0.3199 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.0.conv1d.weight', ( 0 ,.,.) = -6.6251e-02 -6.8183e-01 -8.8998e-01 ⋮ -7.3685e-02 -1.7196e-01 -6.8184e-02 ( 1 ,.,.) = -3.6669e-02 1.0216e-01 2.9180e-01 ⋮ -4.3819e-01 1.1236e-02 -8.6161e-02 ( 2 ,.,.) = 3.1580e-02 -4.6436e-02 -3.1578e-02 ⋮ -2.9636e-01 -3.1398e-01 -9.8746e-02 ... (125,.,.) = -4.8537e-02 -9.7628e-01 -4.7115e-02 ⋮ 3.7837e-02 -2.5917e-01 -4.0414e-01 (126,.,.) = -1.0193e-01 4.4068e-01 -5.4034e-01 ⋮ -1.7104e-01 -2.3850e-01 -1.6505e-01 (127,.,.) = 2.5536e-02 -9.0031e-01 -7.3607e-01 ⋮ -3.3190e-01 -7.5025e-03 -1.0425e-01 [torch.FloatTensor of size 128x128x1]), ('module.encoder.cbhg.conv1d_banks.0.bn.weight', 0.6591 -1.2572 0.8739 0.0423 -0.8999 0.4206 1.2460 -1.7693 1.1016 0.3619 -1.5488 -0.4151 0.0202 -1.1553 0.6241 -0.7603 0.1831 -1.3233 -1.1399 0.2576 0.3289 0.1837 -0.3407 0.3372 -0.7382 0.3482 0.3916 0.6138 -0.0488 -1.7011 0.5796 0.2722 -0.4631 0.0869 -1.8734 0.7504 -0.4008 -0.0150 -1.9485 -1.5207 0.1789 -1.8307 0.4566 0.4261 0.8417 -0.2912 0.0864 0.0459 0.3181 -0.5764 -0.1530 0.0720 0.4791 0.1626 -1.7365 0.9922 0.2440 0.3228 0.2166 0.2625 -1.8546 -2.8205 0.4102 0.2564 0.8064 -1.9707 -1.5620 0.2139 1.7856 0.1005 0.6677 1.7832 0.2558 0.4171 -1.5547 -0.3117 0.2358 -0.7742 0.1305 -0.0683 -1.2802 -1.3206 -0.0826 0.5054 -1.3429 -0.8753 -1.3754 -0.0851 0.4566 1.2336 0.3783 -2.4549 -1.1048 1.9755 0.7881 -0.1720 0.5107 -1.2934 -1.0566 -1.4817 -0.9210 -0.1149 0.8603 1.7239 -0.0900 -0.0847 0.0394 0.5084 0.2656 0.4883 0.6596 -1.0905 -0.5039 -2.9462 0.0330 -0.0026 -0.0639 0.7629 0.7044 0.5001 0.0064 -0.9646 0.0032 0.0372 1.2075 0.9911 0.2357 0.8908 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.0.bn.bias', 0.0496 -0.3761 0.1720 0.0088 -0.1417 0.0287 0.1284 -0.6863 0.1531 0.1071 -0.3297 -0.1519 -0.0036 -0.3632 0.1521 -0.3017 0.0584 -0.2840 -0.2838 0.0796 0.1045 0.0586 -0.1395 0.0719 -0.2271 0.0499 0.0884 0.2548 -0.0211 -0.2821 0.1719 0.0869 -0.1280 0.0238 -0.3729 0.2631 -0.1296 -0.0357 -0.2797 -0.4018 0.0218 -0.4129 0.1654 0.0324 0.1249 -0.0757 -0.0278 0.0114 0.1644 -0.0654 -0.0445 0.0206 0.2573 -0.0784 -0.4035 0.2922 -0.0144 0.0615 0.0280 0.0292 -0.2697 -0.3457 0.2012 -0.0295 0.2810 -0.3361 -0.2685 0.0690 0.2289 0.0216 0.1789 0.1590 0.0330 0.1722 -0.3000 -0.0665 0.0471 -0.2774 0.0282 -0.0082 -0.4068 -0.3256 -0.0221 0.1838 -0.3682 -0.2745 -0.3362 -0.0176 0.0966 0.0550 0.0761 -0.2648 -0.4274 0.2190 0.3803 -0.0387 0.0363 -0.3328 -0.2526 -0.3576 -0.4342 -0.0747 0.1348 0.2771 0.0020 -0.0246 -0.0045 0.1549 0.0506 -0.0160 0.2828 -0.3593 -0.2290 -0.4338 -0.0021 -0.0034 -0.0237 0.1105 0.0896 0.0697 -0.0307 -0.1974 -0.0047 0.0069 0.1067 0.2567 -0.0471 0.2444 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.0.bn.running_mean', 0.0870 0.8101 0.5247 2.2122 0.4873 0.0197 0.0576 3.7668 0.3194 0.2192 1.2198 1.0392 1.1233 6.1512 0.6100 4.5341 0.1639 2.1069 0.4447 0.6375 0.0551 0.7121 1.6102 0.1971 2.0502 0.1030 0.0089 0.0028 2.7830 0.3154 0.5923 0.0087 1.6420 0.1864 0.3373 0.4181 1.1021 1.2134 0.4320 1.1481 0.0204 0.7017 0.2296 0.0967 0.0505 0.6132 0.1166 0.9885 3.3698 0.0310 0.7492 0.5738 0.2248 2.9998 0.6050 0.0560 0.6973 0.1761 0.2494 0.1141 0.7000 2.7446 0.0505 0.1667 0.3415 1.4325 0.5639 0.0893 0.1094 4.7658 0.3892 0.2030 0.0042 0.0783 0.4031 0.6467 0.1158 2.0079 0.1498 1.2992 1.4152 1.1754 1.8695 0.0708 2.6983 0.7383 0.8094 0.4446 0.5453 0.9943 0.4924 0.1364 1.9822 0.1972 1.9096 0.8834 0.0014 2.7301 0.6040 1.6772 1.4184 1.2977 0.0487 0.6774 1.9628 1.7026 1.2595 0.0462 1.2077 0.0032 1.5108 2.9752 2.2626 0.1426 0.5018 1.4696 1.5673 1.3662 2.2242 0.0077 2.6727 3.0063 2.0216 0.5201 0.1247 0.2195 0.3367 0.2375 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.0.bn.running_var', 0.3437 3.1590 3.5308 16.8469 3.0804 0.0273 0.2468 31.7647 1.4386 0.9426 6.6400 3.8405 9.8438 50.8472 4.1808 21.3124 0.5102 9.6467 1.7094 2.8162 0.1607 2.5650 5.6620 1.3423 10.7854 0.3705 0.0262 0.0040 16.0253 1.5826 2.8785 0.0123 7.1599 0.4248 1.3036 3.5653 5.6428 11.3821 1.8151 6.7774 0.0608 3.9251 0.9103 0.3961 0.1327 2.1445 0.2094 5.4538 25.3652 0.0586 4.3396 2.1219 0.9367 6.9949 3.4362 0.3150 3.5132 0.9837 0.8066 0.2909 3.5503 22.5505 0.1241 0.4674 1.4275 8.2066 2.8071 0.3559 0.2892 18.5240 3.1646 1.0415 0.0045 0.3100 2.6372 3.3976 0.2980 8.8771 0.4361 3.1446 12.0329 5.2685 8.6919 0.3742 15.9113 3.2323 3.6449 2.1727 2.5035 5.6634 5.1186 0.7378 10.0247 1.0395 33.3002 3.8079 0.0013 14.0447 3.7312 10.0017 6.9413 6.1180 0.1488 3.0287 11.6265 6.9702 3.3300 0.1468 7.1564 0.0031 14.6520 18.4089 12.1343 0.4487 2.2629 9.2180 5.7901 7.2934 18.6814 0.0180 14.8378 14.6351 8.0705 2.4461 0.8510 1.0535 1.1406 0.7860 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.1.conv1d.weight', ( 0 ,.,.) = -1.7469e-03 -5.8269e-02 6.7772e-02 -8.3827e-01 -1.9679e+00 3.3696e-01 ⋮ -6.3320e-01 -4.1194e-01 -6.1837e-01 -4.3298e-01 1.2093e-01 -2.6532e-01 ( 1 ,.,.) = -7.0265e-02 -4.4727e-02 -1.3089e-02 1.0594e-01 -1.4405e-01 3.7635e-01 ⋮ 3.0504e-01 5.5851e-02 3.3968e-01 -7.2954e-03 5.3568e-02 1.9476e-01 ( 2 ,.,.) = 1.2149e-02 -6.5031e-02 -1.1192e-02 2.0588e-02 -9.9884e-02 2.7532e-01 ⋮ 2.2661e-01 -1.4156e-01 1.0049e-01 3.1395e-03 7.0527e-02 -2.9458e-02 ... (125,.,.) = -4.5840e-02 -3.6793e-02 1.6884e-01 -4.0931e-01 2.5989e-01 2.7923e-01 ⋮ -2.6109e-01 -3.3987e-01 1.5648e-01 -6.1283e-02 -5.4954e-01 -1.1350e-01 (126,.,.) = -6.2937e-02 4.4589e-02 -6.2535e-02 -5.3324e-01 2.1476e-01 -3.5696e-01 ⋮ 2.1135e-01 -5.8720e-01 -1.0717e-01 -4.9246e-02 2.3508e-01 -8.5545e-03 (127,.,.) = 1.0879e-01 -3.1740e-02 4.4000e-03 -1.4642e+00 3.1502e-01 2.8231e-01 ⋮ -7.7851e-01 5.1266e-02 -1.2681e-03 -2.2417e-01 -2.1902e-01 -3.5927e-01 [torch.FloatTensor of size 128x128x2]), ('module.encoder.cbhg.conv1d_banks.1.bn.weight', 0.4733 0.0235 -0.0062 0.4201 1.3014 0.1988 -0.0900 1.3110 0.3646 0.4270 -1.1778 0.7919 0.1093 -0.0262 0.7808 0.4058 1.0810 -0.0285 0.4852 0.3591 0.5424 0.8443 0.5654 -0.3290 1.2563 0.2957 0.6080 -1.0073 -0.6875 0.6321 0.6867 -1.3513 -0.3735 1.0324 0.4403 -1.3225 1.0055 -1.0831 0.3380 -0.7721 -0.0921 0.6735 0.7410 0.0396 1.0561 0.0040 1.0135 0.8185 0.7132 1.1297 -0.0641 0.5488 0.6189 0.6287 1.6989 0.2293 0.5385 0.7866 -0.0101 0.7307 -0.8609 0.1057 1.3026 0.3684 1.4587 -0.1244 0.6186 -1.1850 1.2956 -1.2645 0.4718 0.0087 0.2734 -0.3027 -0.9086 -0.7481 -0.0305 0.1738 0.6157 0.2185 -0.5635 0.4902 0.4909 -0.7121 -1.4196 0.0211 0.2764 0.8459 -1.0611 -1.9599 0.6789 -0.0087 0.8961 0.4669 0.3157 0.9896 0.5475 0.4048 -0.7899 0.9126 -0.2827 0.0139 -1.1461 1.1425 0.2756 1.0780 -1.2292 0.5387 0.2735 1.2863 -1.5314 -0.0675 0.3419 0.9257 0.5979 -0.1600 0.6024 0.3802 -0.0378 0.4436 0.4443 1.3742 0.3733 0.0133 -1.9551 -1.2258 -0.9844 0.5876 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.1.bn.bias', 0.1701 -0.0081 -0.0007 0.2301 0.2036 0.0267 -0.0508 0.1225 0.1918 0.2228 -0.2664 0.3788 0.0042 -0.0011 0.0979 0.0099 0.0533 -0.0044 0.0644 0.1301 0.1707 0.0083 -0.1809 -0.0810 0.1219 0.0308 -0.0266 -0.3285 -0.1071 0.0112 0.0175 -0.3918 -0.0652 0.2522 0.1066 -0.1811 0.0816 -0.2488 0.1812 -0.3955 -0.0276 -0.0875 0.0278 0.0080 0.1422 0.0212 0.1189 0.0657 0.1570 0.4617 -0.0123 0.2006 0.1181 0.2937 0.2162 0.0653 -0.0357 0.1045 -0.0054 0.0629 -0.2165 0.0177 0.1745 0.1962 0.1149 -0.0463 0.0938 -0.2009 0.1857 -0.3686 0.1085 0.0068 0.0247 -0.0742 -0.3292 -0.1746 -0.0137 -0.0106 0.1596 0.0604 -0.1541 -0.1548 0.0034 -0.1204 -0.1631 -0.0175 0.1083 0.0942 -0.1659 -0.3056 0.5220 0.0009 0.1898 0.0333 0.1392 0.1659 -0.1028 0.0858 -0.2728 0.3501 -0.0931 -0.0023 -0.0371 0.0988 -0.0181 0.0447 -0.3148 0.0571 0.0850 0.1255 -0.2664 -0.0209 0.0853 0.1626 0.0210 -0.0426 0.1644 0.0993 -0.0196 0.0656 0.0561 0.3851 -0.0397 -0.0042 -0.1487 -0.2297 -0.2560 -0.1302 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.1.bn.running_mean', 0.2278 1.3284 5.4859 2.7325 0.3709 5.2223 1.6543 0.0211 1.6813 0.9461 4.6698 0.6030 1.4935 4.3162 0.2561 2.1722 0.7438 6.9892 0.5223 7.1926 1.5904 1.4662 0.9416 1.1059 0.3037 5.7653 0.0300 3.2579 0.8365 0.2451 0.2678 0.8860 0.8639 0.2315 2.2403 3.4847 0.1228 0.8269 1.9055 4.1844 1.5615 0.3953 0.8498 2.7287 1.2589 4.2431 0.3563 0.3371 1.4904 1.7422 0.9660 3.5193 0.1304 3.3561 0.1678 5.5767 0.2415 0.0174 0.2963 0.2006 2.3182 6.5804 0.2712 0.9532 0.2055 2.6334 0.2016 0.6562 0.1193 1.0522 7.8449 1.7169 0.6664 1.3208 1.4695 2.0873 2.7356 0.6997 0.2519 5.0028 1.6218 0.1138 0.2069 0.7647 0.1726 4.4857 0.3926 0.2857 0.9989 0.3531 4.2766 1.1147 0.1493 0.5459 6.1042 0.0601 0.2063 0.1297 2.2081 1.3151 2.6887 7.2990 0.0021 0.3346 1.2581 0.3534 2.4283 0.1082 5.4669 0.1340 0.2159 2.2148 0.6639 0.2035 1.0627 1.4622 1.6266 1.6433 1.1739 0.0596 9.3307 0.6544 1.2687 2.7657 0.4171 1.5379 1.2445 0.3488 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.1.bn.running_var', 1.3367 7.6086 29.6188 19.9479 1.9745 22.7413 5.9792 0.0637 10.5301 6.9245 45.1371 4.3097 7.0463 30.0321 1.6828 10.8739 3.6743 33.7168 3.1064 33.4510 12.2032 11.5044 4.9928 4.6164 2.3413 29.8270 0.0870 22.6399 4.5027 1.0404 2.8947 5.9069 4.3338 1.4424 15.1429 31.3466 0.4048 4.3270 11.8973 27.4739 5.2958 1.8844 5.1403 11.1396 10.1982 25.9107 2.5645 1.8474 7.7697 23.2299 3.5681 23.7102 0.5725 42.8053 0.8816 44.7615 1.0349 0.0376 0.9509 1.1513 18.1707 42.8582 1.3485 5.3684 0.9533 13.0618 1.1537 3.8212 0.4204 6.5487 41.9320 16.4796 4.3733 6.8044 10.9295 13.2641 11.7745 3.6665 1.6709 25.2989 11.1395 0.4067 0.7982 4.9462 1.2926 12.2255 1.8526 1.4234 6.5699 2.1196 46.5058 5.0214 0.5336 4.7017 35.4098 0.2543 1.0787 0.7361 15.2158 9.3967 14.4744 47.6725 0.0015 1.4966 8.1529 2.0170 24.5650 0.4699 24.9536 0.6458 1.1774 8.2636 5.0634 0.8180 10.3734 7.5653 9.4897 11.5371 4.4367 0.2274 46.4561 6.3732 8.3160 14.7590 2.9065 15.0195 8.1915 1.9411 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.2.conv1d.weight', ( 0 ,.,.) = -5.4286e-02 -9.4237e-02 4.7910e-02 -2.0761e-01 1.6850e-01 -1.9503e+00 -4.2229e-01 -1.3163e+00 -2.3051e-01 ⋮ 6.4546e-02 -1.0657e+00 -1.1922e+00 5.5012e-02 -5.6582e-01 -2.0700e-01 2.8215e-02 -1.4465e-01 -1.5023e-01 ( 1 ,.,.) = -2.9313e-02 -4.5640e-03 -2.5330e-02 -1.8320e+00 9.9294e-02 2.6152e-01 1.1664e-01 9.9127e-02 -2.0662e+00 ⋮ -8.1441e-01 -8.7617e-01 1.7442e-02 -1.5637e-01 1.0771e-01 -6.5480e-01 1.2788e-01 1.0396e-01 -5.3307e-02 ( 2 ,.,.) = 1.3774e-02 -2.2764e-02 1.7160e-02 -5.4339e-01 8.6952e-02 8.0073e-03 -2.6484e-01 -1.3497e+00 8.9788e-02 ⋮ 1.1648e-01 -7.7175e-01 1.5116e-01 -2.1602e-01 2.5640e-01 1.0208e-01 1.2165e-01 -2.6676e-02 -4.5937e-02 ... (125,.,.) = -4.6806e-02 -3.6873e-02 3.9662e-02 -7.0332e-02 -8.7892e-01 -1.4400e+00 1.8573e-01 9.4182e-02 1.1713e-01 ⋮ 1.5029e-01 -3.6476e-02 6.7077e-02 2.8011e-01 -4.7094e-02 2.2105e-01 -2.4804e-01 -1.3658e-02 -3.4306e-03 (126,.,.) = -1.9260e-02 -1.2697e-02 -1.4019e-02 -1.3391e-01 -3.4813e-01 -4.7117e-01 1.4140e-01 1.4029e-01 -1.3311e-01 ⋮ 1.8775e-01 5.7309e-02 2.6160e-01 2.2137e-01 -2.6558e-02 -3.1574e-02 1.0015e-02 -1.2476e-01 -3.1886e-02 (127,.,.) = 2.4374e-02 1.2599e-02 1.4980e-02 -1.8930e-01 -3.1155e-02 -2.2507e-01 3.4954e-01 -6.2865e-02 4.2429e-01 ⋮ 2.4214e-01 -4.6694e-02 -1.8781e-01 -2.5548e-01 2.9772e-01 4.9350e-01 1.5860e-01 -2.1079e-01 1.1477e-01 [torch.FloatTensor of size 128x128x3]), ('module.encoder.cbhg.conv1d_banks.2.bn.weight', 0.7882 1.1679 0.6868 -0.5602 0.6926 0.3480 1.3040 -0.5898 0.8252 -0.1509 0.6994 0.5404 0.7474 0.9570 0.1598 0.5288 0.8474 -0.4721 0.7928 0.6296 0.9907 -0.6676 -0.1030 -1.0869 0.4828 1.1944 -0.3796 0.7430 0.5693 0.4382 0.3220 0.3703 0.8995 0.7451 -1.3021 -0.8754 -0.8579 0.5799 0.4983 0.5480 -2.1142 0.9737 1.2022 0.3887 0.5268 1.2057 0.8936 0.3334 0.7513 0.6445 -0.7795 1.0365 0.4544 0.5647 0.7380 1.1126 0.6847 0.2264 0.3797 -1.0073 0.9932 0.5080 -1.0126 0.8422 -0.9466 0.4633 1.2529 0.7878 0.3980 0.9587 0.5316 -0.5880 0.6710 0.7551 0.5722 0.5651 -0.8144 0.8886 0.4788 0.6518 0.9727 -0.7357 1.2086 0.5821 0.2523 1.3351 1.0008 1.0258 -0.8218 0.2004 0.4271 0.9437 -0.3267 1.2607 0.6416 0.6931 -0.6647 -1.7838 0.9953 -0.5783 -0.8165 0.8011 0.8279 1.0071 -0.9063 0.7007 -1.4719 -1.4968 -1.0625 0.8999 0.7239 0.0646 -0.9613 0.6627 0.7891 1.0621 0.6897 0.7706 0.5824 0.7817 -1.3201 -0.9172 0.3756 -1.4005 0.8096 0.6342 -0.7933 0.1299 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.2.bn.bias', 0.0638 0.0563 0.2731 -0.1831 0.0058 -0.1194 0.1583 -0.1720 0.0106 -0.0349 0.0823 0.0839 0.2265 0.3051 0.0007 -0.1692 0.1714 -0.0963 0.0330 0.0207 -0.1119 -0.1277 -0.0389 -0.3312 -0.0054 0.0919 -0.0990 0.2415 0.1045 0.0839 -0.0179 0.1326 0.1313 0.3185 -0.1222 -0.2094 -0.2618 -0.1596 0.2535 -0.0093 -0.0895 0.1695 0.1285 0.1045 0.0112 0.0559 0.2153 -0.0550 0.1243 0.0311 -0.0646 0.3177 0.0043 0.0961 0.0593 0.0338 0.0701 0.0092 -0.0261 -0.3211 0.1917 0.0501 -0.1208 0.2013 0.0326 0.0782 0.3299 0.0598 0.0763 -0.0259 0.2968 -0.1930 0.2456 0.0162 0.0692 0.1265 -0.1297 0.4153 -0.0290 0.0303 0.2357 -0.2576 0.2377 0.0680 0.1226 0.3216 0.1055 0.1506 -0.1748 0.1498 -0.1754 -0.0923 -0.0671 0.0901 -0.0195 0.0387 -0.1568 -0.2594 -0.0551 -0.0781 -0.1634 0.0727 -0.0663 0.1575 -0.2397 0.1048 -0.1084 -0.3413 -0.1369 0.1626 0.0728 -0.0186 -0.1495 -0.0135 0.1119 0.1673 0.0699 0.1361 0.3015 0.2214 -0.1587 -0.2259 0.2413 -0.1080 0.0616 0.3122 -0.2073 0.0341 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.2.bn.running_mean', 1.1630 0.2190 0.6604 6.8502 0.5800 0.7456 0.8671 4.6402 0.1669 3.2986 0.3361 1.2620 0.4217 0.3617 1.4444 0.6314 0.9157 1.0370 1.7103 0.1520 0.2035 3.8058 1.4098 3.0435 1.8632 0.2145 1.3484 0.0739 3.0820 4.6308 0.8663 5.6400 1.1108 1.6447 0.2646 2.1980 3.5989 0.5466 0.4014 0.5370 0.1492 0.4578 0.0107 0.9942 1.0825 0.5560 0.4456 1.5005 2.0964 1.6159 0.0207 1.3731 1.0164 0.6175 0.9700 0.0986 2.0744 1.9988 1.1954 2.7618 0.1706 0.0942 1.4422 0.9258 1.6989 12.8986 0.7410 0.3696 0.9520 0.4256 0.8205 4.3940 1.5047 0.6868 3.1306 2.5420 1.0325 0.3960 0.7334 1.4882 0.5329 2.3058 0.0159 0.7815 10.4380 0.1061 0.1331 0.6696 2.0808 3.2479 4.0375 0.5901 1.7219 0.2626 1.2888 2.9755 2.2197 0.2895 0.9920 1.0100 1.7728 0.3082 0.1416 0.2235 2.5931 0.0869 0.4106 0.7252 1.9628 0.0089 1.1316 4.9027 1.3078 0.1001 0.2413 0.2598 0.3739 0.1684 1.3397 0.2249 1.6900 1.6697 1.3694 0.4396 0.8980 0.2888 1.2549 4.6069 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.2.bn.running_var', 9.7994 1.2514 4.1081 46.8629 3.6430 4.7418 6.4415 33.7882 0.9173 15.8640 3.5358 11.0178 3.2181 2.7233 6.4556 5.5252 7.7024 5.3057 14.0739 1.0280 1.1828 28.1004 7.1049 23.1384 13.8020 1.1702 8.0575 0.3680 29.6075 42.2086 5.7056 61.3984 8.8590 14.3460 1.8237 17.0321 38.5890 4.8810 3.5166 3.8442 1.1839 2.8182 0.0253 13.5511 8.0220 3.5922 3.4204 13.1982 14.3898 15.3038 0.0906 13.9396 7.8078 5.1677 7.0526 0.5769 16.6135 13.2465 9.8636 23.3770 0.9680 0.6132 11.0925 7.6960 15.5853 49.7492 5.7911 1.9542 6.6985 2.6736 6.1363 38.2642 14.4988 4.6359 34.5189 19.9102 7.4275 2.7323 7.2089 12.1778 2.7965 21.4060 0.0439 5.3728 58.0339 0.5599 0.6702 4.5155 19.7068 24.7468 32.4359 3.7611 8.8014 1.4880 9.7919 22.7541 17.9358 1.9441 7.9773 5.0693 15.6299 2.0215 0.7720 1.1532 23.9688 0.3376 2.5808 6.6397 15.5070 0.0205 9.0777 22.0513 11.2502 0.5872 1.3869 1.4350 2.5002 0.9065 11.0231 1.2137 12.7303 15.6336 9.6978 3.0546 6.9087 2.1190 8.6749 36.0892 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.3.conv1d.weight', ( 0 ,.,.) = -3.0199e-02 -4.5101e-02 4.1657e-02 -1.3989e-01 5.9655e-02 1.3077e-01 -1.0970e+00 9.7011e-03 -1.3781e+00 2.7077e-01 -1.9742e-02 2.1801e-02 ⋮ -9.1431e-02 1.5480e-01 2.4212e-02 7.3165e-02 1.4958e-01 -4.2327e-01 1.9488e-01 -6.6253e-01 -1.3541e-01 -2.3292e-01 -3.2272e-01 -1.7024e-02 ( 1 ,.,.) = 3.2595e-02 -3.2977e-03 4.1706e-02 -1.3661e-03 -1.0077e+00 -2.9712e-01 -4.7499e-02 -5.7860e-01 2.7742e-01 5.4497e-02 2.4373e-01 -1.1944e+00 ⋮ -7.5219e-01 5.6820e-02 -4.2962e-01 -2.7082e-01 6.5423e-02 2.2855e-01 -7.5106e-02 2.1938e-01 -1.2268e-01 -2.7853e-02 8.2643e-02 -2.9314e-01 ( 2 ,.,.) = -3.8005e-02 -1.8723e-02 9.9610e-03 -1.8004e-02 2.8541e-01 -4.0136e-01 -6.4734e-01 -9.4345e-01 -4.9899e-03 1.0887e-01 -5.8836e-02 -5.9108e-01 ⋮ -5.1846e-01 4.7479e-03 -4.8246e-01 -1.0629e-01 8.4392e-02 9.2139e-02 -5.7263e-02 -2.2137e-01 -2.2947e-01 -8.1368e-02 -1.8130e-01 1.4157e-01 ... (125,.,.) = -1.9266e-02 -5.8194e-03 -5.9193e-02 1.2790e-02 7.3518e-02 1.8488e-01 1.2557e-01 1.2247e-01 -1.3630e-01 5.6454e-02 1.3659e-01 -6.8306e-02 ⋮ 4.1640e-01 -1.0616e-01 -2.5067e-01 1.5842e-01 -7.1185e-02 1.9634e-01 -1.7411e-01 2.7764e-01 3.3945e-03 -3.4820e-03 8.2969e-02 -1.0172e-01 (126,.,.) = -1.9898e-02 1.1437e-02 -6.1109e-02 -2.0294e-02 -1.9157e-01 1.5399e-01 -2.3774e+00 -1.2851e+00 4.7223e-02 -8.3199e-01 1.5482e-01 -1.4128e+00 ⋮ 9.8303e-02 8.7049e-02 -4.9641e-01 1.8631e-02 -1.5819e-01 -7.8419e-01 -4.9958e-01 -4.4801e-01 -2.3128e-01 -8.0637e-02 -1.0088e-01 3.4297e-02 (127,.,.) = -4.6688e-02 2.6377e-02 -1.1830e-01 4.1776e-02 1.7381e-01 7.2495e-02 -4.0078e-02 1.2306e-01 -1.6091e-01 -1.0786e-01 -1.6649e-01 3.2966e-02 ⋮ 2.8699e-01 2.1908e-01 -4.5487e-01 -2.5770e-02 2.0351e-01 -4.6262e-01 1.0559e-01 -3.1717e-01 -4.0802e-02 6.2488e-02 1.3862e-01 -1.4880e-02 [torch.FloatTensor of size 128x128x4]), ('module.encoder.cbhg.conv1d_banks.3.bn.weight', 0.6762 1.2839 0.5925 0.4043 0.5573 0.4524 0.4971 -0.5960 0.5021 -0.9494 0.5080 -0.8624 0.6140 0.5730 -1.2350 0.7618 0.4530 1.6662 -0.3794 0.5542 0.4506 0.5946 0.5797 0.4881 0.4952 -0.5356 0.6488 -0.8908 0.4057 0.6898 0.6750 -0.1251 0.6325 0.4851 0.4389 0.4041 0.4299 0.7708 0.7325 -0.0463 0.6394 0.5451 0.3378 0.7166 0.6030 1.1028 0.3994 -0.9556 0.3748 0.5475 0.4361 0.3910 0.7346 0.6367 0.5006 0.6014 0.6725 0.4923 0.6960 0.3339 0.3371 0.3961 0.4107 0.5951 0.4860 0.5769 -0.7742 0.4339 0.7209 0.4315 0.5832 0.5863 0.5736 0.4677 -1.0682 0.9264 0.7246 0.7324 0.4137 -0.5774 0.4973 0.7447 0.3545 0.7746 0.6656 0.5835 0.5291 1.1132 0.4102 0.5999 0.3807 0.3736 0.6172 1.1931 -0.0256 0.4723 0.6797 0.8751 0.4438 0.4281 0.3294 0.9565 0.7108 0.8660 0.7950 1.0954 -0.0163 0.0693 0.4338 -0.0255 0.0793 -0.7395 0.0218 0.3456 0.6162 -0.6018 1.5660 0.7036 0.8461 0.6650 0.3238 -0.1641 0.3654 0.6098 0.6175 -0.5964 0.6494 0.3895 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.3.bn.bias', 0.1157 0.4014 0.1449 0.0870 0.0422 -0.1932 0.2791 -0.1141 0.0616 -0.1564 0.1885 -0.1000 0.0900 0.1455 -0.3226 0.0664 0.1438 0.0250 -0.0582 0.0415 0.0589 0.1084 0.0381 -0.0546 0.0147 -0.1381 -0.0526 -0.1121 0.0751 0.0238 0.0121 -0.0361 0.1103 0.1307 -0.0235 -0.0487 0.0064 0.1435 -0.2283 -0.0059 0.0107 -0.0105 0.0806 0.0485 0.0593 0.2291 -0.0547 -0.2096 -0.1142 0.0520 0.0096 -0.0157 0.0705 -0.1203 0.2084 0.0232 0.1335 0.0985 -0.0865 0.0218 -0.0995 0.1750 0.0665 0.1305 0.1409 -0.0519 -0.2571 -0.2469 0.1067 -0.1175 -0.0143 0.0273 0.1013 -0.1832 -0.0928 0.1175 0.0343 0.1175 0.1041 0.0484 0.2421 0.1170 -0.0565 0.1435 0.0914 0.1470 -0.1090 0.2005 0.0871 0.0101 0.0458 -0.0110 0.1671 0.0892 0.0073 0.0335 0.0925 0.1804 -0.0319 -0.0401 0.1655 0.1278 0.1544 0.2714 -0.1484 0.0515 -0.0145 0.0018 0.0577 0.0064 -0.0056 -0.0825 -0.0013 0.0528 -0.0137 -0.1068 0.0190 0.0481 0.0621 0.1866 0.0732 -0.0291 -0.0383 0.2391 0.1083 -0.1501 -0.0074 -0.0611 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.3.bn.running_mean', 0.2907 0.9724 0.0591 7.0921 0.3564 1.5779 13.5156 1.0280 0.5696 2.6036 0.2261 2.1813 0.6724 0.6929 0.7458 0.3476 1.9931 0.1909 7.2323 0.5527 0.3362 2.3720 1.5167 3.6392 1.3306 2.6000 0.5801 1.3793 1.9162 0.3574 0.1882 1.2014 1.2526 0.7860 0.5745 0.3189 0.7388 0.7234 0.0937 9.9037 1.0526 0.1291 1.8733 0.9669 1.0198 0.0990 10.1295 1.1176 1.2340 1.5836 2.7698 1.7052 1.6756 0.2054 1.8708 0.8747 0.3102 4.3427 0.2556 1.4933 1.1341 10.7140 1.2381 0.5267 0.3254 0.1514 2.8627 0.3161 0.6128 3.2774 1.0922 1.4499 0.1820 0.3361 4.7489 0.9300 0.3193 0.1965 0.1582 4.3853 0.7887 0.7031 2.5427 0.1071 0.4976 0.3115 0.3476 0.3716 0.8462 0.5684 0.4245 0.4826 0.9536 0.1709 5.0987 2.5484 0.1011 0.0435 1.7191 0.7203 0.7345 1.1218 0.2452 5.0673 0.8436 0.0573 8.1118 3.5657 1.1770 3.5329 1.3464 1.5698 5.1944 3.2881 0.3376 4.2355 1.4871 2.0011 0.0229 1.0462 2.2058 3.6935 0.6207 2.2306 0.3112 2.4165 0.1631 2.1980 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.3.bn.running_var', 1.8418 12.1676 0.4626 43.6007 4.3930 15.3208 87.6767 7.0887 5.4963 25.9627 2.0877 20.8844 5.1665 5.3227 6.8652 2.6800 16.6571 4.4653 43.1472 3.8001 2.2434 23.9898 15.3317 31.4329 9.5696 18.6807 6.8543 12.6780 21.7198 2.8985 1.4760 5.6311 10.7562 7.2283 5.4736 3.2340 7.0783 6.1088 0.5139 48.0629 13.1178 0.7780 15.6500 8.2420 9.1927 0.6737 64.0188 7.9275 12.5226 18.7606 29.9964 13.7883 15.7356 1.4478 15.4805 7.1590 2.7078 36.3994 1.7871 9.7304 10.0635 72.9783 9.4999 5.3499 2.3363 0.9517 26.1254 2.6923 4.6281 26.8849 10.3987 13.1628 1.4532 2.4368 52.3030 13.3247 2.5919 1.7007 1.1596 42.2475 6.4425 5.6024 19.3698 0.5629 3.6024 2.4057 2.8786 2.9411 7.8058 4.0184 4.0513 4.5486 10.3213 1.0285 26.9182 23.5436 0.7894 0.2378 17.0778 5.9807 6.3857 8.9704 1.8861 65.6700 7.0327 0.2992 74.7671 15.9068 12.2373 14.9016 6.7227 13.5921 39.4942 22.5308 2.6461 29.9793 11.1880 17.1316 0.0988 9.8629 14.2450 27.3753 5.3518 21.6821 2.3591 19.7895 1.0592 17.0844 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.4.conv1d.weight', ( 0 ,.,.) = 1.8676e-02 4.3095e-02 -1.5726e-02 -4.3661e-02 -9.8139e-03 -2.0828e-02 3.0502e-01 -2.4855e-01 -1.1689e-01 -1.2934e-02 -2.4981e-01 6.2893e-02 -1.9024e-01 -5.0738e-02 3.3915e-01 ⋮ 1.6444e-01 -6.0511e-01 -5.3369e-01 -2.9930e-01 3.1245e-01 -2.8105e-01 2.4210e-01 -5.1733e-01 -5.1014e-01 -3.4072e-01 -2.4825e-01 -7.7791e-02 -2.0725e-02 -4.8233e-02 -9.1717e-02 ( 1 ,.,.) = 6.9531e-02 -4.4087e-02 9.2566e-02 -4.6828e-02 3.0438e-02 1.0145e-01 1.8946e-01 -2.9237e+00 -2.4120e+00 1.6530e-01 -4.3283e-02 -7.5086e-02 -6.1910e-01 -1.1024e+00 3.5328e-01 ⋮ -1.2166e-01 1.3522e-01 2.8608e-04 2.4502e-01 -7.5205e-02 -4.6553e-02 1.3329e-02 -8.7335e-01 -3.2406e-02 2.4081e-01 1.9314e-01 3.8841e-02 -6.1874e-01 4.3118e-02 -2.3558e-01 ( 2 ,.,.) = 1.9084e-02 -6.8872e-03 -4.7294e-02 -1.1103e-03 5.2093e-02 3.9098e-01 -4.9400e-02 -5.5011e-02 -1.4766e-01 3.7494e-01 -3.2525e-01 1.6407e-01 1.2210e-01 2.1276e-01 3.4819e-02 ⋮ -1.2377e-01 3.4028e-02 2.1447e-01 -4.6228e-01 8.3448e-02 -4.6388e-01 2.2220e-01 -3.5371e-01 2.5234e-01 3.4024e-01 3.3419e-02 1.7029e-01 3.2783e-02 -5.3499e-02 9.5706e-02 ... (125,.,.) = -4.0932e-02 -2.5763e-02 7.7512e-03 3.6522e-02 3.7960e-02 -3.4469e-02 -1.0437e+00 -7.4566e-01 -1.6032e-02 1.0350e-01 -8.7426e-01 1.6048e-01 -8.3122e-02 -1.8413e-03 -9.7011e-01 ⋮ -1.6335e-01 7.2348e-02 -1.1172e+00 -4.5718e-01 -1.0383e+00 2.5089e-01 1.4073e-01 -5.9574e-01 -9.8118e-01 1.5447e-01 -1.8739e-01 -4.8575e-01 -1.2125e-01 -3.5486e-01 -4.8543e-01 (126,.,.) = 4.5631e-02 1.0822e-02 -2.5906e-02 8.9613e-03 -4.3499e-02 -6.0878e-01 -9.8588e-01 -5.2404e-01 -8.0881e-01 -3.5317e-01 -3.4792e-02 -1.0723e+00 -7.0286e-01 -5.1342e-01 8.7097e-03 ⋮ 1.7486e-01 5.7678e-02 -5.5733e-01 -1.9898e-01 -3.2039e-01 1.6729e-01 -2.5874e-01 -3.0634e-01 -4.0217e-01 7.8019e-02 -3.8354e-02 5.5831e-02 -1.5912e-01 -1.3364e-01 -1.0953e-01 (127,.,.) = -3.2366e-02 2.3642e-02 3.2589e-02 -1.1044e-02 8.8390e-03 -7.1364e-01 -3.9804e-01 2.8420e-02 -1.2269e+00 -4.1309e-01 -1.2522e-01 5.2512e-02 1.4330e-02 9.9259e-02 3.5246e-02 ⋮ -1.6750e-02 -4.6182e-01 -8.4984e-01 -7.7151e-01 6.1423e-02 8.0939e-02 -4.9918e-01 -3.9325e-02 -3.2784e-01 -4.4412e-01 7.7830e-02 -6.5344e-02 1.7677e-02 -5.8119e-02 2.2737e-02 [torch.FloatTensor of size 128x128x5]), ('module.encoder.cbhg.conv1d_banks.4.bn.weight', 0.3426 0.6003 0.2337 0.2767 -1.5664 0.6790 0.2967 0.6112 0.5981 1.2043 0.8861 0.4381 0.5049 0.4052 1.1499 0.0001 0.5920 -0.6619 0.5119 0.8957 0.4586 0.8248 0.5741 0.3737 -0.7913 0.3334 0.4213 0.7619 0.9248 0.8743 0.8350 1.1013 1.1371 0.4845 0.5254 0.3206 0.4344 0.5647 0.5539 0.8183 0.6006 0.4469 1.2965 -0.6258 0.4940 0.6888 0.5895 0.5103 0.1940 -0.0739 0.4408 -1.0388 0.4637 0.5204 0.5693 0.7005 0.3624 0.5829 0.5393 0.5945 0.2330 0.2835 0.7863 0.4587 0.5319 -0.6582 -1.5175 0.5959 0.3762 0.4533 0.8087 0.7461 0.7765 0.4178 0.5040 0.5407 0.7416 -0.9143 -1.0565 1.1500 -1.2856 0.4806 0.5750 1.2181 0.7721 0.6361 0.4319 0.5130 0.0221 -0.9896 -1.1924 0.4069 0.6089 0.3713 -0.1563 0.4905 0.3348 0.5328 0.5235 0.5093 0.4743 0.5250 0.6474 0.3690 0.4880 0.5428 0.5635 0.4048 0.4607 -0.8978 0.6659 0.6544 -1.4788 -1.0401 0.9904 0.5127 0.4872 0.4587 0.7145 0.7750 0.3730 -1.0018 0.5827 0.3664 0.6188 0.6160 -1.4819 0.7840 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.4.bn.bias', 0.1269 0.1285 0.0361 -0.0386 -0.1509 0.0291 -0.0117 0.0266 0.1041 0.1785 0.2022 -0.1472 0.0636 -0.0567 0.0901 -0.0037 0.0677 -0.0685 0.0430 0.0297 0.0887 -0.1836 0.1372 -0.1900 -0.0034 -0.1570 0.0834 0.1421 0.1412 -0.0480 0.0061 -0.0238 0.0758 -0.0220 0.0793 0.0252 -0.0169 0.1349 -0.0997 -0.0222 0.0424 0.2772 0.2115 -0.1162 -0.0630 -0.1354 0.0988 0.0382 -0.0212 -0.0052 -0.0553 -0.2416 0.0796 0.1696 0.0128 0.2235 0.0418 0.0549 0.0119 -0.2704 -0.0125 0.0839 0.0265 0.0725 0.1788 -0.0850 -0.0673 0.2108 -0.0181 -0.0814 0.0149 0.0042 0.1059 -0.0182 0.0980 0.1088 0.1629 -0.1967 -0.1704 0.0361 -0.2944 -0.0876 0.0523 0.0819 0.1366 -0.2225 -0.1965 0.0689 -0.0139 -0.2385 -0.2867 0.0257 -0.1125 0.0647 -0.0456 0.0824 0.1223 0.0441 -0.0074 0.1459 0.0766 0.2161 0.0482 -0.0085 0.1937 0.1123 0.1412 -0.0042 0.0901 -0.1947 0.0449 -0.0446 -0.0334 -0.2068 0.1845 -0.0796 0.0236 0.0239 0.0896 0.0036 0.0559 0.0695 0.0764 -0.0007 -0.1054 0.0849 -0.1300 0.0266 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.4.bn.running_mean', 1.1662e+00 5.7806e-01 8.6421e+00 1.5111e+00 1.4286e-01 4.7887e-01 2.5149e+00 4.1017e-01 1.7029e+00 6.8182e-01 5.2371e-01 4.1583e+00 9.4160e-02 1.1704e+00 2.1705e-02 1.2901e+01 5.2419e+00 4.7075e-01 7.1928e-02 8.2839e-02 3.8725e-01 1.5272e-01 2.7374e+00 1.2776e+00 4.3609e-09 2.0726e+00 1.1753e+00 5.5592e-01 3.3004e-01 1.3981e+00 4.9237e-02 2.0195e-01 3.9373e-01 9.7453e-01 1.2037e+01 1.4995e+00 6.6274e-01 3.8213e-01 6.1267e-01 4.6233e-02 1.9922e-01 4.2108e-01 2.4134e+00 3.7927e+00 8.8383e-01 2.1092e+00 3.5873e-02 1.0479e+00 2.5503e+00 4.3555e+00 9.6033e-01 1.9151e+00 8.3043e-01 9.1108e-01 6.2211e-01 6.0335e-02 1.8175e+00 2.3355e-01 6.3597e-01 3.3824e-01 4.1144e+00 1.0162e+01 2.9306e-01 1.6782e-01 3.0191e+00 1.3903e+00 7.1522e-02 2.3149e-01 7.9709e-01 4.1425e-01 5.9430e-02 6.7193e-02 1.7077e-01 8.2555e+00 1.0324e+00 2.9795e-01 2.9554e+00 3.0318e+00 1.3888e+00 4.4584e-02 7.3407e-01 3.9918e+00 4.1640e-01 4.0112e-01 1.1698e-01 1.3736e+00 8.5450e-01 3.1089e+00 9.4334e+00 2.0476e+00 2.7410e+00 2.9759e-01 3.9749e-01 3.2181e-01 7.5011e+00 4.1735e-01 1.6323e+01 3.7291e-01 5.9362e-01 7.4685e-01 5.1302e-01 4.3762e-01 1.5844e+00 4.9481e-01 1.2357e+00 6.6337e-01 3.1602e-01 7.9581e+00 5.7080e+00 1.7927e+00 1.4669e+00 1.4931e+00 6.7405e-03 1.7772e+00 5.2709e-01 3.7220e-01 8.1531e-01 1.0348e+00 8.1878e-02 3.5830e-01 1.9062e+00 1.2071e+00 2.6719e-01 8.8440e-01 7.4879e-01 4.4662e-01 8.5571e-02 8.4275e-02 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.4.bn.running_var', 1.1161e+01 5.8489e+00 6.2559e+01 1.3760e+01 1.1316e+00 4.6863e+00 2.8584e+01 4.4339e+00 2.0113e+01 7.2040e+00 4.8571e+00 3.6512e+01 6.5280e-01 1.0856e+01 1.2327e-01 9.9910e+01 6.3146e+01 3.9984e+00 5.3184e-01 5.8083e-01 2.9553e+00 1.1699e+00 3.3601e+01 1.3705e+01 3.9209e-09 1.9752e+01 1.3838e+01 4.7755e+00 3.3254e+00 1.4998e+01 2.8412e-01 1.9239e+00 3.3981e+00 9.1401e+00 8.0982e+01 1.4672e+01 7.0427e+00 3.5584e+00 5.6003e+00 3.3115e-01 1.5336e+00 4.0793e+00 2.8563e+01 3.5319e+01 7.8510e+00 2.2402e+01 2.3532e-01 1.1328e+01 1.2928e+01 2.3886e+01 8.7510e+00 1.8744e+01 7.6653e+00 9.4153e+00 5.6654e+00 4.7516e-01 1.8806e+01 2.1668e+00 6.3653e+00 2.6073e+00 3.9408e+01 6.9438e+01 2.9417e+00 1.2135e+00 2.8289e+01 1.4237e+01 4.5265e-01 2.0719e+00 7.5292e+00 3.9462e+00 3.8858e-01 4.1534e-01 1.1462e+00 6.7391e+01 1.1868e+01 2.6739e+00 3.1718e+01 3.5087e+01 1.3465e+01 3.2188e-01 6.7510e+00 4.8879e+01 4.4594e+00 3.8057e+00 8.5733e-01 1.1967e+01 8.7259e+00 3.7869e+01 8.3978e+01 2.3187e+01 3.3431e+01 2.8585e+00 3.4761e+00 3.1597e+00 3.5961e+01 3.7471e+00 9.6401e+01 3.2155e+00 6.9874e+00 7.7710e+00 4.5784e+00 4.4322e+00 1.7687e+01 4.4191e+00 1.1004e+01 6.7891e+00 2.8761e+00 4.6781e+01 4.7436e+01 1.9463e+01 1.4606e+01 1.7952e+01 4.9074e-02 2.2192e+01 5.1112e+00 2.9833e+00 9.0143e+00 1.0664e+01 6.4345e-01 2.8626e+00 2.0771e+01 1.2641e+01 2.3985e+00 7.9465e+00 6.2094e+00 4.9007e+00 6.8326e-01 5.1287e-01 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.5.conv1d.weight', ( 0 ,.,.) = -1.6707e-02 1.5062e-02 -2.4290e-02 -2.0553e-02 7.6927e-02 2.5556e-02 -1.5825e-01 1.8375e-01 -1.3715e+00 -1.0272e-01 -1.0912e+00 5.3088e-03 -2.9068e-01 -4.3543e-01 -4.9879e-02 1.9555e-02 1.5983e-01 7.6618e-02 ⋮ -3.8250e-03 -8.1282e-02 7.3171e-02 1.9127e-02 1.2154e-01 -1.3264e+00 -5.9122e-02 -4.1025e-01 -3.0630e-01 -8.2133e-02 -5.3492e-01 1.9383e-01 1.1465e-01 -8.7856e-02 1.9600e-01 -2.7086e-01 -3.4039e-02 -6.0773e-02 ( 1 ,.,.) = -5.3984e-04 9.0873e-04 9.3033e-03 -9.9289e-03 -2.5024e-02 6.0367e-02 -4.0239e-01 1.9475e-02 -5.3144e-01 -2.0597e+00 -9.3734e-02 1.1011e-01 1.1027e-01 -8.8092e-02 -6.0984e-01 2.7714e-01 -2.5632e+00 -1.7113e-02 ⋮ -7.9546e-01 -1.1268e+00 5.8257e-02 1.1209e-01 1.2857e-01 -1.9039e-01 1.4975e-01 1.0255e-01 -5.6228e-01 -4.2333e-01 -4.6771e-01 5.2113e-02 9.4120e-02 -2.0300e-01 -1.4289e-01 -3.4866e-01 -3.5331e-01 6.1327e-03 ( 2 ,.,.) = 8.2903e-02 -2.8668e-02 -4.9796e-03 8.3328e-02 7.2743e-03 -1.2039e-03 1.2976e-01 3.7283e-02 5.2159e-02 2.3635e-01 7.3198e-02 -1.5133e+00 1.3264e-01 -1.4702e-01 1.6758e-01 1.0320e-01 -8.4526e-02 6.9735e-02 ⋮ -1.4193e-01 -1.2630e-02 1.8733e-01 1.9073e-01 -2.8929e-01 -3.8090e-02 -3.0786e-01 -6.9524e-01 -2.1240e-01 -8.6229e-01 -2.4218e-02 9.6994e-02 -1.9311e-01 9.7328e-03 -2.3853e-01 -6.1829e-02 2.2485e-02 2.0454e-01 ... (125,.,.) = -1.9382e-02 -5.8581e-02 2.0791e-02 -3.7576e-02 -2.6406e-02 -1.5199e-02 2.0389e-02 -1.8575e-01 -1.1484e-01 1.1992e-01 -1.4563e-02 -6.6121e-02 3.8035e-02 -1.8355e-02 -3.8509e-01 1.3890e-01 -3.7742e-01 3.6402e-02 ⋮ 3.2539e-01 1.4324e-01 1.2824e-01 7.3527e-02 -1.1932e-01 -2.4204e-01 -1.6890e-01 7.2598e-02 -7.4014e-03 -8.6582e-02 -2.3952e-01 9.7968e-02 5.8235e-02 -7.7000e-02 5.9546e-02 4.4890e-02 3.4878e-01 -2.8980e-01 (126,.,.) = 6.8186e-02 2.5241e-02 8.6074e-03 -5.2280e-02 -2.3363e-02 3.2551e-02 1.8539e-01 -5.2975e-02 -4.4095e-01 -1.0673e+00 2.9257e-01 1.3955e-01 4.7721e-03 1.0114e-01 -4.3054e-01 -1.8220e-01 -3.3834e-01 -3.7529e-01 ⋮ -1.3286e-01 6.4792e-03 1.3652e-01 -1.6517e+00 -3.3156e-01 2.8734e-01 -9.4012e-01 3.8226e-01 1.5905e-01 -3.5121e-01 6.5831e-02 -1.9648e-01 6.5724e-02 -1.5490e-01 6.5657e-02 -1.0527e-01 -1.5488e-01 -9.4005e-03 (127,.,.) = -2.3498e-02 6.9453e-03 4.4837e-02 2.5762e-02 4.6459e-02 2.5738e-02 1.3858e-01 -5.1303e-01 -2.2565e-02 9.0544e-02 1.9153e-01 1.7328e-01 -8.9238e-02 -5.8354e-02 5.8674e-02 1.4331e-02 -1.2725e-01 -9.0258e-02 ⋮ 5.6267e-02 7.4358e-02 -2.7942e-01 -1.4918e+00 -1.2619e+00 -7.9139e-03 -1.6702e-01 2.0627e-01 -7.0767e-02 6.5989e-03 6.8970e-02 1.4741e-01 8.9828e-02 -1.9443e-01 -1.1682e-01 1.7129e-01 5.7646e-02 1.2306e-02 [torch.FloatTensor of size 128x128x6]), ('module.encoder.cbhg.conv1d_banks.5.bn.weight', 0.6030 0.4659 0.6145 0.5868 0.5053 0.4299 0.3208 0.3690 0.5187 0.3643 0.7580 0.5170 -0.9788 0.2888 0.6778 0.5030 0.3522 0.5321 0.6273 -1.3146 0.4277 0.6141 0.4835 0.4332 0.5083 -0.9616 -0.8252 0.4086 -0.6273 0.5303 0.5253 0.4767 0.4248 0.6602 0.6277 0.6253 0.4188 0.6715 0.6525 0.4297 0.4657 0.3448 0.6934 -0.0343 0.6080 0.4010 0.4177 0.4304 0.5196 -0.9563 0.5729 0.4634 0.5252 0.4574 0.4260 0.5545 0.7378 0.5902 -0.9966 0.6326 0.5971 -0.9012 -0.6216 0.2051 0.5022 0.4865 -0.9215 0.4763 0.3403 0.3322 -0.7515 1.1560 0.4566 0.5261 0.3933 0.4992 0.3358 0.4127 0.4077 0.4432 1.0589 0.6539 0.2347 -0.8491 0.6595 0.5311 0.4118 0.4477 0.1726 0.5200 0.4053 0.4654 0.9625 0.5017 0.5881 0.5774 0.4825 0.3396 -0.7531 0.3112 0.5963 1.0316 0.3974 0.5853 0.7602 0.4758 0.3991 0.4226 0.5176 0.7534 0.6088 0.5584 -0.8560 0.7328 0.4891 -0.8217 0.4753 0.6604 0.6666 0.4886 0.5125 0.4264 0.5448 -1.0820 0.3960 0.2083 0.5247 0.9695 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.5.bn.bias', 0.0947 0.0718 0.0124 0.2336 0.0085 0.1004 -0.0893 -0.0778 -0.0524 0.0065 0.1136 -0.0418 -0.1065 -0.0254 0.1232 0.0879 0.0633 -0.0840 0.1193 -0.2054 0.1677 0.2151 0.0697 0.0597 0.0166 -0.1709 -0.1288 0.0489 -0.1630 -0.0754 0.0570 0.0086 0.1426 0.0505 0.0564 0.2278 0.0128 -0.0277 0.0535 0.0196 0.0594 0.0252 -0.0084 0.0057 0.0969 0.0142 -0.1417 0.1198 -0.1133 -0.2663 0.2193 0.0708 0.0075 0.0174 0.0811 -0.0021 0.0313 0.1300 -0.1708 0.0537 0.1554 -0.1699 -0.1159 -0.0202 -0.0004 0.0829 -0.2699 -0.0136 0.0054 -0.0462 -0.1225 0.0095 0.0144 0.1135 -0.0139 0.0421 -0.0032 0.0377 0.0843 0.0332 0.1215 0.3692 -0.0251 -0.1014 -0.1097 0.0472 0.1536 0.0918 -0.0179 0.1474 -0.0726 0.0957 0.1166 0.0688 0.2160 0.0116 -0.0253 0.1411 -0.0984 -0.0216 0.1054 -0.0392 -0.1219 0.1568 -0.0006 -0.0553 -0.0160 0.0574 -0.0736 0.0534 -0.0771 0.0323 -0.1174 -0.0647 0.0409 -0.0897 0.1087 0.0810 -0.0016 -0.0134 0.0187 0.0645 -0.0278 -0.1699 0.0223 0.0254 -0.1488 0.0309 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.5.bn.running_mean', 0.1942 0.2407 2.3295 13.7009 0.5646 1.5938 1.6236 0.1412 3.5567 0.3877 0.1246 1.3773 2.0049 5.1180 0.4043 0.2196 2.6233 0.2074 0.4685 0.4912 1.4852 0.0517 2.0695 0.3175 0.1819 2.0328 2.0154 0.4518 7.0997 0.6767 0.0989 0.8559 0.5069 0.0173 0.0892 0.3716 2.1047 0.4246 0.1018 4.0117 0.4722 1.4483 0.3713 9.8769 1.0078 2.7833 0.2142 2.5666 2.5710 3.1552 1.3968 0.2538 0.8166 1.4437 0.2308 6.6636 0.3067 0.1399 0.7262 0.1478 0.1166 3.7425 6.9665 1.8447 1.2830 0.4066 3.4474 0.5367 0.3763 0.4006 2.5741 0.1998 0.4160 0.3257 1.5232 1.1630 2.7245 0.2250 0.8890 2.0377 0.0878 2.4357 0.8960 2.0837 0.5346 0.0699 0.7732 0.5608 1.8463 0.0790 1.3423 0.4863 0.1751 2.8209 2.3684 0.3946 0.8917 14.5403 1.9912 6.0808 0.5597 0.0064 1.8138 0.5429 0.1226 0.2695 0.4319 0.6293 0.2789 0.0554 0.9388 0.0294 2.7917 0.2053 0.1704 4.8849 0.4043 0.2905 0.2785 0.2442 3.3915 6.8654 0.8866 0.8732 2.7530 2.3496 2.6061 0.6980 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.5.bn.running_var', 2.0262 1.9987 25.1144 218.8765 6.4417 17.2455 17.5415 1.4495 46.8464 4.0630 1.1017 16.3569 23.2124 31.4073 3.9895 2.2963 22.1562 2.0646 5.9942 4.7428 17.5209 0.3373 26.3400 3.9097 1.7637 25.6737 22.4007 4.7676 54.8449 8.0765 0.8915 8.6571 4.9478 0.1151 0.8361 3.9047 19.3890 5.0134 0.8813 32.9457 4.8735 15.3827 3.2114 72.7250 10.3798 27.6761 2.1794 24.0510 35.4656 40.1592 17.1983 2.5397 8.5297 13.3604 2.1418 61.0993 2.9829 1.3570 7.2358 1.4686 1.0055 53.2624 58.9634 11.5873 13.8819 4.3137 48.9352 5.7693 3.6650 4.0847 30.2679 1.4583 3.7447 3.1868 17.5012 11.1999 36.6523 2.1900 12.0475 17.7696 0.7518 28.8415 7.9658 24.2708 5.0903 0.5693 8.9742 5.9398 12.8828 0.5220 17.0810 5.1503 1.5296 26.6620 25.7122 4.1311 11.0452 105.3546 16.2668 40.0044 5.6596 0.0321 25.1323 5.5465 1.2754 5.0395 4.8464 7.7836 2.8487 0.3995 12.0627 0.2450 34.3101 2.2691 1.5937 65.4531 3.9816 3.0409 2.7536 2.6018 47.5258 63.4379 11.8568 9.6248 23.6316 16.6635 31.3495 8.4266 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.6.conv1d.weight', ( 0 ,.,.) = -4.5353e-03 4.5222e-03 -3.9971e-02 ... -3.5038e-02 4.8943e-02 -5.0395e-03 8.4028e-02 2.2407e-02 -6.3263e-01 ... -2.4554e-01 -4.2537e-03 -8.9171e-01 -4.7564e-02 6.2109e-02 -9.9553e-01 ... 1.4646e-01 -9.9596e-01 1.6758e-01 ... ⋱ ... 2.2037e-01 1.9275e-01 1.5566e-01 ... -4.8230e-01 -9.3068e-01 -1.2051e-01 -2.5592e-02 -6.7962e-01 -2.8601e-01 ... 1.0041e-01 -3.2003e-01 -2.3566e-02 9.6592e-02 -1.9106e-01 8.4323e-02 ... -7.2522e-01 -4.6632e-02 1.1757e-01 ( 1 ,.,.) = 3.8156e-02 3.8708e-02 9.7287e-03 ... 4.9475e-02 4.8088e-02 3.4232e-02 -1.2674e-01 1.4788e-01 -2.9398e-01 ... 1.5944e-01 -1.7241e-02 7.4101e-02 3.7165e-01 -1.2560e-01 1.2577e-01 ... -1.2738e-01 5.0040e-01 -6.6209e-01 ... ⋱ ... 1.1140e-01 2.7386e-01 -4.2877e-01 ... 2.7582e-01 9.3554e-02 -5.7552e-01 -5.0430e-01 1.2536e-01 -3.1027e-01 ... -4.4512e-01 -4.1321e-01 7.5062e-02 -2.2296e-01 9.1203e-02 -1.4282e-01 ... -1.0473e-01 2.4301e-01 -1.2898e-03 ( 2 ,.,.) = -3.0079e-02 1.5203e-02 -2.8322e-02 ... -4.0184e-03 -1.2454e-02 8.4558e-04 6.6261e-02 -7.0814e-02 -6.4725e-02 ... 7.4598e-02 -5.3767e-01 -4.6577e-02 -1.0299e-01 -6.4324e-02 -9.7807e-02 ... -6.6077e-01 -6.5349e-02 -4.2513e-02 ... ⋱ ... -2.4065e-01 2.6608e-01 1.7404e-01 ... -6.9059e-02 -4.1446e-01 9.7021e-02 -9.1595e-02 -2.3584e-01 1.2416e-01 ... -9.2408e-01 4.9623e-02 -5.6548e-01 -7.9593e-02 -9.2951e-02 -1.1617e-01 ... -4.8386e-02 -9.6230e-02 -1.4643e-02 ... (125,.,.) = 1.0336e-02 -2.2211e-03 3.0974e-02 ... 5.5323e-02 7.0989e-03 1.1988e-02 -3.3972e-02 -2.6190e-01 3.3510e-02 ... -2.2103e+00 -1.2093e+00 -8.1943e-02 1.9794e-02 -1.1747e-01 2.1005e-01 ... -3.2863e-02 6.6548e-02 1.8791e-01 ... ⋱ ... 1.8541e-01 2.1995e-01 -9.6276e-01 ... -1.6338e-01 -8.7571e-01 -1.0884e+00 2.3457e-01 2.4633e-01 1.5244e-01 ... -2.2887e-01 -2.0436e-01 9.2099e-02 7.3668e-02 -1.5229e-01 -2.5827e-01 ... -1.0859e-01 -4.0845e-01 -2.8507e-02 (126,.,.) = -1.6866e-02 2.1091e-02 -1.3386e-02 ... -4.4216e-03 6.1371e-02 -1.1978e-02 5.1298e-02 -1.8507e-01 1.7350e-01 ... -6.8860e-01 1.3121e-01 1.1693e-01 -2.1713e-02 9.1121e-02 -8.1626e-01 ... -8.0668e-02 7.9635e-02 -1.6500e-01 ... ⋱ ... 5.4700e-02 -1.4175e-01 7.1425e-02 ... -9.5701e-01 3.1120e-01 -2.1914e-02 -3.2772e-01 4.5213e-02 -4.7685e-01 ... -3.1668e-01 -3.9196e-01 2.9794e-02 2.3587e-01 6.2199e-02 9.8133e-02 ... -2.6222e-01 -2.2871e-02 8.5439e-02 (127,.,.) = 3.3882e-02 -5.7220e-03 -2.2094e-02 ... 9.1597e-03 4.2177e-03 6.7870e-02 -1.4162e+00 -7.0018e-02 -7.7413e-01 ... -9.2951e-01 -1.7080e+00 -1.0842e-01 -2.1010e-03 1.1678e-01 -3.1630e-01 ... -3.0953e-01 2.2892e-01 -1.1174e+00 ... ⋱ ... -6.8819e-02 -2.1103e-03 2.1246e-02 ... 1.1823e-01 -8.3776e-01 -1.3537e+00 -1.6399e-01 2.0398e-01 -8.5368e-01 ... -5.6074e-01 -5.9672e-01 1.0280e-01 1.9300e-01 1.8914e-02 -8.1840e-02 ... 1.1959e-01 -3.3886e-01 -3.0645e-01 [torch.FloatTensor of size 128x128x7]), ('module.encoder.cbhg.conv1d_banks.6.bn.weight', 0.7607 0.5658 0.4069 0.5596 0.4923 0.3722 0.5383 0.5383 0.5162 0.4805 0.3922 0.6085 0.3954 0.3648 0.3961 0.7022 0.4645 0.4231 0.6034 0.4023 0.4413 0.3966 0.7327 -1.1406 0.4644 0.4746 0.4408 -0.9712 0.4288 0.6129 0.5061 0.5056 0.4656 -0.9311 0.4196 0.4411 0.4886 0.6136 -0.6578 0.4390 -1.1062 0.4580 0.4731 0.4692 0.5310 -0.8401 0.5045 0.4854 0.6072 0.4684 0.5032 0.5790 -0.8204 0.4661 0.4229 0.5374 0.3683 0.4203 0.3933 0.4200 1.0160 0.5978 0.4463 0.5107 0.5004 0.5872 0.6598 -1.0738 0.5930 0.5918 0.6508 0.5747 0.5351 0.4417 0.5006 0.4125 0.8759 0.4766 0.6038 0.5418 -1.2765 0.6014 0.5849 0.4119 0.4250 0.5348 0.5735 0.4446 0.8250 0.3214 0.5479 0.2924 0.3977 0.4694 0.5606 0.5359 0.5207 0.5898 0.5368 -0.6191 0.5788 0.7520 0.4601 0.5408 0.4477 0.7225 0.4985 -0.5981 0.3489 0.4543 0.4469 0.5317 0.4642 0.5542 0.3984 0.7196 0.7181 0.5273 0.4640 0.0085 0.5395 0.5949 0.6260 0.8270 0.4650 0.5774 0.5891 0.6750 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.6.bn.bias', 0.0465 0.2987 0.0744 0.1089 0.0278 -0.0894 0.1110 -0.0822 0.1167 0.0487 0.0298 0.1312 -0.1109 0.1366 0.0715 0.1303 0.0864 0.0792 -0.0341 -0.0299 0.0299 0.0357 0.1634 -0.1350 0.0503 -0.0015 -0.0014 -0.1345 0.0592 0.1044 0.1948 -0.0091 0.0063 -0.0762 0.0760 0.0956 0.0144 0.0784 -0.1574 -0.1276 -0.1545 -0.0514 0.0524 0.0895 -0.0015 -0.2377 0.0847 -0.0527 0.1217 0.0508 -0.0818 0.1196 -0.1154 0.0045 -0.1345 0.0840 0.0765 0.1920 0.1128 0.0415 0.0997 -0.1596 -0.0704 0.0274 0.0970 -0.0021 0.0329 -0.1977 -0.0295 0.1885 0.0173 0.0856 0.1171 -0.0773 -0.0033 0.0388 0.1215 -0.1076 -0.0509 0.0968 -0.3588 0.1007 -0.0009 -0.0015 0.0628 0.1344 0.1188 -0.1710 0.1497 -0.0175 0.1135 0.1049 0.0318 -0.0166 0.0242 0.0569 0.1420 -0.1035 0.0536 -0.1027 -0.1302 0.0295 0.0140 0.1080 0.0770 0.1285 -0.0579 -0.0593 0.0450 -0.2370 0.0294 0.2751 -0.0870 0.0337 0.0056 0.0325 -0.0473 0.0454 -0.0045 -0.0056 0.1151 0.0345 0.0490 0.2114 -0.0237 0.0176 0.0554 0.0154 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.6.bn.running_mean', 0.1225 5.6629 0.3841 0.4293 0.3980 0.9333 0.3633 0.5193 0.8983 0.3812 1.3739 0.1601 0.3842 3.3985 0.5929 0.0909 0.8515 0.3350 0.0835 0.5757 1.8449 1.4684 0.2797 0.8511 5.5845 0.9328 0.2805 0.1486 0.2183 0.4159 0.2561 1.0573 0.6213 2.2505 1.3800 2.7031 11.2693 1.5261 5.5516 1.0681 2.6043 0.8857 0.4740 0.1695 7.5865 3.3979 0.3413 1.0511 0.4375 1.3408 0.9484 1.4140 1.3673 1.3293 0.9539 0.2955 1.1923 0.7360 0.4612 0.2043 0.0889 0.5241 1.0609 0.3513 0.7633 0.6991 0.3560 1.9585 0.5934 0.5133 2.8330 0.4533 0.0918 0.5697 0.3809 3.3425 0.1446 0.1284 0.1832 1.7694 1.3482 5.0727 0.4330 0.9860 0.6478 11.7975 1.7110 0.7188 0.0323 5.5869 0.2135 0.7491 0.2602 0.3105 1.0022 1.0937 0.0551 0.4785 0.1808 7.3116 1.1992 0.0383 0.5896 0.3206 6.3004 0.6682 0.2665 0.4015 0.9495 1.3414 1.7563 2.1910 0.1055 0.1848 0.6613 0.0694 1.1714 1.0957 0.2106 14.4284 1.2125 0.2225 0.1841 0.0471 0.5587 0.3019 3.0310 0.1366 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.6.bn.running_var', 1.2599 73.2295 3.8698 5.0350 4.1386 11.7909 4.4939 6.1335 11.6138 4.7495 17.9012 1.8395 3.5470 35.2414 7.7309 0.8977 10.8892 3.6262 0.9323 6.7581 20.3248 18.4403 2.6988 11.7070 46.7139 12.6321 3.6993 1.2615 2.0364 5.4538 2.9888 11.9071 7.0494 28.3594 15.1422 27.9737 82.0041 20.0873 57.2968 12.5148 35.7018 10.4392 6.3748 2.0117 100.2919 41.9469 3.8628 11.7238 4.9764 17.7406 12.6431 14.7179 12.8097 14.9661 10.4469 3.3374 12.9786 9.2808 5.3461 1.6822 0.9324 7.0337 11.8969 3.9069 9.7212 7.7170 4.3436 22.5543 6.9971 5.5269 46.4625 7.0761 0.9344 6.4191 4.1967 31.3372 1.3874 1.0796 1.6380 24.6118 19.2409 54.6652 5.4432 10.5934 8.4867 127.3044 25.4032 8.7970 0.4156 36.1659 2.5299 8.9533 3.0954 3.1635 11.6492 9.1341 0.7056 5.0821 1.6385 72.9483 14.3235 0.3451 7.7560 3.5609 73.2377 8.5205 3.3550 4.4147 12.5989 14.3726 20.5676 25.9159 1.0745 2.0458 7.9261 0.6185 16.3564 11.1497 2.3524 212.2715 13.2993 2.2799 2.1828 0.4154 6.3970 3.1219 35.8959 1.4659 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.7.conv1d.weight', ( 0 ,.,.) = 5.8847e-03 2.1921e-03 -1.8377e-02 ... -1.3964e-02 2.6978e-02 5.5054e-02 7.2680e-02 -3.6535e-01 -5.2221e-03 ... -2.3408e-01 1.2410e-01 3.1972e-03 1.3844e-02 -1.1377e-01 1.9315e-02 ... 3.8549e-02 -1.6374e+00 4.1895e-01 ... ⋱ ... -4.8199e-01 3.3490e-01 -3.2895e-01 ... 8.4339e-02 6.1372e-02 -1.5737e+00 -1.0041e+00 -1.5568e-01 5.7800e-02 ... -5.0289e-01 9.4830e-02 1.0988e-02 -1.4340e-01 -3.7784e-01 1.9994e-01 ... -1.7842e-01 -5.8743e-02 -7.0364e-02 ( 1 ,.,.) = -3.3945e-02 -8.2994e-04 1.3010e-02 ... 1.9219e-02 -2.1708e-02 3.9321e-02 3.4651e-02 -1.7901e-01 -5.5429e-01 ... -7.5145e-01 -4.6915e-01 6.6311e-02 2.7764e-01 -9.3591e-02 1.3791e-01 ... -2.7145e-01 2.8122e-02 1.0651e-01 ... ⋱ ... 8.7166e-02 1.5260e-01 -7.1763e-02 ... -1.3844e+00 -1.0487e+00 -1.2397e+00 -1.2803e-01 -3.3113e-02 -3.9552e-01 ... -3.1659e-01 1.0684e-01 8.3546e-02 -8.9448e-02 2.4670e-02 2.8182e-02 ... 1.6902e-01 -5.5595e-02 -2.6387e-01 ( 2 ,.,.) = 2.4514e-02 2.0854e-02 -3.0092e-02 ... 4.2989e-02 3.4699e-02 3.5149e-02 -8.7910e-02 -3.7414e-02 -1.6182e-02 ... 1.3356e-01 -3.4510e-01 2.9737e-01 -5.0063e-02 -2.8385e-01 2.0934e-01 ... 2.8240e-02 8.9445e-02 -7.5023e-01 ... ⋱ ... -1.0467e-01 -2.0252e-01 -9.7439e-02 ... -1.9067e-03 7.2500e-02 -3.6290e-02 5.2983e-01 3.4188e-02 -1.1855e-01 ... -2.9443e-02 -2.1767e-01 -1.4374e-01 2.0587e-01 -5.5229e-02 1.2016e-01 ... 1.0737e-01 2.8937e-03 9.0145e-02 ... (125,.,.) = 2.3339e-02 -2.2911e-02 -2.5257e-02 ... 3.4837e-02 -3.6842e-03 -2.4066e-02 1.7219e-01 2.6568e-01 3.3388e-02 ... -8.4869e-02 1.0888e-01 -1.6184e-01 1.4101e-01 1.0440e-01 -1.5647e-01 ... 1.7188e-01 -1.0538e+00 -1.3552e+00 ... ⋱ ... -3.3658e-01 1.9421e-01 7.1169e-04 ... -4.4340e-01 -1.4363e+00 1.1073e-01 -2.8200e-01 -2.7627e-01 -3.5717e-01 ... -2.6617e-01 1.8424e-01 -5.1155e-02 -2.2967e-01 -9.3827e-02 1.8522e-01 ... -3.8467e-01 -8.9247e-02 -5.5430e-02 (126,.,.) = -1.6671e-02 -4.0164e-02 -3.8903e-02 ... -6.3660e-02 -3.1307e-02 -2.3072e-02 -2.9091e-01 1.1981e-01 8.3795e-02 ... -1.5997e+00 -1.1918e+00 -9.8389e-02 1.3939e-01 1.5886e-01 1.7142e-01 ... 2.3185e-01 -5.8920e-01 2.3799e-01 ... ⋱ ... 3.5581e-02 2.9580e-02 1.4395e-01 ... -6.6605e-01 -1.4438e+00 -7.2258e-01 1.3443e-02 -1.0530e-01 -6.6303e-01 ... -5.2410e-01 -1.6476e-01 9.5399e-02 2.5848e-02 -8.6919e-02 -1.2343e-01 ... 7.9862e-02 -1.0104e-02 -3.2548e-01 (127,.,.) = 1.3345e-02 1.6564e-02 -3.3550e-02 ... 2.5938e-02 2.8583e-02 -4.1905e-02 1.3643e-01 -1.3233e-01 1.6668e-01 ... 1.4648e-01 -1.8051e-01 -9.3971e-02 7.5661e-02 1.2190e-01 -1.9221e-01 ... 6.9154e-03 -1.6507e-02 -5.3025e-02 ... ⋱ ... 3.2565e-01 1.2659e-01 1.3699e-01 ... -1.9743e-01 2.6545e-01 -3.4281e-02 -1.7613e-02 -8.1578e-02 4.1549e-01 ... -1.9517e-01 4.2245e-01 2.1490e-02 9.5571e-02 1.0512e-01 -6.3192e-02 ... 1.4061e-01 2.6262e-01 1.6268e-01 [torch.FloatTensor of size 128x128x8]), ('module.encoder.cbhg.conv1d_banks.7.bn.weight', 0.5201 0.6178 0.3503 0.1448 0.5527 0.4999 0.7059 0.4651 0.4145 0.4550 0.5447 0.4034 0.4155 0.3661 0.5640 0.5221 0.3899 0.4972 0.5214 0.4538 0.5328 -1.0370 0.6178 0.5419 0.4244 0.3987 -1.6089 0.4248 0.3864 0.4433 0.4740 0.5513 0.4550 -1.1776 0.5307 0.5215 0.4541 0.5152 0.4265 -0.4415 0.4885 0.6703 0.4037 0.5493 0.3952 0.3893 0.4978 0.5512 0.4581 0.0155 0.6011 0.4262 0.4914 0.4226 0.4978 0.5835 0.4875 0.3945 0.3939 0.4874 0.5940 0.5797 -0.9385 0.6759 0.5902 0.5815 0.5256 0.4608 -1.2700 0.4835 0.5862 0.8845 0.5304 0.3843 0.4911 0.4653 -0.8797 0.4928 -1.0461 0.4720 0.5692 -0.8213 0.4002 0.4373 0.5114 0.4222 0.3675 -1.3332 0.4836 0.5117 0.4860 0.4630 0.4095 -0.6982 -0.8360 0.5048 0.4621 0.4235 -0.8588 0.5175 0.3910 0.5467 0.5042 0.5353 0.4100 -1.0143 0.4884 0.4789 0.4557 0.6047 0.4890 1.0135 0.6491 -0.9885 0.4902 0.4262 0.5985 0.3811 0.4982 0.4380 -1.0503 0.5028 0.3959 -0.9950 0.5104 0.4080 0.5989 -0.7140 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.7.bn.bias', 0.0531 0.1352 -0.1326 -0.0226 0.1226 -0.0110 -0.0515 -0.1070 -0.0722 0.0605 0.0245 0.1540 0.0093 0.0824 -0.1027 0.0786 0.0775 0.0218 0.0305 -0.0334 -0.0583 -0.2360 0.1077 0.0735 0.0465 0.0191 -0.1012 -0.0647 -0.0001 0.1090 -0.0216 0.0867 -0.1156 -0.0776 0.0726 -0.0987 0.2782 0.0555 -0.0561 -0.0393 0.1253 0.0206 -0.1254 0.0507 0.0083 0.0365 0.0373 0.0833 0.1624 -0.0043 0.1495 0.0487 0.0595 -0.0549 0.1385 0.0319 0.0761 0.1448 -0.0136 0.0397 0.2314 0.2268 -0.0985 0.1825 0.1466 -0.0436 0.0372 0.0725 -0.0878 -0.0063 0.1393 0.1552 -0.0325 0.0941 0.0756 0.1570 -0.1996 -0.0028 -0.2038 0.0497 0.0060 -0.1240 -0.0317 0.0253 0.0478 -0.0950 0.0721 -0.2091 0.0940 -0.1020 0.0115 0.0147 0.1373 -0.1032 0.0048 -0.1266 -0.1190 0.1090 -0.0406 0.1024 -0.1009 0.0821 0.0140 0.2145 0.0478 -0.2095 -0.1442 0.0544 0.0345 0.0340 0.0834 0.2172 0.1414 -0.1626 -0.0807 0.0232 -0.0692 -0.0451 0.0634 0.0790 -0.1703 0.0544 0.0259 -0.2925 -0.1293 0.0527 0.0511 -0.0644 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.7.bn.running_mean', 0.2812 0.1372 3.9745 7.4301 1.2966 2.3002 0.3036 1.0330 0.1696 0.1970 0.0372 0.8095 3.5538 0.6015 1.1094 0.5394 6.0354 0.2902 1.2992 1.0459 1.4696 3.5347 1.2122 0.8080 0.5515 0.7807 0.0896 0.6803 1.1137 0.1049 2.4879 0.1218 0.5865 1.5523 4.8728 0.9130 0.9491 1.5586 2.0178 12.4710 0.8012 0.3510 0.3879 8.8085 1.0802 0.4705 0.9743 0.4803 0.5677 9.9005 1.3111 0.3241 1.5443 0.9569 1.3462 1.8252 0.9787 1.1744 0.8573 0.9252 4.9456 4.2871 0.0392 0.4906 3.7855 0.1991 0.8360 0.8939 0.3714 0.2258 2.8575 0.2458 0.1251 0.9596 0.2072 0.5053 3.7698 9.7333 1.5940 1.2681 0.1128 3.9079 4.3071 0.1252 0.5050 1.4095 1.2890 0.6643 0.2945 0.3837 1.9824 0.4402 0.7092 2.5530 6.0500 0.6629 0.4099 1.3019 5.8735 0.8943 0.2518 0.9939 0.1958 2.5477 0.3094 2.3750 0.7993 1.1749 0.4016 8.2615 0.7854 0.1583 1.5999 5.3653 0.5013 0.9868 0.3480 0.2088 0.2505 0.3646 1.0942 0.3739 0.5132 3.1107 0.9551 0.9754 0.0634 5.9613 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.7.bn.running_var', 3.4528 1.4335 56.6181 43.0498 19.2615 33.8639 3.2992 13.0646 1.7917 2.1548 0.5802 10.7251 53.0717 7.4029 16.0876 7.8325 64.2025 3.4285 17.8234 14.2309 20.2142 46.8463 13.8736 11.6845 6.8415 10.1249 0.5939 7.8891 14.9711 1.2082 26.2391 1.4065 7.8775 21.0462 59.2802 12.7272 15.2322 23.1035 29.2466 100.4376 11.8395 4.3766 4.0224 99.0680 14.3279 4.6216 10.4203 7.0414 7.9117 88.7863 14.2661 3.8882 23.2832 12.0185 20.1529 22.3754 13.8920 15.9952 10.1813 13.3677 67.4268 49.7770 0.2859 7.1800 45.6419 2.0598 11.4943 10.0566 3.6953 2.3665 36.3690 2.9168 1.4601 12.4855 2.7109 6.1154 47.0909 84.9703 28.9722 17.3121 1.7708 46.1098 40.3095 1.3421 6.2969 19.3591 18.4596 8.4604 3.7825 4.2503 28.9321 4.7093 9.0993 26.3167 79.0112 8.9691 4.0326 19.4620 78.8674 12.4432 3.5051 12.4378 2.4673 29.6752 3.7716 32.0126 10.5445 13.3852 5.1200 84.1416 12.3769 1.6237 19.6032 71.8491 5.8113 10.7161 4.0810 2.1904 2.5760 5.0541 12.6978 4.9397 5.0230 37.5503 12.6321 13.6513 0.5706 66.9323 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.8.conv1d.weight', ( 0 ,.,.) = -2.7547e-02 4.2572e-02 4.9609e-02 ... 3.9734e-02 4.3074e-02 3.7359e-02 7.7272e-02 1.5924e-01 1.1967e-01 ... -1.0723e+00 -1.3182e+00 8.4199e-02 1.2160e-01 -2.6351e-01 -9.3865e-02 ... 1.9552e-01 3.4170e-01 -3.8920e-01 ... ⋱ ... 8.6547e-02 1.8861e-02 1.8739e-01 ... -1.3392e+00 1.6523e-01 -7.8392e-01 6.9175e-02 5.4181e-02 2.3608e-01 ... -6.6663e-01 -8.3389e-02 -2.4729e-01 -1.6154e-01 -5.3170e-02 7.6704e-02 ... -8.9796e-02 -4.5192e-02 -1.7497e-01 ( 1 ,.,.) = 2.0992e-02 -6.3350e-03 5.8229e-03 ... -7.8143e-03 1.5739e-02 9.7577e-04 -6.3255e-02 1.6842e-01 2.9652e-01 ... -1.1351e+00 2.6696e-02 -2.3831e-01 -7.5772e-03 4.0876e-01 2.8722e-02 ... -3.6409e-01 1.6402e-02 6.1327e-02 ... ⋱ ... 4.7118e-03 4.9662e-02 -1.0712e-01 ... 2.1898e-01 2.2844e-01 4.2636e-01 -1.5443e-01 -4.4623e-02 -4.2821e-02 ... 3.0447e-01 -1.1925e+00 -1.1519e-01 -6.3182e-02 8.5808e-02 -1.1307e-01 ... 5.1514e-02 -1.5927e-01 -2.3402e-01 ( 2 ,.,.) = -8.6872e-03 9.8962e-03 -1.3881e-02 ... 6.4804e-03 1.7940e-02 -1.6702e-02 -5.5718e-02 -3.8603e-02 3.3173e-01 ... -5.4399e-02 -2.6046e-02 -1.8899e-01 1.7230e-01 -3.0073e-01 1.2899e-01 ... 1.5112e-01 -2.3045e-01 -2.2903e-01 ... ⋱ ... -6.5743e-02 -4.2672e-03 1.5176e-01 ... 6.4832e-03 1.4361e-01 3.0495e-02 1.7581e-01 -1.3613e-01 -1.2332e-01 ... -1.2568e-01 -4.6832e-03 -2.4509e-01 -4.4992e-02 -1.2710e-01 1.3147e-02 ... -3.4621e-02 2.4574e-02 -4.5390e-02 ... (125,.,.) = -1.7292e-02 -7.6999e-03 1.0731e-02 ... 6.0297e-03 3.7521e-03 2.6006e-02 -7.9919e-02 -4.9393e-02 -5.8662e-01 ... -3.1168e-01 -1.3692e+00 -9.1817e-01 -3.7842e-01 5.7602e-02 -7.9871e-02 ... 9.0100e-03 -9.7410e-02 5.3491e-02 ... ⋱ ... -2.0529e-01 6.3498e-02 2.7442e-02 ... -7.3926e-01 1.5999e-01 -1.1082e-01 -2.1393e-01 1.3996e-01 -7.4514e-02 ... -1.9744e-01 -4.8600e-01 -4.3896e-01 1.2696e-02 -1.9454e-01 5.5097e-02 ... -9.6668e-03 -7.0573e-01 -4.2450e-01 (126,.,.) = 9.5369e-03 -1.4266e-02 -9.3971e-03 ... 1.1653e-02 -4.1660e-03 1.2811e-02 1.8217e-01 2.1676e-02 -3.0760e-01 ... 1.6222e-01 1.0926e-01 -4.7690e-02 5.3789e-02 -2.5205e-01 2.3654e-01 ... -5.4497e-03 7.4248e-02 -2.2669e-01 ... ⋱ ... -1.6270e-01 -8.9960e-02 -6.6290e-02 ... 5.3901e-02 2.5797e-04 7.1787e-01 -1.0061e-01 1.4840e-01 -1.0621e-01 ... -5.7225e-01 3.4818e-02 4.1044e-01 -3.5203e-02 -7.5935e-03 8.3569e-02 ... 1.1512e-01 1.9343e-02 -8.4515e-02 (127,.,.) = 1.7825e-02 5.7461e-03 -5.2420e-02 ... -8.4316e-03 3.7530e-02 7.4160e-03 -7.5303e-01 2.5693e-01 -5.4925e-01 ... -1.6473e-01 9.0183e-03 -1.2218e-01 -3.1759e-01 5.5704e-02 -2.5702e-01 ... 1.2007e-01 -5.1170e-01 -5.0482e-01 ... ⋱ ... 7.2999e-02 2.4884e-01 -6.3399e-01 ... -1.0819e-01 -8.2761e-01 -3.9823e-02 -6.4678e-02 2.1777e-01 1.4814e-01 ... -2.4322e-01 2.4405e-01 -5.4170e-01 1.5825e-01 -1.0545e-01 8.2131e-02 ... 3.3050e-01 -3.3931e-01 -6.4139e-02 [torch.FloatTensor of size 128x128x9]), ('module.encoder.cbhg.conv1d_banks.8.bn.weight', 0.5771 0.4581 -0.7716 0.6157 -0.0321 0.5181 -0.9333 0.9233 0.4347 0.4704 -1.3071 0.4834 0.4864 0.3935 0.4802 0.4552 0.4248 0.5687 0.4133 0.5554 0.5055 0.5408 0.4969 0.4613 -0.9117 0.6503 0.3440 0.4934 0.4743 0.7277 0.5781 -0.9944 0.4250 0.4817 0.4396 0.6737 0.4569 0.4752 0.4585 0.4791 0.7359 0.5473 0.5542 0.8879 0.4969 0.4156 0.4636 0.5663 0.6065 0.4312 0.4343 -1.1273 -1.2112 0.4511 -1.0567 0.4800 0.7169 0.6837 0.4633 0.4376 0.4631 0.3726 0.4705 0.4251 -0.7982 -1.0721 0.6287 0.3680 0.4368 0.4333 -0.9332 0.3998 0.4077 0.4922 0.4723 0.5908 0.5140 -0.8896 -0.7219 0.4918 0.5012 0.4491 0.3801 0.3578 0.3361 -0.8209 0.5648 0.5712 0.4660 0.5767 -0.9550 0.5229 1.1241 0.4727 0.4580 0.4395 0.4749 0.4501 0.4727 0.4406 -0.0748 0.5233 0.3423 0.5639 0.5692 0.6152 0.4194 -1.0430 -0.9918 0.5580 0.4808 0.4698 0.5428 0.4862 0.4849 0.3935 0.4148 0.6059 0.3756 0.4541 0.4012 0.4700 -1.0728 0.4493 0.5653 0.5266 -0.7840 0.5118 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.8.bn.bias', 0.0501 -0.0455 -0.1282 0.0573 -0.0026 0.2111 -0.2480 0.1674 -0.0672 0.1419 -0.1950 -0.0008 0.0860 0.0740 -0.0304 0.0451 0.0265 0.0886 -0.0701 0.0380 0.0024 0.1123 0.0816 -0.0170 -0.0699 0.2331 0.0394 0.0186 -0.0887 0.1286 0.0954 -0.3693 0.2549 0.0197 0.0338 -0.0266 0.0174 0.1036 -0.0043 0.1029 0.1415 0.0642 0.1490 0.1656 0.0606 0.1683 0.0088 0.0462 -0.0647 -0.0888 -0.1728 -0.2408 -0.0847 -0.0096 -0.0791 -0.2313 -0.0319 0.1064 0.0187 0.0005 -0.0566 0.1294 0.0037 -0.0533 -0.0811 -0.1528 0.1238 -0.0294 0.1289 -0.1212 -0.2811 0.1584 0.0270 0.0790 0.1521 -0.0276 0.0304 -0.2490 -0.0301 -0.0343 0.0026 -0.0490 -0.0082 0.0151 0.0540 -0.0843 0.2147 0.1075 0.0691 0.0812 -0.1074 0.0371 0.0939 0.0553 0.0268 -0.0289 0.0969 0.0376 0.0850 0.0560 -0.0032 0.1189 -0.1336 0.1118 0.0979 -0.1019 0.0585 -0.1189 -0.2122 0.0355 0.0324 0.1024 -0.0053 0.0846 -0.0164 0.0347 -0.0575 0.1198 0.0437 0.0006 0.0076 0.0700 -0.3651 0.1086 -0.0448 0.1511 -0.1061 0.0879 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.8.bn.running_mean', 0.2172 1.0771 4.1651 0.9412 9.1795 0.7024 4.9453 0.0088 0.4241 0.5401 0.4936 0.9792 0.2949 0.4522 1.2292 0.4527 0.2819 0.3859 0.1360 5.6269 1.4467 0.9929 0.6515 0.1727 1.0151 0.8062 0.8452 0.2774 1.3303 1.9343 0.2595 7.0958 1.3832 0.1969 1.2018 0.2215 0.3508 0.1674 0.1257 0.6598 0.2392 0.0863 1.0187 0.2623 2.1649 0.8956 1.5223 0.0435 0.5574 0.6954 0.8634 2.5703 1.4503 1.2198 2.5609 0.5472 0.1622 0.7101 0.7225 0.1451 0.8528 1.9435 0.5099 0.2758 7.3982 1.0463 1.3533 0.1834 0.3139 1.3081 9.1900 1.7530 0.3186 0.0864 0.3088 3.5589 3.8022 4.5939 0.1067 2.4051 1.6988 3.5461 1.2979 0.2601 0.2093 0.6377 0.9761 0.7301 0.8001 0.1085 2.1115 0.2482 0.0415 0.0286 0.4192 0.9545 0.8663 0.3971 1.4581 0.2442 8.4836 0.1308 1.3314 0.4853 0.2627 20.8991 0.4634 6.1021 3.4489 1.1361 1.6796 0.4814 0.1891 0.5700 0.7645 0.5785 0.9796 0.2983 2.0324 0.8786 0.2386 1.3299 3.0389 0.8985 0.3829 0.1680 6.3584 0.9019 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.8.bn.running_var', 2.6072 15.8986 41.4874 12.6203 120.1542 9.2696 69.7438 0.1051 5.7360 7.4304 5.7254 13.7302 3.6712 5.7503 19.8554 5.4608 3.6343 4.9982 1.3920 56.8145 23.1102 13.2925 8.4560 2.5918 14.3851 12.5008 11.4232 3.0210 22.5177 24.3255 3.4395 105.0245 20.6933 2.8084 18.2451 3.1466 4.5639 2.3411 1.3012 9.6010 3.1274 1.0201 16.0106 4.2138 34.5435 12.2854 23.6870 0.5350 7.4291 8.9021 11.7848 36.8306 20.2813 17.7479 36.3394 7.2768 2.1176 10.1342 11.8166 2.0295 10.7391 28.6486 6.8892 4.2384 101.8086 10.5774 15.8857 2.3810 3.1255 19.9336 109.9822 27.0445 3.5758 1.2688 4.0835 55.7266 59.2590 59.2517 1.0412 31.4357 22.5261 39.3778 23.9771 1.8874 1.7850 6.1222 13.9891 8.8358 11.9521 0.9764 29.9719 3.0948 0.4055 0.3364 4.8352 13.7660 12.5649 5.3636 24.2606 3.5150 38.5196 1.5219 13.0963 6.5039 3.6606 184.7602 7.0112 75.9883 56.8520 17.2093 27.7894 6.7595 2.5488 8.0841 10.2645 7.5262 12.5878 3.9603 28.3046 11.7080 2.7740 20.5979 48.2758 11.8036 5.0028 1.9099 58.3826 12.9609 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.9.conv1d.weight', ( 0 ,.,.) = -1.2490e-03 -2.1708e-02 -1.3717e-02 ... -3.0470e-02 2.3697e-02 -2.0284e-02 5.8066e-04 -1.5938e-01 1.0033e-01 ... -1.2398e-01 7.3970e-02 -2.7781e-01 5.9249e-03 4.6126e-02 1.9371e-02 ... 3.9461e-01 -4.0300e-01 -8.3866e-02 ... ⋱ ... 2.8160e-01 3.5017e-01 4.8683e-03 ... -4.4023e-01 -1.8014e+00 1.0488e-01 -2.9543e-01 9.8797e-02 -4.6013e-02 ... -1.5038e-01 1.5641e-01 -1.1150e+00 -1.8047e-01 4.8216e-02 -8.1360e-02 ... -7.3457e-02 -1.7689e-01 1.3239e-01 ( 1 ,.,.) = -1.8107e-02 -1.6579e-02 -2.2401e-02 ... -3.5960e-03 2.1527e-02 1.0929e-02 -1.8054e-01 -2.7563e-01 4.2408e-02 ... 3.0541e-02 -4.6706e-02 -1.8934e-01 -1.1431e-01 2.2401e-01 -2.3183e-01 ... 2.9956e-02 -2.6556e-02 4.7747e-02 ... ⋱ ... -1.4706e-01 -3.5813e-02 3.4498e-01 ... -5.3065e-02 3.5055e-03 -4.3279e-02 6.3806e-02 -1.0685e-01 1.2551e-01 ... 1.1024e-01 1.9199e-01 2.5452e-01 -2.1525e-02 -5.1525e-02 1.2442e-02 ... 2.5958e-02 -6.9614e-02 1.0687e-01 ( 2 ,.,.) = 2.2546e-02 3.1870e-02 6.1550e-03 ... 2.5899e-02 2.4231e-03 -2.4090e-02 1.6648e-01 -2.2620e-01 1.8013e-03 ... 2.6781e-01 -2.2226e-01 -2.3612e-01 -2.5657e-01 1.0353e-02 6.2530e-02 ... 9.9296e-02 2.0786e-01 3.0163e-01 ... ⋱ ... -5.6254e-01 -1.4620e-01 -1.5079e-01 ... -1.5648e-02 1.4819e-01 -2.9422e-01 7.9525e-02 4.4902e-01 -1.9844e-01 ... 2.4591e-01 -2.6186e-02 9.1251e-02 -2.2625e-02 -3.3239e-01 5.0177e-02 ... -1.7797e-01 1.0420e-01 -4.0915e-02 ... (125,.,.) = 1.6619e-02 -1.2193e-02 1.8705e-02 ... -3.8572e-02 1.2710e-02 -4.4064e-03 -9.2771e-03 -2.2271e-01 1.7887e-01 ... 1.1817e-01 -1.3845e-01 1.2891e-01 -3.6453e-02 -2.4411e-01 3.2742e-01 ... 2.4662e-01 -1.0074e-01 -7.5640e-01 ... ⋱ ... 1.5101e-01 4.6638e-02 -4.4681e-01 ... -2.7759e-01 -2.9360e-01 -4.4486e-03 5.1798e-01 -5.7905e-01 -5.9879e-01 ... 1.4691e-01 3.9102e-01 -2.4114e-02 -4.9070e-02 7.3786e-02 1.1952e-01 ... 6.9462e-02 -4.3941e-02 -1.3187e-01 (126,.,.) = -2.0730e-02 -2.2372e-02 -1.4065e-02 ... -6.3372e-02 3.2751e-02 5.6936e-02 -1.6303e-01 -3.9686e-01 4.6270e-02 ... -8.9372e-02 -1.7251e-01 1.2557e-01 3.6537e-02 1.4654e-01 5.6433e-01 ... 3.5981e-01 4.9119e-02 -1.3890e-01 ... ⋱ ... -1.7771e-01 -1.9284e-01 3.4713e-01 ... 1.7397e-01 -3.4972e-02 4.2198e-01 -3.6182e-01 -2.9011e-01 -8.5047e-01 ... 2.9102e-02 -8.1940e-01 6.1122e-02 4.3997e-02 1.5453e-01 -7.1149e-02 ... -5.7751e-02 1.6488e-01 -2.2338e-01 (127,.,.) = -2.2192e-02 -2.3334e-02 -1.8660e-02 ... -1.6811e-02 1.4472e-02 -2.1724e-02 -3.5289e-01 7.6216e-02 1.3014e-01 ... 3.1498e-02 1.8133e-01 -8.8622e-02 2.3091e-01 -7.8763e-02 1.1455e-01 ... -4.8957e-02 -2.1180e-02 9.7200e-02 ... ⋱ ... -1.1430e-01 -9.1477e-02 5.9393e-02 ... -7.1157e-02 -3.4201e-02 -7.4520e-02 2.3236e-01 -9.3504e-02 8.0771e-02 ... -1.7873e-01 -3.8356e-02 -7.7936e-02 -3.5841e-02 -4.4784e-02 -3.8030e-02 ... 3.6888e-02 1.9318e-02 -3.7652e-03 [torch.FloatTensor of size 128x128x10]), ('module.encoder.cbhg.conv1d_banks.9.bn.weight', 0.4393 -0.9549 -1.0875 -1.0638 0.4982 -0.2653 -1.0011 0.5027 0.4402 0.6328 -1.1375 0.5881 -1.0941 0.4750 0.5537 -1.1034 0.4594 0.4753 0.4670 0.4599 0.3778 0.5000 0.4827 0.2698 0.4689 0.5821 0.4883 0.5259 0.6125 0.4771 0.4144 0.4943 0.9913 0.3862 0.4912 0.6556 0.5625 -1.0870 0.3494 0.4605 0.5620 0.5033 -1.0619 0.5407 0.4440 0.5110 0.4817 0.4087 0.3723 0.4755 0.5537 0.5215 0.4659 0.4154 0.4723 0.5282 0.4582 0.4558 0.4564 -1.0588 0.4236 -0.9117 0.4967 0.4320 -0.9488 0.4758 0.5198 0.4111 0.5109 0.4726 0.5565 0.5091 -1.0525 0.4263 0.4744 0.4414 0.6061 0.4788 0.4085 0.3768 0.5847 0.4593 -0.5649 0.4581 0.3943 -1.1525 0.4698 0.5119 0.7235 0.4772 -1.0129 0.4497 0.4374 0.4186 0.5272 -1.0513 0.4193 0.5946 0.5846 0.6834 0.5183 -1.0543 0.5302 -0.9866 0.5402 0.4783 0.4794 -1.0009 -1.0717 0.4186 0.5195 0.4428 0.4568 0.4693 0.4228 0.5647 0.3728 0.5470 0.3970 0.6895 0.3913 0.4568 0.4436 0.5078 0.5220 0.4835 0.4882 -0.4537 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.9.bn.bias', 0.0586 -0.1105 -0.1877 -0.2257 0.1193 -0.0651 -0.0004 0.0020 0.0956 0.1345 -0.1910 0.1584 -0.2718 0.0781 0.0980 -0.2071 0.0967 0.0668 0.0064 -0.0072 0.0819 0.0101 -0.0022 0.0207 -0.0344 -0.0508 0.0962 -0.1183 0.0321 0.0021 -0.1047 0.1579 0.0669 0.0278 0.1390 0.0801 0.0536 -0.1815 0.0936 0.0856 0.0218 0.1254 -0.1436 0.0402 -0.0151 0.1040 0.0806 0.0061 0.0105 -0.0671 -0.0062 0.1902 0.0505 0.0838 0.0965 0.2213 0.0852 0.0050 0.0234 -0.1593 0.0914 -0.2165 -0.0612 0.0422 -0.2179 -0.1791 -0.0908 0.0078 0.0684 -0.0234 -0.0024 0.0164 -0.2556 -0.0098 0.0991 0.0982 -0.0484 -0.1098 0.0130 0.0209 0.0349 0.0068 -0.0038 0.2075 0.1310 -0.0343 0.0645 0.1235 0.0625 0.0277 -0.2461 -0.1395 0.1059 -0.0418 0.0936 -0.2501 0.1003 0.0725 0.0844 0.1106 0.0130 -0.2846 0.1275 -0.2598 0.0714 0.0708 -0.0137 -0.0779 -0.0494 -0.1282 0.0250 -0.0276 0.0298 -0.0035 0.1645 0.0869 0.0116 0.0805 0.0296 0.1166 0.0299 0.0568 -0.0186 0.1003 0.1489 0.1541 -0.0144 -0.0573 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.9.bn.running_mean', 6.5877e-01 1.1888e+00 4.0905e+00 3.5394e+00 4.6724e-01 5.0541e+00 1.5191e-02 6.1297e-01 9.2574e-01 8.4650e-02 4.4347e+00 9.4158e+00 1.8806e+00 2.5577e+00 3.9566e-01 2.4452e+00 2.2579e-01 2.1187e+00 4.6313e-01 7.4665e-01 4.7345e-01 8.1957e-01 4.6520e-01 3.9985e+00 1.0356e+00 4.9184e-01 3.4647e-01 2.6489e-01 7.6358e-01 1.5014e+00 7.5902e-01 6.3828e-01 4.4555e-02 7.4685e-01 1.3885e-01 2.2320e-01 4.3190e-01 2.1437e+00 3.7883e-01 1.0564e+00 8.6116e-01 5.1337e-01 3.4238e+00 5.4124e-02 2.4536e-01 5.7007e-01 5.2306e-01 2.8920e+00 1.4835e+00 8.9156e-01 2.7911e+00 2.8729e-01 1.4278e+00 6.7166e-01 3.4051e-01 1.7970e+00 4.4385e-01 2.7944e-01 2.3780e+00 2.4005e+00 2.5483e-01 3.9349e+00 1.3342e+00 5.1655e-01 4.9710e+00 1.4501e+00 1.9810e+00 4.1460e-01 1.2548e+00 1.2262e+00 4.2413e-26 7.5281e-01 2.9965e+00 3.6136e-01 1.4276e+00 7.1920e-02 7.4376e+00 1.5449e+00 7.0433e-01 1.1369e+00 8.1408e+00 1.8017e+00 5.4147e+00 2.7499e-01 2.8041e-01 2.9431e+00 3.9210e-01 1.4347e+00 1.0004e-01 1.0050e+01 2.4921e+00 1.3566e+00 3.3763e-01 5.7150e-01 3.5560e-01 5.6552e+00 3.4802e-01 7.8090e+00 1.4393e+00 2.0824e-01 1.2301e-01 2.5660e+00 2.9211e-01 2.7372e+00 4.3169e-02 1.8924e-01 7.7597e-01 3.7526e+00 3.0975e+00 4.3442e-01 3.6992e-01 1.2951e+00 9.7277e-01 5.0649e-01 8.0088e-01 2.7125e-01 4.7215e-01 4.0176e-02 1.7603e+00 8.2303e-02 2.1448e-01 8.7293e-01 1.0862e+00 6.3130e-01 1.8604e+01 9.2569e-01 3.1713e+00 1.8322e+00 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.9.bn.running_var', 1.1393e+01 1.2072e+01 6.1747e+01 5.2921e+01 7.0069e+00 3.7881e+01 2.4796e-01 1.0894e+01 1.3262e+01 1.5698e+00 7.2534e+01 1.1644e+02 2.9410e+01 3.6548e+01 5.2717e+00 3.9736e+01 2.7101e+00 3.0697e+01 6.0937e+00 9.7516e+00 6.7279e+00 1.3124e+01 5.1916e+00 4.5021e+01 1.5338e+01 7.6580e+00 4.9303e+00 4.1603e+00 1.1069e+01 2.3312e+01 1.0472e+01 9.4337e+00 6.0192e-01 1.1085e+01 1.5251e+00 2.9384e+00 8.5122e+00 2.9296e+01 5.6508e+00 1.0549e+01 1.1747e+01 7.0393e+00 5.2226e+01 8.6393e-01 2.9830e+00 7.4713e+00 8.0775e+00 4.7513e+01 2.4230e+01 1.3114e+01 4.0316e+01 3.8614e+00 2.1979e+01 9.1312e+00 6.1349e+00 2.4939e+01 6.3974e+00 4.5273e+00 2.5589e+01 3.2393e+01 3.5256e+00 6.0869e+01 2.2742e+01 6.9768e+00 7.8588e+01 2.3854e+01 2.8853e+01 4.8536e+00 1.8289e+01 1.7872e+01 1.3410e-25 1.1533e+01 4.3636e+01 5.7138e+00 2.2906e+01 7.8805e-01 8.7948e+01 2.0688e+01 9.1638e+00 1.7218e+01 9.7784e+01 2.0290e+01 6.1067e+01 4.0045e+00 3.5471e+00 4.0518e+01 5.4415e+00 1.6939e+01 1.4608e+00 8.3743e+01 2.9854e+01 2.1994e+01 4.8434e+00 7.4962e+00 5.7864e+00 8.8313e+01 4.5509e+00 1.0137e+02 2.2057e+01 3.6852e+00 1.6732e+00 4.1326e+01 3.7805e+00 4.0370e+01 5.2707e-01 2.1986e+00 1.0860e+01 4.0454e+01 4.4730e+01 4.9725e+00 4.6923e+00 1.9501e+01 1.6277e+01 7.7052e+00 1.2423e+01 4.5366e+00 7.3450e+00 4.4356e-01 2.6865e+01 1.0230e+00 3.0405e+00 1.2984e+01 1.7935e+01 9.7486e+00 1.5390e+02 1.1770e+01 5.4263e+01 1.4682e+01 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.10.conv1d.weight', ( 0 ,.,.) = 3.1965e-02 -2.1600e-02 -1.0129e-02 ... -3.9500e-02 2.4110e-02 -5.2788e-02 -3.4690e-01 -2.2403e-01 -1.4392e-01 ... 1.9043e-01 -4.2190e-02 9.6729e-02 -5.6671e-02 8.6796e-02 4.9753e-02 ... 4.1030e-01 -5.7082e-02 -2.4434e-01 ... ⋱ ... -3.9421e-01 -3.9091e-01 1.9921e-01 ... -5.9211e-04 7.9745e-02 2.5677e-01 5.6460e-02 -4.6051e-01 -7.2606e-01 ... 1.4965e-01 2.2783e-02 2.5805e-01 -6.5163e-02 -6.0904e-02 -1.0758e-01 ... -1.2326e-01 1.9627e-01 5.3745e-02 ( 1 ,.,.) = -1.1012e-02 -2.4141e-02 7.4641e-03 ... -3.5029e-02 2.1609e-02 2.7473e-02 6.4160e-02 -4.5087e-02 2.4176e-02 ... -2.1199e+00 1.6511e-01 7.0646e-02 -2.0581e-02 -3.0219e-01 2.2424e-01 ... -1.9432e+00 -2.2556e-02 2.3356e-01 ... ⋱ ... 3.6412e-02 8.1937e-02 -3.6133e-01 ... -1.4206e-01 -1.0086e-01 3.7693e-03 -1.2741e-01 2.0047e-01 -3.4011e-01 ... -2.2786e-01 9.9893e-02 -9.1123e-02 4.0323e-02 -6.2912e-02 -1.3398e-01 ... 1.2671e-01 -3.4339e-01 5.6047e-03 ( 2 ,.,.) = 2.4882e-02 6.8116e-03 -2.3403e-03 ... 1.7273e-02 -3.0395e-02 1.5216e-02 -7.1228e-02 1.7549e-02 1.7411e-01 ... -3.0658e-01 -3.1027e-01 -1.8680e-02 2.0702e-02 3.8690e-01 3.5982e-01 ... -1.4068e-02 4.9060e-01 -6.0749e-01 ... ⋱ ... -3.4607e-01 1.7243e-01 -2.9192e-01 ... -1.2063e-01 -6.2334e-01 1.5975e-01 4.4745e-02 -6.6337e-01 9.9555e-02 ... 1.0091e-01 -5.8471e-01 -6.7909e-01 -3.9671e-01 3.0501e-02 -2.3217e-01 ... 5.8709e-02 -1.5712e-01 -1.7326e-01 ... (125,.,.) = 8.0183e-04 4.9156e-02 3.7375e-02 ... 3.8611e-02 -2.6888e-02 2.1561e-02 2.7643e-01 -4.8574e-01 3.0004e-01 ... -5.0056e-01 1.2600e-01 6.9725e-02 -1.2696e-01 -7.3011e-01 -2.5322e-01 ... 1.9872e-01 -4.8887e-01 3.7973e-01 ... ⋱ ... -5.3998e-01 9.9488e-02 5.8411e-02 ... -1.0019e+00 -3.1663e-01 -9.8524e-02 -2.3971e-01 6.2732e-02 -5.4200e-01 ... 1.1759e-01 -1.0818e+00 -4.4208e-01 -5.9369e-03 -2.3910e-02 -3.4175e-02 ... -4.2960e-01 1.6119e-01 -1.5830e-01 (126,.,.) = 3.0219e-02 -1.9569e-02 2.2828e-02 ... -4.9221e-03 -1.5746e-04 1.6835e-03 -5.2897e-02 -1.0896e+00 -2.5633e-02 ... -1.3852e+00 8.2560e-02 -3.2648e-01 1.7529e-01 3.1022e-01 7.6865e-02 ... -6.4377e-01 -1.1810e+00 -5.8760e-03 ... ⋱ ... -1.1371e-01 2.4317e-01 -2.2551e-01 ... 1.5635e-01 3.2738e-02 -2.9651e-01 -3.6801e-01 1.8591e-01 -8.9321e-01 ... -2.3886e-01 -2.5043e-02 -3.4334e-01 9.4663e-02 -1.5956e-02 4.3235e-03 ... 6.6401e-02 7.0955e-02 -9.5309e-02 (127,.,.) = 7.3776e-03 -1.5134e-02 -2.4655e-02 ... 3.2836e-02 2.8032e-03 8.1508e-03 -1.9972e-01 8.0481e-02 -3.8044e-02 ... 5.5677e-02 -1.1366e-01 -2.4903e-01 -9.5893e-02 2.1776e-02 3.0244e-01 ... -6.6698e-01 6.8781e-02 -3.9274e-01 ... ⋱ ... -7.4045e-01 -7.1610e-02 8.1012e-02 ... -9.6516e-01 -4.3659e-01 5.7861e-02 4.5081e-02 -8.4197e-03 -2.9812e-01 ... 9.3342e-02 -2.5426e-01 6.9766e-02 -2.3646e-01 -1.3777e-01 -2.1241e-01 ... -2.0303e-01 -3.3080e-01 -3.8868e-02 [torch.FloatTensor of size 128x128x11]), ('module.encoder.cbhg.conv1d_banks.10.bn.weight', 0.4344 0.6601 0.5257 0.4728 0.5926 0.6298 0.4142 0.4840 0.5788 0.5373 0.4757 0.4920 -1.0094 0.4214 -0.4071 0.4764 0.5595 0.4574 0.4613 0.4398 -0.1176 0.4166 -1.0335 0.4902 0.4966 0.5661 0.4466 0.4938 0.5169 0.4404 0.4754 0.3960 0.5418 0.4070 0.6619 0.4451 0.5086 0.5592 0.4527 0.4330 0.5056 -1.0021 0.5319 0.4430 0.5225 0.4582 -1.1048 0.5603 0.4973 0.5415 0.3753 0.4543 0.5833 -1.1240 0.5791 0.5694 0.4648 -1.1631 0.4888 0.5441 0.4827 0.5610 0.4867 0.5662 0.6429 -0.8323 -1.2634 0.4140 0.4953 -1.0699 0.4622 0.5307 0.5351 0.3556 0.5135 0.4880 0.9097 0.6635 0.6228 0.6124 0.5160 0.5250 0.5747 -0.9779 -1.1890 0.3801 -0.8307 0.4638 0.5340 0.4631 -1.1818 -0.0781 0.4186 0.4079 0.4136 0.5133 0.6921 0.7630 0.5433 -1.1873 0.4674 0.6153 0.4806 -1.1369 0.4613 0.5009 0.4665 0.4379 0.5447 0.4623 0.5299 0.5023 0.5153 -1.1337 0.5276 0.5530 0.2472 0.9015 0.8050 0.4522 0.4985 0.4078 0.5552 0.5015 0.4041 0.5260 0.5415 0.5411 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.10.bn.bias', 0.0919 0.1599 -0.1932 -0.0641 -0.0096 -0.0893 0.1003 -0.0897 -0.0102 -0.0688 0.0816 0.1825 0.0328 0.0858 -0.1020 -0.0027 0.1956 0.0956 0.1314 0.0746 -0.0201 0.0738 -0.0781 -0.0908 0.0215 0.0816 0.0249 -0.2307 0.0006 0.0656 -0.0424 -0.1228 0.0058 -0.1347 0.1288 0.1105 -0.1432 0.1302 0.0002 -0.0217 0.0076 -0.2547 -0.0165 -0.0844 -0.0056 0.0757 -0.1091 -0.1264 0.0169 0.0392 -0.0662 -0.0453 0.2432 -0.1353 0.0020 0.1203 0.1139 -0.2315 -0.0388 0.1765 0.0539 0.0279 0.0893 -0.0326 0.0203 -0.0750 -0.2031 0.1531 -0.0247 -0.1084 0.0231 0.1744 0.0488 0.0008 0.0818 0.1781 0.0758 0.0973 0.0312 -0.0496 0.0482 0.0648 -0.0318 -0.2598 -0.1971 -0.1048 0.0023 0.0289 -0.0186 0.1050 -0.1225 -0.0108 -0.0859 0.0186 0.0906 0.1087 0.1233 0.0986 -0.0136 -0.2576 0.0323 0.2639 -0.0318 -0.1497 0.1004 0.0851 -0.0893 0.0045 0.0408 -0.0955 0.0782 0.0695 -0.1207 -0.3635 0.1621 0.1723 -0.0080 0.1664 0.1579 0.1677 -0.0714 0.0826 0.0476 0.0803 0.0645 0.1041 0.0326 0.0649 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.10.bn.running_mean', 0.5715 0.4787 0.5886 0.8980 3.5874 0.8712 0.4066 0.3407 0.2876 0.2127 0.7389 3.5558 1.7456 0.2775 10.9636 0.4779 1.1393 0.6408 1.8196 1.6055 4.1998 1.6512 5.2940 1.6774 0.4384 0.6230 0.7512 3.3535 2.6237 1.3885 0.3585 0.4600 2.8188 0.5538 1.7370 0.5905 1.5130 3.0031 0.8443 1.7084 0.1227 4.0383 0.4113 0.1819 2.9395 0.2186 0.2448 1.1199 1.2371 1.2992 0.3041 0.1607 2.4229 3.3267 0.1393 1.7397 0.4127 1.6051 0.1115 0.7717 0.9341 0.1516 0.5486 1.1999 4.9575 11.2588 1.5768 0.4544 0.6733 1.8999 0.4666 1.3727 0.1095 0.3338 1.1352 0.4972 0.0371 1.5335 0.0357 0.5844 0.1059 1.0272 2.3088 4.1160 0.6063 0.2265 7.8183 0.6386 0.2476 0.3767 1.3118 8.1675 0.7651 0.0587 0.9415 0.7294 12.8802 1.8015 3.8814 2.8758 0.6577 3.3869 0.5129 2.2430 1.4347 1.2082 0.2646 0.8686 0.6948 0.9758 0.3923 0.1918 0.5296 5.1353 0.9857 2.3894 2.8527 0.0900 2.0014 0.7127 1.4246 0.3800 0.1628 0.5197 0.3114 0.9503 0.6195 0.0770 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.10.bn.running_var', 8.2030 6.9862 9.9357 15.2206 63.2403 13.4121 7.2176 5.0432 4.0066 2.1296 12.5899 53.5196 28.3303 3.4958 74.1906 7.4082 16.8727 10.0798 28.3960 28.3827 34.1655 25.4091 98.1128 28.2085 6.3762 10.2647 10.9746 48.4148 42.2198 15.5746 4.7592 5.8319 39.3070 8.7624 32.9965 9.7541 21.1747 38.0584 13.8016 14.1439 1.4819 63.9721 5.7939 2.3634 42.8073 3.4405 2.6276 18.2133 19.6721 22.8966 3.9301 2.2315 33.0830 50.7738 1.7183 22.4970 5.4776 24.4470 1.9185 12.9313 14.1143 2.5983 8.1474 18.5348 71.7504 126.3550 23.6516 7.1524 9.9798 25.4920 7.4487 20.8167 1.6687 4.8173 15.6865 5.9931 0.5863 21.7323 0.3726 8.5008 1.6352 14.4115 32.1514 70.6772 8.6440 3.3432 124.2477 8.7533 3.7762 6.4966 17.0977 49.6824 11.3608 0.8637 12.2889 11.0555 175.4644 26.1177 70.9077 44.7209 10.0761 47.5161 7.7064 32.5647 23.2061 20.2472 3.1556 13.3795 11.0379 17.0404 6.6057 2.9541 8.3151 84.5365 17.6665 32.6644 18.0309 0.8674 32.1254 10.9736 21.6199 5.6371 2.2261 7.9350 4.4895 15.9966 8.4143 1.1489 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.11.conv1d.weight', ( 0 ,.,.) = 1.4652e-02 5.3001e-03 1.0720e-03 ... 1.5744e-02 1.2328e-02 1.4525e-02 7.5329e-02 -1.8212e-01 -5.1388e-01 ... -1.1485e+00 -8.6555e-02 9.2271e-02 -1.5347e-01 -1.8767e-01 3.9370e-02 ... -7.1084e-02 6.2531e-02 -1.3709e-01 ... ⋱ ... -7.7978e-02 -2.1314e-02 4.5352e-02 ... -1.2343e-02 -3.4504e-01 -8.0914e-03 -6.4026e-01 6.0411e-02 -9.7488e-01 ... -2.4730e-01 -3.6128e-01 1.5796e-02 -1.0371e-01 -1.2962e-01 3.2543e-01 ... 4.1171e-03 -5.8695e-01 -6.4797e-01 ( 1 ,.,.) = -1.0019e-02 -2.5829e-02 4.0702e-03 ... -3.8123e-02 8.5839e-04 -1.2530e-02 9.8703e-02 2.6438e-01 -2.0426e-02 ... -1.3528e+00 3.5872e-02 6.7084e-02 -8.0820e-03 -7.2237e-02 -2.2399e-01 ... -5.5380e-02 -2.0954e-01 -3.6943e-01 ... ⋱ ... -3.1969e-02 2.7417e-01 1.2250e-01 ... -1.4495e-01 -8.4950e-01 -2.2803e-01 3.2143e-01 1.2398e-01 -4.5293e-01 ... -1.4413e-01 2.1823e-01 -3.8627e-01 1.7834e-02 2.2267e-02 -7.0417e-02 ... 4.2893e-02 -2.8627e-01 3.4247e-02 ( 2 ,.,.) = 1.9375e-02 2.3812e-02 1.1276e-02 ... 5.1828e-03 -2.3985e-02 2.3789e-02 -4.3739e-01 2.0894e-01 -3.8575e-02 ... 3.2274e-01 1.7369e-01 1.0408e-01 1.8813e-01 2.6524e-01 5.7974e-01 ... -8.7242e-03 -1.0690e-01 -7.8983e-01 ... ⋱ ... 2.3529e-01 -6.2904e-02 -1.4557e-01 ... -4.5544e-02 1.7732e-01 8.4309e-03 3.3294e-01 -8.4541e-02 8.7829e-02 ... -1.1917e-01 -4.7819e-02 -7.6159e-01 1.3981e-02 -1.6293e-02 -3.2810e-03 ... -2.1877e-01 1.0173e-01 -1.8190e-01 ... (125,.,.) = 3.4189e-03 -2.0456e-02 1.4243e-02 ... -2.3227e-03 9.3239e-04 -1.0247e-03 -3.3739e-02 -1.0891e-01 4.8999e-02 ... 1.4597e-02 1.6591e-01 5.4610e-01 2.9653e-01 -1.0343e-01 1.2764e-01 ... -2.8191e-01 1.4097e-01 -1.2534e-01 ... ⋱ ... -2.1604e-01 4.4984e-02 -2.8327e-03 ... -2.6177e-02 2.6116e-01 -3.5064e-01 -3.2880e-03 -4.0734e-02 -1.9771e-02 ... -1.2193e-01 -2.4205e-01 3.3782e-02 -5.5796e-02 3.7811e-02 2.0099e-02 ... 7.9827e-02 1.1639e-01 -3.2006e-01 (126,.,.) = -1.4516e-02 1.7064e-02 -9.8402e-03 ... -2.3872e-02 -3.4032e-02 1.4728e-02 1.5041e-01 -9.3227e-02 6.9412e-03 ... 1.4876e-01 1.2624e-01 -8.2570e-03 1.4423e-01 5.5396e-02 3.7413e-01 ... -3.2377e-02 9.6140e-03 -1.9368e-01 ... ⋱ ... -1.3670e-01 1.9511e-01 1.7143e-02 ... 2.1694e-01 2.6442e-01 1.4836e-01 1.3229e-01 -1.1568e-01 -2.9643e-01 ... 2.0208e-02 -8.4069e-01 -3.5946e-01 -5.8840e-02 3.1408e-02 4.1788e-02 ... 1.1578e-01 8.8983e-02 -9.6329e-02 (127,.,.) = 2.7546e-02 -5.6759e-03 1.0084e-02 ... 2.6578e-02 1.1931e-02 -1.4826e-02 -1.6599e-01 -2.7726e-02 1.0764e-01 ... -2.8034e-02 1.1052e-01 1.5459e-01 4.3881e-01 -1.1269e-01 -5.1397e-01 ... 1.1887e-01 -5.0752e-03 1.7655e-02 ... ⋱ ... -1.3443e-01 -3.9785e-01 -6.7975e-01 ... 2.2963e-01 -2.1617e-01 -5.5946e-01 -1.4329e-01 -7.1816e-02 8.6829e-02 ... -2.5498e-01 1.2733e-01 -5.0500e-01 -2.2701e-02 -3.5109e-02 8.2566e-02 ... 6.8880e-02 -2.8114e-02 -2.3018e-02 [torch.FloatTensor of size 128x128x12]), ('module.encoder.cbhg.conv1d_banks.11.bn.weight', 0.4696 0.5719 -1.1061 0.5807 -0.7647 0.4872 0.4623 0.4274 -1.3260 0.4248 0.4323 0.4575 0.5025 0.4771 0.4064 0.5374 0.6726 0.4568 0.4295 0.7191 0.4623 0.5528 -1.2894 0.4423 0.5022 -1.1094 0.4809 0.5221 0.4350 0.5107 0.4495 0.5038 -0.9673 -1.2126 0.7197 0.5469 0.7150 0.6002 0.4375 0.4025 0.5752 0.4551 -1.1043 0.4402 0.5183 -1.0622 0.7087 0.5022 0.4157 0.4661 0.4746 -1.0687 0.4714 0.4893 -1.1576 0.4664 0.4239 -1.0739 0.5324 0.5097 0.3901 0.5238 0.5563 0.4259 0.4894 0.4330 -1.1346 0.4826 0.4866 -1.1332 0.5377 -1.0953 0.4420 -0.9456 0.4166 0.5060 -1.2205 0.4793 -1.0951 0.4486 0.5078 0.5554 0.8730 0.6453 0.4621 -0.6677 0.4828 0.5072 0.4975 0.6089 -0.9835 -0.0207 0.5271 0.6392 -0.8556 -1.2751 -1.1145 0.4019 0.4133 0.5971 0.5816 0.4599 0.4032 0.9104 0.5933 0.4664 0.4314 0.4786 0.4945 -1.2253 0.6391 0.5243 0.5276 0.5435 0.4402 0.4819 -1.0076 0.4569 0.4705 0.4374 0.4443 0.6577 0.4741 0.5412 0.4809 -0.2688 0.4772 -1.3877 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.11.bn.bias', -0.1058 0.1084 -0.3531 0.1187 -0.0526 -0.0250 0.0292 0.0647 -0.2039 -0.0704 0.0664 0.0294 -0.0517 0.0872 0.1567 0.0910 -0.1163 0.0101 0.0445 -0.0107 0.1458 -0.0375 -0.2707 0.0549 0.0212 -0.1040 0.0820 0.1413 -0.1988 0.0427 0.1023 0.0553 -0.2724 -0.2291 0.1637 -0.0511 0.0080 -0.0833 -0.0119 0.0671 0.0635 -0.0437 -0.1125 0.0252 -0.1392 -0.2180 0.2414 0.0596 -0.0395 0.0883 -0.3027 -0.1139 -0.0174 -0.0760 -0.3178 -0.0409 -0.0582 -0.1743 0.0023 -0.1353 0.0581 0.0447 -0.0721 0.0643 0.1069 0.0617 -0.1114 0.0584 0.0626 -0.2247 0.0815 -0.0843 0.0049 -0.1601 0.0536 -0.0974 -0.1126 -0.0245 -0.2789 0.0085 -0.1055 0.1799 0.0619 0.0099 -0.0358 0.0111 0.0501 -0.0037 0.0825 0.0898 0.0390 -0.0124 0.0855 0.0772 -0.0457 -0.1813 -0.1343 -0.1044 -0.1445 0.1514 0.2075 -0.1159 -0.0705 -0.0395 -0.0904 0.1480 -0.0233 0.0901 0.0267 -0.1453 0.1292 -0.0030 -0.0621 0.0499 -0.1750 0.1231 -0.1312 -0.0642 0.0314 -0.0623 -0.0681 -0.2011 0.1394 0.0520 0.1993 -0.0699 -0.0923 -0.2286 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.11.bn.running_mean', 0.8973 1.0403 4.8064 2.1079 0.1551 0.8337 1.7971 0.6739 4.2610 1.1109 0.9439 0.5355 1.3070 0.2914 1.5545 1.5114 6.3411 0.3893 1.6662 3.2795 1.6572 6.7529 1.8553 1.4979 0.5757 4.7291 0.9452 0.8243 0.7778 1.3144 0.3643 0.7754 4.2364 3.2026 18.7853 1.7417 0.1808 2.4082 2.0200 0.6738 2.0175 1.1166 3.8472 0.6636 2.0998 4.3731 9.3946 0.4795 0.2454 0.5545 1.1134 2.0495 1.7427 0.7735 3.7053 0.3383 0.1242 3.1761 0.4819 1.1880 0.0979 2.6670 2.3404 1.3646 1.2772 1.5946 7.1215 0.9997 2.6096 3.1457 0.7870 1.3130 1.3897 7.1944 1.5634 0.5821 1.0111 0.8214 2.0198 0.2335 1.0773 1.2245 0.1167 0.3181 1.7782 0.1862 0.8499 1.5571 1.3499 7.5830 5.2754 0.0000 0.2575 0.3283 0.0546 2.8495 1.7159 1.3518 0.4363 1.7090 2.2579 1.9332 0.6331 0.5330 8.2135 0.7789 0.8577 0.9780 0.3793 1.0137 3.6428 1.2983 1.0218 0.4764 1.1906 0.9962 4.0012 0.7476 1.5306 0.6303 1.0833 1.0098 0.7644 0.3430 1.3532 13.6173 1.5928 1.0167 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.11.bn.running_var', 15.9795 17.4517 94.5124 26.5261 1.7434 13.4592 29.4983 10.8412 70.0869 19.7031 13.3471 6.9818 23.0768 3.9078 29.2266 27.8719 112.1345 5.1913 29.6777 46.6807 27.0041 89.0012 33.1502 22.0520 8.1558 79.3337 18.6742 14.1399 11.9580 20.0961 5.2169 12.5176 70.1408 57.7935 184.6222 21.1908 2.6207 39.1001 33.0373 9.4337 30.4736 16.5897 63.7404 8.9600 30.5162 75.9287 151.3965 7.5285 4.1469 9.7788 16.7911 35.3818 33.8144 12.2592 70.2199 6.0559 1.6863 39.9687 7.8825 15.8410 1.6622 49.5056 45.0633 20.5786 20.9901 27.1553 117.9534 15.9916 50.4664 55.2017 15.1110 21.9056 22.0118 85.8351 25.7032 9.7736 16.3477 12.8946 33.3585 3.1145 17.9771 16.4898 1.7553 5.7009 26.2157 2.5881 13.6531 29.6845 16.6336 118.7652 68.9620 0.0000 3.7575 5.1797 0.4994 55.4025 25.7519 18.1042 6.0330 25.7486 36.9491 38.7223 8.7716 10.5446 108.5823 13.5954 15.2862 12.7029 4.9249 14.5424 53.4624 24.4559 15.0345 6.6778 20.6577 19.7080 63.5889 13.8471 27.6995 9.9054 18.7738 17.2054 12.9331 5.1603 25.3986 81.0888 24.8573 14.9030 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.12.conv1d.weight', ( 0 ,.,.) = 2.5124e-03 -2.2784e-02 1.0984e-02 ... 4.9846e-04 2.8635e-02 1.0756e-03 -1.3106e-01 1.4341e-01 7.9323e-02 ... 6.3507e-02 -4.5839e-02 2.1501e-01 9.0208e-02 -1.7771e-01 -4.5272e-01 ... -6.9815e-02 -1.9479e-01 -1.8784e-03 ... ⋱ ... -1.3932e-01 -3.7012e-01 -4.7639e-01 ... -1.0966e+00 1.5456e-01 -9.4229e-01 5.7549e-02 -3.5620e-02 -2.3836e-01 ... 2.2370e-01 -3.1791e-01 -4.1511e-01 7.0249e-02 1.9633e-01 1.1341e-01 ... -3.2817e-01 2.5300e-01 -1.3422e-01 ( 1 ,.,.) = -1.7731e-02 4.5285e-02 -1.2814e-02 ... -2.7173e-03 -1.4069e-02 -3.6759e-03 -7.2258e-02 -4.0775e-01 -1.8876e-01 ... 4.4626e-02 -1.0953e-01 -4.6202e-01 -1.1532e-02 2.4754e-01 -1.5029e-01 ... 3.5624e-01 -4.6946e-02 -1.4037e-01 ... ⋱ ... -3.6031e-01 1.9696e-01 1.2519e-01 ... -1.9825e-01 -4.8447e-02 -1.3655e+00 3.2029e-01 -6.1391e-02 1.5598e-01 ... -1.5652e-01 1.6171e-01 1.2432e-01 6.3132e-02 -1.3603e-01 1.7496e-01 ... 5.9061e-02 9.7532e-02 -1.7508e-01 ( 2 ,.,.) = -6.3073e-03 -7.1700e-03 -9.2430e-03 ... 1.5414e-02 -1.2460e-02 -1.5091e-03 -1.9551e-01 1.3026e-01 1.2604e-01 ... 9.2214e-02 -1.9176e-01 2.9766e-01 1.9804e-01 -3.1485e-01 2.3391e-01 ... -1.3164e-01 4.3471e-01 -1.9872e-02 ... ⋱ ... 2.8241e-01 1.6263e-01 -1.1973e-01 ... -1.9607e-01 2.0565e-01 -4.4889e-01 1.6790e-01 -3.0801e-01 3.3417e-01 ... 4.0863e-02 -6.6297e-02 1.1063e-01 7.7762e-02 1.0632e-01 6.3908e-02 ... 1.2705e-01 -4.4205e-02 9.7256e-02 ... (125,.,.) = -2.7683e-02 1.0624e-02 -5.0701e-03 ... -2.5705e-02 -3.8035e-02 -1.6509e-02 -3.8700e-01 -1.5979e-02 -2.6519e-01 ... 3.7613e-02 -3.7213e-02 1.0377e-01 -6.5393e-02 -1.7880e-01 -4.2023e-02 ... 1.0308e-02 -6.8235e-02 1.3338e-02 ... ⋱ ... -6.8673e-01 1.2962e-01 -1.2936e-01 ... -1.4112e+00 3.2245e-01 3.7807e-02 1.8645e-02 -7.1574e-02 -3.7431e-01 ... 5.2542e-02 -5.8785e-02 3.6606e-02 -2.0383e-01 8.8988e-02 4.6355e-02 ... -2.3510e-01 -2.2312e-01 -3.7548e-01 (126,.,.) = 1.6585e-02 1.8607e-02 -3.3634e-02 ... 6.2257e-03 1.5424e-03 -2.4602e-02 -3.4146e-01 2.2077e-02 -4.2619e-01 ... 1.9176e-01 -1.5382e-02 1.8814e-01 2.1378e-01 -2.0700e-01 1.5258e-01 ... -2.4342e-01 -1.1730e-01 1.1733e-01 ... ⋱ ... 9.8764e-02 3.2841e-01 3.0355e-01 ... 5.4214e-02 -9.0069e-02 -3.2505e-01 -3.9483e-01 2.0615e-01 2.1718e-01 ... -4.8824e-01 -8.4747e-01 1.1944e-01 -3.3625e-02 -4.8884e-03 4.1738e-02 ... -1.3514e-01 -2.0050e-01 -1.4370e-01 (127,.,.) = 2.5590e-02 1.8387e-02 2.0806e-02 ... 8.1549e-03 -1.6850e-03 7.8334e-03 -2.8174e-01 -3.6004e-01 -2.3312e-01 ... 4.5390e-02 4.9092e-02 -4.5602e-02 6.9535e-01 1.4977e-01 -2.8423e-02 ... -1.3624e-01 1.0768e-01 -2.6318e-01 ... ⋱ ... 5.7602e-02 1.3463e-01 5.8125e-03 ... -2.4428e-02 -5.6623e-01 -2.7310e-01 5.5486e-02 2.0697e-01 -2.1729e-01 ... 2.6449e-01 -7.7781e-02 -2.6877e-01 8.7469e-02 -4.8216e-02 4.2947e-02 ... 2.2505e-01 -2.1960e-01 -1.7593e-01 [torch.FloatTensor of size 128x128x13]), ('module.encoder.cbhg.conv1d_banks.12.bn.weight', 0.5730 0.4872 -0.8539 0.4749 0.5131 -1.1585 -1.4734 0.7568 0.5510 0.5189 0.5052 0.6131 0.5743 0.5302 0.4877 0.5639 0.5930 -1.1419 0.4849 0.5213 -1.2542 0.5455 0.4479 -0.9798 0.4703 0.5385 -0.5705 -1.1007 0.5222 -1.0825 0.6221 0.4761 0.4963 0.5569 0.5680 0.5440 -1.0525 0.5683 0.5189 0.6212 0.4043 -0.9544 -1.3453 0.5191 0.6303 0.5956 0.5559 0.5394 1.3447 0.7187 0.5206 0.5399 0.4557 -1.2270 0.5964 0.4943 0.5270 0.7588 0.5271 0.6135 0.5529 0.4835 0.5829 0.0193 0.4772 0.4560 0.5157 0.4611 0.5422 0.5020 0.4983 0.5436 0.5950 0.4876 0.4689 -0.3098 0.5467 0.0583 0.4947 0.4951 0.4450 0.5582 0.5557 -0.9568 0.4925 0.5055 0.5056 0.5599 -1.2361 0.4219 0.4890 0.4836 0.6297 0.5976 0.6654 0.4469 -1.2179 0.6102 0.2783 0.4592 0.4686 0.4252 -1.3238 0.4724 0.3981 0.5550 0.6266 0.4612 0.5325 0.5209 0.6053 0.5557 0.4515 0.4776 0.4911 0.4925 0.5737 0.6716 0.4675 -1.0168 0.5330 0.5361 0.5203 0.5810 0.5359 0.4884 0.7344 0.4919 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.12.bn.bias', 0.0372 -0.0224 -0.2256 0.1013 0.0967 -0.1798 -0.1600 0.1434 0.0034 0.0257 0.0465 -0.1792 -0.0230 -0.0695 -0.0222 -0.0355 0.0586 -0.0271 0.0935 0.1585 -0.1269 -0.0820 0.1005 -0.2136 -0.0031 0.0654 -0.0345 -0.1961 -0.0700 -0.2523 0.1570 0.0983 0.0166 -0.0398 -0.1316 -0.0009 -0.2186 0.0827 -0.0064 0.1099 -0.1261 -0.0138 -0.1116 0.0792 -0.1398 0.1277 0.0497 -0.1172 0.1723 0.1022 0.0088 0.0905 0.0054 -0.1959 0.0717 -0.0222 -0.2334 0.1637 -0.0481 0.1706 -0.0529 -0.0409 0.0606 0.0078 -0.0984 0.1355 0.0632 -0.0456 0.1148 -0.0248 0.0301 0.0840 0.0625 -0.0165 -0.0693 -0.0636 -0.1416 -0.0125 0.0959 0.0557 0.0163 0.1104 -0.1205 -0.2128 -0.0318 0.0453 -0.0566 0.0477 -0.1948 0.0128 0.0273 0.0031 -0.0068 0.0112 0.1647 0.0586 -0.2183 -0.1291 -0.0029 0.1077 -0.0951 0.1773 -0.1475 0.0573 0.0275 -0.1403 0.2314 -0.1141 0.2082 0.1744 -0.1538 0.0801 0.0501 0.0025 -0.0200 0.0973 0.1787 0.2107 0.0047 -0.0693 0.0694 0.0482 0.0716 -0.0190 -0.1061 0.0990 0.1247 0.0677 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.12.bn.running_mean', 1.6643 0.7914 18.2355 0.6188 0.6880 3.9849 0.0993 2.8688 5.5944 0.4411 2.5454 0.9192 1.0979 0.9350 0.4640 6.2717 2.3771 7.7375 0.3193 0.4917 2.0095 1.0701 1.5729 14.6311 0.3919 0.6385 0.0995 7.7049 0.2759 4.8997 0.6901 0.4588 0.4694 1.7162 1.2722 0.4624 7.4349 0.2309 1.6574 6.2443 0.7438 9.8981 0.1851 1.1118 4.9262 2.5397 0.3932 1.3472 0.1245 4.5497 1.3292 1.7824 0.0314 3.1071 2.4115 0.7718 1.1463 3.7740 1.0823 0.2714 1.8357 1.1610 0.5519 6.8656 0.5687 0.3749 0.1267 2.3691 0.4179 1.6421 0.5482 0.3998 0.2772 0.9582 2.1013 7.3207 0.8778 6.0328 1.1068 0.3546 0.7101 2.3588 1.4954 16.8315 1.0917 0.7447 1.3966 1.6128 3.0740 0.8368 1.9746 0.7762 2.1567 0.5987 4.5678 0.4576 3.7072 4.7982 0.3503 1.0569 6.0528 1.2477 0.3078 0.0929 0.4153 0.6627 0.3931 0.6685 0.7672 0.5199 4.8562 0.0687 1.2443 3.8482 0.0919 0.3363 2.0254 2.4370 0.4994 2.8464 0.3332 0.7634 0.2546 3.4189 2.1983 0.4134 0.7395 1.0887 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.12.bn.running_var', 24.7021 12.1259 200.8881 9.5477 11.7550 64.6297 1.1414 52.8568 57.6167 6.9328 49.5094 15.8128 19.0611 15.9187 7.2106 59.7456 38.3615 116.4506 5.2705 8.0799 38.5880 16.4950 31.4097 191.6912 6.2062 10.4048 0.9764 117.2513 5.2514 73.4023 14.5581 7.2274 8.0633 33.8416 20.4415 7.9939 133.4245 2.7753 28.9187 87.0000 13.3420 139.6145 2.5720 20.2775 92.7000 48.3540 6.4112 22.0264 1.3546 68.5772 24.9688 34.0636 0.4005 51.7549 42.1333 12.8884 21.4131 65.8662 14.6326 5.1442 35.9827 19.4669 9.0503 72.6889 8.8729 5.6320 1.7500 43.4587 7.7485 26.1731 7.0052 5.8300 4.1561 17.6958 39.7497 50.7230 13.5891 33.3082 19.6486 6.0075 12.3842 41.6913 27.7635 232.8391 17.8972 13.6521 22.5059 26.8656 52.6071 15.8357 32.6477 12.5625 36.2878 9.6681 69.5989 6.4244 69.6563 53.4896 2.5188 17.5069 55.6254 23.0790 4.2944 1.2833 7.3206 11.1468 7.2908 10.5342 14.3333 6.6158 59.1373 0.8878 22.2354 72.2668 1.1781 5.2295 32.1831 35.1829 7.5751 34.5029 5.4838 12.5656 3.7906 41.8978 45.2942 7.5254 13.7072 20.0205 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.13.conv1d.weight', ( 0 ,.,.) = 1.7745e-02 -4.4126e-03 -4.8155e-03 ... -1.1811e-02 -1.2982e-02 4.8544e-03 1.3211e-01 6.4812e-03 3.9755e-02 ... 1.3613e-01 -1.4129e-01 2.5989e-01 1.3647e-01 -2.4757e-02 8.7259e-02 ... -3.0748e-01 -1.5349e-01 4.7045e-02 ... ⋱ ... 2.7293e-01 -6.1637e-02 2.6514e-01 ... -9.1236e-02 1.3647e-01 -4.1393e-01 1.8076e-01 1.4856e-01 -9.5415e-02 ... 2.6375e-02 9.1741e-02 -6.9988e-02 1.8714e-02 2.2266e-01 -8.0913e-03 ... -1.3792e-01 2.0871e-03 6.6364e-02 ( 1 ,.,.) = 1.4681e-02 -2.8576e-02 -3.3572e-03 ... -3.5963e-02 -2.6190e-03 -1.6106e-02 -3.0001e-01 -1.0300e-01 -2.9653e-01 ... -2.3369e-01 1.2638e-01 -8.7112e-02 -1.7102e-02 5.5029e-02 -2.2974e-01 ... -2.6807e-01 2.8310e-01 -2.0486e-01 ... ⋱ ... -7.0041e-01 -9.4564e-01 1.6966e-02 ... 2.6940e-01 -2.8557e-01 1.2184e-01 -3.2194e-01 1.2850e-01 1.9510e-01 ... 2.4279e-01 2.1059e-01 -5.5531e-01 6.1039e-02 -7.2539e-02 7.3692e-02 ... 1.1038e-01 1.2474e-02 -1.0941e-01 ( 2 ,.,.) = -2.4671e-02 3.1477e-02 -1.8527e-02 ... 7.6964e-03 1.1642e-02 -3.8481e-03 -2.4675e-01 -1.0154e-01 -1.0051e-01 ... 2.4323e-01 1.1612e-01 -3.4125e-01 1.4512e-03 3.5649e-01 -2.7982e-01 ... -9.7022e-01 -1.5814e-01 -4.0816e-01 ... ⋱ ... -3.7522e-02 2.2590e-02 1.0087e-01 ... 2.1287e-01 -3.6022e-01 -5.4785e-01 -5.4449e-01 -5.5681e-02 -2.9264e-01 ... 1.3152e-01 4.3553e-02 5.6911e-02 1.3604e-01 2.1225e-01 -1.1628e-01 ... 1.2776e-01 -5.8087e-02 9.8960e-02 ... (125,.,.) = -2.4497e-02 -3.7381e-03 -1.4529e-02 ... -8.6942e-03 -2.6910e-02 3.1877e-02 -6.6612e-02 1.3753e-01 -9.7248e-02 ... -1.5097e-01 1.2860e-01 -7.2238e-01 1.7896e-01 2.9136e-01 1.9650e-01 ... -2.0288e-01 -2.9753e-01 -1.3484e-01 ... ⋱ ... 9.3054e-02 1.7660e-01 1.1617e-01 ... -1.8692e-01 -2.5153e-01 -1.1928e+00 2.1528e-01 2.8482e-01 7.7727e-03 ... -3.3763e-02 -7.1726e-01 -2.4033e-01 -5.5266e-02 2.0036e-01 5.1280e-02 ... 6.3100e-03 -1.4751e-01 -8.7516e-02 (126,.,.) = -1.8795e-02 -1.4157e-02 1.8972e-02 ... 5.5805e-02 4.5948e-02 -5.0825e-03 1.0776e-01 -7.5393e-01 -4.2556e-02 ... 5.1656e-02 1.6126e-01 -4.3502e-02 -3.9349e-01 2.7921e-01 -2.4415e-01 ... 7.9829e-02 8.0948e-02 -2.1672e-01 ... ⋱ ... -4.9517e-02 -2.4103e-01 2.3441e-01 ... 6.0706e-02 -2.1137e-01 4.1497e-01 1.5899e-01 -2.3921e-01 1.2935e-01 ... 1.4634e-01 1.8446e-01 -1.5869e-01 1.4117e-01 9.5894e-03 1.1945e-02 ... -2.3359e-02 1.0747e-01 2.3452e-01 (127,.,.) = -2.3473e-02 -9.4031e-03 -1.5690e-02 ... -2.3350e-02 1.3103e-02 2.7526e-03 -3.6479e-01 -4.8598e-01 9.1681e-02 ... 1.8447e-01 -1.1075e-01 -5.3281e-02 1.7906e-02 2.0140e-01 2.0678e-02 ... 7.1253e-02 3.2277e-01 -5.8525e-01 ... ⋱ ... 2.8675e-01 -4.1663e-01 -1.8265e-01 ... 1.1046e-01 2.5976e-01 5.3310e-01 -1.2504e+00 -7.7143e-01 -8.7459e-01 ... -1.3034e+00 4.1625e-01 -4.3742e-01 -4.1942e-02 2.4496e-02 -2.3016e-01 ... -2.1655e-01 -2.3814e-01 2.8768e-01 [torch.FloatTensor of size 128x128x14]), ('module.encoder.cbhg.conv1d_banks.13.bn.weight', -0.6298 0.5734 0.6999 0.5770 0.5808 -1.0896 0.6252 0.4294 0.5675 0.4857 -0.9622 0.5191 -1.2440 0.5543 0.6442 0.4937 0.4889 0.5038 0.4638 0.6041 -1.0582 0.5609 0.5107 0.5738 1.2248 0.5928 0.6933 0.4571 0.4580 0.5306 -1.3751 0.5884 0.5455 0.5871 0.3757 0.4592 0.6180 0.5713 0.4721 0.4965 -1.3051 0.6142 0.5013 0.5209 -1.0818 0.5345 0.5207 0.5435 0.6100 0.4462 -1.2715 -1.1698 0.4006 0.5464 0.4761 0.4700 0.5575 0.5582 0.5443 0.6845 0.5603 0.4991 0.5492 0.5362 -0.9964 -1.1201 0.5724 0.5301 0.6036 -1.1646 0.4824 -1.1610 -1.1281 0.8662 0.5791 0.4489 0.4560 0.5783 0.5846 0.6263 0.4736 0.4545 0.5589 0.5207 0.5753 0.5958 0.4252 -1.1730 0.4535 -1.0682 0.5419 0.5654 0.4253 0.4681 0.4466 0.6438 -0.9214 0.6425 -0.8597 0.5556 0.7262 0.5962 0.5239 -1.0640 -1.1503 0.5666 0.4708 0.5036 0.7284 -1.0950 0.4702 0.5276 0.4963 0.5367 -1.0664 -1.8053 0.5561 0.7688 0.6790 0.4250 -0.9473 0.5733 0.5620 0.5780 -1.2931 0.5389 -1.0939 0.5530 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.13.bn.bias', -0.0561 0.1463 -0.0223 -0.1471 0.0307 -0.2083 0.3170 -0.0127 -0.0884 0.0868 -0.0308 0.0781 -0.2038 0.0971 0.2286 -0.0160 -0.0069 0.0550 -0.0146 -0.1066 -0.3287 0.0944 0.1207 -0.1343 0.1753 0.1776 -0.0046 0.1129 0.0106 -0.0064 -0.1826 -0.0928 0.1313 0.0583 0.0808 -0.0809 -0.0025 0.1123 0.0335 0.1228 -0.1225 0.0506 -0.2536 0.0916 -0.2610 -0.1203 -0.0946 0.1251 0.0648 -0.0789 -0.1172 -0.2161 -0.0128 0.0003 -0.0881 0.0959 -0.0816 0.1423 -0.0136 0.0030 0.1279 0.0361 0.0590 -0.1067 0.0429 -0.1987 0.0586 -0.2063 0.1145 -0.1234 0.0314 -0.0583 -0.0012 0.4081 0.0521 -0.0389 -0.0896 0.2290 0.0357 -0.2267 -0.0293 -0.0579 -0.0582 0.0758 -0.0022 0.1472 0.0484 -0.2202 0.0362 -0.1892 -0.0270 -0.0069 -0.1290 0.1452 0.2094 0.0482 -0.0642 0.0597 -0.1137 -0.1909 0.0971 -0.0526 0.0954 -0.0354 -0.2829 -0.0599 -0.1084 0.0489 0.1386 -0.2417 0.0975 0.1261 0.0397 0.1821 -0.2596 -0.1610 -0.1578 0.1419 0.1052 -0.1480 -0.0142 0.0103 -0.1412 -0.0728 -0.1254 -0.0122 -0.1145 -0.0517 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.13.bn.running_mean', 17.4920 1.0646 3.4161 2.5212 0.2399 3.9630 3.9160 1.2984 0.7471 0.1815 4.5067 1.1112 6.2854 1.6546 5.1655 0.5798 0.4880 2.4264 0.4774 2.8909 8.2921 0.5392 0.2935 1.0429 0.0977 1.4176 13.4034 1.8930 2.9164 1.0526 6.6331 1.1044 0.5818 3.0707 1.0960 0.6549 2.7949 0.4472 0.2479 0.8273 4.1181 1.4051 0.6379 4.2596 4.9504 2.7049 1.4155 0.6877 1.9380 1.3744 4.1370 8.1351 0.5726 0.7805 0.9762 0.7450 4.0150 2.4144 0.6979 7.2928 2.1864 0.4118 1.7665 1.2200 15.8744 1.8149 6.4295 0.7391 4.1496 6.8999 0.1356 6.1834 6.0212 3.2767 2.8325 0.4972 0.6639 4.4855 0.5347 4.7216 0.7470 0.7470 0.3932 0.7181 1.0118 0.3567 0.8297 3.6417 0.4934 8.8014 1.3052 0.7521 0.3537 1.0392 0.6150 1.0066 7.1790 3.1220 9.6088 1.3720 0.1756 3.4566 0.8404 4.0510 2.2497 0.9266 0.1584 0.6906 2.6445 1.3801 0.8279 1.8389 0.6896 0.6774 2.4618 0.1562 1.0760 3.2444 3.0993 0.6256 9.9324 2.0248 0.8545 2.7719 0.7054 0.5972 2.9989 1.9024 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.13.bn.running_var', 130.6758 16.3050 50.5998 34.0163 3.6374 64.6052 66.3693 23.5074 12.8490 2.1329 65.6858 21.4228 108.2539 30.2416 75.6314 10.1012 8.1167 46.0568 7.9003 53.8135 162.3741 9.2731 4.8722 15.9702 1.3483 24.6443 188.5522 37.6928 33.8167 18.8139 117.1080 15.8111 9.6284 57.6474 20.5505 9.7821 38.9682 7.1397 3.9510 14.9775 75.2078 25.4755 11.5768 74.0677 93.5028 47.9340 17.0871 11.7656 31.3784 27.1431 75.9914 132.0832 10.5657 14.9817 15.1234 13.5866 68.0268 39.2316 9.1437 110.1679 39.0589 5.5239 30.1566 22.4781 210.9006 28.0044 101.8984 12.3503 70.4904 124.0370 2.0574 117.9289 103.3480 53.4346 46.7365 7.8339 12.1007 79.9240 8.7048 64.1523 15.3030 14.1568 6.2242 12.8202 17.7651 6.3197 15.5565 70.9639 7.4195 156.1693 26.9980 11.9868 5.0034 19.4145 9.7820 18.3046 142.1911 47.1392 147.3607 21.9535 2.4825 64.3683 14.1823 64.5069 40.4032 16.1720 2.4418 15.4943 47.0530 25.9800 14.1696 33.9273 7.4451 12.6679 43.7350 2.3258 18.4054 65.9993 41.0610 10.8307 178.3945 29.6639 14.5938 47.5990 11.8685 9.8107 48.8423 35.8358 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.14.conv1d.weight', ( 0 ,.,.) = -9.6386e-03 3.0975e-03 -2.5502e-03 ... 1.5233e-02 2.2425e-02 -3.1626e-02 9.2219e-03 -9.3129e-02 -1.1708e-03 ... -6.1531e-01 4.4129e-02 -6.2089e-01 -3.7936e-01 -5.3634e-02 4.1889e-01 ... -2.1778e-01 5.3546e-01 -1.3575e-01 ... ⋱ ... 1.9159e-01 -3.5651e-01 -1.5055e-01 ... -2.5301e-01 -1.4246e-01 -7.7590e-02 -5.7804e-02 2.4481e-01 -5.1024e-01 ... 3.3023e-01 -1.1456e-01 -4.6667e-01 8.0287e-02 -6.9695e-02 -1.5106e-01 ... -2.3315e-01 -3.4729e-02 5.8015e-02 ( 1 ,.,.) = -1.7977e-02 -1.1838e-02 -2.9291e-02 ... 1.1945e-02 6.4109e-03 -2.0085e-02 2.2693e-01 1.1070e-01 -1.2156e-01 ... 3.9183e-02 1.7745e-01 -9.5893e-02 -4.8388e-02 -2.2365e-01 1.0250e-01 ... 1.1775e-01 5.5102e-02 -2.7197e-01 ... ⋱ ... 3.2907e-01 -3.3454e-01 1.9553e-01 ... -6.8944e-02 2.0750e-01 -5.4171e-01 1.0093e-01 -2.0825e-01 -2.1511e-01 ... -1.2759e-01 -5.0020e-01 -2.9427e-01 8.3563e-02 8.9445e-02 -1.6162e-01 ... -1.0580e-01 -3.8315e-02 1.1123e-01 ( 2 ,.,.) = 7.2138e-04 -3.1993e-02 9.4911e-03 ... -8.7489e-03 1.7652e-02 4.0780e-03 -1.3638e-02 -1.3663e-01 3.2960e-02 ... 4.7582e-02 -2.4319e-01 2.2738e-01 -7.7567e-01 -9.2321e-02 -1.1529e-01 ... -6.4581e-01 3.7072e-01 2.2190e-01 ... ⋱ ... 1.3387e-01 -1.0459e-02 -2.2410e-01 ... -7.1280e-01 9.4690e-02 -1.0131e+00 2.6751e-02 -2.9849e-01 -7.0005e-01 ... 1.3902e-01 -3.7694e-01 -1.2595e-01 -2.6215e-01 1.2256e-01 -2.8348e-01 ... 1.1236e-01 -2.6863e-01 -2.8805e-01 ... (125,.,.) = 1.3993e-02 -7.3525e-03 -2.6726e-02 ... 2.4495e-02 -4.0242e-03 3.1814e-02 1.4469e-01 1.2966e-01 2.0149e-01 ... -2.7853e-02 -6.0647e-01 -1.7823e-01 -1.1353e-01 -8.8484e-02 -5.0216e-01 ... -1.3258e-01 1.6498e-02 -3.3027e-01 ... ⋱ ... -9.3859e-02 -3.1071e-01 -1.3097e-01 ... -9.2499e-03 8.2636e-02 7.8388e-02 -4.2766e-01 -1.2047e+00 -1.3363e-01 ... -2.5266e-02 1.3353e-01 -1.5399e-03 -6.4824e-02 -9.3140e-02 -7.5200e-02 ... -4.9299e-02 -2.9521e-01 2.1217e-01 (126,.,.) = 1.1726e-02 -9.5735e-03 2.0397e-02 ... 1.7887e-02 1.1456e-02 -1.8427e-02 -4.9714e-02 1.2782e-02 7.9058e-02 ... 8.2599e-03 -1.2522e-01 -3.1907e-01 1.7756e-01 -1.2267e+00 2.0300e-01 ... 1.5960e-01 1.7278e-01 1.4348e-01 ... ⋱ ... 2.3941e-01 -1.3360e-01 -2.8874e-01 ... -1.0559e+00 2.5745e-01 8.1122e-03 -3.4058e-01 3.1620e-01 -1.1758e+00 ... 2.4224e-02 -5.0012e-02 5.8442e-02 3.5690e-02 8.2707e-02 3.0048e-02 ... -1.3737e-01 -4.6989e-02 3.6012e-02 (127,.,.) = -1.9347e-02 -1.3504e-02 8.1035e-03 ... 1.6393e-02 -2.4877e-02 -2.7250e-02 2.1160e-01 2.5268e-01 4.1301e-01 ... 4.0031e-02 8.8184e-03 -2.1723e-01 4.2787e-01 -3.9118e-01 -3.0853e-01 ... 1.4695e-01 -2.9853e-03 1.2570e-01 ... ⋱ ... -7.8121e-01 -1.0999e-01 7.8042e-02 ... 2.2160e-01 1.1807e-01 -7.1094e-01 -3.6032e-02 8.9521e-02 1.7480e-02 ... -3.0489e-01 -1.5970e-01 -6.4072e-01 1.6602e-01 1.1174e-02 -5.0506e-02 ... 2.8903e-01 1.0849e-01 -2.9205e-01 [torch.FloatTensor of size 128x128x15]), ('module.encoder.cbhg.conv1d_banks.14.bn.weight', 0.5219 0.5235 0.5185 0.4848 0.6538 0.6652 0.5997 0.4674 0.5417 0.5630 0.4184 0.5542 0.5512 0.8797 0.5544 0.4707 0.7109 -1.5327 0.4725 0.4867 0.4607 0.5565 0.4141 0.5092 0.6183 0.6033 0.6924 0.4890 -1.0302 0.4388 0.5975 0.5200 -1.1502 0.4562 0.5402 0.7230 0.4460 0.5195 0.4628 0.5026 0.5357 0.5895 0.5508 -1.0624 0.5107 0.7285 0.6568 0.4984 -1.0707 -1.3855 0.7256 0.4902 0.5819 0.1455 0.5163 -1.0136 0.4848 0.5624 -0.9739 0.5099 0.6078 0.5977 0.5011 0.6138 0.4872 0.4663 0.6338 -1.1063 0.4789 0.5408 0.5189 0.5583 0.0727 0.4613 0.7490 0.4590 -1.2506 0.5050 0.4902 0.4852 0.4187 0.5186 0.5131 0.5070 0.5693 -1.4681 0.6262 0.5092 0.5781 0.5648 0.5477 0.5868 0.5475 -1.1519 0.6011 0.5648 0.5236 0.4696 0.5879 0.5043 0.5454 -1.2099 0.4637 0.4895 0.5363 0.5922 0.5929 -1.2131 0.5626 -1.0432 0.5450 0.5449 0.5089 0.4572 0.6318 -1.2045 0.7876 0.5422 0.5030 0.6382 0.4502 0.6880 0.4789 -1.1577 -1.0733 0.7786 0.5372 0.5880 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.14.bn.bias', -0.0542 0.1266 0.1778 -0.0647 0.1476 0.0771 -0.1118 0.0865 0.0586 0.0690 -0.0246 -0.0222 -0.0052 0.0949 -0.0511 -0.0623 0.0722 -0.2330 0.0828 -0.0208 -0.0656 -0.1528 0.0490 0.0603 0.1099 0.0511 0.0996 -0.1498 -0.3277 -0.0455 0.0740 0.1005 0.0992 0.0310 -0.0707 -0.0016 0.0342 -0.0672 -0.1504 -0.0168 0.0355 -0.0045 -0.0201 -0.0226 0.0032 -0.1744 -0.0375 -0.0445 -0.1340 -0.2778 0.2175 0.0942 0.1344 -0.0153 0.1511 -0.1204 -0.0295 0.1024 -0.1679 0.0331 0.0851 0.0104 0.0666 -0.0171 0.1244 -0.0577 -0.1414 -0.0597 -0.0504 0.0238 0.0037 0.0110 0.0093 0.0258 0.0402 0.0357 -0.2347 -0.0108 0.0670 -0.2170 -0.0681 0.0122 -0.0017 -0.0375 -0.0331 -0.1268 0.0359 0.0069 0.0363 0.0017 0.0716 0.0311 0.0447 -0.2538 -0.0221 0.0460 -0.0071 -0.0262 0.1562 -0.0758 0.1113 -0.0886 0.0382 -0.0714 -0.0329 0.1428 -0.0073 -0.1931 0.1464 -0.1238 -0.0247 -0.0744 -0.0178 0.0669 0.1170 -0.1239 0.1023 -0.0063 -0.1049 0.1635 -0.1547 0.1345 -0.1381 -0.0946 0.0067 0.0341 -0.0042 0.0046 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.14.bn.running_mean', 1.6630 0.6570 1.0669 1.2803 2.3932 0.7346 2.2573 0.3021 0.6393 0.7293 0.2556 3.0511 1.7600 0.1173 1.2432 0.3600 1.1406 0.8445 0.6656 0.8301 1.0421 2.6189 0.3224 1.0295 0.8101 1.3762 0.0762 0.6325 5.1418 0.8932 0.9764 0.2143 7.0799 0.4281 0.5141 2.6536 1.4388 1.8036 1.0811 2.5022 0.9273 1.4977 1.5585 15.1198 0.6438 12.4013 2.5994 1.4340 0.5407 3.4046 4.0653 0.5159 2.6401 7.2569 0.2240 8.0407 0.7292 0.8736 10.5882 0.9707 2.2749 8.4105 1.3705 0.7365 0.4197 2.5428 2.4664 0.0913 0.7941 2.3891 0.6959 3.5539 15.5003 0.9059 2.1630 0.6838 0.6097 0.3797 0.5167 2.0228 1.0823 0.1929 0.4074 0.7406 0.5121 0.4157 8.3492 0.8532 0.5392 1.6911 0.3076 0.5448 0.4696 3.3314 0.4010 0.5486 2.4953 1.9634 0.4421 0.6672 7.0037 5.8962 0.4794 1.3660 0.8045 1.1081 0.6598 1.7219 4.9092 7.6263 2.3817 1.0561 0.1707 0.0651 9.3424 7.5609 0.5790 1.7707 1.0401 1.9168 1.4665 8.6957 0.7398 5.2830 6.6171 0.4233 1.1271 2.6168 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.14.bn.running_var', 32.5494 10.8456 22.1475 21.1634 45.5213 16.8906 33.3678 4.3462 12.7202 12.3183 4.3537 56.0020 34.1852 1.7884 18.7564 5.2967 18.5741 13.8993 12.8565 14.8653 19.3688 53.3120 5.2661 18.4368 15.0291 25.6777 1.2540 11.3215 92.9001 17.1117 19.6549 3.8317 118.1074 7.4878 9.8521 50.2246 27.7167 33.7030 19.7347 48.3847 15.2201 27.6255 28.1951 228.9194 12.7087 193.8901 49.8693 28.7549 8.2892 60.6948 58.8147 9.6232 51.1853 35.7705 4.3653 144.7705 11.7567 16.4082 171.8419 17.9198 42.9024 142.2819 16.0703 12.2012 7.6679 47.4451 50.2608 1.2489 15.4597 45.2365 12.3541 55.4427 74.4852 19.1685 39.5750 11.1432 10.0188 7.5313 10.2896 38.4401 19.4796 3.1335 6.7418 14.2166 8.8405 6.7862 106.6172 16.2704 7.6732 34.2658 5.4813 11.2076 8.8754 56.3180 5.0904 8.6993 48.9169 35.8389 7.9664 10.8085 121.9062 108.6408 9.0403 27.3738 11.2957 20.1465 13.1769 30.6585 90.7982 147.7374 43.3435 20.0636 3.1555 0.8837 141.9837 114.1726 10.1553 29.6741 20.3391 26.6045 31.1960 130.3760 13.1262 90.5731 98.7055 6.0907 16.6492 49.3750 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.15.conv1d.weight', ( 0 ,.,.) = 2.5623e-02 8.7796e-03 1.2732e-02 ... -2.4308e-02 -3.4531e-02 2.3106e-02 3.2059e-01 3.7947e-02 -1.5134e-01 ... 4.1423e-02 1.4764e-02 -7.1850e-01 -3.0223e-01 -2.4837e-02 -3.8476e-01 ... 2.5859e-01 -2.4867e-01 -2.0122e-01 ... ⋱ ... -3.2559e-01 -2.0264e-01 -2.0674e-01 ... -4.4623e-01 6.1603e-02 -1.4264e-01 2.1641e-02 3.0297e-01 -1.5960e-02 ... -6.5608e-01 1.6242e-01 1.1450e-01 7.3044e-02 1.4630e-01 8.5614e-02 ... -9.3979e-02 -3.5798e-02 -1.3267e-01 ( 1 ,.,.) = 1.8823e-02 -3.0998e-02 -8.2396e-03 ... -2.8736e-02 9.6009e-04 2.1219e-02 -2.3189e+00 -7.2369e-02 2.4549e-01 ... -9.9233e-03 -5.9065e-01 6.0773e-02 1.1587e-01 1.7568e-01 1.6188e-01 ... -7.3982e-01 -1.7959e-01 -9.6831e-01 ... ⋱ ... 1.1053e-01 -3.7228e-01 5.2072e-02 ... -6.5126e-01 -2.0501e-01 -6.8167e-02 -2.3758e-01 -5.0526e-01 -2.4295e-01 ... 3.8033e-02 -5.6007e-03 1.5756e-02 1.6876e-01 -1.5944e-01 -3.1784e-02 ... -4.3685e-01 -1.4064e-01 8.8937e-02 ( 2 ,.,.) = -3.0412e-02 -9.0673e-03 1.5075e-02 ... -1.4950e-02 3.6065e-03 -1.4769e-03 -3.9896e-01 1.4230e-01 -6.3218e-02 ... 4.4698e-01 -7.1939e-02 -3.8281e-02 -5.2769e-01 -3.1808e-01 3.9282e-03 ... -2.3788e-01 9.1319e-02 -6.3756e-01 ... ⋱ ... 2.7757e-01 3.0037e-01 1.9336e-01 ... 8.2704e-02 3.3772e-01 1.7109e-01 -5.2782e-01 -5.2583e-02 -4.1151e-01 ... 2.9238e-01 -1.5851e-02 1.7705e-01 -3.1717e-02 -8.4575e-02 7.6925e-02 ... -4.8261e-02 -1.1192e-02 -1.3210e-01 ... (125,.,.) = -2.4106e-02 6.6282e-03 -1.8465e-02 ... -2.3786e-02 2.1597e-03 -1.3986e-02 -1.4231e-01 3.4451e-02 1.0366e-01 ... -4.9271e-01 -1.2217e-01 -1.0208e-01 -2.5530e-01 1.1695e-01 3.4305e-02 ... 5.0204e-03 -3.1896e-01 -9.6129e-02 ... ⋱ ... 1.1271e-01 1.5310e-01 1.8242e-01 ... -1.4086e-01 4.2609e-02 -6.2007e-01 -1.0876e-01 -6.2901e-01 -3.5915e-01 ... 1.3079e-01 -6.1319e-01 5.1483e-02 -3.8345e-02 -5.3109e-02 -5.6655e-02 ... 1.9094e-02 -7.8271e-03 5.1170e-03 (126,.,.) = -5.0714e-03 -1.3371e-02 -5.9716e-03 ... -4.7321e-04 -1.5764e-02 2.2393e-03 -2.1972e-01 -1.0131e-02 -7.3959e-01 ... -1.0185e-01 -8.5600e-01 2.1385e-02 2.9147e-01 1.6190e-01 1.7895e-01 ... -1.2549e+00 1.2088e-01 -3.7305e-01 ... ⋱ ... 3.9247e-01 -5.5293e-01 1.1685e-01 ... 2.6583e-01 -2.7808e-01 1.0660e-02 1.9686e-01 -2.6261e-01 -3.2170e-01 ... -2.5084e-01 -3.7867e-01 -8.2786e-01 1.1004e-01 -1.3190e-01 -3.9576e-02 ... 9.9157e-03 -1.3757e-01 -4.7838e-02 (127,.,.) = -2.9438e-03 -6.5579e-03 1.0269e-02 ... -2.6457e-02 -1.8135e-02 8.6984e-03 -1.8795e-01 -2.1250e-02 -1.5791e-01 ... 1.1983e-01 1.2248e-01 -1.7003e-01 -4.6693e-03 -2.2383e-01 2.8204e-02 ... 2.2932e-02 -1.6864e-01 -4.6507e-01 ... ⋱ ... -1.7754e-02 1.6717e-01 -2.0567e-01 ... 1.1366e-01 -1.0704e-01 5.5078e-02 3.0380e-01 -1.5191e-01 2.2612e-01 ... 6.7546e-01 6.6147e-02 -1.0390e-01 1.0645e-01 7.5489e-02 1.0369e-01 ... 5.6466e-02 1.0624e-01 3.4812e-02 [torch.FloatTensor of size 128x128x16]), ('module.encoder.cbhg.conv1d_banks.15.bn.weight', 0.5354 0.4807 0.4978 0.5211 1.1324 0.4205 0.4623 0.4773 0.5666 0.5067 -1.3785 0.4616 0.4947 0.5664 0.5703 0.6450 0.5456 0.5743 0.6894 0.5498 0.5564 0.4933 0.6864 -1.2691 0.5701 0.4165 0.6027 0.5206 0.7312 0.4842 0.6203 0.5053 0.5330 0.5511 0.4457 0.5000 0.5081 0.6040 0.5334 0.4590 0.5156 0.4433 0.5143 0.6415 0.5571 0.5321 0.6534 0.8133 0.4513 0.7035 0.7359 0.4414 0.0663 0.5566 0.6337 0.4520 0.4172 -1.3365 -1.2600 0.4947 0.5563 0.5269 -1.1807 0.5702 0.5892 -0.7873 0.7370 0.4751 0.4577 0.6010 0.5396 -1.1463 0.5493 0.5495 0.6661 0.4422 0.5413 0.5884 -1.1723 0.5093 0.5242 -1.0168 0.5333 0.5542 0.7148 0.6392 0.9323 0.4502 0.5671 0.5374 0.5238 0.5455 0.5135 0.5911 0.4962 0.4329 0.6753 0.4411 1.1292 0.5388 0.5069 0.7309 0.7884 -1.3002 0.8160 0.4713 -1.1157 0.5198 0.4954 0.6287 0.5222 0.5438 0.4195 0.5281 0.5035 -1.0355 -1.0846 0.4906 0.5894 0.6320 0.5293 0.5036 0.6611 0.4767 0.5030 -1.1010 0.4676 -1.1475 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.15.bn.bias', 0.0524 -0.0104 -0.0355 -0.0305 0.1230 0.0507 0.0776 -0.1148 -0.0288 -0.0372 -0.1988 0.1076 0.0862 -0.0735 -0.0246 0.1221 -0.1754 0.0359 0.1625 0.0628 0.1405 -0.0864 0.1173 -0.1877 0.0042 0.1162 -0.0527 0.0511 0.0336 0.0657 0.0162 -0.0186 0.1740 -0.0569 -0.0111 0.0413 -0.1015 0.0365 0.0929 -0.0713 0.1852 -0.0286 0.0135 0.1054 0.0419 0.2542 0.0080 0.0018 -0.1296 0.0003 -0.0428 0.2202 0.0091 -0.0100 0.2106 0.2011 -0.0366 -0.2762 -0.2564 0.0600 0.1685 -0.0183 -0.2047 0.0162 0.0286 0.0164 0.2825 0.0731 0.1029 0.0540 0.0565 -0.2636 0.1703 0.0017 -0.0392 -0.0126 -0.0149 0.0537 -0.1298 0.0554 0.0028 -0.0728 0.0218 -0.1667 0.0594 0.1006 -0.0431 -0.0145 -0.0626 -0.0886 -0.0074 0.0046 0.0371 0.0730 0.0990 0.0714 0.0552 0.0027 0.0155 0.0420 -0.0998 -0.0555 0.0023 -0.2370 0.1692 0.1662 -0.2809 0.1173 0.0028 0.0015 0.0263 0.0441 0.0612 0.0263 0.1237 -0.0759 -0.2447 0.0603 0.0570 0.0240 -0.0808 0.0285 -0.1290 -0.1056 0.0802 -0.0803 0.0163 -0.0485 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.15.bn.running_mean', 0.9682 0.4254 2.4118 0.5825 0.0772 1.0123 0.1869 0.3811 2.3747 0.4519 3.6932 0.4964 0.1543 0.5014 3.8483 8.6164 0.7243 2.0867 5.3134 0.4526 0.7189 0.3508 9.6724 13.2361 0.7571 0.3200 1.0713 0.3110 5.7738 1.0436 2.2685 0.6763 0.9107 1.6475 0.7067 0.1283 1.1515 0.4261 3.3797 0.8306 0.3278 0.4212 1.1886 0.3921 1.7379 7.5866 1.2422 5.0648 0.9494 4.7285 3.9109 1.1449 0.0000 0.3054 7.2068 0.8918 1.2764 6.3968 3.8331 0.4384 1.5619 1.4865 1.6991 2.0914 1.7627 0.0007 3.1607 0.6808 0.3569 1.5402 0.3563 11.9602 1.8894 1.0778 3.6062 0.5507 2.2300 3.4993 3.8718 0.4806 0.9608 0.1307 1.4573 1.5545 0.1142 0.8029 0.5071 0.8431 0.9313 1.3237 1.2521 1.7532 0.3991 1.4072 1.0194 0.3311 2.3480 0.5486 0.0001 0.5424 1.0699 0.4954 8.2006 2.5720 3.0251 0.6186 11.2387 1.3537 1.1902 1.2435 1.0801 1.3664 0.3012 0.1557 1.1768 0.0840 0.5783 1.1622 0.5601 4.6154 0.5846 0.7167 5.6987 0.9826 1.2777 6.8550 0.3461 7.7890 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_banks.15.bn.running_var', 19.3441 7.2210 52.1670 10.0785 0.8694 19.9234 3.0400 7.1023 48.8550 6.8233 79.8547 9.1791 2.2923 8.2448 73.9919 141.0537 14.1035 40.1774 91.0158 9.2547 12.4517 5.1783 157.9901 231.3361 13.6326 5.5508 26.2938 6.0275 89.2656 19.1362 46.1800 10.6488 18.0966 33.1807 12.2791 1.9163 23.7852 8.4300 72.0607 14.9116 5.9400 6.2749 21.8647 6.2331 34.8539 116.9517 23.9962 107.2270 19.6034 88.5838 85.4811 23.7413 0.0000 5.1239 124.2238 15.7109 24.0241 124.8482 79.0445 7.0774 30.6366 26.3645 31.4898 44.0007 36.0663 0.0021 48.7602 11.8073 6.2930 23.4775 7.3132 242.1393 39.2080 16.3218 48.5017 9.5221 43.5962 52.8122 74.0767 7.5589 18.0335 1.5151 29.8387 33.0634 1.5266 11.5707 10.7495 16.7786 20.2605 22.8422 25.2861 35.6485 8.2998 25.4361 20.3728 6.9308 45.0786 10.2328 0.0008 8.8821 20.8823 10.3538 142.8971 43.0934 52.4043 10.1972 201.6355 27.1906 23.0948 23.9127 19.4774 23.5042 5.9305 2.9476 22.1587 1.1825 9.4386 23.9990 10.7355 100.3000 9.3527 13.9741 128.4238 18.1951 23.8309 136.7777 6.1809 149.2479 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.0.conv1d.weight', ( 0 ,.,.) = 3.0814e-01 4.3928e-02 5.9386e-02 1.1993e-01 5.8452e-02 4.1407e-01 -3.3929e-01 -2.6133e-01 -3.4512e-01 ⋮ 2.6667e-01 6.0643e-01 -3.8510e-01 -3.8135e-01 -1.7756e-01 1.8003e-01 -3.7152e-02 -2.8518e-03 -5.0003e-02 ( 1 ,.,.) = 6.7310e-03 -1.9074e-01 -4.0766e-01 -3.7384e-01 3.8173e-02 -8.6705e-02 -1.4769e-01 -4.1923e-02 -1.0642e-01 ⋮ -6.6837e-02 6.2199e-01 2.5974e-02 -1.1596e-01 -2.3071e-01 7.0891e-02 -6.6795e-01 2.1777e-01 1.5540e-01 ( 2 ,.,.) = -2.3889e-02 1.2623e-02 -1.6646e+00 3.5820e-01 -2.0916e-01 1.3814e+00 -1.3201e-01 -1.2885e-01 2.0583e-01 ⋮ -1.4197e-01 1.5728e-01 -4.6813e-02 4.1027e-02 1.1533e-01 -2.0033e-01 1.0978e-01 -1.6570e-01 -8.2350e-02 ... (125 ,.,.) = -9.2253e-03 -4.7290e-01 -6.3990e-01 1.8074e-01 5.8933e-01 3.6671e-01 -2.7379e-01 -1.4911e-01 -1.0068e-02 ⋮ 4.6931e-01 3.3843e-02 -9.3615e-02 1.8630e-01 2.2686e-01 -5.0762e-02 4.6377e-01 5.5982e-01 2.5739e-01 (126 ,.,.) = -3.0432e-01 2.8374e-01 -2.8736e-02 -6.6618e-02 -3.0130e-01 1.1153e-01 1.0508e-01 -2.2993e-02 6.0722e-02 ⋮ -1.5460e-01 3.0680e-01 -2.1939e-01 1.0348e-02 1.1076e-01 -2.4765e-01 -1.2953e-01 2.0722e-01 -2.8834e-01 (127 ,.,.) = -3.5357e-01 5.5979e-03 -1.1791e-01 -1.3253e-01 -1.0162e-01 -4.8239e-01 -5.9012e-02 1.8944e-01 -2.2635e-01 ⋮ 1.0666e-01 3.3147e-01 -3.4622e-02 -2.3130e-02 3.4870e-01 2.0421e-01 -3.4532e-01 4.7640e-01 -1.8542e-01 [torch.FloatTensor of size 128x2048x3]), ('module.encoder.cbhg.conv1d_projections.0.bn.weight', 0.6646 0.8037 0.5687 -0.3298 0.3331 0.5846 0.7305 0.5224 0.5024 0.3957 0.4767 0.6206 0.7052 -0.4729 0.4754 0.4523 0.3607 0.5262 0.5655 0.2685 0.5239 0.4226 0.7581 0.4334 0.9309 0.3395 0.3421 0.4946 0.6343 0.3663 0.5080 0.5530 0.3504 0.3564 0.5745 0.6107 0.4151 0.6471 0.5921 0.4796 0.7057 0.4181 0.4424 0.8080 0.5520 0.5493 0.7327 0.4723 0.5089 0.4511 1.0728 0.7614 0.4652 0.5035 0.5966 0.5513 0.6273 0.4190 0.6361 0.6292 0.4825 0.5430 0.4997 0.6707 0.3601 0.4622 0.6502 -0.3914 0.6328 0.7951 0.5793 0.5665 0.8058 0.4266 0.6564 0.6774 1.7108 0.5781 0.4265 0.4686 1.0867 0.4749 -0.0049 0.6564 -0.5044 0.5283 1.3802 0.4614 0.5294 0.5872 0.4923 0.5253 0.6028 0.5324 0.5360 -0.4594 0.6569 0.5223 0.5158 0.6301 0.2296 0.4971 0.5606 0.3990 0.1675 0.6395 0.8900 0.7790 0.6523 0.4834 0.4640 0.7101 0.4878 1.0137 0.2852 0.3972 0.5455 0.4692 0.2003 0.5367 0.4520 0.5018 0.5938 0.4570 0.5801 0.5462 0.4063 0.5967 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.0.bn.bias', 0.4170 -0.0814 0.5861 -0.9265 0.1705 0.4536 -0.2573 -0.1538 1.0144 0.1783 -0.6816 0.4903 0.0312 1.1306 0.2297 -0.2553 -0.0439 0.6143 -0.0467 -0.5743 -1.1164 0.5284 0.6900 -0.6266 -0.3952 0.1032 0.4301 0.5007 -0.7266 0.0655 -0.5523 -0.0068 0.4664 -0.9305 0.7540 0.0923 0.0029 0.1134 -0.3432 0.3094 0.8301 0.0221 0.7642 -0.2436 0.4244 0.2470 0.2969 -1.0682 1.0367 0.3307 0.8700 -0.4417 0.4280 0.7529 0.9893 -0.3281 0.2944 0.2873 0.6717 0.1098 -0.8139 0.2548 -1.1177 0.4356 0.1965 0.2115 0.1635 0.5615 0.7445 0.3350 0.3104 0.1556 0.2709 0.1253 0.6787 -0.4149 -0.0092 -0.0661 0.5637 0.7340 0.5761 0.8845 -1.7518 0.9397 -0.5018 0.5829 -0.4377 0.7131 0.1205 0.4713 0.5456 0.7895 -0.1775 0.7853 0.5514 0.2082 -0.0249 -0.0716 0.6822 0.2103 0.7948 0.3662 0.0700 0.0494 0.3584 0.3580 0.9084 0.8465 0.0509 0.0737 0.6599 0.3823 0.1007 -0.0749 -0.0053 -0.0542 0.0226 1.2841 -0.1019 -0.1142 -0.7577 -0.0020 -0.1057 0.4090 -0.8755 0.6086 -0.6107 0.3299 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.0.bn.running_mean', 0.2210 0.0529 1.3052 0.0680 0.2829 4.6885 0.0422 0.3684 0.4773 0.2964 0.0397 0.1658 5.2520 1.7325 10.4315 0.4226 0.0934 4.8479 0.8025 0.1040 0.2436 0.0433 0.0000 20.9054 0.0000 0.0833 0.0000 4.5476 3.2742 1.0132 0.2117 9.1506 0.0000 0.0000 1.2540 18.8371 0.0970 0.5599 0.0559 6.6174 6.5105 0.1380 0.0998 1.5197 21.1912 4.4543 5.1361 0.0000 0.0000 10.7140 0.3029 12.9685 6.7112 0.1514 5.7137 0.2961 0.1161 5.8437 20.4616 0.4551 0.0253 0.3599 23.7951 0.0565 1.5301 1.6234 0.0763 14.0220 19.9584 28.1291 4.8282 0.0908 2.7046 1.5839 0.0581 15.3864 0.0195 0.1280 22.8007 0.0701 0.0049 0.0750 0.0000 11.3023 0.7947 5.9371 0.0394 0.0097 2.5397 4.2118 44.7946 5.3529 6.1608 33.0016 0.1493 1.0825 23.0581 0.0943 6.2559 0.2262 0.0000 5.2692 0.6173 0.0426 0.0000 15.9105 0.0000 2.2465 0.0751 7.9508 0.0000 0.0000 0.1523 0.0193 0.0817 0.1041 1.2988 1.0068 0.0189 22.4981 5.0923 0.2965 5.1567 5.4683 0.0821 2.8410 0.2114 0.1182 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.0.bn.running_var', 10.1925 2.7325 40.7506 2.2552 5.7887 125.8415 1.2202 11.9626 13.4820 8.2248 1.1643 1.7009 137.7545 42.2349 215.7925 10.8992 4.0058 130.2910 21.7233 1.8677 6.0895 1.6989 0.0000 275.7753 0.0000 2.8889 0.0000 106.1226 101.9748 22.6417 6.4413 208.0539 0.0000 0.0000 33.9741 362.8148 3.0024 18.1610 2.1819 145.9947 149.0669 3.7496 4.9660 36.9479 307.1859 99.5560 129.2765 0.0005 0.0000 262.8981 16.3489 288.6512 158.7979 7.0622 123.5744 5.8243 6.8162 113.7955 358.1396 17.4511 0.7085 12.1286 283.4341 2.8528 38.1696 37.2666 3.2165 239.5753 336.7714 544.2413 116.3443 4.9104 81.7024 37.3843 3.3905 341.0682 0.5235 6.1596 301.3521 2.4722 0.0616 2.5850 0.0000 276.1491 18.7097 161.9815 1.8596 0.1399 57.3707 119.9483 365.9788 149.1642 156.6599 396.5631 4.4044 23.1890 556.5134 3.8096 158.6554 10.4037 0.0000 128.4091 16.4895 1.0318 0.0000 304.3759 0.0005 69.4994 3.5712 144.1262 0.0000 0.0000 5.1930 0.7164 1.2742 3.1561 40.9657 25.8127 0.4160 376.6815 91.2860 8.4013 129.7561 116.1720 3.4888 56.6032 7.4175 6.8700 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.1.conv1d.weight', ( 0 ,.,.) = -7.9624e-02 9.5472e-02 -6.5400e-02 -9.0511e-02 -1.2361e-01 -2.9827e-01 -1.0070e-01 -4.9918e-01 1.5549e-01 ⋮ -2.0914e-01 8.3182e-01 3.5420e-01 -9.0347e-02 -1.9814e-01 1.5357e-01 2.0842e-01 -1.6191e-01 -1.3249e-01 ( 1 ,.,.) = -2.6338e-01 2.0028e-01 -5.5048e-01 -6.3165e-02 2.0704e-01 4.7220e-01 1.5427e-01 -2.4553e-01 -2.8350e-01 ⋮ 2.4556e-02 -2.8573e-01 6.1484e-02 2.4085e-01 5.7162e-03 3.0859e-01 3.4030e-01 1.2176e-01 1.1497e-01 ( 2 ,.,.) = 1.2434e-02 4.0604e-01 2.5618e-01 8.7541e-02 -5.6916e-02 -5.7893e-01 -1.9563e-01 -7.3344e-02 -6.0057e-02 ⋮ 1.0950e-01 3.1177e-02 -1.4969e-03 -8.3942e-02 -1.2569e-01 -2.7982e-01 1.3968e-01 -2.1056e-01 -3.8614e-01 ... (125,.,.) = -2.0339e-01 -9.2574e-02 1.9148e-01 1.9614e-01 1.8801e-01 5.5208e-01 -5.3128e-02 4.0566e-01 -3.9731e-01 ⋮ 2.4068e-01 3.5523e-02 -1.7478e-01 1.3921e-01 -3.7821e-01 -7.7398e-02 3.1290e-01 1.0250e-01 1.6110e-01 (126,.,.) = 1.8149e-01 1.4161e-01 -2.1990e-01 9.2477e-02 1.8051e-01 3.1933e-01 4.1670e-02 6.6928e-02 2.5147e-01 ⋮ -2.6391e-01 -7.2917e-02 -5.0047e-02 1.1001e-01 3.4186e-01 2.3474e-01 1.2854e-01 1.6606e-01 -1.7315e-02 (127,.,.) = -2.3090e-01 1.5042e-01 6.3597e-01 -3.4157e-02 3.5767e-03 3.4091e-01 4.9821e-02 -5.0298e-02 5.0703e-02 ⋮ 2.7414e-02 1.3014e-01 2.9182e-01 1.6557e-01 4.1488e-01 1.0505e-01 8.3979e-02 -3.8755e-01 -1.7537e-02 [torch.FloatTensor of size 128x128x3]), ('module.encoder.cbhg.conv1d_projections.1.bn.weight', 0.4536 0.3744 0.3930 0.4028 0.1871 0.3296 0.5046 0.3456 0.5806 0.6941 0.1379 0.4948 0.3155 0.2385 0.2987 0.4990 0.3377 0.4929 0.3430 0.6483 0.4081 0.5141 0.8667 0.7486 0.3455 0.8252 0.4224 0.3750 0.6520 0.3503 0.7377 0.6230 0.4638 0.7627 0.4598 0.4961 0.6081 0.4609 -0.2415 0.3845 0.6408 0.6914 0.7386 0.5714 0.2910 0.6573 0.5436 0.2170 0.4840 0.7204 0.2262 0.1596 0.5102 0.3687 0.4028 -0.2457 0.4936 0.4966 0.7913 0.4287 0.5383 0.5517 0.6433 0.6154 0.1992 -0.2114 0.3606 0.1503 0.6788 0.5619 0.5162 0.5477 0.5638 0.2326 0.5656 0.6506 0.4850 0.3377 0.3538 0.3885 0.2085 0.3807 0.1975 0.5298 0.8238 0.8133 0.3542 0.2515 0.8385 0.6650 0.6492 0.4470 0.2840 0.1060 0.4425 0.8025 0.4587 0.5400 0.7236 0.6267 0.6552 0.8278 0.2184 0.2870 0.2500 -0.2185 0.5200 0.5064 0.2679 0.6613 0.5352 0.4196 0.5948 0.4679 0.4783 0.4823 0.6754 0.2985 0.1426 0.1292 0.4998 -0.1704 0.5583 0.1927 0.4749 0.2017 0.5920 0.3369 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.1.bn.bias', 0.0224 -0.2522 0.2020 0.2377 0.0741 -0.0521 -0.1118 -0.1838 0.1101 -0.0721 0.0287 0.2076 -0.0525 0.0451 -0.0014 0.0122 0.1394 -0.0573 -0.1542 0.0116 -0.0492 0.0087 -0.0062 -0.1424 0.1519 -0.0766 -0.1431 0.1400 -0.2043 -0.0008 -0.0737 -0.0599 -0.0529 -0.0173 -0.1263 -0.0410 0.0255 -0.1789 -0.1656 -0.0316 -0.0390 0.0087 -0.1299 -0.0694 -0.1189 -0.0169 -0.0213 -0.0727 -0.0509 0.0268 0.0202 0.1228 -0.0614 0.0028 -0.2373 0.1186 -0.0383 -0.2088 0.0736 0.1062 -0.0432 0.0197 -0.0035 -0.1340 -0.0258 -0.5595 0.0046 -0.0175 -0.0872 -0.1930 0.2642 -0.0697 0.0933 0.0899 -0.0901 0.0514 -0.1184 -0.0806 -0.0036 0.0551 -0.1512 -0.3144 0.0791 0.0281 -0.0380 -0.1759 -0.0889 -0.0217 -0.1630 0.1199 0.1335 0.0501 -0.0056 -0.0496 -0.2410 -0.1760 -0.1461 0.1274 -0.0090 -0.0659 0.0040 -0.0251 -0.1979 -0.0803 -0.0057 -0.0306 0.0850 0.1497 -0.0224 -0.0528 -0.1483 -0.0866 -0.0954 -0.3011 0.0900 0.0274 -0.1553 -0.0255 0.0925 0.0752 -0.0417 -0.0615 -0.1534 -0.1725 0.0274 -0.0475 0.0416 0.1518 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.1.bn.running_mean', -3.5261 1.9191 -0.1514 5.6740 2.1472 2.9415 3.8381 0.5452 -2.8366 -0.8421 -4.5869 -9.6256 8.2001 6.1331 6.1160 4.5427 6.6272 5.4206 -5.9134 9.2229 -0.2210 -8.7986 4.6308 10.5411 -0.4133 1.3050 1.7876 -0.1916 -13.8851 6.0357 -3.5755 -4.1219 -5.5356 1.2398 -11.9075 -5.1448 2.4730 6.0906 -0.5620 5.0333 17.5819 7.1774 -8.2541 2.5919 11.7343 3.0234 -8.7040 9.8797 0.1937 -7.9667 0.2270 1.2282 -1.4745 -1.1372 1.0151 -15.1402 11.5094 2.8092 10.8497 6.5561 -2.9229 5.3241 1.9052 6.8665 -2.0176 -1.5431 -2.9267 6.5498 -8.8167 -0.1278 18.3676 -3.3496 4.1914 7.4549 -2.3751 2.1590 6.0434 -3.1926 11.5383 -2.7574 7.1369 -6.9124 -4.5749 2.3652 4.7353 3.2585 9.8200 -0.7865 -9.5018 7.3156 1.1741 -2.2538 5.6080 -1.7893 -12.8355 -6.6571 3.1501 2.1819 8.3854 -9.3287 -0.4803 -12.0142 -0.7332 0.8964 7.8064 0.6350 -5.6905 5.0025 0.1746 -0.8536 5.9803 12.3864 4.7458 1.7846 3.8921 4.2181 -9.9728 0.8573 1.1001 6.4840 2.7009 -10.3976 -2.7385 -2.1108 12.0861 3.0430 0.1592 6.1069 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.conv1d_projections.1.bn.running_var', 15.6029 16.0301 18.0898 9.2120 35.9527 16.2714 38.2591 22.0204 21.8823 18.5069 18.2544 12.6123 11.2886 13.9873 15.9373 11.0099 10.1090 40.7009 22.7567 11.3390 19.0217 27.5536 27.9724 29.1658 9.1891 34.6404 13.5092 11.2521 57.5699 9.4996 21.5500 36.2159 17.4185 32.5253 16.5135 12.8911 19.9967 21.7646 19.1525 22.9861 16.1516 30.4740 44.5169 17.5279 15.5373 14.6567 31.9899 13.7391 12.5149 20.9873 9.4325 25.0610 11.6550 13.4542 13.1076 17.1637 56.5875 27.5515 19.5355 10.7034 34.9299 33.0139 20.3878 31.6955 14.9919 16.4654 14.5991 24.7710 14.9264 24.2895 18.3864 18.8397 23.3574 18.7483 13.5217 41.0459 24.1666 24.7859 23.9255 25.7402 14.9454 10.2282 30.3052 16.6936 32.2529 38.8537 20.7137 29.7605 19.3282 17.5068 18.4878 46.9227 14.5460 49.6237 9.6516 36.9525 16.7046 12.6520 20.6808 39.4360 23.3886 29.7674 16.2744 13.4464 11.3885 8.5819 19.9894 10.8315 13.5345 22.2764 16.8990 25.8441 21.5513 9.1293 24.4879 19.5241 32.1755 13.3793 18.0319 35.8084 18.4339 13.5934 22.9312 23.8098 21.8152 9.1292 21.1067 13.2162 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.pre_highway.weight', 7.6424e-03 2.5910e-02 5.8759e-02 ... 2.5778e-02 5.1379e-02 -4.2433e-03 -2.4070e-02 7.6139e-02 4.9189e-02 ... 6.4792e-02 4.8108e-02 -7.3651e-02 -3.4366e-02 -7.9627e-02 -4.4232e-02 ... 3.5146e-02 -8.0961e-02 -5.9398e-02 ... ⋱ ... 7.6387e-02 1.1982e-02 -7.0450e-04 ... 5.7644e-02 5.5235e-02 -7.8248e-02 6.9834e-02 -7.8794e-02 6.8049e-02 ... 4.9438e-02 2.3717e-02 5.5143e-03 -2.9588e-02 -8.4442e-02 2.6452e-02 ... -4.1797e-02 4.1798e-02 2.6897e-02 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.0.H.weight', -3.7753e-01 -1.1873e-01 -7.2885e-02 ... -1.3706e-01 -5.0812e-01 -3.1839e-01 -3.1247e-01 -1.9993e-01 -8.0211e-01 ... -3.2072e-01 2.9533e-01 -2.5461e-02 6.9581e-02 -6.6954e-02 -1.6043e-01 ... -1.3403e-01 -3.1564e-01 -3.1844e-01 ... ⋱ ... -3.6133e-01 1.4214e-02 1.2277e-01 ... -3.4546e-01 1.7992e-01 -1.3199e-01 -9.9197e-02 -3.4521e-02 -1.2004e-01 ... -3.2145e-01 -1.7860e-01 -1.6176e-01 1.3408e-01 3.0038e-03 -1.0454e-01 ... -1.3727e-04 3.0389e-02 -8.2818e-02 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.0.H.bias', -0.5865 -0.3727 -0.0313 0.0137 -0.1726 -0.1130 0.0186 -0.2501 0.0633 -0.1866 0.3170 0.0386 -0.3819 -0.1964 -0.3243 -0.3649 -0.1672 0.0802 -0.4135 -0.5153 -0.3261 -1.1598 -0.0941 -0.1548 -0.3674 -0.3337 -0.1484 -0.3088 0.2971 -0.0667 -0.2780 0.0620 -0.7120 -0.4120 0.1090 0.1015 -0.3369 0.2702 -0.2184 -0.2087 -0.7956 -0.1349 -0.2185 -0.4237 -0.2828 -0.3616 0.0301 0.2710 -0.3773 -0.4989 0.5445 0.0151 -0.3162 -0.1979 -0.4540 -0.3024 -0.2572 -0.6145 -0.2004 -0.2538 0.0036 -0.1976 -0.1802 -0.4072 -0.3396 0.2006 0.0554 0.3043 -0.2079 0.0379 0.0255 -0.1156 -0.4118 0.0619 -0.2979 -0.0777 -0.4252 0.2074 0.1137 0.2852 -0.1483 -0.0824 -0.1568 -0.2427 -0.2911 -0.3581 -0.0607 0.0008 -0.1334 -0.1024 0.4847 -0.5820 0.4444 -0.1540 -0.3775 0.0230 -0.2560 -0.0936 -0.3532 0.0069 0.1352 -0.0705 0.0704 -0.1387 0.2529 -0.2255 -0.0730 -0.1451 -0.3517 0.0273 -0.3147 -0.3470 -0.5478 -0.5797 -0.1703 -0.3998 -0.3479 -0.4414 -0.1925 -0.1970 -0.2915 0.5914 -0.0482 0.4137 -0.1621 0.1581 -0.6951 0.1022 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.0.T.weight', 4.2534e-01 -1.5347e-01 -2.6262e-01 ... 5.3239e-01 5.2091e-02 -9.6632e-02 2.0936e-01 3.7558e-01 2.5726e-01 ... 4.1763e-02 3.9232e-02 -1.2335e-01 -1.4236e-02 1.9510e-01 6.7196e-01 ... -1.5553e-01 -9.3686e-02 -1.0255e+00 ... ⋱ ... -4.6444e-02 1.9064e-01 -1.6661e-01 ... 1.2270e+00 -1.3526e-01 -1.0569e-01 -4.0731e-01 -2.1208e-02 -3.2148e-01 ... -7.6064e-02 5.6086e-01 2.5529e-01 5.8205e-03 5.0058e-01 1.7055e-02 ... -4.7348e-01 1.8220e-01 -5.3471e-02 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.0.T.bias', -1.6184 -1.0429 -1.3170 -1.3835 -0.4131 -0.7471 -1.3204 -1.1247 -1.5811 -1.2261 -1.1172 -0.8929 -1.2016 -1.7639 -1.4354 -1.2490 -0.9711 -1.1048 -1.1083 -1.2327 -1.0377 -1.7015 -0.7842 -1.0672 -0.6804 -1.0488 -1.0506 -1.0861 -0.8274 -1.4053 -0.8013 -0.8329 -1.5080 -1.0560 -1.0205 -0.9094 -1.3420 -1.3321 -0.9121 -1.4326 -1.2343 -0.8188 -1.0797 -1.0230 -0.8973 -0.5101 -0.9391 -1.6211 -1.0409 -1.3219 -1.6896 -0.4855 -1.0583 -0.9602 -1.4328 -0.9121 -1.3012 -1.2760 -1.0642 -1.4541 -0.5764 -1.4959 -1.1411 -0.8906 -0.9969 -0.8866 -1.4349 -0.9254 -0.8737 -1.0402 -0.8703 -1.2408 -0.9886 -1.3280 -1.1675 -1.4920 -1.1507 -1.2237 -1.3544 -0.9596 -1.1802 -0.9479 -0.1265 -0.5966 -1.0964 -0.8681 -1.1972 -0.7485 -1.0767 -0.9577 -1.4237 -1.4708 -1.5494 -0.9201 -1.5097 -0.9607 -0.8944 -1.1210 -1.0278 -0.7736 -0.8854 -1.1615 -1.6448 -0.9676 -1.5240 -0.7192 -1.2818 -0.3621 -1.5124 -1.5226 -1.3278 -1.3497 -1.2536 -1.1199 -1.0604 -1.5927 -0.8126 -1.0362 -0.4343 -0.8960 -1.2733 -1.5969 -1.3012 -1.1393 -0.6058 -1.0270 -1.4415 -1.2745 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.1.H.weight', -1.2903e-01 -3.1060e-01 1.0214e-01 ... -3.9553e-01 -1.0350e-01 -9.7856e-02 1.8118e-01 1.3375e-02 1.9977e-01 ... -1.5329e-02 -9.9447e-02 -3.4168e-01 3.8714e-02 1.3862e-01 -6.2023e-02 ... -6.8721e-01 5.3937e-02 1.0958e-01 ... ⋱ ... 9.2582e-02 7.8591e-02 -1.9832e-02 ... -1.0931e-01 2.1502e-01 -2.8412e-01 2.1670e-01 -2.8787e-01 1.7272e-02 ... -5.8934e-01 -4.1751e-01 1.1831e-01 -8.7897e-02 -7.7806e-02 -3.2964e-01 ... -4.8497e-01 8.8042e-01 5.9294e-02 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.1.H.bias', 0.0852 0.1422 -0.2038 -0.4496 -0.1156 -0.3202 -0.0841 0.3041 -0.4135 -0.3150 -0.2499 -0.1967 -0.1445 0.1640 -0.0770 -0.3320 -0.1990 -0.2207 -0.1029 -0.0388 -0.1529 -0.3993 -0.2705 -0.4385 -0.4369 0.4232 -0.0673 -0.1282 -0.0281 -0.3006 -0.3058 -0.1669 -0.3118 -0.2587 -0.4164 0.3357 0.0765 -0.2905 -0.0282 -0.3647 -0.1817 0.0875 -0.3000 -0.3787 -0.2694 -0.0468 -0.1556 -0.1751 -0.1883 -0.2814 0.3436 -0.2187 -0.1210 -0.3293 -0.1639 0.3319 0.5747 0.0551 -0.2188 0.1805 -0.0447 0.0290 -0.3679 -0.2118 0.0838 -0.5260 -0.3697 -0.1344 -0.2490 -0.3448 -0.0436 -0.2243 -0.1787 0.0374 -0.1492 -0.3518 -0.2739 -0.2630 -0.0347 -0.4719 0.0626 -0.5353 0.0813 -0.5074 -0.2277 -0.0963 -0.2280 -0.2200 -0.2598 -0.3971 0.0129 0.0856 -0.3171 0.0893 -0.3148 -0.1448 -0.3684 -0.1098 0.1464 -0.1974 -0.2048 -0.2799 -0.3617 -0.3129 0.1010 -0.2835 -0.1181 -0.2271 0.3809 -0.2949 -0.3094 0.1898 -0.0179 -0.3059 -0.2801 -0.2374 -0.3814 -0.1881 -0.3585 -0.2194 -0.0044 0.1414 -0.2586 -0.1702 0.1336 -0.0514 -0.2933 -0.1440 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.1.T.weight', -2.7291e-01 -8.4563e-02 -5.1859e-01 ... 6.6748e-01 -3.1786e-01 1.7333e-01 -4.4926e-03 1.5435e-01 -2.6212e-01 ... 2.8715e-01 -4.1305e-01 2.7199e-01 -1.1743e-01 1.0964e-01 2.5068e-01 ... -3.6285e-02 -4.6603e-01 -3.1373e-01 ... ⋱ ... 1.6104e-01 3.8883e-01 2.7417e-01 ... 5.4226e-01 -1.6939e-01 -5.0346e-02 -3.9585e-01 1.5602e-01 -2.7457e-01 ... -3.7775e-02 2.2263e-01 4.5654e-01 3.1321e-01 -8.3668e-02 3.3402e-01 ... -1.5699e-01 -7.2676e-02 -1.5020e-02 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.1.T.bias', -0.8735 -0.9374 -0.5376 -0.7254 -0.6993 -1.1141 -0.6093 -0.5540 -1.1648 -1.1545 -0.5369 -0.9158 -0.7628 -1.0001 -1.0033 -0.6922 -0.6518 -0.4973 -0.9699 -0.7759 -0.8246 -0.8996 -0.9112 -0.9865 -0.8448 -0.7220 -1.1545 -1.0397 -0.7810 -1.0055 -0.6162 -0.8090 -0.9596 -0.9937 -0.6643 -0.7168 -0.8435 -0.8227 -1.2901 -0.9706 -0.6679 -0.9203 -0.9916 -0.9663 -0.4326 -0.8739 -0.8837 -1.0407 -0.6802 -1.2371 -1.1714 -0.8032 -0.9634 -1.2142 -0.7706 -0.6415 -0.8907 -1.0116 -0.7812 -0.5034 -0.8496 -1.4659 -0.7487 -1.1192 -0.5994 -0.9254 -1.5166 -1.0100 -0.8003 -0.8005 -0.3146 -0.9065 -1.0724 -0.5451 -0.8384 -0.7926 -0.9792 -1.1625 -0.8456 -0.8491 -0.8859 -0.9054 -0.3732 -0.8253 -0.9892 -1.1470 -0.7574 -0.8059 -0.8198 -0.7491 -1.0176 -0.4271 -0.7732 -0.5335 -1.0020 -0.7807 -0.7100 -0.8340 -0.7395 -1.0114 -0.7979 -1.0000 -1.0414 -0.9079 -1.1180 -0.6561 -0.9785 -0.7766 -0.7542 -0.8809 -1.0164 -0.8534 -0.8284 -1.0077 -0.9135 -1.2533 -0.7508 -0.6234 -0.4095 -0.8056 -0.8089 -0.9309 -1.1503 -1.0148 -0.9564 -0.9559 -1.1823 -0.9343 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.2.H.weight', -4.1199e-01 -2.5735e-01 -1.1230e-01 ... -3.3251e-01 2.1625e-01 -1.0272e-01 -5.3729e-01 -1.6226e-01 -1.8906e-01 ... -2.6635e-01 -1.7354e-01 -1.5279e-01 -6.4851e-02 -3.5364e-01 -3.8214e-01 ... -2.6754e-01 -2.2258e-01 2.1006e-01 ... ⋱ ... -1.7147e-01 -5.9854e-03 -9.5970e-02 ... -6.7819e-02 6.9713e-02 -8.3691e-02 -3.1862e-01 -7.5826e-02 -1.8070e-01 ... -3.9192e-01 -2.1869e-01 -1.9810e-01 4.5282e-02 -3.1387e-02 -1.6171e-01 ... -1.7033e-01 -2.2421e-01 -1.2614e-01 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.2.H.bias', -0.2122 -0.2343 -0.1348 -0.1589 -0.1955 -0.2032 0.2329 -0.2732 -0.0481 -0.2075 0.2106 0.0758 -0.0937 0.1371 -0.3273 -0.3655 -0.2403 -0.2024 -0.2262 -0.0624 -0.2996 -0.0521 0.0110 -0.2283 -0.3445 -0.1740 -0.2238 -0.2046 -0.3556 -0.1143 0.2028 -0.2763 -0.3627 0.2728 -0.0679 0.0887 -0.3453 -0.3231 -0.0891 -0.1272 -0.0018 -0.1163 -0.2272 -0.2614 -0.2545 -0.1889 -0.1265 -0.0453 -0.3391 -0.0437 -0.2636 -0.3149 -0.2720 -0.3746 -0.2083 0.0788 -0.1900 0.0926 -0.2319 -0.2448 -0.2503 -0.2315 -0.1846 -0.0152 -0.1811 -0.2365 -0.0769 -0.0788 -0.2445 -0.2496 0.3307 -0.1891 -0.2120 -0.4236 -0.3208 -0.0614 -0.3653 -0.2695 0.0829 0.0220 -0.2381 -0.1541 -0.0260 -0.1162 -0.3177 -0.3546 -0.2281 -0.2564 -0.2692 -0.0759 -0.1668 -0.3211 -0.2489 -0.2288 -0.3650 -0.1224 -0.1687 -0.0150 0.0104 -0.3318 -0.2807 -0.0672 -0.2096 -0.3078 -0.1677 0.0028 -0.1035 -0.1555 -0.3375 -0.3359 -0.2737 -0.3322 -0.2961 -0.3377 -0.0618 -0.4349 0.2850 -0.1604 -0.2247 -0.2893 -0.1868 0.3563 -0.1133 -0.2874 -0.0059 0.0826 -0.2108 -0.2225 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.2.T.weight', 7.8885e-02 3.3253e-01 1.8680e-01 ... 4.2248e-01 2.0193e-01 2.0562e-01 1.3543e-01 4.2545e-01 3.1608e-01 ... -1.3264e-01 -4.2416e-02 2.7504e-01 -8.7588e-01 1.8678e-01 1.1842e-01 ... -5.6120e-02 1.2951e-01 7.0980e-02 ... ⋱ ... 1.6965e-01 -1.4437e-01 -3.9676e-01 ... 5.1236e-01 5.9884e-04 -1.4009e-01 -4.0851e-01 -1.3036e-01 -3.5849e-01 ... 1.6514e-01 4.4848e-01 1.5563e-02 -3.3969e-01 8.3092e-02 1.9582e-02 ... 2.3415e-01 -4.9319e-01 4.0139e-01 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.2.T.bias', -0.8153 -0.7770 -0.7813 -1.1026 -0.8793 -0.9383 -0.5527 -0.7065 -0.8250 -0.9231 -0.5422 -0.5655 -0.6027 -0.5251 -1.0486 -1.0117 -0.6038 -0.4492 -0.9319 -0.7762 -0.8418 -0.8602 -0.5735 -1.0869 -0.7593 -0.9178 -0.9908 -1.0369 -0.7088 -0.8628 -0.6217 -0.8813 -0.8341 -0.6698 -0.4780 -0.2960 -1.1446 -1.1255 -0.8254 -0.9693 -0.6734 -0.7371 -0.9512 -0.6976 -0.5678 -0.5780 -0.7421 -0.9606 -0.7212 -0.9026 -0.9789 -0.7526 -1.0406 -0.8483 -0.8528 -0.7178 -1.1469 -0.9338 -0.8029 -0.9971 -0.8680 -1.0543 -0.7668 -0.5666 -1.1054 -0.8139 -0.8125 -1.1192 -0.7838 -0.6822 -0.9070 -0.9213 -1.0835 -1.0484 -0.8505 -0.8808 -0.7637 -1.0705 -0.4922 -1.0723 -0.7494 -0.7859 -0.8495 -0.6520 -0.8290 -1.0147 -0.9081 -0.9225 -0.8036 -0.3723 -0.9247 -0.7306 -0.9188 -0.7129 -0.8161 -0.8964 -0.9712 -0.6160 -0.6626 -1.1156 -0.8525 -0.8618 -0.7801 -1.0254 -1.0769 -0.7159 -0.9362 -0.4980 -0.8991 -0.8185 -0.8928 -0.9891 -1.0699 -0.8665 -0.6343 -0.9490 -0.4492 -1.1470 -0.4669 -0.6436 -0.8893 -1.0752 -0.8446 -0.9765 -0.6132 -0.7831 -1.0923 -1.0290 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.3.H.weight', -2.0468e-01 -3.7653e-01 -2.5355e-01 ... -2.8372e-01 3.3695e-01 -2.2480e-01 -4.6384e-01 -2.0348e-01 1.8848e-02 ... -7.7341e-02 1.1872e-01 -3.9130e-01 -7.8048e-01 1.7562e-02 2.8630e-02 ... -2.9237e-01 5.8745e-02 -4.5702e-01 ... ⋱ ... 3.7976e-02 -9.6244e-02 -1.5971e-01 ... -2.0324e-01 2.6845e-01 -8.0729e-02 -1.0789e-01 3.6107e-02 1.4457e-01 ... -5.0796e-02 -5.0617e-01 -2.9675e-01 3.7937e-03 -3.9479e-01 1.3545e-01 ... -3.1718e-01 9.4908e-02 -2.5654e-01 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.3.H.bias', -0.2521 -0.1256 -0.1813 -0.1982 -0.2478 -0.1292 -0.2231 -0.2275 -0.2880 -0.2083 -0.1963 -0.2633 -0.0872 -0.2966 -0.1935 -0.2297 0.0641 -0.1298 -0.2047 -0.1377 -0.0130 -0.2552 -0.1561 -0.3377 -0.2934 -0.1902 -0.2115 -0.2594 0.0602 -0.2319 -0.0750 0.0117 -0.0910 -0.1090 -0.2593 -0.2097 -0.3011 -0.1155 -0.0650 -0.2091 -0.0570 -0.2633 -0.1900 -0.1681 -0.2742 -0.1280 -0.1124 -0.0569 -0.2899 0.0240 -0.3327 -0.1901 -0.1869 -0.1819 -0.2468 -0.2594 -0.1714 -0.1905 -0.2057 -0.2568 -0.2461 -0.2513 -0.1808 -0.1319 -0.3379 -0.1989 -0.1165 -0.2927 -0.1664 -0.2408 -0.1338 -0.0668 -0.1319 -0.1546 -0.1039 -0.2541 -0.1639 -0.2998 -0.0444 -0.1570 -0.2315 -0.2158 -0.1707 -0.2178 -0.1815 -0.1527 -0.2077 0.0212 -0.2341 -0.2250 -0.3521 -0.2077 0.0823 -0.1912 -0.2247 -0.2378 -0.1769 -0.1661 -0.2944 -0.0687 -0.2118 0.0014 -0.1662 -0.2974 -0.2595 -0.2134 -0.2725 -0.2799 -0.2954 -0.2916 -0.1599 -0.1841 -0.1736 -0.1969 -0.0531 -0.3190 -0.1531 -0.1976 0.0470 0.1481 -0.2712 -0.2474 -0.2162 -0.1807 -0.1803 -0.2616 -0.1937 -0.1448 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.highways.3.T.weight', 2.5584e-01 6.0501e-01 -1.2028e-02 ... 1.4831e-01 -6.3848e-01 5.3987e-01 -2.5646e-01 9.3154e-01 5.8705e-02 ... -9.9718e-02 6.1999e-01 -6.4101e-02 3.0758e-01 4.8709e-01 5.0105e-01 ... 2.6345e-01 -8.4133e-04 -2.3307e-02 ... ⋱ ... 2.2438e-01 6.7495e-02 1.4857e-01 ... 5.7131e-01 3.8120e-01 1.6802e-01 -1.2598e-01 1.9640e-01 8.7319e-02 ... 4.7567e-02 -1.6886e-01 2.6160e-01 1.5070e-01 -5.0804e-01 -6.2564e-02 ... 1.5024e-01 -3.4810e-01 6.8077e-01 [torch.FloatTensor of size 128x128]), ('module.encoder.cbhg.highways.3.T.bias', -0.8211 -0.9055 -0.8022 -0.8859 -0.8126 -0.8288 -0.2889 -0.6799 -1.0391 -0.8127 -0.3466 -0.7359 -0.6630 -0.8266 -1.1506 -0.8722 -0.7322 -0.6060 -0.4909 -0.5537 -1.0603 -0.7157 -0.6458 -0.5851 -1.0656 -0.7572 -1.0238 -0.9974 -0.8249 -0.7353 -0.5338 -0.8046 -0.8106 -0.5974 -0.5874 -0.5483 -0.8189 -0.8038 -1.0415 -0.8823 -0.6835 -0.7159 -0.9431 -0.7083 -0.5089 -0.6600 -0.8509 -0.9963 -0.8149 -0.8327 -0.9570 -0.8497 -1.0057 -0.8482 -0.9211 -0.7666 -0.7587 -0.7446 -0.7783 -0.7760 -0.6613 -0.9420 -0.8696 -0.6928 -0.7344 -0.6867 -0.9212 -0.9734 -0.9513 -0.7135 -0.7699 -0.6956 -0.6958 -1.1282 -0.7229 -0.7191 -0.6430 -0.6834 -0.6007 -0.9842 -0.6797 -0.8361 -0.7900 -0.7384 -0.8635 -1.0334 -0.8858 -0.9168 -0.6682 -0.7500 -0.7028 -0.8536 -0.6623 -0.8275 -0.7837 -0.6650 -0.8374 -0.8792 -0.6498 -1.0726 -0.6899 -0.6971 -0.7316 -0.9693 -1.1316 -0.7718 -0.9469 -0.6285 -0.3868 -0.7878 -0.7114 -0.7186 -0.8038 -0.7342 -0.6545 -0.9427 -0.5628 -0.8622 -0.6102 -0.4838 -0.6442 -0.6065 -0.8593 -0.9375 -0.5052 -0.6902 -0.7971 -0.9253 [torch.FloatTensor of size 128]), ('module.encoder.cbhg.gru.weight_ih_l0', 3.2007e-02 5.4455e-01 -7.1443e-01 ... -9.4162e-02 -4.3151e-01 -6.8749e-01 2.8130e-01 -5.0280e-01 -2.2537e-01 ... 1.4932e-01 7.0647e-01 -4.4751e-01 4.5330e-01 -1.2628e+00 2.0161e-01 ... 1.6025e-01 6.3556e-01 9.8590e-01 ... ⋱ ... 2.9572e-02 -1.3180e-01 4.3215e-01 ... 1.5172e-01 1.8325e-01 1.8813e-01 -1.6686e-01 -5.6037e-01 1.7777e-01 ... 1.3757e-01 -5.7515e-02 -2.3497e-02 1.6238e-01 1.8440e-01 3.2782e-01 ... -3.0081e-01 1.9484e-03 2.0204e-01 [torch.FloatTensor of size 384x128]), ('module.encoder.cbhg.gru.weight_hh_l0', 2.8526e-01 -3.1090e-01 3.6447e-01 ... -5.1902e-02 3.2500e-02 -1.9262e-01 1.1010e-01 -2.3967e-01 4.5641e-02 ... 2.8706e-01 5.7539e-01 1.9892e-01 2.4201e-01 4.6614e-01 -1.0062e+00 ... 2.0535e-01 -8.0792e-02 8.0792e-02 ... ⋱ ... 4.1073e-01 -3.2760e-01 2.7843e-01 ... -1.4044e+00 -5.8386e-01 2.3167e-01 -2.3053e-02 -4.4727e-01 1.9749e-01 ... 2.8223e-02 -1.8616e+00 4.7450e-02 5.1008e-01 1.4143e-02 1.1105e-01 ... -2.9560e-01 4.1204e-01 -1.6465e+00 [torch.FloatTensor of size 384x128]), ('module.encoder.cbhg.gru.bias_ih_l0', -0.4155 -0.3321 -0.3048 -0.6956 -0.3557 -0.3485 -0.3999 -0.6418 -0.3979 0.0681 -0.5136 -0.3784 -0.2780 -0.5601 0.2084 -0.2112 -0.2885 -0.1304 -0.2071 -0.2958 -0.3458 -0.3430 -0.3756 -0.2285 -0.2183 -0.4922 -0.0754 -0.4418 -0.4054 -0.3207 -0.3050 -0.2280 -0.1868 -0.4659 -0.3845 -0.1640 -0.3571 -0.2205 -0.3029 -0.4748 -0.3206 -0.3986 -0.3350 -0.2090 -0.5457 -0.1707 -0.1325 -0.2547 -0.4407 -0.4723 -0.2774 -0.5082 -0.3236 -0.3112 -0.4210 -0.3686 -0.1771 -0.3533 -0.1693 -0.3630 -0.5839 0.0258 -0.3645 -0.4769 -0.4490 -0.2534 -0.2482 -0.4709 -0.4346 -0.4537 -0.3565 -0.2979 -0.2655 -0.3364 -0.1936 -0.2150 -0.3746 0.0178 -0.3488 -0.3854 -0.4342 -0.3697 -0.1661 -0.2310 -0.2932 -0.4776 -0.4042 -0.3637 -0.5312 -0.2954 -0.2021 -0.4496 -0.1322 -0.4744 -0.1750 -0.1595 -0.1616 -0.4852 -0.3841 -0.0904 -0.2831 -0.3742 -0.2667 -0.2912 -0.2815 -0.4033 -0.2178 -0.3491 -0.3175 -0.4574 -0.4432 -0.3113 -0.3502 -0.3915 -0.2927 -0.5635 -0.0827 -0.3517 -0.2997 -0.2913 -0.3453 -0.1541 -0.2960 -0.3400 -0.3662 -0.3617 -0.3401 -0.2206 0.0005 0.2320 0.0617 -0.2193 0.1220 0.1716 0.0387 0.1160 0.4116 0.8825 0.2311 0.1303 0.6885 0.1943 1.0475 -0.2056 0.7786 0.2430 0.8656 -0.0933 0.5916 -0.1442 0.2344 0.2769 0.1473 0.3291 0.1934 0.4735 -0.0404 -0.0812 -0.4299 -0.0574 0.1115 0.2498 0.2578 0.0039 0.5349 0.2167 -0.1984 0.3020 0.4183 0.0827 0.5940 0.4513 -0.0776 0.0700 -0.3964 0.0625 0.0910 0.6217 0.4383 0.2796 0.1113 0.0774 0.2578 0.1069 -0.0121 -0.0714 -0.0098 0.1720 0.2910 0.6613 0.3495 0.2671 0.0405 -0.2306 0.0031 0.2956 0.0708 0.0342 0.1232 -0.1602 -0.0502 0.1984 0.1321 0.4394 -0.0117 0.9800 0.3483 0.0724 -0.0905 0.3448 0.1114 0.3323 -0.2112 0.6743 0.2827 0.1825 0.1283 0.0693 -0.0141 0.1136 0.0255 0.2815 0.0372 -0.1901 1.0761 0.4441 0.5777 0.6788 0.1622 -0.3291 -0.1040 0.2530 0.2226 0.0436 -0.0288 -0.0954 0.3503 0.1399 0.2990 0.2709 0.1577 0.5494 0.0870 -0.3541 0.5600 -0.0591 0.0816 0.6198 0.3193 -0.2191 0.1602 -0.0296 -0.1147 -0.0194 -0.0492 -0.2893 0.0315 -0.0166 -0.0184 0.0121 -0.0088 -0.0027 0.0265 0.0132 0.0018 -0.0272 -0.0002 -0.0125 0.0045 -0.0020 -0.0041 0.0166 -0.0004 -0.0344 0.0080 -0.0104 -0.0004 0.0003 -0.0094 -0.0328 0.0091 0.0158 0.0027 0.0115 0.0098 0.0391 -0.0165 0.0250 0.0223 -0.0112 -0.0005 -0.0163 0.0075 -0.0054 -0.0146 0.0169 -0.0056 -0.0101 -0.0117 0.0372 0.0263 -0.0326 0.0485 -0.0069 -0.0269 -0.0044 -0.0074 -0.0025 -0.0048 0.0194 0.0300 -0.0119 0.0037 -0.0352 -0.0390 0.0608 -0.0027 0.0059 -0.0266 -0.0211 -0.0342 0.0032 -0.0065 -0.0209 -0.0110 0.0007 -0.0052 0.0088 0.0066 -0.0215 -0.0257 -0.0106 0.0212 -0.0171 -0.0154 -0.0073 -0.0020 -0.0270 0.0151 -0.0326 0.0075 0.0117 0.0249 0.0320 0.0157 0.0160 0.0388 0.0345 0.0266 -0.0044 0.0264 -0.0309 -0.0014 -0.0206 0.0185 -0.0013 0.0004 0.0215 -0.0258 0.0112 -0.0389 -0.0145 0.0122 0.0118 0.0131 -0.0098 0.0018 -0.0337 0.0028 0.0133 0.0160 -0.0130 0.0274 0.0097 0.0143 -0.0441 -0.0135 -0.0019 0.0555 -0.0274 -0.0183 0.0123 -0.0122 -0.0438 [torch.FloatTensor of size 384]), ('module.encoder.cbhg.gru.bias_hh_l0', -0.3324 -0.2636 -0.3443 -0.7007 -0.3168 -0.2623 -0.4678 -0.5638 -0.4448 0.2232 -0.3805 -0.2953 -0.3160 -0.5969 0.2410 -0.2382 -0.3254 -0.1246 -0.1753 -0.3835 -0.4032 -0.4362 -0.4141 -0.2424 -0.3258 -0.3726 -0.1523 -0.3338 -0.4174 -0.2525 -0.3905 -0.2104 -0.2400 -0.3530 -0.3890 -0.2062 -0.3817 -0.2957 -0.4194 -0.5868 -0.3884 -0.3095 -0.4296 -0.1893 -0.5139 -0.2669 -0.1194 -0.3773 -0.4873 -0.4685 -0.1368 -0.4118 -0.3678 -0.2931 -0.4648 -0.3296 -0.2718 -0.3486 -0.0516 -0.2209 -0.6266 0.1345 -0.3854 -0.4379 -0.4165 -0.2786 -0.2046 -0.4067 -0.3728 -0.4412 -0.4051 -0.2663 -0.3059 -0.4357 -0.2192 -0.1052 -0.2918 -0.0670 -0.4026 -0.4681 -0.3601 -0.2093 -0.2464 -0.2066 -0.3683 -0.4958 -0.4797 -0.3821 -0.5263 -0.2097 -0.1749 -0.3779 -0.2407 -0.4482 -0.1921 -0.2409 -0.2942 -0.5252 -0.4661 -0.1872 -0.2110 -0.3630 -0.3668 -0.2823 -0.2967 -0.3671 -0.2426 -0.2324 -0.2456 -0.3978 -0.3852 -0.3906 -0.3389 -0.4650 -0.3562 -0.6517 -0.0692 -0.3172 -0.3680 -0.3382 -0.2662 -0.2351 -0.3653 -0.2909 -0.2948 -0.2182 -0.3879 -0.2211 -0.0638 0.2472 0.0489 -0.0662 0.0359 0.1067 0.0882 0.0249 0.4015 0.8504 0.2799 0.0461 0.6607 0.2023 1.0062 -0.2213 0.8321 0.3461 0.9642 -0.1767 0.4647 -0.1128 0.2089 0.3121 0.0332 0.2687 0.2742 0.4417 0.0058 -0.1918 -0.3485 -0.0085 0.0888 0.1906 0.2361 -0.1330 0.6340 0.1948 -0.2205 0.3279 0.3521 0.1421 0.5140 0.4459 -0.0698 0.0853 -0.3250 0.1003 0.0428 0.4710 0.3304 0.2012 0.0356 0.0750 0.2146 0.0363 0.0198 -0.0927 -0.0490 0.2173 0.4293 0.7105 0.4576 0.2871 0.0533 -0.2388 0.0702 0.3513 0.0898 -0.0335 0.0856 -0.1336 -0.0065 0.1672 0.1079 0.3741 0.0139 1.0074 0.3532 0.0353 -0.0585 0.3025 0.1956 0.1709 -0.2493 0.6448 0.2334 0.1509 0.1396 0.1179 0.0650 0.1516 0.0633 0.3230 -0.0687 -0.2229 0.9816 0.3944 0.4913 0.7222 0.2434 -0.3003 -0.0925 0.2632 0.3519 0.1211 -0.0183 -0.0195 0.4433 0.1283 0.3443 0.3609 0.0334 0.5134 -0.0298 -0.2188 0.5276 0.0114 0.0276 0.5599 0.3557 -0.2125 0.1562 -0.0786 -0.0632 -0.0968 -0.0286 -0.4307 -0.0858 0.0490 0.0511 -0.0643 0.0182 0.0145 -0.0713 -0.0547 0.0017 0.0403 0.0069 0.0232 -0.0281 0.0315 0.0454 -0.0308 0.0274 0.0587 -0.0261 0.0316 0.0094 -0.0150 0.0226 0.0829 -0.0262 -0.0449 -0.0152 -0.0277 -0.0229 -0.1034 0.0613 -0.0601 -0.0383 0.0514 -0.0016 0.0361 -0.0196 0.0211 0.0380 -0.0685 0.0311 0.0322 0.0471 -0.0937 -0.0973 0.0930 -0.1017 0.0296 0.0931 0.0100 0.0059 0.0206 0.0215 -0.0494 -0.0895 0.0149 -0.0236 0.0978 0.0867 -0.1546 0.0023 -0.0112 0.0661 0.0772 0.0852 -0.0093 0.0098 0.0716 0.0540 -0.0106 0.0131 -0.0172 -0.0238 0.0653 0.0811 0.0017 -0.0489 0.0220 0.0361 0.0340 0.0156 0.0678 -0.0318 0.0601 -0.0316 -0.0399 -0.0763 -0.1014 -0.0504 -0.0512 -0.1088 -0.0949 -0.0490 0.0054 -0.0582 0.0834 -0.0200 0.0834 -0.0350 0.0003 -0.0014 -0.0655 0.0722 -0.0105 0.1002 0.0321 -0.0231 -0.0414 -0.0302 0.0381 -0.0080 0.0859 0.0093 -0.0398 -0.0581 0.0648 -0.0445 -0.0229 -0.0461 0.1085 0.0413 -0.0018 -0.1469 0.0784 0.0475 -0.0404 0.0248 0.0912 [torch.FloatTensor of size 384]), ('module.encoder.cbhg.gru.weight_ih_l0_reverse', 3.9114e-01 2.9058e-01 2.1761e-01 ... 6.4113e-02 2.4866e-02 2.0332e-01 -1.0174e-01 -3.9031e-01 -3.4186e-01 ... -1.7112e-01 -3.6747e-01 1.9842e-01 -6.4938e-01 7.2838e-01 1.8530e-01 ... -3.3848e-01 -5.1010e-01 2.4907e-02 ... ⋱ ... -2.7396e-02 -5.2042e-02 1.7369e-01 ... -1.7471e-01 1.4356e-01 -1.5332e-01 -1.5901e-01 -1.2751e-01 -1.3193e-01 ... -1.4338e-01 1.6812e-01 1.0909e-02 -1.3883e-01 -9.4185e-02 -7.3227e-02 ... -1.6537e-02 -8.9718e-02 1.9451e-01 [torch.FloatTensor of size 384x128]), ('module.encoder.cbhg.gru.weight_hh_l0_reverse', -9.0639e-02 -1.0697e-01 5.7864e-01 ... 7.3469e-02 -1.2463e-02 -2.9977e-01 7.5671e-02 -3.0402e-01 -3.2234e-02 ... -4.2422e-01 -4.2969e-01 4.5207e-01 8.1829e-03 -3.2530e-01 -1.0873e-01 ... -3.3056e-01 -2.7219e-01 4.2230e-01 ... ⋱ ... -1.5967e-01 -2.3172e-01 -3.3635e-01 ... -9.2274e-01 -2.5547e-01 -2.0461e-01 -4.4421e-02 4.6869e-02 -1.8232e-01 ... -1.9337e-01 -1.5851e+00 2.4948e-01 4.0944e-02 -3.1717e-01 1.9521e-01 ... 2.7803e-01 1.8609e-01 -1.1735e+00 [torch.FloatTensor of size 384x128]), ('module.encoder.cbhg.gru.bias_ih_l0_reverse', -3.9950e-01 -3.2197e-01 -1.3405e-01 -3.2604e-01 -5.6099e-01 -2.8250e-01 -1.9845e-01 -3.1501e-01 -1.3463e-01 -5.6306e-02 -3.3890e-01 -4.3012e-01 -2.8359e-01 -2.2534e-01 -2.9135e-01 -4.1709e-01 -4.8963e-01 -2.6772e-01 -3.2265e-01 -5.5409e-01 -3.2530e-01 -2.0639e-01 -4.3631e-01 -4.2849e-01 -2.5627e-01 -2.3475e-01 -1.8201e-01 -2.5451e-01 -5.4825e-01 -2.9104e-01 -5.2274e-01 -4.0654e-01 -3.4696e-01 -2.7961e-01 -2.9184e-01 -4.3392e-01 -1.2083e-01 -2.7390e-01 -2.5458e-01 -3.6923e-01 -3.2601e-01 -4.3903e-01 -4.0077e-01 -4.9911e-01 -7.1123e-01 -2.9732e-01 -2.0078e-01 -4.2338e-01 -1.3699e-01 -1.6686e-01 -2.2572e-01 -1.8250e-01 -2.4997e-01 -2.6874e-01 -1.9583e-01 -2.4155e-01 -2.2515e-01 -2.9014e-01 -2.9633e-01 -5.5905e-01 -3.7270e-01 -2.5255e-01 -2.9211e-01 -2.9391e-01 -2.3087e-01 -4.1957e-01 -4.2438e-01 -3.2394e-01 -3.4436e-01 -3.5398e-01 -3.5293e-01 -2.9384e-01 -3.7704e-01 -5.6181e-01 -3.2076e-01 -3.3572e-01 -4.2303e-01 -3.5085e-01 -1.1059e-01 -3.9819e-01 -2.7747e-01 -2.2572e-01 -3.1374e-01 -6.2688e-01 -3.4165e-01 -2.8978e-01 -1.8716e-01 -7.6714e-01 -4.0007e-02 -4.8165e-01 -1.8806e-01 -4.2463e-01 -3.6502e-01 -3.3924e-01 -2.8496e-01 -3.3000e-01 -3.8442e-01 -3.8950e-01 -3.5248e-01 -4.5389e-01 -1.9361e-01 -3.3409e-01 -6.1777e-01 -1.5660e-01 -4.0810e-01 -2.7564e-01 -3.7781e-01 -3.9068e-01 -3.2880e-01 -3.6881e-01 -5.4130e-01 -3.6642e-01 -1.5730e-01 -2.2717e-01 -3.7211e-01 -1.1571e-01 -4.5240e-02 -2.6568e-01 -2.9602e-01 -4.1572e-01 -2.0671e-01 -5.1318e-01 -3.3514e-01 -5.7200e-01 -3.1647e-01 -3.4052e-01 -4.6007e-01 -4.2392e-01 -9.5999e-02 1.9549e-01 6.2338e-02 3.6352e-01 6.6420e-02 2.2219e-03 2.7094e-01 2.1709e-02 -1.6166e-01 1.3865e+00 3.2875e-02 3.7118e-01 5.3729e-01 -1.1936e-01 3.8217e-03 1.2877e-01 1.6440e-01 -9.4198e-02 1.4893e-01 4.6403e-02 5.4027e-01 4.7590e-01 6.0755e-02 -2.3104e-01 4.8152e-01 2.9563e-01 -6.8155e-02 -1.4239e-01 1.0185e-01 7.0267e-02 -1.1993e-01 6.3600e-01 1.6313e-01 -9.9922e-02 1.7793e-01 1.9935e-01 1.2773e-01 2.9412e-01 1.2534e-01 4.4731e-01 7.1293e-02 1.1292e-01 -8.9293e-02 3.4067e-01 1.7642e-01 -1.6595e-01 5.9712e-01 -1.0002e-01 1.9485e-01 1.9248e-01 1.0541e+00 7.2503e-01 9.6133e-01 1.9803e-01 6.9215e-03 5.8363e-03 1.5684e-01 6.0974e-01 3.1397e-01 -7.1457e-02 1.7027e-01 7.2058e-01 1.8164e-01 3.4612e-01 4.1798e-01 1.1724e-01 6.3091e-02 4.3745e-02 1.1416e-01 -6.2468e-02 -1.9612e-01 8.5380e-01 1.2593e-01 3.4382e-01 5.9211e-01 -2.1804e-01 1.3949e-01 4.4558e-02 2.8600e-01 5.1862e-02 2.0543e-01 -2.3415e-01 -1.3609e-01 2.2412e-01 -1.0314e-01 -2.8997e-01 4.8292e-02 -4.4030e-03 1.4104e+00 3.2007e-01 1.2052e-01 9.1789e-02 2.0554e-01 7.6838e-02 3.4112e-01 1.3785e-01 -2.2267e-02 -1.2027e-01 4.9677e-01 1.8642e-02 4.0587e-01 -1.0680e-01 3.3480e-02 1.0233e+00 1.4441e-01 -1.1909e-01 -3.0416e-01 -2.4717e-01 -5.9753e-02 -7.1970e-02 -2.8127e-02 -6.7975e-02 1.8998e-01 -5.9482e-02 -2.2827e-01 1.3982e+00 1.0563e+00 -9.7214e-03 -6.2421e-02 4.8036e-02 -3.0814e-02 -2.7699e-01 3.1028e-01 5.1062e-01 -1.1891e-01 -9.7181e-02 6.2741e-02 -1.1922e-01 -1.6630e-02 -1.6036e-02 2.1753e-02 -1.7682e-02 5.4466e-03 -1.1839e-02 2.4157e-02 -2.7820e-03 -1.2229e-02 -2.3654e-02 -4.3722e-03 -1.3692e-02 1.3510e-02 -1.5224e-03 3.9796e-02 -1.5609e-02 -7.2699e-03 1.8364e-02 5.2180e-02 -7.0756e-03 -2.6243e-02 -1.8955e-02 -7.5550e-03 -3.2503e-02 -3.0747e-02 3.7648e-03 -6.3179e-03 -9.0970e-03 5.7825e-03 -1.6808e-02 1.1765e-02 -1.9976e-02 -8.9799e-03 1.6962e-03 3.2469e-02 4.8755e-02 -2.5837e-02 2.7148e-02 -9.4387e-03 4.4295e-03 -4.7579e-02 1.4396e-02 -1.5253e-02 1.3900e-02 9.5453e-05 -3.3910e-02 -2.8879e-03 -1.6263e-02 -4.2506e-03 -8.4950e-03 9.2556e-03 2.9998e-02 4.6513e-03 -5.2122e-02 -4.4481e-02 1.5333e-02 2.2972e-02 -1.2462e-02 -3.4175e-03 4.6433e-03 -1.8690e-03 1.7714e-02 -1.9650e-03 -2.6035e-02 1.6315e-02 -1.1207e-02 1.3503e-02 -1.1432e-02 7.8864e-03 -1.4371e-02 5.3492e-02 -2.3405e-02 -4.6617e-03 1.4812e-02 9.7913e-03 2.2707e-02 -3.8388e-02 -1.3629e-03 4.2772e-02 2.2527e-02 3.8240e-02 9.8582e-03 1.6111e-02 -2.1650e-03 -1.9264e-02 2.3081e-02 -3.5951e-02 1.0299e-02 -1.5311e-02 2.5592e-03 -3.0966e-02 -1.5051e-02 -1.7087e-02 1.3155e-02 -1.0265e-02 1.2869e-02 -8.2125e-03 -2.9881e-02 -1.3611e-02 -1.8783e-03 -1.5054e-02 -2.1433e-02 -6.1975e-03 3.2115e-02 -6.9664e-03 4.0004e-03 -8.5284e-03 -1.5563e-02 -1.0801e-02 8.9668e-03 1.5517e-03 1.4980e-03 -1.9663e-02 9.3043e-03 -1.8283e-02 -1.7655e-02 -1.7385e-02 -1.5408e-02 -3.1511e-02 -1.6622e-02 4.2455e-02 6.7878e-02 3.2178e-02 8.9143e-03 9.9809e-03 -2.0120e-02 -3.9052e-03 -6.8284e-03 [torch.FloatTensor of size 384]), ('module.encoder.cbhg.gru.bias_hh_l0_reverse', -0.4021 -0.3769 -0.2140 -0.4048 -0.4409 -0.2962 -0.3530 -0.3547 -0.2006 -0.0939 -0.2148 -0.3912 -0.2509 -0.3723 -0.2090 -0.3679 -0.6192 -0.2744 -0.2821 -0.6300 -0.2462 -0.3459 -0.4790 -0.3206 -0.3240 -0.2225 -0.2448 -0.3012 -0.5411 -0.1877 -0.6550 -0.3096 -0.3862 -0.3820 -0.2281 -0.4488 -0.2567 -0.2342 -0.3323 -0.3611 -0.1804 -0.5137 -0.3178 -0.4012 -0.6883 -0.3295 -0.2554 -0.4798 -0.2267 -0.2884 -0.1101 -0.3275 -0.2861 -0.3019 -0.2099 -0.2571 -0.3100 -0.3889 -0.2770 -0.5019 -0.3791 -0.2051 -0.3247 -0.3686 -0.2871 -0.3502 -0.4598 -0.2021 -0.3041 -0.4478 -0.3348 -0.3264 -0.3935 -0.5275 -0.3446 -0.3875 -0.3014 -0.3336 -0.1255 -0.3976 -0.2708 -0.1827 -0.2783 -0.6199 -0.4063 -0.3431 -0.2897 -0.6350 -0.1377 -0.3942 -0.0478 -0.3938 -0.4251 -0.2682 -0.2178 -0.2168 -0.3732 -0.4737 -0.3745 -0.3642 -0.1977 -0.2043 -0.5977 -0.1634 -0.5290 -0.3496 -0.3478 -0.4321 -0.2788 -0.2496 -0.4516 -0.3969 -0.1678 -0.2187 -0.3149 -0.0863 -0.0248 -0.2742 -0.2219 -0.4662 -0.1856 -0.3597 -0.3117 -0.4578 -0.2414 -0.3564 -0.4715 -0.4142 -0.1192 0.2282 0.0998 0.3681 -0.0937 -0.0333 0.2070 0.0235 -0.1256 1.3631 0.0545 0.3700 0.6947 -0.1998 0.0415 0.1162 0.1360 -0.0844 0.2483 0.0018 0.5331 0.5480 0.0129 -0.0840 0.5069 0.3349 -0.0571 -0.0355 0.0312 0.1560 -0.1759 0.5901 0.1501 0.0128 0.2224 0.0474 0.0988 0.2010 0.1655 0.4528 0.0771 0.1318 -0.0493 0.2723 0.0769 -0.2386 0.6518 -0.0598 0.3625 0.1767 0.9518 0.6138 1.0032 0.1404 0.0313 0.0960 0.2134 0.4506 0.2143 -0.0808 0.2908 0.6168 0.1607 0.3082 0.2554 0.1755 0.0763 0.1613 0.2722 -0.1114 -0.2297 0.8030 0.0292 0.3153 0.5183 -0.1982 0.1279 0.0764 0.1635 -0.0785 0.1573 -0.2740 -0.2416 0.2376 -0.1834 -0.3681 0.0012 0.0962 1.2691 0.2833 0.2742 0.0638 0.2562 0.1418 0.3912 0.1948 -0.0925 -0.0010 0.6521 0.0980 0.3893 -0.1378 -0.0100 1.0812 0.1997 -0.1443 -0.3224 -0.3206 -0.0624 -0.1788 0.1056 0.0937 0.2355 -0.0579 -0.2225 1.5624 0.9694 -0.1254 -0.0760 0.1729 0.0904 -0.2842 0.1807 0.4844 -0.1173 -0.1267 0.1129 -0.2652 0.0412 0.0245 -0.0584 0.0137 -0.0256 0.0435 -0.0638 -0.0002 0.0219 0.0493 -0.0016 0.0335 -0.0372 -0.0163 -0.1106 0.0539 0.0132 -0.0349 -0.1317 0.0202 0.0458 0.0272 -0.0015 0.1109 0.0943 -0.0222 0.0050 0.0363 -0.0365 0.0504 -0.0554 0.0515 0.0207 0.0082 -0.0852 -0.1534 0.0594 -0.0727 0.0335 -0.0088 0.1092 -0.0471 0.0300 -0.0515 -0.0115 0.0812 0.0010 0.0595 0.0341 0.0243 -0.0234 -0.0822 -0.0227 0.1416 0.1211 -0.0128 -0.0706 0.0334 0.0051 -0.0237 -0.0058 -0.0463 0.0036 0.0705 -0.0559 0.0278 -0.0305 0.0348 -0.0263 0.0674 -0.1551 0.0591 0.0149 -0.0758 -0.0294 -0.0772 0.1146 0.0091 -0.1018 -0.0859 -0.1003 -0.0120 -0.0486 0.0101 0.0587 -0.0660 0.0979 -0.0533 0.0480 -0.0106 0.0422 0.0438 0.0387 -0.0297 0.0189 -0.0110 0.0288 0.0946 0.0560 0.0084 0.0286 0.0514 0.0132 -0.0559 0.0266 -0.0069 0.0305 0.0457 0.0135 -0.0472 0.0024 -0.0213 0.0408 -0.0099 0.0599 0.0540 0.0526 0.0459 0.0755 0.0624 -0.1091 -0.2088 -0.0742 -0.0100 -0.0230 0.0552 0.0098 0.0287 [torch.FloatTensor of size 384]), ('module.decoder.input_layer.weight', 5.8912e-02 -1.6154e-01 3.1346e-01 ... -8.0450e-02 -1.7943e-01 -4.4032e-01 2.2493e-02 -5.4650e-01 4.6403e-01 ... 1.4908e-02 -2.3215e-01 2.7284e-02 9.7050e-02 4.0065e-01 -2.3527e-02 ... -3.0692e-01 7.2489e-02 2.4635e-01 ... ⋱ ... -3.5142e-03 6.1435e-02 -6.3044e-03 ... 4.7249e-02 1.6870e-02 -2.7558e-01 -9.6333e-01 2.4835e-01 -1.5142e-01 ... 4.1154e-01 -8.1726e-02 -4.9164e-02 3.7292e-01 2.6657e-01 -3.5470e-01 ... 6.5989e-02 7.4646e-02 2.0836e-01 [torch.FloatTensor of size 256x256]), ('module.decoder.prenet.layers.0.weight', 1.7273e-02 3.9850e-02 3.8051e-02 ... 2.3819e-01 2.4384e-01 3.2467e-01 2.7207e-02 -2.0696e-02 4.7891e-02 ... -5.0082e-02 2.9328e-02 -4.2173e-02 -4.6377e-02 -5.8427e-02 5.9433e-02 ... 9.2617e-03 2.8656e-02 2.6222e-01 ... ⋱ ... -6.4205e-02 -1.9410e-02 2.5475e-02 ... 9.8911e-02 5.9283e-02 1.9599e-01 -1.0034e-02 -1.4641e-02 -5.9885e-02 ... 3.0947e-01 1.8075e-01 3.6630e-01 -4.7642e-02 4.9048e-01 -8.2729e-02 ... -7.3440e-01 -2.3007e-01 1.2729e-01 [torch.FloatTensor of size 256x400]), ('module.decoder.prenet.layers.0.bias', 1.2574 -0.0297 0.8272 0.1237 -0.6448 -0.3559 -0.6406 -1.0272 -0.9062 -0.3825 -1.1827 -0.4707 -0.3198 -0.5765 -0.2746 0.2238 -0.5342 -0.2980 -0.3369 -0.3599 -0.6481 -0.2758 -0.5455 -0.1720 -0.9845 -0.0390 -0.5530 -0.1325 -1.4598 1.6848 -0.2840 -0.4153 -0.9280 0.3325 -0.0751 -1.5679 -0.6500 -0.4325 -0.8774 -0.0603 -0.0973 0.9125 -0.6883 -0.1912 -0.4294 -1.2876 1.7439 -0.0499 -0.7083 -1.3910 1.2655 -1.7360 -0.0509 -0.4689 -0.4943 -0.9908 -0.2800 -0.4613 -0.0472 -0.7536 0.2111 -0.5066 -0.8105 -0.6232 -0.2159 -0.4624 1.1024 0.3514 0.8865 -0.0117 -0.3849 -0.5286 -0.7260 0.2438 -0.2764 -1.1041 -0.7391 -0.4548 0.9607 0.2563 1.0791 -1.1938 -1.4059 -0.0218 -0.5807 -0.0211 -1.1994 -0.3751 -0.4187 -0.6417 -0.5826 -1.8560 -0.3148 -0.2558 0.1604 -0.1702 -0.0172 0.3932 -0.7518 0.4139 -0.0991 0.2794 -0.3773 -0.0568 -0.4718 1.8045 -0.3920 0.2627 0.2605 0.4709 -0.2575 0.1251 -0.0557 1.7317 -0.4669 -0.3428 -0.0377 0.1490 -0.3555 -1.0507 -0.4364 -0.7193 0.3214 -0.3130 -1.2584 0.2316 -0.2101 1.6875 -0.5976 -0.1807 -1.1705 0.9523 -0.4907 -0.6100 0.7783 -1.3257 0.2497 -0.5033 -0.3916 -1.2852 1.0912 -1.1056 -0.0825 0.3236 -0.6353 -0.6368 -0.6667 -0.1264 -0.8609 0.4090 -0.2521 -0.1683 -0.0963 0.2585 -1.3112 -0.2364 -0.4060 -0.3156 -0.2951 -0.0159 -0.5303 -0.5743 -0.3195 -1.0213 1.1266 -0.3234 1.2449 -0.7429 -0.6232 -0.5871 -0.9705 -0.6896 2.2192 -1.2712 0.9313 -0.3202 1.3223 -0.2517 -0.0472 -0.5210 -0.2545 1.4358 -0.2809 -0.6639 -0.7321 1.2307 -1.3464 -0.2210 1.6999 -0.4472 -1.1767 2.2247 -0.5072 1.6496 -0.4081 -0.0079 1.4584 -1.0027 1.2522 1.2128 -0.9032 1.2793 -0.6196 -0.3898 -0.9331 0.3800 1.7871 0.1263 0.9310 -0.4832 1.0980 0.4972 -0.0218 0.1663 -0.1926 -0.4412 -0.4890 0.3012 -0.4918 -0.5552 -0.7084 1.7566 1.2132 -0.6182 -0.6995 -0.5213 0.2395 -0.4751 1.3885 0.0127 -0.7025 -1.3511 -0.0942 -0.0687 0.1353 -0.5863 -0.8277 -0.2539 -0.4305 -1.5516 -0.3325 -0.3001 -0.0283 0.9374 -0.8915 0.3668 -0.2711 -1.1822 -0.4988 -1.7255 0.2521 0.3801 -0.9418 -1.5530 -0.3145 0.2827 [torch.FloatTensor of size 256]), ('module.decoder.prenet.layers.1.weight', -1.3509e-03 -4.9811e-02 -3.0601e+00 ... 1.2224e-02 6.3256e-04 -5.1213e-01 -7.1447e-04 -2.4918e-02 -2.3079e-04 ... -4.0298e-01 8.0245e-02 5.5424e-02 1.8214e-03 -3.6424e-02 -2.4487e-03 ... 9.2734e-03 3.9090e-05 -1.6429e-01 ... ⋱ ... 4.1529e-03 4.6767e-02 3.3784e-03 ... -4.9681e-04 -4.5640e-03 -1.7410e+00 2.1006e-03 6.3024e-04 1.9130e-03 ... -2.2718e-02 -8.2343e-03 -9.1887e-02 7.8437e-04 -2.0041e-02 -3.0045e-01 ... 4.5585e-03 -2.2033e-03 -2.6096e-01 [torch.FloatTensor of size 128x256]), ('module.decoder.prenet.layers.1.bias', 0.0052 -0.0042 -0.0056 -0.1226 -0.3664 -0.0042 -0.9969 0.3418 0.2695 0.3532 -0.0236 -0.7367 -0.0029 0.2559 0.0523 -1.1205 -0.0039 -0.1988 0.2844 0.0206 -0.8661 0.5405 -0.1517 -0.0011 -0.1248 0.0041 -0.0378 -0.0906 0.0570 0.4385 0.5438 0.0329 0.0029 -0.9980 0.3965 -0.0089 -1.1389 -0.1909 0.2859 -0.0189 -0.0014 0.1089 0.0194 -0.0660 -0.6653 -0.1948 -0.0060 0.3791 -0.0064 0.3199 0.0274 0.0885 -0.0027 0.2967 0.4002 0.0870 -0.0071 0.0584 -0.0073 -0.8491 0.1680 -0.0017 0.3167 -0.0034 0.0167 0.1565 0.0116 0.3706 -0.0044 -0.0025 0.2208 -0.0007 0.1015 0.1852 -0.0199 -0.0067 0.2664 0.0052 0.0027 0.0724 -0.6335 0.3221 -0.0028 -0.0143 -0.0027 -0.1618 0.3082 -0.5741 0.1174 0.0087 0.0087 -0.0076 0.0071 -1.0007 -0.4847 -0.0075 -0.0015 0.0655 0.5898 0.1552 0.0606 -0.0090 -0.0814 -0.0840 0.0140 -0.0111 -0.2604 -0.0040 0.0634 0.0555 0.4157 -0.0055 0.2500 0.3200 -0.1563 -0.0049 -0.7962 0.0093 -0.6326 -0.0270 0.1141 -0.3724 -0.0036 0.4631 -0.0074 0.0300 -0.5286 0.2050 [torch.FloatTensor of size 128]), ('module.decoder.attention_rnn.rnn_cell.weight_ih', 4.7004e-03 -3.1616e-03 5.6298e-03 ... 4.1007e-01 -1.1769e-01 1.5694e-02 -2.1963e-02 -4.6315e-02 -3.6587e-02 ... 2.2953e-02 4.3843e-01 -2.3121e-02 1.5446e-02 -1.2748e-02 1.3458e-02 ... 3.1746e-01 -1.2326e-01 -3.6464e-01 ... ⋱ ... 1.1824e-02 1.0912e-01 3.9496e-02 ... -3.3261e-01 -8.9657e-02 -2.0552e-01 -4.6938e-02 4.5614e-02 -6.5803e-02 ... -1.6535e-01 1.5010e-01 1.0953e-01 9.1592e-04 1.6509e-03 1.6584e-03 ... 6.4068e-04 -2.5964e-02 -5.1105e-02 [torch.FloatTensor of size 768x384]), ('module.decoder.attention_rnn.rnn_cell.weight_hh', 2.3133e-01 -1.3324e-01 6.8962e-02 ... -1.3204e-01 -1.4500e-01 -8.0639e-02 3.8879e-01 -4.8046e-01 5.5766e-01 ... 8.2341e-01 -7.1176e-02 -1.7622e-01 -1.3903e-01 -2.3238e-01 1.5178e+00 ... 5.0531e-01 -2.3371e-01 1.5687e-01 ... ⋱ ... 4.3659e-02 -1.8181e-02 1.4424e-01 ... -2.3447e+00 1.1663e-01 2.8783e-02 1.3827e-01 -1.5128e-01 -2.8599e-02 ... 1.6510e-01 -7.6042e-01 6.6707e-02 1.8982e-01 6.8345e-04 1.3821e-01 ... -2.7056e-01 4.6980e-03 -1.2982e+00 [torch.FloatTensor of size 768x256]), ('module.decoder.attention_rnn.rnn_cell.bias_ih', -0.1681 -0.4455 -0.0855 -0.2734 -0.2902 -0.3903 -0.0983 -0.3286 0.0890 0.1034 -0.3072 -0.1984 -0.3123 -0.3316 -0.1818 -0.2477 -0.3372 -0.2289 -0.2351 -0.1868 -0.0510 -0.1700 -0.6182 -0.4167 -0.3103 -0.3025 -0.1819 -0.1224 -0.1600 -0.1490 -0.3483 -0.3005 0.0135 0.2279 -0.3633 -0.5285 -0.0272 -0.0846 -0.3191 -0.2163 -0.3472 0.4668 -0.2450 -0.1129 -0.2643 -0.1567 -0.2560 -0.4324 -0.0101 -0.0095 -0.4006 -0.2120 -0.2007 0.0248 -0.3071 -0.2836 -0.1074 -0.2105 -0.2390 0.0074 -0.1894 -0.2280 -0.0391 -0.3245 -0.1079 -0.2870 -0.2642 -0.1295 -0.1607 -0.6550 0.2058 -0.1507 -0.3115 -0.3720 -0.5016 -0.0754 -0.1310 -0.0842 -0.4348 -0.1354 0.0201 -0.4004 -0.3730 -0.0806 -0.1812 -0.3134 -0.4739 -0.1196 -0.1110 0.0135 -0.2281 -0.2914 -0.1191 -0.1852 -0.1134 -0.1330 -0.0775 -0.1139 -0.0373 -0.1966 -0.1528 -0.3453 -0.3157 -0.2631 -0.2700 -0.1754 0.0399 -0.3159 0.3600 -0.1543 -0.0227 -0.4256 -0.0859 -0.4042 -0.2984 -0.1030 -0.5381 -0.3036 0.0867 -0.1804 -0.0901 -0.0799 0.2442 -0.2991 -0.0975 -0.2165 -0.1760 -0.2790 -0.2457 -0.2866 -0.1029 -0.2726 -0.1853 -0.3342 -0.4926 -0.3865 -0.0976 -0.1514 -0.3091 -0.3345 -0.2481 -0.2871 -0.1821 -0.0286 0.0421 -0.3300 -0.5272 0.0578 -0.4468 -0.2035 -0.2977 -0.3573 -0.2553 -0.0288 -0.2515 -0.3434 -0.1521 -0.0886 -0.0664 -0.3043 -0.3734 -0.3612 -0.3121 -0.2435 -0.3681 0.3880 -0.1687 -0.1656 -0.2758 -0.1889 -0.3179 -0.6930 -0.2761 -0.1971 -0.2255 0.0814 -0.0797 -0.1322 -0.2608 -0.0973 0.1345 -0.2937 -0.2879 -0.2330 -0.3231 -0.3696 -0.2892 0.1844 -0.3503 -0.4406 -0.0191 0.1077 -0.0890 -0.4048 -0.2865 -0.3503 0.0461 -0.1139 -0.2748 -0.0668 -0.5985 -0.4329 -0.3664 -0.0504 -0.0366 -0.2466 -0.4081 -0.3905 -0.4826 -0.1981 -0.1180 -0.0637 -0.3156 -0.2758 -0.1164 -0.4320 -0.1839 -0.3343 -0.1842 -0.2677 -0.1974 -0.3704 0.1662 -0.0225 -0.0275 -0.2811 -0.0940 -0.0287 -0.2577 -0.4922 -0.3372 -0.2350 -0.2281 0.0020 -0.1796 -0.1772 -0.2580 -0.2861 -0.2207 -0.2044 -0.0210 -0.4599 -0.1910 -0.3374 -0.4828 -0.3753 -0.2594 -0.1496 0.1280 -0.2443 -0.1205 -0.1102 -0.2538 -0.2303 -0.2475 -0.0475 -0.3693 0.5033 -0.2319 0.4061 -0.2932 0.2920 0.3194 0.2956 0.4079 -0.1020 -0.3090 0.3526 -0.1023 0.6887 0.4414 0.0618 0.5093 -0.0824 0.2660 -0.1215 0.5445 0.5103 -0.5923 0.5078 0.8807 0.3953 -0.3786 -0.0840 0.2337 -0.3545 -0.2247 0.7402 0.0166 -0.5177 0.5897 1.0498 -0.3881 -0.0463 0.0124 0.2867 0.6141 -0.3570 -0.8429 0.1788 -0.3630 -0.8819 0.4702 0.4737 -1.2320 -0.9242 0.3525 0.5829 -0.5321 0.6006 0.8337 0.3379 -0.1771 -0.8779 0.6235 -0.1372 -0.4700 0.6897 0.4867 0.3006 -0.2144 0.2536 -0.1740 -0.4235 -0.5595 0.9610 0.0160 0.6872 -0.4486 -0.3360 0.3887 -0.1723 0.3747 -0.2266 0.3598 -0.5144 -0.0569 0.7886 0.6107 -0.3781 0.0526 -0.1562 0.2887 -0.4112 0.0853 0.1482 0.6354 -0.4740 0.0292 1.0279 0.4255 0.1468 -0.7064 -0.0603 -0.0846 -0.2341 0.2208 0.8614 0.1438 -0.0266 0.5498 0.5653 -0.1237 0.4341 -0.6534 0.0016 0.1957 0.4631 0.0945 0.5586 0.2923 0.2120 0.6231 0.3097 -0.0693 0.1286 -0.3333 0.3914 -0.2655 -0.7806 0.0748 0.4576 0.4348 0.9007 -0.3443 0.8456 0.4984 0.3020 -0.0093 0.4828 0.4315 -0.1974 0.2839 0.2093 0.4478 -0.2094 0.5609 0.6765 -0.2177 0.6834 0.0592 0.4823 0.7880 -0.0486 0.0854 -0.1783 0.3725 -0.0166 0.7774 0.2578 0.2012 -0.6923 0.4831 0.2770 0.2333 0.9920 0.9016 1.0211 0.6877 0.8542 0.4827 -0.3787 0.7731 -0.7951 0.2204 0.3461 0.4762 0.8330 0.6178 0.1453 0.2667 -0.1538 -0.5029 0.2894 0.7792 -0.2938 0.0511 -0.2496 -0.2892 -0.3114 0.5537 -0.3806 -0.3456 -0.0560 0.2599 0.6866 0.2795 -0.3941 -0.2718 0.5049 0.6830 0.6329 0.5453 -0.3399 0.3233 0.3098 0.9976 0.4178 0.1519 0.1064 -0.5418 -0.2228 0.6781 0.3510 0.9180 0.0556 -0.1268 0.1080 -0.0807 0.1642 0.2019 -0.2052 0.0701 0.5781 0.3520 0.2864 -0.0198 0.4802 0.0686 -0.1297 0.3799 -0.5801 0.2267 -0.3331 0.1789 0.6574 -0.2033 0.4127 0.3014 -0.4878 -0.1851 0.2485 0.0927 0.2120 0.9425 0.1457 0.2978 0.4554 0.4970 -0.5606 -0.2625 0.5333 -0.3710 0.2715 -0.2672 0.5287 0.1724 -0.1804 0.4135 0.2100 0.5956 0.1735 -0.1697 0.0212 0.0500 0.0162 0.0217 -0.0038 -0.0292 -0.0375 -0.0807 0.0075 -0.0146 0.0072 -0.0216 -0.0081 -0.0122 -0.0221 0.0021 -0.1028 0.0248 0.0013 -0.0282 0.0260 -0.0007 0.0086 -0.0044 -0.0003 -0.0160 -0.0177 0.0968 0.0884 -0.0166 0.0074 -0.0274 0.0712 0.0031 0.0033 -0.0614 -0.0058 -0.0979 -0.0347 0.0047 -0.5094 -0.0331 0.0248 0.0739 -0.1455 -0.0031 -0.0054 -0.0301 0.0418 -0.0298 -0.0051 -0.0114 0.0393 0.0115 -0.0170 0.0274 -0.0185 0.0036 0.0992 -0.0060 -0.0107 -0.0020 -0.0102 -0.0238 0.0093 -0.0593 0.0540 0.0948 0.0009 0.0047 -0.0669 -0.0652 -0.0174 -0.0050 -0.0460 -0.0368 0.0073 -0.0226 0.0407 -0.1166 -0.0127 0.0295 -0.0485 -0.0491 0.0840 -0.0081 -0.0437 0.0251 -0.0226 0.0027 -0.0135 0.0099 0.0272 0.0331 -0.0159 0.0967 0.0151 -0.2298 0.0420 0.0340 0.0214 -0.0144 -0.0021 0.0025 -0.0134 0.0341 -0.0051 0.5071 0.0183 0.0527 -0.0089 0.0089 0.0229 -0.0395 -0.0221 -0.0034 0.0094 0.3358 0.0210 0.0231 -0.0355 0.0187 0.1024 0.1728 -0.0074 -0.0199 0.0116 0.0285 0.0012 -0.0164 0.0300 0.0188 0.0261 0.0009 0.0303 0.0122 -0.0124 -0.0008 -0.0001 -0.0164 -0.0088 -0.1008 -0.0192 0.0444 0.0021 -0.0120 0.1039 0.0013 0.0005 -0.0113 -0.0187 0.0402 0.0104 0.0056 -0.0675 0.0169 -0.0246 -0.0080 -0.0207 -0.0091 -0.0100 0.0288 -0.0229 0.0028 -0.5419 0.0176 -0.0364 -0.0238 -0.0585 0.0434 -0.0093 0.0032 -0.0320 0.0736 0.0288 0.0025 0.0717 0.0175 0.1066 -0.3397 -0.0192 -0.0216 -0.0105 0.0057 -0.1191 0.0371 0.1501 0.0231 -0.0081 0.0215 0.2007 0.0494 0.0154 -0.0065 0.0078 -0.0508 -0.0921 -0.0307 0.0148 0.0052 -0.0226 -0.0015 -0.0533 0.0225 0.0718 -0.0091 -0.0112 0.0106 0.0108 -0.0658 -0.0417 0.0511 0.0098 -0.0226 0.0397 -0.0010 0.0005 -0.0055 0.0009 -0.0248 0.0215 0.0667 -0.0166 0.0769 -0.0188 -0.0282 -0.2376 0.0055 0.0075 -0.0412 0.0070 0.0015 0.0111 0.0065 -0.0147 0.0011 -0.0076 0.0141 -0.0171 -0.0178 -0.0050 -0.0329 0.0181 0.0225 -0.0003 -0.0145 0.0534 -0.1561 0.0157 0.0602 -0.0238 0.0468 0.0414 0.0061 0.2552 [torch.FloatTensor of size 768]), ('module.decoder.attention_rnn.rnn_cell.bias_hh', -0.1619 -0.3476 -0.1480 -0.3070 -0.3330 -0.4126 -0.1426 -0.3965 0.0937 0.0854 -0.2954 -0.2285 -0.3341 -0.3151 -0.1267 -0.2881 -0.3631 -0.1389 -0.2565 -0.1751 -0.0839 -0.1953 -0.5660 -0.3454 -0.2550 -0.2794 -0.2598 -0.1146 -0.2088 -0.1589 -0.2954 -0.2815 0.0091 0.2411 -0.2810 -0.5252 -0.0954 -0.1224 -0.2838 -0.1160 -0.2590 0.4528 -0.2510 -0.1378 -0.2298 -0.1317 -0.2359 -0.3929 -0.0677 0.0196 -0.3766 -0.2093 -0.1193 0.0242 -0.3060 -0.3011 -0.1356 -0.3085 -0.2883 0.0095 -0.0880 -0.2309 -0.0859 -0.3217 -0.0338 -0.3068 -0.2076 -0.0829 -0.1129 -0.6464 0.1541 -0.1448 -0.3018 -0.3953 -0.4760 -0.1088 -0.0596 -0.0990 -0.3875 -0.0677 0.0721 -0.3655 -0.3776 -0.0781 -0.1582 -0.3194 -0.4237 -0.1050 -0.0755 0.0387 -0.2771 -0.3062 -0.1700 -0.1959 -0.1171 -0.1218 -0.0978 -0.0586 -0.1273 -0.2727 -0.2560 -0.3704 -0.3867 -0.3156 -0.3319 -0.1371 0.0549 -0.3129 0.4112 -0.1779 -0.0990 -0.4332 -0.0537 -0.3977 -0.2975 -0.1883 -0.5482 -0.1984 -0.0025 -0.1368 -0.1487 -0.0403 0.3522 -0.2982 -0.0614 -0.1865 -0.2529 -0.2171 -0.1985 -0.2645 -0.0844 -0.3852 -0.3014 -0.2764 -0.4070 -0.3464 -0.1338 -0.2450 -0.3135 -0.3405 -0.3546 -0.2439 -0.2328 -0.0099 -0.0225 -0.3018 -0.4255 0.0968 -0.3711 -0.2472 -0.2916 -0.3173 -0.3281 -0.0802 -0.2683 -0.3201 -0.1384 -0.1097 -0.0033 -0.2716 -0.4538 -0.3782 -0.3252 -0.1798 -0.2970 0.3557 -0.0889 -0.1667 -0.3569 -0.1980 -0.2802 -0.6866 -0.2729 -0.1824 -0.2946 0.0190 -0.1397 -0.1238 -0.2137 -0.0725 0.1644 -0.2956 -0.2130 -0.2431 -0.4066 -0.3374 -0.2426 0.0936 -0.2441 -0.4642 -0.0439 0.1466 -0.1685 -0.3575 -0.3338 -0.2467 -0.0382 -0.1084 -0.2676 -0.0279 -0.5669 -0.3248 -0.2988 -0.1384 -0.0428 -0.1908 -0.4249 -0.3513 -0.4417 -0.2214 -0.1841 -0.1598 -0.2495 -0.3004 -0.0978 -0.4285 -0.2434 -0.3418 -0.2656 -0.3367 -0.2058 -0.3585 0.2135 -0.0383 -0.0590 -0.3584 -0.1003 -0.0823 -0.2290 -0.4151 -0.3287 -0.2609 -0.2074 -0.0302 -0.2000 -0.1967 -0.2884 -0.2419 -0.2180 -0.1499 0.0607 -0.4445 -0.2275 -0.3392 -0.5443 -0.4030 -0.1525 -0.1266 0.1708 -0.2400 -0.1670 -0.2054 -0.2090 -0.2149 -0.2566 -0.1421 -0.3399 0.5824 -0.2527 0.3676 -0.3132 0.3163 0.2471 0.2204 0.4109 -0.1702 -0.3129 0.3991 -0.1923 0.6788 0.3854 0.0071 0.5050 -0.1384 0.2599 -0.1039 0.5401 0.4116 -0.5869 0.5164 0.8553 0.3429 -0.3181 -0.1754 0.3036 -0.3375 -0.1901 0.7914 -0.0785 -0.6208 0.5135 1.0519 -0.3579 0.0026 -0.0774 0.3235 0.6447 -0.2960 -0.7971 0.2223 -0.4455 -0.7894 0.4417 0.4320 -1.2065 -0.8539 0.2963 0.5805 -0.6462 0.6600 0.8335 0.3425 -0.1960 -0.8870 0.6678 -0.1122 -0.4294 0.6735 0.4285 0.3091 -0.1661 0.2526 -0.1500 -0.3594 -0.5651 0.9179 0.0467 0.6760 -0.3944 -0.3406 0.4742 -0.1682 0.4339 -0.3198 0.3731 -0.5405 -0.0980 0.8088 0.6149 -0.4575 0.0069 -0.0509 0.2913 -0.4123 0.1005 0.1729 0.6298 -0.4936 0.0730 1.0227 0.4433 0.0877 -0.6209 -0.0473 -0.0910 -0.2771 0.2363 0.7982 0.2361 -0.0301 0.5295 0.5625 -0.0675 0.4057 -0.6459 -0.0210 0.1949 0.3733 0.1092 0.5491 0.3457 0.1826 0.6330 0.2792 -0.1493 0.1782 -0.3537 0.3738 -0.3377 -0.7687 0.0234 0.4615 0.3424 0.8100 -0.2883 0.7503 0.4715 0.2516 0.0903 0.3937 0.4879 -0.1865 0.1914 0.1662 0.5099 -0.2456 0.5576 0.6465 -0.1639 0.7299 -0.0028 0.5129 0.8677 -0.0218 -0.0252 -0.0817 0.3497 -0.0177 0.8177 0.2856 0.1286 -0.6258 0.4768 0.2549 0.2557 0.9397 0.9453 0.9781 0.6264 0.7413 0.4938 -0.3236 0.7286 -0.7392 0.2140 0.2990 0.5049 0.8075 0.6013 0.1991 0.3121 -0.2264 -0.5586 0.1848 0.7634 -0.2268 0.0361 -0.2531 -0.3944 -0.2888 0.5843 -0.3288 -0.2433 -0.0277 0.2993 0.6837 0.3319 -0.3886 -0.3487 0.4654 0.6222 0.6338 0.4947 -0.3158 0.4046 0.2970 1.0081 0.4049 0.1201 0.0893 -0.5924 -0.2084 0.6545 0.3832 0.9156 0.1390 -0.1476 0.0552 -0.0764 0.1637 0.1733 -0.2394 0.1264 0.5378 0.3810 0.2349 0.0309 0.4756 0.0135 -0.1033 0.3523 -0.6723 0.2053 -0.3769 0.0946 0.6838 -0.2735 0.3993 0.2562 -0.4144 -0.2369 0.2371 0.0963 0.2089 0.9221 0.2022 0.2522 0.4877 0.4430 -0.4568 -0.2242 0.5223 -0.3433 0.2435 -0.3096 0.4233 0.1803 -0.2181 0.3297 0.1887 0.4948 0.0645 0.1853 0.0461 -0.5405 0.0505 -0.2320 0.0252 -0.0698 -0.1343 -0.1055 -0.1967 0.0221 -0.0856 0.4344 0.0555 0.0140 -0.0541 -0.0627 0.3555 -0.0124 0.0921 -0.0967 0.4111 0.0825 0.1014 0.0064 0.0683 -0.3561 0.0492 0.0461 -0.2335 -0.1464 -0.2263 -0.1506 0.3772 0.1911 0.0898 -0.1831 0.2928 0.0019 -0.0671 0.0978 -0.4679 0.2038 0.1759 -0.1874 -0.4478 -0.2019 0.0195 -0.6356 0.5889 0.0839 0.0249 -0.0093 0.1125 -0.1698 -0.1128 0.0459 -0.4563 0.0387 0.0413 0.1062 -0.0449 0.3089 0.1521 -0.1559 -0.1409 -0.0080 -0.1416 -0.0880 0.0510 0.4468 -0.0293 -0.1358 -0.0869 -0.0722 -0.1814 -0.0355 -0.1569 -0.0244 -0.1034 -0.1868 -0.1137 0.0116 0.2406 -0.1332 0.1920 0.1413 0.0873 -0.1358 -0.3472 -0.1419 0.2209 -0.1519 -0.0581 -0.1016 -0.2099 0.2941 0.0614 -0.3624 -0.1279 0.0294 -0.0761 0.0743 -0.2552 0.0505 0.0670 0.2179 -0.0527 1.1843 -0.1803 0.0802 0.0763 0.0695 0.2161 -0.0324 0.1221 -0.1342 -0.0348 0.2924 -0.0477 -0.5574 -0.2621 0.5131 0.0358 0.2213 0.1481 0.0540 -0.1209 0.3502 -0.0047 0.3568 0.2608 0.3326 -0.0042 0.0541 -0.1348 0.1228 -0.1018 0.1038 -0.0196 0.0890 0.2493 0.3795 0.0309 0.0020 0.0691 0.0798 0.3880 -0.1273 0.1769 0.2971 0.1089 0.0605 0.0870 -0.1426 -0.1277 -0.2745 -0.3569 -0.3517 -0.0392 0.0041 -0.0905 0.1713 0.0787 -0.0161 -0.8084 0.0701 -0.1106 -0.1163 -0.1231 0.2874 -0.1083 -0.0241 0.1892 -0.0507 -0.1197 -0.0090 0.2675 0.1027 0.0621 -0.2644 0.1653 0.3257 -0.1145 0.2373 -0.1805 0.2422 0.2630 0.0240 0.0365 -0.0354 0.5086 -0.2078 0.1933 -0.0428 -0.0236 0.0174 0.0381 -0.1284 0.3418 -0.0086 0.0068 -0.0063 -0.1209 -0.2977 0.0815 0.0506 -0.2095 0.0531 -0.0646 -0.0838 -0.2099 0.0826 -0.0646 0.2612 -0.0063 -0.0467 -0.0608 -0.0934 -0.1225 -0.0386 0.0333 0.5756 -0.1831 0.2527 -0.0004 -0.1814 -0.1829 0.0491 -0.0370 -0.0921 0.0687 -0.0148 0.3860 0.0291 0.1489 0.0311 -0.1610 -0.0939 0.0767 0.0855 0.0008 -0.2122 0.2774 -0.1368 0.0284 -0.1148 -0.1983 -0.0610 -0.0545 0.1757 -0.0837 0.0626 -0.0300 0.0745 -0.4166 [torch.FloatTensor of size 768]), ('module.decoder.attention_rnn.alignment_model.query_layer.weight', 5.8217e-01 -3.4247e-02 -4.2030e-01 ... -4.0968e-01 8.5390e-02 4.6055e-01 -6.2852e-02 7.3241e-01 1.9948e-01 ... 7.4486e-01 9.9272e-02 1.8832e-01 2.8498e-03 6.8163e-02 -1.8771e-01 ... -2.8722e-02 1.2512e-01 -2.2392e-02 ... ⋱ ... 4.7293e-02 2.6991e-01 -3.1210e-01 ... 6.0747e-01 -1.4412e-02 -2.3233e-01 4.3386e-03 3.1905e-01 -2.3568e-01 ... 6.9467e-01 7.6569e-02 5.6162e-01 -7.3181e-02 -2.0433e-01 -2.2061e-01 ... 2.2420e-01 1.8482e-01 -1.5150e-01 [torch.FloatTensor of size 256x256]), ('module.decoder.attention_rnn.alignment_model.v.weight', Columns 0 to 9 0.5073 -0.8066 -0.5430 -0.0781 0.5228 -0.6178 -0.7605 0.5892 0.3176 0.4537 Columns 10 to 19 -0.4171 0.6084 0.1839 -0.7084 -0.4441 -1.6197 -0.4959 -0.9911 -0.6512 -0.2274 Columns 20 to 29 -0.5381 0.6234 -0.4004 -0.8469 0.4973 -0.8723 -0.4026 0.2807 0.5562 -0.2542 Columns 30 to 39 -0.5353 -0.1086 -0.3278 0.7822 0.8560 1.0251 0.3340 0.2907 0.8487 -0.9684 Columns 40 to 49 0.2930 -0.5106 0.7091 0.6632 -0.7062 -0.5953 0.6418 -0.7575 0.2727 -0.9261 Columns 50 to 59 0.6242 -0.7467 1.1074 -1.0174 -0.2931 0.8765 -1.4872 -0.5117 1.3068 -0.8304 Columns 60 to 69 0.2666 -0.8220 -0.6618 0.2560 -0.3534 -0.1411 -1.1381 -0.4390 0.9555 -0.3471 Columns 70 to 79 -0.8656 -0.4469 -0.8662 -0.3345 0.7019 0.6659 0.5447 -1.0600 0.8054 0.5610 Columns 80 to 89 0.6442 -0.7685 -0.8629 -0.7881 0.7093 0.9787 0.3471 -0.5890 -0.5512 -0.4742 Columns 90 to 99 -0.4012 -0.4171 -0.4594 -0.5549 -0.5748 -0.7700 -0.7150 0.6140 0.5824 -0.1414 Columns 100 to 109 0.3770 0.5924 -0.4207 -0.7606 0.4449 -0.1035 0.6338 0.8180 1.0246 -0.5367 Columns 110 to 119 0.4984 0.5632 0.5072 0.4643 -0.4524 -0.7255 0.5640 0.6078 1.0864 0.2769 Columns 120 to 129 -0.6761 -0.3424 -0.7378 0.4411 -0.3803 0.4045 -0.7586 0.7523 0.2877 -0.5737 Columns 130 to 139 -0.6083 -0.6420 0.8977 0.9262 0.5735 -0.8141 0.6196 0.7017 -0.6651 0.9567 Columns 140 to 149 0.7958 -0.6955 0.2351 -0.7377 -0.4900 -0.0508 0.5433 -0.7096 -1.1429 -0.3475 Columns 150 to 159 -0.7877 0.9206 -0.5850 -1.1290 0.7658 0.5059 0.9300 0.9337 0.7968 0.5796 Columns 160 to 169 0.7807 0.4674 -0.8088 -0.9657 -0.5101 0.7808 -0.3687 0.4910 -0.4080 1.1659 Columns 170 to 179 0.7607 0.1435 0.9547 0.3607 -0.5578 -0.7379 1.2265 -0.4966 -0.2176 -0.6519 Columns 180 to 189 -0.6896 -0.3904 -0.8627 0.3932 0.7155 0.4569 0.5685 0.6334 0.8212 -0.7214 Columns 190 to 199 -0.7570 0.6596 0.4377 0.7303 -0.5479 0.5378 1.0405 -0.5907 -0.2744 -0.7873 Columns 200 to 209 0.3606 -0.3971 0.0997 -0.6636 -0.4120 -0.5314 0.2740 0.6491 0.8219 -0.6500 Columns 210 to 219 0.3358 1.0261 -0.5197 -1.4257 0.7639 0.5901 1.0980 0.3868 0.3822 0.4242 Columns 220 to 229 0.9219 -0.8746 -0.8677 -0.9909 0.4973 -0.8149 -0.5387 0.6924 -1.3391 0.4169 Columns 230 to 239 0.5728 0.6056 -1.0567 -0.5872 0.7191 -0.3696 0.2235 -0.4116 -0.5580 0.5378 Columns 240 to 249 -0.4537 0.4198 -0.6692 -0.8861 -0.2353 -0.9916 0.5921 -0.6078 -0.9091 -0.6674 Columns 250 to 255 -0.5588 0.5099 0.8359 -0.4494 -0.7441 0.5094 [torch.FloatTensor of size 1x256]), ('module.decoder.project_to_decoder_in.weight', 1.7090e-02 -1.5314e-01 2.3427e-02 ... -4.0826e-02 7.2217e-02 -7.9281e-02 6.5305e-02 -1.3720e-01 6.3315e-02 ... -3.4179e-02 6.6730e-03 -1.4187e-01 1.3014e-01 1.4892e-02 -6.4547e-02 ... 9.2366e-02 1.0338e-01 1.1845e-01 ... ⋱ ... 6.2698e-02 -4.4816e-02 -2.8500e-02 ... -2.0856e-01 5.4064e-02 -7.1827e-02 5.5420e-03 -5.5788e-04 4.9956e-02 ... -8.5185e-02 -3.2172e-02 -1.4255e-01 4.1809e-02 -1.2650e-01 5.5656e-02 ... -4.2012e-02 -1.4137e-02 -1.6233e-01 [torch.FloatTensor of size 256x512]), ('module.decoder.project_to_decoder_in.bias', -0.0729 -0.2827 0.1118 -0.0552 0.0032 0.0943 -0.1231 0.1936 0.0312 0.0930 -0.1576 -0.0244 -0.2076 -0.0441 -0.0181 0.1134 0.1269 -0.0643 0.0213 -0.2247 -0.0852 -0.0004 -0.0464 0.1204 -0.0111 -0.0043 -0.0793 -0.1642 0.0791 -0.1492 0.0745 -0.0026 0.0297 -0.0307 -0.0568 0.0283 0.1270 -0.1008 -0.0651 0.0315 0.1378 0.0780 0.1301 0.0409 -0.1453 0.0380 -0.2262 -0.0416 0.0032 -0.0030 -0.0308 -0.0902 -0.1086 -0.0271 0.0075 0.1064 -0.1719 -0.1063 -0.1929 -0.0272 0.0355 0.1189 0.0705 -0.1847 -0.1368 -0.1176 -0.1104 0.1135 0.1158 -0.0149 -0.0117 0.1930 -0.0138 -0.0000 -0.0603 -0.0073 0.0229 -0.0834 -0.1326 -0.0476 0.1620 0.1176 0.1045 -0.1281 -0.1108 0.1548 0.0974 0.0707 0.1988 -0.0117 0.2109 -0.0471 -0.0105 -0.0242 0.0535 0.2667 -0.2243 -0.2015 0.2367 0.1542 0.0132 0.0792 -0.0275 -0.0020 0.1622 -0.0105 0.0358 0.0155 0.0508 -0.2329 -0.1213 -0.0849 0.1247 -0.0858 0.0492 0.0653 -0.1860 -0.1709 -0.0788 0.0936 0.1256 -0.1903 0.1031 0.1291 0.0779 -0.1129 -0.1542 -0.2169 -0.0414 -0.0035 0.1739 -0.2442 0.0305 0.0882 -0.0153 -0.1542 -0.0818 -0.0500 0.0210 -0.0720 0.0030 0.0696 0.0871 -0.0157 -0.0520 0.0367 -0.1358 -0.0309 0.1577 -0.1377 0.0137 -0.0637 0.0874 -0.1855 0.0585 0.1164 0.0031 -0.0132 0.0757 -0.1253 0.2182 -0.0690 0.1712 -0.1668 0.1482 -0.0694 0.0394 0.1385 -0.0414 0.0532 -0.0451 0.0992 0.0341 -0.1527 0.0802 0.2008 -0.0263 0.0494 -0.0201 0.0747 0.1764 -0.2041 0.1243 -0.0636 0.0933 0.1667 0.1320 -0.1841 0.0046 0.0358 0.0354 0.0346 0.1220 0.1459 -0.0471 -0.0443 0.1796 0.0054 0.1263 -0.1085 0.2157 0.1334 0.0768 0.0626 -0.1337 0.2519 -0.0244 0.2387 -0.0890 0.1807 -0.0319 -0.1225 0.0283 -0.0626 -0.0355 0.1421 -0.0180 0.0384 0.0579 -0.1816 -0.0709 0.0547 -0.0697 -0.1428 0.0438 -0.1040 0.0245 -0.0847 0.0092 -0.1438 0.1096 0.1755 0.1201 -0.0789 0.0149 -0.1176 0.1574 0.0123 -0.0054 0.0103 -0.0059 -0.1272 0.0023 -0.0200 0.0168 0.0094 0.0279 -0.0089 -0.0046 0.1179 0.0226 -0.0539 0.0648 0.0334 0.0096 -0.0831 [torch.FloatTensor of size 256]), ('module.decoder.decoder_rnns.0.weight_ih', 3.6559e-01 1.3628e-01 -2.1633e-01 ... 2.7516e-01 -7.1986e-02 4.4005e-02 3.6500e-02 -1.9909e-01 1.4216e-01 ... -4.3430e-01 -8.3087e-02 2.8016e-02 2.8174e-01 -5.7776e-02 6.6599e-02 ... -2.8246e-01 3.5993e-02 -2.9273e-01 ... ⋱ ... 3.3959e-02 -1.1470e-01 -1.0531e-01 ... -8.3257e-01 -1.3246e-01 -4.4173e-02 -1.0553e-01 1.2328e-01 1.7012e-01 ... 7.6643e-02 -1.1219e+00 -1.8551e-01 3.1992e-02 3.2217e-02 -4.4496e-02 ... 8.5311e-02 5.9092e-02 -3.5393e-01 [torch.FloatTensor of size 768x256]), ('module.decoder.decoder_rnns.0.weight_hh', -1.2001e-01 1.3434e-01 2.1710e-01 ... 2.0419e-01 1.1873e-01 -4.3647e-02 -1.8234e-01 1.6046e-01 4.4518e-02 ... -4.1734e-01 -1.2173e-01 5.9824e-02 2.9158e-01 -2.7247e-02 1.5671e-02 ... -2.5096e-01 1.0294e-01 -3.6500e-01 ... ⋱ ... -1.2932e-01 1.9027e-01 8.0898e-02 ... -4.4098e-01 -6.3198e-02 1.6503e-01 7.6857e-02 1.4576e-01 -5.6706e-02 ... -5.8169e-02 -1.4532e+00 -8.6998e-02 1.1599e-02 -5.3002e-02 1.2298e-01 ... -2.4869e-01 -1.9728e-01 -7.1696e-01 [torch.FloatTensor of size 768x256]), ('module.decoder.decoder_rnns.0.bias_ih', 0.0116 0.1026 -0.3138 -0.0812 -0.1848 -0.0472 -0.0573 -0.1596 0.1095 -0.1096 0.0399 -0.0648 -0.0006 -0.0839 -0.1251 0.0715 -0.1008 -0.0078 -0.3360 -0.0493 -0.1719 -0.0271 0.0416 -0.0603 0.0246 0.0474 -0.0994 -0.1429 -0.0604 0.0297 0.0072 -0.0515 -0.0197 0.1027 0.0355 -0.1087 -0.2336 -0.1757 -0.0605 0.0160 0.0194 -0.0887 -0.0401 -0.0388 -0.0008 -0.2236 -0.0077 -0.0266 -0.1645 -0.2540 -0.0875 -0.1967 -0.0359 -0.2148 -0.2301 -0.2453 0.0215 0.0686 -0.0301 -0.0570 -0.0487 -0.2362 0.1002 -0.1362 0.0661 0.0404 -0.0586 0.0189 -0.0559 -0.2214 -0.0091 -0.2396 -0.1825 -0.1755 -0.0987 -0.0925 0.0073 -0.2031 0.0795 -0.1654 -0.2210 -0.0647 0.0640 -0.2269 -0.3008 0.0154 -0.0520 -0.1203 -0.0603 -0.0096 -0.2002 -0.0891 -0.0554 0.0654 0.1383 -0.1248 -0.2894 0.0546 -0.1538 0.0979 0.0249 -0.2352 -0.3390 -0.1669 -0.0283 -0.0669 0.0010 -0.0169 -0.1507 0.0345 -0.3200 -0.0612 -0.1117 -0.0534 -0.1404 0.2807 -0.1804 -0.1148 -0.1073 0.0365 0.0027 0.0137 -0.0650 -0.0113 -0.1049 -0.2029 -0.1193 -0.0268 -0.0879 -0.1249 -0.1956 0.0903 -0.0405 0.0081 -0.0355 -0.0648 0.0218 0.0174 -0.0131 -0.1615 0.1092 -0.1362 -0.1437 0.0170 -0.1591 -0.2226 0.0392 0.0823 0.0571 -0.0379 -0.1287 -0.1448 -0.1040 -0.0879 -0.1232 -0.1087 -0.0433 -0.0447 -0.1160 0.0422 -0.3190 0.1619 -0.0090 -0.0351 -0.1185 -0.0622 -0.2895 -0.1695 -0.1118 -0.1018 0.1545 -0.0051 -0.0879 -0.0131 -0.0221 -0.1833 -0.0629 -0.0490 -0.0730 -0.1051 0.0227 0.0742 -0.0764 0.0373 -0.0566 0.0861 -0.1826 -0.0210 0.0308 -0.1552 -0.1281 -0.2854 -0.4552 -0.1615 -0.2446 -0.1199 -0.0503 -0.0199 -0.0964 -0.0792 -0.1881 0.0043 -0.1212 0.1179 0.0685 -0.0345 -0.1844 -0.1544 0.0468 -0.1170 -0.0268 -0.0597 -0.0840 -0.1276 -0.0280 -0.1454 -0.1130 -0.2788 -0.1131 -0.1978 -0.2831 -0.1843 0.0336 0.0062 -0.0806 -0.1110 -0.0737 -0.2455 -0.0519 -0.0734 -0.1923 0.0887 -0.0954 0.1921 -0.2211 -0.0959 -0.2155 -0.1280 -0.0314 -0.0361 -0.0616 -0.1519 -0.0341 -0.0482 -0.1468 0.1076 0.1481 -0.1121 -0.0883 -0.1053 -0.2161 -0.1013 0.1392 -0.0382 -0.1268 0.0306 0.2110 -0.0911 -0.1882 -0.0585 -0.3644 -0.3121 -0.3706 -0.0836 -0.0487 -0.3909 0.0021 -0.2100 0.1115 -0.1886 0.2046 -0.3390 -0.3742 -0.1119 -0.1970 -0.3390 -0.2637 0.2894 0.0295 0.1343 -0.0575 -0.3028 -0.3809 0.0002 -0.2692 -0.0959 -0.3593 0.1367 -0.0041 0.2088 0.0392 -0.2355 -0.1882 -0.1888 0.0519 0.1439 -0.1117 0.3682 -0.1050 -0.0296 -0.1464 -0.1588 -0.5404 0.0030 -0.0316 -0.1582 -0.2943 -0.5640 -0.1246 -0.1133 0.0050 0.0116 -0.2623 -0.3293 0.0076 -0.1639 -0.2843 -0.5337 0.0355 -0.5059 0.0644 -0.2754 -0.4274 0.1347 -0.1783 0.4087 -0.0890 -0.1513 -0.1492 -0.2107 -0.4024 -0.0959 -0.1784 0.1751 -0.2568 -0.1581 -0.1029 0.2524 -0.1973 -0.2729 -0.2149 -0.0277 -0.0726 -0.0636 -0.0894 -0.5654 -0.1124 -0.2568 -0.2910 -0.3897 -0.1918 -0.1756 -0.4428 -0.0418 0.0086 -0.3306 -0.2856 -0.1898 -0.3579 -0.2198 0.0391 -0.0010 -0.1312 0.0009 0.1106 -0.0939 -0.4470 -0.1426 -0.1748 0.3235 0.3637 -0.3854 -0.1076 -0.2241 0.0947 0.3205 -0.0960 0.1857 -0.2068 -0.4152 -0.3777 -0.4928 0.1879 -0.1609 -0.1060 0.0344 -0.2410 -0.2384 0.1273 -0.1050 -0.1658 0.0253 -0.2137 -0.3622 0.1716 0.0826 -0.3177 0.0944 -0.0560 -0.1088 -0.1462 -0.2455 -0.0555 0.1439 -0.1452 -0.0877 -0.2945 -0.4925 0.0943 -0.3228 0.1097 -0.5276 -0.2068 -0.2778 0.1640 -0.2064 -0.2133 -0.2543 0.1313 -0.0169 -0.0107 0.0558 -0.2382 -0.3065 -0.0992 -0.3163 0.0072 -0.1490 0.0076 0.0391 -0.1969 0.0529 0.0410 -0.0718 -0.2115 0.1903 -0.1566 -0.3415 0.1298 -0.0619 -0.0652 -0.1414 -0.6575 -0.1621 0.0498 -0.1954 -0.2742 -0.2291 -0.2887 -0.0133 0.2165 -0.0861 -0.1793 0.0527 -0.2240 0.0093 -0.1294 -0.1094 -0.0843 -0.1329 0.3152 0.2734 -0.0271 -0.3386 -0.1904 -0.3857 0.1276 0.0057 -0.1233 -0.2079 -0.1157 -0.0624 -0.1648 0.1341 -0.3484 -0.1133 -0.1405 0.0717 -0.1241 -0.0331 -0.2593 -0.0928 -0.5085 -0.2845 -0.2354 0.0968 -0.2742 -0.0812 -0.0152 -0.2941 -0.0230 -0.2153 -0.0577 -0.5114 -0.1473 0.0519 -0.2951 -0.4320 0.3635 -0.3880 0.1546 0.1765 -0.2611 0.5834 -0.2497 0.0361 -0.1838 -0.0717 0.2678 -0.0296 0.0205 0.1644 0.1198 -0.0023 0.0031 -0.0260 -0.0159 0.1111 0.0499 -0.0536 -0.0138 0.0354 -0.0211 0.0925 0.0875 0.0469 -0.0875 0.1935 0.0199 -0.0382 0.0812 0.0034 -0.2168 -0.0978 0.0372 -0.0333 0.0374 0.0015 0.0494 0.0922 -0.0918 -0.0536 0.1023 -0.0560 -0.0291 -0.0145 -0.0448 -0.0223 0.1015 -0.0350 0.0976 -0.0192 -0.0650 -0.0362 0.0225 0.0827 -0.1371 0.0351 -0.0181 0.1006 0.1167 0.0896 0.0491 -0.0217 -0.1128 -0.0359 -0.0438 -0.0624 -0.0189 -0.0766 0.0596 -0.1193 0.0042 0.0136 -0.1350 -0.0131 0.0784 -0.0061 0.0569 -0.1080 0.0013 0.0588 -0.0402 -0.0803 -0.0275 0.0468 -0.0059 -0.0754 -0.1380 0.0538 0.0695 -0.0841 -0.0465 0.0494 -0.1125 -0.0412 0.0260 0.1146 0.0210 -0.0319 0.0130 -0.1552 0.0356 -0.0929 -0.0504 -0.0012 -0.1067 -0.0106 0.1740 0.0110 0.0401 -0.0967 -0.1308 0.0004 0.0047 -0.0413 -0.0357 -0.0455 -0.0414 -0.0731 0.0042 -0.0046 0.0821 0.1060 -0.0106 -0.0450 0.0643 -0.0281 -0.0262 0.0426 -0.0245 -0.0389 -0.0209 -0.0336 0.0942 0.0998 -0.0571 -0.0123 0.0568 0.0984 0.1121 -0.0788 0.0284 -0.0105 -0.0030 -0.0153 -0.0423 0.0468 -0.1107 0.0714 -0.0133 0.0367 -0.1167 -0.0888 0.1007 0.0662 -0.0684 -0.0047 0.0017 -0.0323 -0.0148 0.0574 0.0354 -0.0143 0.0548 0.0104 0.0476 -0.1408 0.0038 -0.0288 -0.0104 0.1055 0.0429 0.1401 -0.0483 -0.0150 0.0790 -0.0687 -0.1195 -0.0189 -0.0479 0.0160 0.0425 -0.0302 -0.0481 -0.0722 0.0308 0.1236 0.0740 -0.1174 -0.0542 -0.0076 0.1416 0.0775 0.1884 -0.1214 -0.0534 -0.0561 0.0081 -0.0081 0.0216 0.0133 -0.0611 0.0391 0.0067 0.0640 -0.0384 -0.0196 0.0738 -0.0014 -0.0181 -0.0618 0.0182 0.0695 -0.1315 -0.1069 0.0079 0.1305 0.0698 -0.0203 0.0490 -0.1055 0.0355 0.1844 0.0102 -0.1427 -0.0112 0.0253 0.0110 -0.0273 -0.0241 0.0149 -0.0387 -0.0220 -0.0458 0.0472 -0.0510 0.1099 0.0484 0.0937 0.0841 0.0037 -0.0002 0.0521 0.1503 -0.0738 0.0980 0.0208 0.0268 0.0838 0.0424 0.0680 -0.0440 -0.0758 -0.0018 -0.1226 0.0783 0.0248 -0.1410 -0.1026 -0.0708 0.0066 -0.0500 0.0091 0.0687 0.0846 -0.0361 -0.0387 [torch.FloatTensor of size 768]), ('module.decoder.decoder_rnns.0.bias_hh', 0.0042 0.1111 -0.3103 -0.1271 -0.1130 -0.0656 -0.0049 -0.0918 0.1188 -0.1934 -0.0494 -0.1164 0.0008 0.0061 -0.0870 0.0299 -0.1979 -0.0144 -0.3335 -0.1234 -0.1466 -0.1000 -0.0188 -0.0645 0.0232 0.0021 -0.0752 -0.1038 -0.0668 0.0727 0.0252 -0.0399 0.0318 0.0879 0.0672 -0.1222 -0.2979 -0.2387 -0.0111 0.0526 -0.0813 -0.1491 -0.0286 -0.0322 0.0231 -0.1679 -0.0139 -0.0160 -0.0515 -0.1932 -0.1135 -0.2193 -0.0541 -0.1310 -0.2596 -0.2688 0.0214 0.0701 0.0096 -0.1421 -0.1155 -0.2064 0.0991 -0.1400 -0.0249 0.0176 0.0394 -0.0096 -0.1243 -0.1579 -0.0926 -0.2307 -0.1207 -0.1375 -0.0572 -0.0335 0.0136 -0.2447 -0.0035 -0.1106 -0.2063 -0.1745 0.0050 -0.2409 -0.2786 -0.0014 0.0102 -0.2031 -0.1204 -0.0191 -0.1959 -0.1792 -0.0660 0.0503 0.1396 -0.0541 -0.2321 0.0733 -0.1332 0.1345 0.1285 -0.2291 -0.3353 -0.1185 -0.0281 -0.0944 0.0002 -0.0867 -0.1763 0.0847 -0.3148 -0.0184 -0.0744 0.0178 -0.2249 0.2073 -0.0990 -0.0539 -0.1523 -0.0237 0.0022 -0.0947 -0.0867 0.0824 -0.0821 -0.1758 -0.1056 -0.0506 -0.0606 -0.0911 -0.1624 0.0703 -0.0820 0.0506 -0.1328 -0.0933 0.0788 0.0181 -0.0469 -0.1765 0.0647 -0.2340 -0.0692 -0.0387 -0.1666 -0.2138 0.0242 0.0346 0.0815 -0.1328 -0.0997 -0.2159 -0.1835 -0.0537 -0.0824 -0.1758 -0.0042 -0.0286 -0.0872 0.0690 -0.3654 0.0983 -0.0576 -0.0149 -0.1035 -0.1025 -0.2634 -0.1861 -0.1264 -0.0306 0.1751 0.0535 -0.1058 -0.0394 0.0141 -0.1536 -0.1000 -0.0354 -0.0880 -0.0140 0.0217 0.0326 -0.0266 0.0421 -0.0979 0.0594 -0.2280 -0.0128 -0.0354 -0.1245 -0.1304 -0.3170 -0.4053 -0.1311 -0.2920 -0.1561 -0.0170 -0.1150 -0.1055 -0.0096 -0.2044 -0.0082 -0.1748 0.0607 0.1315 -0.0368 -0.1580 -0.0707 0.0110 -0.0662 -0.0059 -0.0127 -0.0637 -0.0410 -0.0695 -0.0809 -0.0061 -0.2834 -0.1259 -0.1546 -0.2083 -0.1456 0.0344 0.0436 -0.0591 -0.1168 -0.0742 -0.1912 0.0213 -0.1858 -0.2408 0.0388 -0.2000 0.0976 -0.2520 -0.1265 -0.1548 -0.0887 -0.0697 -0.1213 -0.0624 -0.2007 -0.1163 0.0224 -0.1230 0.0376 0.1997 -0.1378 -0.0530 -0.1542 -0.2331 -0.1263 0.1647 -0.0709 -0.1749 0.0214 0.1705 -0.1382 -0.2513 -0.0267 -0.3754 -0.3117 -0.3513 -0.1398 -0.0325 -0.3871 -0.0803 -0.2171 0.0714 -0.2038 0.2390 -0.3954 -0.3988 -0.0583 -0.2001 -0.2779 -0.2794 0.3728 0.0373 0.0427 -0.0305 -0.3127 -0.4093 -0.0199 -0.2454 -0.0989 -0.4193 0.0441 -0.0247 0.1405 -0.0003 -0.1877 -0.2137 -0.1100 -0.0063 0.1514 -0.1429 0.4215 -0.1083 -0.0198 -0.1503 -0.1437 -0.4363 0.0391 -0.1130 -0.2549 -0.3494 -0.5362 -0.1038 -0.0823 -0.0448 0.0560 -0.2175 -0.3661 -0.0451 -0.1249 -0.2658 -0.5777 0.0566 -0.5527 0.1006 -0.2796 -0.3901 0.1461 -0.2739 0.4560 -0.1693 -0.2087 -0.2006 -0.2097 -0.4541 0.0022 -0.1919 0.1777 -0.2549 -0.1393 -0.1658 0.2293 -0.2756 -0.2081 -0.1765 0.0023 -0.0410 0.0412 -0.0355 -0.6104 -0.1107 -0.2472 -0.3081 -0.3861 -0.1937 -0.2567 -0.3899 0.0115 0.0708 -0.3805 -0.2083 -0.2268 -0.3510 -0.2104 -0.0575 0.0838 -0.1000 -0.0301 0.1804 -0.0747 -0.3950 -0.1388 -0.1143 0.3405 0.3727 -0.3108 -0.1403 -0.1836 0.0327 0.3649 -0.1736 0.2483 -0.2233 -0.3806 -0.3548 -0.5686 0.1409 -0.1579 -0.1564 0.0969 -0.1982 -0.2221 0.1137 -0.0958 -0.2466 0.0216 -0.2015 -0.2721 0.2536 0.1372 -0.2389 -0.0116 -0.0861 -0.0796 -0.1176 -0.2535 -0.0471 0.1410 -0.1576 -0.0605 -0.2079 -0.4816 0.0724 -0.3252 0.1362 -0.4509 -0.1791 -0.2064 0.1147 -0.1740 -0.1169 -0.1542 0.2077 0.0161 0.0644 0.1317 -0.2694 -0.2627 -0.0779 -0.2949 0.0189 -0.0914 -0.0288 -0.0287 -0.1364 -0.0282 0.1261 -0.1099 -0.2123 0.1912 -0.2073 -0.3168 0.1360 -0.0579 -0.1098 -0.0925 -0.6834 -0.0782 0.0324 -0.2223 -0.3597 -0.2253 -0.2625 -0.0561 0.2256 -0.1943 -0.1527 -0.0046 -0.2793 -0.0383 -0.0812 -0.1225 -0.0634 -0.1258 0.3138 0.1624 0.0012 -0.2708 -0.1231 -0.3206 0.0504 -0.0592 -0.0843 -0.2638 -0.1014 -0.1095 -0.1662 0.1683 -0.4395 -0.1191 -0.1543 0.0179 -0.1179 -0.0284 -0.2038 -0.1136 -0.4138 -0.2548 -0.3018 0.1471 -0.2990 -0.0374 0.0073 -0.4089 0.0363 -0.2930 -0.1435 -0.4855 -0.1557 0.0815 -0.1920 -0.3947 0.3894 -0.3425 0.0469 0.1552 -0.3287 0.5439 -0.3570 -0.0456 -0.2012 -0.0602 0.2460 -0.0841 0.0327 0.1146 0.0026 0.0211 -0.0018 -0.0020 -0.0151 0.0170 0.0712 -0.9146 0.0990 -0.0730 -0.0422 0.0213 0.0218 0.3115 -0.0326 0.2234 -0.0092 0.0164 0.2102 0.2056 -0.4255 -0.3124 0.0089 0.0482 -0.8111 0.9576 0.1418 0.2175 -0.1597 -0.0254 0.1397 -0.0067 -0.0704 -0.2137 -0.0762 0.0501 0.0640 -0.1101 0.0945 -1.4477 -0.0689 0.5754 0.1268 0.6138 -0.1677 0.0381 0.6246 -0.8393 0.4178 0.0601 0.0632 -0.1121 -0.0552 0.0443 0.0054 -0.0325 0.1100 0.8461 -0.1402 -0.5887 0.0491 0.2759 -0.0163 0.1433 0.1522 0.0863 -0.0041 -0.0554 -0.0603 0.0465 -0.1433 0.0240 0.0014 -0.2563 -0.7774 -0.0132 -1.0181 0.0069 0.9774 -0.3694 0.1139 -0.0422 -0.1354 -0.0217 0.2823 0.1224 0.0714 0.0818 0.1184 -0.0363 -0.1562 0.2061 -0.0570 0.3028 -0.1641 -0.0463 0.0938 0.1599 -0.5191 -0.8698 -0.1021 0.0880 0.3685 0.6563 -0.1343 -0.0177 0.1332 -0.0198 0.0474 -0.0175 -0.0540 0.8769 -0.0693 -0.0076 0.0311 -0.1253 -0.0898 0.2434 -0.0222 -0.0348 -0.7679 0.0044 0.6514 -0.5531 -0.0947 -0.1544 0.0394 0.5309 -0.7383 0.1689 0.0608 -0.0255 0.0057 -1.6258 0.0028 -0.0664 0.0916 -0.1107 -0.0384 -0.0042 -0.0933 -0.0590 -0.2738 -0.0114 -0.0263 0.0572 0.0355 -0.2665 0.0462 0.0197 0.0074 0.1245 0.0309 0.2234 0.1179 -0.1558 0.6181 -0.5217 -0.0059 0.3696 -0.0557 0.0426 0.2030 -0.3931 0.0701 -0.0442 -0.2549 0.3216 -0.0748 0.0191 0.1211 -0.0072 0.5377 -0.6181 -0.0973 0.1911 0.0109 -0.1307 -0.4863 0.0556 0.0896 0.9876 -0.5548 -0.2935 -0.0198 -0.0405 -0.0869 -0.0429 -0.0023 0.2128 -0.0045 0.0467 -0.0113 -0.0384 0.0080 -0.0161 0.6916 -0.0166 0.0240 -0.0277 0.2721 0.0821 -0.1025 -0.0044 -0.2611 0.1824 0.0853 -0.1366 0.3852 -0.1902 0.0962 0.1037 0.0067 -0.3831 0.0098 -0.1219 0.2122 -0.0188 -0.1904 0.0031 -0.1028 -0.0183 -0.1007 -0.8012 -0.0116 -1.4384 0.0189 0.0253 -0.0159 -0.0389 -0.0802 0.3610 0.1274 0.7281 -0.0443 0.0524 -0.0328 0.6525 0.8515 0.0932 -0.6413 0.0486 0.0461 -0.1145 0.1472 0.0492 -0.8275 -0.0682 -0.2112 -0.0395 -0.0588 0.0417 0.8943 -0.0396 -0.2103 -0.0018 [torch.FloatTensor of size 768]), ('module.decoder.decoder_rnns.1.weight_ih', -3.6364e-01 -2.6506e-01 7.8591e-01 ... -1.3263e-01 -5.3440e-03 -4.3392e-01 -2.1735e-01 -1.8720e-01 4.5403e-01 ... -9.5811e-01 4.1448e-01 -3.0742e-01 2.8933e-02 -4.2304e-02 -1.1554e-01 ... -3.4010e-02 1.6157e-01 -9.3898e-02 ... ⋱ ... 3.0325e-02 1.1506e-01 9.0589e-02 ... -8.4466e-01 2.5002e-01 -6.8798e-02 1.7801e-02 -2.8568e-01 4.4764e-01 ... 7.1204e-02 -8.3421e-01 1.2687e-01 1.2704e-01 4.8787e-02 -4.5239e-02 ... 2.0003e-02 5.0038e-03 -9.1381e-01 [torch.FloatTensor of size 768x256]), ('module.decoder.decoder_rnns.1.weight_hh', -3.1674e-01 1.5555e-01 1.0488e-01 ... 3.6669e-02 1.9682e-02 -4.6027e-03 -1.2635e-01 1.2078e-01 2.6343e-02 ... -1.6014e-01 5.4211e-02 -1.3663e-01 -3.4995e-01 4.0837e-01 1.4931e-01 ... 8.0041e-02 -2.0933e-01 4.1624e-02 ... ⋱ ... -4.4772e-02 -6.1291e-02 -7.8127e-02 ... 6.5423e-02 -2.3841e-01 3.9626e-02 7.4623e-02 -1.1501e-01 -5.4775e-02 ... 1.4159e-01 -7.3516e-01 3.5701e-02 -5.0545e-02 1.0573e-01 9.4021e-02 ... -5.1631e-02 -5.5289e-02 -2.7928e-01 [torch.FloatTensor of size 768x256]), ('module.decoder.decoder_rnns.1.bias_ih', 0.0161 -0.2106 -0.1567 -0.0901 -0.0065 -0.1537 0.0150 -0.0654 0.0950 0.0379 -0.2259 0.0302 0.0128 -0.0933 -0.1179 -0.1413 -0.1486 0.0902 -0.0666 -0.1984 -0.0714 -0.1478 -0.1638 -0.2794 -0.2149 -0.0315 -0.2278 -0.0413 -0.2394 -0.2310 -0.0373 -0.1389 -0.3094 -0.1109 -0.1711 0.0158 -0.2210 -0.2303 -0.0105 -0.0883 -0.0134 -0.1149 -0.0812 0.0202 -0.1412 -0.1242 -0.0948 0.0356 -0.1055 -0.0592 0.1496 0.0591 -0.3036 -0.2505 -0.0464 0.1181 -0.0155 -0.1671 0.0203 -0.1516 0.0739 -0.2100 0.0965 -0.0370 -0.1614 -0.0380 0.0549 -0.0357 -0.0849 -0.0465 -0.0845 0.0470 0.0219 -0.0216 -0.0565 -0.2434 -0.0487 -0.1201 -0.1402 -0.0420 -0.2527 -0.0892 -0.1734 -0.2498 -0.0974 0.0880 -0.2854 0.0772 -0.0818 -0.1870 -0.0949 -0.1218 -0.1674 -0.1651 0.0324 -0.1301 -0.3811 0.0123 -0.0809 -0.1178 0.3110 -0.0853 -0.1555 0.0774 -0.0949 -0.0849 -0.0677 -0.1184 -0.0709 -0.0861 -0.0828 -0.2025 -0.1450 -0.2319 -0.3776 -0.1650 -0.2081 -0.1478 -0.1336 -0.1770 -0.0088 -0.3239 0.0586 -0.1410 -0.2162 -0.0972 0.0047 -0.1156 -0.1884 -0.1813 -0.0480 0.0568 -0.1547 0.0292 -0.2348 -0.1502 -0.1792 -0.0679 0.0075 -0.0511 -0.0995 -0.0050 -0.1246 0.0481 0.0052 -0.1969 0.0770 0.0025 -0.1349 -0.1334 -0.0663 -0.1144 -0.2174 -0.1507 0.0506 -0.3121 -0.0684 -0.0428 0.0144 0.1166 -0.1358 -0.0253 -0.2588 -0.1596 -0.2702 -0.1665 -0.1578 0.1028 -0.2309 -0.1845 -0.0786 0.0341 -0.1347 -0.0432 -0.0907 -0.1125 0.0401 -0.1313 -0.1104 0.0108 -0.0418 -0.1488 -0.1767 0.0035 -0.1041 -0.1411 -0.1741 -0.1440 -0.2116 -0.0333 0.1046 -0.1199 0.0020 0.1167 -0.1293 -0.1000 -0.0590 -0.1810 -0.0605 -0.0159 -0.1939 0.0644 0.3138 -0.2611 -0.0374 -0.0624 0.0077 -0.0710 -0.0554 -0.0316 -0.2957 -0.2357 -0.0607 -0.2450 0.0108 0.0031 -0.0522 -0.0200 -0.0565 -0.0321 -0.0489 -0.3216 0.0163 -0.2290 -0.1914 -0.2923 -0.1887 -0.0709 -0.3365 0.0693 -0.2024 0.0565 0.0519 -0.1501 0.0095 -0.1000 0.0121 0.0034 -0.1042 0.0343 -0.2012 -0.0273 -0.1544 -0.1012 0.0563 -0.0843 -0.3579 -0.0363 -0.1358 -0.0825 -0.0118 0.0642 -0.3474 -0.1091 -0.3064 -0.2555 -0.2866 0.1826 -0.1787 -0.1699 -0.2186 -0.1440 -0.1974 -0.1488 0.0242 -0.0983 -0.0321 0.1103 -0.1252 -0.3090 -0.1676 -0.2535 0.1397 -0.4286 -0.1312 -0.1856 -0.0058 0.0640 0.0455 -0.0702 -0.2094 -0.2680 -0.3292 -0.3329 -0.1218 -0.4625 -0.4592 -0.0138 -0.2065 -0.4887 -0.7757 -0.3677 -0.4162 -0.4135 -0.0597 -0.0408 -0.2657 -0.0848 -0.3341 0.0131 -0.0718 -0.2522 -0.1699 0.0643 -0.1576 -0.3075 -0.2884 0.2457 -0.0677 0.0426 -0.0335 0.1525 0.0226 0.1067 -0.1645 -0.1681 -0.3755 -0.0258 -0.2320 0.0103 0.2279 -0.1818 -0.4434 -0.2847 -0.1631 -0.1206 -0.2189 -0.1171 -0.1472 -0.1508 -0.0977 -0.2185 -0.1634 -0.2713 0.0024 -0.1060 -0.3060 -0.2532 0.0156 -0.4579 -0.2077 -0.2801 -0.2113 -0.1879 -0.2466 -0.3033 -0.0793 0.1780 0.0601 -0.5718 -0.3377 -0.2801 -0.4055 0.1178 0.0063 -0.0825 -1.0610 -0.1387 -0.3935 -0.2567 -0.2262 -0.0690 -0.0822 -0.0991 0.0844 -0.0199 0.1050 -0.0393 -0.1375 -0.1090 0.0774 -0.1014 -0.2368 -0.0461 0.1315 -0.1751 -0.1407 -0.3758 0.0634 -0.0199 -0.0257 -0.0853 0.0903 -0.1848 0.0822 -0.2187 -0.3271 -0.0986 -0.3356 0.0497 -0.0158 -0.3689 -0.2620 -0.2978 -0.1648 -0.1171 -0.1028 -0.0078 0.0092 0.1768 -0.2913 -0.1046 0.2401 -0.1752 -0.2658 -0.2075 -0.0342 -0.2617 -0.4422 -0.0732 -0.2689 -0.3803 -0.3199 -0.0777 -0.1500 0.1703 0.1294 -0.1190 0.0926 -0.1979 -0.1918 -0.0786 -0.0187 0.1721 -0.1686 -0.1133 -0.5045 -0.2371 -0.1655 -0.2561 -0.3836 -0.3589 -0.1271 -0.2759 -0.0886 -0.3338 0.0147 -0.2431 -0.0960 -0.1151 -0.0702 0.0363 -0.0362 -0.1142 -0.0480 -0.2897 -0.0726 -0.1873 -0.1961 0.3917 0.0731 -0.1447 0.0337 0.0892 0.0501 -0.1371 -0.0466 -0.3429 0.1212 -0.0822 -0.1891 -0.4294 -0.1515 -0.2784 0.0438 -0.0686 -0.1327 -0.6258 -0.1193 -0.1879 -0.1034 -0.1172 -0.1994 0.2450 -0.0538 -0.0365 0.1052 -0.0631 -0.1641 -0.2942 -0.1621 -0.2282 -0.1330 -0.2956 -0.1734 -0.3522 0.0242 0.0362 -0.0066 -0.1580 -0.1177 0.1182 -0.1358 -0.3128 -0.0754 -0.1553 0.3873 -0.1631 -0.1126 -0.0638 0.1006 -0.1420 -0.3438 0.1019 -0.2894 -0.3294 -0.2455 -0.0723 -0.5689 -0.3298 0.3349 -0.2919 -0.0271 0.0006 0.0410 0.0224 -0.0159 -0.0622 -0.0453 0.0096 0.0816 0.0147 0.0579 -0.0014 0.0989 0.0776 -0.0299 -0.0200 0.0143 -0.0280 0.0763 0.0570 -0.0043 0.0898 0.0591 -0.0897 0.0078 -0.0049 -0.0673 0.1265 0.0189 -0.0323 -0.0259 0.0235 -0.0521 -0.1112 -0.0040 0.0540 -0.0421 0.1141 0.0293 -0.0700 -0.0463 -0.0511 0.0708 0.0453 0.0397 -0.0245 0.0004 0.0540 -0.0515 0.0936 0.0049 0.0658 0.0072 -0.0174 0.0268 0.0221 -0.0423 -0.0472 0.0065 0.0357 -0.0367 0.0257 -0.0182 0.0242 0.0223 -0.0066 -0.0580 -0.1144 -0.0070 -0.0524 -0.0113 0.0243 0.0029 -0.0222 -0.0513 -0.0407 0.0707 0.0641 0.0241 0.0237 -0.0146 0.0505 -0.0242 0.0908 0.0525 -0.0110 -0.0093 0.0529 -0.0539 -0.0636 -0.0440 -0.0540 0.0253 -0.0503 -0.0127 -0.0450 0.0331 -0.0559 0.0619 -0.0694 0.0036 -0.0033 0.0851 0.0391 0.0945 -0.0290 0.0497 0.0378 0.0257 0.1128 -0.0048 -0.0476 0.0217 0.0472 -0.0109 -0.0200 0.0862 -0.0244 0.0131 0.0291 0.0182 0.0783 0.0798 -0.0112 -0.0029 0.0435 0.0223 -0.0374 0.0301 -0.0166 0.0427 0.0372 0.0344 -0.0577 -0.0557 -0.0718 -0.0424 -0.0053 0.0446 0.0384 0.0382 0.0618 0.0631 -0.0040 -0.0489 -0.0479 0.0454 0.0713 -0.0754 0.0602 -0.0309 0.0088 0.0690 0.0244 0.0634 -0.0526 -0.0353 -0.0173 -0.0457 -0.0557 0.0128 0.0224 0.0060 -0.0155 -0.0410 -0.0239 -0.0538 -0.0239 -0.0003 -0.0371 0.0510 -0.0597 -0.0236 -0.0809 0.0212 0.0308 0.0259 0.0005 0.0601 0.0140 0.0893 0.0021 0.0550 0.0050 0.0266 0.0980 -0.0430 0.1279 0.0411 0.0152 -0.0121 0.0384 -0.0195 0.0058 0.0138 0.0329 0.0565 0.0095 -0.0037 -0.0056 -0.0489 0.0723 0.0207 -0.0042 -0.0027 0.0249 0.0578 -0.0596 -0.0084 -0.0575 0.0052 0.0358 0.0892 -0.0271 -0.0473 -0.0053 0.0653 -0.0098 0.0424 -0.0312 -0.0554 -0.0118 0.0423 -0.0367 0.0336 0.0107 0.0195 0.0705 -0.0218 0.0099 -0.1557 0.0597 0.0458 -0.0155 -0.0662 0.0109 0.0228 -0.0491 0.0640 -0.0082 0.0067 0.0677 0.0180 -0.1119 -0.0287 -0.0505 -0.0164 -0.0862 0.0353 0.0347 -0.0385 -0.0876 -0.0662 0.0427 -0.0347 -0.0592 [torch.FloatTensor of size 768]), ('module.decoder.decoder_rnns.1.bias_hh', 0.0343 -0.2205 -0.1759 -0.0543 -0.1046 -0.1600 -0.0251 -0.0729 0.0555 0.0757 -0.1488 0.0289 0.0294 -0.0351 -0.1385 -0.1604 -0.1146 0.0331 -0.1539 -0.1825 -0.0130 -0.0408 -0.0533 -0.2980 -0.2090 0.0510 -0.2458 -0.0469 -0.1718 -0.2487 0.0683 -0.1317 -0.2785 -0.1509 -0.2421 0.0064 -0.2040 -0.1809 0.0043 -0.0727 -0.0423 -0.1522 -0.1706 -0.0559 -0.0913 -0.0576 -0.0368 0.0890 -0.1199 -0.0260 0.1055 -0.0416 -0.3200 -0.1624 -0.0024 0.1192 -0.0467 -0.1662 -0.0506 -0.1071 0.0309 -0.1860 0.1392 0.0104 -0.1818 -0.1027 0.0228 -0.0738 -0.1512 -0.0057 -0.1338 0.1350 -0.0071 -0.0731 0.0125 -0.2226 0.0066 -0.2240 -0.0826 0.0208 -0.2509 -0.1016 -0.0789 -0.2964 -0.0714 -0.0189 -0.2788 0.0617 -0.0546 -0.2184 -0.0392 -0.0767 -0.1837 -0.1876 0.0068 -0.1204 -0.3699 0.0203 -0.0665 -0.0626 0.2156 -0.0202 -0.1446 0.0053 -0.1995 0.0019 -0.1375 -0.1398 -0.0581 -0.1123 -0.0599 -0.2106 -0.2039 -0.2349 -0.4244 -0.1016 -0.2071 -0.1010 -0.0527 -0.2783 -0.0990 -0.3034 0.0274 -0.0464 -0.2196 -0.1248 0.0454 -0.1037 -0.2293 -0.1677 -0.0734 0.0381 -0.1746 -0.0282 -0.3104 -0.0508 -0.2080 -0.1150 0.0742 -0.0503 -0.1552 -0.1185 -0.1432 0.0289 0.0230 -0.2688 0.0384 0.0051 -0.2207 -0.1441 -0.1035 -0.0416 -0.2482 -0.1536 0.0599 -0.3514 -0.0670 -0.0691 0.0697 0.0022 -0.0792 -0.0655 -0.1497 -0.1727 -0.1993 -0.1741 -0.1507 0.0256 -0.2359 -0.1115 -0.0119 -0.0390 -0.2418 -0.0209 -0.0632 -0.1474 -0.0096 -0.0870 -0.0659 -0.0452 -0.1415 -0.1103 -0.1115 -0.0588 -0.1483 -0.1410 -0.1995 -0.1629 -0.2313 -0.1055 0.0205 -0.0544 -0.0922 0.1010 -0.0542 -0.0307 -0.0544 -0.1246 -0.0428 0.0680 -0.1804 0.0182 0.2025 -0.2346 -0.1066 -0.1023 0.0508 -0.0058 -0.0669 -0.0128 -0.2045 -0.2560 -0.0448 -0.2328 -0.0610 -0.0236 -0.0091 -0.0614 -0.0297 -0.1093 -0.0025 -0.3452 0.0339 -0.1429 -0.1356 -0.2519 -0.1821 -0.0945 -0.3060 -0.0134 -0.2135 0.1063 0.0048 -0.0873 0.0461 -0.0530 0.0275 0.0654 -0.1037 0.0794 -0.1844 -0.1053 -0.1306 -0.1436 0.0886 -0.0442 -0.2772 -0.0387 -0.2094 -0.0317 -0.0487 0.0835 -0.2631 -0.0988 -0.2111 -0.1708 -0.2594 0.1489 -0.2586 -0.1723 -0.1183 -0.1991 -0.2226 -0.1170 0.1049 -0.1086 -0.0038 0.0941 -0.0149 -0.2777 -0.1084 -0.1581 0.1168 -0.4400 -0.1210 -0.1577 0.0378 0.1086 0.0963 -0.0075 -0.1803 -0.2887 -0.2981 -0.3096 -0.0898 -0.5208 -0.4997 -0.1008 -0.2818 -0.4848 -0.8144 -0.3606 -0.3231 -0.3712 -0.0826 0.0635 -0.3109 -0.1405 -0.3643 -0.0223 -0.0984 -0.2161 -0.0859 0.0105 -0.2006 -0.3644 -0.3318 0.2235 -0.0831 0.0963 -0.0246 0.0886 0.0322 0.0677 -0.0947 -0.2206 -0.3130 -0.0638 -0.2915 0.0181 0.2332 -0.1881 -0.4241 -0.3320 -0.1552 -0.2024 -0.2510 -0.0858 -0.1571 -0.1714 -0.1187 -0.2142 -0.1673 -0.3569 0.0169 -0.1147 -0.2604 -0.1895 0.0421 -0.4577 -0.2308 -0.2758 -0.2051 -0.2527 -0.2452 -0.2647 -0.0610 0.2031 0.0937 -0.5974 -0.2579 -0.2465 -0.3053 0.0942 0.0322 -0.1968 -1.1592 -0.1901 -0.4790 -0.2278 -0.2477 -0.1650 -0.0122 -0.0584 0.0147 0.0447 0.0728 -0.0359 -0.1528 -0.0141 0.0775 -0.1813 -0.2707 -0.0423 0.1164 -0.1781 -0.1497 -0.4498 -0.0401 -0.1371 0.0586 -0.0882 0.1225 -0.0984 0.1207 -0.1802 -0.2274 -0.1551 -0.2592 -0.0032 -0.0738 -0.4396 -0.3603 -0.3281 -0.2016 -0.1089 -0.1903 0.0015 0.0555 0.2191 -0.2732 -0.1100 0.1814 -0.0939 -0.2828 -0.1997 -0.0086 -0.1791 -0.4105 -0.1703 -0.2484 -0.3473 -0.2770 -0.0827 -0.2055 0.2116 0.0806 -0.0951 0.1163 -0.1722 -0.2641 0.0257 -0.0635 0.1146 -0.1596 -0.0988 -0.5993 -0.2583 -0.2489 -0.3199 -0.3233 -0.3933 -0.1140 -0.3902 -0.0218 -0.3324 0.0244 -0.2429 -0.1285 -0.1399 -0.0639 0.0139 0.0145 -0.0901 -0.0889 -0.2751 -0.1627 -0.2586 -0.1496 0.3225 0.1047 -0.1462 0.1028 0.0736 0.0093 -0.1218 -0.0591 -0.2482 0.0617 -0.0374 -0.1418 -0.3741 -0.1243 -0.2819 0.0454 -0.0003 -0.1040 -0.6222 -0.0681 -0.1977 -0.0129 -0.1055 -0.1850 0.1884 -0.0491 -0.0594 0.0235 -0.1445 -0.2277 -0.2087 -0.1417 -0.2355 -0.2547 -0.2894 -0.1692 -0.2899 -0.0690 -0.0026 -0.0061 -0.1618 -0.2000 0.0268 -0.1220 -0.2953 -0.0996 -0.1603 0.3013 -0.1819 0.0007 -0.1308 0.0423 -0.1589 -0.3795 0.1657 -0.2544 -0.2691 -0.2338 -0.0087 -0.5695 -0.2870 0.2887 -0.3195 0.6581 -0.1343 0.0633 -0.0689 0.0666 -0.0068 -0.2195 -0.1215 0.5227 0.0606 0.0053 -0.0376 0.0531 -0.0131 -0.0115 0.0475 -0.0162 0.0722 -0.0447 0.0016 -0.0067 0.0684 0.0160 -0.0604 0.1673 -0.0504 -0.0696 0.0741 0.0518 0.2055 0.0799 0.0108 -0.1300 -0.2944 -0.0724 0.0093 0.0684 0.1222 0.5372 -0.0267 -0.4729 -0.0197 -0.2383 -0.0014 0.0198 0.1358 0.6865 0.0791 0.0291 -0.0594 0.0305 0.0072 0.0318 0.0038 0.1854 -0.1952 0.0003 -0.0036 0.1036 0.1039 0.0105 0.0012 -0.0858 -0.0073 0.0589 0.0402 -0.0344 -0.0479 0.0019 0.0362 0.0719 -1.6178 0.1029 -1.5088 0.0293 -0.0899 0.9377 0.0752 0.0553 0.0616 0.0100 -0.0749 -0.0305 -0.2133 -0.0395 1.6026 -0.0588 -0.0054 0.0482 -0.2315 -0.0295 -0.2107 0.1377 -0.1784 0.0777 -0.4296 -0.0197 0.0128 0.0665 -0.0302 0.0309 0.2284 1.3962 0.0285 0.0996 -0.0420 0.0581 -0.0585 -0.0662 -0.0241 -0.0395 -0.0429 0.1045 0.1187 0.0248 -0.0720 0.0664 -0.0156 0.0216 -0.3421 0.0187 0.1154 -0.0078 0.0833 0.0151 -0.0610 -0.0475 -0.1498 -0.0867 0.1174 -0.0104 0.1037 -0.3127 -0.0403 -0.0522 0.0026 -0.0765 0.4049 -0.1799 0.1748 -0.0341 0.1855 0.0352 0.0172 0.0335 -0.0136 0.1373 0.1667 -0.7157 0.0535 0.1069 -0.2175 -0.2596 0.0062 -0.0110 -0.0409 -0.9490 -0.1014 -0.1753 0.1308 -0.0018 0.1232 0.0663 -0.0458 -0.1241 -0.0379 0.0233 -0.0178 -0.0743 -0.0754 0.0997 -0.0881 -0.0222 0.2767 0.1961 -0.2217 -0.0473 -0.1927 0.2394 0.0497 0.1041 0.0031 -0.0140 -0.1149 -0.0349 -0.0570 -0.0878 0.0808 0.0906 -0.0186 -0.0714 -0.4216 0.0257 0.0696 -0.0053 -0.0602 0.0716 0.0157 0.5212 0.0040 0.1328 0.0821 -0.0231 -0.0143 -0.0797 -0.1586 -0.0039 -0.1710 -0.0004 -0.1199 0.0152 0.1131 -0.0531 0.1532 -0.0085 -0.1425 -0.0158 -0.1678 0.0270 -0.0502 -0.0820 0.0701 0.2335 -0.0213 0.2500 0.0314 -0.1142 0.1697 0.2488 0.1649 0.0025 -0.0153 -0.1582 -0.0162 -0.0760 0.0152 -0.1606 0.0137 -0.0789 0.0946 0.1557 -1.0529 0.0398 0.0007 -0.6346 -0.0021 -0.0241 -0.1064 -0.1453 0.1590 0.0571 -0.0148 0.0049 0.1183 0.0900 0.0004 [torch.FloatTensor of size 768]), ('module.decoder.proj_to_mel.weight', -1.3074e-03 -2.4511e-03 4.0127e-03 ... -1.8974e-04 -3.5570e-03 -1.0731e-02 1.6458e-04 -3.4461e-03 -1.7653e-02 ... -1.2636e-03 -2.7327e-04 1.3112e-02 -4.7382e-03 2.9522e-03 -3.0874e-02 ... 2.8109e-04 -1.5848e-03 -6.4812e-03 ... ⋱ ... -4.6943e-03 4.4080e-03 -1.1201e-02 ... 1.1059e-01 -9.0196e-04 1.6526e-02 -6.3213e-03 5.4862e-03 -4.3771e-03 ... 1.0128e-01 -4.8409e-03 1.2473e-02 -2.1717e-03 4.6354e-03 -1.0125e-02 ... 9.4980e-02 -1.4286e-03 2.9772e-02 [torch.FloatTensor of size 400x256]), ('module.decoder.proj_to_mel.bias', 1.00000e-02 * 0.0876 0.2869 0.7675 1.1309 1.3614 1.3701 1.3839 1.4532 1.4647 1.5598 1.5797 1.6161 1.6791 1.6373 1.5957 1.5270 1.5398 1.4873 1.4737 1.4344 1.3897 1.3177 1.2835 1.2909 1.2791 1.2945 1.2757 1.2166 1.2300 1.1754 1.1505 1.1620 1.2075 1.2446 1.2896 1.2816 1.3150 1.3853 1.4304 1.4351 1.3969 1.3827 1.3353 1.3309 1.3441 1.3446 1.3858 1.4163 1.4484 1.5118 1.5367 1.5440 1.4973 1.4682 1.4973 1.5031 1.5215 1.5343 1.5256 1.5085 1.4796 1.4687 1.4534 1.4743 1.4257 1.3159 1.1628 1.0977 1.0931 1.0937 1.2023 1.2768 1.3716 1.4418 1.4656 1.4682 1.3487 1.2776 1.1769 0.6768 -0.0025 0.3184 0.7693 1.1664 1.2635 1.3745 1.4168 1.4754 1.4790 1.5083 1.6034 1.5930 1.6553 1.6128 1.5650 1.5256 1.5177 1.4417 1.4139 1.3828 1.3538 1.3233 1.2828 1.2898 1.3020 1.2633 1.2407 1.2393 1.2278 1.1765 1.1779 1.1244 1.1394 1.1836 1.2727 1.3140 1.3260 1.3680 1.3976 1.3810 1.3369 1.3492 1.3322 1.3065 1.2844 1.3270 1.3645 1.4034 1.4259 1.4383 1.4636 1.4869 1.4434 1.4048 1.4612 1.4850 1.4780 1.5197 1.4922 1.4267 1.4171 1.4324 1.4412 1.4399 1.3997 1.2464 1.1530 1.0524 1.0748 1.0952 1.1387 1.2539 1.3213 1.3723 1.4633 1.4234 1.2991 1.2339 1.0897 0.6178 -0.0179 0.3282 0.7514 1.1336 1.3210 1.3531 1.3454 1.4746 1.4648 1.5131 1.6053 1.6047 1.6204 1.6006 1.5564 1.5380 1.5117 1.4441 1.4121 1.3964 1.3132 1.2740 1.2346 1.2087 1.2822 1.2630 1.2516 1.2121 1.1918 1.1358 1.1214 1.1183 1.1306 1.1800 1.2391 1.2644 1.3117 1.3605 1.4171 1.3564 1.3415 1.3094 1.2988 1.2651 1.2902 1.3181 1.3491 1.3242 1.4091 1.4292 1.4441 1.4335 1.4005 1.3585 1.3968 1.4410 1.4538 1.4958 1.4630 1.4457 1.3883 1.3910 1.3679 1.3587 1.3276 1.2073 1.0925 1.0449 1.0399 1.0732 1.1572 1.2606 1.3236 1.3726 1.4031 1.3764 1.3142 1.2088 1.0817 0.5831 0.0094 0.2855 0.7391 1.1503 1.2671 1.3340 1.3187 1.3782 1.4229 1.4405 1.5457 1.5796 1.5900 1.5767 1.5167 1.4924 1.4926 1.4209 1.3982 1.3704 1.3177 1.2525 1.2445 1.2593 1.2587 1.2303 1.2157 1.1921 1.1971 1.1388 1.1351 1.1308 1.1380 1.1683 1.2366 1.2392 1.2875 1.3089 1.3943 1.3405 1.3329 1.3344 1.3163 1.2668 1.3138 1.3248 1.3536 1.3607 1.4112 1.4539 1.4694 1.4653 1.4381 1.4191 1.4100 1.4421 1.4614 1.4395 1.4460 1.3918 1.3694 1.3848 1.4002 1.3500 1.3336 1.1924 1.0975 1.0437 1.0253 1.1050 1.1091 1.2645 1.3191 1.3368 1.4045 1.3772 1.2685 1.1781 1.0324 0.6481 -0.0258 0.3058 0.7237 1.1226 1.2792 1.2757 1.3443 1.3900 1.4495 1.4699 1.5437 1.5629 1.5834 1.5482 1.4947 1.4375 1.4624 1.4047 1.3745 1.3185 1.2726 1.2390 1.2192 1.1896 1.2406 1.2000 1.1671 1.1556 1.1712 1.1227 1.1275 1.1069 1.1242 1.1525 1.2164 1.2421 1.2874 1.3315 1.3654 1.3286 1.3372 1.3077 1.3018 1.2429 1.2884 1.3012 1.3073 1.3178 1.4200 1.3940 1.4591 1.4395 1.3646 1.3977 1.3908 1.3935 1.4500 1.4397 1.4495 1.3929 1.3619 1.3456 1.3574 1.3470 1.3032 1.1834 1.0622 1.0472 1.0244 1.0552 1.1053 1.2159 1.3469 1.3601 1.3859 1.3516 1.2573 1.1643 1.0473 0.6013 [torch.FloatTensor of size 400]), ('module.postnet.conv1d_banks.0.conv1d.weight', (0 ,.,.) = 5.1769e-01 -1.9350e-01 -5.1565e-02 ⋮ -1.4517e-02 7.1059e-02 6.6071e-02 (1 ,.,.) = -1.0924e-02 -8.0526e-02 3.5597e-02 ⋮ -2.0654e-01 -1.3508e-01 3.8456e-01 (2 ,.,.) = 2.9784e+00 -6.2878e-01 -1.7459e-01 ⋮ 7.2299e-02 2.2709e-01 6.0340e-01 ... (77,.,.) = -9.9861e-01 9.4519e-02 1.9491e-01 ⋮ 1.0430e-01 -1.9140e-02 2.6940e-01 (78,.,.) = 2.1744e-01 -8.0680e-02 2.1582e-01 ⋮ -4.2295e-02 1.6425e-02 -2.3594e-03 (79,.,.) = 1.7220e+00 -1.5493e+00 2.8362e-01 ⋮ 1.4140e-01 1.6998e-02 4.2408e-01 [torch.FloatTensor of size 80x80x1]), ('module.postnet.conv1d_banks.0.bn.weight', -11.9678 -12.2330 0.5874 -12.7266 -14.8812 -2.5761 -11.9427 -4.2313 0.6492 -14.3888 0.5280 0.5165 -3.7921 0.6091 -1.2100 -5.2468 0.4883 -9.6767 0.2881 0.4058 -11.3032 -13.2620 -14.9764 0.3754 0.3768 -11.5367 -1.2537 -4.8199 0.4803 0.3263 -5.1572 -11.5061 -12.7830 -11.6226 -4.4590 0.3148 0.6764 -11.8550 -12.6242 0.0109 -14.1101 0.6742 -13.7539 -11.9381 0.4237 -12.1203 0.2748 0.2623 -13.3269 0.2197 -14.4030 -13.9489 1.0069 -13.2212 -13.3118 -11.0843 0.6281 0.3118 0.5690 -15.3363 -9.2287 0.3996 -12.4018 0.2682 0.4161 0.3363 -2.4713 -11.9829 0.2552 -12.6530 -4.9388 0.6500 -2.2789 -12.4364 0.4697 0.3311 -12.2295 -3.9541 -11.6577 0.3231 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.0.bn.bias', -0.1770 -0.5260 -0.3186 -0.4653 -0.3628 0.0852 -0.3752 0.1456 -0.6313 -0.3645 -0.1815 -0.1057 0.0914 -0.3637 0.3904 -0.0688 0.0782 -0.0764 -0.2970 -0.2733 -0.7308 -0.3784 -0.7094 0.0536 -0.2227 -0.0357 0.2188 -4.1813 -0.4352 -0.3667 -0.5476 0.1492 -0.1594 -0.5768 -0.3024 -0.1974 -0.2981 -0.3037 2.2149 -0.5135 -0.6731 -0.3076 -1.0072 -0.2102 -0.0984 -0.4764 -0.3976 -0.0539 -0.0830 -0.1599 -0.2148 -0.4588 -0.6995 -0.2935 -0.4384 -0.3426 -0.2200 -0.1131 -0.4610 -0.0497 -0.7868 -0.2811 -0.3139 -0.1979 -0.2312 -0.2283 -0.3468 -0.4847 -0.3325 -0.4417 -4.7423 -0.1914 -0.1727 -0.1415 -0.6530 -0.3162 -0.5499 -0.6805 -0.5549 -0.2024 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.0.bn.running_mean', 2.1287e-05 1.3011e-05 3.0102e-01 7.9208e-06 2.9520e-04 3.8135e-02 1.0149e-05 4.4172e-05 9.3081e-01 1.1545e-04 1.1260e+00 1.9549e+00 1.2658e-04 2.7999e-01 6.6247e-02 3.7660e-05 1.8550e-01 1.7950e-05 2.6923e+00 1.3024e-01 6.4194e-06 1.9988e-05 3.6879e-04 1.2148e-01 3.0188e-02 1.2351e-05 1.2680e-02 1.2651e-06 1.6360e-01 8.9487e-02 4.3512e-05 2.1093e-05 2.2089e-05 1.8914e-05 1.8063e-04 3.8698e+00 1.5270e-01 1.3995e-05 5.7557e-06 2.8174e+00 1.2101e-05 2.5533e+00 2.9588e-06 3.3828e-06 1.7701e-01 6.5255e-05 1.2166e-01 1.9112e+00 1.5575e-05 3.8261e+00 2.3136e-05 2.2370e-05 2.5962e-01 2.6652e-05 4.7766e-05 1.0692e-05 4.4529e-01 1.2559e-01 2.7961e-01 1.3356e-05 1.8169e-04 1.9655e-01 8.2612e-06 2.1740e+00 6.1940e-02 3.7653e-01 1.4797e-04 2.1150e-05 5.7097e-02 4.5371e-05 6.2037e-06 1.6608e-01 1.8344e-04 5.5461e-05 1.8992e-01 3.4101e-02 4.6944e-05 5.0050e-05 2.6779e-05 5.0445e-02 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.0.bn.running_var', 3.4790e-07 1.3100e-07 9.3850e-02 9.8571e-08 4.2421e-06 7.1133e-04 9.6986e-08 2.3501e-07 1.9607e-01 1.1185e-06 7.5229e-01 8.2943e-01 1.5405e-06 3.7033e-02 3.0053e-03 3.7897e-07 4.2127e-02 1.3493e-07 1.8891e+00 9.1971e-02 5.4875e-08 1.6678e-07 1.0535e-05 5.2598e-02 1.0646e-02 1.3590e-07 4.9546e-04 3.7495e-09 3.6607e-02 3.8562e-02 4.6355e-07 1.0277e-07 3.1599e-07 1.6569e-07 1.9677e-06 3.2907e+00 2.8827e-02 2.4286e-07 7.4433e-08 1.6466e+00 7.0617e-08 1.3520e+00 1.6808e-08 2.3399e-08 4.7179e-02 8.3522e-07 2.4160e-02 1.0465e+00 2.0186e-07 3.3099e+00 3.4907e-07 1.5072e-07 1.6966e-02 4.8098e-07 3.1123e-07 1.3480e-07 1.4162e-01 2.3337e-02 7.2239e-02 1.5036e-07 2.4603e-06 4.7200e-02 7.7703e-08 1.1384e+00 9.7417e-03 1.0520e-01 1.4845e-06 2.0648e-07 1.6840e-02 9.0840e-07 2.7039e-08 3.1538e-02 4.6559e-06 5.6221e-07 2.9305e-02 4.8931e-03 7.6341e-07 5.7731e-07 2.6599e-07 4.4374e-02 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.1.conv1d.weight', (0 ,.,.) = 2.2803e-01 3.5122e-02 9.0641e-04 5.3618e-02 -7.8795e-02 1.4725e-02 ⋮ -6.7398e-02 3.0243e-02 -1.4825e-01 -3.2397e-02 4.5505e-02 6.4718e-02 (1 ,.,.) = 4.4825e-01 2.4676e-01 -1.0999e-01 -1.5665e-01 2.9031e-02 -4.8570e-03 ⋮ -8.9446e-02 1.8276e-01 2.4644e-02 6.5687e-02 -1.9755e-02 -2.3221e-02 (2 ,.,.) = -4.8638e-01 1.2996e+00 -3.9909e-01 -1.7756e-01 -5.2663e-02 5.4679e-01 ⋮ 1.1216e-01 8.3459e-02 -1.1575e-01 -8.2108e-02 -3.9344e-01 2.8520e-02 ... (77,.,.) = 1.3718e-01 4.9113e-01 -3.1217e-03 1.6377e-02 1.8865e-01 -6.7507e-02 ⋮ 7.4536e-02 -4.8456e-02 4.2454e-03 -1.0714e-01 2.5783e-01 6.0486e-02 (78,.,.) = 1.4156e-01 8.3407e-02 -1.1070e-01 2.2991e-01 1.2974e-01 8.5432e-02 ⋮ 1.3325e-02 -7.5131e-02 1.7926e-01 -8.9462e-03 -4.1264e-04 2.6007e-02 (79,.,.) = 3.1951e-01 -8.2399e-01 -2.8575e-01 3.6496e-02 -1.9432e-01 -3.7100e-01 ⋮ 8.6748e-02 -1.2156e-01 -2.9631e-02 -1.3142e-01 1.2614e-02 -1.6323e-01 [torch.FloatTensor of size 80x80x2]), ('module.postnet.conv1d_banks.1.bn.weight', -4.6559 -4.0374 0.5697 0.2481 -2.0727 0.1974 -4.4233 -12.7494 -11.2140 -10.3780 -12.1605 -13.5825 0.2552 -7.1670 -4.3186 -13.1333 0.4902 -14.6622 -4.2768 -13.1279 -4.3025 0.5430 0.2588 -2.9486 -13.0833 -4.4657 0.2853 -4.3336 -11.0073 0.4570 -10.7826 0.4264 0.3180 0.7737 -15.5740 -0.0243 -12.4222 -15.6479 0.4490 0.2373 -11.2645 0.2832 -13.6475 0.2258 -13.5477 -11.1852 -12.1963 -24.0367 -4.1066 -4.9623 0.4799 0.7183 -11.3978 0.0855 -13.4413 0.2855 0.0430 0.3015 -15.7421 -14.6467 0.3438 0.7918 -13.1479 -3.8025 0.6624 -4.3095 -10.4288 0.0540 -14.9388 -2.1997 -10.7721 -0.8526 -11.0237 -13.7308 -13.1054 -12.2283 0.2761 -11.2452 -10.0655 0.1193 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.1.bn.bias', -3.5955 -2.4005 -0.4212 -0.3180 -0.1121 -0.2129 -0.1396 -0.2378 0.1549 -0.2506 0.6684 -0.4975 -0.2724 -0.3186 -1.2307 -0.4911 -0.3951 0.0476 -2.0947 -0.3600 -0.1530 -0.2154 -0.1930 -0.1384 -0.5023 -2.6071 -0.4027 -3.1746 -0.1386 -0.4783 -0.4405 -0.5392 -0.1531 -0.5572 -0.2009 -0.2193 -0.4634 -0.5115 0.2173 -0.4267 -0.5161 -0.1203 -0.3652 -0.4581 -0.5642 -0.1202 -0.2451 -0.5672 -4.2068 -4.4568 -0.0325 -0.4939 0.4986 0.2563 -0.4145 -0.2797 -0.3181 -0.2340 -0.3363 -0.4155 -0.2964 0.0162 -0.5236 -0.4651 -0.6325 -3.2684 -0.3006 0.3432 -0.5174 -0.2651 0.3860 0.5317 -0.3071 -0.5193 -0.5852 -2.6626 -0.3978 -0.6819 -0.3028 -0.1223 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.1.bn.running_mean', 2.2363e-04 2.9094e-04 2.3534e-01 2.4942e-02 1.3132e-04 1.7823e-02 1.7010e-05 2.1813e-05 1.3471e-05 1.5838e-05 4.8694e-06 3.0307e-05 1.1366e-01 4.4294e-05 4.5411e-05 1.9422e-05 2.8304e-01 4.2274e-05 3.1824e-05 4.1937e-05 2.1202e-05 1.4217e+00 5.6688e+00 6.3756e-05 2.3630e-05 2.1025e-06 9.9421e-02 6.7157e-06 1.1267e-05 1.7153e-01 4.9026e-06 1.7071e-01 2.8186e-02 5.9749e-01 2.0170e-05 3.7440e+00 1.7805e-05 3.5869e-06 2.8605e-01 8.7782e-02 1.0984e-05 4.1899e+00 2.3876e-05 1.3776e-01 7.9306e-05 1.1640e-05 3.5394e-05 2.7546e-04 1.5698e-06 1.9729e-06 2.5174e-01 6.5254e-01 6.0419e-06 6.2503e+00 3.1011e-05 5.1328e-02 6.1270e+00 2.8697e+00 1.7399e-05 4.1985e-05 2.5586e-02 3.5167e-01 4.7537e-06 6.9266e-06 3.3323e-01 2.5710e-04 3.6380e-05 6.8674e+00 9.0483e-06 1.6014e-05 1.0817e-05 3.3237e-05 2.1958e-06 1.7984e-05 1.2633e-05 1.6877e-07 1.6081e-01 4.6447e-06 7.3561e-06 5.9757e+00 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.1.bn.running_var', 3.6303e-05 6.7024e-05 5.4235e-02 5.1356e-03 3.3565e-06 4.4920e-03 3.1855e-07 2.6009e-07 6.1145e-07 3.7904e-07 7.6811e-08 8.5661e-07 6.2907e-02 9.7532e-07 6.8136e-07 2.7929e-07 1.1577e-01 3.2672e-06 9.4797e-08 2.8717e-06 2.0015e-07 9.1117e-01 8.3589e+00 1.0567e-06 1.7558e-06 1.3394e-08 2.9529e-02 5.6320e-08 1.6233e-07 7.0599e-02 2.8805e-08 7.6354e-02 4.6784e-03 1.2298e-01 2.5616e-07 2.8811e+00 3.1228e-07 2.9966e-08 5.1873e-02 2.1567e-02 6.6883e-07 3.7025e+00 4.6636e-07 2.7070e-02 6.0551e-06 2.2597e-07 1.8449e-07 5.8589e-05 9.0792e-09 1.6147e-08 2.2007e-01 8.9547e-02 7.5446e-08 7.9119e+00 5.5627e-07 4.0713e-02 7.4552e+00 2.0425e+00 1.5050e-07 2.4783e-06 4.8531e-03 1.8566e-01 3.7803e-08 4.1020e-08 4.3489e-02 4.7513e-05 1.8622e-07 1.0191e+01 1.8340e-07 3.2625e-07 1.0982e-07 5.2538e-07 9.5139e-09 1.8233e-06 6.4853e-07 3.3042e-10 5.1054e-02 2.2896e-07 1.1318e-07 7.8123e+00 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.2.conv1d.weight', (0 ,.,.) = -1.4133e+00 1.9358e+00 -2.2761e+00 -2.6115e-01 -6.4256e-01 -9.4213e-01 -5.8635e-02 2.6985e-01 -3.4424e-01 ⋮ -3.3533e-01 -2.6463e-01 -6.2940e-01 -4.1993e-01 -4.0857e-01 -5.8928e-01 2.4180e-01 8.2423e-01 3.4042e-01 (1 ,.,.) = -6.3384e-01 5.0494e-01 6.8654e-01 -6.1688e-02 2.3843e-01 8.5413e-02 1.7572e-01 7.3334e-02 -3.5420e-01 ⋮ 1.0617e-02 3.4396e-02 1.1085e-01 1.0521e-01 1.0961e-01 1.7268e-01 -9.4826e-02 2.1547e-01 3.2199e-01 (2 ,.,.) = 5.9874e-01 1.4641e+00 -2.4349e-01 -1.1605e-01 7.6056e-02 -4.8551e-01 -4.4155e-02 9.2776e-02 -2.0977e-01 ⋮ -5.5105e-02 1.1099e-01 3.2921e-01 -3.0498e-03 1.0972e-01 3.7460e-01 2.8244e-01 5.9271e-01 1.1813e+00 ... (77,.,.) = -3.3863e-01 3.4396e-02 -9.0110e-01 2.6420e-01 -1.7286e-01 -2.4213e-02 4.7889e-02 3.2799e-01 -6.6698e-02 ⋮ -1.3399e-01 -9.5224e-02 -7.3040e-02 -1.1115e-02 2.4881e-02 -3.5365e-02 2.4877e-01 1.2823e-01 1.5913e-01 (78,.,.) = 2.5783e-01 3.3355e-01 8.9789e-02 -7.6974e-03 -5.6598e-02 -2.3089e-02 2.2232e-02 5.1629e-03 -3.7251e-02 ⋮ -9.2541e-02 1.7696e-02 3.5425e-02 -6.2737e-02 -5.6951e-02 -3.3937e-02 1.1730e-01 3.1792e-01 3.2876e-01 (79,.,.) = 7.1183e-01 -4.5185e-02 4.4113e-01 -3.0985e-01 1.8060e-01 -7.5757e-02 -9.0891e-02 8.3236e-02 -4.6741e-02 ⋮ -2.8031e-02 3.6725e-02 -1.4995e-01 -2.8456e-01 -4.5680e-02 -8.1578e-02 3.2646e-01 4.2007e-01 4.3912e-01 [torch.FloatTensor of size 80x80x3]), ('module.postnet.conv1d_banks.2.bn.weight', 1.1050 -12.6208 -0.6726 -4.5285 0.1909 0.4239 -0.0046 -4.7432 -7.8804 -4.4433 -14.6568 0.5739 1.0171 -5.0379 -11.2187 -4.2271 -7.7085 -8.0594 -1.3077 0.9681 -3.9361 -17.2516 -5.4521 0.0564 0.2879 0.4618 -13.9111 -3.3999 0.1262 -14.1449 -15.6368 -3.6486 0.4093 0.1321 0.0248 -4.5585 -20.8631 -3.5994 -11.6284 -14.0767 -4.4640 -13.8108 -12.9264 0.2905 -15.1739 -1.8180 -11.9922 -8.4710 -0.0151 -0.5992 0.2398 -12.1313 -3.7233 0.7356 -14.2762 -18.0932 -2.5095 0.6036 -12.8876 1.1553 -11.8942 0.9971 -4.0731 0.5014 -4.7933 0.9338 0.3654 -12.9173 0.1026 -14.0002 -8.5936 0.5326 -2.1148 -3.5482 -13.0111 -18.3692 0.5072 -0.8550 -4.5312 -8.0246 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.2.bn.bias', -0.5262 -0.2552 -0.3539 -2.0449 -0.1213 -0.2403 -4.1364 -3.9105 -0.7265 -4.1398 -0.4853 -0.2733 -0.7255 -4.6119 -0.0156 -3.2841 0.1398 -0.5341 -0.3197 -0.6035 -3.9686 -0.2954 -0.1068 -0.2325 -0.3792 -0.0235 -0.2452 -0.1333 1.4335 -0.5091 -0.3646 -3.0977 -0.4734 0.4052 -1.2922 -3.6614 -0.4880 -0.1273 -0.5268 -0.4922 -0.1440 -0.4297 -0.4102 -0.3040 -0.3347 -0.2140 -0.3307 -0.1347 0.5467 0.2248 -0.1635 -0.4661 -1.5422 -0.0478 -0.3919 -0.2397 0.9143 -0.2806 -0.3381 -0.7008 -0.1280 -0.6332 -1.0078 -0.1564 -4.5764 -0.5432 -0.1128 -0.3690 1.3157 -0.4662 -0.4705 -0.2594 -0.1613 -0.0501 -0.3376 -0.3958 -0.0910 0.0476 -3.5011 -0.9231 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.2.bn.running_mean', 5.6327e-01 1.1281e-05 3.5915e-01 1.2443e-06 3.6771e-02 1.0874e+00 4.7118e+00 1.7777e-06 4.6231e-06 5.6352e-07 3.9949e-05 1.0184e-01 2.5457e-01 1.4301e-06 5.0850e-06 5.9391e-06 5.5783e-06 1.1063e-06 2.1314e-04 5.6636e-01 1.9147e-07 3.6496e-05 3.3147e-05 5.0854e+00 2.1090e-02 1.9207e-01 3.2702e-05 3.6508e-05 8.4609e+00 2.1203e-05 1.6400e-05 1.8826e-06 1.6477e-01 7.0851e+00 8.7817e+00 1.2372e-07 5.8524e-05 2.7282e-05 7.8823e-06 1.8508e-05 2.0674e-05 1.2403e-06 7.4226e-06 8.3879e-02 5.3513e-05 1.0964e-04 2.9298e-06 2.0372e-05 5.7011e+00 3.3937e-01 4.5090e-02 2.0355e-05 7.5500e-06 7.9768e-02 1.0328e-05 3.2163e-05 3.6317e-05 2.8576e-01 7.1058e-06 8.7182e-01 6.5551e-06 5.9124e-01 3.8902e-07 1.0481e+00 3.0005e-06 3.4302e-01 1.0958e-01 1.8918e-05 7.7344e+00 4.7039e-05 1.6219e-05 6.2909e-01 2.4814e-05 6.8329e-06 4.5856e-05 8.4643e-05 5.9838e-01 8.4416e-01 1.6249e-07 4.5940e-07 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.2.bn.running_var', 8.5514e-02 1.8193e-07 3.7855e-01 1.5033e-08 6.3139e-03 8.5694e-01 4.1488e+00 3.2303e-08 3.3413e-08 4.2499e-10 9.4977e-07 3.7310e-02 2.6780e-02 4.3901e-09 5.3359e-08 1.7452e-08 7.7886e-08 8.0926e-09 4.5635e-06 8.2369e-02 3.3076e-10 1.0183e-06 8.9923e-07 5.0726e+00 5.1358e-03 1.1375e-01 8.6792e-07 5.6404e-07 1.4767e+01 2.7675e-07 2.7659e-07 1.7719e-08 5.3666e-02 1.0820e+01 1.5185e+01 1.0127e-10 1.6437e-06 2.5839e-07 1.6270e-07 6.3398e-07 5.0490e-07 3.4103e-09 6.3605e-08 4.0738e-02 1.3719e-06 3.3032e-06 1.4438e-08 1.9050e-07 6.0625e+00 4.8638e-02 1.6690e-02 2.1606e-07 4.0492e-08 1.8964e-02 1.5395e-07 1.0584e-06 1.5166e-06 1.5925e-01 1.2890e-07 1.4933e-01 5.4070e-08 1.1468e-01 2.1891e-09 9.0264e-01 1.4358e-07 4.5188e-02 1.1012e-01 4.2025e-07 1.2277e+01 1.3405e-06 4.2785e-07 1.9064e-01 4.0780e-07 1.0703e-07 1.2103e-07 2.4075e-06 4.2777e-01 2.8713e-01 8.7225e-10 4.4934e-09 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.3.conv1d.weight', (0 ,.,.) = -1.3261e+00 -2.0517e-01 -4.4074e-01 7.2879e-02 2.4019e-01 -1.9528e-01 -1.5823e-02 -2.8905e-01 2.7706e-01 -4.2438e-01 -6.4276e-02 6.8291e-02 ⋮ -5.1208e-02 -1.0592e-01 1.4846e-02 -3.9983e-03 8.5784e-02 1.0458e-01 1.7470e-01 8.6012e-02 -2.6250e-02 -1.8079e-02 1.5872e-02 1.1640e-01 (1 ,.,.) = -3.0618e-01 1.9623e-01 -4.7689e-01 3.7074e-02 -5.9695e-01 -5.6618e-01 2.5753e-02 -5.4921e-02 5.4218e-02 2.6022e-02 1.5518e-02 1.4428e-01 ⋮ 1.2883e-01 9.6835e-02 6.7881e-02 -1.6458e-01 1.4528e-01 4.6192e-02 1.0268e-01 -1.5206e-01 -1.3590e-02 -5.6363e-02 3.4911e-03 -3.2345e-01 (2 ,.,.) = -4.5412e-02 2.2586e-01 3.5024e-01 5.4553e-01 2.3056e-02 1.1484e-02 6.0658e-02 -6.6450e-02 -9.8695e-02 7.8628e-02 6.5969e-03 3.2407e-02 ⋮ 4.2043e-02 -1.1394e-02 -1.5054e-01 -2.1846e-02 -5.7694e-02 7.4430e-02 4.4309e-02 -1.0059e-01 3.6066e-02 2.2695e-01 1.2893e-01 1.8071e-01 ... (77,.,.) = 9.2358e-01 -3.5119e-02 -5.9368e-01 -1.1777e-01 -4.0169e-01 2.6219e-01 3.1201e-01 -3.7066e-01 -7.5183e-02 4.6646e-02 -3.9222e-02 2.0479e-02 ⋮ 2.2368e-02 3.6631e-02 -1.6998e-01 4.6614e-02 2.6064e-02 5.3832e-02 9.7212e-03 1.9178e-01 6.9384e-02 3.0382e-03 6.0725e-02 4.4700e-01 (78,.,.) = -1.0538e-01 -1.1898e-01 -1.0235e-01 -1.4249e-01 -8.1546e-02 -1.5799e-02 -8.2799e-02 -8.0971e-02 5.4072e-03 3.2377e-02 5.7772e-03 4.8725e-02 ⋮ -1.5251e-02 -5.8531e-02 2.1212e-02 -6.8767e-02 7.3147e-03 3.5619e-03 1.1491e-01 1.1213e-01 -1.4844e-01 -1.1175e-01 -5.2830e-02 -1.0134e-01 (79,.,.) = 1.3038e-01 2.6935e-01 1.2239e-01 -1.0109e-01 -1.2836e-02 -2.7897e-02 6.5750e-02 1.0875e-01 -8.8257e-04 2.9252e-02 -3.2304e-02 2.3341e-02 ⋮ -5.9310e-02 -1.7101e-02 -9.5936e-02 -2.6483e-02 3.8124e-02 -5.4581e-02 -2.2384e-02 1.0250e-01 1.1930e-01 1.7073e-01 5.8444e-02 2.3007e-01 [torch.FloatTensor of size 80x80x4]), ('module.postnet.conv1d_banks.3.bn.weight', -2.5268 0.4578 -12.8922 0.0399 0.0035 0.5769 -4.0805 -4.1822 0.0547 -3.5851 -9.6606 -3.9226 -4.4454 -13.0319 0.4712 -4.4964 -11.3242 0.3891 -3.9971 -14.5917 -3.2207 -1.1421 -4.2382 -12.9617 -4.2000 0.3799 -4.8247 -2.3899 -3.1779 -4.3621 0.1742 -4.8606 -3.2211 0.4293 0.4671 0.6593 -0.0235 0.0619 0.2185 -4.0747 -4.9047 0.1288 -4.2326 0.4182 0.6918 0.5098 -8.0619 0.4858 -11.1034 -4.1302 -3.1056 0.0811 0.3427 -2.8402 -11.3985 -11.4768 0.0237 -2.0880 -0.0084 -4.7499 0.4351 -0.3997 0.4541 -4.9137 -11.7080 0.9272 0.0016 -4.0587 -14.6625 -0.1130 -4.9753 0.2320 -11.6048 -1.7832 -2.4467 -12.0570 -4.6870 -15.0827 -1.8075 -4.0728 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.3.bn.bias', -0.1262 -0.3013 -0.8146 0.6035 -0.3680 -0.2605 -0.3404 -3.5481 0.0310 -0.1748 -0.4248 -0.3888 -1.1308 -0.5568 -0.5755 -3.9179 -0.4536 -0.4253 -1.2827 -0.4353 -0.3216 -3.5588 -3.9900 -0.1729 -0.7320 -0.1958 -3.7417 0.1820 -0.3782 -3.8825 -0.0597 -4.7667 -0.2451 -0.2762 0.2594 -0.5799 0.0799 0.0899 -0.0234 -0.8848 -3.8244 -0.1071 -3.9127 -0.4297 -0.1180 -0.2737 -0.5355 -0.0077 -0.1744 -4.1097 -0.0084 -0.2154 -0.4333 -0.5057 -1.2638 -0.5293 -0.3836 0.0018 -2.3551 -4.5227 -0.0428 0.4557 -0.3260 -3.9979 -0.1996 -0.0859 -1.3325 -4.4337 -0.5358 1.0591 -4.1141 -0.3156 -0.1987 -0.3252 -0.3384 -0.6654 -4.1062 -0.3360 -2.9950 -3.3073 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.3.bn.running_mean', 9.0527e-05 6.7213e-01 2.2939e-06 9.4556e+00 7.7267e+00 2.5540e-01 7.7247e-06 1.1427e-08 1.0398e+01 9.6055e-05 3.0045e-06 2.4925e-06 8.6642e-06 3.0603e-06 3.1670e-01 4.4633e-06 1.0813e-06 3.3733e-01 5.9336e-06 1.1803e-05 5.7429e-06 1.4443e-05 1.2036e-06 7.2304e-06 5.8735e-06 3.4058e-01 7.3811e-06 1.1589e-04 2.6694e-05 1.2107e-10 1.3585e+01 2.1797e-06 1.2628e-05 2.9931e+00 2.0679e-01 9.1944e-01 4.9761e+00 1.2069e+01 3.6748e+00 5.5600e-06 1.0438e-08 7.5946e+00 6.3475e-06 2.8479e-01 3.9223e-01 1.1487e+00 3.7495e-06 4.1301e-01 4.5427e-07 5.0823e-07 1.7342e-05 9.3364e+00 2.5754e-01 1.1765e-05 1.9680e-06 1.3843e-06 8.1019e+00 1.7430e-05 1.4960e+00 1.1329e-06 3.1432e-02 8.6991e-01 6.7299e-02 3.7045e-06 2.1253e-06 8.8729e-01 9.0992e+00 3.5421e-07 2.4848e-05 2.8979e-05 4.1043e-07 2.8082e+00 2.6206e-06 1.7932e-04 7.1267e-05 4.8661e-07 1.7040e-07 4.4248e-06 1.5325e-05 2.1064e-06 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.3.bn.running_var', 1.2437e-06 3.6682e-01 1.5979e-08 1.6722e+01 1.1510e+01 3.1597e-01 7.3698e-08 2.7022e-12 2.3216e+01 2.7142e-05 5.0977e-08 3.0948e-08 6.7951e-08 3.4256e-08 9.0787e-02 1.9166e-07 9.8555e-09 1.4556e-01 3.7993e-08 2.0463e-07 9.7002e-08 6.5270e-08 1.2219e-08 1.5361e-07 7.6163e-08 1.8414e-01 4.5620e-08 2.9798e-06 7.6708e-07 3.0188e-13 3.2307e+01 7.0671e-08 4.0008e-07 1.7935e+00 3.5820e-01 2.9136e-01 5.2004e+00 3.2772e+01 2.9079e+00 7.3876e-08 8.5879e-12 1.1208e+01 6.4916e-08 6.4894e-02 2.0577e-01 5.4269e-01 4.1126e-08 1.7521e-01 9.4886e-10 2.7684e-09 9.6865e-07 1.6125e+01 1.5095e-01 1.8722e-07 3.4655e-08 2.8854e-08 1.2195e+01 2.7359e-07 9.6612e-01 6.5774e-09 9.0307e-03 3.8151e-01 2.9937e-02 1.8729e-07 2.4018e-08 1.7031e-01 1.5647e+01 2.8821e-09 1.3710e-06 6.7044e-07 3.3338e-09 4.7245e+00 1.7651e-08 4.5999e-06 3.1728e-06 2.7243e-09 4.4030e-10 4.1070e-08 1.1915e-07 5.6752e-08 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.4.conv1d.weight', (0 ,.,.) = 1.5686e+00 -6.4739e-01 1.4181e-01 4.0395e-01 -7.0657e-01 -4.1751e-01 -2.0063e-02 1.7128e-01 -4.3060e-01 9.8187e-02 -4.4967e-02 1.4617e-01 1.0732e-01 1.3728e-01 1.6912e-01 ⋮ 1.5863e-01 1.4459e-02 -3.8073e-03 -1.2433e-01 -3.4891e-02 1.5925e-01 6.6438e-02 -9.0607e-03 -6.1685e-02 3.7638e-03 1.9384e-01 1.6836e-01 -5.1740e-02 -1.5689e-01 1.0534e-03 (1 ,.,.) = -1.7182e-01 -1.8121e-01 -1.7164e-01 -1.9797e-01 -3.2164e-01 -2.1415e-02 3.4693e-02 -2.7767e-02 1.8646e-01 9.8169e-02 -6.0786e-02 -4.2094e-02 -4.6286e-02 6.9078e-02 9.2483e-02 ⋮ 5.5376e-02 6.4676e-02 -3.8600e-02 5.1214e-02 -8.2247e-02 7.9675e-02 6.6527e-02 -3.0137e-02 4.3228e-02 -6.6019e-02 2.2593e-01 1.6549e-01 1.3740e-01 1.9141e-01 7.4694e-02 (2 ,.,.) = 5.8797e-01 1.1582e-01 2.7823e-01 1.8149e-01 4.8132e-02 1.1961e-01 2.0705e-01 -5.0408e-02 3.7282e-02 -1.9043e-01 -1.5806e-01 4.0247e-02 8.6887e-02 1.2589e-01 -1.8701e-01 ⋮ -8.8763e-02 1.3013e-02 -1.0821e-02 1.5986e-02 3.8196e-02 1.5440e-04 -5.4442e-02 8.4634e-03 -1.2476e-01 5.0944e-02 3.9365e-01 1.0105e-01 1.2978e-01 2.1548e-02 6.2940e-02 ... (77,.,.) = -1.5847e-01 2.6119e-01 -5.9377e-02 -1.8277e+00 -1.5449e+00 2.7968e-01 2.7494e-01 -2.2583e-01 1.4109e-01 -7.7504e-01 -6.5495e-02 -6.3089e-02 1.9451e-01 2.7907e-01 -4.3442e-02 ⋮ 4.2854e-02 5.0023e-02 -6.5699e-02 -7.9344e-02 -1.7509e-01 6.1825e-02 1.6443e-01 -1.5240e-02 1.2528e-01 1.4501e-01 -1.0310e-01 3.6294e-01 -8.2844e-04 1.3113e-01 1.6783e-01 (78,.,.) = -9.2892e-02 -2.0476e-01 -1.9328e-01 -1.6904e-01 -3.7297e-01 6.4856e-02 -2.9276e-02 6.3521e-02 -1.6848e-02 -3.5620e-02 1.7215e-02 6.9829e-02 -1.4160e-01 3.9499e-02 4.0538e-02 ⋮ 3.3965e-02 6.5948e-02 2.1840e-02 5.3068e-02 -3.8973e-02 6.9934e-02 -3.0774e-02 3.1385e-03 3.4650e-02 -5.8947e-02 7.1828e-02 4.0206e-02 1.0805e-01 2.2138e-01 1.2469e-01 (79,.,.) = -1.5203e-01 -3.8435e-02 1.3500e-02 1.6194e-02 -1.2227e-01 6.4620e-02 2.8093e-02 1.2704e-02 4.0136e-02 -2.4529e-01 -3.5442e-02 2.9202e-02 2.3826e-02 4.1322e-03 -1.2359e-01 ⋮ -5.1499e-02 3.8150e-02 4.7023e-03 4.8181e-02 -3.8601e-02 -4.0502e-02 2.0468e-03 -5.0560e-02 5.2808e-02 8.2607e-03 3.4581e-02 -1.8643e-03 7.2634e-02 1.5115e-01 5.9412e-02 [torch.FloatTensor of size 80x80x5]), ('module.postnet.conv1d_banks.4.bn.weight', -3.1114 -3.6436 -11.5585 -10.4227 -0.4435 -4.2407 -2.6719 0.5268 -21.6844 1.4152 0.8414 -7.4078 0.0875 -0.8201 -2.0815 -4.2865 0.9424 1.1091 -4.4419 -11.9075 -8.4086 -2.7601 0.4206 0.4259 0.3753 -4.8195 0.7709 -11.3462 0.5276 0.9512 -11.5833 -1.1161 0.4165 0.9368 0.7865 -4.8721 -2.7109 -3.8780 -4.2648 -12.0094 0.0308 0.6082 -10.4139 0.0122 0.6591 0.6041 -1.3099 -0.8088 0.0389 0.8472 -13.2373 -3.2597 -12.4455 0.1006 -2.6336 -11.6239 -4.3382 -0.7267 0.3470 -10.8323 0.5100 -0.0568 0.6117 -3.5144 0.3754 0.0783 -1.6908 0.7457 -13.5645 -3.8569 -0.6424 -4.6047 -1.3007 0.6398 0.5641 -13.1959 -1.5564 -15.4920 -1.1279 -0.0059 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.4.bn.bias', -0.0907 0.1247 -0.6848 -0.6421 -0.1463 -1.9022 -0.4610 -0.3081 -0.4921 -0.6449 -0.4662 -0.1597 -0.0268 -0.0688 -0.1862 -3.4868 -0.3786 -0.2628 -1.9394 -0.4781 -0.5795 -3.9740 -0.2431 -0.3317 -0.3059 -4.1944 -0.4192 -0.4415 -0.2498 -0.1404 -0.5222 -0.0211 -0.2708 -0.6674 -0.6530 -0.5205 -0.1410 -0.2570 -0.2570 -0.4062 0.8527 -0.5576 -0.1164 -0.2757 -0.1256 -0.6343 -0.2397 0.1046 1.1796 -0.1107 -0.2626 -0.7130 -0.5751 -0.1367 -0.2514 -0.5131 -2.3786 0.1182 0.0462 -0.6482 -0.3057 -1.7059 0.0097 -0.2467 -0.1131 -0.2379 -0.2998 -0.0255 -0.4650 -2.1786 -0.1979 -4.2684 -0.0443 -0.1549 -0.4398 -0.5989 -0.1807 -0.5320 0.3008 -4.1098 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.4.bn.running_mean', 1.4791e-05 7.1580e-06 5.4625e-07 1.5165e-06 2.9518e-01 1.1891e-05 4.3821e-02 1.6258e-01 9.1277e-05 4.3748e-01 4.9826e-01 4.8062e-06 1.0348e+01 1.1714e-01 1.5927e-05 1.1033e-06 6.1062e-01 3.8079e-01 5.2303e-06 8.5180e-06 1.2921e-08 8.4226e-07 8.1688e-02 1.3962e+00 6.3438e-02 2.4138e-06 1.9679e+00 1.0141e-06 2.7648e-01 3.3054e-01 1.9053e-06 2.1194e-01 7.0494e-02 8.1467e-01 2.8048e-01 7.4746e-06 3.4446e-05 4.9036e-05 4.3190e-04 3.0069e-06 8.9921e+00 3.8910e-01 7.9356e-06 1.2102e+01 1.1947e+00 9.2566e-01 1.3713e-05 4.0264e-01 1.3412e+01 8.5061e-01 1.3104e-06 3.8468e-06 2.3956e-06 1.9958e+01 1.2555e-04 1.4633e-06 2.1924e-07 3.1256e-01 3.9039e+00 3.6868e-06 4.6208e-01 4.9890e-01 8.7584e-02 2.2797e-05 6.0309e-02 1.0651e+01 2.3329e-01 3.6643e-01 6.9491e-06 7.5810e-06 4.1290e-01 3.0629e-06 2.8954e-01 4.0581e-01 7.8708e-02 8.6479e-06 1.9115e-05 1.5491e-05 1.8777e-05 3.3491e+00 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.4.bn.running_var', 1.5262e-06 1.3771e-07 5.3700e-09 2.2306e-08 8.0874e-02 6.9410e-08 3.0631e-03 1.6535e-01 2.8116e-06 6.4422e-02 1.4913e-01 5.5257e-08 2.0834e+01 1.7277e-02 2.9805e-07 1.5038e-08 1.0679e-01 4.8342e-02 8.1840e-09 6.3791e-07 2.6678e-11 2.2232e-09 5.4488e-02 6.7553e-01 1.8869e-02 1.2107e-07 2.3370e+00 3.0896e-09 7.6846e-02 7.0930e-02 2.5471e-08 2.4525e-02 2.3188e-02 1.3394e-01 7.9304e-02 2.3824e-07 2.9613e-06 9.3945e-07 4.6336e-05 4.3658e-08 1.5431e+01 9.4522e-02 1.2893e-07 2.6148e+01 4.4857e-01 5.1562e-01 6.7592e-07 9.9170e-02 3.2691e+01 4.5267e-01 3.0760e-09 3.3099e-08 2.6102e-08 7.2174e+01 5.0823e-06 1.1059e-08 9.1769e-10 6.5382e-02 5.2364e+00 6.6680e-08 1.0776e-01 1.1825e-01 3.3046e-02 4.7272e-07 3.1214e-02 2.3556e+01 1.5954e-02 2.6601e-01 1.4740e-07 7.1719e-08 9.2206e-02 9.0131e-08 4.1669e-02 2.6487e-01 3.4801e-02 1.5547e-07 7.8035e-07 4.5604e-07 3.1844e-07 2.0638e+00 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.5.conv1d.weight', (0 ,.,.) = -5.4860e-01 2.0713e-01 1.6055e+00 -5.2223e-01 1.8030e+00 2.6348e-01 -2.0093e-01 4.5769e-02 6.3496e-01 2.9092e-01 1.0631e-01 1.8241e-01 -1.3228e-01 7.9001e-02 4.3514e-02 1.4466e-01 -8.5870e-02 -1.6509e-01 ⋮ -1.5503e-02 4.7154e-02 5.2289e-02 1.6896e-02 2.2193e-02 1.3157e-01 -1.2542e-01 -3.9076e-02 -4.4819e-02 2.0163e-01 2.1832e-01 3.3319e-01 -2.2883e-01 -2.0393e-01 -7.0349e-02 -2.6324e-02 -5.9681e-02 3.6948e-01 (1 ,.,.) = -1.7623e-01 -1.3750e-01 -3.1823e-02 -1.9775e-01 5.8317e-02 1.9174e-01 -3.5452e-02 -2.3127e-02 -2.1820e-02 2.3226e-02 1.4172e-01 2.6835e-01 -1.3102e-01 -2.6183e-02 -6.7891e-02 -1.7501e-02 -8.8783e-02 -5.0563e-02 ⋮ -4.0606e-02 2.6082e-02 6.4175e-02 4.1513e-02 9.5760e-02 5.5027e-02 -1.0897e-02 9.1545e-02 4.0820e-02 5.1575e-02 4.0873e-02 -2.8130e-02 3.7406e-02 -4.9818e-03 1.2074e-02 -1.0079e-02 1.5794e-02 3.5829e-03 (2 ,.,.) = 8.6653e-01 7.1018e-01 1.1930e-01 6.7257e-01 3.2135e-01 2.2231e-01 -5.5292e-01 -1.8671e-01 1.7102e-01 1.4232e-01 2.1184e-01 -8.6735e-02 6.7562e-02 1.0823e-02 1.7989e-01 1.9343e-01 2.2654e-01 -5.1714e-02 ⋮ -9.9573e-02 -6.6490e-02 -2.6533e-02 6.5583e-02 2.2089e-02 3.8572e-02 5.8930e-02 -1.1670e-02 1.4769e-02 7.4453e-02 5.9849e-03 7.0727e-02 3.0224e-01 5.2385e-03 2.7315e-01 2.0238e-01 1.1285e-01 3.0515e-01 ... (77,.,.) = -1.4380e-01 -1.2970e-01 -7.8037e-02 -1.1121e-01 -9.8119e-02 4.0675e-01 -2.5799e-01 -3.3438e-01 -2.3150e-01 -2.2831e-01 -1.6482e-01 1.7353e-01 -4.2796e-02 -4.1783e-02 1.7217e-02 3.3401e-02 4.2506e-02 -6.9167e-02 ⋮ 2.7723e-02 3.0782e-02 9.9269e-03 3.5218e-02 -4.7910e-02 -3.2218e-02 -5.2994e-03 2.0554e-02 2.8580e-02 3.0602e-02 -1.3117e-02 -5.0382e-02 -6.8279e-02 -1.0015e-02 -1.8131e-02 -7.6639e-02 -7.6285e-02 -2.6653e-01 (78,.,.) = 1.0527e-01 9.4512e-02 1.0673e-01 1.6742e-01 5.7785e-02 -2.8438e-02 7.4955e-02 3.0069e-02 6.6862e-02 3.0888e-02 3.1180e-02 1.3977e-01 9.2024e-03 2.2835e-02 -4.1930e-02 1.0184e-01 7.6296e-02 6.0169e-03 ⋮ -9.7722e-02 -1.0904e-03 -6.9749e-02 -6.9405e-03 -1.2642e-01 -9.5243e-02 5.8125e-03 -6.7751e-02 1.1047e-01 8.2299e-03 -4.2008e-02 5.1742e-02 7.3912e-02 8.3876e-02 6.2410e-02 7.7047e-02 6.0862e-02 7.4805e-02 (79,.,.) = -1.0010e+00 -9.8334e-02 -1.6020e+00 7.5413e-01 -4.1561e-01 1.0840e+00 2.4568e-01 3.1564e-01 -3.4452e-01 -4.6273e-02 1.0520e+00 -9.7148e-01 -7.6527e-02 -2.2401e-01 7.9422e-02 -2.6896e-01 3.6150e-01 6.4700e-02 ⋮ 1.4473e-01 3.2172e-01 2.0931e-01 1.8520e-01 -2.2031e-01 -1.4759e-01 -6.1705e-02 -6.6544e-03 -1.3486e-01 -9.8805e-02 -1.5604e-01 -6.9360e-02 -3.2565e-01 -4.4507e-01 -4.6296e-01 2.4712e-01 3.9803e-01 7.2480e-01 [torch.FloatTensor of size 80x80x6]), ('module.postnet.conv1d_banks.5.bn.weight', -1.9655 -0.9887 -10.7031 0.2270 -2.5000 -10.3362 -5.2901 -10.6125 -4.5590 -11.7356 0.3789 -12.5152 -2.9383 -4.9282 -1.7910 -10.5731 0.5355 -12.9075 0.5282 0.5553 0.3319 0.1124 -4.7897 0.2692 0.8829 0.3214 -9.8628 -1.3829 -0.0948 0.1667 0.0405 -3.3630 -9.5162 0.5304 -11.7365 -2.2216 0.1461 0.5525 0.3589 -6.6794 -0.0055 -4.9119 -10.5283 0.9588 0.4601 0.3697 0.5197 -1.0671 0.0264 0.3258 -11.7318 -2.6943 -12.6896 -0.7154 -11.4747 -10.0193 0.6377 1.0143 0.4361 -4.1749 0.5932 0.4845 -8.0335 -11.2354 -8.8210 -3.1390 -11.2160 0.0663 0.6004 -12.1574 -2.4649 -9.8693 -10.7270 0.5790 -0.7557 0.9959 -3.4245 -0.0401 -4.1284 -4.7673 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.5.bn.bias', 0.0029 -4.3787 -0.2540 -0.2102 -0.3568 -0.0817 0.2838 -0.2897 -1.2939 -0.1694 -0.0895 0.3677 -0.2739 -4.3863 0.1692 -0.5726 -0.4461 -0.2443 -0.2804 0.1098 -0.1973 1.5201 -4.3825 -1.7465 -0.3371 -0.3231 -0.3295 -0.1735 0.5555 -0.2971 -1.6905 -4.2038 -0.4203 -0.2420 -0.5524 -0.0997 -0.2813 -0.4033 -0.4262 -0.2482 0.7620 -4.1580 -0.7885 -0.6467 -0.1021 -0.0127 -0.3365 -0.4070 -1.0600 0.1982 -0.3990 -0.2539 -0.3529 0.3436 -0.3795 -0.3374 -0.3083 -0.6688 -0.2550 -0.1713 0.0910 -0.3509 -0.3530 -0.6210 -0.4778 -4.2850 -0.3199 1.1630 -0.3874 -0.4239 -0.2612 -0.4558 -0.3260 -0.5247 0.0129 -0.1892 -4.2348 -4.2263 -4.3344 -0.0833 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.5.bn.running_mean', 6.6881e-05 1.9716e-05 4.1062e-06 1.0871e+01 3.4829e-05 5.4487e-06 1.9180e-05 7.2671e-06 9.0508e-06 3.4231e-06 3.3783e-02 3.4214e-06 4.0916e-05 1.5526e-06 1.4476e-04 2.3460e-06 1.0616e+00 1.0300e-06 1.0699e+00 9.0276e-01 1.0792e-01 1.4327e+01 2.2017e-07 4.1265e+00 6.1647e-01 4.5485e-02 1.0817e-05 3.3503e-01 1.6846e-05 1.7599e+01 1.5968e+01 2.5268e-07 2.2920e-06 2.3139e-01 6.2033e-07 4.5507e-05 7.7429e+00 1.5459e-01 7.0055e+00 9.1036e-06 9.9461e+00 4.3763e-06 1.2773e-08 7.7017e-01 5.2742e+00 7.2511e-02 2.0556e-01 3.8826e-01 1.4060e+01 6.8772e+00 2.5492e-06 1.2919e-05 6.1022e-06 9.8789e-01 1.6830e-06 3.6800e-06 8.8781e-01 4.8203e-01 1.0681e-01 4.1911e-07 1.6110e-01 9.8968e-01 4.5397e-06 2.4744e-06 4.6496e-06 6.7548e-08 5.9375e-07 1.5855e+01 1.8195e-01 1.4893e-06 2.1666e-05 3.2978e-06 4.4232e-05 4.8400e-01 6.3990e-01 4.5483e-01 1.0750e-07 3.2715e-01 2.9006e-07 4.9801e-05 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.5.bn.running_var', 1.2244e-06 5.9879e-08 4.1235e-08 2.1726e+01 7.6592e-07 7.7188e-08 1.0208e-07 1.2389e-07 6.8075e-08 2.7681e-08 3.5808e-02 2.7561e-08 5.8326e-07 4.8485e-08 3.2186e-06 2.3307e-08 5.5778e-01 9.4557e-09 6.2819e-01 7.9096e-01 1.0496e-01 3.8622e+01 1.9758e-09 3.9146e+00 3.4936e-01 2.1316e-02 2.4903e-07 5.6055e-02 2.3548e-07 5.9798e+01 4.5718e+01 1.8836e-09 4.9038e-08 6.8371e-02 5.3013e-09 6.8085e-07 1.2107e+01 7.2701e-02 8.8856e+00 2.6411e-07 1.7349e+01 2.9812e-07 1.6223e-10 2.3136e-01 4.9386e+00 2.3262e-02 1.1678e-01 1.1364e-01 3.5626e+01 2.2471e+01 3.4680e-08 3.9239e-07 4.5764e-08 4.5338e-01 2.8175e-08 6.0937e-08 5.9102e-01 1.1198e-01 4.9348e-02 9.3823e-10 1.3880e-01 1.2235e+00 5.1027e-08 5.1064e-08 3.4925e-08 2.1195e-11 7.2309e-09 4.6023e+01 4.7265e-02 1.1407e-08 7.4242e-07 1.9322e-07 5.6772e-07 1.6085e-01 4.4126e-01 7.5727e-02 3.8255e-10 3.3904e-02 3.7415e-10 1.2396e-06 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.6.conv1d.weight', (0 ,.,.) = -1.0649e+00 1.3727e+00 1.9003e-01 ... -1.9994e-01 3.3153e+00 -5.1286e+00 6.0406e-02 -2.1307e-01 1.3011e-01 ... -7.6565e-01 1.6152e-01 5.9789e-01 7.5477e-01 -1.1030e-01 3.3991e-01 ... -9.8794e-02 1.3341e-01 -1.6148e+00 ... ⋱ ... -2.2227e-02 1.5231e-02 1.0512e-01 ... 2.2787e-01 2.4669e-01 4.8626e-01 -6.6963e-02 -2.3941e-02 -4.3805e-03 ... 4.8652e-02 5.6056e-04 3.2899e-01 -1.3463e-01 1.1059e-01 -2.1093e-01 ... -3.8216e-01 -2.2131e-01 -4.6672e-03 (1 ,.,.) = 1.9496e+00 -1.3853e+00 2.4915e+00 ... -2.8040e+00 2.5646e+00 -3.2165e+00 -4.1844e-01 -4.0175e-01 1.6977e-01 ... 2.6866e-01 6.1458e-01 1.1135e-01 -5.1263e-01 -3.4525e-01 -3.1196e-02 ... -2.3048e-02 3.7472e-01 4.6155e-01 ... ⋱ ... 1.7881e-01 1.0789e-01 2.4740e-01 ... 2.3902e-02 8.6342e-02 1.2528e-02 1.9066e-02 -2.1986e-03 7.4409e-02 ... 1.1230e-02 6.7261e-02 1.5289e-01 -2.4646e-01 -3.2945e-01 -6.5856e-02 ... 1.2225e-01 -8.9026e-02 -9.9658e-02 (2 ,.,.) = 3.6556e-01 1.9130e-01 4.4784e-01 ... -1.6449e-01 6.8066e-02 -1.1009e-02 4.2291e-02 1.3204e-01 1.5855e-01 ... 3.1037e-01 1.4537e-01 2.1174e-01 1.3034e-01 -7.7696e-02 4.2838e-02 ... -8.1076e-02 6.7180e-02 -8.8425e-02 ... ⋱ ... 1.0065e-03 9.5219e-02 -1.1813e-02 ... 6.7723e-03 3.1524e-02 2.6009e-02 1.0982e-01 -1.3729e-01 4.4807e-02 ... 7.5306e-02 5.6696e-02 -1.8068e-01 1.8904e-01 1.3797e-01 2.5836e-01 ... 2.0835e-01 1.6837e-01 1.1935e-02 ... (77,.,.) = 1.0173e+00 7.1137e-01 -7.3726e-02 ... -5.4410e-02 -4.9887e-02 -3.7306e-01 -6.0327e-01 -3.2560e-01 1.6308e-02 ... -2.2698e-01 -2.2651e-01 -4.7624e-01 -1.3541e-02 -1.5023e-02 1.1122e-01 ... 2.3900e-01 2.5451e-02 -1.3337e-01 ... ⋱ ... -1.1223e-01 -1.4351e-03 -5.6440e-03 ... 2.6192e-02 1.3253e-02 -8.7034e-02 3.0891e-03 1.4962e-01 5.6578e-02 ... 6.6190e-02 9.9097e-02 1.1227e-01 1.1993e-01 1.4432e-01 6.9030e-02 ... 5.5823e-02 3.9141e-02 3.2510e-02 (78,.,.) = 1.6438e+00 -1.2697e-01 -4.9258e-01 ... -7.0856e-01 -3.7720e+00 -6.7071e+00 -4.9945e-01 -1.7624e-01 2.3253e-01 ... -2.7442e-01 2.8344e-01 5.4155e-01 -7.6488e-03 9.0009e-02 -7.3368e-02 ... -1.9346e-02 -5.6370e-02 -1.0089e-01 ... ⋱ ... 3.8947e-02 8.0353e-02 7.6696e-02 ... 6.1145e-02 -1.6484e-01 -2.6345e-01 7.9000e-02 7.5109e-02 6.8631e-02 ... 3.8451e-02 -9.6734e-02 -2.5716e-01 2.6664e-01 1.3880e-01 2.1747e-01 ... 1.8959e-01 -2.9664e-02 -4.0854e-01 (79,.,.) = 6.4669e-01 9.4086e-02 -5.0195e-01 ... -2.7949e-01 -3.9004e-01 -4.9352e-01 -2.8771e-01 -1.7709e-01 -3.3198e-01 ... -6.6746e-02 1.3041e-01 -1.3144e-01 1.8937e-01 2.3438e-01 8.7372e-02 ... -7.0633e-02 -2.0086e-01 -3.7153e-02 ... ⋱ ... 4.0741e-02 2.6941e-02 1.8963e-01 ... 1.4338e-01 1.1682e-01 2.5949e-01 1.1112e-01 -7.1629e-03 9.1774e-02 ... 1.5254e-01 9.0325e-03 1.7725e-01 1.6518e-01 3.6820e-02 1.0384e-01 ... -3.3557e-02 -4.4994e-02 2.1228e-02 [torch.FloatTensor of size 80x80x7]), ('module.postnet.conv1d_banks.6.bn.weight', 1.1234 -1.2926 -10.5510 0.7360 0.8886 -8.5146 -6.4528 -0.6868 0.4879 0.9858 0.6172 0.5446 -11.2630 -10.3431 -11.7081 0.5923 0.6918 -10.9081 -11.1192 0.0431 -3.6628 0.5876 -12.2840 -7.0464 0.5582 -10.5683 -10.1524 0.7902 1.2558 1.0967 -10.5113 0.6058 1.2726 1.3562 -10.5654 1.4586 -10.0198 0.6231 -8.3551 0.0070 0.7449 1.0341 0.3115 -0.7384 1.6992 -8.6107 0.7889 -2.5240 -10.2320 1.3290 0.6859 -11.5253 1.4087 -14.0153 -12.3844 -2.2739 0.7766 1.3140 -9.7110 -1.0587 -9.7685 -1.7783 0.5942 -1.4126 0.7143 -10.1922 -11.2625 1.3250 1.0208 -2.5101 -4.1294 -9.9524 -4.3481 -1.6137 0.7320 -1.8037 1.4938 -10.5113 0.6225 -0.1926 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.6.bn.bias', -0.4338 -0.0459 -0.2383 -0.2096 -0.2667 -0.5104 -0.0578 0.0849 -0.1379 -0.4549 -0.1499 -0.4967 -0.7976 -0.6348 -0.2504 -0.2461 -0.3132 -0.3489 -0.4976 0.8112 -0.2946 -0.4577 -0.3724 -0.2919 -0.5411 -0.5325 -0.5417 -0.5636 -0.3449 -0.6064 -0.0808 -0.1803 -0.2893 -0.2053 -0.2342 -0.1587 -0.6827 -0.6597 -0.3597 0.4662 0.0072 -0.5884 -0.1537 -0.3313 -0.7711 -0.1141 -0.0630 -0.4278 -0.2871 -0.5986 -0.5690 -0.6288 -0.6113 0.3876 -0.3945 0.0004 -0.0573 -0.7955 -0.2263 -0.1895 -0.4541 0.0696 -0.3114 0.1292 -0.0607 1.0270 -0.3494 -0.4729 -0.2606 0.0859 -4.3991 -0.4413 -3.2549 -0.3289 -0.3401 0.3182 -0.5562 -0.3727 0.0064 -0.7360 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.6.bn.running_mean', 4.3804e-01 3.4535e-01 1.3789e-06 7.8018e-01 2.0125e+00 6.3429e-07 3.9701e-07 4.5727e-01 1.0556e+00 3.9728e-01 4.3544e-01 3.6977e-01 1.2993e-06 5.0158e-07 1.5312e-05 2.9213e-01 3.4550e-01 6.0703e-06 7.2830e-07 1.1630e+01 1.4107e-05 5.9418e-01 1.8864e-05 4.7275e-06 4.3268e-01 1.0982e-06 1.4959e-05 4.6972e-01 3.5489e-01 3.9912e-01 3.6602e-06 7.9997e-01 2.9541e-01 4.3830e-01 4.7996e-06 4.9214e-01 5.5694e-07 3.3097e-01 6.1603e-07 1.5817e+01 2.2433e-01 4.0282e-01 1.4865e+00 2.0431e-01 3.2347e-01 8.5819e-08 8.5859e-01 1.9079e-05 9.9729e-06 3.3272e-01 4.5262e-01 4.7821e-07 5.1499e-01 1.6921e-06 7.5501e-06 6.7233e-05 4.4490e-01 4.1153e-01 5.2981e-06 2.7974e-01 1.8517e-06 3.5282e-01 3.6116e-01 3.1202e-01 7.7057e+00 6.4928e-06 1.9840e-06 3.5685e-01 5.5001e-01 3.1763e-05 1.5337e-05 1.3852e-06 3.7034e-06 4.6679e-01 5.9364e-01 2.2923e-05 4.2884e-01 2.9967e-06 2.1568e+00 8.6376e-01 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.6.bn.running_var', 6.9108e-02 4.8026e-02 1.3288e-08 6.0707e-01 8.9584e-01 1.2293e-08 4.6750e-09 1.4253e-01 5.9941e-01 7.3726e-02 3.2148e-01 1.9845e-01 5.7209e-08 3.4721e-09 3.2750e-07 4.5857e-01 8.8278e-02 6.7437e-08 1.2995e-09 2.4064e+01 2.4312e-07 1.4615e-01 1.0230e-06 9.2319e-08 5.6174e-02 3.6997e-08 2.0017e-07 8.7901e-02 4.2786e-02 5.6097e-02 7.0912e-08 3.2873e-01 5.6683e-02 9.3703e-02 5.3004e-08 1.1316e-01 3.4978e-09 8.3531e-02 1.1983e-08 4.4049e+01 2.3214e-01 9.6722e-02 6.0587e-01 1.5013e-01 3.9874e-02 6.7535e-11 6.0061e-01 2.5476e-07 2.0624e-07 5.3841e-02 3.0385e-01 2.6532e-09 9.7681e-02 3.1858e-08 4.2094e-08 1.6999e-06 2.4220e-01 6.5469e-02 7.7248e-08 1.0687e-01 9.6119e-08 4.3373e-02 3.4932e-01 5.6417e-02 1.1625e+01 9.4767e-08 1.7245e-08 1.0143e-01 1.5751e-01 1.5589e-07 3.1991e-08 1.5279e-09 1.0600e-07 1.0521e-01 5.8041e-01 5.7862e-07 6.8317e-02 2.4081e-08 1.9565e+00 7.4052e-01 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.7.conv1d.weight', (0 ,.,.) = 1.0529e+00 3.8757e+00 3.7754e+00 ... 4.4254e-01 -3.4523e-01 -1.0291e-01 1.5095e+00 2.4575e+00 1.6089e+00 ... 1.7952e-01 -1.2080e+00 -6.9600e-01 6.5311e-01 1.1343e+00 1.4454e+00 ... 8.3564e-01 7.0717e-01 -1.5440e-01 ... ⋱ ... -5.8892e-01 -6.6297e-01 -3.7407e-01 ... -2.4059e-03 4.1757e-01 3.0214e-01 -2.3222e-01 -4.4655e-01 -4.5594e-01 ... 1.1238e-02 4.7837e-01 1.5670e-01 1.1653e+00 3.5081e-01 -2.5347e-01 ... -2.3556e-01 2.3423e-01 -1.9039e-01 (1 ,.,.) = -1.6885e+00 1.9148e+00 -8.8712e-01 ... -5.7404e-01 9.3855e-01 -1.8792e+00 6.1432e-02 -4.1108e-01 4.7087e-02 ... -8.2081e-01 -6.1591e-01 -9.2098e-02 1.9864e-02 2.4329e-02 -9.4784e-03 ... 3.0884e-01 1.7533e-01 1.2645e-01 ... ⋱ ... 8.2526e-02 8.2653e-02 -5.7011e-02 ... 3.6185e-01 3.3817e-01 4.4439e-01 1.6853e-01 7.1762e-02 -3.8038e-02 ... 2.2377e-01 1.5718e-01 1.7180e-01 1.4553e-01 -1.6342e-01 -1.1863e-01 ... 1.0773e-01 -2.1940e-01 -9.9362e-02 (2 ,.,.) = -5.3311e-02 2.3856e-01 3.6569e-01 ... 9.0417e-02 3.6500e-01 6.7456e-01 1.5800e-01 1.5400e-01 -1.4746e-01 ... -1.0636e-02 9.1024e-02 -7.5079e-03 -1.4288e-01 4.5058e-02 7.4069e-02 ... 7.3295e-02 7.5404e-02 2.2525e-01 ... ⋱ ... 8.2440e-02 6.3924e-02 -1.6160e-01 ... 5.9316e-02 2.6346e-02 -2.0503e-02 -1.0922e-03 1.1213e-01 -1.8133e-01 ... 4.4410e-02 1.0853e-01 -4.5268e-02 3.2731e-01 3.3002e-01 -1.4442e-01 ... 7.0902e-02 1.3694e-01 2.7840e-01 ... (77,.,.) = 4.3486e-01 3.6720e-01 1.1861e-01 ... -1.2157e-01 3.8807e-01 1.3101e+00 -4.6372e-02 5.2028e-02 2.9571e-01 ... 4.1118e-01 1.7782e-01 2.5223e-01 -1.6780e-01 -3.2026e-01 7.4724e-02 ... -2.3654e-01 -3.5995e-01 5.3968e-02 ... ⋱ ... -1.0499e-01 -2.9694e-02 -3.1468e-02 ... -1.5553e-01 -1.7116e-01 -1.2240e-01 -2.0299e-01 -7.8974e-02 -2.1433e-02 ... -1.7333e-01 -1.5325e-01 -1.1611e-01 -1.4810e-01 -8.7836e-02 4.9869e-01 ... 2.8257e-01 2.5560e-01 3.3153e-01 (78,.,.) = 3.1916e-01 -5.1179e-01 -5.4707e-01 ... 5.5400e-01 -5.0809e-01 -2.0861e+00 -1.9279e+00 -7.0329e-01 -8.3487e-01 ... -4.5820e-02 1.2540e+00 1.3178e+00 2.2729e-01 3.3453e-01 9.6079e-02 ... -3.1711e-01 2.8326e-01 2.2800e-01 ... ⋱ ... 7.6057e-03 -1.9277e-01 5.0295e-02 ... 8.5391e-02 1.8244e-01 3.4017e-01 3.2960e-02 -1.0860e-01 5.6157e-02 ... 1.4160e-01 3.2270e-01 3.4672e-01 3.1290e-01 7.7094e-02 1.4394e-01 ... 2.8697e-01 1.1527e-01 1.4380e-01 (79,.,.) = 4.3437e-02 -1.3139e-01 -2.8921e-01 ... -3.6401e-01 -3.9041e-01 -3.4898e-01 -6.4615e-02 -1.1073e-01 -9.7782e-02 ... -1.9688e-02 -5.5769e-02 2.3761e-02 -1.0203e-01 -7.6795e-02 -6.0822e-02 ... -8.2102e-02 1.5033e-01 2.0831e-01 ... ⋱ ... 2.2731e-01 1.8507e-01 1.3185e-01 ... 8.3878e-02 -2.1591e-02 -6.5322e-02 1.1341e-01 4.7051e-02 5.4270e-02 ... 4.8335e-04 -4.4908e-02 -1.3254e-01 -1.3947e-03 6.5450e-02 6.8548e-02 ... -5.2640e-02 -6.0732e-02 -1.5454e-01 [torch.FloatTensor of size 80x80x8]), ('module.postnet.conv1d_banks.7.bn.weight', 0.2272 -1.2905 -11.5955 -10.1562 -0.6380 0.9985 -1.8432 -3.3960 -9.9084 -6.8331 0.9846 1.0592 -9.2445 -1.8706 -0.0822 1.0948 0.7832 -12.2910 -9.9979 0.6151 -7.9451 -1.4832 0.9173 -9.6940 -0.0553 0.4439 0.2380 0.1145 -0.0336 0.6230 -9.9262 0.7364 0.8043 0.6777 1.0693 1.0780 -6.0967 -10.6311 0.2970 0.8357 -25.8087 1.6300 -10.5496 1.4628 -2.3917 0.3279 -9.6571 0.4302 1.1084 1.2249 -0.7977 -9.5146 -0.8734 0.4514 1.0442 0.7329 0.8109 0.8417 1.6141 -10.7616 -11.1164 -2.1931 0.6783 -11.2959 1.7023 -10.7230 0.5006 -29.0832 -1.0327 -10.6104 0.4630 0.8340 0.6053 0.5325 -9.7004 0.5927 -0.0174 -14.3934 -2.5815 -0.0260 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.7.bn.bias', -0.1861 -0.3294 -0.4716 -0.3899 0.1369 -0.5080 -0.2138 -0.2117 -0.2370 -0.6222 -0.8410 -0.4249 -0.2393 0.2996 -1.3290 -0.2597 -0.7508 -0.3970 -0.3295 -0.4606 -0.4718 -1.7030 -0.0700 -0.0053 -4.2104 -0.1452 -0.3915 -0.1965 -4.5503 0.0065 -0.3280 -0.0803 -0.1242 -0.2823 -0.3452 -0.1443 0.6449 -0.3834 -0.1812 -0.4946 -0.4245 -0.3413 -0.4955 -0.3791 -0.3720 -0.2363 -0.6808 -0.1327 -0.5609 -0.4934 0.1324 -0.3644 0.0195 -0.0893 -0.2136 -0.2123 -0.3184 0.1364 -0.5017 -0.4111 -0.2907 -0.1674 -0.5139 -0.4316 -0.3985 -0.3544 -0.5609 -0.1713 -0.0734 -0.2134 -0.4465 -0.6532 0.1022 -0.2596 -0.2295 -0.3866 -4.4528 -0.6484 -0.2331 0.4115 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.7.bn.running_mean', 1.6550e-02 6.5145e-02 1.9891e-06 5.4034e-06 7.5269e-01 5.0953e-01 4.9518e-05 1.9469e-05 7.6647e-06 2.7089e-06 5.3703e-01 4.1142e-01 4.1399e-06 1.7659e-04 6.9984e-01 3.8809e-01 5.6094e-01 1.3415e-05 3.6147e-06 2.3200e-01 1.3136e-06 1.9973e-05 9.6954e-01 1.4150e-05 2.4474e-01 1.2881e-02 2.8005e-03 1.8118e-03 4.0178e-01 3.7376e+00 2.5784e-07 2.0043e+01 3.2837e-01 2.6965e-01 4.4999e-01 5.4624e-01 1.0382e-05 2.4678e-10 9.0943e-02 3.8726e-01 1.4177e-05 3.8693e-01 2.6754e-06 5.6915e-01 7.8282e-05 3.4047e-02 2.5442e-07 1.1157e-02 4.1637e-01 4.8645e-01 6.3600e-01 3.6457e-06 6.9767e-01 2.0002e+01 7.8001e-01 3.4457e-01 1.1104e-01 2.3491e+01 3.6410e-01 4.6522e-06 1.3357e-08 5.2513e-06 2.6707e-01 3.3389e-08 4.1382e-01 2.0373e-06 7.9752e-05 2.6121e-05 7.2409e-02 3.5990e-09 2.0751e-01 6.8377e-01 2.1786e-01 8.5901e-02 1.4765e-05 5.3136e-01 1.0216e+01 1.6747e-06 2.0335e-05 5.0371e+00 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_banks.7.bn.running_var', 3.5441e-02 2.7839e-02 8.9386e-09 9.2294e-08 4.3831e-01 1.0638e-01 1.3888e-06 4.8367e-07 6.0299e-07 6.9106e-08 1.0926e-01 8.5390e-02 2.9100e-07 1.8051e-06 3.1060e-01 1.2558e-01 2.1669e-01 1.2312e-07 5.0283e-08 1.1093e-01 3.4354e-09 2.2049e-07 1.4796e+00 4.3755e-07 3.0358e-02 2.0056e-02 1.2561e-03 7.3848e-04 5.6786e-02 4.6312e+00 2.0893e-10 7.2274e+01 2.2853e-01 3.6890e-01 1.0857e-01 1.6555e-01 1.4318e-07 7.3806e-13 4.6212e-02 1.4513e-01 2.5276e-07 6.5982e-02 2.9542e-08 8.2092e-02 8.8085e-07 2.4429e-02 5.6086e-10 1.5524e-02 8.6545e-02 7.6177e-02 2.7218e-01 1.9632e-07 2.0211e-01 6.7753e+01 1.6525e-01 1.1700e-01 3.5328e-02 9.9202e+01 4.7470e-02 6.9109e-08 1.7118e-11 9.1332e-08 1.7900e-01 5.3306e-11 6.9717e-02 1.6193e-08 3.3868e-06 5.5110e-07 3.8218e-02 1.4721e-11 8.3060e-02 2.7641e-01 1.0380e-01 2.5674e-02 1.8893e-06 3.2152e-01 1.8757e+01 9.2221e-09 7.9450e-07 4.9587e+00 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_projections.0.conv1d.weight', ( 0 ,.,.) = -1.3957e-01 -1.2377e-01 -7.3275e-02 -1.4006e-02 1.5525e-01 2.0347e-01 3.1622e-02 -1.2623e-02 2.2276e-01 ⋮ 5.6694e-03 6.4154e-02 1.2554e-01 -1.9284e-01 -2.1595e-01 -3.1800e-01 -5.1342e-02 -7.2566e-02 -8.4694e-02 ( 1 ,.,.) = 1.2366e-01 4.6452e-02 4.3532e-02 -3.3146e-02 1.1825e-01 4.0882e-02 1.7882e-01 8.2681e-02 -2.3802e-01 ⋮ -2.5566e-02 6.5342e-02 2.3106e-02 6.9835e-02 1.1464e-01 9.0261e-02 -1.3521e-01 -1.4328e-01 -2.3939e-01 ( 2 ,.,.) = 4.3039e-02 1.7098e-01 2.8486e-01 -1.0624e-01 -4.0530e-02 -2.6259e-02 3.6910e-01 2.5975e-01 3.2112e-01 ⋮ 5.4088e-03 -2.3620e-02 -1.2758e-02 -2.0870e-02 -7.2901e-02 8.4686e-02 -4.1611e-02 -5.8244e-02 -1.3187e-01 ... (253,.,.) = 6.7897e-02 3.1555e-01 -1.0163e-01 8.5174e-02 9.9121e-03 -2.6476e-02 -1.3393e-01 -4.0839e-01 -1.3982e-01 ⋮ 1.8264e-01 3.2260e-02 2.5612e-02 2.6153e-02 -5.8576e-02 -7.5391e-02 -8.1473e-02 -6.3125e-02 -1.1814e-01 (254,.,.) = 3.1201e-02 1.7619e-01 5.9133e-02 8.9054e-02 -8.6981e-03 -2.5639e-02 1.9719e-02 4.4718e-01 1.6546e-01 ⋮ -1.0220e-02 -8.4788e-02 1.4365e-01 -3.9882e-01 7.7564e-03 2.0031e-02 -1.8677e-01 -2.0325e-01 -2.1621e-01 (255,.,.) = 1.5317e-01 3.0041e-02 4.7304e-01 1.6127e-02 -9.4994e-02 1.3071e-01 -8.1051e-02 -1.4608e-01 -1.1953e-01 ⋮ -1.0212e-01 6.9462e-02 1.4669e-01 1.3095e-01 2.4410e-01 1.6540e-01 -7.0448e-02 -2.9191e-03 -5.3479e-02 [torch.FloatTensor of size 256x640x3]), ('module.postnet.conv1d_projections.0.bn.weight', 1.0064 1.1528 0.5419 0.6797 0.1785 1.0887 0.8183 0.8454 0.6532 1.1385 1.0083 0.5864 0.7713 0.7591 0.8494 1.1892 0.9368 0.6308 0.5809 0.4602 1.4810 0.7448 0.6929 0.7922 0.7155 1.1933 0.6255 0.8659 1.0119 0.5903 0.4898 0.4952 1.3038 0.5721 0.5777 1.1893 1.2678 0.2680 1.0207 0.5819 1.0601 0.6649 -0.5641 0.7843 1.0840 0.7581 0.5106 1.0091 1.5557 0.9729 1.1628 1.0890 0.6408 1.2018 0.5435 1.1324 0.8478 0.8005 0.3795 0.6147 0.5328 0.8774 0.8778 0.4229 1.0529 0.9138 0.6883 0.5685 -0.5754 0.6103 0.6324 -0.5165 0.8187 0.8786 0.7707 0.7669 1.0023 0.4995 0.7858 0.7484 0.6385 0.7918 0.4057 0.8278 1.0741 0.9531 0.8309 0.7774 0.6918 0.4724 0.4893 0.8787 0.3343 0.6209 1.0224 1.1883 1.1650 0.8231 0.7204 0.6680 1.1056 0.4447 0.8551 1.0782 1.0202 1.0038 0.8246 0.5232 0.5444 0.9478 -0.7929 1.2236 0.4436 0.5050 0.6681 0.4004 0.8633 1.0582 0.5568 1.6561 1.1483 0.6300 1.5092 0.9100 -0.0984 1.0042 0.5777 0.7218 0.7370 1.0029 0.7365 1.0735 1.0629 0.9720 0.6372 1.0330 0.5227 0.8334 0.7459 0.5253 1.0443 0.4467 1.4061 1.5157 1.1923 0.7804 1.1574 0.7108 0.8723 0.9795 0.9159 1.4306 0.6564 0.6755 0.8446 0.8883 0.7525 0.8763 1.0265 0.3895 1.4116 0.4657 1.5146 0.6319 0.3985 0.3801 1.3908 0.7146 1.2559 1.3587 0.7255 0.7276 0.8790 1.3085 0.9402 0.9422 1.1547 1.0940 0.8019 0.5170 0.9326 0.8033 -0.6534 1.1897 0.7316 0.8126 0.7897 0.7867 1.2840 1.3320 1.1048 0.7991 0.4574 0.4702 1.0722 1.2828 0.6300 0.6376 0.9071 0.8140 0.8752 1.0837 0.4878 0.9065 1.1854 0.8245 1.1576 0.5712 1.0625 0.5408 0.5943 0.6183 1.1269 0.8454 1.0975 0.7584 0.5840 1.1025 0.6991 0.9294 0.5743 0.3285 1.2038 0.4224 1.5905 0.8091 0.7251 1.1017 0.6961 0.9184 0.5409 0.6847 0.4472 0.6680 0.4501 1.2013 0.4849 1.2200 0.4478 0.5941 0.8964 0.8002 0.6811 0.6297 1.0296 0.6396 0.5490 0.9496 0.2422 0.6818 0.7932 0.5752 0.8121 0.8411 0.2931 0.8513 [torch.FloatTensor of size 256]), ('module.postnet.conv1d_projections.0.bn.bias', 1.2812 17.1439 14.9504 10.2376 13.5551 16.2688 18.4524 13.8033 18.5095 7.7307 15.3203 13.8901 14.3580 17.4806 17.8106 4.2802 10.4670 17.6544 16.7808 16.5132 16.6126 5.4833 15.3519 2.8125 15.0179 1.9103 15.6746 16.0496 15.5229 6.0329 10.9956 17.0978 8.8174 17.2174 2.4152 15.8846 7.8960 15.6841 13.3945 15.9559 13.0596 13.7260 9.3993 14.2777 19.0674 18.1998 16.3006 11.1128 2.8543 3.8619 13.3833 16.3539 14.2264 12.3962 19.2930 5.4506 17.7114 10.9192 17.4551 13.6520 14.5601 8.8583 8.4442 12.7413 12.1143 8.1509 14.9320 9.1842 7.5563 9.0867 17.3073 13.9380 17.2866 17.2508 16.3253 12.1834 10.2829 8.4941 17.8155 6.2684 12.0686 17.6839 15.2739 13.4139 14.0117 4.3912 11.4854 3.1571 7.0127 15.3345 17.3543 11.8236 15.9150 14.1339 17.2696 12.2230 8.5823 7.0506 3.5910 16.2675 11.4610 13.9098 8.7838 11.5545 9.6257 13.1291 14.2880 14.6919 14.4129 9.2726 13.0777 12.5134 19.5631 12.8410 16.0184 17.7974 11.6444 6.0009 16.6255 16.1276 13.1532 13.8993 16.2597 12.3058 14.6869 9.1101 1.5676 17.7581 11.2021 14.0878 16.6297 18.8601 13.8957 13.6028 10.4222 4.1116 16.1921 13.8891 16.2134 18.0202 16.0847 5.8780 12.3433 4.3988 10.7676 6.0629 15.0563 9.2647 13.2574 14.3415 13.1033 2.2080 16.3508 11.3074 18.9634 -0.1173 15.5784 8.7789 11.0170 16.4970 5.0056 10.0968 18.0503 10.1258 15.0371 18.7282 13.3921 14.1134 10.7251 8.1674 9.5328 3.7456 14.1296 6.9803 14.3006 13.5137 10.7654 9.8910 10.0740 12.0639 10.0878 10.4553 10.3515 12.2953 12.6187 16.2756 10.9309 15.9904 9.5199 6.4851 6.5385 16.8993 17.7989 10.0278 14.5653 6.5099 12.6526 4.6957 13.1451 15.9175 3.8352 15.5789 13.7593 16.6251 13.9896 9.4880 7.2424 9.1136 9.5095 18.4899 19.1777 17.1391 8.6616 5.6155 15.3622 11.7768 12.6844 12.8171 3.6978 6.9844 16.1621 10.3522 6.2863 17.1446 6.6472 16.9068 16.2942 11.2442 11.6082 5.0071 16.3506 14.0592 12.3528 19.9916 10.3705 13.1318 16.8948 10.8688 15.7174 13.2476 12.0157 19.4167 14.4521 16.9671 12.6741 6.8380 14.8143 8.1581 13.5745 10.2358 14.0739 13.2618 14.1687 5.1842 21.4828 17.2211 [torch.FloatTensor of size 256]), ('module.postnet.conv1d_projections.0.bn.running_mean', 2.7753 0.3818 0.6334 173.1261 0.1075 0.5092 1.5174 1318.5923 2.3557 1.5238 1486.0178 0.3207 5.9736 0.6928 1.2868 1.5721 0.5284 2.3495 0.6995 0.1308 0.8089 14.1699 0.8847 0.7157 994.1609 4.1544 90.2139 3.0803 0.7165 4.6952 1.1713 0.4256 2.0226 0.7897 2.3223 0.6126 1660.7887 0.6905 1649.1803 0.7538 0.3325 1.1968 7.3061 6.8361 0.1586 1.1100 1.7231 1535.1931 0.3486 4.3748 1712.8698 1585.2599 176.4229 0.3289 0.3252 1.3155 938.7871 0.7834 148.8547 0.6936 1.0115 1.1828 2.8153 0.5586 2.7201 1.1228 1.8763 1.7236 19.2655 239.5662 2.7836 1.3761 296.3056 913.4092 2.0334 0.9690 1702.1686 4.1709 0.9101 1.6110 1.4030 0.9680 0.9362 2.2457 1490.8156 2.9453 1298.7178 0.2146 4.1666 1.1035 1.5424 3.6634 0.7759 0.5930 2.0226 4.2671 0.5106 1.2427 11.0188 1307.5266 1.7876 1.1753 5.3495 1.4040 1.1873 1492.0095 4.7756 0.4510 2.1644 0.5204 0.9193 1754.6244 83.8922 1.0006 3.4189 251.2662 3.6058 1024.0649 1.6469 0.2748 1425.5830 2.7002 0.1112 3.5934 0.0000 268.0367 3.7619 491.7171 0.5491 0.9947 0.7634 0.7405 1.0291 259.1695 1.1268 1.1428 1.5604 6.6672 9.2801 1.2068 1544.9845 6.3380 0.0696 1.5572 1551.6989 4.1661 224.0713 8.1500 0.7414 1424.6758 1.0982 0.5337 7.0338 2.8724 0.1823 1.1163 0.6475 21.1496 2.8433 0.6622 0.7595 0.6273 0.0477 292.5533 0.2493 2.5269 1678.3840 4.8717 1770.2681 1800.8817 1180.3121 1.7565 0.1644 3.3804 4.1438 136.4131 1667.0657 1652.9028 8.4203 0.8433 3.1129 4.2489 15.3234 0.3517 1.0394 1227.4275 1261.4265 1.9013 2.3953 1.4071 2.4481 4.8915 1.4859 0.7486 1612.6017 0.6507 0.6986 2.5622 0.3399 6.5700 2.0957 0.0565 2.1218 0.9189 691.7375 1432.2279 2.2276 2.7685 0.9708 3.7125 1.5601 2.5054 0.0597 5.2947 0.3169 210.7291 4.7917 4.0418 7.4134 2.0436 0.2562 1.0144 2.8074 1.9981 0.6851 162.5275 0.6670 6.1842 3.5061 0.9552 1.5708 1.4534 0.4530 0.5706 0.7868 284.6758 0.2575 2.8801 0.3087 1.1671 232.4743 85.9347 10.7732 1.2698 0.9208 0.6425 0.5448 3.0128 0.3295 1.6000 1.0468 8.7842 1.4011 0.9018 1.6300 0.8632 [torch.FloatTensor of size 256]), ('module.postnet.conv1d_projections.0.bn.running_var', 87.6942 7.1318 3.9194 20.9474 0.3606 20.5983 13.0474 344.4141 9.4391 6.7778 323.8777 2.4856 14.0690 23.9334 8.4666 8.3945 3.4187 9.3119 3.6598 0.6194 25.1080 54.8546 4.9288 3.8281 183.1740 152.2856 27.4477 16.0468 4.5722 21.0806 5.6672 14.7286 12.1208 2.9059 59.0178 4.9073 411.7721 2.8066 444.3587 2.9010 7.0539 7.6301 42.0356 26.9875 20.4478 10.1745 9.6833 427.8193 3.5250 41.9636 516.1542 393.0963 21.4590 4.3005 6.8636 12.1839 111.5021 5.9088 71.1304 4.0510 4.5807 6.5794 13.2356 2.5152 16.9195 4.1334 9.8563 8.3859 97.8492 23.7796 16.0726 7.0877 26.4761 126.4229 19.0890 5.9119 444.1166 47.8581 8.0636 7.6867 9.7661 9.9720 3.9200 9.8997 443.1576 23.4630 249.8578 1.3360 23.5595 7.3501 6.8262 17.0668 3.8783 4.9632 29.8222 28.6368 4.3469 7.3488 47.0580 283.7618 12.8363 6.3048 32.6211 12.6005 5.2659 418.9775 20.6403 5.1937 13.2954 4.2208 4.4080 502.2701 47.2290 7.4357 13.4018 295.2262 13.6607 334.0442 4.8749 6.5240 366.2472 16.7205 5.6659 12.5702 0.0000 27.1990 104.4047 71.3939 4.5043 8.3918 6.6005 9.5063 7.0208 35.3314 4.8568 5.2463 6.5439 37.8944 24.7531 7.8764 429.4724 52.3462 3.5580 28.2907 499.4714 14.6136 19.3546 68.2662 10.0461 454.9553 8.8021 4.4240 33.5397 14.6746 5.6596 4.7589 4.2637 558.6557 19.8264 2.9783 7.4728 2.8951 1.7417 23.0010 1.3080 14.2484 512.3925 31.2630 631.1724 514.1356 293.9054 15.0251 4.8618 26.9479 19.3160 50.9048 424.8301 453.1019 41.4095 4.3910 12.6802 15.6503 83.9994 2.0973 4.7766 251.0884 397.8469 24.1732 24.4222 9.3091 69.4376 17.9582 10.6635 3.7720 435.2577 7.4856 5.7441 13.6646 6.8971 18.4054 16.5748 1.4229 15.4817 5.4005 95.4689 368.6776 12.5717 9.3346 3.1895 12.7112 5.1520 19.3879 2.3535 20.2337 2.8619 31.6802 17.9289 32.6600 37.4601 10.6008 2.5121 3.9772 15.4718 9.1469 4.6861 31.5696 10.4808 26.3760 20.5761 6.5184 8.2492 6.2479 1.9784 5.1187 5.4346 22.5699 2.8018 15.6621 2.4941 8.5163 48.1840 31.3503 21.0698 11.0356 7.7330 2.9362 5.3684 28.6198 0.8928 7.6283 6.7332 37.0229 9.1180 5.0407 9.2884 7.9118 [torch.FloatTensor of size 256]), ('module.postnet.conv1d_projections.1.conv1d.weight', ( 0 ,.,.) = 5.2195e-02 1.9907e-02 -7.0854e-02 2.7770e-01 1.4073e+00 1.1054e+00 5.4670e-01 4.2194e-01 -1.3198e-01 ⋮ 1.9469e-01 1.1500e-01 -1.7242e-01 6.7335e-01 1.2255e+00 3.9269e-01 1.7183e-03 4.5895e-01 5.6313e-01 ( 1 ,.,.) = -1.0461e-01 -1.1721e-01 -2.7959e-01 2.5528e-01 7.7451e-01 4.1646e-01 9.0024e-02 4.9471e-01 6.7004e-01 ⋮ -1.1452e-01 3.7357e-01 -5.2436e-01 8.3869e-01 1.3527e+00 6.5560e-01 3.7751e-01 6.1575e-01 5.9562e-02 ( 2 ,.,.) = 9.4125e-02 8.5159e-02 1.2492e-01 8.1844e-01 1.6799e+00 1.2010e+00 6.1985e-01 8.4654e-01 5.3842e-01 ⋮ 1.4908e-02 1.7344e-01 -2.5035e-03 1.3253e+00 1.9885e+00 1.3415e+00 6.8385e-01 7.1794e-01 3.1948e-01 ... (77 ,.,.) = -1.0669e-01 -8.0844e-02 -2.5232e-01 1.1422e-02 1.0861e+00 5.3154e-01 5.2617e-01 6.5394e-01 -6.2136e-02 ⋮ 4.6888e-01 4.8954e-01 2.3976e-01 9.7884e-01 1.8881e+00 5.1218e-01 1.9927e-01 7.5671e-01 6.0120e-03 (78 ,.,.) = -1.1719e-01 4.8443e-02 9.3944e-02 5.3119e-01 1.1578e+00 4.9808e-01 8.0624e-01 1.2984e+00 4.4143e-01 ⋮ 3.4782e-01 4.7327e-01 1.8600e-01 8.2889e-01 1.0548e+00 4.3066e-01 2.2504e-01 2.0290e-01 2.1498e-01 (79 ,.,.) = 1.1439e-01 -1.7348e-02 -3.0525e-01 2.0294e-01 7.2653e-01 4.0443e-01 4.9828e-01 4.0238e-01 1.2768e-01 ⋮ 2.4098e-01 1.2117e-01 4.9355e-01 4.3716e-01 8.7794e-01 4.6880e-01 3.7854e-01 4.1184e-01 3.0943e-01 [torch.FloatTensor of size 80x256x3]), ('module.postnet.conv1d_projections.1.bn.weight', 1.1131 0.9724 1.0952 2.0732 0.5533 1.1140 0.6586 0.9462 0.5307 0.8061 1.5967 0.8221 0.8902 0.9065 0.4332 1.3648 1.5116 0.8725 0.8722 1.0873 1.2751 0.9191 0.9641 0.8794 0.4741 0.7738 0.7530 0.6498 0.9927 1.0602 1.1159 1.1088 0.7379 0.8107 1.4319 0.9653 0.6382 0.4784 0.4891 1.1939 0.7339 0.6188 0.8833 2.4050 -0.3177 0.9896 1.1539 0.9532 1.2655 2.0908 0.8568 0.6974 1.0724 1.1096 0.8678 0.8552 1.0003 0.7046 1.0005 1.0480 0.5479 1.1870 0.2342 1.0157 1.1361 1.0313 0.9024 1.1572 0.1972 0.7557 1.2411 1.2459 1.3304 0.9391 0.6778 0.8988 1.1606 1.3418 1.2340 0.6855 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_projections.1.bn.bias', -0.3643 -0.1151 -0.4266 -0.2956 0.0577 -0.4398 -0.3374 -0.4977 -0.3623 -0.4534 -0.2369 -0.1629 -0.4315 -0.2628 -0.1724 -0.5756 -0.1301 -0.4351 -0.3917 -0.2696 -0.1161 -0.4591 -0.4481 -0.4318 0.0008 -0.2230 -0.2198 -0.4657 -0.4189 -0.2279 -0.5386 -0.5261 -0.4154 -0.3129 -0.5103 -0.2308 -0.1940 -0.2578 -0.3725 -0.5918 0.2227 -0.5157 -0.2997 -0.6149 -0.2551 -0.6179 -0.1135 -0.2885 -0.2444 -0.5553 0.0099 -0.3510 -0.4529 -0.2292 0.0338 -0.4650 -0.5090 -0.1376 -0.2306 -0.4613 -0.1367 -0.1225 -0.1226 -0.3641 -0.1382 -0.4473 -0.4051 -0.2548 -0.0901 0.0033 -0.5289 -0.6576 -0.3296 -0.3922 -0.2572 -0.3455 -0.3026 -0.3475 -0.1392 -0.1041 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_projections.1.bn.running_mean', 4794.6543 3585.3281 4854.7217 5939.0063 2431.0156 4948.5771 3388.5972 4649.7622 2335.0654 2515.7034 5599.3979 2067.6697 3672.5522 2640.3167 1735.0277 5550.9165 6470.2280 4514.9443 3035.8079 3553.8733 6698.5781 3057.2085 3212.4167 3854.0215 832.7523 3202.8611 1762.6179 2960.1370 4398.3604 3143.0359 4999.7422 5101.7256 3999.0593 3128.9534 4951.9438 5651.3735 2015.0485 1073.7839 3401.7195 3991.4004 3251.1260 2128.0889 3225.2791 8126.6465 -1240.8994 4030.9817 5127.5361 4162.8989 5225.8721 7719.8164 3666.3826 3584.5942 4275.7173 3859.2456 3013.1509 3769.0432 3723.0259 2336.5325 4272.7793 4369.0854 1019.1653 4665.3867 1117.1483 4695.1147 3874.8955 3881.8813 4239.8242 4921.7319 888.8702 4234.1348 3747.1763 3375.3000 3834.5476 3834.1763 3178.7554 3425.8835 4922.0522 5076.1162 3843.5701 2470.2336 [torch.FloatTensor of size 80]), ('module.postnet.conv1d_projections.1.bn.running_var', 2238.4553 780.7103 2429.7249 2660.2173 233.1463 1949.3705 633.8268 1360.0896 263.0512 484.6798 2081.5149 235.7928 674.4207 409.4161 297.9118 2178.4915 5070.1694 1261.4553 740.6363 978.0815 5075.9692 579.2091 738.3559 1188.6294 91.6367 754.7532 339.0374 574.3027 1214.8538 713.2219 2442.1743 2224.1912 1043.5409 392.1762 1358.2344 3114.9062 257.3273 276.4440 549.5962 817.6767 609.9324 393.8593 394.2612 4004.6206 97.1118 744.3381 2400.0366 1287.6052 1797.8269 5475.1440 891.5662 755.5529 1328.1738 1087.8168 711.0801 859.1395 937.4152 276.7247 1025.1550 963.1095 172.1337 1515.5322 94.4020 2225.9226 1343.9473 659.2188 1427.4015 2310.4495 50.0436 1735.0659 795.8002 596.3701 951.0836 914.1016 672.8875 560.1312 2155.1333 2150.8623 832.1367 319.0779 [torch.FloatTensor of size 80]), ('module.postnet.pre_highway.weight', 6.2643e-02 2.4435e-03 -1.2216e-02 ... -5.7193e-02 -9.9890e-02 1.6946e-02 -1.0881e-01 7.2113e-02 9.5153e-02 ... -1.0533e-01 -8.7101e-02 4.5390e-02 -3.6340e-04 -6.7453e-02 -8.3466e-02 ... -7.3356e-02 5.7696e-02 -4.9411e-02 ... ⋱ ... 9.3740e-02 -3.9298e-02 6.4824e-02 ... 1.1028e-01 5.7303e-02 -4.1076e-02 2.2935e-02 9.1074e-02 -8.9565e-02 ... 9.3405e-02 -5.5724e-02 -7.9542e-02 6.1049e-02 2.0629e-02 2.0692e-02 ... -3.5433e-02 -8.5093e-02 -2.7144e-02 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.0.H.weight', 2.4867e-01 4.1168e-02 -5.3582e-02 ... -9.9787e-02 3.2798e-01 -5.0461e-02 -6.6196e-03 -1.9700e-01 -1.6881e-01 ... 4.9598e-01 -7.2049e-02 2.4578e-01 2.2826e-01 -2.1728e-01 2.0021e-01 ... 3.1759e-01 2.9525e-02 1.7780e-01 ... ⋱ ... 6.7725e-02 6.6767e-01 -1.8588e-01 ... 9.5651e-01 3.6561e-02 5.3880e-02 8.1281e-02 -1.3331e-01 2.2614e-01 ... 1.0429e-01 -1.0845e-01 1.0289e-01 -8.0866e-02 7.2808e-02 2.8952e-01 ... -3.7574e-02 -1.4812e-01 2.7187e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.0.H.bias', -0.1444 -0.2888 -0.3170 0.0658 -0.0633 -0.2048 -0.0974 0.2035 0.0461 0.1384 -0.1181 0.1240 -0.0482 0.0639 -0.1455 -0.3379 -0.1332 -0.2531 0.1723 0.3509 0.0866 0.0905 0.1788 0.1534 -0.0045 -0.2354 -0.1100 0.0690 0.2795 -0.1951 -0.0301 0.0274 0.8453 0.1355 -0.1199 0.3028 0.0114 -0.3806 -0.1682 0.2034 0.4429 -0.4412 0.0460 0.0364 -0.0529 -0.1125 -0.1021 0.1971 0.0005 -0.0324 0.1708 0.3175 -0.6596 0.0186 0.2776 -0.1286 0.0511 -0.1114 0.1317 -0.0814 0.1630 -0.0739 -0.0087 -0.3484 -0.3247 0.1713 0.1003 0.1114 0.1164 0.2701 0.2484 0.0330 -0.0751 -0.0210 0.0601 -0.1773 0.1507 0.0427 -0.1083 -0.0843 [torch.FloatTensor of size 80]), ('module.postnet.highways.0.T.weight', 1.0957e-01 2.7731e-01 -1.2678e-01 ... -8.3220e-02 -6.2637e-02 -2.5856e-01 1.7775e-02 2.9560e-01 6.6594e-02 ... -1.6999e-01 -1.0914e-01 9.8891e-02 1.2318e-01 7.0119e-02 1.5634e-01 ... -1.2690e-01 8.3795e-02 -3.7894e-02 ... ⋱ ... -6.3693e-02 -6.7203e-01 -2.5071e-01 ... 7.2427e-02 4.0980e-01 3.5086e-01 1.4521e-01 -2.2465e-01 -1.8351e-01 ... -3.6090e-01 -1.9848e-01 7.7147e-02 1.6308e-01 -6.5072e-01 -1.4871e-02 ... -1.8386e-01 -1.3974e-01 3.3131e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.0.T.bias', -0.9706 -0.9539 -1.0019 -0.6401 -0.6802 -0.7004 -1.1880 -1.0708 -0.8670 -0.8222 -1.0117 -1.0952 -0.9540 -1.0298 -1.0624 -0.6120 -0.3936 -0.8775 -1.0260 -0.9652 -0.4894 -0.9425 -0.9753 -0.8730 -1.3253 -1.0251 -1.0140 -0.8511 -0.7494 -1.0202 -1.0308 -0.9396 -1.0584 -0.7414 -0.9854 -0.8145 -1.1791 -1.1111 -0.7695 -0.7219 -0.7493 -1.2727 -1.0238 -0.5376 -1.0984 -0.8949 -0.7888 -0.9382 -0.7201 -0.5765 -0.8168 -1.0600 -1.0613 -0.8644 -0.9836 -1.3913 -1.2095 -1.0889 -0.6595 -0.4375 -0.9259 -1.0337 -1.2776 -0.7168 -1.1136 -1.0901 -0.8697 -0.9424 -1.0496 -0.7632 -0.7484 -1.1693 -0.8174 -0.9601 -0.8869 -0.8564 -0.6329 -0.7576 -0.8810 -1.0124 [torch.FloatTensor of size 80]), ('module.postnet.highways.1.H.weight', 9.1230e-01 -1.3596e-01 -1.4093e-01 ... 1.5030e-01 7.6379e-01 -3.3012e-01 6.1809e-01 1.2302e+00 -4.1767e-01 ... -2.9080e-01 -6.2494e-02 2.1230e-01 -1.0950e-01 2.2074e-01 6.3138e-01 ... 1.8428e-01 -7.8434e-02 -3.1081e-01 ... ⋱ ... 1.4937e-01 4.2980e-01 2.6760e-02 ... 6.7943e-01 4.4142e-01 -2.7297e-01 -3.7320e-02 -1.9871e-01 1.0014e-01 ... -1.4039e-01 8.2422e-02 -4.0768e-01 -1.1515e-01 -2.6997e-01 1.8682e-01 ... -7.9322e-02 1.3985e-01 1.2204e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.1.H.bias', -0.2227 0.0567 0.0236 0.2375 -0.0518 0.0564 0.0382 -0.2974 -0.1225 -0.0670 -0.3467 0.0550 -0.1570 -0.1601 -0.0278 -0.1593 -0.0291 -0.0163 -0.0050 0.0043 0.0402 0.1176 0.3112 0.1470 -0.1887 -0.1433 0.0668 -0.0419 -0.0184 -0.1379 -0.1999 0.2844 -0.2743 -0.2320 0.1254 -0.1168 0.0154 -0.2265 -0.0329 0.0959 -0.0949 -0.1417 0.2770 -0.3467 -0.1311 0.0278 0.0717 0.0237 -0.0419 0.1390 -0.2300 -0.0031 -0.1261 -0.1790 0.1752 -0.1531 -0.1270 0.0876 0.3785 0.1306 -0.0067 -0.0225 -0.1686 -0.1789 0.3102 0.5037 0.1552 -0.2649 0.0821 0.0415 -0.0635 0.0556 -0.0018 0.0486 -0.2480 0.1004 -0.1115 0.0045 -0.0126 -0.1510 [torch.FloatTensor of size 80]), ('module.postnet.highways.1.T.weight', -4.9927e-01 -2.6072e-01 -1.0327e+00 ... -3.0688e-01 9.2184e-01 -8.2108e-01 -2.4358e-01 -2.5978e-01 -1.2319e-01 ... -1.5505e-02 4.4448e-01 2.7695e-01 2.6090e-01 -3.5126e-01 2.7459e-02 ... 5.0993e-02 -7.9095e-02 1.7979e-01 ... ⋱ ... 2.3238e-03 -1.8175e-01 -2.5332e-01 ... -2.2763e-01 -4.7735e-02 1.1920e-01 -1.3132e-01 -3.8461e-01 -3.0022e-01 ... -2.1166e-02 -1.6448e-01 -2.1012e-01 -1.8645e-01 -3.8559e-01 2.9771e-02 ... 1.2365e-01 6.4848e-02 -2.0881e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.1.T.bias', -0.9412 -0.7066 -0.6889 -0.9583 -0.9480 -0.7388 -0.5661 -0.9317 -1.0198 -0.9792 -0.1181 -0.8724 -1.0719 -0.8781 -0.9275 -0.5863 -0.5480 -0.6873 -0.7334 -0.9447 -0.5334 -0.8045 -0.7259 -0.4503 -1.1942 -0.7621 -0.8239 -0.7301 -0.7982 -0.9111 -0.9003 -0.7060 -0.2966 -1.0202 -0.8996 -1.1389 -1.0155 -1.0354 -0.5346 -0.5635 -0.5768 -0.9595 -0.6603 -0.6183 -1.1203 -0.8846 -0.4604 -0.8136 -0.5419 -0.5879 -0.8771 -0.9569 -0.8667 -0.8589 -0.8157 -0.9056 -1.0193 -0.8903 -0.1502 -1.0415 -1.0744 -0.6627 -1.0806 -0.6878 -0.6414 -0.6909 -0.9609 -0.6565 -0.8993 -0.8040 -1.0628 -0.7982 -0.5010 -0.6472 -0.8983 -0.9107 -0.4975 -0.5973 -0.9040 -0.9348 [torch.FloatTensor of size 80]), ('module.postnet.highways.2.H.weight', -2.4274e-01 -2.7250e-01 4.9373e-02 ... 1.1653e-01 4.0595e-01 -1.3510e-01 3.1051e-02 -2.2746e-01 8.5297e-02 ... -9.7518e-02 7.8655e-02 -7.2070e-02 -1.0259e-01 -1.5005e-01 7.2323e-01 ... 2.5662e-02 2.6174e-01 3.0316e-02 ... ⋱ ... -2.1651e-02 -2.5406e-01 1.0460e-01 ... -2.5282e-01 2.2565e-01 2.3399e-01 -1.0750e-01 1.9786e-01 3.0018e-01 ... -2.9912e-01 6.4900e-02 5.8003e-02 1.6612e-02 2.2036e-03 3.4828e-01 ... -3.8414e-03 2.4652e-01 4.8551e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.2.H.bias', -0.0003 -0.0291 0.0044 0.0063 0.0281 0.0094 0.0038 -0.0918 -0.0292 -0.0110 0.0265 -0.0524 -0.0170 -0.1395 -0.0965 -0.0057 -0.1812 -0.0947 -0.2224 0.0169 0.0092 0.0035 -0.0374 -0.0112 -0.1434 -0.0074 -0.1889 -0.0438 -0.1702 -0.0233 0.0092 -0.0700 -0.0268 0.0029 0.0047 -0.0607 -0.0209 0.0061 0.0031 -0.1285 0.0135 -0.0079 -0.1550 -0.1114 -0.0640 -0.0217 -0.0174 -0.1433 -0.0250 -0.0081 -0.0363 -0.0123 0.0056 -0.0015 0.0048 -0.0157 0.0011 -0.1992 -0.0086 -0.0597 -0.0104 -0.1039 -0.0304 -0.1452 0.0157 -0.1376 0.0116 -0.2570 0.0149 0.0108 -0.3532 -0.0075 -0.0160 -0.0134 -0.0906 -0.1336 0.0165 -0.0841 -0.0459 -0.0090 [torch.FloatTensor of size 80]), ('module.postnet.highways.2.T.weight', 3.6572e-01 5.9279e-02 -1.4287e-01 ... -2.6212e-01 -1.1769e-01 2.0280e-01 1.6128e-01 -5.0609e-02 1.7673e-01 ... -1.5705e-01 7.6402e-02 9.5267e-02 -1.4162e-01 1.2445e-01 2.3100e-01 ... -9.0434e-02 1.4724e-02 2.4342e-02 ... ⋱ ... -1.0872e-01 2.2763e-01 3.7120e-01 ... 2.9659e-01 5.8435e-02 -3.6852e-01 2.0416e-01 -6.6940e-03 4.8543e-02 ... -5.4834e-02 2.4496e-01 -3.3553e-02 1.0520e-01 2.6947e-01 -3.5717e-01 ... -1.0301e+00 5.2067e-01 2.1574e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.2.T.bias', -0.7631 -1.1226 -0.8584 -0.7720 -0.7048 -0.7164 -0.9709 -0.9457 -0.8494 -0.4088 -0.6414 -1.0601 -1.0043 -0.9389 -0.8011 -0.6059 -0.6583 -0.7376 -0.4929 -1.0052 -0.7678 -1.0918 -0.9072 -0.7922 -1.0403 -1.1020 -1.0353 -0.7563 -0.8397 -0.7072 -0.9279 -0.6711 -0.3463 -0.8225 -0.7371 -1.0051 -1.1878 -0.9544 -1.0134 -0.8527 -0.9266 -1.0247 -1.0826 -0.7876 -1.2755 -0.7468 -0.5426 -1.0819 -0.4936 -1.2068 -0.6783 -0.6852 -0.9803 -0.7238 -0.9590 -0.6381 -0.9939 -1.0709 -0.8101 -0.6400 -0.4283 -0.9627 -0.9541 -0.7106 -0.7013 -1.0249 -0.9598 -0.3898 -1.1135 -0.7361 -1.0572 -1.1059 -0.5316 -0.9943 -0.6925 -0.8180 -0.9290 -0.9704 -0.9643 -0.6030 [torch.FloatTensor of size 80]), ('module.postnet.highways.3.H.weight', -2.6135e-01 1.6507e-01 8.8397e-02 ... 5.1202e-04 -2.0750e-01 1.0729e-01 -1.2196e-01 -2.7860e-01 -5.0616e-02 ... 5.1362e-01 -1.6859e-01 -2.2015e-01 -1.5429e-01 1.3958e-01 2.1380e-01 ... -2.3058e-01 -3.3214e-03 6.4230e-02 ... ⋱ ... 1.1218e-01 1.5627e-01 3.7896e-02 ... 1.4440e-01 4.5716e-02 5.7027e-02 -9.7311e-02 -4.6075e-02 9.4683e-02 ... -4.8174e-02 2.6431e-01 -4.8024e-02 -5.8064e-02 -2.0418e-01 -3.5700e-02 ... 3.8692e-01 1.0238e-01 -1.2282e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.3.H.bias', -0.0055 -0.0279 0.0057 0.0040 -0.1291 0.0077 0.0094 -0.0461 -0.0703 0.0151 -0.0295 -0.0323 -0.0245 0.0045 0.0158 0.0180 0.0138 0.0105 -0.0382 -0.0117 -0.0201 0.0082 0.0087 -0.0220 -0.0206 -0.0449 -0.0226 0.0170 -0.0161 -0.1995 -0.0356 -0.0145 -0.1746 -0.0214 0.0035 0.0142 -0.0630 0.0146 0.0069 -0.0204 -0.1873 -0.0125 -0.0455 -0.2047 0.0027 0.0093 0.0152 0.0221 -0.1992 0.0091 -0.0254 0.0187 -0.0254 0.0112 -0.0168 0.0057 0.0044 -0.0189 -0.1177 -0.0120 -0.0111 -0.1336 -0.0134 -0.0094 -0.0523 -0.0551 -0.0800 0.0031 -0.0382 0.0032 0.0198 -0.1366 -0.0048 -0.0050 -0.0746 -0.0078 0.0085 -0.1046 -0.0479 -0.1155 [torch.FloatTensor of size 80]), ('module.postnet.highways.3.T.weight', 3.7147e-01 -9.9447e-02 3.6920e-02 ... -7.1776e-02 -6.0466e-03 2.2426e-01 -1.7026e-02 9.7215e-02 2.8594e-02 ... -2.9590e-01 -6.8981e-01 2.1689e-01 -9.5328e-02 7.0704e-02 5.8119e-01 ... -1.4032e-01 4.2478e-02 -1.0148e-01 ... ⋱ ... -4.4207e-03 2.0096e-01 3.9831e-01 ... -2.6489e-01 6.0947e-01 -1.2633e-01 -3.0431e-01 1.5210e-01 -6.5500e-02 ... 2.9948e-01 -4.6027e-01 4.6829e-02 2.5476e-01 1.2621e-02 6.4398e-02 ... 6.0937e-02 -1.7206e-01 1.1278e-01 [torch.FloatTensor of size 80x80]), ('module.postnet.highways.3.T.bias', -0.7148 -0.7708 -0.9425 -0.6730 -0.1652 -0.4560 -0.7011 -0.7110 -1.1224 -0.9510 -0.8533 -1.1357 -0.8920 -1.2216 -1.0353 -0.8836 -0.6786 -0.4523 -0.9874 -0.8611 -0.5129 -1.0361 -0.8023 -0.9454 -0.9796 -0.9083 -0.9954 -0.9217 -0.9016 -0.8483 -0.7846 -0.7049 -0.7861 -0.6688 -0.7724 -0.8507 -0.9932 -1.0269 -1.1675 -0.7789 -0.5356 -0.7966 -0.9293 -0.8827 -0.9904 -0.6809 -0.3062 -1.1263 -0.7787 -0.8213 -0.9363 -0.9193 -1.0666 -0.9542 -0.8626 -1.1382 -1.1062 -0.9254 -0.7698 -0.9057 -0.8849 -0.7466 -0.6831 -0.6838 -0.8847 -0.5496 -0.4942 -0.5930 -0.8724 -0.8466 -0.7382 -0.7323 -0.8553 -0.7705 -0.5874 -0.8952 -0.8524 -0.9613 -0.6429 -0.8777 [torch.FloatTensor of size 80]), ('module.postnet.gru.weight_ih_l0', 6.4939e-01 -7.9124e-01 4.0934e-02 ... -3.9241e-01 -1.3943e+00 -4.7296e-01 3.7583e-01 5.4636e-01 6.5514e-02 ... -1.0450e+00 2.3660e-01 7.3014e-01 3.7677e-01 -3.4706e-01 -5.9777e-01 ... -1.3496e-02 -1.6330e-01 -5.6673e-02 ... ⋱ ... 3.3034e-02 5.7420e-02 2.5007e-01 ... -5.5500e-02 -3.0111e-03 -2.8446e-02 2.9808e-02 -7.2426e-02 -2.0248e-01 ... -8.7287e-02 7.4915e-02 5.7272e-02 -3.2471e-02 1.0483e-03 -7.3799e-02 ... 7.3655e-03 -5.5721e-03 5.8514e-02 [torch.FloatTensor of size 240x80]), ('module.postnet.gru.weight_hh_l0', 4.3076e-01 6.5784e-01 -6.0003e-02 ... -4.1792e-01 3.4497e-01 -1.4939e-01 4.5665e-02 -2.8360e-01 -9.2253e-01 ... 1.7441e-01 3.9119e-01 1.5422e-01 -1.8379e-01 5.7175e-01 2.3410e-01 ... 4.4199e-01 -2.0712e-01 6.1023e-01 ... ⋱ ... 5.0059e-02 1.3538e-01 -4.8315e-01 ... -6.3149e+00 -9.0348e-02 -1.4320e-01 -8.1590e-02 3.6711e-01 -9.3420e-02 ... 1.8250e-01 -4.6494e+00 2.9142e-01 9.1866e-02 1.0972e-01 9.2470e-01 ... -1.2952e+00 -1.1462e-01 -2.7750e+00 [torch.FloatTensor of size 240x80]), ('module.postnet.gru.bias_ih_l0', -0.2505 -0.2566 -0.0974 0.0358 -0.6026 -0.7274 -0.4809 -0.1709 -0.1794 0.1124 -0.0431 -0.1600 -0.3489 -0.7068 -0.0657 -0.4445 -0.1368 -0.4630 -0.8422 -0.3410 -0.1546 -0.3175 -0.1967 -0.5859 -0.1293 -0.1607 -0.1061 -1.0092 -0.9594 -0.3489 -0.3379 -0.3372 -0.1905 -0.2381 -0.0517 -0.3846 -0.6198 -0.0969 -0.2238 -0.5174 -0.6875 0.0439 -0.3773 -0.0792 -0.2570 -0.2687 -0.0525 -0.4541 0.1255 -0.1408 -0.5055 -0.0363 -0.3748 0.0178 -0.1843 -0.1789 -0.1925 -0.6474 -0.3790 -0.1726 -0.4518 -0.2185 -0.1005 -0.0675 -0.6689 -0.4840 -0.2047 0.0146 -0.7786 0.0093 -0.1294 -0.7795 -0.0116 -0.3725 -0.5102 -0.0309 -0.1630 -0.2327 -0.1237 -0.1725 -0.0096 0.2268 0.3844 0.0508 -0.3199 -0.6231 0.4347 -0.4611 0.3184 0.3416 -0.1471 -0.0379 0.2543 0.0177 0.1520 0.2060 -0.2037 -0.4328 -0.1686 -0.3331 0.1283 -0.0728 -0.1500 -0.3873 -0.5087 0.0413 -0.3006 -0.5866 -0.0884 0.2979 -0.3157 -0.0681 0.2709 0.2048 0.2323 -0.1317 -0.5729 0.5457 -0.2612 -0.1786 -0.0329 0.6196 -0.0559 0.1978 -0.5102 0.2469 0.3289 0.1784 0.6776 -0.0149 -0.1520 0.6013 -0.0514 0.3295 -0.3994 -0.3703 -0.1482 -0.1113 0.2019 -0.1002 -0.2029 -0.2571 -0.2910 0.5520 -0.1560 -0.1629 -0.1631 -0.0962 -0.2216 0.6470 -0.0803 -0.0350 0.0309 -0.4257 -0.0802 0.6969 0.2613 0.4401 0.4948 0.0739 -0.0251 0.0023 0.0191 0.0396 0.0194 0.0033 -0.0041 0.0449 0.0369 -0.0515 0.0297 -0.0687 0.0342 -0.0017 0.0375 0.0114 -0.0395 -0.0169 -0.0059 0.0648 0.0229 0.0080 0.0081 0.0039 -0.0746 -0.0197 0.0403 0.0102 -0.0143 0.0011 0.0460 -0.0270 -0.0287 0.0037 -0.0048 0.0057 -0.0010 -0.0220 -0.0365 0.0062 -0.0006 -0.0752 -0.0103 0.0092 0.0432 -0.0323 -0.0246 -0.0276 -0.0161 0.0710 -0.0051 -0.0090 -0.0139 0.0007 0.0440 0.0234 -0.0163 0.0096 0.0006 -0.0080 -0.0034 0.0223 0.0298 -0.0372 -0.0089 -0.0040 0.0252 0.0349 0.0002 -0.0280 0.0382 0.0054 0.0439 0.0573 -0.0127 -0.0217 0.0431 -0.0227 0.0479 -0.0526 [torch.FloatTensor of size 240]), ('module.postnet.gru.bias_hh_l0', -0.1257 -0.2917 -0.2099 -0.1458 -0.5051 -0.7141 -0.4886 -0.1806 -0.0801 -0.0011 -0.1355 -0.0332 -0.2401 -0.6729 0.0369 -0.5681 -0.2924 -0.4986 -0.6292 -0.1636 0.0330 -0.2659 -0.1499 -0.6166 -0.1754 -0.2659 -0.0580 -0.9808 -0.9100 -0.2288 -0.3440 -0.2564 -0.0311 -0.3258 -0.0530 -0.4248 -0.6790 -0.1839 -0.0955 -0.4905 -0.5771 0.0053 -0.2151 -0.0441 -0.1503 -0.2841 -0.0549 -0.4694 0.0984 0.0305 -0.6117 -0.1276 -0.5375 -0.0053 -0.0537 -0.3159 -0.3062 -0.6065 -0.3066 -0.1555 -0.5569 -0.3278 -0.2265 -0.0701 -0.6201 -0.5877 -0.2278 -0.0455 -0.7931 -0.0257 -0.0752 -0.8125 -0.0217 -0.2383 -0.7227 -0.0374 -0.0956 -0.0429 -0.0079 -0.0540 -0.0398 0.0893 0.3214 -0.0547 -0.1813 -0.6316 0.3147 -0.4584 0.2949 0.4137 -0.1904 -0.1737 0.3177 0.0534 0.0006 0.1048 -0.3409 -0.3505 -0.0794 -0.2770 0.0726 -0.0269 -0.2318 -0.2172 -0.4086 0.0503 -0.2107 -0.4169 -0.2070 0.3670 -0.3112 -0.2121 0.2480 0.2252 0.2346 0.0665 -0.5720 0.5851 -0.1444 -0.3231 -0.0742 0.4876 -0.1952 0.0997 -0.5438 0.1961 0.4218 0.2565 0.6775 -0.0210 -0.2373 0.4852 0.0347 0.3227 -0.5669 -0.2215 -0.0340 0.0668 0.2541 -0.1506 -0.1952 -0.1168 -0.3041 0.5843 -0.1984 -0.0784 -0.1815 0.0982 -0.1089 0.6586 -0.1627 0.1438 -0.0038 -0.5779 -0.1350 0.5497 0.3276 0.4120 0.4888 0.1240 0.0674 -0.0034 -0.0403 -0.0864 -0.0746 -0.0197 0.0273 -0.1097 -0.0878 0.0931 -0.0665 0.1534 -0.0978 0.0156 -0.0760 -0.0529 0.0921 0.0650 0.0317 -0.1756 -0.0500 -0.0205 -0.0125 -0.0099 0.1807 0.0561 -0.0880 -0.0888 0.1137 -0.0067 -0.1369 0.0769 0.0662 -0.0109 0.0135 -0.0188 0.0013 0.0450 0.0826 -0.0246 0.0009 0.1502 0.0246 -0.0208 -0.1099 0.0851 0.0495 0.0956 0.0220 -0.1547 0.0215 0.0153 0.0452 0.0081 -0.0984 -0.0656 0.0387 -0.0298 0.0016 0.0163 0.0176 -0.0551 -0.0687 0.0780 0.0383 0.0157 -0.0634 -0.0696 -0.0070 0.0538 -0.0851 -0.0383 -0.0924 -0.1641 0.0580 0.0465 -0.0984 0.0515 -0.1018 0.1188 [torch.FloatTensor of size 240]), ('module.postnet.gru.weight_ih_l0_reverse', -1.9571e-01 -7.1658e-02 -1.3314e-01 ... -8.6115e-02 9.6457e-02 3.1947e-01 2.7792e-01 -6.2562e-01 -2.6053e-01 ... 1.8648e-01 -2.5338e-01 -4.5606e-01 3.9198e-01 3.2769e-01 1.4105e-01 ... -2.6209e-01 6.2235e-01 -3.0175e-01 ... ⋱ ... 3.2538e-02 -5.2638e-02 -2.9837e-02 ... -1.7957e-01 3.9589e-02 1.6884e-01 1.6656e-01 2.3316e-01 7.8167e-02 ... 6.7045e-03 1.5034e-02 2.1484e-01 6.0119e-02 4.4352e-02 2.4869e-02 ... -1.8634e-01 1.0526e-01 -5.2547e-01 [torch.FloatTensor of size 240x80]), ('module.postnet.gru.weight_hh_l0_reverse', 1.7044e+00 -4.6533e-01 5.0316e-02 ... 6.1649e-02 -8.8314e-02 -2.6902e-01 -5.2162e-01 -5.4064e-01 2.2873e-01 ... -7.5252e-01 -5.2345e-02 -2.6505e-02 1.8254e-01 -2.3672e-01 2.1709e-01 ... 1.1011e+00 -2.7084e-01 -3.3749e-01 ... ⋱ ... -9.3583e-02 -4.7568e-02 3.3401e-01 ... -1.2074e+00 4.2794e-02 1.9333e+00 -2.1337e-01 -3.5673e-01 -7.3989e-02 ... -2.4008e-01 -9.2097e-01 1.1843e-01 7.3042e-03 3.4483e-01 6.1662e-02 ... -3.6307e-01 1.1115e-01 1.4985e+00 [torch.FloatTensor of size 240x80]), ('module.postnet.gru.bias_ih_l0_reverse', -0.2834 -0.1341 -0.1891 -0.1270 -0.0979 0.0250 -0.3082 -0.1290 -0.2570 -0.2486 0.0449 -0.0050 -0.2715 -0.1192 -0.1564 -0.1569 -0.2715 -0.1705 -0.0844 -0.4546 -0.2333 -0.1900 -0.4071 0.1633 -0.4212 -0.0644 -0.0426 -0.0172 -0.0573 -0.1484 -0.0129 -0.0973 -0.0774 -0.0528 -0.0528 -0.2679 -0.0995 -0.2083 -0.3124 0.0962 -0.2425 -0.0225 -0.2383 -0.0144 -0.0467 -0.0518 -0.0031 0.1910 -0.0607 -0.1113 -0.3463 0.1373 -0.2563 -0.2841 -0.2629 -0.1472 -0.1935 -0.1878 -0.0706 -0.5098 -0.1341 -0.0148 -0.2255 -0.0354 0.0876 -0.0750 -0.1063 -0.1258 -0.3909 -0.0564 -0.2521 -0.1769 -0.2716 -0.1493 -0.0496 -0.1065 -0.3822 -0.1321 0.0315 -0.1653 -0.0777 0.3395 -0.2367 -0.2367 -0.2159 -0.2686 0.7723 0.0111 0.1415 0.0081 -0.2737 -0.0718 0.2837 0.1893 -0.3025 0.0827 -0.1914 0.2433 0.1645 0.1867 -0.1336 0.0415 -0.0685 0.1470 0.0657 -0.3073 -0.1858 0.1482 0.1246 -0.2530 0.1587 0.0094 -0.1678 0.0576 -0.1429 -0.0371 0.3333 0.1869 0.2635 -0.1213 -0.0520 -0.3667 -0.0626 0.3608 0.5578 -0.0925 0.1044 -0.0112 -0.3684 0.0313 -0.0474 0.2086 0.0270 0.1090 0.8672 0.1158 -0.2498 0.1239 0.0900 -0.4163 -0.2231 0.1602 0.1050 0.0660 0.4067 0.1662 -0.3401 0.8094 0.1300 -0.0209 -0.0355 1.0411 0.1142 0.2019 0.0569 -0.1053 1.9178 0.3461 0.0406 -0.0130 -0.0287 0.0203 -0.0028 0.0256 -0.0373 0.0146 -0.0237 -0.0067 -0.0089 -0.0216 0.0407 0.0337 -0.0112 0.0036 0.0085 0.0190 -0.0212 0.0172 -0.0325 0.0043 0.0076 -0.0122 -0.0075 -0.0015 -0.0089 0.0133 0.0669 0.0499 -0.0491 0.0011 -0.0022 -0.0078 -0.0192 0.0348 0.0217 0.0040 0.0124 -0.0149 -0.0068 0.0506 -0.0112 -0.0273 -0.0091 -0.0442 -0.0102 0.0340 0.0467 -0.0276 0.0458 0.0079 -0.0068 0.0548 0.0014 0.0232 0.0111 0.0248 0.0341 -0.0333 0.0020 -0.0115 -0.0184 0.0211 -0.0325 -0.0263 -0.0312 -0.0194 0.0092 0.0252 0.0259 0.0115 0.0070 0.0371 -0.0159 -0.0056 0.0381 0.0253 -0.0043 0.0014 -0.0099 0.0038 [torch.FloatTensor of size 240]), ('module.postnet.gru.bias_hh_l0_reverse', -0.2487 -0.1271 -0.1493 0.0263 0.0129 0.0412 -0.1970 -0.3277 -0.3353 -0.2026 -0.0747 -0.1377 -0.2107 -0.1382 -0.2139 -0.1606 -0.4580 -0.2586 -0.0390 -0.4157 -0.1222 -0.2138 -0.3036 0.0978 -0.3649 -0.0272 -0.0919 -0.0896 0.0116 -0.0178 -0.1166 0.0206 -0.0256 -0.0908 -0.0805 -0.2593 -0.1308 -0.3071 -0.2022 0.0909 -0.1760 -0.1884 -0.1719 -0.1515 -0.0075 0.0053 -0.0405 0.2614 -0.1736 0.0894 -0.3122 0.0602 -0.3385 -0.3736 -0.2544 0.0354 -0.1137 -0.3462 -0.1956 -0.4153 -0.1730 0.0075 -0.1659 0.0991 -0.0186 -0.1592 -0.0166 -0.2992 -0.2731 -0.0349 -0.2382 -0.0450 -0.1722 -0.2531 -0.1807 -0.1135 -0.2719 -0.2557 -0.0144 -0.2027 0.0466 0.1740 -0.3893 -0.1778 -0.2417 -0.1082 0.6921 0.0596 0.1283 -0.0760 -0.2241 -0.0975 0.3336 0.2793 -0.2995 0.0315 -0.1422 0.2703 0.1530 0.2506 -0.1593 0.0975 -0.2239 0.2017 0.1103 -0.2734 -0.0930 -0.0133 0.0313 -0.2820 0.0176 0.1565 -0.2222 0.0820 -0.1499 0.1038 0.2361 0.1533 0.2672 -0.1057 -0.0180 -0.2015 -0.0127 0.4115 0.5030 -0.0607 0.2297 -0.0195 -0.1869 -0.1665 -0.0110 0.1128 -0.0069 0.1217 0.8779 0.2806 -0.2943 0.1384 -0.0913 -0.3071 -0.2291 0.0558 -0.0693 -0.0422 0.3369 -0.0317 -0.1905 0.7677 0.2580 0.0240 0.0153 0.9563 0.1226 0.2429 0.1906 -0.0149 1.8935 0.3781 -0.0177 0.0269 0.0797 -0.0511 0.0123 -0.0557 0.0786 -0.0151 0.0576 0.0093 0.0177 0.0563 -0.0821 -0.0707 0.0199 -0.0187 -0.0318 -0.0384 0.0585 -0.0411 0.0692 -0.0140 -0.0212 0.0343 0.0252 0.0025 0.0241 -0.0278 -0.1441 -0.1080 0.1002 0.0010 0.0017 0.0162 0.0458 -0.0687 -0.0423 -0.0112 -0.0268 0.0391 0.0182 -0.0957 0.0330 0.0602 0.0215 0.1023 0.0070 -0.0670 -0.1032 0.0485 -0.1075 -0.0313 0.0147 -0.0862 -0.0018 -0.0198 -0.0436 -0.0486 -0.0878 0.0881 -0.0095 0.0378 0.0495 -0.0407 0.0855 0.0551 0.0526 0.0357 -0.0338 -0.0639 -0.0750 -0.0179 -0.0145 -0.0488 0.0365 0.0254 -0.0829 -0.0473 -0.0232 -0.0070 0.0135 -0.0169 [torch.FloatTensor of size 240]), ('module.last_linear.weight', 9.0400e-03 -7.2088e-03 -1.4630e-02 ... 6.1971e-03 -1.5822e-03 -2.7374e-03 1.1868e-02 -4.7611e-03 -1.6505e-02 ... 6.2229e-03 -1.4371e-03 -2.4970e-03 1.3643e-02 -4.6501e-03 -2.1297e-02 ... 1.0202e-02 -2.7155e-03 -2.3471e-03 ... ⋱ ... 1.9957e-03 -6.6300e-03 1.2878e-02 ... -4.6978e-03 -3.2197e-02 -1.5346e-03 2.5341e-03 -6.8375e-03 1.1034e-02 ... -4.8485e-03 -3.2630e-02 -2.1417e-03 2.9717e-03 -7.6311e-03 9.8761e-03 ... -4.5076e-03 -3.3754e-02 -2.9296e-03 [torch.FloatTensor of size 1025x160]), ('module.last_linear.bias', 1.00000e-04 * -1.3252 -1.4030 -1.1655 ⋮ -0.1640 0.0590 1.3231 [torch.FloatTensor of size 1025])])
EXAMPLES FROM TRAINING SET¶
In [5]:
import pandas as pd df = pd.read_csv('/data/shared/KeithIto/LJSpeech-1.0/metadata.csv', delimiter='|')
In [6]:
sentence = df.iloc[120, 1].lower().replace(',','') print(sentence) align = tts(model, sentence, CONFIG, use_cuda, ap)
that he has a 5 an 8 or a 3 before him unless the press work is of the best:
/home/erogol/miniconda3/envs/pytorch/lib/python3.6/site-packages/librosa/util/utils.py:1725: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`. if np.issubdtype(x.dtype, float) or np.issubdtype(x.dtype, complex):
> Run-time: 8.474236488342285
/home/erogol/miniconda3/envs/pytorch/lib/python3.6/site-packages/librosa/display.py:656: FutureWarning: Conversion of the second argument of issubdtype from `complex` to `np.complexfloating` is deprecated. In future, it will be treated as `np.complex128 == np.dtype(complex).type`. if np.issubdtype(data.dtype, np.complex):
Your browser does not support the audio element.
NEW EXAMPLES¶
In [10]:
sentence = "That's all folks." model.decoder.max_decoder_steps = 300 alignment = tts(model, sentence, CONFIG, use_cuda, ap)
/home/erogol/miniconda3/envs/pytorch/lib/python3.6/site-packages/librosa/util/utils.py:1725: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`. if np.issubdtype(x.dtype, float) or np.issubdtype(x.dtype, complex):
> Run-time: 1.5912322998046875
/home/erogol/miniconda3/envs/pytorch/lib/python3.6/site-packages/librosa/display.py:656: FutureWarning: Conversion of the second argument of issubdtype from `complex` to `np.complexfloating` is deprecated. In future, it will be treated as `np.complex128 == np.dtype(complex).type`. if np.issubdtype(data.dtype, np.complex):
Your browser does not support the audio element.