diff --git a/.travis.yml b/.travis.yml
index 83ba25a3..5f20cb78 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,5 +17,9 @@ matrix:
       python: "3.6"
       install: pip install --quiet -r requirements_tests.txt
       env: TEST_SUITE="unittest"
+    - name: "Unit tests"
+      python: "3.6"
+      install: pip install --quiet -r requirements_tests.txt
+      env: TEST_SUITE="testscripts"
 
 script: ./.travis/script
diff --git a/.travis/script b/.travis/script
index 76d74aea..c793d9e7 100755
--- a/.travis/script
+++ b/.travis/script
@@ -14,9 +14,15 @@ if [[ "$TEST_SUITE" == "unittest" ]]; then
     pushd tts_namespace
     nosetests TTS.speaker_encoder.tests --nocapture
     nosetests TTS.vocoder.tests --nocapture
-    nosetests TTS.tests --nocapture
-    nosetests TTS.tf.tests --nocapture
+    nosetests TTS.tts.tests --nocapture
+    nosetests TTS.tts.tf.tests --nocapture
     popd
-    # Test server package
-    ./tests/test_server_package.sh
+fi
+
+if [[ "$TEST_SUITE" == "testscripts" ]]; then
+    # Test server package
+    ./tts/tests/test_server_package.sh
+    # test model training scripts
+    ./tts/tests/test_tts_train.sh
+    ./vocoder/tests/test_vocoder_train.sh
 fi
diff --git a/compute_statistics.py b/compute_statistics.py
deleted file mode 100755
index 399ae512..00000000
--- a/compute_statistics.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-import os
-import argparse
-
-import numpy as np
-from tqdm import tqdm
-
-from TTS.datasets.preprocess import load_meta_data
-from TTS.utils.io import load_config
-from TTS.utils.audio import AudioProcessor
-
-def main():
-    """Run preprocessing process."""
-    parser = argparse.ArgumentParser(
-        description="Compute mean and variance of spectrogtram features.")
-    parser.add_argument("--config_path", type=str, required=True,
-                        help="TTS config file path to define audio processin parameters.")
-    parser.add_argument("--out_path", default=None, type=str,
-                        help="directory to save the output file.")
-    args = parser.parse_args()
-
-    # load config
-    CONFIG = load_config(args.config_path)
-    CONFIG.audio['signal_norm'] = False  # do not apply earlier normalization
-    CONFIG.audio['stats_path'] = None  # discard pre-defined stats
-
-    # load audio processor
-    ap = AudioProcessor(**CONFIG.audio)
-
-    # load the meta data of target dataset
-    dataset_items = load_meta_data(CONFIG.datasets)[0]  # take only train data
-    print(f" > There are {len(dataset_items)} files.")
-
-    mel_sum = 0
-    mel_square_sum = 0
-    linear_sum = 0
-    linear_square_sum = 0
-    N = 0
-    for item in tqdm(dataset_items):
-        # compute features
-        wav = ap.load_wav(item[1])
-        linear = ap.spectrogram(wav)
-        mel = ap.melspectrogram(wav)
-
-        # compute stats
-        N += mel.shape[1]
-        mel_sum += mel.sum(1)
-        linear_sum += linear.sum(1)
-        mel_square_sum += (mel ** 2).sum(axis=1)
-        linear_square_sum += (linear ** 2).sum(axis=1)
-
-    mel_mean = mel_sum / N
-    mel_scale = np.sqrt(mel_square_sum / N - mel_mean ** 2)
-    linear_mean = linear_sum / N
-    linear_scale = np.sqrt(linear_square_sum / N - linear_mean ** 2)
-
-    output_file_path = os.path.join(args.out_path, "scale_stats.npy")
-    stats = {}
-    stats['mel_mean'] = mel_mean
-    stats['mel_std'] = mel_scale
-    stats['linear_mean'] = linear_mean
-    stats['linear_std'] = linear_scale
-
-    print(f' > Avg mel spec mean: {mel_mean.mean()}')
-    print(f' > Avg mel spec scale: {mel_scale.mean()}')
-    print(f' > Avg linear spec mean: {linear_mean.mean()}')
-    print(f' > Avg lienar spec scale: {linear_scale.mean()}')
-
-    # set default config values for mean-var scaling
-    CONFIG.audio['stats_path'] = output_file_path
CONFIG.audio['signal_norm'] = True - # remove redundant values - del CONFIG.audio['max_norm'] - del CONFIG.audio['min_level_db'] - del CONFIG.audio['symmetric_norm'] - del CONFIG.audio['clip_norm'] - stats['audio_config'] = CONFIG.audio - np.save(output_file_path, stats, allow_pickle=True) - print(f' > scale_stats.npy is saved to {output_file_path}') - - -if __name__ == "__main__": - main() diff --git a/datasets/TTSDataset.py b/datasets/TTSDataset.py deleted file mode 100644 index 7fe966d7..00000000 --- a/datasets/TTSDataset.py +++ /dev/null @@ -1,240 +0,0 @@ -import os -import numpy as np -import collections -import torch -import random -from torch.utils.data import Dataset - -from TTS.utils.text import text_to_sequence, phoneme_to_sequence, pad_with_eos_bos -from TTS.utils.data import prepare_data, prepare_tensor, prepare_stop_target - - -class MyDataset(Dataset): - def __init__(self, - outputs_per_step, - text_cleaner, - compute_linear_spec, - ap, - meta_data, - tp=None, - batch_group_size=0, - min_seq_len=0, - max_seq_len=float("inf"), - use_phonemes=True, - phoneme_cache_path=None, - phoneme_language="en-us", - enable_eos_bos=False, - verbose=False): - """ - Args: - outputs_per_step (int): number of time frames predicted per step. - text_cleaner (str): text cleaner used for the dataset. - compute_linear_spec (bool): compute linear spectrogram if True. - ap (TTS.utils.AudioProcessor): audio processor object. - meta_data (list): list of dataset instances. - batch_group_size (int): (0) range of batch randomization after sorting - sequences by length. - min_seq_len (int): (0) minimum sequence length to be processed - by the loader. - max_seq_len (int): (float("inf")) maximum sequence length. - use_phonemes (bool): (true) if true, text converted to phonemes. - phoneme_cache_path (str): path to cache phoneme features. - phoneme_language (str): one the languages from - https://github.com/bootphon/phonemizer#languages - enable_eos_bos (bool): enable end of sentence and beginning of sentences characters. - verbose (bool): print diagnostic information. - """ - self.batch_group_size = batch_group_size - self.items = meta_data - self.outputs_per_step = outputs_per_step - self.sample_rate = ap.sample_rate - self.cleaners = text_cleaner - self.compute_linear_spec = compute_linear_spec - self.min_seq_len = min_seq_len - self.max_seq_len = max_seq_len - self.ap = ap - self.tp = tp - self.use_phonemes = use_phonemes - self.phoneme_cache_path = phoneme_cache_path - self.phoneme_language = phoneme_language - self.enable_eos_bos = enable_eos_bos - self.verbose = verbose - if use_phonemes and not os.path.isdir(phoneme_cache_path): - os.makedirs(phoneme_cache_path, exist_ok=True) - if self.verbose: - print("\n > DataLoader initialization") - print(" | > Use phonemes: {}".format(self.use_phonemes)) - if use_phonemes: - print(" | > phoneme language: {}".format(phoneme_language)) - print(" | > Number of instances : {}".format(len(self.items))) - self.sort_items() - - def load_wav(self, filename): - audio = self.ap.load_wav(filename) - return audio - - @staticmethod - def load_np(filename): - data = np.load(filename).astype('float32') - return data - - def _generate_and_cache_phoneme_sequence(self, text, cache_path): - """generate a phoneme sequence from text. - since the usage is for subsequent caching, we never add bos and - eos chars here. 
Instead we add those dynamically later; based on the - config option.""" - phonemes = phoneme_to_sequence(text, [self.cleaners], - language=self.phoneme_language, - enable_eos_bos=False, - tp=self.tp) - phonemes = np.asarray(phonemes, dtype=np.int32) - np.save(cache_path, phonemes) - return phonemes - - def _load_or_generate_phoneme_sequence(self, wav_file, text): - file_name = os.path.splitext(os.path.basename(wav_file))[0] - cache_path = os.path.join(self.phoneme_cache_path, - file_name + '_phoneme.npy') - try: - phonemes = np.load(cache_path) - except FileNotFoundError: - phonemes = self._generate_and_cache_phoneme_sequence(text, - cache_path) - except (ValueError, IOError): - print(" > ERROR: failed loading phonemes for {}. " - "Recomputing.".format(wav_file)) - phonemes = self._generate_and_cache_phoneme_sequence(text, - cache_path) - if self.enable_eos_bos: - phonemes = pad_with_eos_bos(phonemes, tp=self.tp) - phonemes = np.asarray(phonemes, dtype=np.int32) - return phonemes - - def load_data(self, idx): - text, wav_file, speaker_name = self.items[idx] - wav = np.asarray(self.load_wav(wav_file), dtype=np.float32) - - if self.use_phonemes: - text = self._load_or_generate_phoneme_sequence(wav_file, text) - else: - text = np.asarray( - text_to_sequence(text, [self.cleaners], tp=self.tp), dtype=np.int32) - - assert text.size > 0, self.items[idx][1] - assert wav.size > 0, self.items[idx][1] - - sample = { - 'text': text, - 'wav': wav, - 'item_idx': self.items[idx][1], - 'speaker_name': speaker_name - } - return sample - - def sort_items(self): - r"""Sort instances based on text length in ascending order""" - lengths = np.array([len(ins[0]) for ins in self.items]) - - idxs = np.argsort(lengths) - new_items = [] - ignored = [] - for i, idx in enumerate(idxs): - length = lengths[idx] - if length < self.min_seq_len or length > self.max_seq_len: - ignored.append(idx) - else: - new_items.append(self.items[idx]) - # shuffle batch groups - if self.batch_group_size > 0: - for i in range(len(new_items) // self.batch_group_size): - offset = i * self.batch_group_size - end_offset = offset + self.batch_group_size - temp_items = new_items[offset:end_offset] - random.shuffle(temp_items) - new_items[offset:end_offset] = temp_items - self.items = new_items - - if self.verbose: - print(" | > Max length sequence: {}".format(np.max(lengths))) - print(" | > Min length sequence: {}".format(np.min(lengths))) - print(" | > Avg length sequence: {}".format(np.mean(lengths))) - print(" | > Num. instances discarded by max-min (max={}, min={}) seq limits: {}".format( - self.max_seq_len, self.min_seq_len, len(ignored))) - print(" | > Batch group size: {}.".format(self.batch_group_size)) - - def __len__(self): - return len(self.items) - - def __getitem__(self, idx): - return self.load_data(idx) - - def collate_fn(self, batch): - r""" - Perform preprocessing and create a final data batch: - 1. Sort batch instances by text-length - 2. Convert Audio signal to Spectrograms. - 3. PAD sequences wrt r. - 4. Load to Torch. 
- """ - - # Puts each data field into a tensor with outer dimension batch size - if isinstance(batch[0], collections.Mapping): - - text_lenghts = np.array([len(d["text"]) for d in batch]) - - # sort items with text input length for RNN efficiency - text_lenghts, ids_sorted_decreasing = torch.sort( - torch.LongTensor(text_lenghts), dim=0, descending=True) - - wav = [batch[idx]['wav'] for idx in ids_sorted_decreasing] - item_idxs = [ - batch[idx]['item_idx'] for idx in ids_sorted_decreasing - ] - text = [batch[idx]['text'] for idx in ids_sorted_decreasing] - speaker_name = [batch[idx]['speaker_name'] - for idx in ids_sorted_decreasing] - - # compute features - mel = [self.ap.melspectrogram(w).astype('float32') for w in wav] - - mel_lengths = [m.shape[1] for m in mel] - - # compute 'stop token' targets - stop_targets = [ - np.array([0.] * (mel_len - 1) + [1.]) for mel_len in mel_lengths - ] - - # PAD stop targets - stop_targets = prepare_stop_target(stop_targets, - self.outputs_per_step) - - # PAD sequences with longest instance in the batch - text = prepare_data(text).astype(np.int32) - - # PAD features with longest instance - mel = prepare_tensor(mel, self.outputs_per_step) - - # B x D x T --> B x T x D - mel = mel.transpose(0, 2, 1) - - # convert things to pytorch - text_lenghts = torch.LongTensor(text_lenghts) - text = torch.LongTensor(text) - mel = torch.FloatTensor(mel).contiguous() - mel_lengths = torch.LongTensor(mel_lengths) - stop_targets = torch.FloatTensor(stop_targets) - - # compute linear spectrogram - if self.compute_linear_spec: - linear = [self.ap.spectrogram(w).astype('float32') for w in wav] - linear = prepare_tensor(linear, self.outputs_per_step) - linear = linear.transpose(0, 2, 1) - assert mel.shape[1] == linear.shape[1] - linear = torch.FloatTensor(linear).contiguous() - else: - linear = None - return text, text_lenghts, speaker_name, linear, mel, mel_lengths, \ - stop_targets, item_idxs - - raise TypeError(("batch must contain tensors, numbers, dicts or lists;\ - found {}".format(type(batch[0])))) diff --git a/datasets/preprocess.py b/datasets/preprocess.py deleted file mode 100644 index e8700c6b..00000000 --- a/datasets/preprocess.py +++ /dev/null @@ -1,207 +0,0 @@ -import os -from glob import glob -import re -import sys -from TTS.utils.generic_utils import split_dataset - - -def load_meta_data(datasets): - meta_data_train_all = [] - meta_data_eval_all = [] - for dataset in datasets: - name = dataset['name'] - root_path = dataset['path'] - meta_file_train = dataset['meta_file_train'] - meta_file_val = dataset['meta_file_val'] - preprocessor = get_preprocessor_by_name(name) - - meta_data_train = preprocessor(root_path, meta_file_train) - if meta_file_val is None: - meta_data_eval, meta_data_train = split_dataset(meta_data_train) - else: - meta_data_eval = preprocessor(root_path, meta_file_val) - meta_data_train_all += meta_data_train - meta_data_eval_all += meta_data_eval - return meta_data_train_all, meta_data_eval_all - - -def get_preprocessor_by_name(name): - """Returns the respective preprocessing function.""" - thismodule = sys.modules[__name__] - return getattr(thismodule, name.lower()) - - -def tweb(root_path, meta_file): - """Normalize TWEB dataset. 
- https://www.kaggle.com/bryanpark/the-world-english-bible-speech-dataset - """ - txt_file = os.path.join(root_path, meta_file) - items = [] - speaker_name = "tweb" - with open(txt_file, 'r') as ttf: - for line in ttf: - cols = line.split('\t') - wav_file = os.path.join(root_path, cols[0] + '.wav') - text = cols[1] - items.append([text, wav_file, speaker_name]) - return items - - -# def kusal(root_path, meta_file): -# txt_file = os.path.join(root_path, meta_file) -# texts = [] -# wavs = [] -# with open(txt_file, "r", encoding="utf8") as f: -# frames = [ -# line.split('\t') for line in f -# if line.split('\t')[0] in self.wav_files_dict.keys() -# ] -# # TODO: code the rest -# return {'text': texts, 'wavs': wavs} - - -def mozilla(root_path, meta_file): - """Normalizes Mozilla meta data files to TTS format""" - txt_file = os.path.join(root_path, meta_file) - items = [] - speaker_name = "mozilla" - with open(txt_file, 'r') as ttf: - for line in ttf: - cols = line.split('|') - wav_file = cols[1].strip() - text = cols[0].strip() - wav_file = os.path.join(root_path, "wavs", wav_file) - items.append([text, wav_file, speaker_name]) - return items - - -def mozilla_de(root_path, meta_file): - """Normalizes Mozilla meta data files to TTS format""" - txt_file = os.path.join(root_path, meta_file) - items = [] - speaker_name = "mozilla" - with open(txt_file, 'r', encoding="ISO 8859-1") as ttf: - for line in ttf: - cols = line.strip().split('|') - wav_file = cols[0].strip() - text = cols[1].strip() - folder_name = f"BATCH_{wav_file.split('_')[0]}_FINAL" - wav_file = os.path.join(root_path, folder_name, wav_file) - items.append([text, wav_file, speaker_name]) - return items - - -def mailabs(root_path, meta_files=None): - """Normalizes M-AI-Labs meta data files to TTS format""" - speaker_regex = re.compile("by_book/(male|female)/(?P[^/]+)/") - if meta_files is None: - csv_files = glob(root_path+"/**/metadata.csv", recursive=True) - else: - csv_files = meta_files - # meta_files = [f.strip() for f in meta_files.split(",")] - items = [] - for csv_file in csv_files: - txt_file = os.path.join(root_path, csv_file) - folder = os.path.dirname(txt_file) - # determine speaker based on folder structure... 
- speaker_name_match = speaker_regex.search(txt_file) - if speaker_name_match is None: - continue - speaker_name = speaker_name_match.group("speaker_name") - print(" | > {}".format(csv_file)) - with open(txt_file, 'r') as ttf: - for line in ttf: - cols = line.split('|') - if meta_files is None: - wav_file = os.path.join(folder, 'wavs', cols[0] + '.wav') - else: - wav_file = os.path.join(root_path, folder.replace("metadata.csv", ""), 'wavs', cols[0] + '.wav') - if os.path.isfile(wav_file): - text = cols[1].strip() - items.append([text, wav_file, speaker_name]) - else: - raise RuntimeError("> File %s does not exist!"%(wav_file)) - return items - - -def ljspeech(root_path, meta_file): - """Normalizes the Nancy meta data file to TTS format""" - txt_file = os.path.join(root_path, meta_file) - items = [] - speaker_name = "ljspeech" - with open(txt_file, 'r') as ttf: - for line in ttf: - cols = line.split('|') - wav_file = os.path.join(root_path, 'wavs', cols[0] + '.wav') - text = cols[1] - items.append([text, wav_file, speaker_name]) - return items - - -def nancy(root_path, meta_file): - """Normalizes the Nancy meta data file to TTS format""" - txt_file = os.path.join(root_path, meta_file) - items = [] - speaker_name = "nancy" - with open(txt_file, 'r') as ttf: - for line in ttf: - utt_id = line.split()[1] - text = line[line.find('"') + 1:line.rfind('"') - 1] - wav_file = os.path.join(root_path, "wavn", utt_id + ".wav") - items.append([text, wav_file, speaker_name]) - return items - - -def common_voice(root_path, meta_file): - """Normalize the common voice meta data file to TTS format.""" - txt_file = os.path.join(root_path, meta_file) - items = [] - with open(txt_file, 'r') as ttf: - for line in ttf: - if line.startswith("client_id"): - continue - cols = line.split("\t") - text = cols[2] - speaker_name = cols[0] - wav_file = os.path.join(root_path, "clips", cols[1] + ".wav") - items.append([text, wav_file, speaker_name]) - return items - - -def libri_tts(root_path, meta_files=None): - """https://ai.google/tools/datasets/libri-tts/""" - items = [] - if meta_files is None: - meta_files = glob(f"{root_path}/**/*trans.tsv", recursive=True) - for meta_file in meta_files: - _meta_file = os.path.basename(meta_file).split('.')[0] - speaker_name = _meta_file.split('_')[0] - chapter_id = _meta_file.split('_')[1] - _root_path = os.path.join(root_path, f"{speaker_name}/{chapter_id}") - with open(meta_file, 'r') as ttf: - for line in ttf: - cols = line.split('\t') - wav_file = os.path.join(_root_path, cols[0] + '.wav') - text = cols[1] - items.append([text, wav_file, speaker_name]) - for item in items: - assert os.path.exists(item[1]), f" [!] wav files don't exist - {item[1]}" - return items - - -def custom_turkish(root_path, meta_file): - txt_file = os.path.join(root_path, meta_file) - items = [] - speaker_name = "turkish-female" - skipped_files = [] - with open(txt_file, 'r', encoding='utf-8') as ttf: - for line in ttf: - cols = line.split('|') - wav_file = os.path.join(root_path, 'wavs', cols[0].strip() + '.wav') - if not os.path.exists(wav_file): - skipped_files.append(wav_file) - continue - text = cols[1].strip() - items.append([text, wav_file, speaker_name]) - print(f" [!] {len(skipped_files)} files skipped. 
They don't exist...") - return items diff --git a/distribute.py b/distribute.py deleted file mode 100644 index b0fc8b07..00000000 --- a/distribute.py +++ /dev/null @@ -1,178 +0,0 @@ -# edited from https://github.com/fastai/imagenet-fast/blob/master/imagenet_nv/distributed.py -import os, sys -import math -import time -import subprocess -import argparse -import torch -import torch.distributed as dist -from torch.utils.data.sampler import Sampler -from torch.autograd import Variable -from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors -from TTS.utils.generic_utils import create_experiment_folder - - -class DistributedSampler(Sampler): - """ - Non shuffling Distributed Sampler - """ - - def __init__(self, dataset, num_replicas=None, rank=None): - super(DistributedSampler, self).__init__(dataset) - if num_replicas is None: - if not dist.is_available(): - raise RuntimeError("Requires distributed package to be available") - num_replicas = dist.get_world_size() - if rank is None: - if not dist.is_available(): - raise RuntimeError("Requires distributed package to be available") - rank = dist.get_rank() - self.dataset = dataset - self.num_replicas = num_replicas - self.rank = rank - self.epoch = 0 - self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas)) - self.total_size = self.num_samples * self.num_replicas - - def __iter__(self): - indices = torch.arange(len(self.dataset)).tolist() - - # add extra samples to make it evenly divisible - indices += indices[:(self.total_size - len(indices))] - assert len(indices) == self.total_size - - # subsample - indices = indices[self.rank:self.total_size:self.num_replicas] - assert len(indices) == self.num_samples - - return iter(indices) - - def __len__(self): - return self.num_samples - - def set_epoch(self, epoch): - self.epoch = epoch - - -def reduce_tensor(tensor, num_gpus): - rt = tensor.clone() - dist.all_reduce(rt, op=dist.reduce_op.SUM) - rt /= num_gpus - return rt - - -def init_distributed(rank, num_gpus, group_name, dist_backend, dist_url): - assert torch.cuda.is_available(), "Distributed mode requires CUDA." - - # Set cuda device so everything is done on the right GPU. 
- torch.cuda.set_device(rank % torch.cuda.device_count()) - - # Initialize distributed communication - dist.init_process_group( - dist_backend, - init_method=dist_url, - world_size=num_gpus, - rank=rank, - group_name=group_name) - - -def apply_gradient_allreduce(module): - - # sync model parameters - for p in module.state_dict().values(): - if not torch.is_tensor(p): - continue - dist.broadcast(p, 0) - - def allreduce_params(): - if module.needs_reduction: - module.needs_reduction = False - # bucketing params based on value types - buckets = {} - for param in module.parameters(): - if param.requires_grad and param.grad is not None: - tp = type(param.data) - if tp not in buckets: - buckets[tp] = [] - buckets[tp].append(param) - for tp in buckets: - bucket = buckets[tp] - grads = [param.grad.data for param in bucket] - coalesced = _flatten_dense_tensors(grads) - dist.all_reduce(coalesced, op=dist.reduce_op.SUM) - coalesced /= dist.get_world_size() - for buf, synced in zip( - grads, _unflatten_dense_tensors(coalesced, grads)): - buf.copy_(synced) - - for param in list(module.parameters()): - - def allreduce_hook(*_): - Variable._execution_engine.queue_callback(allreduce_params) - - if param.requires_grad: - param.register_hook(allreduce_hook) - - def set_needs_reduction(self, *_): - self.needs_reduction = True - - module.register_forward_hook(set_needs_reduction) - return module - - -def main(): - """ - Call train.py as a new process and pass command arguments - """ - parser = argparse.ArgumentParser() - parser.add_argument( - '--continue_path', - type=str, - help='Training output folder to continue training. Use to continue a training. If it is used, "config_path" is ignored.', - default='', - required='--config_path' not in sys.argv) - parser.add_argument( - '--restore_path', - type=str, - help='Model file to be restored. 
Use to finetune a model.', - default='') - parser.add_argument( - '--config_path', - type=str, - help='Path to config file for training.', - required='--continue_path' not in sys.argv - ) - args = parser.parse_args() - - # OUT_PATH = create_experiment_folder(CONFIG.output_path, CONFIG.run_name, - # True) - # stdout_path = os.path.join(OUT_PATH, "process_stdout/") - - num_gpus = torch.cuda.device_count() - group_id = time.strftime("%Y_%m_%d-%H%M%S") - - # set arguments for train.py - command = ['train.py'] - command.append('--continue_path={}'.format(args.continue_path)) - command.append('--restore_path={}'.format(args.restore_path)) - command.append('--config_path={}'.format(args.config_path)) - command.append('--group_id=group_{}'.format(group_id)) - command.append('') - - # run processes - processes = [] - for i in range(num_gpus): - my_env = os.environ.copy() - my_env["PYTHON_EGG_CACHE"] = "/tmp/tmp{}".format(i) - command[-1] = '--rank={}'.format(i) - stdout = None if i == 0 else open(os.devnull, 'w') - p = subprocess.Popen(['python3'] + command, stdout=stdout, env=my_env) - processes.append(p) - print(command) - - for p in processes: - p.wait() - - -if __name__ == '__main__': - main() diff --git a/layers/__init__.py b/layers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/layers/common_layers.py b/layers/common_layers.py deleted file mode 100644 index b7d02c2d..00000000 --- a/layers/common_layers.py +++ /dev/null @@ -1,389 +0,0 @@ -import torch -from torch import nn -from torch.autograd import Variable -from torch.nn import functional as F - - -class Linear(nn.Module): - def __init__(self, - in_features, - out_features, - bias=True, - init_gain='linear'): - super(Linear, self).__init__() - self.linear_layer = torch.nn.Linear( - in_features, out_features, bias=bias) - self._init_w(init_gain) - - def _init_w(self, init_gain): - torch.nn.init.xavier_uniform_( - self.linear_layer.weight, - gain=torch.nn.init.calculate_gain(init_gain)) - - def forward(self, x): - return self.linear_layer(x) - - -class LinearBN(nn.Module): - def __init__(self, - in_features, - out_features, - bias=True, - init_gain='linear'): - super(LinearBN, self).__init__() - self.linear_layer = torch.nn.Linear( - in_features, out_features, bias=bias) - self.batch_normalization = nn.BatchNorm1d(out_features, momentum=0.1, eps=1e-5) - self._init_w(init_gain) - - def _init_w(self, init_gain): - torch.nn.init.xavier_uniform_( - self.linear_layer.weight, - gain=torch.nn.init.calculate_gain(init_gain)) - - def forward(self, x): - out = self.linear_layer(x) - if len(out.shape) == 3: - out = out.permute(1, 2, 0) - out = self.batch_normalization(out) - if len(out.shape) == 3: - out = out.permute(2, 0, 1) - return out - - -class Prenet(nn.Module): - def __init__(self, - in_features, - prenet_type="original", - prenet_dropout=True, - out_features=[256, 256], - bias=True): - super(Prenet, self).__init__() - self.prenet_type = prenet_type - self.prenet_dropout = prenet_dropout - in_features = [in_features] + out_features[:-1] - if prenet_type == "bn": - self.linear_layers = nn.ModuleList([ - LinearBN(in_size, out_size, bias=bias) - for (in_size, out_size) in zip(in_features, out_features) - ]) - elif prenet_type == "original": - self.linear_layers = nn.ModuleList([ - Linear(in_size, out_size, bias=bias) - for (in_size, out_size) in zip(in_features, out_features) - ]) - - def forward(self, x): - for linear in self.linear_layers: - if self.prenet_dropout: - x = F.dropout(F.relu(linear(x)), p=0.5, 
training=self.training) - else: - x = F.relu(linear(x)) - return x - - -#################### -# ATTENTION MODULES -#################### - - -class LocationLayer(nn.Module): - def __init__(self, - attention_dim, - attention_n_filters=32, - attention_kernel_size=31): - super(LocationLayer, self).__init__() - self.location_conv1d = nn.Conv1d( - in_channels=2, - out_channels=attention_n_filters, - kernel_size=attention_kernel_size, - stride=1, - padding=(attention_kernel_size - 1) // 2, - bias=False) - self.location_dense = Linear( - attention_n_filters, attention_dim, bias=False, init_gain='tanh') - - def forward(self, attention_cat): - processed_attention = self.location_conv1d(attention_cat) - processed_attention = self.location_dense( - processed_attention.transpose(1, 2)) - return processed_attention - - -class GravesAttention(nn.Module): - """ Discretized Graves attention: - - https://arxiv.org/abs/1910.10288 - - https://arxiv.org/pdf/1906.01083.pdf - """ - COEF = 0.3989422917366028 # numpy.sqrt(1/(2*numpy.pi)) - - def __init__(self, query_dim, K): - super(GravesAttention, self).__init__() - self._mask_value = 1e-8 - self.K = K - # self.attention_alignment = 0.05 - self.eps = 1e-5 - self.J = None - self.N_a = nn.Sequential( - nn.Linear(query_dim, query_dim, bias=True), - nn.ReLU(), - nn.Linear(query_dim, 3*K, bias=True)) - self.attention_weights = None - self.mu_prev = None - self.init_layers() - - def init_layers(self): - torch.nn.init.constant_(self.N_a[2].bias[(2*self.K):(3*self.K)], 1.) # bias mean - torch.nn.init.constant_(self.N_a[2].bias[self.K:(2*self.K)], 10) # bias std - - def init_states(self, inputs): - if self.J is None or inputs.shape[1]+1 > self.J.shape[-1]: - self.J = torch.arange(0, inputs.shape[1]+2.0).to(inputs.device) + 0.5 - self.attention_weights = torch.zeros(inputs.shape[0], inputs.shape[1]).to(inputs.device) - self.mu_prev = torch.zeros(inputs.shape[0], self.K).to(inputs.device) - - # pylint: disable=R0201 - # pylint: disable=unused-argument - def preprocess_inputs(self, inputs): - return None - - def forward(self, query, inputs, processed_inputs, mask): - """ - shapes: - query: B x D_attention_rnn - inputs: B x T_in x D_encoder - processed_inputs: place_holder - mask: B x T_in - """ - gbk_t = self.N_a(query) - gbk_t = gbk_t.view(gbk_t.size(0), -1, self.K) - - # attention model parameters - # each B x K - g_t = gbk_t[:, 0, :] - b_t = gbk_t[:, 1, :] - k_t = gbk_t[:, 2, :] - - # dropout to decorrelate attention heads - g_t = torch.nn.functional.dropout(g_t, p=0.5, training=self.training) - - # attention GMM parameters - sig_t = torch.nn.functional.softplus(b_t) + self.eps - - mu_t = self.mu_prev + torch.nn.functional.softplus(k_t) - g_t = torch.softmax(g_t, dim=-1) + self.eps - - j = self.J[:inputs.size(1)+1] - - # attention weights - phi_t = g_t.unsqueeze(-1) * (1 / (1 + torch.sigmoid((mu_t.unsqueeze(-1) - j) / sig_t.unsqueeze(-1)))) - - # discritize attention weights - alpha_t = torch.sum(phi_t, 1) - alpha_t = alpha_t[:, 1:] - alpha_t[:, :-1] - alpha_t[alpha_t == 0] = 1e-8 - - # apply masking - if mask is not None: - alpha_t.data.masked_fill_(~mask, self._mask_value) - - context = torch.bmm(alpha_t.unsqueeze(1), inputs).squeeze(1) - self.attention_weights = alpha_t - self.mu_prev = mu_t - return context - - -class OriginalAttention(nn.Module): - """Following the methods proposed here: - - https://arxiv.org/abs/1712.05884 - - https://arxiv.org/abs/1807.06736 + state masking at inference - - Using sigmoid instead of softmax normalization - - Attention windowing at 
inference time - """ - # Pylint gets confused by PyTorch conventions here - #pylint: disable=attribute-defined-outside-init - def __init__(self, query_dim, embedding_dim, attention_dim, - location_attention, attention_location_n_filters, - attention_location_kernel_size, windowing, norm, forward_attn, - trans_agent, forward_attn_mask): - super(OriginalAttention, self).__init__() - self.query_layer = Linear( - query_dim, attention_dim, bias=False, init_gain='tanh') - self.inputs_layer = Linear( - embedding_dim, attention_dim, bias=False, init_gain='tanh') - self.v = Linear(attention_dim, 1, bias=True) - if trans_agent: - self.ta = nn.Linear( - query_dim + embedding_dim, 1, bias=True) - if location_attention: - self.location_layer = LocationLayer( - attention_dim, - attention_location_n_filters, - attention_location_kernel_size, - ) - self._mask_value = -float("inf") - self.windowing = windowing - self.win_idx = None - self.norm = norm - self.forward_attn = forward_attn - self.trans_agent = trans_agent - self.forward_attn_mask = forward_attn_mask - self.location_attention = location_attention - - def init_win_idx(self): - self.win_idx = -1 - self.win_back = 2 - self.win_front = 6 - - def init_forward_attn(self, inputs): - B = inputs.shape[0] - T = inputs.shape[1] - self.alpha = torch.cat( - [torch.ones([B, 1]), - torch.zeros([B, T])[:, :-1] + 1e-7], dim=1).to(inputs.device) - self.u = (0.5 * torch.ones([B, 1])).to(inputs.device) - - def init_location_attention(self, inputs): - B = inputs.shape[0] - T = inputs.shape[1] - self.attention_weights_cum = Variable(inputs.data.new(B, T).zero_()) - - def init_states(self, inputs): - B = inputs.shape[0] - T = inputs.shape[1] - self.attention_weights = Variable(inputs.data.new(B, T).zero_()) - if self.location_attention: - self.init_location_attention(inputs) - if self.forward_attn: - self.init_forward_attn(inputs) - if self.windowing: - self.init_win_idx() - - def preprocess_inputs(self, inputs): - return self.inputs_layer(inputs) - - def update_location_attention(self, alignments): - self.attention_weights_cum += alignments - - def get_location_attention(self, query, processed_inputs): - attention_cat = torch.cat((self.attention_weights.unsqueeze(1), - self.attention_weights_cum.unsqueeze(1)), - dim=1) - processed_query = self.query_layer(query.unsqueeze(1)) - processed_attention_weights = self.location_layer(attention_cat) - energies = self.v( - torch.tanh(processed_query + processed_attention_weights + - processed_inputs)) - energies = energies.squeeze(-1) - return energies, processed_query - - def get_attention(self, query, processed_inputs): - processed_query = self.query_layer(query.unsqueeze(1)) - energies = self.v(torch.tanh(processed_query + processed_inputs)) - energies = energies.squeeze(-1) - return energies, processed_query - - def apply_windowing(self, attention, inputs): - back_win = self.win_idx - self.win_back - front_win = self.win_idx + self.win_front - if back_win > 0: - attention[:, :back_win] = -float("inf") - if front_win < inputs.shape[1]: - attention[:, front_win:] = -float("inf") - # this is a trick to solve a special problem. - # but it does not hurt. 
- if self.win_idx == -1: - attention[:, 0] = attention.max() - # Update the window - self.win_idx = torch.argmax(attention, 1).long()[0].item() - return attention - - def apply_forward_attention(self, alignment): - # forward attention - fwd_shifted_alpha = F.pad(self.alpha[:, :-1].clone().to(alignment.device), - (1, 0, 0, 0)) - # compute transition potentials - alpha = ((1 - self.u) * self.alpha - + self.u * fwd_shifted_alpha - + 1e-8) * alignment - # force incremental alignment - if not self.training and self.forward_attn_mask: - _, n = fwd_shifted_alpha.max(1) - val, n2 = alpha.max(1) - for b in range(alignment.shape[0]): - alpha[b, n[b] + 3:] = 0 - alpha[b, :( - n[b] - 1 - )] = 0 # ignore all previous states to prevent repetition. - alpha[b, - (n[b] - 2 - )] = 0.01 * val[b] # smoothing factor for the prev step - # renormalize attention weights - alpha = alpha / alpha.sum(dim=1, keepdim=True) - return alpha - - def forward(self, query, inputs, processed_inputs, mask): - """ - shapes: - query: B x D_attn_rnn - inputs: B x T_en x D_en - processed_inputs:: B x T_en x D_attn - mask: B x T_en - """ - if self.location_attention: - attention, _ = self.get_location_attention( - query, processed_inputs) - else: - attention, _ = self.get_attention( - query, processed_inputs) - # apply masking - if mask is not None: - attention.data.masked_fill_(~mask, self._mask_value) - # apply windowing - only in eval mode - if not self.training and self.windowing: - attention = self.apply_windowing(attention, inputs) - - # normalize attention values - if self.norm == "softmax": - alignment = torch.softmax(attention, dim=-1) - elif self.norm == "sigmoid": - alignment = torch.sigmoid(attention) / torch.sigmoid( - attention).sum( - dim=1, keepdim=True) - else: - raise ValueError("Unknown value for attention norm type") - - if self.location_attention: - self.update_location_attention(alignment) - - # apply forward attention if enabled - if self.forward_attn: - alignment = self.apply_forward_attention(alignment) - self.alpha = alignment - - context = torch.bmm(alignment.unsqueeze(1), inputs) - context = context.squeeze(1) - self.attention_weights = alignment - - # compute transition agent - if self.forward_attn and self.trans_agent: - ta_input = torch.cat([context, query.squeeze(1)], dim=-1) - self.u = torch.sigmoid(self.ta(ta_input)) - return context - - -def init_attn(attn_type, query_dim, embedding_dim, attention_dim, - location_attention, attention_location_n_filters, - attention_location_kernel_size, windowing, norm, forward_attn, - trans_agent, forward_attn_mask, attn_K): - if attn_type == "original": - return OriginalAttention(query_dim, embedding_dim, attention_dim, - location_attention, - attention_location_n_filters, - attention_location_kernel_size, windowing, - norm, forward_attn, trans_agent, - forward_attn_mask) - if attn_type == "graves": - return GravesAttention(query_dim, attn_K) - raise RuntimeError( - " [!] Given Attention Type '{attn_type}' is not exist.") diff --git a/layers/gst_layers.py b/layers/gst_layers.py deleted file mode 100644 index 8058d5ed..00000000 --- a/layers/gst_layers.py +++ /dev/null @@ -1,169 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F - - -class GST(nn.Module): - """Global Style Token Module for factorizing prosody in speech. 
- - See https://arxiv.org/pdf/1803.09017""" - - def __init__(self, num_mel, num_heads, num_style_tokens, embedding_dim): - super().__init__() - self.encoder = ReferenceEncoder(num_mel, embedding_dim) - self.style_token_layer = StyleTokenLayer(num_heads, num_style_tokens, - embedding_dim) - - def forward(self, inputs): - enc_out = self.encoder(inputs) - style_embed = self.style_token_layer(enc_out) - - return style_embed - - -class ReferenceEncoder(nn.Module): - """NN module creating a fixed size prosody embedding from a spectrogram. - - inputs: mel spectrograms [batch_size, num_spec_frames, num_mel] - outputs: [batch_size, embedding_dim] - """ - - def __init__(self, num_mel, embedding_dim): - - super().__init__() - self.num_mel = num_mel - filters = [1] + [32, 32, 64, 64, 128, 128] - num_layers = len(filters) - 1 - convs = [ - nn.Conv2d( - in_channels=filters[i], - out_channels=filters[i + 1], - kernel_size=(3, 3), - stride=(2, 2), - padding=(1, 1)) for i in range(num_layers) - ] - self.convs = nn.ModuleList(convs) - self.bns = nn.ModuleList([ - nn.BatchNorm2d(num_features=filter_size) - for filter_size in filters[1:] - ]) - - post_conv_height = self.calculate_post_conv_height( - num_mel, 3, 2, 1, num_layers) - self.recurrence = nn.GRU( - input_size=filters[-1] * post_conv_height, - hidden_size=embedding_dim // 2, - batch_first=True) - - def forward(self, inputs): - batch_size = inputs.size(0) - x = inputs.view(batch_size, 1, -1, self.num_mel) - # x: 4D tensor [batch_size, num_channels==1, num_frames, num_mel] - for conv, bn in zip(self.convs, self.bns): - x = conv(x) - x = bn(x) - x = F.relu(x) - - x = x.transpose(1, 2) - # x: 4D tensor [batch_size, post_conv_width, - # num_channels==128, post_conv_height] - post_conv_width = x.size(1) - x = x.contiguous().view(batch_size, post_conv_width, -1) - # x: 3D tensor [batch_size, post_conv_width, - # num_channels*post_conv_height] - self.recurrence.flatten_parameters() - memory, out = self.recurrence(x) - # out: 3D tensor [seq_len==1, batch_size, encoding_size=128] - - return out.squeeze(0) - - @staticmethod - def calculate_post_conv_height(height, kernel_size, stride, pad, - n_convs): - """Height of spec after n convolutions with fixed kernel/stride/pad.""" - for _ in range(n_convs): - height = (height - kernel_size + 2 * pad) // stride + 1 - return height - - -class StyleTokenLayer(nn.Module): - """NN Module attending to style tokens based on prosody encodings.""" - - def __init__(self, num_heads, num_style_tokens, - embedding_dim): - super().__init__() - self.query_dim = embedding_dim // 2 - self.key_dim = embedding_dim // num_heads - self.style_tokens = nn.Parameter( - torch.FloatTensor(num_style_tokens, self.key_dim)) - nn.init.orthogonal_(self.style_tokens) - self.attention = MultiHeadAttention( - query_dim=self.query_dim, - key_dim=self.key_dim, - num_units=embedding_dim, - num_heads=num_heads) - - def forward(self, inputs): - batch_size = inputs.size(0) - prosody_encoding = inputs.unsqueeze(1) - # prosody_encoding: 3D tensor [batch_size, 1, encoding_size==128] - tokens = torch.tanh(self.style_tokens) \ - .unsqueeze(0) \ - .expand(batch_size, -1, -1) - # tokens: 3D tensor [batch_size, num tokens, token embedding size] - style_embed = self.attention(prosody_encoding, tokens) - - return style_embed - - -class MultiHeadAttention(nn.Module): - ''' - input: - query --- [N, T_q, query_dim] - key --- [N, T_k, key_dim] - output: - out --- [N, T_q, num_units] - ''' - - def __init__(self, query_dim, key_dim, num_units, num_heads): - - 
super().__init__() - self.num_units = num_units - self.num_heads = num_heads - self.key_dim = key_dim - - self.W_query = nn.Linear( - in_features=query_dim, out_features=num_units, bias=False) - self.W_key = nn.Linear( - in_features=key_dim, out_features=num_units, bias=False) - self.W_value = nn.Linear( - in_features=key_dim, out_features=num_units, bias=False) - - def forward(self, query, key): - queries = self.W_query(query) # [N, T_q, num_units] - keys = self.W_key(key) # [N, T_k, num_units] - values = self.W_value(key) - - split_size = self.num_units // self.num_heads - queries = torch.stack( - torch.split(queries, split_size, dim=2), - dim=0) # [h, N, T_q, num_units/h] - keys = torch.stack( - torch.split(keys, split_size, dim=2), - dim=0) # [h, N, T_k, num_units/h] - values = torch.stack( - torch.split(values, split_size, dim=2), - dim=0) # [h, N, T_k, num_units/h] - - # score = softmax(QK^T / (d_k ** 0.5)) - scores = torch.matmul(queries, keys.transpose(2, 3)) # [h, N, T_q, T_k] - scores = scores / (self.key_dim**0.5) - scores = F.softmax(scores, dim=3) - - # out = score * V - out = torch.matmul(scores, values) # [h, N, T_q, num_units/h] - out = torch.cat( - torch.split(out, 1, dim=0), - dim=3).squeeze(0) # [N, T_q, num_units] - - return out diff --git a/layers/losses.py b/layers/losses.py deleted file mode 100644 index f7745b6e..00000000 --- a/layers/losses.py +++ /dev/null @@ -1,246 +0,0 @@ -import numpy as np -import torch -from torch import nn -from torch.nn import functional -from TTS.utils.generic_utils import sequence_mask - - -class L1LossMasked(nn.Module): - - def __init__(self, seq_len_norm): - super(L1LossMasked, self).__init__() - self.seq_len_norm = seq_len_norm - - def forward(self, x, target, length): - """ - Args: - x: A Variable containing a FloatTensor of size - (batch, max_len, dim) which contains the - unnormalized probability for each class. - target: A Variable containing a LongTensor of size - (batch, max_len, dim) which contains the index of the true - class for each corresponding step. - length: A Variable containing a LongTensor of size (batch,) - which contains the length of each data in a batch. - Returns: - loss: An average loss value in range [0, 1] masked by the length. - """ - # mask: (batch, max_len, 1) - target.requires_grad = False - mask = sequence_mask( - sequence_length=length, max_len=target.size(1)).unsqueeze(2).float() - if self.seq_len_norm: - norm_w = mask / mask.sum(dim=1, keepdim=True) - out_weights = norm_w.div(target.shape[0] * target.shape[2]) - mask = mask.expand_as(x) - loss = functional.l1_loss( - x * mask, target * mask, reduction='none') - loss = loss.mul(out_weights.to(loss.device)).sum() - else: - mask = mask.expand_as(x) - loss = functional.l1_loss( - x * mask, target * mask, reduction='sum') - loss = loss / mask.sum() - return loss - - -class MSELossMasked(nn.Module): - - def __init__(self, seq_len_norm): - super(MSELossMasked, self).__init__() - self.seq_len_norm = seq_len_norm - - def forward(self, x, target, length): - """ - Args: - x: A Variable containing a FloatTensor of size - (batch, max_len, dim) which contains the - unnormalized probability for each class. - target: A Variable containing a LongTensor of size - (batch, max_len, dim) which contains the index of the true - class for each corresponding step. - length: A Variable containing a LongTensor of size (batch,) - which contains the length of each data in a batch. - Returns: - loss: An average loss value in range [0, 1] masked by the length. 
- """ - # mask: (batch, max_len, 1) - target.requires_grad = False - mask = sequence_mask( - sequence_length=length, max_len=target.size(1)).unsqueeze(2).float() - if self.seq_len_norm: - norm_w = mask / mask.sum(dim=1, keepdim=True) - out_weights = norm_w.div(target.shape[0] * target.shape[2]) - mask = mask.expand_as(x) - loss = functional.mse_loss( - x * mask, target * mask, reduction='none') - loss = loss.mul(out_weights.to(loss.device)).sum() - else: - mask = mask.expand_as(x) - loss = functional.mse_loss( - x * mask, target * mask, reduction='sum') - loss = loss / mask.sum() - return loss - - -class AttentionEntropyLoss(nn.Module): - # pylint: disable=R0201 - def forward(self, align): - """ - Forces attention to be more decisive by penalizing - soft attention weights - - TODO: arguments - TODO: unit_test - """ - entropy = torch.distributions.Categorical(probs=align).entropy() - loss = (entropy / np.log(align.shape[1])).mean() - return loss - - -class BCELossMasked(nn.Module): - - def __init__(self, pos_weight): - super(BCELossMasked, self).__init__() - self.pos_weight = pos_weight - - def forward(self, x, target, length): - """ - Args: - x: A Variable containing a FloatTensor of size - (batch, max_len) which contains the - unnormalized probability for each class. - target: A Variable containing a LongTensor of size - (batch, max_len) which contains the index of the true - class for each corresponding step. - length: A Variable containing a LongTensor of size (batch,) - which contains the length of each data in a batch. - Returns: - loss: An average loss value in range [0, 1] masked by the length. - """ - # mask: (batch, max_len, 1) - target.requires_grad = False - mask = sequence_mask(sequence_length=length, max_len=target.size(1)).float() - loss = functional.binary_cross_entropy_with_logits( - x * mask, target * mask, pos_weight=self.pos_weight, reduction='sum') - loss = loss / mask.sum() - return loss - - -class GuidedAttentionLoss(torch.nn.Module): - def __init__(self, sigma=0.4): - super(GuidedAttentionLoss, self).__init__() - self.sigma = sigma - - def _make_ga_masks(self, ilens, olens): - B = len(ilens) - max_ilen = max(ilens) - max_olen = max(olens) - ga_masks = torch.zeros((B, max_olen, max_ilen)) - for idx, (ilen, olen) in enumerate(zip(ilens, olens)): - ga_masks[idx, :olen, :ilen] = self._make_ga_mask(ilen, olen, self.sigma) - return ga_masks - - def forward(self, att_ws, ilens, olens): - ga_masks = self._make_ga_masks(ilens, olens).to(att_ws.device) - seq_masks = self._make_masks(ilens, olens).to(att_ws.device) - losses = ga_masks * att_ws - loss = torch.mean(losses.masked_select(seq_masks)) - return loss - - @staticmethod - def _make_ga_mask(ilen, olen, sigma): - grid_x, grid_y = torch.meshgrid(torch.arange(olen), torch.arange(ilen)) - grid_x, grid_y = grid_x.float(), grid_y.float() - return 1.0 - torch.exp(-(grid_y / ilen - grid_x / olen) ** 2 / (2 * (sigma ** 2))) - - @staticmethod - def _make_masks(ilens, olens): - in_masks = sequence_mask(ilens) - out_masks = sequence_mask(olens) - return out_masks.unsqueeze(-1) & in_masks.unsqueeze(-2) - - -class TacotronLoss(torch.nn.Module): - def __init__(self, c, stopnet_pos_weight=10, ga_sigma=0.4): - super(TacotronLoss, self).__init__() - self.stopnet_pos_weight = stopnet_pos_weight - self.ga_alpha = c.ga_alpha - self.config = c - # postnet decoder loss - if c.loss_masking: - self.criterion = L1LossMasked(c.seq_len_norm) if c.model in [ - "Tacotron" - ] else MSELossMasked(c.seq_len_norm) - else: - self.criterion = nn.L1Loss() if 
c.model in ["Tacotron" - ] else nn.MSELoss() - # guided attention loss - if c.ga_alpha > 0: - self.criterion_ga = GuidedAttentionLoss(sigma=ga_sigma) - # stopnet loss - # pylint: disable=not-callable - self.criterion_st = BCELossMasked(pos_weight=torch.tensor(stopnet_pos_weight)) if c.stopnet else None - - def forward(self, postnet_output, decoder_output, mel_input, linear_input, - stopnet_output, stopnet_target, output_lens, decoder_b_output, - alignments, alignment_lens, alignments_backwards, input_lens): - - return_dict = {} - # decoder and postnet losses - if self.config.loss_masking: - decoder_loss = self.criterion(decoder_output, mel_input, - output_lens) - if self.config.model in ["Tacotron", "TacotronGST"]: - postnet_loss = self.criterion(postnet_output, linear_input, - output_lens) - else: - postnet_loss = self.criterion(postnet_output, mel_input, - output_lens) - else: - decoder_loss = self.criterion(decoder_output, mel_input) - if self.config.model in ["Tacotron", "TacotronGST"]: - postnet_loss = self.criterion(postnet_output, linear_input) - else: - postnet_loss = self.criterion(postnet_output, mel_input) - loss = decoder_loss + postnet_loss - return_dict['decoder_loss'] = decoder_loss - return_dict['postnet_loss'] = postnet_loss - - # stopnet loss - stop_loss = self.criterion_st( - stopnet_output, stopnet_target, - output_lens) if self.config.stopnet else torch.zeros(1) - if not self.config.separate_stopnet and self.config.stopnet: - loss += stop_loss - return_dict['stopnet_loss'] = stop_loss - - # backward decoder loss (if enabled) - if self.config.bidirectional_decoder: - if self.config.loss_masking: - decoder_b_loss = self.criterion(torch.flip(decoder_b_output, dims=(1, )), mel_input, output_lens) - else: - decoder_b_loss = self.criterion(torch.flip(decoder_b_output, dims=(1, )), mel_input) - decoder_c_loss = torch.nn.functional.l1_loss(torch.flip(decoder_b_output, dims=(1, )), decoder_output) - loss += decoder_b_loss + decoder_c_loss - return_dict['decoder_b_loss'] = decoder_b_loss - return_dict['decoder_c_loss'] = decoder_c_loss - - # double decoder consistency loss (if enabled) - if self.config.double_decoder_consistency: - decoder_b_loss = self.criterion(decoder_b_output, mel_input, output_lens) - # decoder_c_loss = torch.nn.functional.l1_loss(decoder_b_output, decoder_output) - attention_c_loss = torch.nn.functional.l1_loss(alignments, alignments_backwards) - loss += decoder_b_loss + attention_c_loss - return_dict['decoder_coarse_loss'] = decoder_b_loss - return_dict['decoder_ddc_loss'] = attention_c_loss - - # guided attention loss (if enabled) - if self.config.ga_alpha > 0: - ga_loss = self.criterion_ga(alignments, input_lens, alignment_lens) - loss += ga_loss * self.ga_alpha - return_dict['ga_loss'] = ga_loss * self.ga_alpha - - return_dict['loss'] = loss - return return_dict - diff --git a/layers/tacotron.py b/layers/tacotron.py deleted file mode 100644 index 20fd1e52..00000000 --- a/layers/tacotron.py +++ /dev/null @@ -1,496 +0,0 @@ -# coding: utf-8 -import torch -from torch import nn -from .common_layers import Prenet, init_attn, Linear - - -class BatchNormConv1d(nn.Module): - r"""A wrapper for Conv1d with BatchNorm. It sets the activation - function between Conv and BatchNorm layers. BatchNorm layer - is initialized with the TF default values for momentum and eps. 
- - Args: - in_channels: size of each input sample - out_channels: size of each output samples - kernel_size: kernel size of conv filters - stride: stride of conv filters - padding: padding of conv filters - activation: activation function set b/w Conv1d and BatchNorm - - Shapes: - - input: batch x dims - - output: batch x dims - """ - - def __init__(self, - in_channels, - out_channels, - kernel_size, - stride, - padding, - activation=None): - - super(BatchNormConv1d, self).__init__() - self.padding = padding - self.padder = nn.ConstantPad1d(padding, 0) - self.conv1d = nn.Conv1d( - in_channels, - out_channels, - kernel_size=kernel_size, - stride=stride, - padding=0, - bias=False) - # Following tensorflow's default parameters - self.bn = nn.BatchNorm1d(out_channels, momentum=0.99, eps=1e-3) - self.activation = activation - # self.init_layers() - - def init_layers(self): - if type(self.activation) == torch.nn.ReLU: - w_gain = 'relu' - elif type(self.activation) == torch.nn.Tanh: - w_gain = 'tanh' - elif self.activation is None: - w_gain = 'linear' - else: - raise RuntimeError('Unknown activation function') - torch.nn.init.xavier_uniform_( - self.conv1d.weight, gain=torch.nn.init.calculate_gain(w_gain)) - - def forward(self, x): - x = self.padder(x) - x = self.conv1d(x) - x = self.bn(x) - if self.activation is not None: - x = self.activation(x) - return x - - -class Highway(nn.Module): - # TODO: Try GLU layer - def __init__(self, in_size, out_size): - super(Highway, self).__init__() - self.H = nn.Linear(in_size, out_size) - self.H.bias.data.zero_() - self.T = nn.Linear(in_size, out_size) - self.T.bias.data.fill_(-1) - self.relu = nn.ReLU() - self.sigmoid = nn.Sigmoid() - # self.init_layers() - - def init_layers(self): - torch.nn.init.xavier_uniform_( - self.H.weight, gain=torch.nn.init.calculate_gain('relu')) - torch.nn.init.xavier_uniform_( - self.T.weight, gain=torch.nn.init.calculate_gain('sigmoid')) - - def forward(self, inputs): - H = self.relu(self.H(inputs)) - T = self.sigmoid(self.T(inputs)) - return H * T + inputs * (1.0 - T) - - -class CBHG(nn.Module): - """CBHG module: a recurrent neural network composed of: - - 1-d convolution banks - - Highway networks + residual connections - - Bidirectional gated recurrent units - - Args: - in_features (int): sample size - K (int): max filter size in conv bank - projections (list): conv channel sizes for conv projections - num_highways (int): number of highways layers - - Shapes: - - input: B x D x T_in - - output: B x T_in x D*2 - """ - - def __init__(self, - in_features, - K=16, - conv_bank_features=128, - conv_projections=[128, 128], - highway_features=128, - gru_features=128, - num_highways=4): - super(CBHG, self).__init__() - self.in_features = in_features - self.conv_bank_features = conv_bank_features - self.highway_features = highway_features - self.gru_features = gru_features - self.conv_projections = conv_projections - self.relu = nn.ReLU() - # list of conv1d bank with filter size k=1...K - # TODO: try dilational layers instead - self.conv1d_banks = nn.ModuleList([ - BatchNormConv1d(in_features, - conv_bank_features, - kernel_size=k, - stride=1, - padding=[(k - 1) // 2, k // 2], - activation=self.relu) for k in range(1, K + 1) - ]) - # max pooling of conv bank, with padding - # TODO: try average pooling OR larger kernel size - out_features = [K * conv_bank_features] + conv_projections[:-1] - activations = [self.relu] * (len(conv_projections) - 1) - activations += [None] - # setup conv1d projection layers - layer_set = [] - for 
(in_size, out_size, ac) in zip(out_features, conv_projections, - activations): - layer = BatchNormConv1d(in_size, - out_size, - kernel_size=3, - stride=1, - padding=[1, 1], - activation=ac) - layer_set.append(layer) - self.conv1d_projections = nn.ModuleList(layer_set) - # setup Highway layers - if self.highway_features != conv_projections[-1]: - self.pre_highway = nn.Linear(conv_projections[-1], - highway_features, - bias=False) - self.highways = nn.ModuleList([ - Highway(highway_features, highway_features) - for _ in range(num_highways) - ]) - # bi-directional GPU layer - self.gru = nn.GRU(gru_features, - gru_features, - 1, - batch_first=True, - bidirectional=True) - - def forward(self, inputs): - # (B, in_features, T_in) - x = inputs - # (B, hid_features*K, T_in) - # Concat conv1d bank outputs - outs = [] - for conv1d in self.conv1d_banks: - out = conv1d(x) - outs.append(out) - x = torch.cat(outs, dim=1) - assert x.size(1) == self.conv_bank_features * len(self.conv1d_banks) - for conv1d in self.conv1d_projections: - x = conv1d(x) - x += inputs - x = x.transpose(1, 2) - if self.highway_features != self.conv_projections[-1]: - x = self.pre_highway(x) - # Residual connection - # TODO: try residual scaling as in Deep Voice 3 - # TODO: try plain residual layers - for highway in self.highways: - x = highway(x) - # (B, T_in, hid_features*2) - # TODO: replace GRU with convolution as in Deep Voice 3 - self.gru.flatten_parameters() - outputs, _ = self.gru(x) - return outputs - - -class EncoderCBHG(nn.Module): - def __init__(self): - super(EncoderCBHG, self).__init__() - self.cbhg = CBHG( - 128, - K=16, - conv_bank_features=128, - conv_projections=[128, 128], - highway_features=128, - gru_features=128, - num_highways=4) - - def forward(self, x): - return self.cbhg(x) - - -class Encoder(nn.Module): - r"""Encapsulate Prenet and CBHG modules for encoder""" - - def __init__(self, in_features): - super(Encoder, self).__init__() - self.prenet = Prenet(in_features, out_features=[256, 128]) - self.cbhg = EncoderCBHG() - - def forward(self, inputs): - r""" - Args: - inputs (FloatTensor): embedding features - - Shapes: - - inputs: batch x time x in_features - - outputs: batch x time x 128*2 - """ - # B x T x prenet_dim - outputs = self.prenet(inputs) - outputs = self.cbhg(outputs.transpose(1, 2)) - return outputs - - -class PostCBHG(nn.Module): - def __init__(self, mel_dim): - super(PostCBHG, self).__init__() - self.cbhg = CBHG( - mel_dim, - K=8, - conv_bank_features=128, - conv_projections=[256, mel_dim], - highway_features=128, - gru_features=128, - num_highways=4) - - def forward(self, x): - return self.cbhg(x) - - -class Decoder(nn.Module): - """Decoder module. - - Args: - in_features (int): input vector (encoder output) sample size. - memory_dim (int): memory vector (prev. time-step output) sample size. - r (int): number of outputs per time step. - memory_size (int): size of the past window. 
if <= 0 memory_size = r - TODO: arguments - """ - - # Pylint gets confused by PyTorch conventions here - #pylint: disable=attribute-defined-outside-init - - def __init__(self, in_features, memory_dim, r, memory_size, attn_type, attn_windowing, - attn_norm, prenet_type, prenet_dropout, forward_attn, - trans_agent, forward_attn_mask, location_attn, attn_K, - separate_stopnet, speaker_embedding_dim): - super(Decoder, self).__init__() - self.r_init = r - self.r = r - self.in_features = in_features - self.max_decoder_steps = 500 - self.use_memory_queue = memory_size > 0 - self.memory_size = memory_size if memory_size > 0 else r - self.memory_dim = memory_dim - self.separate_stopnet = separate_stopnet - self.query_dim = 256 - # memory -> |Prenet| -> processed_memory - prenet_dim = memory_dim * self.memory_size + speaker_embedding_dim if self.use_memory_queue else memory_dim + speaker_embedding_dim - self.prenet = Prenet( - prenet_dim, - prenet_type, - prenet_dropout, - out_features=[256, 128]) - # processed_inputs, processed_memory -> |Attention| -> Attention, attention, RNN_State - # attention_rnn generates queries for the attention mechanism - self.attention_rnn = nn.GRUCell(in_features + 128, self.query_dim) - - self.attention = init_attn(attn_type=attn_type, - query_dim=self.query_dim, - embedding_dim=in_features, - attention_dim=128, - location_attention=location_attn, - attention_location_n_filters=32, - attention_location_kernel_size=31, - windowing=attn_windowing, - norm=attn_norm, - forward_attn=forward_attn, - trans_agent=trans_agent, - forward_attn_mask=forward_attn_mask, - attn_K=attn_K) - # (processed_memory | attention context) -> |Linear| -> decoder_RNN_input - self.project_to_decoder_in = nn.Linear(256 + in_features, 256) - # decoder_RNN_input -> |RNN| -> RNN_state - self.decoder_rnns = nn.ModuleList( - [nn.GRUCell(256, 256) for _ in range(2)]) - # RNN_state -> |Linear| -> mel_spec - self.proj_to_mel = nn.Linear(256, memory_dim * self.r_init) - # learn init values instead of zero init. 
- self.stopnet = StopNet(256 + memory_dim * self.r_init) - - def set_r(self, new_r): - self.r = new_r - - def _reshape_memory(self, memory): - """ - Reshape the spectrograms for given 'r' - """ - # Grouping multiple frames if necessary - if memory.size(-1) == self.memory_dim: - memory = memory.view(memory.shape[0], memory.size(1) // self.r, -1) - # Time first (T_decoder, B, memory_dim) - memory = memory.transpose(0, 1) - return memory - - def _init_states(self, inputs): - """ - Initialization of decoder states - """ - B = inputs.size(0) - T = inputs.size(1) - # go frame as zeros matrix - if self.use_memory_queue: - self.memory_input = torch.zeros(1, device=inputs.device).repeat(B, self.memory_dim * self.memory_size) - else: - self.memory_input = torch.zeros(1, device=inputs.device).repeat(B, self.memory_dim) - # decoder states - self.attention_rnn_hidden = torch.zeros(1, device=inputs.device).repeat(B, 256) - self.decoder_rnn_hiddens = [ - torch.zeros(1, device=inputs.device).repeat(B, 256) - for idx in range(len(self.decoder_rnns)) - ] - self.context_vec = inputs.data.new(B, self.in_features).zero_() - # cache attention inputs - self.processed_inputs = self.attention.preprocess_inputs(inputs) - - def _parse_outputs(self, outputs, attentions, stop_tokens): - # Back to batch first - attentions = torch.stack(attentions).transpose(0, 1) - stop_tokens = torch.stack(stop_tokens).transpose(0, 1) - outputs = torch.stack(outputs).transpose(0, 1).contiguous() - outputs = outputs.view( - outputs.size(0), -1, self.memory_dim) - outputs = outputs.transpose(1, 2) - return outputs, attentions, stop_tokens - - def decode(self, inputs, mask=None): - # Prenet - processed_memory = self.prenet(self.memory_input) - # Attention RNN - self.attention_rnn_hidden = self.attention_rnn( - torch.cat((processed_memory, self.context_vec), -1), - self.attention_rnn_hidden) - self.context_vec = self.attention( - self.attention_rnn_hidden, inputs, self.processed_inputs, mask) - # Concat RNN output and attention context vector - decoder_input = self.project_to_decoder_in( - torch.cat((self.attention_rnn_hidden, self.context_vec), -1)) - - # Pass through the decoder RNNs - for idx in range(len(self.decoder_rnns)): - self.decoder_rnn_hiddens[idx] = self.decoder_rnns[idx]( - decoder_input, self.decoder_rnn_hiddens[idx]) - # Residual connection - decoder_input = self.decoder_rnn_hiddens[idx] + decoder_input - decoder_output = decoder_input - - # predict mel vectors from decoder vectors - output = self.proj_to_mel(decoder_output) - # output = torch.sigmoid(output) - # predict stop token - stopnet_input = torch.cat([decoder_output, output], -1) - if self.separate_stopnet: - stop_token = self.stopnet(stopnet_input.detach()) - else: - stop_token = self.stopnet(stopnet_input) - output = output[:, : self.r * self.memory_dim] - return output, stop_token, self.attention.attention_weights - - def _update_memory_input(self, new_memory): - if self.use_memory_queue: - if self.memory_size > self.r: - # memory queue size is larger than number of frames per decoder iter - self.memory_input = torch.cat([ - new_memory, self.memory_input[:, :( - self.memory_size - self.r) * self.memory_dim].clone() - ], dim=-1) - else: - # memory queue size smaller than number of frames per decoder iter - self.memory_input = new_memory[:, :self.memory_size * self.memory_dim] - else: - # use only the last frame prediction - # assert new_memory.shape[-1] == self.r * self.memory_dim - self.memory_input = new_memory[:, self.memory_dim * (self.r - 1):] - - def 
forward(self, inputs, memory, mask, speaker_embeddings=None): - """ - Args: - inputs: Encoder outputs. - memory: Decoder memory (autoregression. If None (at eval-time), - decoder outputs are used as decoder inputs. If None, it uses the last - output as the input. - mask: Attention mask for sequence padding. - - Shapes: - - inputs: batch x time x encoder_out_dim - - memory: batch x #mel_specs x mel_spec_dim - """ - # Run greedy decoding if memory is None - memory = self._reshape_memory(memory) - outputs = [] - attentions = [] - stop_tokens = [] - t = 0 - self._init_states(inputs) - self.attention.init_states(inputs) - while len(outputs) < memory.size(0): - if t > 0: - new_memory = memory[t - 1] - self._update_memory_input(new_memory) - if speaker_embeddings is not None: - self.memory_input = torch.cat([self.memory_input, speaker_embeddings], dim=-1) - output, stop_token, attention = self.decode(inputs, mask) - outputs += [output] - attentions += [attention] - stop_tokens += [stop_token.squeeze(1)] - t += 1 - return self._parse_outputs(outputs, attentions, stop_tokens) - - def inference(self, inputs, speaker_embeddings=None): - """ - Args: - inputs: encoder outputs. - speaker_embeddings: speaker vectors. - - Shapes: - - inputs: batch x time x encoder_out_dim - - speaker_embeddings: batch x embed_dim - """ - outputs = [] - attentions = [] - stop_tokens = [] - t = 0 - self._init_states(inputs) - self.attention.init_win_idx() - self.attention.init_states(inputs) - while True: - if t > 0: - new_memory = outputs[-1] - self._update_memory_input(new_memory) - if speaker_embeddings is not None: - self.memory_input = torch.cat([self.memory_input, speaker_embeddings], dim=-1) - output, stop_token, attention = self.decode(inputs, None) - stop_token = torch.sigmoid(stop_token.data) - outputs += [output] - attentions += [attention] - stop_tokens += [stop_token] - t += 1 - if t > inputs.shape[1] / 4 and (stop_token > 0.6 - or attention[:, -1].item() > 0.6): - break - elif t > self.max_decoder_steps: - print(" | > Decoder stopped with 'max_decoder_steps") - break - return self._parse_outputs(outputs, attentions, stop_tokens) - - -class StopNet(nn.Module): - r""" - Args: - in_features (int): feature dimension of input. 
- """ - - def __init__(self, in_features): - super(StopNet, self).__init__() - self.dropout = nn.Dropout(0.1) - self.linear = nn.Linear(in_features, 1) - torch.nn.init.xavier_uniform_( - self.linear.weight, gain=torch.nn.init.calculate_gain('linear')) - - def forward(self, inputs): - outputs = self.dropout(inputs) - outputs = self.linear(outputs) - return outputs diff --git a/layers/tacotron2.py b/layers/tacotron2.py deleted file mode 100644 index f11aee65..00000000 --- a/layers/tacotron2.py +++ /dev/null @@ -1,353 +0,0 @@ -import torch -from torch.autograd import Variable -from torch import nn -from torch.nn import functional as F -from .common_layers import init_attn, Prenet, Linear - - -class ConvBNBlock(nn.Module): - def __init__(self, in_channels, out_channels, kernel_size, activation=None): - super(ConvBNBlock, self).__init__() - assert (kernel_size - 1) % 2 == 0 - padding = (kernel_size - 1) // 2 - self.convolution1d = nn.Conv1d(in_channels, - out_channels, - kernel_size, - padding=padding) - self.batch_normalization = nn.BatchNorm1d(out_channels, momentum=0.1, eps=1e-5) - self.dropout = nn.Dropout(p=0.5) - if activation == 'relu': - self.activation = nn.ReLU() - elif activation == 'tanh': - self.activation = nn.Tanh() - else: - self.activation = nn.Identity() - - def forward(self, x): - o = self.convolution1d(x) - o = self.batch_normalization(o) - o = self.activation(o) - o = self.dropout(o) - return o - - -class Postnet(nn.Module): - def __init__(self, output_dim, num_convs=5): - super(Postnet, self).__init__() - self.convolutions = nn.ModuleList() - self.convolutions.append( - ConvBNBlock(output_dim, 512, kernel_size=5, activation='tanh')) - for _ in range(1, num_convs - 1): - self.convolutions.append( - ConvBNBlock(512, 512, kernel_size=5, activation='tanh')) - self.convolutions.append( - ConvBNBlock(512, output_dim, kernel_size=5, activation=None)) - - def forward(self, x): - o = x - for layer in self.convolutions: - o = layer(o) - return o - - -class Encoder(nn.Module): - def __init__(self, output_input_dim=512): - super(Encoder, self).__init__() - self.convolutions = nn.ModuleList() - for _ in range(3): - self.convolutions.append( - ConvBNBlock(output_input_dim, output_input_dim, 5, 'relu')) - self.lstm = nn.LSTM(output_input_dim, - int(output_input_dim / 2), - num_layers=1, - batch_first=True, - bias=True, - bidirectional=True) - self.rnn_state = None - - def forward(self, x, input_lengths): - o = x - for layer in self.convolutions: - o = layer(o) - o = o.transpose(1, 2) - o = nn.utils.rnn.pack_padded_sequence(o, - input_lengths, - batch_first=True) - self.lstm.flatten_parameters() - o, _ = self.lstm(o) - o, _ = nn.utils.rnn.pad_packed_sequence(o, batch_first=True) - return o - - def inference(self, x): - o = x - for layer in self.convolutions: - o = layer(o) - o = o.transpose(1, 2) - # self.lstm.flatten_parameters() - o, _ = self.lstm(o) - return o - - -# adapted from https://github.com/NVIDIA/tacotron2/ -class Decoder(nn.Module): - # Pylint gets confused by PyTorch conventions here - #pylint: disable=attribute-defined-outside-init - def __init__(self, input_dim, frame_dim, r, attn_type, attn_win, attn_norm, - prenet_type, prenet_dropout, forward_attn, trans_agent, - forward_attn_mask, location_attn, attn_K, separate_stopnet, - speaker_embedding_dim): - super(Decoder, self).__init__() - self.frame_dim = frame_dim - self.r_init = r - self.r = r - self.encoder_embedding_dim = input_dim - self.separate_stopnet = separate_stopnet - self.max_decoder_steps = 1000 - 
self.gate_threshold = 0.5 - - # model dimensions - self.query_dim = 1024 - self.decoder_rnn_dim = 1024 - self.prenet_dim = 256 - self.attn_dim = 128 - self.p_attention_dropout = 0.1 - self.p_decoder_dropout = 0.1 - - # memory -> |Prenet| -> processed_memory - prenet_dim = self.frame_dim - self.prenet = Prenet(prenet_dim, - prenet_type, - prenet_dropout, - out_features=[self.prenet_dim, self.prenet_dim], - bias=False) - - self.attention_rnn = nn.LSTMCell(self.prenet_dim + input_dim, - self.query_dim, - bias=True) - - self.attention = init_attn(attn_type=attn_type, - query_dim=self.query_dim, - embedding_dim=input_dim, - attention_dim=128, - location_attention=location_attn, - attention_location_n_filters=32, - attention_location_kernel_size=31, - windowing=attn_win, - norm=attn_norm, - forward_attn=forward_attn, - trans_agent=trans_agent, - forward_attn_mask=forward_attn_mask, - attn_K=attn_K) - - self.decoder_rnn = nn.LSTMCell(self.query_dim + input_dim, - self.decoder_rnn_dim, - bias=True) - - self.linear_projection = Linear(self.decoder_rnn_dim + input_dim, - self.frame_dim * self.r_init) - - self.stopnet = nn.Sequential( - nn.Dropout(0.1), - Linear(self.decoder_rnn_dim + self.frame_dim * self.r_init, - 1, - bias=True, - init_gain='sigmoid')) - self.memory_truncated = None - - def set_r(self, new_r): - self.r = new_r - - def get_go_frame(self, inputs): - B = inputs.size(0) - memory = torch.zeros(1, device=inputs.device).repeat(B, - self.frame_dim * self.r) - return memory - - def _init_states(self, inputs, mask, keep_states=False): - B = inputs.size(0) - # T = inputs.size(1) - if not keep_states: - self.query = torch.zeros(1, device=inputs.device).repeat( - B, self.query_dim) - self.attention_rnn_cell_state = torch.zeros( - 1, device=inputs.device).repeat(B, self.query_dim) - self.decoder_hidden = torch.zeros(1, device=inputs.device).repeat( - B, self.decoder_rnn_dim) - self.decoder_cell = torch.zeros(1, device=inputs.device).repeat( - B, self.decoder_rnn_dim) - self.context = torch.zeros(1, device=inputs.device).repeat( - B, self.encoder_embedding_dim) - self.inputs = inputs - self.processed_inputs = self.attention.preprocess_inputs(inputs) - self.mask = mask - - def _reshape_memory(self, memory): - """ - Reshape the spectrograms for given 'r' - """ - # Grouping multiple frames if necessary - if memory.size(-1) == self.frame_dim: - memory = memory.view(memory.shape[0], memory.size(1) // self.r, -1) - # Time first (T_decoder, B, frame_dim) - memory = memory.transpose(0, 1) - return memory - - def _parse_outputs(self, outputs, stop_tokens, alignments): - alignments = torch.stack(alignments).transpose(0, 1) - stop_tokens = torch.stack(stop_tokens).transpose(0, 1) - outputs = torch.stack(outputs).transpose(0, 1).contiguous() - outputs = outputs.view(outputs.size(0), -1, self.frame_dim) - outputs = outputs.transpose(1, 2) - return outputs, stop_tokens, alignments - - def _update_memory(self, memory): - if len(memory.shape) == 2: - return memory[:, self.frame_dim * (self.r - 1):] - return memory[:, :, self.frame_dim * (self.r - 1):] - - def decode(self, memory): - ''' - shapes: - - memory: B x r * self.frame_dim - ''' - # self.context: B x D_en - # query_input: B x D_en + (r * self.frame_dim) - query_input = torch.cat((memory, self.context), -1) - # self.query and self.attention_rnn_cell_state : B x D_attn_rnn - self.query, self.attention_rnn_cell_state = self.attention_rnn( - query_input, (self.query, self.attention_rnn_cell_state)) - self.query = F.dropout(self.query, 
self.p_attention_dropout, - self.training) - self.attention_rnn_cell_state = F.dropout( - self.attention_rnn_cell_state, self.p_attention_dropout, - self.training) - # B x D_en - self.context = self.attention(self.query, self.inputs, - self.processed_inputs, self.mask) - # B x (D_en + D_attn_rnn) - decoder_rnn_input = torch.cat((self.query, self.context), -1) - # self.decoder_hidden and self.decoder_cell: B x D_decoder_rnn - self.decoder_hidden, self.decoder_cell = self.decoder_rnn( - decoder_rnn_input, (self.decoder_hidden, self.decoder_cell)) - self.decoder_hidden = F.dropout(self.decoder_hidden, - self.p_decoder_dropout, self.training) - # B x (D_decoder_rnn + D_en) - decoder_hidden_context = torch.cat((self.decoder_hidden, self.context), - dim=1) - # B x (self.r * self.frame_dim) - decoder_output = self.linear_projection(decoder_hidden_context) - # B x (D_decoder_rnn + (self.r * self.frame_dim)) - stopnet_input = torch.cat((self.decoder_hidden, decoder_output), dim=1) - if self.separate_stopnet: - stop_token = self.stopnet(stopnet_input.detach()) - else: - stop_token = self.stopnet(stopnet_input) - # select outputs for the reduction rate self.r - decoder_output = decoder_output[:, :self.r * self.frame_dim] - return decoder_output, self.attention.attention_weights, stop_token - - def forward(self, inputs, memories, mask, speaker_embeddings=None): - memory = self.get_go_frame(inputs).unsqueeze(0) - memories = self._reshape_memory(memories) - memories = torch.cat((memory, memories), dim=0) - memories = self._update_memory(memories) - if speaker_embeddings is not None: - memories = torch.cat([memories, speaker_embeddings], dim=-1) - memories = self.prenet(memories) - - self._init_states(inputs, mask=mask) - self.attention.init_states(inputs) - - outputs, stop_tokens, alignments = [], [], [] - while len(outputs) < memories.size(0) - 1: - memory = memories[len(outputs)] - decoder_output, attention_weights, stop_token = self.decode(memory) - outputs += [decoder_output.squeeze(1)] - stop_tokens += [stop_token.squeeze(1)] - alignments += [attention_weights] - - outputs, stop_tokens, alignments = self._parse_outputs( - outputs, stop_tokens, alignments) - return outputs, alignments, stop_tokens - - def inference(self, inputs, speaker_embeddings=None): - memory = self.get_go_frame(inputs) - memory = self._update_memory(memory) - - self._init_states(inputs, mask=None) - self.attention.init_states(inputs) - - outputs, stop_tokens, alignments, t = [], [], [], 0 - while True: - memory = self.prenet(memory) - if speaker_embeddings is not None: - memory = torch.cat([memory, speaker_embeddings], dim=-1) - decoder_output, alignment, stop_token = self.decode(memory) - stop_token = torch.sigmoid(stop_token.data) - outputs += [decoder_output.squeeze(1)] - stop_tokens += [stop_token] - alignments += [alignment] - - if stop_token > 0.7 and t > inputs.shape[0] / 2: - break - if len(outputs) == self.max_decoder_steps: - print(" | > Decoder stopped with 'max_decoder_steps") - break - - memory = self._update_memory(decoder_output) - t += 1 - - outputs, stop_tokens, alignments = self._parse_outputs( - outputs, stop_tokens, alignments) - - return outputs, alignments, stop_tokens - - def inference_truncated(self, inputs): - """ - Preserve decoder states for continuous inference - """ - if self.memory_truncated is None: - self.memory_truncated = self.get_go_frame(inputs) - self._init_states(inputs, mask=None, keep_states=False) - else: - self._init_states(inputs, mask=None, keep_states=True) - - 
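The teacher-forced `forward` above consumes the target spectrogram one reduction group at a time; `_reshape_memory` folds every `r` frames into a single decoder step and puts time first. A toy shape check, with made-up sizes:

    import torch

    B, T, frame_dim, r = 2, 6, 80, 2
    memory = torch.randn(B, T, frame_dim)   # target mel frames, B x T x frame_dim
    grouped = memory.view(B, T // r, -1)    # B x T/r x (r * frame_dim)
    grouped = grouped.transpose(0, 1)       # time first, as the decoder loop expects
    assert grouped.shape == (T // r, B, r * frame_dim)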
self.attention.init_win_idx() - self.attention.init_states(inputs) - outputs, stop_tokens, alignments, t = [], [], [], 0 - stop_flags = [True, False, False] - while True: - memory = self.prenet(self.memory_truncated) - decoder_output, alignment, stop_token = self.decode(memory) - stop_token = torch.sigmoid(stop_token.data) - outputs += [decoder_output.squeeze(1)] - stop_tokens += [stop_token] - alignments += [alignment] - - if stop_token > 0.7: - break - if len(outputs) == self.max_decoder_steps: - print(" | > Decoder stopped with 'max_decoder_steps") - break - - self.memory_truncated = decoder_output - t += 1 - - outputs, stop_tokens, alignments = self._parse_outputs( - outputs, stop_tokens, alignments) - - return outputs, alignments, stop_tokens - - def inference_step(self, inputs, t, memory=None): - """ - For debug purposes - """ - if t == 0: - memory = self.get_go_frame(inputs) - self._init_states(inputs, mask=None) - - memory = self.prenet(memory) - decoder_output, stop_token, alignment = self.decode(memory) - stop_token = torch.sigmoid(stop_token.data) - memory = decoder_output - return decoder_output, stop_token, alignment diff --git a/models/__init__.py b/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/models/tacotron.py b/models/tacotron.py deleted file mode 100644 index ba42610c..00000000 --- a/models/tacotron.py +++ /dev/null @@ -1,160 +0,0 @@ -# coding: utf-8 -import torch -from torch import nn - -from TTS.layers.gst_layers import GST -from TTS.layers.tacotron import Decoder, Encoder, PostCBHG -from TTS.models.tacotron_abstract import TacotronAbstract - - -class Tacotron(TacotronAbstract): - def __init__(self, - num_chars, - num_speakers, - r=5, - postnet_output_dim=1025, - decoder_output_dim=80, - attn_type='original', - attn_win=False, - attn_norm="sigmoid", - prenet_type="original", - prenet_dropout=True, - forward_attn=False, - trans_agent=False, - forward_attn_mask=False, - location_attn=True, - attn_K=5, - separate_stopnet=True, - bidirectional_decoder=False, - double_decoder_consistency=False, - ddc_r=None, - gst=False, - memory_size=5): - super(Tacotron, - self).__init__(num_chars, num_speakers, r, postnet_output_dim, - decoder_output_dim, attn_type, attn_win, - attn_norm, prenet_type, prenet_dropout, - forward_attn, trans_agent, forward_attn_mask, - location_attn, attn_K, separate_stopnet, - bidirectional_decoder, double_decoder_consistency, - ddc_r, gst) - decoder_in_features = 512 if num_speakers > 1 else 256 - encoder_in_features = 512 if num_speakers > 1 else 256 - speaker_embedding_dim = 256 - proj_speaker_dim = 80 if num_speakers > 1 else 0 - # base model layers - self.embedding = nn.Embedding(num_chars, 256, padding_idx=0) - self.embedding.weight.data.normal_(0, 0.3) - self.encoder = Encoder(encoder_in_features) - self.decoder = Decoder(decoder_in_features, decoder_output_dim, r, - memory_size, attn_type, attn_win, attn_norm, - prenet_type, prenet_dropout, forward_attn, - trans_agent, forward_attn_mask, location_attn, - attn_K, separate_stopnet, proj_speaker_dim) - self.postnet = PostCBHG(decoder_output_dim) - self.last_linear = nn.Linear(self.postnet.cbhg.gru_features * 2, - postnet_output_dim) - # speaker embedding layers - if num_speakers > 1: - self.speaker_embedding = nn.Embedding(num_speakers, speaker_embedding_dim) - self.speaker_embedding.weight.data.normal_(0, 0.3) - self.speaker_project_mel = nn.Sequential( - nn.Linear(speaker_embedding_dim, proj_speaker_dim), nn.Tanh()) - self.speaker_embeddings = None - 
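For the multi-speaker case configured above, the 256-dim speaker embedding is concatenated onto each encoder time step, which is why `decoder_in_features` doubles from 256 to 512. A shape-only sketch (feature sizes taken from the constructor; batch and time values hypothetical):

    import torch

    B, T = 2, 7
    encoder_out = torch.randn(B, T, 256)
    speaker_vec = torch.randn(B, 1, 256).expand(B, T, -1)   # broadcast over time
    decoder_in = torch.cat([encoder_out, speaker_vec], dim=-1)
    assert decoder_in.shape == (B, T, 512)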
self.speaker_embeddings_projected = None - # global style token layers - if self.gst: - gst_embedding_dim = 256 - self.gst_layer = GST(num_mel=80, - num_heads=4, - num_style_tokens=10, - embedding_dim=gst_embedding_dim) - # backward pass decoder - if self.bidirectional_decoder: - self._init_backward_decoder() - # setup DDC - if self.double_decoder_consistency: - self.coarse_decoder = Decoder( - decoder_in_features, decoder_output_dim, ddc_r, memory_size, - attn_type, attn_win, attn_norm, prenet_type, prenet_dropout, - forward_attn, trans_agent, forward_attn_mask, location_attn, - attn_K, separate_stopnet, proj_speaker_dim) - - - def forward(self, characters, text_lengths, mel_specs, mel_lengths=None, speaker_ids=None): - """ - Shapes: - - characters: B x T_in - - text_lengths: B - - mel_specs: B x T_out x D - - speaker_ids: B x 1 - """ - self._init_states() - input_mask, output_mask = self.compute_masks(text_lengths, mel_lengths) - # B x T_in x embed_dim - inputs = self.embedding(characters) - # B x speaker_embed_dim - if speaker_ids is not None: - self.compute_speaker_embedding(speaker_ids) - if self.num_speakers > 1: - # B x T_in x embed_dim + speaker_embed_dim - inputs = self._concat_speaker_embedding(inputs, - self.speaker_embeddings) - # B x T_in x encoder_in_features - encoder_outputs = self.encoder(inputs) - # sequence masking - encoder_outputs = encoder_outputs * input_mask.unsqueeze(2).expand_as(encoder_outputs) - # global style token - if self.gst: - # B x gst_dim - encoder_outputs = self.compute_gst(encoder_outputs, mel_specs) - if self.num_speakers > 1: - encoder_outputs = self._concat_speaker_embedding( - encoder_outputs, self.speaker_embeddings) - # decoder_outputs: B x decoder_in_features x T_out - # alignments: B x T_in x encoder_in_features - # stop_tokens: B x T_in - decoder_outputs, alignments, stop_tokens = self.decoder( - encoder_outputs, mel_specs, input_mask, - self.speaker_embeddings_projected) - # sequence masking - if output_mask is not None: - decoder_outputs = decoder_outputs * output_mask.unsqueeze(1).expand_as(decoder_outputs) - # B x T_out x decoder_in_features - postnet_outputs = self.postnet(decoder_outputs) - # sequence masking - if output_mask is not None: - postnet_outputs = postnet_outputs * output_mask.unsqueeze(2).expand_as(postnet_outputs) - # B x T_out x posnet_dim - postnet_outputs = self.last_linear(postnet_outputs) - # B x T_out x decoder_in_features - decoder_outputs = decoder_outputs.transpose(1, 2).contiguous() - if self.bidirectional_decoder: - decoder_outputs_backward, alignments_backward = self._backward_pass(mel_specs, encoder_outputs, input_mask) - return decoder_outputs, postnet_outputs, alignments, stop_tokens, decoder_outputs_backward, alignments_backward - if self.double_decoder_consistency: - decoder_outputs_backward, alignments_backward = self._coarse_decoder_pass(mel_specs, encoder_outputs, alignments, input_mask) - return decoder_outputs, postnet_outputs, alignments, stop_tokens, decoder_outputs_backward, alignments_backward - return decoder_outputs, postnet_outputs, alignments, stop_tokens - - @torch.no_grad() - def inference(self, characters, speaker_ids=None, style_mel=None): - inputs = self.embedding(characters) - self._init_states() - if speaker_ids is not None: - self.compute_speaker_embedding(speaker_ids) - if self.num_speakers > 1: - inputs = self._concat_speaker_embedding(inputs, - self.speaker_embeddings) - encoder_outputs = self.encoder(inputs) - if self.gst and style_mel is not None: - encoder_outputs = 
self.compute_gst(encoder_outputs, style_mel) - if self.num_speakers > 1: - encoder_outputs = self._concat_speaker_embedding( - encoder_outputs, self.speaker_embeddings) - decoder_outputs, alignments, stop_tokens = self.decoder.inference( - encoder_outputs, self.speaker_embeddings_projected) - postnet_outputs = self.postnet(decoder_outputs) - postnet_outputs = self.last_linear(postnet_outputs) - decoder_outputs = decoder_outputs.transpose(1, 2) - return decoder_outputs, postnet_outputs, alignments, stop_tokens diff --git a/models/tacotron2.py b/models/tacotron2.py deleted file mode 100644 index 4a22b7fa..00000000 --- a/models/tacotron2.py +++ /dev/null @@ -1,169 +0,0 @@ -import torch -from torch import nn - -from TTS.layers.gst_layers import GST -from TTS.layers.tacotron2 import Decoder, Encoder, Postnet -from TTS.models.tacotron_abstract import TacotronAbstract - - -# TODO: match function arguments with tacotron -class Tacotron2(TacotronAbstract): - def __init__(self, - num_chars, - num_speakers, - r, - postnet_output_dim=80, - decoder_output_dim=80, - attn_type='original', - attn_win=False, - attn_norm="softmax", - prenet_type="original", - prenet_dropout=True, - forward_attn=False, - trans_agent=False, - forward_attn_mask=False, - location_attn=True, - attn_K=5, - separate_stopnet=True, - bidirectional_decoder=False, - double_decoder_consistency=False, - ddc_r=None, - gst=False): - super(Tacotron2, - self).__init__(num_chars, num_speakers, r, postnet_output_dim, - decoder_output_dim, attn_type, attn_win, - attn_norm, prenet_type, prenet_dropout, - forward_attn, trans_agent, forward_attn_mask, - location_attn, attn_K, separate_stopnet, - bidirectional_decoder, double_decoder_consistency, - ddc_r, gst) - decoder_in_features = 512 if num_speakers > 1 else 512 - encoder_in_features = 512 if num_speakers > 1 else 512 - proj_speaker_dim = 80 if num_speakers > 1 else 0 - # base layers - self.embedding = nn.Embedding(num_chars, 512, padding_idx=0) - if num_speakers > 1: - self.speaker_embedding = nn.Embedding(num_speakers, 512) - self.speaker_embedding.weight.data.normal_(0, 0.3) - self.encoder = Encoder(encoder_in_features) - self.decoder = Decoder(decoder_in_features, self.decoder_output_dim, r, attn_type, attn_win, - attn_norm, prenet_type, prenet_dropout, - forward_attn, trans_agent, forward_attn_mask, - location_attn, attn_K, separate_stopnet, proj_speaker_dim) - self.postnet = Postnet(self.postnet_output_dim) - # global style token layers - if self.gst: - gst_embedding_dim = encoder_in_features - self.gst_layer = GST(num_mel=80, - num_heads=4, - num_style_tokens=10, - embedding_dim=gst_embedding_dim) - # backward pass decoder - if self.bidirectional_decoder: - self._init_backward_decoder() - # setup DDC - if self.double_decoder_consistency: - self.coarse_decoder = Decoder( - decoder_in_features, self.decoder_output_dim, ddc_r, attn_type, - attn_win, attn_norm, prenet_type, prenet_dropout, forward_attn, - trans_agent, forward_attn_mask, location_attn, attn_K, - separate_stopnet, proj_speaker_dim) - - @staticmethod - def shape_outputs(mel_outputs, mel_outputs_postnet, alignments): - mel_outputs = mel_outputs.transpose(1, 2) - mel_outputs_postnet = mel_outputs_postnet.transpose(1, 2) - return mel_outputs, mel_outputs_postnet, alignments - - def forward(self, text, text_lengths, mel_specs=None, mel_lengths=None, speaker_ids=None): - self._init_states() - # compute mask for padding - # B x T_in_max (boolean) - input_mask, output_mask = self.compute_masks(text_lengths, mel_lengths) - # B x 
D_embed x T_in_max - embedded_inputs = self.embedding(text).transpose(1, 2) - # B x T_in_max x D_en - encoder_outputs = self.encoder(embedded_inputs, text_lengths) - # adding speaker embeddding to encoder output - # TODO: multi-speaker - # B x speaker_embed_dim - if speaker_ids is not None: - self.compute_speaker_embedding(speaker_ids) - if self.num_speakers > 1: - # B x T_in x embed_dim + speaker_embed_dim - encoder_outputs = self._add_speaker_embedding(encoder_outputs, - self.speaker_embeddings) - encoder_outputs = encoder_outputs * input_mask.unsqueeze(2).expand_as(encoder_outputs) - # global style token - if self.gst: - # B x gst_dim - encoder_outputs = self.compute_gst(encoder_outputs, mel_specs) - # B x mel_dim x T_out -- B x T_out//r x T_in -- B x T_out//r - decoder_outputs, alignments, stop_tokens = self.decoder( - encoder_outputs, mel_specs, input_mask) - # sequence masking - if mel_lengths is not None: - decoder_outputs = decoder_outputs * output_mask.unsqueeze(1).expand_as(decoder_outputs) - # B x mel_dim x T_out - postnet_outputs = self.postnet(decoder_outputs) - postnet_outputs = decoder_outputs + postnet_outputs - # sequence masking - if output_mask is not None: - postnet_outputs = postnet_outputs * output_mask.unsqueeze(1).expand_as(postnet_outputs) - # B x T_out x mel_dim -- B x T_out x mel_dim -- B x T_out//r x T_in - decoder_outputs, postnet_outputs, alignments = self.shape_outputs( - decoder_outputs, postnet_outputs, alignments) - if self.bidirectional_decoder: - decoder_outputs_backward, alignments_backward = self._backward_pass(mel_specs, encoder_outputs, input_mask) - return decoder_outputs, postnet_outputs, alignments, stop_tokens, decoder_outputs_backward, alignments_backward - if self.double_decoder_consistency: - decoder_outputs_backward, alignments_backward = self._coarse_decoder_pass(mel_specs, encoder_outputs, alignments, input_mask) - return decoder_outputs, postnet_outputs, alignments, stop_tokens, decoder_outputs_backward, alignments_backward - return decoder_outputs, postnet_outputs, alignments, stop_tokens - - @torch.no_grad() - def inference(self, text, speaker_ids=None): - embedded_inputs = self.embedding(text).transpose(1, 2) - encoder_outputs = self.encoder.inference(embedded_inputs) - if speaker_ids is not None: - self.compute_speaker_embedding(speaker_ids) - if self.num_speakers > 1: - encoder_outputs = self._add_speaker_embedding(encoder_outputs, - self.speaker_embeddings) - decoder_outputs, alignments, stop_tokens = self.decoder.inference( - encoder_outputs) - postnet_outputs = self.postnet(decoder_outputs) - postnet_outputs = decoder_outputs + postnet_outputs - decoder_outputs, postnet_outputs, alignments = self.shape_outputs( - decoder_outputs, postnet_outputs, alignments) - return decoder_outputs, postnet_outputs, alignments, stop_tokens - - def inference_truncated(self, text, speaker_ids=None): - """ - Preserve model states for continuous inference - """ - embedded_inputs = self.embedding(text).transpose(1, 2) - encoder_outputs = self.encoder.inference_truncated(embedded_inputs) - encoder_outputs = self._add_speaker_embedding(encoder_outputs, - speaker_ids) - mel_outputs, alignments, stop_tokens = self.decoder.inference_truncated( - encoder_outputs) - mel_outputs_postnet = self.postnet(mel_outputs) - mel_outputs_postnet = mel_outputs + mel_outputs_postnet - mel_outputs, mel_outputs_postnet, alignments = self.shape_outputs( - mel_outputs, mel_outputs_postnet, alignments) - return mel_outputs, mel_outputs_postnet, alignments, stop_tokens - - - 
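Unlike the CBHG-based Tacotron, the Tacotron2 forward pass above adds the speaker vector to the encoder output instead of concatenating it (see `_add_speaker_embedding` in `TacotronAbstract` further down), so the encoder width stays at 512. A minimal shape check with hypothetical batch and time sizes:

    import torch

    B, T, D = 2, 7, 512
    encoder_out = torch.randn(B, T, D)
    speaker_vec = torch.randn(B, 1, D)                # one vector per utterance
    out = encoder_out + speaker_vec.expand(B, T, -1)  # same broadcast as _add_speaker_embedding
    assert out.shape == (B, T, D)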
def _speaker_embedding_pass(self, encoder_outputs, speaker_ids): - # TODO: multi-speaker - # if hasattr(self, "speaker_embedding") and speaker_ids is None: - # raise RuntimeError(" [!] Model has speaker embedding layer but speaker_id is not provided") - # if hasattr(self, "speaker_embedding") and speaker_ids is not None: - - # speaker_embeddings = speaker_embeddings.expand(encoder_outputs.size(0), - # encoder_outputs.size(1), - # -1) - # encoder_outputs = encoder_outputs + speaker_embeddings - # return encoder_outputs - pass diff --git a/models/tacotron_abstract.py b/models/tacotron_abstract.py deleted file mode 100644 index 75a1a5cd..00000000 --- a/models/tacotron_abstract.py +++ /dev/null @@ -1,180 +0,0 @@ -import copy -from abc import ABC, abstractmethod - -import torch -from torch import nn - -from TTS.utils.generic_utils import sequence_mask - - -class TacotronAbstract(ABC, nn.Module): - def __init__(self, - num_chars, - num_speakers, - r, - postnet_output_dim=80, - decoder_output_dim=80, - attn_type='original', - attn_win=False, - attn_norm="softmax", - prenet_type="original", - prenet_dropout=True, - forward_attn=False, - trans_agent=False, - forward_attn_mask=False, - location_attn=True, - attn_K=5, - separate_stopnet=True, - bidirectional_decoder=False, - double_decoder_consistency=False, - ddc_r=None, - gst=False): - """ Abstract Tacotron class """ - super().__init__() - self.num_chars = num_chars - self.r = r - self.decoder_output_dim = decoder_output_dim - self.postnet_output_dim = postnet_output_dim - self.gst = gst - self.num_speakers = num_speakers - self.bidirectional_decoder = bidirectional_decoder - self.double_decoder_consistency = double_decoder_consistency - self.ddc_r = ddc_r - self.attn_type = attn_type - self.attn_win = attn_win - self.attn_norm = attn_norm - self.prenet_type = prenet_type - self.prenet_dropout = prenet_dropout - self.forward_attn = forward_attn - self.trans_agent = trans_agent - self.forward_attn_mask = forward_attn_mask - self.location_attn = location_attn - self.attn_K = attn_K - self.separate_stopnet = separate_stopnet - - # layers - self.embedding = None - self.encoder = None - self.decoder = None - self.postnet = None - - # global style token - if self.gst: - self.gst_layer = None - - # model states - self.speaker_embeddings = None - self.speaker_embeddings_projected = None - - # additional layers - self.decoder_backward = None - self.coarse_decoder = None - - ############################# - # INIT FUNCTIONS - ############################# - - def _init_states(self): - self.speaker_embeddings = None - self.speaker_embeddings_projected = None - - def _init_backward_decoder(self): - self.decoder_backward = copy.deepcopy(self.decoder) - - def _init_coarse_decoder(self): - self.coarse_decoder = copy.deepcopy(self.decoder) - self.coarse_decoder.r_init = self.ddc_r - self.coarse_decoder.set_r(self.ddc_r) - - ############################# - # CORE FUNCTIONS - ############################# - - @abstractmethod - def forward(self): - pass - - @abstractmethod - def inference(self): - pass - - ############################# - # COMMON COMPUTE FUNCTIONS - ############################# - - def compute_masks(self, text_lengths, mel_lengths): - """Compute masks against sequence paddings.""" - # B x T_in_max (boolean) - device = text_lengths.device - input_mask = sequence_mask(text_lengths).to(device) - output_mask = None - if mel_lengths is not None: - max_len = mel_lengths.max() - r = self.decoder.r - max_len = max_len + (r - (max_len % r)) if max_len % r > 0 
else max_len - output_mask = sequence_mask(mel_lengths, max_len=max_len).to(device) - return input_mask, output_mask - - def _backward_pass(self, mel_specs, encoder_outputs, mask): - """ Run backwards decoder """ - decoder_outputs_b, alignments_b, _ = self.decoder_backward( - encoder_outputs, torch.flip(mel_specs, dims=(1,)), mask, - self.speaker_embeddings_projected) - decoder_outputs_b = decoder_outputs_b.transpose(1, 2).contiguous() - return decoder_outputs_b, alignments_b - - def _coarse_decoder_pass(self, mel_specs, encoder_outputs, alignments, - input_mask): - """ Double Decoder Consistency """ - T = mel_specs.shape[1] - if T % self.coarse_decoder.r > 0: - padding_size = self.coarse_decoder.r - (T % self.coarse_decoder.r) - mel_specs = torch.nn.functional.pad(mel_specs, - (0, 0, 0, padding_size, 0, 0)) - decoder_outputs_backward, alignments_backward, _ = self.coarse_decoder( - encoder_outputs.detach(), mel_specs, input_mask) - # scale_factor = self.decoder.r_init / self.decoder.r - alignments_backward = torch.nn.functional.interpolate( - alignments_backward.transpose(1, 2), - size=alignments.shape[1], - mode='nearest').transpose(1, 2) - decoder_outputs_backward = decoder_outputs_backward.transpose(1, 2) - decoder_outputs_backward = decoder_outputs_backward[:, :T, :] - return decoder_outputs_backward, alignments_backward - - ############################# - # EMBEDDING FUNCTIONS - ############################# - - def compute_speaker_embedding(self, speaker_ids): - """ Compute speaker embedding vectors """ - if hasattr(self, "speaker_embedding") and speaker_ids is None: - raise RuntimeError( - " [!] Model has speaker embedding layer but speaker_id is not provided" - ) - if hasattr(self, "speaker_embedding") and speaker_ids is not None: - self.speaker_embeddings = self.speaker_embedding(speaker_ids).unsqueeze(1) - if hasattr(self, "speaker_project_mel") and speaker_ids is not None: - self.speaker_embeddings_projected = self.speaker_project_mel( - self.speaker_embeddings).squeeze(1) - - def compute_gst(self, inputs, mel_specs): - """ Compute global style token """ - # pylint: disable=not-callable - gst_outputs = self.gst_layer(mel_specs) - inputs = self._add_speaker_embedding(inputs, gst_outputs) - return inputs - - @staticmethod - def _add_speaker_embedding(outputs, speaker_embeddings): - speaker_embeddings_ = speaker_embeddings.expand( - outputs.size(0), outputs.size(1), -1) - outputs = outputs + speaker_embeddings_ - return outputs - - @staticmethod - def _concat_speaker_embedding(outputs, speaker_embeddings): - speaker_embeddings_ = speaker_embeddings.expand( - outputs.size(0), outputs.size(1), -1) - outputs = torch.cat([outputs, speaker_embeddings_], dim=-1) - return outputs diff --git a/notebooks/CheckSpectrograms.ipynb b/notebooks/CheckSpectrograms.ipynb index 66c3c7cc..dbb7a1be 100644 --- a/notebooks/CheckSpectrograms.ipynb +++ b/notebooks/CheckSpectrograms.ipynb @@ -16,9 +16,9 @@ "outputs": [], "source": [ "%matplotlib inline\n", - "from TTS.utils.audio import AudioProcessor\n", - "from TTS.utils.visual import plot_spectrogram\n", - "from TTS.utils.generic_utils import load_config\n", + "from TTS.tts.utils.audio import AudioProcessor\n", + "from TTS.tts.utils.visual import plot_spectrogram\n", + "from TTS.tts.utils.generic_utils import load_config\n", "import glob \n", "import IPython.display as ipd" ] diff --git a/notebooks/ExtractTTSpectrogram.ipynb b/notebooks/ExtractTTSpectrogram.ipynb index c747c764..b28489e0 100644 --- a/notebooks/ExtractTTSpectrogram.ipynb +++ 
b/notebooks/ExtractTTSpectrogram.ipynb @@ -22,12 +22,12 @@ "import numpy as np\n", "from tqdm import tqdm as tqdm\n", "from torch.utils.data import DataLoader\n", - "from TTS.datasets.TTSDataset import MyDataset\n", - "from TTS.layers.losses import L1LossMasked\n", - "from TTS.utils.audio import AudioProcessor\n", - "from TTS.utils.visual import plot_spectrogram\n", - "from TTS.utils.generic_utils import load_config, setup_model, sequence_mask\n", - "from TTS.utils.text.symbols import make_symbols, symbols, phonemes\n", + "from TTS.tts.datasets.TTSDataset import MyDataset\n", + "from TTS.tts.layers.losses import L1LossMasked\n", + "from TTS.tts.utils.audio import AudioProcessor\n", + "from TTS.tts.utils.visual import plot_spectrogram\n", + "from TTS.tts.utils.generic_utils import load_config, setup_model, sequence_mask\n", + "from TTS.tts.utils.text.symbols import make_symbols, symbols, phonemes\n", "\n", "%matplotlib inline\n", "\n", @@ -108,7 +108,7 @@ "metadata": {}, "outputs": [], "source": [ - "preprocessor = importlib.import_module('TTS.datasets.preprocess')\n", + "preprocessor = importlib.import_module('TTS.tts.datasets.preprocess')\n", "preprocessor = getattr(preprocessor, DATASET.lower())\n", "meta_data = preprocessor(DATA_PATH,METADATA_FILE)\n", "dataset = MyDataset(checkpoint['r'], C.text_cleaner, False, ap, meta_data,tp=C.characters if 'characters' in C.keys() else None, use_phonemes=C.use_phonemes, phoneme_cache_path=C.phoneme_cache_path, enable_eos_bos=C.enable_eos_bos_chars)\n", diff --git a/notebooks/TestAttention.ipynb b/notebooks/TestAttention.ipynb index 92b1d6c4..51413099 100644 --- a/notebooks/TestAttention.ipynb +++ b/notebooks/TestAttention.ipynb @@ -36,14 +36,14 @@ "import librosa\n", "import librosa.display\n", "\n", - "from TTS.layers import *\n", - "from TTS.utils.audio import AudioProcessor\n", - "from TTS.utils.generic_utils import setup_model\n", - "from TTS.utils.io import load_config\n", - "from TTS.utils.text import text_to_sequence\n", - "from TTS.utils.synthesis import synthesis\n", - "from TTS.utils.visual import plot_alignment\n", - "from TTS.utils.measures import alignment_diagonal_score\n", + "from TTS.tts.layers import *\n", + "from TTS.tts.utils.audio import AudioProcessor\n", + "from TTS.tts.utils.generic_utils import setup_model\n", + "from TTS.tts.utils.io import load_config\n", + "from TTS.tts.utils.text import text_to_sequence\n", + "from TTS.tts.utils.synthesis import synthesis\n", + "from TTS.tts.utils.visual import plot_alignment\n", + "from TTS.tts.utils.measures import alignment_diagonal_score\n", "\n", "import IPython\n", "from IPython.display import Audio\n", @@ -96,7 +96,7 @@ "outputs": [], "source": [ "# LOAD TTS MODEL\n", - "from TTS.utils.text.symbols import make_symbols, symbols, phonemes\n", + "from TTS.tts.utils.text.symbols import make_symbols, symbols, phonemes\n", "\n", "# multi speaker \n", "if CONFIG.use_speaker_embedding:\n", diff --git a/notebooks/dataset_analysis/AnalyzeDataset-Copy1.ipynb b/notebooks/dataset_analysis/AnalyzeDataset-Copy1.ipynb new file mode 100644 index 00000000..390b20e2 --- /dev/null +++ b/notebooks/dataset_analysis/AnalyzeDataset-Copy1.ipynb @@ -0,0 +1,3406 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "TTS_PATH = \"/home/erogol/projects/\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "import os\n", + "import 
sys\n", + "sys.path.append(TTS_PATH) # set this if TTS is not installed globally\n", + "import glob\n", + "import librosa\n", + "import numpy as np\n", + "import pandas as pd\n", + "from scipy.stats import norm\n", + "from tqdm import tqdm_notebook as tqdm\n", + "from multiprocessing import Pool\n", + "from matplotlib import pylab as plt\n", + "from collections import Counter\n", + "from TTS.tts.datasets.preprocess import *\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "DATA_PATH = \"/home/erogol/Data/Spectie/audio/output/\"\n", + "META_DATA = \"metadata.txt\"\n", + "NUM_PROC = 8" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "ename": "FileNotFoundError", + "evalue": "[Errno 2] No such file or directory: '/home/erogol/Data/Spectie/audio/output/metadata.txt'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# use your own preprocessor at this stage - TTS/datasets/proprocess.py\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mitems\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmozilla_de\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mDATA_PATH\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mMETA_DATA\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\" > Number of audio files: {}\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/Projects/TTS/tts_namespace/TTS/datasets/preprocess.py\u001b[0m in \u001b[0;36mmozilla_de\u001b[0;34m(root_path, meta_file)\u001b[0m\n\u001b[1;32m 81\u001b[0m \u001b[0mitems\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 82\u001b[0m \u001b[0mspeaker_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"mozilla\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 83\u001b[0;31m \u001b[0;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtxt_file\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'r'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mencoding\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"ISO 8859-1\"\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mttf\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 84\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mline\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mttf\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 85\u001b[0m \u001b[0mcols\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mline\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'|'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '/home/erogol/Data/Spectie/audio/output/metadata.txt'" + ] + } + ], + "source": [ + "# use your own preprocessor at this stage - TTS/datasets/proprocess.py\n", + "items = mozilla_de(DATA_PATH, META_DATA)\n", + "print(\" > Number of audio files: {}\".format(len(items)))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "# check wavs if exist\n", + "wav_files = []\n", + "for item in items:\n", + " wav_file = item[1].strip()\n", + " wav_files.append(wav_file)\n", + " if not os.path.exists(wav_file):\n", + " print(wav_file)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_2_FINAL/2_119.wav']\n" + ] + } + ], + "source": [ + "# show duplicate items\n", + "c = Counter(wav_files)\n", + "duplicates = [item for item, count in c.items() if count > 1]\n", + "print(duplicates)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "folders = [w.split('/')[5] for w in wav_files]" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'BATCH_10_FINAL',\n", + " 'BATCH_11_FINAL',\n", + " 'BATCH_12_FINAL',\n", + " 'BATCH_13_FINAL',\n", + " 'BATCH_14_FINAL',\n", + " 'BATCH_15_FINAL',\n", + " 'BATCH_16_FINAL',\n", + " 'BATCH_17_FINAL',\n", + " 'BATCH_18_FINAL',\n", + " 'BATCH_19_FINAL',\n", + " 'BATCH_1_FINAL',\n", + " 'BATCH_20_FINAL',\n", + " 'BATCH_2_FINAL',\n", + " 'BATCH_3_FINAL',\n", + " 'BATCH_4_FINAL',\n", + " 'BATCH_5_FINAL',\n", + " 'BATCH_6_FINAL',\n", + " 'BATCH_7_FINAL',\n", + " 'BATCH_8_FINAL',\n", + " 'BATCH_9_FINAL'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "set(folders)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/erogol/miniconda3/lib/python3.7/site-packages/ipykernel_launcher.py:18: TqdmDeprecationWarning: This function will be removed in tqdm==5.0.0\n", + "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "afbb94c274fe4913b256a8756584c0f6", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, max=14610.0), HTML(value='')))" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "def load_item(item):\n", + " file_name = item[1].strip()\n", + " text = item[0].strip()\n", + " audio = librosa.load(file_name, sr=None)\n", + " sr = audio[1]\n", + " audio = audio[0]\n", + " audio_len = len(audio) / sr\n", + " text_len = len(text)\n", + " return file_name, text, text_len, audio, audio_len\n", + "\n", + "# This will take a 
while depending on size of dataset\n", + "if NUM_PROC == 1:\n", + " data = []\n", + " for m in tqdm(items):\n", + " data += [load_item(m)]\n", + "else:\n", + " with Pool(8) as p:\n", + " data = list(tqdm(p.imap(load_item, items), total=len(items)))" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/erogol/miniconda3/lib/python3.7/site-packages/ipykernel_launcher.py:3: TqdmDeprecationWarning: This function will be removed in tqdm==5.0.0\n", + "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`\n", + " This is separate from the ipykernel package so we can avoid doing imports until\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "53b7f6adb4db47279927ec064addb3c7", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, max=14610.0), HTML(value='')))" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " > Number of words: 27102\n" + ] + } + ], + "source": [ + "# count words in the dataset\n", + "w_count = Counter()\n", + "for item in tqdm(data):\n", + " text = item[1].lower().strip()\n", + " for word in text.split():\n", + " w_count[word] += 1\n", + "print(\" > Number of words: {}\".format(len(w_count)))" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/erogol/miniconda3/lib/python3.7/site-packages/ipykernel_launcher.py:3: TqdmDeprecationWarning: This function will be removed in tqdm==5.0.0\n", + "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`\n", + " This is separate from the ipykernel package so we can avoid doing imports until\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "8b48c3415e2a4ac1a174502c2308501d", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, max=14610.0), HTML(value='')))" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "text_vs_durs = {} # text length vs audio duration\n", + "text_len_counter = Counter() # number of sentences with the keyed length\n", + "for item in tqdm(data):\n", + " text = item[1].lower().strip()\n", + " text_len = len(text)\n", + " text_len_counter[text_len] += 1\n", + " audio_len = item[-1]\n", + " try:\n", + " text_vs_durs[text_len] += [audio_len]\n", + " except:\n", + " text_vs_durs[text_len] = [audio_len]" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "# text_len vs avg_audio_len, median_audio_len, std_audio_len\n", + "text_vs_avg = {}\n", + "text_vs_median = {}\n", + "text_vs_std = {}\n", + "for key, durs in text_vs_durs.items():\n", + " text_vs_avg[key] = np.mean(durs)\n", + " text_vs_median[key] = np.median(durs)\n", + " text_vs_std[key] = np.std(durs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "Collapsed": "false" + }, + "source": [ + "### Avg audio length per char" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "Collapsed": "false", + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + 
"output_type": "stream", + "text": [ + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_7.wav', 'Schickes Heimkino!', 18, array([1.28518932e-05, 1.68334354e-05, 1.03571265e-05, ...,\n", + " 2.77877753e-05, 1.10460878e-05, 2.05760971e-05], dtype=float32), 1.5862083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_12.wav', 'Das sieht ihm ähnlich.', 23, array([7.6380376e-05, 9.3327515e-05, 6.1386294e-05, ..., 3.4380835e-05,\n", + " 2.6692895e-05, 2.2882025e-06], dtype=float32), 1.6567083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_20.wav', 'Oh, das Programm ist mir neu.', 29, array([-3.6327918e-05, -5.8332487e-05, -5.0294046e-05, ...,\n", + " -3.2606560e-05, -5.3037817e-05, -3.6754736e-05], dtype=float32), 1.8241458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_21.wav', 'Niemand ist ein Alleskönner.', 29, array([2.5469655e-05, 1.5675920e-05, 2.6378759e-05, ..., 3.4840865e-05,\n", + " 3.4687979e-05, 2.3448023e-05], dtype=float32), 1.9034583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_25.wav', 'Dagegen ist kein Kraut gewachsen.', 33, array([8.6409571e-05, 1.6211446e-04, 1.2149933e-04, ..., 1.4264301e-05,\n", + " 2.6473885e-05, 4.1174495e-05], dtype=float32), 1.91225)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_39.wav', 'Seid gegrüÃ\\x9ft!', 15, array([-4.95165441e-05, -9.18527076e-05, -1.06668835e-04, ...,\n", + " -4.00948884e-05, -6.23805026e-05, -4.42093369e-05], dtype=float32), 1.1808541666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_43.wav', 'Nicht mit dem FuÃ\\x9f!', 19, array([-2.4153460e-05, -9.5195399e-05, -1.8093537e-04, ...,\n", + " 2.0667248e-05, 2.7399163e-05, 5.0344559e-05], dtype=float32), 1.4363958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_44.wav', 'Wissen ist Macht.', 17, array([-1.9221216e-05, -2.1811753e-05, -4.0165878e-06, ...,\n", + " -5.0537183e-06, -1.3825783e-05, -2.8384518e-05], dtype=float32), 1.8329583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_45.wav', 'Guck mal, ein Eichhörnchen!', 28, array([-8.8387278e-05, -7.1484370e-05, -9.1183894e-05, ...,\n", + " -2.6602589e-05, 1.1369466e-05, -1.4236821e-06], dtype=float32), 1.5245208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_58.wav', 'Ich bin mein eigener Hund.', 26, array([-1.3441265e-05, -1.3771249e-05, 2.1415319e-06, ...,\n", + " -2.9998329e-05, 6.4692267e-06, 1.6420488e-05], dtype=float32), 1.91225)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_68.wav', 'Lach ich, oder was?', 19, array([1.20631594e-04, 2.69133277e-04, 3.61918297e-04, ...,\n", + " 2.52288628e-05, 1.12787602e-05, 2.01150815e-05], dtype=float32), 1.7272083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_76.wav', 'Moment mal, das ist neu.', 24, array([-4.0444505e-05, -5.6087447e-05, -7.0869857e-05, ...,\n", + " -5.9735464e-07, 1.4513580e-05, 1.7241922e-05], dtype=float32), 1.6743333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_79.wav', 'Wie lange zieht der Tee schon?', 30, array([ 1.3359761e-05, 1.4845427e-06, -8.4266394e-06, ...,\n", + " 8.4090761e-06, 5.6682808e-07, 1.4266146e-06], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_95.wav', 'Schlaf gut!', 11, array([-8.3705861e-05, -1.3769916e-04, -1.0772650e-04, ...,\n", + " -1.2876300e-05, -3.5042558e-05, -1.5538299e-05], 
dtype=float32), 1.0839166666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_97.wav', 'Entschuldigen Sie die Verwechslung!', 35, array([-4.3585667e-05, -4.9360351e-05, -2.4610319e-05, ...,\n", + " -1.4282005e-05, -7.0760620e-07, -2.8634834e-06], dtype=float32), 1.9210833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_99.wav', 'Schönes Ding!', 14, array([-4.9598326e-05, -4.2029962e-05, -2.2566113e-05, ...,\n", + " 7.5142352e-06, -3.1275456e-05, -1.8421564e-05], dtype=float32), 0.9252916666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_101.wav', 'Dann nichts wie weg hier!', 25, array([ 1.2582598e-05, 1.4227808e-05, 1.0588883e-05, ...,\n", + " 1.8725707e-07, -4.0784824e-05, -7.0644560e-06], dtype=float32), 1.7095833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_120.wav', \"Wie geht's?\", 11, array([ 3.6131805e-05, 2.3445213e-05, 4.7948160e-05, ...,\n", + " -3.3656095e-05, -4.0791183e-05, -4.5296023e-05], dtype=float32), 0.9341041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_179.wav', 'Das ganze Haus hat gewackelt.', 29, array([ 1.31893430e-05, -2.02163919e-05, -5.92077959e-06, ...,\n", + " -8.03239527e-06, -1.91841791e-05, -1.46886205e-05], dtype=float32), 1.9034583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_186.wav', 'Woher kommt all der Hass?', 25, array([-1.0393358e-05, -4.2540119e-05, -1.8952907e-05, ...,\n", + " 1.9931360e-05, 2.8833035e-06, 2.6874868e-06], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_189.wav', 'Stillgestanden!', 15, array([ 4.4343769e-06, 1.3210945e-05, 1.7683087e-05, ...,\n", + " 2.6131744e-05, -5.4923967e-06, 9.4311863e-06], dtype=float32), 1.2689791666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_199.wav', 'Eine Sache zur Zeit.', 20, array([5.1501018e-05, 6.3279913e-05, 7.3763011e-05, ..., 1.0348874e-05,\n", + " 1.0562905e-05, 3.0424892e-05], dtype=float32), 1.4804583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_218.wav', 'Nichts für ungut!', 18, array([-4.0355466e-05, -4.5107645e-05, -7.7510209e-05, ...,\n", + " -2.0305148e-05, -3.0419576e-05, -1.7718892e-05], dtype=float32), 1.2337291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_220.wav', 'Sieh genau hin!', 15, array([-1.2045763e-02, -1.6849384e-02, -1.4799301e-02, ...,\n", + " 1.6059141e-06, -1.4713467e-05, 1.0609662e-05], dtype=float32), 1.3042291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_224.wav', 'Und welches Baujahr?', 20, array([-3.5566740e-05, -2.3342436e-05, -2.8526230e-05, ...,\n", + " 3.1306794e-05, 3.2872085e-05, 2.9171426e-05], dtype=float32), 1.6743333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_226.wav', 'Sofort umkehren!', 16, array([ 1.2734158e-04, 1.4998924e-04, 1.2418727e-04, ...,\n", + " -6.3872926e-06, -5.1714401e-06, -1.2052229e-05], dtype=float32), 1.3923541666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_232.wav', 'Da muss man locker bleiben.', 27, array([-3.2585725e-05, -3.3840271e-05, 1.3126293e-05, ...,\n", + " -1.8632261e-05, -6.3017387e-06, -5.6675367e-06], dtype=float32), 1.6567083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_237.wav', 'Probier es mal mit Aceton.', 26, array([ 7.5771743e-05, 1.0223542e-04, 1.0343192e-04, ...,\n", + " -2.1570906e-05, -3.1918564e-05, 
-1.1135696e-05], dtype=float32), 1.8858125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_244.wav', 'Kommt drauf an.', 15, array([ 2.7207607e-05, 1.8057373e-05, 1.2512723e-05, ...,\n", + " -6.0103289e-06, -2.1828011e-05, -8.1472344e-06], dtype=float32), 1.3571041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_246.wav', 'Man darf gespannt sein.', 23, array([-2.3668355e-03, -3.7321844e-03, -3.6732492e-03, ...,\n", + " 1.7768043e-06, 2.0778492e-05, 5.1516781e-06], dtype=float32), 1.5685833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_251.wav', 'Daran scheiden sich die Geister.', 32, array([-2.39492147e-05, -4.70898958e-05, -2.53186899e-05, ...,\n", + " -4.88899059e-06, -1.34801885e-05, 1.04552892e-05], dtype=float32), 1.8153333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_258.wav', 'Was habt ihr heute erlebt?', 26, array([ 3.5868085e-05, 8.2530729e-05, 4.6677309e-05, ...,\n", + " -8.4167405e-06, -2.0942105e-05, -6.2113932e-06], dtype=float32), 1.7888958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_265.wav', 'Lass das sein!', 14, array([2.4356419e-05, 5.5347311e-05, 5.1189338e-05, ..., 2.7182332e-05,\n", + " 1.6106302e-05, 2.1714099e-05], dtype=float32), 1.2425208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_267.wav', 'Auch heute noch.', 16, array([ 1.6202603e-05, 1.8275598e-05, 1.5345126e-05, ...,\n", + " -9.9319268e-06, -1.4463866e-05, 7.9376441e-06], dtype=float32), 1.4363958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_270.wav', 'Wir sehen uns in Bielefeld.', 27, array([5.0975410e-05, 4.6619494e-05, 5.2299667e-05, ..., 2.4641362e-05,\n", + " 2.0409352e-05, 1.7508868e-05], dtype=float32), 1.8065208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_274.wav', 'Gerald muss Dampf ablassen.', 27, array([-1.4112990e-04, -2.2197423e-04, -2.2060136e-04, ...,\n", + " -4.0291343e-05, -3.2744192e-05, -1.7507429e-05], dtype=float32), 1.7712708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_277.wav', 'Sehen Sie selbst!', 17, array([-3.6524234e-05, -2.8097162e-05, 4.4066533e-06, ...,\n", + " 2.1528131e-06, -1.2273627e-05, -8.5409883e-06], dtype=float32), 1.4275833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_282.wav', 'Haben wir jemanden vergessen?', 29, array([-2.1900923e-05, -8.0311016e-05, -4.5058856e-05, ...,\n", + " 8.6369282e-06, 2.3358027e-05, 1.4141980e-05], dtype=float32), 1.6919583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_300.wav', 'Oh, der Besuch ist da!', 22, array([-1.1763951e-06, -6.4509544e-07, -2.1343028e-05, ...,\n", + " 8.3751611e-06, -2.0755753e-05, -3.9365756e-07], dtype=float32), 1.5157083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_303.wav', 'Kannst du das bitte übernehmen?', 32, array([1.9790201e-05, 2.5795589e-05, 2.3016226e-05, ..., 4.4700668e-05,\n", + " 2.9440445e-05, 4.1151830e-05], dtype=float32), 1.965125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_309.wav', 'Ich muss verrückt sein.', 24, array([-3.7773843e-05, -2.5238944e-05, -4.5549310e-05, ...,\n", + " -1.4228171e-05, -1.3738420e-05, -2.5079733e-05], dtype=float32), 1.4099583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_312.wav', 'Gestrichen!', 11, array([4.6765574e-05, 8.2428312e-05, 6.1315681e-05, ..., 1.7959255e-06,\n", + " 5.7119927e-08, 
3.7900886e-06], dtype=float32), 0.9693541666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_321.wav', 'Gott atmet nicht.', 17, array([3.9337472e-05, 4.7041980e-05, 5.6819965e-05, ..., 1.6601467e-05,\n", + " 1.5404070e-05, 3.0179035e-05], dtype=float32), 1.6831458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_327.wav', 'Das ist mir auch klar.', 22, array([ 6.4578126e-05, 9.0902526e-05, 7.7864941e-05, ...,\n", + " -1.0411938e-05, -3.7324537e-06, 1.4365208e-05], dtype=float32), 1.5421458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_329.wav', 'Es sieht nach Unsinn aus.', 25, array([ 1.1480927e-06, 7.0667493e-06, -3.8140864e-05, ...,\n", + " 5.6332779e-06, 3.7668069e-05, 7.3043757e-06], dtype=float32), 1.9827708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_333.wav', 'Das ist nur von auÃ\\x9fen.', 23, array([-3.8521201e-05, -4.7468315e-05, -3.4236415e-05, ...,\n", + " 5.2493826e-05, 3.7984686e-05, 3.3584591e-05], dtype=float32), 1.9915625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_334.wav', 'Ich habe gerade ein DéjÃ\\xa0-vu.', 30, array([ 4.4728897e-04, 3.7400136e-04, -4.0894563e-04, ...,\n", + " 2.4757979e-05, 1.1479871e-05, 2.5551706e-05], dtype=float32), 1.9387083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_336.wav', 'Ich muss mich verzählt haben.', 30, array([-3.9173494e-05, -2.9986420e-05, -1.9012801e-05, ...,\n", + " -6.0724019e-06, 2.7600961e-05, -3.4350986e-05], dtype=float32), 1.6831458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_342.wav', 'So kann man sich täuschen.', 27, array([-3.5296402e-05, -6.0332448e-05, -5.2051670e-05, ...,\n", + " -1.2274999e-05, -6.2373409e-05, 1.2240975e-05], dtype=float32), 1.5068958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_345.wav', 'Ich weiÃ\\x9f nicht woher.', 22, array([-2.05518299e-05, -1.30783865e-05, -1.48754107e-05, ...,\n", + " -5.49699544e-05, -3.01012133e-05, -1.70801268e-05], dtype=float32), 1.4980833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_352.wav', 'Bist du jetzt beleidigt?', 24, array([-1.0385954e-05, 1.1672010e-05, -2.3844843e-05, ...,\n", + " 6.0053999e-06, -2.3204884e-05, -9.7573111e-06], dtype=float32), 1.9298958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_357.wav', 'Gib mir zwei Minuten, ja?', 25, array([-1.8705783e-05, -3.0273133e-05, -2.4814160e-05, ...,\n", + " 1.4705538e-05, 9.7520942e-06, 1.7873571e-06], dtype=float32), 1.8065208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_360.wav', 'Voll der Psycho-Blick!', 22, array([ 5.0691519e-06, 1.2665058e-05, 1.4902340e-06, ...,\n", + " 9.9865492e-06, -2.0948526e-05, -1.1750392e-05], dtype=float32), 1.4980833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_365.wav', 'Mein Freund ist Musiker.', 24, array([ 4.2413834e-05, 2.3999601e-05, 1.0646096e-05, ...,\n", + " -1.9632445e-05, -2.5183452e-05, -1.8877656e-05], dtype=float32), 1.7272083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_386.wav', 'Hast du Knoblauch gegessen?', 27, array([ 4.2124993e-06, 1.6061234e-05, 1.6008022e-05, ...,\n", + " 4.7057729e-05, -5.8230005e-05, -6.6850065e-05], dtype=float32), 1.7977083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_391.wav', 'Ist mir gar nicht aufgefallen.', 30, array([-1.2801524e-04, -1.8332504e-04, 
-1.6864720e-04, ...,\n", + " -1.7935792e-05, 1.3743926e-05, 4.5144670e-06], dtype=float32), 1.6390833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_396.wav', 'Verdammt noch mal!', 18, array([-1.9188805e-05, 2.9282862e-06, 3.1274089e-06, ...,\n", + " 3.8011989e-05, 4.4447512e-05, 3.0465781e-05], dtype=float32), 1.3218541666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_403.wav', 'Klingt moralisch einwandfrei.', 29, array([-1.5154625e-06, -1.1907745e-05, -3.7140951e-06, ...,\n", + " 1.4816231e-06, -1.0694354e-05, -2.7909247e-05], dtype=float32), 1.8770208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_412.wav', 'Wie wunderschön du bist.', 25, array([ 8.1452117e-06, 1.2316134e-05, 1.2410718e-05, ...,\n", + " -2.5919973e-05, -1.5394140e-05, -1.6787388e-05], dtype=float32), 1.7800833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_419.wav', 'Ich kann nichts erkennen.', 25, array([-2.1261691e-05, -2.6662590e-05, -3.2895186e-05, ...,\n", + " -8.6166056e-06, 1.0871788e-06, -5.8716050e-06], dtype=float32), 1.4363958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_423.wav', 'Jetzt aber zackig!', 18, array([ 2.4374567e-06, 2.0842881e-05, -1.5250983e-05, ...,\n", + " -1.6002667e-05, -4.2002972e-05, -2.0723968e-05], dtype=float32), 1.2953958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_425.wav', 'Ich bin schon ganz wirr im Kopf.', 32, array([2.9025901e-05, 3.5920395e-05, 4.5607205e-05, ..., 1.6718976e-05,\n", + " 2.1111184e-05, 3.3797973e-05], dtype=float32), 1.98275)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_430.wav', 'Ihr gefällt die Kulisse.', 25, array([ 2.0069625e-05, 6.2984320e-05, 4.6121866e-05, ...,\n", + " -3.1357740e-05, -2.2353357e-05, -2.2545100e-05], dtype=float32), 1.6919583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_445.wav', 'GrüÃ\\x9f dich!', 12, array([-1.0602423e-05, -7.0546007e-06, 1.1231577e-05, ...,\n", + " -4.8423290e-06, -2.5039872e-05, -2.4532073e-05], dtype=float32), 0.7842916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_456.wav', 'Nach mir die Sintflut!', 22, array([ 2.0728099e-05, -9.0359263e-06, -4.4944873e-06, ...,\n", + " 6.8659042e-06, -1.2404760e-05, -2.2153192e-06], dtype=float32), 1.5862083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_460.wav', 'Was soll das denn bringen?', 26, array([ 3.9292016e-05, 5.6996982e-05, 6.4746971e-05, ...,\n", + " -3.1001658e-05, -9.7075417e-06, -1.9902369e-05], dtype=float32), 1.7888958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_461.wav', 'Er lädt immer noch.', 20, array([-1.6651324e-05, -5.8167420e-06, 5.8412393e-06, ...,\n", + " -5.8599158e-05, -5.3942535e-05, -2.6054968e-05], dtype=float32), 1.2337291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_479.wav', 'Was sollen wir nur tun?', 23, array([-4.4440752e-05, -5.3991145e-05, -4.1732972e-05, ...,\n", + " -5.2980035e-06, 1.0908753e-05, 1.9730707e-05], dtype=float32), 1.8329583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_481.wav', 'Schluss damit!', 14, array([-2.9023191e-05, -4.2109135e-05, -3.8624265e-05, ...,\n", + " -1.9805097e-05, -6.0203884e-06, 1.1789062e-05], dtype=float32), 0.9605416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_483.wav', 'Können sie mir ihr Passwort geben?', 35, array([ 
2.5537942e-05, 5.2574283e-05, 5.7736743e-05, ...,\n", + " -5.4731267e-06, -2.9014491e-05, 3.6238887e-06], dtype=float32), 1.7624583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_491.wav', 'Sie muss zum BogenschieÃ\\x9fen.', 28, array([-3.1108371e-05, -5.1357423e-05, -7.0860064e-05, ...,\n", + " -4.0438888e-05, -2.6810346e-06, -1.3582417e-05], dtype=float32), 1.9387083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_513.wav', 'Gib ihm die Schaufel wieder!', 28, array([-2.5840678e-05, -2.4174828e-05, -1.2895588e-05, ...,\n", + " 3.6998503e-05, 3.0887943e-05, 1.9229607e-05], dtype=float32), 1.7448333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_514.wav', 'Ich will mich kurzfassen.', 25, array([-5.4538796e-06, 1.6863480e-05, -2.4184583e-05, ...,\n", + " -7.9238208e-07, 9.8597202e-06, 2.5041477e-06], dtype=float32), 1.7448333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_515.wav', 'Die ist hart im Nehmen.', 23, array([ 3.2496322e-05, 3.8166479e-05, 3.2249674e-05, ...,\n", + " -1.0363748e-05, 1.9095280e-05, 9.2708688e-06], dtype=float32), 1.7360208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_516.wav', 'Oh mein Gott!', 13, array([ 1.0293347e-05, 2.3256578e-05, -2.6419082e-06, ...,\n", + " -1.2127157e-05, 1.4263560e-06, 3.2800324e-06], dtype=float32), 0.8812291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_517.wav', 'Einer noch!', 11, array([ 1.8490386e-05, 9.7866017e-05, 1.1555837e-04, ...,\n", + " -5.3282761e-08, -1.5481584e-05, 1.1070631e-06], dtype=float32), 0.7578541666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_536.wav', 'Da hat er sich verhaspelt.', 26, array([-1.2101016e-05, -4.1350278e-05, -2.5068364e-05, ...,\n", + " -9.8568984e-05, 1.2527088e-04, 2.5078503e-04], dtype=float32), 1.6390833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_538.wav', 'Kann ich mir nicht vorstellen.', 30, array([-7.1259085e-05, -6.6917557e-05, -7.5606287e-05, ...,\n", + " -1.7281625e-05, 1.9208239e-06, 9.8984492e-06], dtype=float32), 1.5950208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_541.wav', 'Kannst du sie mal anstupsen?', 28, array([-3.0119493e-06, 3.5770699e-06, 8.4955855e-06, ...,\n", + " 1.3389642e-05, 2.2122082e-05, 1.8456800e-05], dtype=float32), 1.67875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_554.wav', 'Das wird nicht billig.', 22, array([-1.2833251e-05, -2.6942225e-05, -1.1592191e-05, ...,\n", + " -1.1226616e-05, 2.4460544e-05, 4.6120007e-05], dtype=float32), 1.3570833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_555.wav', 'Ã\\x9cberall wird hier gebaut.', 26, array([ 3.0397489e-06, 1.6576083e-05, 1.7184460e-05, ...,\n", + " -4.7443868e-06, 1.7984281e-07, 1.7898132e-05], dtype=float32), 1.5950208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_556.wav', 'Was möchten Sie zu trinken?', 28, array([3.6597925e-05, 3.9522194e-05, 3.4265908e-05, ..., 4.9602304e-04,\n", + " 4.0240673e-04, 2.1699475e-04], dtype=float32), 1.7888958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_559.wav', 'Waren Sie schon einmal bei uns?', 31, array([ 2.5204083e-06, -9.7146321e-06, 1.0508998e-05, ...,\n", + " 1.6337053e-05, 4.2958636e-05, 3.6466561e-05], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_579.wav', 'Traut 
sich sonst noch jemand?', 29, array([-3.4311914e-05, -1.9934920e-05, -3.6420348e-05, ...,\n", + " -8.5477677e-06, -8.7745884e-06, -2.7311040e-05], dtype=float32), 1.9739583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_587.wav', 'Hier noch mal die Kurzform.', 27, array([ 4.8683055e-06, -9.0082349e-06, -6.4492651e-06, ...,\n", + " 1.2890940e-05, 1.4272653e-05, 9.0988487e-06], dtype=float32), 1.9475)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_594.wav', 'Haste mal nen Euro?', 19, array([-8.6395357e-06, -1.0812845e-05, -3.0906973e-05, ...,\n", + " 9.5510404e-06, 1.9230547e-05, 3.1346096e-06], dtype=float32), 1.4011458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_599.wav', 'Wie schreibt man das?', 21, array([-3.6024519e-06, -2.5525418e-05, -2.9170100e-05, ...,\n", + " -1.0803048e-05, 3.5519159e-05, 6.3340508e-06], dtype=float32), 1.6831458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_600.wav', 'Er kann es nicht mehr hören.', 29, array([-3.8066657e-05, -3.2469205e-05, -5.3206204e-05, ...,\n", + " 2.6021740e-05, -1.0833596e-06, 1.9787998e-05], dtype=float32), 1.9210833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_601.wav', 'Bleib einfach cool.', 19, array([-4.1984731e-05, -2.3916245e-05, -3.1576215e-05, ...,\n", + " -1.8820670e-05, 6.2404342e-07, -9.7557686e-06], dtype=float32), 1.7712708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_603.wav', 'Davon können Sie ausgehen.', 27, array([ 1.0824577e-05, -1.7968627e-05, -1.6179658e-05, ...,\n", + " -5.5361601e-05, -4.2508735e-05, -3.1106232e-05], dtype=float32), 1.8153333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_606.wav', 'So ist das im Leben.', 20, array([ 1.0786475e-05, -1.3495748e-05, 6.5641157e-06, ...,\n", + " -3.1349493e-05, -2.5596510e-05, -2.9100025e-05], dtype=float32), 1.6655208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_625.wav', 'Du musst anders fragen.', 23, array([ 4.8367940e-03, 6.8724523e-03, 6.1804145e-03, ...,\n", + " -7.8923513e-06, 1.7550767e-06, 7.2876783e-06], dtype=float32), 1.7360208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_628.wav', 'Es war nicht alles schlecht.', 28, array([ 1.08825125e-05, 1.04639130e-05, 8.46001694e-06, ...,\n", + " -2.05042506e-05, 7.06381434e-06, 2.37766089e-05], dtype=float32), 1.7977083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_643.wav', 'Das lasse ich mir nicht bieten!', 31, array([-8.2775728e-07, -4.0987805e-05, -1.7558119e-05, ...,\n", + " -2.1388867e-06, -4.9800960e-06, -1.3807499e-05], dtype=float32), 1.8065208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_665.wav', 'Hallo, ich bin der Neue!', 24, array([-2.4004371e-04, -3.8098267e-04, -3.8909691e-04, ...,\n", + " -3.5481004e-05, 3.5560199e-05, -1.3612277e-05], dtype=float32), 1.7800833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_667.wav', 'Fastest du?', 11, array([-6.0218765e-05, -8.1393919e-05, -8.6645297e-05, ...,\n", + " 6.8678496e-06, -8.2385115e-05, -5.4868913e-05], dtype=float32), 1.2072708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_675.wav', 'Nur um das klarzustellen.', 25, array([ 2.7598284e-05, 4.3499585e-05, -7.3542742e-06, ...,\n", + " 4.4517365e-06, -9.3571025e-06, 3.8795395e-05], dtype=float32), 1.8681875)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_699.wav', 'Jetzt wird es gemein.', 21, array([ 2.8973442e-05, 5.4584369e-05, 2.5356880e-05, ...,\n", + " 7.6631528e-05, 5.6628844e-05, -4.1394928e-06], dtype=float32), 1.8681875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_704.wav', 'So sieht das aus.', 17, array([7.2620540e-05, 1.0683333e-04, 1.9689680e-04, ..., 2.9477818e-05,\n", + " 1.5229379e-05, 4.7805424e-05], dtype=float32), 1.7448333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_710.wav', 'Gute Nacht ihr Lausbuben!', 25, array([-3.4681521e-04, -4.7425818e-04, -4.6133957e-04, ...,\n", + " 8.0735008e-06, -6.7210376e-06, 6.1622823e-06], dtype=float32), 1.8153333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_727.wav', 'Tschüss, Mädels!', 18, array([ 5.8768086e-07, -7.6773445e-05, -4.4017674e-05, ...,\n", + " -7.9999263e-05, 3.1158263e-06, 9.4530027e-05], dtype=float32), 1.4275833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_750.wav', 'Geh mir nicht auf den Keks.', 27, array([ 3.7033031e-05, -1.8765691e-05, 3.5605895e-05, ...,\n", + " -4.1894207e-05, -5.0918239e-05, -8.2971856e-05], dtype=float32), 1.8505833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_758.wav', \"Dir werd ich's zeigen.\", 22, array([ 5.9986287e-05, 3.1676023e-05, 9.2681257e-05, ...,\n", + " -2.7595996e-05, -4.2494954e-05, -1.1851616e-06], dtype=float32), 1.8505833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_764.wav', 'Macht euch bereit!', 18, array([1.5598367e-04, 1.9868747e-04, 1.1692408e-04, ..., 8.2378487e-05,\n", + " 6.5455366e-05, 4.8687412e-05], dtype=float32), 1.4628333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_766.wav', 'Da kiekste wa?', 14, array([ 5.4184136e-07, -6.1094812e-05, -6.1461476e-05, ...,\n", + " 9.7159907e-05, 2.3223305e-05, 8.9147768e-05], dtype=float32), 1.5862083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_778.wav', 'Das gibt es ja nicht!', 21, array([ 2.0350570e-04, 3.1676778e-04, 2.1080665e-04, ...,\n", + " -6.1200735e-05, 1.1813832e-05, -2.1792879e-05], dtype=float32), 1.3570833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_10_FINAL/10_789.wav', 'Das ist nicht mein Problem.', 27, array([-5.5885310e-05, -6.4690561e-05, -3.0270432e-05, ...,\n", + " -7.1330876e-05, -1.6931441e-05, -1.1536635e-05], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_23.wav', 'Finde dich damit ab.', 20, array([ 7.2009592e-05, -2.1050539e-05, -8.4551131e-05, ...,\n", + " 5.7306173e-05, 9.7603959e-05, 1.5820342e-04], dtype=float32), 1.3394583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_41.wav', 'Wie im Wilden Westen!', 21, array([ 1.4756477e-05, 3.1426986e-05, 9.2355578e-05, ...,\n", + " 8.1666811e-05, 7.9924212e-06, -1.6274511e-05], dtype=float32), 1.9915729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_63.wav', 'Da gehe ich mit.', 16, array([-1.10742374e-04, -1.88132090e-05, 1.54691588e-05, ...,\n", + " 2.89936361e-06, -3.01086147e-05, 3.05973408e-05], dtype=float32), 1.7183958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_75.wav', 'Warum nur werktags?', 19, array([-0.00052728, -0.00052381, -0.00042873, ..., -0.00014365,\n", + " -0.00010449, -0.00010741], dtype=float32), 1.7183958333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_76.wav', 'Geht ihr zur Kommunion?', 23, array([-1.0898075e-04, -9.7388023e-05, -6.8978305e-05, ...,\n", + " -5.0831288e-05, -1.5921889e-05, 6.4072694e-05], dtype=float32), 1.7271979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_80.wav', 'Ihr Blick spricht Bände.', 25, array([-4.6483423e-05, -1.6536529e-04, -9.5357966e-05, ...,\n", + " -8.0715154e-06, -4.8390953e-05, -5.0536739e-05], dtype=float32), 1.6655104166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_85.wav', 'Ruhe in Frieden.', 16, array([ 1.12481954e-04, 1.02392871e-04, 1.89193961e-05, ...,\n", + " -1.02047234e-05, -6.91346722e-05, -7.76782108e-05], dtype=float32), 1.7095729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_97.wav', 'Es wird hart gekämpft.', 23, array([-0.0001628 , -0.00018412, -0.00010292, ..., 0.0001769 ,\n", + " 0.00018152, 0.00018817], dtype=float32), 1.8681979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_98.wav', 'Warum das alles?', 16, array([-9.8717544e-05, -8.1991704e-05, -1.4659751e-04, ...,\n", + " -6.5778313e-06, -7.7343866e-05, 1.8901783e-05], dtype=float32), 1.3218333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_109.wav', 'Und Action!', 11, array([-2.8484770e-05, 8.8463985e-06, 5.4628901e-05, ...,\n", + " 6.9029898e-05, -7.5049247e-06, 2.7110993e-05], dtype=float32), 1.23371875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_112.wav', 'Bist du dir sicher?', 19, array([ 1.8312603e-05, -8.6757791e-07, -5.3837293e-06, ...,\n", + " 1.1187289e-05, -3.2346459e-05, 9.6363983e-06], dtype=float32), 1.6302708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_113.wav', 'Nur über meine Leiche!', 23, array([ 7.7449629e-05, 1.5036203e-04, 1.0243297e-04, ...,\n", + " -9.4819125e-06, -6.9288013e-05, 2.3950559e-05], dtype=float32), 1.8858229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_120.wav', 'Hoffentlich schafft er das.', 27, array([-1.6298418e-05, 1.6150392e-05, 2.2071041e-04, ...,\n", + " 5.1459443e-05, -2.1589445e-05, 3.2091139e-05], dtype=float32), 1.9210729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_147.wav', 'Komm, spiel mit mir!', 20, array([ 1.9483854e-05, 1.7799211e-06, 3.3775228e-05, ...,\n", + " 2.8417478e-05, -4.2961314e-05, -3.5597783e-05], dtype=float32), 1.9386979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_152.wav', 'Ui ui ui!', 9, array([5.5120941e-05, 5.6017692e-05, 4.3216096e-06, ..., 7.1505703e-05,\n", + " 3.5192006e-05, 7.0440023e-05], dtype=float32), 1.14559375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_157.wav', 'Riech mal!', 10, array([ 1.6765174e-05, 6.2451771e-05, 1.0707039e-04, ...,\n", + " -7.5908087e-05, -1.0923214e-04, -7.9517071e-05], dtype=float32), 1.03984375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_165.wav', 'Ich war nicht dabei.', 20, array([-9.2572387e-05, -7.4509240e-05, -3.5020537e-05, ...,\n", + " 2.8946462e-05, 6.8536661e-05, 1.4004428e-05], dtype=float32), 1.8065104166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_170.wav', 'Danke für die Einladung.', 25, array([-5.4829288e-05, -5.2409945e-05, -1.6216440e-05, ...,\n", + " 1.8202516e-05, 1.6152997e-05, 7.3245174e-05], dtype=float32), 1.5597708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_197.wav', 'So soll es 
sein.', 16, array([ 6.0843304e-05, 1.4244186e-05, -1.4521269e-05, ...,\n", + " -1.3551622e-04, -8.4085783e-05, -1.3086156e-04], dtype=float32), 1.4363958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_200.wav', 'Erschütternd!', 14, array([-1.85466139e-04, -1.61985561e-04, -1.26282161e-04, ...,\n", + " 6.37752237e-05, 1.00840225e-04, 1.20959485e-04], dtype=float32), 1.1543958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_203.wav', 'Nur das Ã\\x9cbliche.', 17, array([ 7.9542246e-05, 8.5164116e-05, 5.9246326e-05, ...,\n", + " -2.9600615e-05, 4.1036237e-05, 5.5239609e-05], dtype=float32), 1.8153229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_206.wav', 'Die hat nämlich ein Loch.', 26, array([-1.4263311e-05, 3.4131535e-05, -3.4750206e-05, ...,\n", + " -5.7866608e-05, 1.9035106e-05, 3.3172044e-05], dtype=float32), 1.9827604166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_230.wav', 'Hol das Stöckchen.', 19, array([-0.00064988, -0.00065917, -0.00059873, ..., 0.00020419,\n", + " 0.00022752, 0.00016691], dtype=float32), 1.4452083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_237.wav', 'Und bei dir?', 12, array([-2.9914919e-04, -2.2948935e-04, -2.3748397e-04, ...,\n", + " 1.1257434e-05, -3.9087045e-05, -2.3366434e-05], dtype=float32), 1.07509375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_264.wav', 'Es liegt in der Natur der Sache.', 32, array([ 3.1785059e-04, 3.4756004e-04, 3.4774767e-04, ...,\n", + " -3.1788899e-05, -7.7856974e-05, -7.3492403e-05], dtype=float32), 1.9563229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_268.wav', 'Mission erfolgreich!', 20, array([-5.1757845e-05, -2.9873547e-05, -5.2602922e-05, ...,\n", + " -1.0881226e-04, -7.0386566e-05, -4.1912252e-05], dtype=float32), 1.7977083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_274.wav', 'Kommt nicht in die Tüte!', 25, array([-2.6346192e-05, -6.4550313e-06, -4.2296477e-05, ...,\n", + " 6.7257854e-05, 5.5296507e-05, 6.6974962e-06], dtype=float32), 1.8505729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_284.wav', 'Ja, guten Tag!', 14, array([ 3.1975062e-05, 7.6259523e-05, 7.8669080e-05, ...,\n", + " -1.8048113e-05, -4.4206077e-05, -4.7247828e-05], dtype=float32), 1.9739375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_308.wav', 'Es ist noch nicht lange her.', 28, array([ 2.2859822e-06, 6.0211198e-05, 5.7821064e-05, ...,\n", + " -8.3175619e-06, -2.3456680e-05, -1.9626390e-05], dtype=float32), 1.8681979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_316.wav', 'Wiedersehen!', 12, array([2.8599703e-05, 6.1528997e-05, 8.9646070e-05, ..., 2.7208553e-06,\n", + " 2.9898734e-05, 9.2172457e-05], dtype=float32), 1.12796875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_328.wav', 'Mir ist schwindelig.', 20, array([ 2.4521294e-05, 5.4549360e-05, 2.9534258e-06, ...,\n", + " -8.9185494e-05, -1.0303867e-04, -5.3436386e-05], dtype=float32), 1.7976979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_329.wav', 'Sprechen sie deutsch?', 21, array([-2.4279220e-04, -2.6937225e-04, -2.3713916e-04, ...,\n", + " -2.8695989e-05, -2.7513888e-06, 5.1191882e-06], dtype=float32), 1.5333333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_360.wav', 'So war es nicht gemeint.', 24, array([-5.8561371e-05, 8.4504954e-06, 
3.6038864e-06, ...,\n", + " 9.6144824e-05, 5.4328477e-05, 8.8002511e-05], dtype=float32), 1.8681979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_399.wav', 'Schluss jetzt!', 14, array([ 1.60011361e-04, 1.10784895e-04, 1.05728453e-04, ...,\n", + " 1.56215738e-05, -7.51677726e-06, 3.21154062e-06], dtype=float32), 1.1940625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_414.wav', 'Sehen Sie genau hin!', 20, array([ 4.0775692e-05, 7.8341225e-05, 5.9709568e-05, ...,\n", + " 1.6227934e-05, 3.3044285e-05, -1.1752409e-06], dtype=float32), 1.7448229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_432.wav', 'Christina Habeck?', 17, array([-7.0921145e-05, -8.7887020e-05, -1.0741340e-04, ...,\n", + " 6.9928697e-05, 6.0020051e-05, 4.4092048e-05], dtype=float32), 1.6831354166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_491.wav', 'Olé, olé!', 11, array([-3.5300669e-05, -3.0546897e-05, -4.6127847e-05, ...,\n", + " -4.5910983e-06, 9.3032322e-06, 4.1992083e-05], dtype=float32), 1.3394583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_498.wav', 'Nur mal so als Anregung.', 24, array([-5.8754493e-05, -2.6690983e-05, -4.8782116e-05, ...,\n", + " -4.1356816e-05, -3.8702921e-05, -2.8129245e-05], dtype=float32), 1.929875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_503.wav', 'Ich glaube ihr kein Wort.', 25, array([-1.92081643e-06, -2.77346317e-05, -5.22437476e-05, ...,\n", + " 6.71621965e-05, 1.27864005e-05, 3.48269168e-05], dtype=float32), 1.9915625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_537.wav', 'Wie könnt ihr es wagen?', 24, array([-1.4561453e-03, -1.4608348e-03, -1.4617005e-03, ...,\n", + " 7.5047151e-06, -8.1957251e-07, 1.6147833e-05], dtype=float32), 1.8417604166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_540.wav', 'Nach was schmeckt das genau?', 28, array([5.2316565e-05, 4.9443977e-05, 5.7626901e-05, ..., 2.5021756e-05,\n", + " 4.5578519e-05, 5.3426527e-05], dtype=float32), 1.9651354166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_548.wav', 'Gänsehaut pur!', 15, array([-9.5325144e-05, -7.7983823e-05, -6.6722314e-05, ...,\n", + " 5.7276593e-05, 2.5111651e-05, 1.1992834e-05], dtype=float32), 1.4628333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_564.wav', 'Höret, höret!', 15, array([-6.9055131e-05, -6.1163970e-05, -7.0053116e-05, ...,\n", + " -1.7221355e-05, -7.2541329e-06, 1.8846076e-06], dtype=float32), 1.3658958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_574.wav', 'Das Haus ist umstellt.', 22, array([ 4.3151813e-05, 5.5632776e-05, 2.7663889e-05, ...,\n", + " -4.0600127e-05, -3.0027895e-05, -4.6370071e-05], dtype=float32), 1.7183958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_606.wav', 'Den versteht keiner.', 20, array([-6.2417603e-05, -8.2428480e-05, -4.4267428e-05, ...,\n", + " -6.2675332e-05, -4.0452942e-05, -5.3965356e-05], dtype=float32), 1.7272083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_612.wav', 'Halten Sie sich fest!', 21, array([2.8007184e-05, 3.2632157e-05, 6.2635645e-06, ..., 5.3581707e-06,\n", + " 1.5780075e-05, 2.3362747e-06], dtype=float32), 1.6390729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_640.wav', 'Können Sie sich ausweisen?', 27, array([-4.1133004e-05, -3.4346365e-05, -2.0997140e-06, ...,\n", + " 2.5395755e-05, 
1.5488129e-05, 1.3214269e-05], dtype=float32), 1.9298854166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_645.wav', 'Genug ist genug.', 16, array([1.4217473e-04, 1.3088981e-04, 1.2007774e-04, ..., 8.0914921e-05,\n", + " 5.1820301e-05, 7.9144287e-05], dtype=float32), 1.7448229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_647.wav', 'Da bin ich ganz bei Ihnen!', 26, array([-6.2454426e-05, -7.3873220e-05, -9.7365184e-05, ...,\n", + " 1.7943923e-05, 1.8189858e-05, 2.0363577e-05], dtype=float32), 1.7183854166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_666.wav', 'Ich hasse dich!', 15, array([-4.7738231e-06, 1.0362664e-06, 9.6731110e-06, ...,\n", + " 3.2887896e-05, 6.7240894e-06, 7.3296378e-06], dtype=float32), 1.5509583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_676.wav', 'Jetzt weiÃ\\x9f ich es wieder.', 26, array([-2.9731807e-05, -2.5498804e-05, -5.7221558e-05, ...,\n", + " -1.3199271e-05, -1.1122796e-05, -1.5994978e-05], dtype=float32), 1.9915729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_708.wav', 'Täuschkörper einsetzen!', 25, array([3.3980694e-05, 5.6047942e-05, 3.6845995e-05, ..., 2.0433601e-05,\n", + " 5.5359560e-05, 3.6635800e-05], dtype=float32), 1.9563229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_711.wav', 'So sind die Regeln.', 19, array([ 1.0646171e-05, 2.1217951e-05, -8.0062582e-06, ...,\n", + " -4.2156036e-05, -1.8816583e-05, -4.4005763e-05], dtype=float32), 1.6038229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_712.wav', 'Es schmeckt nach Zimt.', 22, array([ 2.2929296e-05, 2.9111379e-05, 4.6064979e-05, ...,\n", + " -1.8768259e-06, 7.4329464e-06, 1.2982395e-05], dtype=float32), 1.6831354166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_717.wav', 'Auch bei feuchtem Wetter nicht.', 31, array([1.6887316e-05, 6.2355371e-05, 7.5977659e-05, ..., 1.6490449e-05,\n", + " 2.1054177e-05, 1.1164552e-05], dtype=float32), 1.965125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_731.wav', 'Warum denn nicht?', 17, array([ 6.4304750e-06, -6.7788221e-07, -1.0204109e-06, ...,\n", + " -9.7024295e-06, -3.1934254e-05, -2.7286467e-05], dtype=float32), 1.25134375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_746.wav', 'Was isst du da?', 15, array([ 4.1260464e-05, 1.0193682e-05, 3.5085955e-05, ...,\n", + " -3.5494733e-05, -1.2306450e-05, 1.2647797e-05], dtype=float32), 1.6919479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_750.wav', 'Alle schreien hier!', 19, array([-1.3079788e-04, -1.3171590e-04, -1.1580650e-04, ...,\n", + " -2.0512020e-05, -2.3779969e-05, -2.4454272e-05], dtype=float32), 1.7007708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_754.wav', 'Das ist genau mein Ding.', 24, array([-1.1629934e-05, -2.1403244e-05, 1.6778110e-06, ...,\n", + " 1.0532378e-05, 4.3498221e-05, 4.0848565e-05], dtype=float32), 1.6390729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_757.wav', 'Wo denken Sie hin?', 18, array([ 2.1430247e-05, 2.1772265e-05, 2.0838190e-05, ...,\n", + " 2.2910473e-05, -5.1848092e-06, -1.5559262e-06], dtype=float32), 1.4540208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_758.wav', 'Reine Gewöhnungssache.', 23, array([-4.3785589e-05, -4.8620215e-05, -4.8604503e-05, ...,\n", + " 1.0856102e-05, 7.9429465e-06, 
6.5844351e-06], dtype=float32), 1.6126458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_760.wav', 'Tschüss!', 9, array([1.6893557e-05, 3.7733011e-05, 4.6923491e-05, ..., 3.5450230e-05,\n", + " 5.7595411e-05, 5.0426086e-05], dtype=float32), 0.6873541666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_765.wav', 'Vergiss die Waschtasche nicht!', 30, array([-5.2931227e-05, -5.9350517e-05, -5.4635959e-05, ...,\n", + " -3.9712177e-05, -3.0881067e-05, -1.9957897e-05], dtype=float32), 1.929875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_788.wav', 'Längs oder quer?', 17, array([-5.8456011e-05, -4.5964895e-05, -2.6546955e-05, ...,\n", + " 1.1356072e-05, 1.8672996e-05, -7.0059104e-07], dtype=float32), 1.5597708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_14.wav', 'Wer hat euch geschickt?', 23, array([-1.1148760e-04, 2.4612555e-05, 9.3476447e-05, ...,\n", + " -9.7927412e-05, -3.4095574e-05, -1.7279797e-05], dtype=float32), 1.856)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_34.wav', 'Wo bin ich hier nur gelandet?', 29, array([-1.3307537e-05, -1.0089541e-04, -1.2360289e-05, ...,\n", + " -4.9649680e-05, -7.3272109e-05, -6.8251233e-05], dtype=float32), 1.9306666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_40.wav', 'Natürlich behauptet sie das.', 29, array([ 1.2778574e-04, 5.9959311e-05, -8.1008322e-05, ...,\n", + " 1.9905625e-04, 2.6344018e-05, 1.1490170e-04], dtype=float32), 1.952)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_43.wav', 'Du hattest recht.', 17, array([-1.1000242e-04, -1.6242996e-04, -2.2294538e-04, ...,\n", + " 1.1730633e-04, -8.3676481e-05, -2.5764350e-05], dtype=float32), 1.152)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_44.wav', 'Verklagen Sie mich doch!', 24, array([ 1.94306958e-05, 1.91541476e-04, 6.15894969e-05, ...,\n", + " -1.00529454e-04, -2.00755429e-04, 5.24241113e-05], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_45.wav', 'Die Bremse schleift.', 20, array([ 1.8599353e-04, 8.8273533e-05, 1.5005667e-04, ...,\n", + " -1.6525917e-04, -2.2365544e-05, -2.3978014e-04], dtype=float32), 1.5466666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_51.wav', 'Hilfe!', 6, array([-1.7958642e-04, -2.2338594e-04, -2.7969983e-04, ...,\n", + " -1.4840752e-04, -3.4539087e-05, 3.2946355e-06], dtype=float32), 0.704)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_57.wav', 'Jetzt liegt es an dir.', 22, array([ 2.1328227e-04, 8.1810067e-05, -1.6158322e-04, ...,\n", + " 1.6350237e-04, 1.0099774e-04, 1.6040609e-05], dtype=float32), 1.568)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_58.wav', 'Wo kann ich das kaufen?', 23, array([-9.1674337e-05, -1.6169342e-04, -1.8347435e-04, ...,\n", + " 4.6268760e-06, 2.3974455e-05, -1.1637783e-04], dtype=float32), 1.536)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_60.wav', 'Kann man jetzt auch nicht mehr ändern.', 39, array([-3.5826775e-04, -3.3033665e-04, -2.3628448e-04, ...,\n", + " -1.9967039e-04, -1.7616056e-05, 6.7053217e-05], dtype=float32), 1.984)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_68.wav', 'Hör mir doch mal zu.', 21, array([-1.0109342e-04, -3.4855773e-06, 9.0611480e-05, ...,\n", + " -1.0345047e-04, -4.0894301e-05, -6.3259591e-05], dtype=float32), 1.4613333333333334)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_75.wav', 'Gibt es die Person wirklich?', 28, array([1.8891362e-04, 2.3809298e-04, 1.1160582e-04, ..., 2.3936841e-06,\n", + " 4.5461587e-05, 9.1474227e-05], dtype=float32), 1.952)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_79.wav', 'Wo waren wir stehen geblieben?', 30, array([-6.7620305e-05, 3.2152042e-05, 6.8106332e-05, ...,\n", + " -1.8769420e-04, -6.5137865e-05, -2.5653889e-04], dtype=float32), 1.824)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_91.wav', 'Grundgütiger!', 14, array([ 7.70497209e-05, -5.13312625e-05, 7.22193681e-06, ...,\n", + " -1.11605725e-04, -1.26782295e-04, 8.50337819e-05], dtype=float32), 1.3546666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_92.wav', 'Wer bist du?', 12, array([-4.3348764e-04, -4.4667200e-04, -4.2408684e-04, ...,\n", + " -3.9185648e-05, -3.1797776e-05, -2.2222506e-04], dtype=float32), 1.024)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_95.wav', 'Schon gut.', 10, array([-3.07407812e-04, -4.31929773e-04, -5.19388705e-04, ...,\n", + " -1.07154076e-04, -7.57433227e-05, -1.24133236e-04], dtype=float32), 0.9173333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_99.wav', 'Murat, was ist los mit dir?', 27, array([-3.84323685e-05, 6.48807691e-05, -5.84455011e-05, ...,\n", + " 1.45171012e-04, -1.50349506e-05, 1.20676006e-04], dtype=float32), 1.8453333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_101.wav', 'HeiÃ\\x9fe Würstchen!', 18, array([-0.00027939, -0.00039175, -0.00025548, ..., 0.00027689,\n", + " 0.00011903, 0.00012768], dtype=float32), 1.3866666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_106.wav', 'Ich will auch mal einer werden.', 31, array([ 1.36086979e-04, -1.76298781e-05, -4.00176577e-05, ...,\n", + " 1.72844579e-04, 1.29597363e-04, -1.02162725e-04], dtype=float32), 1.8986666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_112.wav', 'Ich will auch haben!', 20, array([-4.40885342e-05, -2.34828622e-04, -3.29593284e-04, ...,\n", + " -3.05666414e-04, -1.31685141e-04, -1.00833015e-04], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_116.wav', 'Setz dich bitte gerade hin!', 27, array([-2.2211492e-04, -2.0630175e-04, -1.4655131e-04, ...,\n", + " 1.6456892e-04, 1.0634777e-06, -1.4669505e-04], dtype=float32), 1.9306666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_122.wav', 'Findest du mich erwachsen?', 26, array([3.0208268e-04, 3.6579225e-04, 3.3154435e-04, ..., 6.2579543e-06,\n", + " 4.9250040e-05, 1.8107957e-04], dtype=float32), 1.696)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_123.wav', 'Schrei nicht so!', 16, array([ 8.03208750e-05, 1.33657450e-04, -1.13144284e-04, ...,\n", + " 4.64295183e-04, 4.82034549e-04, 2.86602415e-04], dtype=float32), 1.152)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_136.wav', 'Das kam unerwartet.', 19, array([-3.3067852e-05, -4.8878199e-05, 5.8831414e-05, ...,\n", + " -3.5621467e-04, -3.7723745e-04, -2.3875662e-04], dtype=float32), 1.7386666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_149.wav', 'Das ergibt doch keinen Sinn.', 28, array([6.0471892e-05, 8.1125305e-05, 2.7437322e-04, ..., 9.1583250e-05,\n", + " 2.0055164e-04, 2.2477485e-04], dtype=float32), 1.9733333333333334)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_155.wav', 'Aller Abschied fällt schwer.', 29, array([-2.2813781e-04, -5.5478893e-05, 1.6814301e-04, ...,\n", + " 1.2765558e-04, 1.7368943e-04, 2.6105065e-04], dtype=float32), 1.6533333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_165.wav', 'Erkennst du mich nicht?', 23, array([-2.3624673e-04, -3.1934463e-04, -2.9434697e-04, ...,\n", + " 1.7059442e-04, 1.9742029e-06, 1.3172596e-04], dtype=float32), 1.4293333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_169.wav', 'Willst du sie mal streicheln?', 29, array([ 1.9991475e-04, 3.4090909e-04, 3.2008073e-04, ...,\n", + " 4.6425943e-05, -8.5656990e-05, -1.2934266e-05], dtype=float32), 1.9413333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_181.wav', 'Zur Anmeldung klicken Sie hier.', 31, array([ 5.3989668e-05, -9.8630007e-05, -1.1361165e-04, ...,\n", + " -2.2555150e-05, 3.3015600e-05, 1.0129590e-04], dtype=float32), 1.92)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_194.wav', 'Elvis war nie tot.', 18, array([-6.78355209e-05, -5.90024465e-05, -1.47034181e-04, ...,\n", + " 1.19253775e-04, 2.40493591e-05, 3.28276219e-04], dtype=float32), 1.696)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_196.wav', 'Irgendetwas zu verzollen?', 25, array([-1.2399687e-04, -3.0497483e-06, -1.2210968e-04, ...,\n", + " 1.4703360e-05, 4.4073422e-05, 2.5880148e-04], dtype=float32), 1.696)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_216.wav', 'Du bist doch nicht aus Zucker.', 30, array([-3.7417009e-05, -2.1370529e-04, -1.0503333e-04, ...,\n", + " -3.4687804e-05, -1.0006884e-04, 8.2270970e-05], dtype=float32), 1.9626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_217.wav', 'Bald hat er sein Abi.', 21, array([-7.6955817e-05, -7.4724245e-05, -5.4779473e-05, ...,\n", + " -3.2609492e-05, -1.9532166e-04, -4.0988740e-05], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_227.wav', 'Da lacht das Herz.', 18, array([0.000232 , 0.00019664, 0.00015979, ..., 0.00012966, 0.0001156 ,\n", + " 0.00015061], dtype=float32), 1.664)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_238.wav', 'Steht mir die Bluse?', 20, array([ 5.00293754e-05, 1.15090246e-04, -1.61606382e-04, ...,\n", + " -1.10758898e-04, 9.87306703e-05, 2.25929121e-04], dtype=float32), 1.3653333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_240.wav', 'Kommt ihr zurecht?', 18, array([-1.4166623e-04, -1.7185905e-04, -1.0146119e-04, ...,\n", + " -1.9281202e-05, -4.6475827e-05, -7.9622550e-05], dtype=float32), 1.5466666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_246.wav', 'Her damit!', 10, array([-1.0743736e-04, -6.3287393e-05, 5.4618115e-05, ...,\n", + " 1.7166793e-04, 1.5052129e-04, -4.3305259e-05], dtype=float32), 0.9386666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_256.wav', 'Talente muss man fördern.', 26, array([ 2.9789119e-06, 2.0445570e-05, 3.6582744e-05, ...,\n", + " -8.0595542e-05, 2.8049317e-06, -2.4196431e-04], dtype=float32), 1.6426666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_298.wav', 'Kein Kommentar!', 15, array([2.0757825e-04, 2.0225085e-05, 1.0584419e-04, ..., 2.2611262e-05,\n", + " 2.2597586e-04, 5.2457988e-05], dtype=float32), 1.1093333333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_304.wav', 'Der atmet noch.', 15, array([-0.0001642 , -0.00022683, -0.00021831, ..., 0.00013961,\n", + " 0.00017319, 0.00013602], dtype=float32), 1.2586666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_308.wav', 'Das dauert aber lange!', 22, array([4.1067542e-05, 4.3461972e-05, 1.7915755e-04, ..., 1.1849359e-04,\n", + " 1.6261388e-04, 1.4937650e-05], dtype=float32), 1.44)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_311.wav', 'Du kennst mich, Danton.', 23, array([-5.2089547e-04, -4.7035489e-04, -5.9835758e-04, ...,\n", + " -9.4374191e-05, -2.0053205e-05, 1.2992002e-06], dtype=float32), 1.8346666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_314.wav', 'Mein Gott, Walter!', 18, array([ 4.9858125e-05, -2.4514409e-05, -4.7797763e-05, ...,\n", + " -2.9001143e-05, -1.4190034e-04, -2.5762929e-05], dtype=float32), 1.2586666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_318.wav', 'Und was machst du sonst so?', 27, array([ 0.00041733, 0.00037329, 0.00035271, ..., -0.00016106,\n", + " -0.00041058, -0.00029774], dtype=float32), 1.6106666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_331.wav', 'Dort wird dir geholfen.', 23, array([-1.9671346e-04, -1.1574107e-04, 5.4965103e-06, ...,\n", + " 4.3039094e-05, -3.2543256e-05, -7.8007070e-05], dtype=float32), 1.5466666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_336.wav', 'Was ist denn hier los?', 22, array([0.00012079, 0.00029083, 0.00013022, ..., 0.00036718, 0.00031168,\n", + " 0.00049887], dtype=float32), 1.4506666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_358.wav', 'Gleich sind wir dort.', 21, array([ 1.5992192e-04, 2.5509403e-04, 2.3052108e-04, ...,\n", + " 1.9194868e-04, 6.2326435e-05, -2.0080882e-04], dtype=float32), 1.7706666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_366.wav', 'Sind sie gut informiert?', 24, array([-1.2915327e-04, 5.4154119e-05, 9.4311297e-05, ...,\n", + " 1.4842945e-04, 1.6595995e-04, 1.6055972e-04], dtype=float32), 1.7493333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_376.wav', \"Was soll's, ich bin bereit.\", 27, array([-0.00025371, -0.00037118, -0.00054651, ..., -0.00013142,\n", + " 0.000133 , 0.0001903 ], dtype=float32), 1.8133333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_387.wav', 'Was soll das heiÃ\\x9fen?', 21, array([ 6.26799228e-05, -1.15550021e-04, -1.60253039e-04, ...,\n", + " -1.14853225e-04, 3.62789683e-06, -1.25641367e-04], dtype=float32), 1.6106666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_398.wav', 'Oder so!', 8, array([-0.00011172, -0.00021632, -0.0003379 , ..., 0.00016637,\n", + " 0.00021105, 0.00035037], dtype=float32), 0.9386666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_431.wav', 'Fauche mich nicht so an!', 24, array([-1.69856430e-04, -2.14659201e-04, -1.17017007e-04, ...,\n", + " 1.06098436e-04, 1.30685687e-04, 8.11223654e-05], dtype=float32), 1.536)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_439.wav', 'Genau zweihundert.', 18, array([ 4.3691549e-04, 4.2721629e-04, 2.1283170e-04, ...,\n", + " -1.0831581e-05, 6.4474931e-05, 1.3399551e-04], dtype=float32), 1.4186666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_446.wav', 'Ja ja, das schickt!', 19, 
array([-1.5079082e-05, 1.2119063e-04, 1.9518439e-04, ...,\n", + " -8.6470172e-05, -3.4930470e-04, -3.7717246e-04], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_460.wav', 'Stein schlägt Schere.', 22, array([ 5.7708825e-05, 1.6740670e-04, 1.9982990e-04, ...,\n", + " -3.3077580e-05, 1.1591193e-04, 7.5874494e-05], dtype=float32), 1.936)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_468.wav', 'Simsalabim!', 11, array([-1.8192175e-05, -1.2427589e-04, 4.0916457e-05, ...,\n", + " -3.6532696e-05, 2.9238325e-05, 2.0148496e-05], dtype=float32), 1.0506666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_478.wav', 'Bitte Zutreffendes ankreuzen.', 29, array([-5.4858734e-05, -6.8480607e-05, -7.1117909e-05, ...,\n", + " -3.5092820e-05, 4.6205354e-05, 3.1237360e-05], dtype=float32), 1.968)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_480.wav', 'Dich kenne ich doch!', 20, array([-3.4106572e-04, -2.6489299e-04, -1.9887066e-04, ...,\n", + " 5.8086891e-05, 2.0823347e-04, -4.3870667e-05], dtype=float32), 1.4026666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_484.wav', 'Und los!', 8, array([ 2.0759732e-04, 2.4903464e-04, -3.9741102e-05, ...,\n", + " -1.4017121e-04, -2.2582384e-04, -2.2852831e-04], dtype=float32), 0.8906666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_485.wav', 'Der Patient ist eh schon tot.', 29, array([ 2.8383749e-04, 1.6098749e-04, 5.8996215e-05, ...,\n", + " -1.5776475e-04, -1.0137054e-04, -1.0374457e-04], dtype=float32), 1.92)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_487.wav', 'Und zwar nicht zu knapp!', 24, array([-4.9983555e-05, 1.0859955e-04, 1.3262806e-04, ...,\n", + " 1.4716771e-04, 2.1034098e-04, 2.6678585e-04], dtype=float32), 1.7706666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_488.wav', 'Was ist mit dem Co-Piloten?', 27, array([-4.6707326e-04, -3.3664281e-04, -1.6913723e-04, ...,\n", + " 9.7057833e-05, -3.0600113e-05, -3.3933247e-05], dtype=float32), 1.9626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_510.wav', 'Sie würde ihr letztes Hemd geben.', 34, array([ 1.5112071e-04, 9.9046929e-06, -7.1756775e-05, ...,\n", + " 1.4958363e-04, 2.2523174e-04, 4.5510088e-04], dtype=float32), 1.92)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_521.wav', 'Das wird eh nur Werbung sein.', 29, array([-0.00043494, -0.00045403, -0.00052693, ..., -0.00037776,\n", + " -0.00013905, -0.00029146], dtype=float32), 1.84)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_539.wav', 'Jetzt gibt es Kloppe.', 21, array([ 5.6757370e-05, 1.2752461e-05, -1.0132902e-04, ...,\n", + " -2.8363563e-04, -4.8957689e-04, -4.9631519e-04], dtype=float32), 1.4666666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_549.wav', 'Nee, lieber nicht.', 18, array([-6.2041539e-03, -6.1025852e-03, -5.7721483e-03, ...,\n", + " -4.7201215e-06, -8.9430447e-05, -4.9632461e-05], dtype=float32), 1.5626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_563.wav', 'Er soll schlieÃ\\x9flich etwas lernen.', 34, array([-5.03349729e-05, -2.22053477e-05, 5.14282438e-05, ...,\n", + " 1.08890556e-04, 3.83222614e-05, 6.10036659e-05], dtype=float32), 1.8346666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_566.wav', 'Angeblich ja.', 13, array([ 1.7242544e-04, 1.8572621e-04, 1.3631192e-04, 
...,\n", + " -4.0973751e-05, -1.5965881e-04, -1.0953719e-04], dtype=float32), 1.2373333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_567.wav', 'Wie wäre es mit Wiesbaden?', 27, array([-9.5517004e-05, -2.3826263e-04, -1.0132407e-04, ...,\n", + " 4.5667308e-05, 1.4000830e-04, 2.1524900e-05], dtype=float32), 1.9093333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_576.wav', 'Hört mal zu, ihr Checker!', 26, array([-0.00049925, -0.00049119, -0.00044878, ..., 0.00019171,\n", + " 0.00023476, 0.00022403], dtype=float32), 1.7013333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_580.wav', \"Irgendwann wird's langweilig.\", 29, array([-0.00039041, -0.00038523, -0.00025343, ..., -0.00031044,\n", + " -0.00019142, -0.00014154], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_586.wav', 'Spuck ihn wieder aus!', 21, array([ 0.00012375, 0.00025117, 0.0001871 , ..., -0.00021903,\n", + " -0.00034992, -0.00024192], dtype=float32), 1.712)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_589.wav', 'Unterschätze den Knirps nicht.', 31, array([2.5606243e-04, 2.5400775e-04, 2.3841709e-04, ..., 2.1033855e-05,\n", + " 1.9420990e-04, 1.0694992e-04], dtype=float32), 1.968)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_596.wav', 'Darf ich vorkosten?', 19, array([-1.3477511e-04, -2.3315112e-04, 1.3153857e-05, ...,\n", + " 1.0751128e-04, 1.8084023e-04, 1.6106233e-04], dtype=float32), 1.4506666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_597.wav', 'Ich traue mich nicht!', 21, array([-2.9329595e-04, -3.9892262e-04, -2.9478277e-04, ...,\n", + " -1.0763263e-04, 1.1553553e-04, 7.1091476e-05], dtype=float32), 1.4506666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_602.wav', 'Warum seid ihr so leise?', 24, array([ 2.9226076e-05, 1.6949150e-04, 1.3950269e-04, ...,\n", + " 2.4965027e-05, 7.3044146e-05, -1.8916466e-05], dtype=float32), 1.5786666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_603.wav', 'Nun stellt euch nicht so an!', 28, array([1.4806543e-04, 1.4012858e-04, 7.7195640e-05, ..., 1.4235765e-04,\n", + " 1.3738184e-04, 1.3289873e-05], dtype=float32), 1.7706666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_608.wav', 'Das Essen wird kalt.', 20, array([ 2.36780070e-05, -1.06394495e-04, -1.18256241e-04, ...,\n", + " 8.05624004e-05, -4.60968913e-05, -8.52375670e-05], dtype=float32), 1.3866666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_614.wav', 'Fachidioten soll es auch geben.', 31, array([ 7.9924423e-05, 2.0709680e-04, -6.6771558e-05, ...,\n", + " 2.4189356e-05, 6.7659719e-05, -2.3424522e-05], dtype=float32), 1.984)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_627.wav', 'Du bist vielleicht eine Knalltüte!', 35, array([ 1.7171216e-04, -3.8676033e-05, -8.2237340e-05, ...,\n", + " -1.8530877e-04, -1.3380373e-04, -1.6169780e-04], dtype=float32), 1.8773333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_629.wav', 'Natürlich nicht seine eigene.', 30, array([-2.2751655e-04, -1.5005520e-04, -9.8528086e-05, ...,\n", + " 1.8771169e-04, 2.7484499e-04, 3.0332521e-04], dtype=float32), 1.8026666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_630.wav', 'Halten Sie die Presse zurück!', 30, array([ 3.1129293e-06, 7.3669260e-05, 3.3459681e-05, ...,\n", + " 
-1.5276406e-04, 2.6472675e-05, -1.9852230e-05], dtype=float32), 1.76)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_636.wav', 'Ruf schnell die Polizei!', 24, array([5.1400399e-05, 6.7014749e-05, 5.1501669e-05, ..., 1.8976731e-04,\n", + " 2.0147586e-04, 1.5075490e-04], dtype=float32), 1.5573333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_637.wav', 'Dann nimmt man sie sich.', 24, array([-0.00050762, -0.00047607, -0.00053025, ..., 0.00035113,\n", + " 0.00017673, 0.00026363], dtype=float32), 1.856)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_674.wav', 'Gibst du mir deine Nummer?', 26, array([-1.0660516e-04, -1.8238377e-05, 9.7913333e-05, ...,\n", + " 3.0329258e-05, 9.0803427e-05, 2.0600615e-05], dtype=float32), 1.536)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_675.wav', 'Man kann nicht alles haben.', 27, array([ 3.6246947e-04, 3.3836463e-04, 3.9515106e-04, ...,\n", + " 1.9603693e-05, -1.0797187e-07, 4.7195343e-05], dtype=float32), 1.696)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_686.wav', 'Wie oft denn noch?', 18, array([-0.00025807, -0.00045327, -0.00041516, ..., -0.00053778,\n", + " -0.00065512, -0.00057833], dtype=float32), 1.2906666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_703.wav', 'Der Erste in was?', 17, array([3.7513164e-05, 2.3692524e-05, 9.2795723e-05, ..., 1.8559145e-04,\n", + " 8.4898209e-05, 1.3820640e-05], dtype=float32), 1.4323645833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_720.wav', 'Wie denn nun?', 13, array([-7.8975081e-06, -2.1718148e-05, 2.7641279e-05, ...,\n", + " 3.3564411e-05, 3.3564411e-05, 1.9743769e-05], dtype=float32), 0.9525625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_722.wav', 'Ihm wäre das zu müÃ\\x9fig.', 25, array([ 5.1333802e-05, 6.3180065e-05, -1.3820640e-05, ...,\n", + " -1.9743769e-05, 3.9487541e-06, -4.7385049e-05], dtype=float32), 1.93334375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_724.wav', 'Ã\\x96l ist ausgelaufen.', 20, array([-3.7513164e-05, -7.8975081e-06, -1.5795016e-05, ...,\n", + " -1.3820640e-05, -1.3820640e-05, 4.5410670e-05], dtype=float32), 1.6087708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_730.wav', 'Willkommen im Neuland!', 22, array([-6.910320e-05, -6.515444e-05, 1.382064e-05, ..., -3.356441e-05,\n", + " -1.974377e-06, 8.489821e-05], dtype=float32), 1.6652083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_737.wav', 'Kannst du mich mal zwicken?', 27, array([ 3.9487539e-05, 3.9487541e-06, 3.3564411e-05, ...,\n", + " -1.3820640e-05, -3.1590032e-05, 5.9231312e-05], dtype=float32), 1.6087604166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_752.wav', 'Friede sei mit dir.', 19, array([-0.00018362, -0.00025075, -0.00027839, ..., -0.00025864,\n", + " -0.0002389 , -0.00026457], dtype=float32), 1.2347916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_753.wav', 'Mit Speck fängt man Mäuse.', 28, array([-1.61898919e-04, -1.04641986e-04, -8.68725911e-05, ...,\n", + " -5.92313118e-05, 6.31800649e-05, 7.70007027e-05], dtype=float32), 1.6087604166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_766.wav', 'Bin ich die Auskunft oder was?', 30, array([2.96156559e-05, 1.04641986e-04, 1.26360130e-04, ...,\n", + " 2.46797135e-04, 2.94182188e-04, 3.25772213e-04], dtype=float32), 1.99684375)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_767.wav', 'Sesam, öffne dich!', 19, array([-3.8500351e-04, -3.3366971e-04, -3.5933661e-04, ...,\n", + " -5.9231312e-05, -2.3692524e-05, 2.9615656e-05], dtype=float32), 1.4253125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_772.wav', 'Er kennt seine Pappenheimer.', 28, array([-3.7513164e-05, -1.9743769e-05, -1.3820640e-05, ...,\n", + " -8.6872591e-05, -1.5202703e-04, -1.7177081e-04], dtype=float32), 1.7146145833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_792.wav', 'Da geht noch was.', 17, array([ 2.0336083e-04, 1.6979642e-04, 1.6189892e-04, ...,\n", + " -4.9359427e-05, -2.9615656e-05, -7.3051953e-05], dtype=float32), 1.25596875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_803.wav', 'Er macht es eben gründlich.', 28, array([-5.5282559e-05, -8.2923834e-05, 1.9743769e-05, ...,\n", + " -9.4770097e-05, -1.8361707e-04, -2.5469463e-04], dtype=float32), 1.9615625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_816.wav', 'Spionierst du mich aus?', 23, array([3.5538786e-04, 4.5015797e-04, 4.8767112e-04, ..., 4.3436296e-05,\n", + " 1.7769393e-04, 1.7769393e-04], dtype=float32), 1.7992708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_817.wav', 'Komm zurück!', 13, array([4.0672167e-04, 2.2902773e-04, 6.3180065e-05, ..., 3.7513164e-05,\n", + " 4.7385049e-05, 6.3180065e-05], dtype=float32), 1.11484375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_819.wav', 'Sie schwebt auf Wolke sieben.', 29, array([7.5026328e-05, 1.2438576e-04, 1.5005266e-04, ..., 1.1056512e-04,\n", + " 1.4215514e-04, 1.3820639e-04], dtype=float32), 1.9756770833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_826.wav', 'Wehret den Anfängen!', 21, array([ 1.4610391e-04, 1.3425764e-04, 1.2636013e-04, ...,\n", + " -5.9231311e-06, -1.5795016e-05, -2.9615656e-05], dtype=float32), 1.8486666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_834.wav', 'Altes Haus, lass dich drücken!', 31, array([1.75719557e-04, 1.63873294e-04, 8.88469658e-05, ...,\n", + " 1.04641986e-04, 2.15207096e-04, 1.46103906e-04], dtype=float32), 1.9333333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_837.wav', 'Nicht nötig.', 13, array([-1.6189892e-04, -7.7000703e-05, -5.7256933e-05, ...,\n", + " 3.5538786e-05, 4.5410670e-05, 1.9743769e-05], dtype=float32), 1.2277395833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_838.wav', 'Wir sind eine Familie.', 22, array([-1.2241138e-04, -1.5992454e-04, -2.3100211e-04, ...,\n", + " 7.3051953e-05, 5.9231312e-05, 6.9103196e-05], dtype=float32), 1.7146041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_842.wav', 'Was schlagen Sie vor?', 21, array([ 3.1590032e-05, 3.5538786e-05, 4.9359427e-05, ...,\n", + " -8.6872591e-05, -6.1205690e-05, -1.2438576e-04], dtype=float32), 1.3406458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_844.wav', 'Probier mal!', 12, array([ 1.4018077e-04, 1.6782204e-04, 2.2902773e-04, ...,\n", + " -2.1718148e-05, 4.9359427e-05, 7.3051953e-05], dtype=float32), 1.0583958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_855.wav', 'Der Schein trügt.', 18, array([ 1.3228325e-04, 4.3436296e-05, 9.8718847e-06, ...,\n", + " 7.5026328e-05, 7.8975081e-06, -3.9487541e-06], dtype=float32), 1.45353125)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_861.wav', 'Du hast mich nie geliebt.', 25, array([ 1.02667604e-04, 1.57950155e-04, 1.50052656e-04, ...,\n", + " -2.17181478e-05, 2.76412793e-05, 0.00000000e+00], dtype=float32), 1.7146041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_872.wav', 'Chili ist scharf.', 17, array([-1.1253949e-04, -8.6872591e-05, -1.1648824e-04, ...,\n", + " -1.1846262e-04, -2.5666901e-05, 1.9743770e-06], dtype=float32), 1.7710520833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_874.wav', 'Das lässt mich kalt.', 21, array([ 2.1718148e-05, 3.3564411e-05, 5.3308180e-05, ...,\n", + " -1.1846262e-05, -1.9743769e-05, -7.3051953e-05], dtype=float32), 1.5805416666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_12_FINAL/12_881.wav', 'Kinder brauchen Helden.', 23, array([-1.8361707e-04, -1.4610391e-04, -1.1846262e-04, ...,\n", + " -1.9743770e-06, -2.7641279e-05, 5.9231312e-05], dtype=float32), 1.79221875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_3.wav', 'Voll der gute Vergleich!', 24, array([-3.94875406e-06, -1.08590735e-04, -1.40180768e-04, ...,\n", + " 3.94875387e-05, 1.12539492e-04, 1.16488241e-04], dtype=float32), 1.6087604166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_14.wav', 'Gibt es das überhaupt?', 23, array([-1.0069323e-04, -1.5202703e-04, -1.8164268e-04, ...,\n", + " -6.9103196e-05, -3.9487539e-05, -6.5154440e-05], dtype=float32), 1.5523125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_35.wav', 'Bleib wachsam.', 14, array([-1.5597578e-04, -1.4807828e-04, -3.1590032e-05, ...,\n", + " -1.9743770e-06, -5.9231311e-06, 4.5410670e-05], dtype=float32), 1.2983020833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_43.wav', 'Jeder hat das Recht auf Bildung.', 32, array([ 5.72569334e-05, 1.04641986e-04, 1.89540195e-04, ...,\n", + " -7.50263280e-05, -5.92313118e-05, -1.14513867e-04], dtype=float32), 1.8204479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_64.wav', 'Nur nicht politisch werden!', 27, array([-7.8975081e-06, 8.2923834e-05, 1.3425764e-04, ...,\n", + " -8.0949460e-05, -6.3180065e-05, -1.3623202e-04], dtype=float32), 1.6652083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_67.wav', 'Wir sprechen uns später noch mal.', 34, array([ 6.8037030e-03, 6.8649091e-03, 7.0327311e-03, ...,\n", + " 5.9231311e-06, -3.1590032e-05, -1.5795016e-05], dtype=float32), 1.9051145833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_69.wav', 'Wem gehört welcher Becher?', 27, array([ 6.0810812e-04, 1.8756582e-04, 8.8846966e-05, ...,\n", + " 8.6872591e-05, -1.5795016e-05, -2.1323272e-04], dtype=float32), 1.7498854166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_74.wav', 'Was kann der Arbeiter dafür?', 29, array([ 6.71288217e-05, 7.89750775e-05, 1.02667604e-04, ...,\n", + " -5.52825586e-05, -2.56669009e-05, -1.57950162e-05], dtype=float32), 1.86278125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_94.wav', 'Wir möchten abreisen.', 22, array([-1.2636013e-04, -7.3051953e-05, -7.7000703e-05, ...,\n", + " -3.1590032e-05, -4.1461917e-05, -1.7769393e-05], dtype=float32), 1.7075520833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_97.wav', 'Halbe Fahrt voraus!', 19, array([ 5.3308180e-05, 2.7641279e-05, -1.1253949e-04, ...,\n", + " -7.8975081e-06, 1.9743769e-05, 7.3051953e-05], 
dtype=float32), 1.5382083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_101.wav', 'Gute Wahl!', 10, array([-5.3308180e-05, -4.1461917e-05, -4.3436296e-05, ...,\n", + " 1.9743769e-05, 2.5666901e-05, -1.9743769e-05], dtype=float32), 0.8608333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_111.wav', 'Ich kenne den doch gar nicht!', 29, array([ 4.9359427e-05, 3.5538786e-05, 6.9103196e-05, ...,\n", + " -2.7641279e-05, 1.3228325e-04, 7.7000703e-05], dtype=float32), 1.98978125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_135.wav', 'Die Hände auf den Rücken!', 27, array([-7.7000703e-05, -5.1333802e-05, -7.1077571e-05, ...,\n", + " -2.7641279e-05, -4.1461917e-05, 1.7769393e-05], dtype=float32), 1.6087604166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_142.wav', 'Am Deal wird nichts geändert.', 30, array([1.4412952e-04, 1.6979642e-04, 1.7571956e-04, ..., 4.5410670e-05,\n", + " 5.7256933e-05, 6.1205690e-05], dtype=float32), 1.9051145833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_148.wav', 'Das ist eine Wucht.', 19, array([-4.93594271e-05, -1.57950155e-04, -1.08590735e-04, ...,\n", + " 2.44822761e-04, 1.61898919e-04, 1.16488241e-04], dtype=float32), 1.58053125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_152.wav', 'Renitent!', 9, array([2.8233591e-04, 2.6061776e-04, 2.2902773e-04, ..., 1.5795015e-04,\n", + " 1.5202703e-04, 2.9615656e-05], dtype=float32), 1.3124166666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_161.wav', 'Ist noch Kaffee da?', 19, array([-1.46103906e-04, -6.91031964e-05, -1.02667604e-04, ...,\n", + " -7.89750775e-05, -2.17181478e-05, 7.89750811e-06], dtype=float32), 1.6369895833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_166.wav', 'Da werden Erinnerungen wach.', 28, array([ 2.1718148e-05, 1.9743769e-05, -9.8718854e-05, ...,\n", + " 8.4898209e-05, 9.2795723e-05, 1.1846262e-05], dtype=float32), 1.7922291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_175.wav', 'Suchen Sie die Herausforderung?', 31, array([-1.4215514e-04, -9.4770097e-05, -1.2833450e-04, ...,\n", + " -4.5410670e-05, -8.2923834e-05, -6.9103196e-05], dtype=float32), 1.764)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_213.wav', 'Kommt ihr mit zur Demo?', 23, array([-7.3051953e-05, -3.7513164e-05, -6.3180065e-05, ...,\n", + " 6.1205690e-05, 1.2241138e-04, 1.4807828e-04], dtype=float32), 1.7781041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_214.wav', 'Was sagt er?', 12, array([-2.6456651e-04, -2.2507898e-04, -2.0928397e-04, ...,\n", + " 4.3436296e-05, 8.0949460e-05, 1.8164268e-04], dtype=float32), 1.622875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_215.wav', 'Ich will mehr Geld!', 19, array([-8.4898209e-05, -9.4770097e-05, -1.1451387e-04, ...,\n", + " -1.1056512e-04, -8.2923834e-05, -1.1846262e-04], dtype=float32), 1.5664270833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_219.wav', 'Du bist überstimmt.', 20, array([ 1.04641986e-04, 6.91031964e-05, 2.76412793e-05, ...,\n", + " -1.02667604e-04, -2.58643384e-04, -2.05335207e-04], dtype=float32), 1.52409375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_242.wav', 'Rutsch mir doch den Buckel runter.', 34, array([-5.7256933e-05, -3.9487541e-06, 4.5410670e-05, ...,\n", + " 1.6979642e-04, 7.5026328e-05, -1.5795016e-05], dtype=float32), 
1.9615625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_280.wav', 'Und ab dafür!', 14, array([ 7.1077571e-05, 1.1056512e-04, 2.0138646e-04, ...,\n", + " -4.3436296e-05, 2.7641279e-05, -6.9103196e-05], dtype=float32), 1.2030416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_283.wav', 'Er meint den Doppeldecker.', 26, array([-8.0949460e-05, -7.7000703e-05, -2.9615656e-05, ...,\n", + " -1.2833450e-04, -8.0949460e-05, -1.8164268e-04], dtype=float32), 1.79221875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_284.wav', 'Oder spricht etwas dagegen?', 27, array([ 2.0533521e-04, 1.4215514e-04, 1.4018077e-04, ...,\n", + " -1.3820639e-04, -7.8975077e-05, -1.6584767e-04], dtype=float32), 1.7851666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_288.wav', 'Auf zu neuen Ufern!', 19, array([ 3.5736224e-04, 4.6990174e-04, 6.1798003e-04, ...,\n", + " 9.2795723e-05, 2.1718148e-05, -4.9359427e-05], dtype=float32), 1.7216666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_291.wav', 'Kostprobe gefällig?', 20, array([-1.7571956e-04, -2.3889962e-04, -1.9348894e-04, ...,\n", + " -2.5864338e-04, -1.6584767e-04, -2.9615656e-05], dtype=float32), 1.4182604166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_299.wav', 'Der Wein muss noch atmen.', 25, array([-3.5341349e-04, -2.4482276e-04, -2.2705336e-04, ...,\n", + " -6.1205690e-05, 5.9231311e-06, 4.5410670e-05], dtype=float32), 1.9333333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_300.wav', 'Das ist nichts Ernstes.', 23, array([1.5597578e-04, 1.7177081e-04, 6.1205690e-05, ..., 2.7641279e-05,\n", + " 3.1590032e-05, 4.9359427e-05], dtype=float32), 1.9121666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_311.wav', 'Nee, lass mal stecken.', 22, array([ 2.3692524e-05, 3.1590032e-05, -4.7385049e-05, ...,\n", + " 3.8105476e-04, 4.1264479e-04, 6.8313448e-04], dtype=float32), 1.79221875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_321.wav', 'Ha, das war die Rache!', 22, array([-1.6979642e-04, 3.3564411e-05, 1.1056512e-04, ...,\n", + " 1.6387329e-04, 2.7048966e-04, 2.0533521e-04], dtype=float32), 1.7922291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_326.wav', 'Eigentlich ist es logisch.', 26, array([ 7.3051953e-05, 3.9487541e-06, 2.5666901e-05, ...,\n", + " -1.5795016e-05, -7.1077571e-05, 7.8975081e-06], dtype=float32), 1.7075520833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_351.wav', 'Der Wein muss atmen können.', 28, array([2.9615656e-05, 4.3436296e-05, 8.0949460e-05, ..., 4.7385049e-05,\n", + " 1.7769393e-05, 1.9743770e-06], dtype=float32), 1.8063229166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_354.wav', 'Mieter haben Rechte.', 20, array([-1.5795016e-05, -9.8718847e-06, 3.3564411e-05, ...,\n", + " -2.1520710e-04, -1.5992454e-04, -4.5410670e-05], dtype=float32), 1.7216666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_369.wav', 'Was für eine Erkenntnis!', 25, array([-1.02667604e-04, -8.68725911e-05, -4.73850487e-05, ...,\n", + " 3.35644108e-05, 7.70007027e-05, 8.68725911e-05], dtype=float32), 1.9192291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_371.wav', 'Ich schieÃ\\x9fe mit rechts.', 24, array([ 1.3623202e-04, 7.8975077e-05, 4.3436296e-05, ...,\n", + " -1.1056512e-04, -1.1451387e-04, -7.3051953e-05], dtype=float32), 
1.86278125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_376.wav', 'Ist Scooter nicht eine Band?', 28, array([ 3.5538786e-05, 0.0000000e+00, -5.9231311e-06, ...,\n", + " -6.3180065e-05, -1.3820639e-04, -1.2043700e-04], dtype=float32), 1.9474479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_387.wav', 'Wir sind Dickhäuter.', 21, array([1.52027031e-04, 1.12539492e-04, 1.02667604e-04, ...,\n", + " 1.38206397e-05, 5.92313108e-06, 8.09494595e-05], dtype=float32), 1.5946458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_389.wav', 'Sei nicht so streng mit ihm!', 28, array([-3.5538786e-05, 1.7769393e-05, 7.1077571e-05, ...,\n", + " -1.1451387e-04, -1.6189892e-04, -2.0928397e-04], dtype=float32), 1.9474479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_390.wav', 'Na ja, was willst du machen?', 28, array([-1.3820640e-05, -4.1461917e-05, -4.5410670e-05, ...,\n", + " -9.0821341e-05, -1.1846262e-05, -4.3436296e-05], dtype=float32), 1.93334375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_419.wav', 'Die Einschläge kommen näher.', 30, array([ 7.5026328e-05, 5.5282559e-05, 1.5597578e-04, ...,\n", + " 3.1590032e-05, 2.1718148e-05, -4.7385049e-05], dtype=float32), 1.9192291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_420.wav', 'Willst du mit mir gehen?', 24, array([-1.2438576e-04, -1.9546332e-04, -1.6782204e-04, ...,\n", + " -3.7513164e-05, -1.0661636e-04, 7.7000703e-05], dtype=float32), 1.8204479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_426.wav', 'Hier bitte eine Unterschrift.', 29, array([1.6979642e-04, 1.8361707e-04, 1.7177081e-04, ..., 1.5202703e-04,\n", + " 2.1718148e-05, 0.0000000e+00], dtype=float32), 1.891)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_430.wav', 'Zum Glück nicht.', 17, array([-6.7128822e-05, -9.8718854e-05, -3.1590032e-05, ...,\n", + " -7.3051953e-05, -9.4770097e-05, -1.1056512e-04], dtype=float32), 1.1571770833333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_437.wav', 'Einfach nur top!', 16, array([-3.9684979e-04, -4.2646544e-04, -4.0277292e-04, ...,\n", + " -2.4087400e-04, -3.7513164e-05, -1.4412952e-04], dtype=float32), 1.35475)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_439.wav', 'Mach dir nichts daraus.', 23, array([ 4.2843982e-04, 5.0938927e-04, 4.6595297e-04, ...,\n", + " 7.8975081e-06, -3.1590032e-05, 2.5666901e-05], dtype=float32), 1.5523229166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_443.wav', 'Lauf doch nicht immer durchs Bild!', 34, array([-3.5538786e-04, -1.7769393e-04, -1.1451387e-04, ...,\n", + " -3.9487539e-05, -4.3436296e-05, -3.9487539e-05], dtype=float32), 1.9192291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_464.wav', 'Hände hoch!', 12, array([-2.0138646e-04, -1.3425764e-04, -8.0949460e-05, ...,\n", + " 2.0138646e-04, 1.8756582e-04, 2.6061776e-04], dtype=float32), 1.04428125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_475.wav', 'Was weiÃ\\x9f ich denn?', 19, array([-2.7641279e-05, -1.9743770e-06, 8.2923834e-05, ...,\n", + " 7.3051953e-05, 9.8718854e-05, -4.9359427e-05], dtype=float32), 1.52409375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_487.wav', 'Ich will noch nicht ins Bett!', 29, array([ 5.7256933e-05, -7.8975081e-06, 1.7769393e-05, ...,\n", + " -2.9615656e-05, -1.1846262e-05, 2.5666901e-05], dtype=float32), 
1.9615625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_499.wav', 'Darüber kann man streiten.', 27, array([ 0.00011846, 0.00020534, 0.00027839, ..., -0.00031195,\n", + " -0.00021521, -0.00017769], dtype=float32), 1.8486666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_500.wav', 'Dazu braucht man Ruhe.', 22, array([-1.1056512e-04, -1.4610391e-04, -1.3425764e-04, ...,\n", + " 6.9103196e-05, 1.6189892e-04, 2.2507898e-04], dtype=float32), 1.5523229166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_524.wav', 'Das Zeug ist wirklich gut.', 26, array([-3.9487541e-06, 3.5538786e-05, -9.8718847e-06, ...,\n", + " 1.1846262e-05, 1.9743769e-05, 9.8718847e-06], dtype=float32), 1.891)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_539.wav', 'Betäubungsgewehr geladen!', 26, array([ 3.5538786e-05, 2.3692524e-05, 0.0000000e+00, ...,\n", + " -9.2795723e-05, -1.9151457e-04, -1.8756582e-04], dtype=float32), 1.8768854166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_547.wav', 'Was wird wie geschrieben?', 25, array([-0.00012636, -0.00020336, -0.0002231 , ..., 0.00021521,\n", + " 0.00020336, 0.0001619 ], dtype=float32), 1.8768854166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_569.wav', 'Nicht schon wieder, bitte.', 26, array([ 0.00035736, 0.00043436, 0.00037316, ..., -0.00013821,\n", + " -0.00013031, -0.0001619 ], dtype=float32), 1.5523229166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_582.wav', 'Bist du blind?', 14, array([ 1.9743769e-05, -1.5795016e-05, -5.7256933e-05, ...,\n", + " 0.0000000e+00, 8.6872591e-05, 4.5410670e-05], dtype=float32), 1.2841875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_592.wav', 'Blinzeln zählt nicht.', 22, array([ 5.9231311e-06, -4.5410670e-05, -9.8718854e-05, ...,\n", + " 1.6387329e-04, 1.3820639e-04, 7.1077571e-05], dtype=float32), 1.8768958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_595.wav', 'Ja, warum denn bitte schön nicht?', 34, array([-0.00036329, -0.00033959, -0.00036131, ..., -0.00016585,\n", + " -0.00021521, -0.0001619 ], dtype=float32), 1.93334375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_597.wav', 'Mir wäre das peinlich.', 23, array([ 5.9231312e-05, 1.2241138e-04, 7.5026328e-05, ...,\n", + " -1.5795016e-05, -8.2923834e-05, -6.7128822e-05], dtype=float32), 1.8345520833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_637.wav', 'So kann es gehen.', 17, array([-7.8975081e-06, 3.9487541e-06, 4.1461917e-05, ...,\n", + " 5.1333802e-05, 1.3030888e-04, 3.9487539e-05], dtype=float32), 1.3688645833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_642.wav', 'Es bleibt spannend.', 19, array([1.8559145e-04, 1.8559145e-04, 1.5597578e-04, ..., 7.3051953e-05,\n", + " 5.7256933e-05, 1.1451387e-04], dtype=float32), 1.559375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_647.wav', 'Marek will noch mal.', 20, array([-2.44822761e-04, -1.04641986e-04, -8.09494595e-05, ...,\n", + " 1.48078281e-04, 1.81642681e-04, 2.50745885e-04], dtype=float32), 1.79221875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_661.wav', 'Ruhig Brauner!', 14, array([-1.02667604e-04, -4.73850487e-05, 8.09494595e-05, ...,\n", + " -9.87188469e-06, -8.88469658e-05, -1.12539492e-04], dtype=float32), 1.2771354166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_682.wav', 'Meine Rede!', 
11, array([5.9428747e-04, 5.0544052e-04, 2.0730958e-04, ..., 7.5026328e-05,\n", + " 6.5154440e-05, 6.5154440e-05], dtype=float32), 1.0725104166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_685.wav', 'Was versprichst du dir davon?', 29, array([-1.1846262e-05, -9.8718847e-06, 4.3436296e-05, ...,\n", + " -2.3692524e-05, 1.9743770e-06, 2.7641279e-05], dtype=float32), 1.7710520833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_693.wav', 'Ich nehme euch alle.', 20, array([-3.1590032e-05, -5.9231311e-06, -7.5026328e-05, ...,\n", + " -8.8846966e-05, -7.3051953e-05, -5.1333802e-05], dtype=float32), 1.7851666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_711.wav', 'Warum nämlich?', 15, array([ 5.9231312e-05, 5.9231312e-05, 3.1590032e-05, ...,\n", + " 1.1846262e-05, -5.9231311e-06, -7.5026328e-05], dtype=float32), 1.3688645833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_712.wav', 'Das hätte ich beinahe vergessen.', 33, array([-2.1125835e-04, -2.4482276e-04, -1.4610391e-04, ...,\n", + " 9.0821341e-05, 1.7966831e-04, 1.0661636e-04], dtype=float32), 1.9192291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_726.wav', 'Möchtest du auch einen Muffin?', 31, array([-3.9487539e-05, -2.7641279e-05, 6.3180065e-05, ...,\n", + " 1.7769393e-05, 6.7128822e-05, 7.1077571e-05], dtype=float32), 1.9545104166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_727.wav', 'Es hat nichts mit dir zu tun.', 29, array([-1.6584767e-04, -1.9348894e-04, -2.7641279e-04, ...,\n", + " 6.5154440e-05, 4.3436296e-05, 1.2438576e-04], dtype=float32), 1.7569479166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_732.wav', 'Vielleicht war ich etwas vorschnell.', 36, array([1.7177081e-04, 1.6584767e-04, 8.6872591e-05, ..., 1.9546332e-04,\n", + " 1.8954019e-04, 1.5597578e-04], dtype=float32), 1.9192291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_735.wav', 'Hatschi!', 8, array([ 1.2043700e-04, -1.7769393e-05, -1.9743770e-06, ...,\n", + " -1.1846262e-05, -4.5410670e-05, -7.7000703e-05], dtype=float32), 0.8114375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_739.wav', 'Ich bleibe dabei.', 17, array([-2.0533521e-04, -1.2438576e-04, -5.5282559e-05, ...,\n", + " 4.5410670e-05, -1.3820640e-05, -7.7000703e-05], dtype=float32), 1.2418541666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_762.wav', 'Nicht zu fassen!', 16, array([1.7414006e-03, 1.4353720e-03, 9.6547039e-04, ..., 6.3180065e-05,\n", + " 1.8164268e-04, 8.0949460e-05], dtype=float32), 1.1712916666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_773.wav', 'Gute Besserung!', 15, array([-5.1333802e-05, 0.0000000e+00, 2.1718148e-05, ...,\n", + " -1.2636013e-04, -1.9546332e-04, -1.4215514e-04], dtype=float32), 1.2771354166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_775.wav', 'Ja, so ist es wohl.', 19, array([-1.61898919e-04, 1.97437694e-05, 1.02667604e-04, ...,\n", + " -6.51544397e-05, -1.26360130e-04, -6.71288217e-05], dtype=float32), 1.44646875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_778.wav', 'Mich selbst hat das überrascht.', 32, array([7.7000703e-05, 1.1846262e-04, 1.2241138e-04, ..., 1.3820639e-04,\n", + " 9.8718847e-06, 1.3820640e-05], dtype=float32), 1.8275)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_782.wav', 'Wer kennt das nicht?', 20, 
array([-2.7641279e-05, 7.8975081e-06, -3.7513164e-05, ...,\n", + " -2.3297648e-04, -2.2902773e-04, -2.4087400e-04], dtype=float32), 1.72871875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_787.wav', 'Ich liebe diese Musik!', 22, array([-1.8361707e-04, -6.9103196e-05, -9.0821341e-05, ...,\n", + " 5.6862057e-04, 6.2587752e-04, 5.3110742e-04], dtype=float32), 1.8580729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_800.wav', 'Na endlich!', 11, array([-1.1846262e-04, -1.5202703e-04, -8.4898209e-05, ...,\n", + " 9.0821341e-05, -9.0821341e-05, -7.8975081e-06], dtype=float32), 0.91021875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_805.wav', 'Juliane gruselt sich.', 21, array([1.3425764e-04, 7.1077571e-05, 6.5154440e-05, ..., 9.8718854e-05,\n", + " 8.6872591e-05, 5.1333802e-05], dtype=float32), 1.86278125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_808.wav', 'Der andere nimmt.', 17, array([-8.6872591e-05, -1.1451387e-04, -8.2923834e-05, ...,\n", + " 2.5666901e-05, -7.3051953e-05, -7.5026328e-05], dtype=float32), 1.52409375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_814.wav', 'Wieso ich?', 10, array([-1.14513867e-04, -1.02667604e-04, -1.77693932e-04, ...,\n", + " -1.18462622e-05, 0.00000000e+00, 1.38206397e-05], dtype=float32), 0.9031666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_816.wav', 'Die Haare müssen ab.', 21, array([ 1.9546332e-04, 1.2636013e-04, 2.1125835e-04, ...,\n", + " 9.8718847e-06, -4.1461917e-05, -5.5282559e-05], dtype=float32), 1.2065729166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_13_FINAL/13_844.wav', 'Die ganze Woche steht das schon an.', 35, array([ 1.0602404e-03, 1.1017023e-03, 9.0031594e-04, ...,\n", + " -3.3564411e-05, -3.5538786e-05, 0.0000000e+00], dtype=float32), 1.8839479166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_7.wav', 'Meinen Respekt hast du.', 23, array([-8.1613541e-07, 3.6258320e-05, 5.8615900e-05, ...,\n", + " -3.0361010e-05, 4.6051988e-05, 6.1613529e-05], dtype=float32), 1.568)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_12.wav', 'Mein SchweiÃ\\x9f stinkt nicht.', 27, array([1.2758464e-03, 1.4472028e-03, 1.4819785e-03, ..., 1.1448720e-05,\n", + " 2.5002395e-05, 5.3266147e-05], dtype=float32), 1.872)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_24.wav', 'So sieht es jedenfalls aus.', 27, array([ 3.5462443e-05, -3.6511621e-05, -2.4387444e-05, ...,\n", + " 7.4399744e-05, 7.2159133e-07, 2.3660252e-05], dtype=float32), 1.808)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_31.wav', 'Es brennt lichterloh.', 21, array([-7.8527468e-05, -1.9054073e-04, -1.8275550e-04, ...,\n", + " -1.4771417e-05, 2.4868292e-05, -1.4910699e-05], dtype=float32), 1.8986666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_36.wav', 'Hat jemand Deo dabei?', 21, array([5.0298637e-05, 4.8803475e-05, 5.4532258e-05, ..., 3.4226623e-06,\n", + " 9.2322180e-06, 3.0618612e-05], dtype=float32), 1.7386666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_44.wav', 'Der Hund will raus.', 19, array([-8.2374172e-05, -8.4805586e-05, -9.4096496e-05, ...,\n", + " 2.0108973e-05, 3.4747383e-05, -3.9627314e-05], dtype=float32), 1.5413333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_46.wav', 'Nur Fliegen ist schöner.', 25, array([-2.5430196e-05, -6.4560918e-05, -6.8181558e-05, ...,\n", + " 
6.0105547e-05, 9.7991426e-05, 2.9888753e-05], dtype=float32), 1.6693333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_57.wav', 'Endlich wieder Nachschub!', 25, array([-3.0662410e-05, -3.7799236e-05, -1.0512020e-04, ...,\n", + " -1.2799338e-04, -3.7069469e-05, 3.4687200e-05], dtype=float32), 1.568)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_63.wav', \"Jetzt langt's dann aber.\", 24, array([ 1.3113129e-06, -5.7142366e-05, 3.9664551e-06, ...,\n", + " 4.8476216e-04, 4.0935431e-04, 5.0957059e-04], dtype=float32), 1.8453333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_76.wav', 'Ist hier noch ein Platz frei?', 29, array([ 4.6084756e-06, 2.1333383e-06, 1.0840034e-05, ...,\n", + " 4.7717163e-05, -4.3301993e-06, 5.9024904e-07], dtype=float32), 1.7653333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_91.wav', 'Möchten Sie durch?', 19, array([5.3242915e-05, 1.1775635e-04, 9.1564674e-05, ..., 6.9772730e-05,\n", + " 3.2825061e-05, 5.5504606e-05], dtype=float32), 1.1786666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_94.wav', 'Du hast sie angemalt.', 21, array([-8.2009647e-06, -7.8560508e-05, -1.1781590e-04, ...,\n", + " 5.8809797e-05, 3.5827401e-05, -3.8682600e-05], dtype=float32), 1.5946666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_97.wav', 'Anfassen heiÃ\\x9ft kaufen.', 23, array([ 6.7132327e-04, 6.4567651e-04, 4.5344225e-04, ...,\n", + " -2.1742040e-05, -1.2411790e-04, -3.8199389e-05], dtype=float32), 1.472)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_104.wav', 'Warum nicht lieber hier?', 24, array([-1.0701143e-05, -1.5738879e-06, 6.8153045e-06, ...,\n", + " -6.3156702e-05, -1.6941859e-04, -6.0139148e-05], dtype=float32), 1.4986666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_112.wav', 'Das war ein Abenteuer.', 22, array([ 2.6408197e-05, -6.0915321e-05, -9.1295704e-05, ...,\n", + " -5.6715970e-05, -3.1489210e-05, 1.5612791e-06], dtype=float32), 1.9466666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_127.wav', 'Das wäre fatal.', 16, array([ 4.4660061e-05, -6.5924425e-05, -5.6830704e-05, ...,\n", + " -5.5352357e-06, 3.0260082e-05, 9.7271128e-05], dtype=float32), 1.4666666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_141.wav', 'Nicht doch!', 11, array([-1.4546166e-04, -1.4626759e-04, -9.7611184e-05, ...,\n", + " 9.3360104e-05, 3.5025540e-05, -1.6926177e-06], dtype=float32), 0.928)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_146.wav', 'Heiliger Strohsack!', 19, array([-3.7175673e-04, -2.1206291e-04, -8.9090288e-05, ...,\n", + " 1.0547445e-04, 1.0614831e-04, 5.8346381e-05], dtype=float32), 1.376)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_153.wav', 'Gehen wir in die Eisdiele?', 26, array([-3.72752729e-05, -6.43968451e-05, -1.19852075e-05, ...,\n", + " 6.90084271e-05, -1.81738214e-05, -2.24471933e-05], dtype=float32), 1.4826666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_158.wav', 'Das ist halt so.', 16, array([ 2.1661433e-05, -9.2656213e-05, -2.0038491e-05, ...,\n", + " 3.4980503e-06, 8.1309692e-05, -1.6156602e-05], dtype=float32), 1.2853333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_174.wav', 'Ich habe dich noch nie gesehen.', 31, array([ 1.68298247e-05, 2.35711445e-06, -1.13152724e-04, ...,\n", + " -5.31522637e-05, 5.38938584e-05, 
1.89053408e-05], dtype=float32), 1.8773333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_179.wav', 'Das muss hart für dich sein.', 29, array([-9.2038817e-06, -9.7612574e-06, -6.3460277e-05, ...,\n", + " -5.0950723e-05, 2.0168585e-05, -1.5738755e-05], dtype=float32), 1.5893333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_183.wav', \"Packen wir's!\", 13, array([-2.2114466e-05, 6.0876686e-05, -8.3392551e-05, ...,\n", + " 3.5826326e-06, -1.4385004e-05, -5.6348257e-05], dtype=float32), 0.9546666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_194.wav', 'Wir werden siegen!', 18, array([ 1.6911860e-04, 7.4598174e-05, 1.0261347e-04, ...,\n", + " 6.5378241e-05, 3.2076507e-06, -6.6169787e-06], dtype=float32), 1.3333333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_197.wav', 'Darf ich mal bei dir abbeiÃ\\x9fen?', 31, array([-1.0340806e-05, 7.1646286e-06, 3.3313339e-05, ...,\n", + " -7.5323747e-05, -2.6892374e-07, -3.3816039e-05], dtype=float32), 1.76)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_199.wav', 'Das ging aber fix!', 18, array([-9.3143040e-05, -4.3784836e-05, -1.1206182e-04, ...,\n", + " 8.7669920e-05, 1.0557293e-05, 4.2041685e-07], dtype=float32), 1.328)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_211.wav', 'Ich habe nachgedacht.', 21, array([ 5.0232731e-05, 1.2072114e-04, 1.8210443e-04, ...,\n", + " -6.5402834e-05, -5.1763345e-05, -6.0046054e-06], dtype=float32), 1.5093333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_229.wav', 'Wir lassen uns nicht erpressen.', 31, array([1.37981799e-04, 1.52958339e-04, 1.10953624e-04, ...,\n", + " 6.50644288e-05, 8.02592767e-05, 1.01248879e-04], dtype=float32), 1.7493333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_236.wav', 'Sag du es mir.', 14, array([ 7.4462928e-06, -2.0409609e-05, -3.6314952e-05, ...,\n", + " -2.1986765e-05, -8.3042978e-05, 8.2145634e-06], dtype=float32), 1.216)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_240.wav', 'Ich vermisse ihn seit gestern.', 30, array([ 2.9365596e-04, 3.4678026e-04, 3.5397714e-04, ...,\n", + " -1.5735781e-05, -2.9272232e-05, 4.2558597e-05], dtype=float32), 1.9893333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_257.wav', 'So kannte ich sie gar nicht.', 28, array([ 4.4733344e-05, 7.7341829e-05, 1.1480036e-04, ...,\n", + " -1.8965245e-04, -1.4387793e-04, -1.2223862e-04], dtype=float32), 1.8133333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_258.wav', 'Dem Kind geht es gut.', 21, array([ 2.3389544e-05, -1.0488247e-05, 1.0429079e-05, ...,\n", + " -8.0030593e-05, -9.8967379e-05, -4.5314195e-05], dtype=float32), 1.3066666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_260.wav', 'Lasst es krachen!', 17, array([-2.1083563e-04, -8.3892046e-05, -3.2037347e-05, ...,\n", + " -6.8306355e-05, -1.3884228e-04, -6.5104126e-05], dtype=float32), 1.2)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_266.wav', 'Wie sehen Sie überhaupt aus?', 29, array([-1.0680479e-05, -1.9320854e-05, -7.0852952e-06, ...,\n", + " -1.0408241e-05, 3.3198389e-06, 2.1512881e-06], dtype=float32), 1.8826666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_281.wav', 'Damit könnte es klappen.', 25, array([-2.3432081e-05, -2.4900844e-05, -1.3450766e-04, ...,\n", + " 2.1617279e-05, 3.1534404e-05, -2.2315735e-05], 
dtype=float32), 1.488)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_307.wav', 'Tut das Husten weh?', 19, array([ 9.1145994e-06, 1.5820089e-05, 5.0116945e-05, ...,\n", + " 1.9206882e-05, -2.6969181e-05, -2.7526901e-05], dtype=float32), 1.5626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_310.wav', 'Und jetzt kräftig kurbeln!', 27, array([-8.4867512e-05, -1.3528325e-05, 6.7344299e-05, ...,\n", + " -5.5355646e-05, 3.2757125e-05, -1.3706725e-05], dtype=float32), 1.968)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_311.wav', 'Und was bekommt man geboten?', 28, array([-9.42486338e-07, -6.20736901e-05, -1.13615904e-04, ...,\n", + " 1.05647247e-04, 4.75407724e-05, 7.68981190e-05], dtype=float32), 1.9626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_313.wav', 'Nimm doch mal den Hut ab!', 25, array([-1.4411381e-06, 1.8580539e-04, 1.8933907e-04, ...,\n", + " -1.0257358e-04, -9.1900030e-05, -2.2193763e-04], dtype=float32), 1.5733333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_322.wav', 'Der ist sauber.', 15, array([1.3459381e-04, 1.1068168e-04, 1.4088971e-04, ..., 1.4206764e-04,\n", + " 1.0958829e-05, 9.0381429e-05], dtype=float32), 1.344)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_333.wav', 'Danke für nichts!', 18, array([-2.6258719e-04, -2.9124424e-04, -4.0630574e-04, ...,\n", + " 9.1923815e-05, -9.6123731e-06, 3.9555922e-05], dtype=float32), 1.408)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_351.wav', 'Hier ist sie.', 13, array([-3.23740860e-05, -1.03745086e-04, -6.84802653e-05, ...,\n", + " 6.36538107e-06, 6.47425259e-05, -2.68384956e-05], dtype=float32), 1.2693333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_354.wav', 'Ist sie international bekannt?', 30, array([ 1.5060005e-05, 5.7448578e-05, 1.3811006e-04, ...,\n", + " 6.0413648e-05, -4.7934391e-05, -1.9190535e-05], dtype=float32), 1.9626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_363.wav', 'Ich meine ja nur.', 17, array([ 5.6321147e-05, 9.9655284e-05, -8.9936962e-05, ...,\n", + " 1.1549123e-05, 3.7268135e-05, 7.3645397e-06], dtype=float32), 1.1253333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_390.wav', 'Gib mal die Seriennummer durch.', 31, array([ 7.2849958e-05, 9.1718932e-05, 5.6555116e-05, ...,\n", + " -2.9702240e-05, 3.8465154e-05, 2.2035034e-05], dtype=float32), 1.9466666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_399.wav', 'Steht das Wasser auf dem Herd?', 30, array([6.5801214e-05, 1.3084775e-04, 8.1372353e-05, ..., 6.8494905e-05,\n", + " 2.1234882e-06, 2.7409065e-05], dtype=float32), 1.84)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_401.wav', 'Oh ja!', 6, array([ 2.2632883e-05, -2.7574149e-05, 2.7717488e-05, ...,\n", + " 2.9032512e-07, 1.7548422e-05, -1.3465881e-05], dtype=float32), 0.7146666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_409.wav', 'Ja oder nein?', 13, array([ 3.4988134e-05, -6.8858870e-05, -8.5955844e-06, ...,\n", + " -4.4800227e-06, 1.7184280e-05, 3.7901282e-05], dtype=float32), 1.4346666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_411.wav', 'Ist doch Jacke wie Hose.', 24, array([ 1.1507938e-04, 5.0565839e-05, -2.7287895e-05, ...,\n", + " 3.7775626e-05, -1.4040452e-05, 1.4159415e-06], dtype=float32), 1.664)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_413.wav', 'Ich habe es nie gelernt.', 24, array([ 2.58978853e-05, 6.50478396e-05, -1.03702390e-04, ...,\n", + " 8.01785427e-05, 3.00699157e-05, -1.05522995e-04], dtype=float32), 1.776)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_429.wav', 'Nicht schon wieder eine Razzia!', 31, array([-5.1378167e-05, -2.5352152e-05, -3.2764001e-05, ...,\n", + " 2.1145966e-05, 5.4651609e-05, -7.9359561e-05], dtype=float32), 1.888)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_431.wav', 'Niemand will es gewesen sein.', 29, array([6.13634029e-06, 1.00043821e-04, 1.26646410e-04, ...,\n", + " 4.00160025e-05, 6.57281998e-05, 1.20079676e-04], dtype=float32), 1.6426666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_443.wav', 'Ihr seid doch bloÃ\\x9f neidisch.', 29, array([ 4.71922749e-06, -1.42986255e-05, 4.10590292e-05, ...,\n", + " -1.13690789e-04, -4.82848300e-05, 3.64537264e-05], dtype=float32), 1.7493333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_453.wav', 'Lesen lohnt sich.', 17, array([-1.1143904e-04, -9.7466742e-05, -1.4505965e-04, ...,\n", + " -1.1429377e-04, -8.0892445e-05, -8.6921274e-05], dtype=float32), 1.6426666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_458.wav', 'Oder er wurde dabei gestört.', 29, array([-1.8823694e-05, -3.1060394e-05, -9.3846960e-05, ...,\n", + " -1.2105788e-05, -3.4755056e-05, 3.5802004e-05], dtype=float32), 1.84)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_459.wav', 'Die Seele baumeln lassen.', 25, array([-4.6934008e-05, -1.4115409e-04, -1.9004452e-04, ...,\n", + " -4.7015623e-05, -2.2894224e-07, -4.3300730e-05], dtype=float32), 1.6746666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_468.wav', 'Der Nächste, bitte!', 20, array([ 8.1093880e-05, 2.9958397e-05, -3.9947310e-05, ...,\n", + " 6.6704742e-05, 1.2609754e-04, 1.1871241e-04], dtype=float32), 1.3386666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_469.wav', 'Wird schon schiefgehen.', 23, array([-1.8012641e-05, -6.1548446e-05, -1.2534855e-04, ...,\n", + " -2.9845067e-05, 3.1653948e-05, 1.2874776e-04], dtype=float32), 1.552)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_476.wav', 'Keine falsche Bewegung!', 23, array([-1.3065083e-04, -1.9577878e-04, -9.6719399e-05, ...,\n", + " 9.7838973e-05, -1.6546634e-05, 3.1119489e-05], dtype=float32), 1.7706666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_484.wav', 'Danach geht es ins Bett.', 24, array([1.4125947e-04, 1.4533960e-04, 1.3352933e-04, ..., 4.6569412e-06,\n", + " 8.5400243e-06, 1.0347654e-04], dtype=float32), 1.8826666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_495.wav', 'Vorwärts immer, rückwarts nimmer!', 35, array([ 9.8868964e-05, 1.4638813e-04, 8.2029030e-05, ...,\n", + " 3.1947344e-05, -3.3244356e-05, -8.5653497e-05], dtype=float32), 1.5893333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_508.wav', 'Ein Spanngurt ist gerissen.', 27, array([-1.3210842e-05, 5.2183852e-05, 1.1509426e-05, ...,\n", + " -6.6147322e-06, -1.3790486e-05, 4.0188141e-05], dtype=float32), 1.952)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_511.wav', 'Das musst du gerade sagen!', 26, array([ 8.16162283e-05, 1.48853534e-04, 1.20252385e-04, ...,\n", + " -2.43115683e-05, 3.36854064e-05, -3.11621625e-05], dtype=float32), 
1.9893333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_517.wav', 'Lösen Sie das Captcha!', 23, array([-3.2288870e-05, 5.6598521e-05, 4.2188087e-05, ...,\n", + " 7.7064447e-05, -4.7475376e-05, 4.4163811e-05], dtype=float32), 1.6746666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_520.wav', 'Ihr werdet schon sehen.', 23, array([-6.5363100e-05, 4.7253379e-05, 5.9942446e-05, ...,\n", + " 3.2326661e-05, 8.2957842e-05, 7.4098658e-05], dtype=float32), 1.7973333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_521.wav', 'Ich erkläre es dir.', 20, array([ 5.3491673e-05, -1.2072490e-05, 3.4197161e-05, ...,\n", + " -3.4515979e-05, -5.6132449e-05, 1.3709931e-04], dtype=float32), 1.5093333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_525.wav', 'Hau rein!', 9, array([ 2.57931824e-04, 2.11816674e-04, 1.78339556e-04, ...,\n", + " 7.76832676e-05, 1.51795175e-05, -4.37384588e-05], dtype=float32), 1.104)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_526.wav', 'Tief durchatmen!', 16, array([-2.6787920e-05, -3.2204316e-05, -5.5490927e-05, ...,\n", + " 2.2508255e-05, 5.4639313e-05, 1.8989524e-05], dtype=float32), 1.5253333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_538.wav', 'Und was bringt das?', 19, array([-5.9224880e-05, -4.4477289e-05, 3.8521583e-05, ...,\n", + " 9.5605545e-05, 1.2830349e-06, 1.5070126e-05], dtype=float32), 1.6213333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_541.wav', 'Karnickelfangschlag?', 20, array([3.9227842e-05, 3.2782922e-05, 4.6346566e-05, ..., 1.3389443e-05,\n", + " 3.6067817e-05, 6.0468155e-05], dtype=float32), 1.728)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_562.wav', 'Ist ja mega!', 12, array([-1.1508126e-04, -1.5385580e-04, -1.8046032e-04, ...,\n", + " -4.1180385e-05, 2.7804810e-05, -9.9901524e-07], dtype=float32), 0.992)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_570.wav', 'Jasmin, du bist dran.', 21, array([-6.0017886e-05, 3.1120195e-05, 1.0854354e-04, ...,\n", + " -2.5416332e-06, 4.4546370e-05, -4.6334655e-05], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_579.wav', 'Läuft es separat ab?', 21, array([ 2.2939121e-05, 2.0304271e-05, 4.7305216e-06, ...,\n", + " -4.0958774e-05, 8.3991254e-06, -4.0800154e-05], dtype=float32), 1.7813333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_594.wav', 'Ich zitiere!', 12, array([ 7.3269119e-05, 4.1316580e-06, -7.5483302e-05, ...,\n", + " 4.5700057e-05, 1.0702889e-06, 1.2143076e-05], dtype=float32), 1.2853333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_597.wav', 'Die Karten sind ja markiert!', 28, array([-7.7787427e-06, 1.3373171e-05, 1.1130486e-04, ...,\n", + " -3.4429740e-05, -9.2525712e-05, -3.0399795e-05], dtype=float32), 1.8613333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_600.wav', 'Weniger ist manchmal mehr.', 26, array([-3.2105188e-05, -1.2411436e-04, -1.7373836e-04, ...,\n", + " 1.9536817e-05, 4.0033923e-05, -4.9835093e-05], dtype=float32), 1.6693333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_610.wav', 'Zur Hölle mit ihm!', 19, array([ 4.1287938e-05, -1.5668693e-05, -4.7829257e-05, ...,\n", + " 1.2091287e-04, 3.0301053e-05, 5.0707073e-05], dtype=float32), 1.28)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_622.wav', 
'Sonst kommt die Polizei.', 24, array([ 1.33967542e-05, -2.86651575e-05, 1.20430150e-05, ...,\n", + " -4.97728324e-05, -9.77511445e-05, -1.07504595e-04], dtype=float32), 1.9786666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_623.wav', 'Papa fährt immer schneller.', 28, array([-4.1551000e-05, 1.8333099e-05, -4.5995697e-05, ...,\n", + " 7.4864365e-05, -2.8456698e-05, -3.1763777e-06], dtype=float32), 1.7653333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_627.wav', 'Das Problem kenne ich.', 22, array([-1.6575548e-06, -6.4681786e-05, -2.4183499e-05, ...,\n", + " -6.1924133e-05, 4.0877181e-05, -4.8742072e-06], dtype=float32), 1.3973333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_646.wav', 'Gerade jetzt wird es spannend.', 30, array([-7.0382644e-05, -2.6976499e-05, -8.4537001e-05, ...,\n", + " 1.9848225e-05, 1.8570287e-05, 1.1454727e-04], dtype=float32), 1.952)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_650.wav', 'Pass mal auf!', 13, array([ 8.8038476e-05, 6.2287538e-05, 8.6767104e-05, ...,\n", + " -4.7867183e-05, 1.7106903e-06, -2.8001863e-05], dtype=float32), 1.0773333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_653.wav', 'Führe mich nicht in Versuchung!', 32, array([ 1.5389375e-04, 8.4856605e-05, 1.1764471e-04, ...,\n", + " -4.1702488e-06, 4.8200640e-05, 3.7042355e-05], dtype=float32), 1.8986666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_658.wav', 'Dabei soll es bleiben.', 22, array([-6.8817273e-05, -1.4116750e-04, -2.5068663e-04, ...,\n", + " 3.3109423e-05, -1.2034771e-05, 5.3297503e-05], dtype=float32), 1.3653333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_677.wav', 'Ich denke nicht daran.', 22, array([ 2.7965652e-06, -8.1217448e-05, -1.5171595e-04, ...,\n", + " -6.0021226e-05, 5.8105360e-07, -2.3721210e-05], dtype=float32), 1.472)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_702.wav', 'Sieh zu, dass du Land gewinnst!', 31, array([-3.9686485e-05, -4.1371659e-05, -5.1444043e-05, ...,\n", + " -6.5746033e-05, -6.9277223e-05, -3.0258396e-05], dtype=float32), 1.9466666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_705.wav', 'Was sagt uns das?', 17, array([-1.11950721e-04, -1.12432775e-04, -1.54395209e-04, ...,\n", + " 1.18786911e-05, -6.98161457e-05, -2.93514750e-05], dtype=float32), 1.6426666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_715.wav', 'Von nichts komm nichts.', 23, array([ 5.0694278e-05, -1.0824220e-04, -7.8278521e-05, ...,\n", + " 5.2878531e-05, 3.1005864e-05, 2.5896241e-05], dtype=float32), 1.984)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_718.wav', 'Warum auch?', 11, array([ 2.5824769e-05, 7.0119269e-05, 3.9937982e-05, ...,\n", + " 1.3905319e-05, -2.6308078e-05, -5.1800267e-05], dtype=float32), 0.9493333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_721.wav', 'Wo wohne ich noch mal?', 22, array([ 1.1702570e-04, 1.8368529e-04, 1.5237987e-04, ...,\n", + " -3.3846823e-05, -4.2944125e-06, 2.2590933e-05], dtype=float32), 1.6)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_725.wav', 'Zum Wohl!', 9, array([-2.1576473e-06, 2.8079157e-05, -2.9355248e-05, ...,\n", + " -2.9330091e-05, -3.0764484e-05, -1.3724362e-05], dtype=float32), 0.7466666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_735.wav', 'Wie geht es dir?', 16, array([ 
3.0780422e-05, -4.9582297e-05, -8.5829226e-05, ...,\n", + " 2.1407772e-05, -4.8474238e-05, -4.5784309e-05], dtype=float32), 1.232)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_746.wav', 'Einmal drücken reicht.', 23, array([-4.4286557e-05, -5.6155724e-05, -5.2055671e-05, ...,\n", + " -5.5887984e-05, 1.7236773e-05, 9.8498596e-05], dtype=float32), 1.4373333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_761.wav', 'Ersatz muss her.', 16, array([ 8.3686442e-05, 9.1279635e-06, -8.3661522e-05, ...,\n", + " 3.3542208e-05, 9.7035401e-05, -4.7421363e-05], dtype=float32), 1.3333333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_769.wav', 'Kennen Sie diesen Eisbären?', 28, array([ 1.8226114e-04, 1.1602399e-04, 8.7942906e-05, ...,\n", + " -3.1415253e-05, 6.8828485e-05, 2.8598015e-05], dtype=float32), 1.7173333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_774.wav', 'Du tüdelst wohl!', 17, array([4.2244592e-05, 4.7479767e-05, 4.4327684e-05, ..., 2.9398587e-05,\n", + " 1.3265206e-04, 9.8947305e-05], dtype=float32), 1.312)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_776.wav', 'Einen Versuch ist es wert.', 26, array([-2.0919964e-05, -8.0129103e-05, -7.8644814e-05, ...,\n", + " 3.4572986e-05, 8.1091166e-05, 5.6626621e-05], dtype=float32), 1.984)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_779.wav', 'Kruzifix noch mal!', 18, array([ 5.9276794e-05, 7.1346542e-05, 1.3115312e-05, ...,\n", + " -7.0933937e-05, 2.6771322e-05, 3.3997876e-05], dtype=float32), 1.792)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_781.wav', 'Sind die echt?', 14, array([-3.2039690e-05, -4.8189206e-05, -9.0187306e-05, ...,\n", + " 2.1210299e-05, 9.5539394e-07, -6.0049209e-05], dtype=float32), 1.1946666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_789.wav', 'Wie war euer Jahrgangstreffen?', 30, array([ 9.86098894e-05, 1.05807514e-04, 1.31781504e-04, ...,\n", + " -6.47349443e-05, 5.55652514e-06, 6.68639914e-05], dtype=float32), 1.9946666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_796.wav', 'Langt das?', 10, array([-2.58835917e-05, -1.11602596e-04, -2.00994928e-05, ...,\n", + " 3.40378210e-05, 4.15314862e-05, -2.47353237e-05], dtype=float32), 1.2586666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_851.wav', 'Nein, das gehört so.', 21, array([4.30460314e-05, 1.00948644e-04, 1.14135793e-04, ...,\n", + " 2.88395531e-04, 1.62498865e-04, 8.75307087e-05], dtype=float32), 1.7493333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_852.wav', 'Stellen Sie Blickkontakt her.', 29, array([-2.3877754e-05, -3.1883523e-05, -1.3378897e-04, ...,\n", + " -3.8810729e-05, 4.3067663e-05, 3.8920269e-05], dtype=float32), 1.9946666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_858.wav', 'Also echt jetzt!', 16, array([ 1.62354499e-05, 4.22473058e-05, -1.46273105e-05, ...,\n", + " -2.93930316e-05, 5.34094252e-05, 7.98595574e-05], dtype=float32), 1.216)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_7.wav', 'Ich glaube nicht.', 17, array([-1.0143876e-05, -3.8619244e-05, 8.2748767e-05, ...,\n", + " -9.9806406e-05, -4.3946784e-05, 6.9558562e-05], dtype=float32), 1.1946666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_18.wav', 'Hier ist es sicherer.', 21, array([ 4.6870970e-05, 9.9823235e-05, -4.0877108e-05, ...,\n", + " 
-1.4616339e-05, 7.3614872e-05, 1.0970575e-04], dtype=float32), 1.7706666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_49.wav', 'Ja ja, als ob!', 14, array([ 5.3198488e-05, 1.8346685e-04, -2.1753046e-06, ...,\n", + " 1.7834389e-05, 5.3522737e-05, 8.4725587e-05], dtype=float32), 1.7706666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_59.wav', 'Geh, such deine Schwester!', 26, array([ 9.13840049e-05, 1.68439132e-04, 3.04173911e-04, ...,\n", + " -8.56241095e-05, -1.02150196e-04, 8.91289255e-06], dtype=float32), 1.9626666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_94.wav', 'Gib mir meinen Becher wieder!', 29, array([-2.1092707e-04, -2.3195105e-04, -2.0152969e-04, ...,\n", + " 8.9153917e-05, -2.4260396e-06, 5.9283586e-05], dtype=float32), 1.8453333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_100.wav', 'Das führt doch zu nichts.', 26, array([-1.0273771e-04, -8.6229462e-05, -1.2574486e-04, ...,\n", + " 2.4963025e-05, 4.4582037e-05, 4.7964921e-05], dtype=float32), 1.9733333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_105.wav', 'Wo denn?', 8, array([-4.0845240e-05, 1.0149255e-04, 5.9910049e-05, ...,\n", + " -3.8421931e-05, 2.8110459e-05, 1.7339922e-05], dtype=float32), 0.9493333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_106.wav', 'Du sitzt hinten.', 16, array([ 1.1350374e-04, 1.3197908e-04, 5.9344729e-05, ...,\n", + " -1.6409816e-04, -7.1399249e-05, -4.2459251e-05], dtype=float32), 1.44)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_112.wav', 'Das kann ich nicht.', 19, array([-9.4199102e-05, -3.3980414e-05, 9.0330948e-05, ...,\n", + " 1.1509175e-04, 2.2319029e-05, 5.1328014e-05], dtype=float32), 1.4186666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_139.wav', 'Das hat sie gelernt.', 20, array([ 1.5456244e-04, 3.1872053e-04, 3.7880472e-04, ...,\n", + " -8.6764321e-06, -1.7240205e-05, -5.7155878e-05], dtype=float32), 1.4826666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_153.wav', 'Nicht alle Teenager sind so.', 28, array([7.9220721e-05, 5.8759109e-05, 1.1493213e-04, ..., 6.8786328e-05,\n", + " 1.5815135e-04, 8.5130850e-05], dtype=float32), 1.9946666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_156.wav', 'Frische Seeluft macht gesund.', 29, array([ 1.8124521e-04, 1.7306159e-04, 5.9669415e-05, ...,\n", + " 4.9480139e-05, 1.2296322e-04, -5.5897519e-05], dtype=float32), 1.984)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_164.wav', 'Gönn dir!', 10, array([ 5.2993961e-05, 2.8179937e-05, 7.8242076e-05, ...,\n", + " -4.9057824e-05, 1.8003910e-05, 8.8817593e-05], dtype=float32), 0.9386666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_176.wav', 'Sag ich doch!', 13, array([ 4.2398951e-05, 5.6847359e-05, 7.0788061e-05, ...,\n", + " -3.2739328e-05, 9.7135853e-05, 6.0795941e-05], dtype=float32), 1.2373333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_177.wav', 'Das darf doch nicht wahr sein.', 30, array([-5.1426803e-05, -5.0517308e-05, 4.6803252e-05, ...,\n", + " -8.1146150e-05, 2.9068062e-05, 7.5193479e-05], dtype=float32), 1.8773333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_199.wav', 'Jetzt sind wir quitt.', 21, array([-2.4918138e-05, 8.0159109e-05, -7.1328832e-05, ...,\n", + " -2.1099215e-04, -3.0862509e-05, -3.5725458e-05], 
dtype=float32), 1.664)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_207.wav', 'Eben ging das noch.', 19, array([-5.0324921e-05, 1.3549793e-04, -3.3347860e-05, ...,\n", + " 9.8024408e-05, 1.5384333e-04, 1.5966935e-04], dtype=float32), 1.53875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_208.wav', 'Bug oder Feature?', 17, array([-3.7243055e-06, 6.9413843e-05, 7.5392752e-05, ...,\n", + " 5.2070121e-05, 2.8219682e-05, 8.4193009e-05], dtype=float32), 1.8053020833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_274.wav', 'Wir brauchen mehr davon!', 24, array([-2.0753406e-04, -1.9484414e-05, -2.8117347e-04, ...,\n", + " 1.2726737e-04, 2.6360145e-04, 2.9073044e-04], dtype=float32), 1.91434375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_280.wav', 'Lass uns raus gehen.', 20, array([ 1.03469618e-04, 1.97744346e-04, -7.93442814e-06, ...,\n", + " 8.44921742e-05, 2.30915975e-05, -1.33781205e-05], dtype=float32), 1.5508645833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_286.wav', 'SchluÃ\\x9f mit lustig.', 19, array([ 2.99623178e-04, 2.43378381e-04, 1.65333462e-04, ...,\n", + " -2.71533063e-05, 7.85075972e-05, -1.17198346e-04], dtype=float32), 1.9264583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_302.wav', 'Woher nehmt ihr eure Bildung?', 29, array([1.7700881e-04, 2.1893253e-04, 1.3036304e-04, ..., 1.3868474e-04,\n", + " 1.0062666e-04, 8.4173589e-05], dtype=float32), 1.9749270833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_308.wav', 'Du fährst, ich schieÃ\\x9fe!', 25, array([1.5563566e-04, 1.4856170e-04, 2.2446582e-04, ..., 6.8505600e-05,\n", + " 2.0769508e-04, 1.1925176e-04], dtype=float32), 1.99915625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_328.wav', 'Wirkt die Betäubung noch?', 26, array([-8.7537330e-05, -3.0825776e-04, -2.8424736e-04, ...,\n", + " 1.1261477e-04, 2.0012977e-04, 1.0000553e-04], dtype=float32), 1.9022291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_385.wav', 'Es kann nur einen geben!', 24, array([-1.8947560e-04, -2.3450297e-05, -1.2145152e-04, ...,\n", + " -6.9378242e-05, -1.1301338e-04, -2.5457976e-04], dtype=float32), 1.8901145833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_400.wav', 'Wer weiÃ\\x9f es?', 13, array([ 8.2401210e-05, 1.2261249e-05, 1.3193028e-04, ...,\n", + " -9.9374527e-05, -2.4473227e-05, 7.3499345e-05], dtype=float32), 1.49028125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_406.wav', 'Tja, das ist Pech.', 18, array([2.4313416e-04, 4.7331341e-05, 1.6022228e-04, ..., 3.0806483e-04,\n", + " 2.9170502e-04, 3.0395557e-04], dtype=float32), 1.7810729166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_412.wav', 'Alles muss raus.', 16, array([2.3146431e-04, 2.1641712e-04, 1.4716707e-04, ..., 1.4341300e-04,\n", + " 3.7975753e-06, 9.1287213e-05], dtype=float32), 1.708375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_415.wav', 'Stell die Heizung höher.', 25, array([-3.96930409e-05, 1.02812344e-04, 1.21250734e-04, ...,\n", + " -3.47016321e-05, -2.01824150e-04, -9.76954325e-05], dtype=float32), 1.74471875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_418.wav', 'Etwa über mich?', 16, array([-0.00020996, -0.00011494, -0.00010331, ..., -0.00017556,\n", + " -0.00020319, -0.00027111], dtype=float32), 1.7689479166666666)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_421.wav', 'Das ist natürlich bitter.', 26, array([-3.3627803e-04, -2.5203897e-04, -2.3072124e-04, ...,\n", + " 4.6018063e-06, 1.7239379e-05, 4.0267703e-05], dtype=float32), 1.878)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_440.wav', 'Hier knicken.', 13, array([-0.000481 , -0.00023708, -0.00018911, ..., -0.00022185,\n", + " -0.00025873, -0.00026997], dtype=float32), 1.30853125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_464.wav', 'Alles Lügen!', 13, array([-0.00027017, -0.00016623, -0.00022159, ..., -0.00033337,\n", + " -0.00044782, -0.00022404], dtype=float32), 1.4175833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_465.wav', 'Alles oder nichts!', 18, array([2.8375158e-05, 6.5034241e-05, 9.6457785e-05, ..., 1.0699107e-04,\n", + " 9.6596435e-05, 1.2572719e-04], dtype=float32), 1.7931875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_467.wav', 'Warum bleibst du stehen?', 24, array([-1.4808709e-04, -1.8631479e-04, -1.2836477e-04, ...,\n", + " -6.0794730e-05, -1.5104183e-05, -2.5347929e-04], dtype=float32), 1.91434375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_473.wav', 'Zumindest ein bisschen.', 23, array([-0.00024013, -0.00025727, -0.00025987, ..., -0.00023257,\n", + " -0.00033333, -0.00025996], dtype=float32), 1.5993229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_474.wav', 'Sprich mir nach!', 16, array([-1.7584162e-04, -1.6248986e-04, -8.6785782e-05, ...,\n", + " 3.5318243e-04, 3.7314874e-04, 3.2366288e-04], dtype=float32), 1.4175833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_500.wav', 'Sehr witzig!', 12, array([ 7.5077987e-05, 1.1926649e-04, 1.8323194e-04, ...,\n", + " -3.8680941e-04, -3.2216642e-04, -3.3234112e-04], dtype=float32), 1.39334375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_502.wav', 'Achtung, Achtung!', 17, array([-4.0950408e-04, -2.9606355e-04, -3.7786187e-04, ...,\n", + " -2.1742952e-05, 3.0543149e-05, 8.8129680e-05], dtype=float32), 1.5145104166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_512.wav', 'Wo bitte schön steht das?', 26, array([ 2.2647387e-04, 1.4740237e-04, 1.2381608e-04, ...,\n", + " -1.1670060e-04, -5.8438465e-05, -5.2704141e-05], dtype=float32), 1.9264583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_513.wav', 'SchlieÃ\\x9fen Sie bitte die Luke.', 30, array([ 0.00012086, 0.00019177, 0.00012352, ..., -0.00014259,\n", + " -0.00024671, -0.00014045], dtype=float32), 1.69625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_549.wav', 'Ich hasse meinen Wecker.', 24, array([-1.9575720e-05, -1.5009989e-04, -1.6873972e-04, ...,\n", + " -6.5268898e-05, -1.8595096e-04, -1.7330179e-04], dtype=float32), 1.6235625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_576.wav', 'Nicht so laut!', 14, array([-1.6541444e-04, -8.3816949e-06, -1.0135791e-04, ...,\n", + " 3.1510697e-04, 4.1878404e-04, 3.6531710e-04], dtype=float32), 1.4539375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_595.wav', 'Ich tu mein Bestes.', 19, array([ 8.3501960e-05, 1.7197721e-04, 2.2250456e-04, ...,\n", + " -1.2569079e-04, -1.3276993e-04, -2.5823418e-04], dtype=float32), 1.74471875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_597.wav', 'Alle guten Dinge sind drei.', 27, array([-1.1909505e-05, -8.7172106e-05, -1.2401433e-04, ...,\n", + 
" -1.4987224e-04, -1.3219267e-05, -7.9211000e-05], dtype=float32), 1.7568333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_607.wav', 'Welche Vase?', 12, array([-1.8119848e-04, -2.7736003e-04, -1.8833524e-04, ...,\n", + " 5.6385907e-05, 1.3869893e-04, 1.9968288e-04], dtype=float32), 1.4539375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_651.wav', 'Zeig mal deine Muckis.', 22, array([-0.00038406, -0.0003124 , -0.00026326, ..., 0.00032153,\n", + " 0.00029355, 0.0004676 ], dtype=float32), 1.82953125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_661.wav', 'Wir sind umzingelt.', 19, array([ 4.0317194e-05, 2.1714004e-04, 1.5210512e-04, ...,\n", + " 1.1821459e-04, 9.8579549e-05, -3.1008281e-06], dtype=float32), 1.57509375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_669.wav', 'Du zitterst ja!', 15, array([-0.0002655 , -0.00018808, -0.00023504, ..., 0.00028222,\n", + " 0.00025013, 0.00041103], dtype=float32), 1.2116145833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_687.wav', 'Ob sie schon Hunger haben?', 26, array([-7.1925861e-05, 1.8567745e-06, -5.7103756e-05, ...,\n", + " 2.6770154e-04, 7.6355340e-05, 2.2662200e-05], dtype=float32), 1.8416458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_699.wav', 'Das wird schon wieder.', 22, array([-2.5816666e-04, -8.4095438e-05, -1.2401373e-05, ...,\n", + " -1.9085000e-04, -2.3972438e-04, -1.5835713e-04], dtype=float32), 1.69625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_700.wav', 'Köpfe runter!', 14, array([ 8.14295272e-05, 1.14302085e-04, 1.28549975e-04, ...,\n", + " -2.10746948e-04, -2.65351351e-04, -3.40027531e-04], dtype=float32), 1.32065625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_712.wav', 'Sie sollten sich schämen!', 26, array([ 2.6346499e-04, 9.5443167e-05, 1.6159609e-04, ...,\n", + " -2.1241463e-04, -1.5395934e-04, -8.9938527e-05], dtype=float32), 1.6477916666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_732.wav', 'Schwund ist überall.', 21, array([-0.00039054, -0.00025168, -0.00026237, ..., 0.00020222,\n", + " 0.0002156 , 0.00019633], dtype=float32), 1.6356666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_734.wav', 'Schon fertig?', 13, array([-6.8748363e-06, 5.9082297e-05, -3.8726441e-05, ...,\n", + " -1.3909466e-04, -2.0350730e-04, -1.0977411e-04], dtype=float32), 1.2237291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_743.wav', 'Musst du da reinschieÃ\\x9fen?', 26, array([0.00038867, 0.00026221, 0.0002308 , ..., 0.0001513 , 0.00017203,\n", + " 0.00012958], dtype=float32), 1.91434375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_764.wav', 'Das wäre mir neu.', 18, array([-1.6335897e-04, -1.3920359e-04, -6.9949492e-05, ...,\n", + " 3.2939854e-05, 3.5769459e-05, -3.7220154e-05], dtype=float32), 1.91434375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_773.wav', 'Mission gescheitert!', 20, array([ 5.22215014e-05, 1.20894714e-04, 1.96668057e-04, ...,\n", + " -2.58956774e-04, -1.39872835e-04, -1.39142721e-04], dtype=float32), 1.82953125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_782.wav', 'Dir kann geholfen werden.', 25, array([-5.4091932e-05, -2.9271763e-05, 1.2364880e-04, ...,\n", + " -1.4125406e-04, -2.3545137e-04, -2.5170582e-04], dtype=float32), 1.7810625)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_790.wav', 'Vertraust du mir blind?', 23, array([-1.3496955e-04, -4.5282133e-05, 1.7263924e-04, ...,\n", + " 1.0330205e-05, -1.9022463e-04, -1.3715150e-04], dtype=float32), 1.6235520833333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_793.wav', 'Wie stellen Sie sich das vor?', 29, array([5.7090012e-05, 9.3246163e-05, 1.4314597e-04, ..., 1.8600497e-04,\n", + " 1.2342732e-04, 2.2610810e-04], dtype=float32), 1.8901145833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_802.wav', 'Ist es nicht so?', 16, array([ 8.5881460e-05, 1.9039282e-04, 2.1635044e-04, ...,\n", + " 1.2600829e-04, 4.5968747e-05, -1.7667595e-05], dtype=float32), 1.4297083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_808.wav', 'Willst du mich umbringen?', 25, array([3.4704231e-04, 2.2213293e-04, 1.1007244e-04, ..., 1.0426929e-05,\n", + " 6.0499657e-05, 4.4495686e-05], dtype=float32), 1.7326041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_817.wav', 'Da ist die Tür!', 16, array([ 0.00014472, 0.00027025, 0.00040617, ..., -0.0001791 ,\n", + " -0.00014576, -0.00017543], dtype=float32), 1.7931770833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_15_FINAL/15_820.wav', 'Ihr könnt nicht fliehen.', 25, array([ 3.3208958e-04, 1.8373384e-04, 2.8849186e-05, ...,\n", + " -1.9994991e-04, -4.2732576e-05, 5.1437601e-05], dtype=float32), 1.9870416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_8.wav', 'Erkennst du ihn wieder?', 23, array([-7.1132112e-05, 1.8191178e-04, 2.2640963e-04, ...,\n", + " -1.5948209e-04, -4.8810096e-05, -7.1736489e-05], dtype=float32), 1.69625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_29.wav', 'Du bist so ein Charmeur!', 24, array([ 8.7156383e-05, -7.5441625e-05, -8.7413508e-05, ...,\n", + " -3.7287452e-04, -2.6756592e-04, -2.7199855e-04], dtype=float32), 1.99915625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_30.wav', 'Das Wochenende war sehr schón.', 31, array([0.00010696, 0.00019241, 0.00022398, ..., 0.00018996, 0.00018264,\n", + " 0.00021606], dtype=float32), 1.9506875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_44.wav', 'Na, GroÃ\\x9fer!', 12, array([2.9556373e-05, 1.2606342e-04, 2.0366564e-04, ..., 1.8486078e-04,\n", + " 1.2593277e-04, 1.4429759e-04], dtype=float32), 1.7083645833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_53.wav', 'Lassen wir das!', 15, array([-1.0015550e-03, -1.1123064e-03, -1.0633026e-03, ...,\n", + " -8.7814760e-06, 1.5665671e-04, 2.6885752e-04], dtype=float32), 1.2843020833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_57.wav', 'Es kann jeden treffen.', 22, array([-1.2930187e-04, -3.5622310e-05, 1.1325534e-04, ...,\n", + " 2.8466255e-05, -1.7107872e-04, -3.0454184e-04], dtype=float32), 1.8295416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_61.wav', 'Das dürfen Sie nicht!', 22, array([7.9696401e-05, 2.5238540e-05, 2.6919068e-05, ..., 2.0004300e-04,\n", + " 1.7159608e-04, 2.0384404e-04], dtype=float32), 1.9264583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_69.wav', 'Oder muss man die einfrieren?', 29, array([ 2.3387831e-04, 2.0287969e-04, 2.3305746e-04, ...,\n", + " -2.0109433e-04, -1.5938835e-04, 1.9864538e-06], dtype=float32), 1.9628020833333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_75.wav', 'Nur für 
einen Tag.', 19, array([ 1.1010072e-04, 7.5059768e-05, 1.5811465e-04, ...,\n", + " -1.6034159e-04, 6.0707155e-09, -5.6600587e-05], dtype=float32), 1.6114375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_85.wav', 'Ã\\x9cberall lauern Fallen.', 23, array([ 7.3672440e-05, 1.1084337e-04, 5.4723707e-05, ...,\n", + " -3.4976221e-04, -1.6772485e-04, -2.3993225e-04], dtype=float32), 1.7931875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_89.wav', 'Schön, dass du da warst.', 25, array([-1.5644990e-04, -1.6062504e-04, -1.5125731e-04, ...,\n", + " -1.4215022e-04, -3.6906120e-05, -1.1689674e-04], dtype=float32), 1.9264583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_104.wav', 'Bleib, wo du bist!', 18, array([-6.7565779e-05, -2.1604590e-06, 1.6737869e-04, ...,\n", + " -5.7721576e-05, -1.0027820e-05, -4.2661872e-05], dtype=float32), 1.9022291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_113.wav', 'Erwischt!', 9, array([ 8.53675301e-05, -1.39195807e-04, -1.12849986e-04, ...,\n", + " -6.49508947e-05, -6.88307264e-05, -2.25101539e-04], dtype=float32), 1.06621875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_119.wav', 'Dann lass es liegen.', 20, array([-1.4928725e-04, 2.6696865e-05, -8.1158723e-05, ...,\n", + " 1.0134692e-04, 7.8540448e-05, -3.6887606e-05], dtype=float32), 1.7326041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_125.wav', \"Mach 'ne Fliege!\", 16, array([-4.2133670e-05, -4.1710995e-05, -9.2710856e-05, ...,\n", + " 6.1932937e-05, 5.9015078e-05, 1.2269965e-04], dtype=float32), 1.2964166666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_132.wav', 'Bei wem?', 8, array([ 3.2050626e-05, -1.8802975e-05, 6.2951531e-06, ...,\n", + " 3.6152644e-05, 5.9682232e-05, 1.7530509e-04], dtype=float32), 1.2479583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_164.wav', 'Einer reicht.', 13, array([-2.7248763e-05, -1.8096254e-04, -6.8749752e-05, ...,\n", + " -5.8457640e-06, -6.7224923e-06, -2.3102484e-05], dtype=float32), 1.5145104166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_167.wav', 'Komm du mal hier her!', 21, array([-1.5554769e-04, 3.7891259e-06, 4.7066398e-05, ...,\n", + " -2.3639805e-05, 2.0737947e-05, 4.9913662e-05], dtype=float32), 1.9022291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_187.wav', 'Die Dämmerung bricht an.', 25, array([-1.3250955e-06, 2.9998255e-05, 7.1768205e-05, ...,\n", + " 8.1620914e-05, -2.1789680e-05, -2.0792277e-04], dtype=float32), 1.82953125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_209.wav', 'Ich will nur mal gucken!', 24, array([ 2.0323754e-05, -4.8527312e-05, 7.2813884e-05, ...,\n", + " 5.2759733e-05, -1.1957207e-05, -4.8190817e-05], dtype=float32), 1.74471875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_219.wav', 'Weg damit!', 10, array([-3.5334317e-05, -1.1389485e-04, -8.2927254e-05, ...,\n", + " 9.7957432e-05, 2.3025880e-04, 8.2124512e-05], dtype=float32), 0.9935208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_228.wav', 'Der kleine Tümpel?', 19, array([-1.11052366e-04, -1.58417228e-04, 1.12858004e-04, ...,\n", + " -7.95750821e-05, 1.25983679e-05, 3.80305464e-05], dtype=float32), 1.7810729166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_261.wav', 'Danke vielmals!', 15, array([-1.0886707e-04, -2.8663597e-04, -2.3995244e-04, ...,\n", 
+ " -9.9315126e-05, -1.0518550e-04, 8.9717643e-05], dtype=float32), 1.9870416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_262.wav', 'Greifen Sie zu!', 15, array([ 1.7402765e-04, 5.4675427e-05, -2.1378555e-05, ...,\n", + " -3.0241612e-05, -1.6510607e-05, 1.9972253e-05], dtype=float32), 1.5145208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_270.wav', 'Sein Telefon ist verwanzt.', 26, array([ 1.7227376e-05, 1.3369569e-04, 2.4036576e-04, ...,\n", + " -1.2941840e-04, -7.5057469e-05, 4.6790487e-05], dtype=float32), 1.9628125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_280.wav', 'Das kann ich nicht gutheiÃ\\x9fen.', 30, array([-2.46016367e-04, -1.46169405e-04, -1.01338104e-04, ...,\n", + " -2.12353916e-06, -4.44089965e-05, 4.71521271e-05], dtype=float32), 1.9385833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_284.wav', 'Nicht im Geringsten.', 20, array([ 6.9896785e-05, 4.9565413e-05, -5.2745858e-05, ...,\n", + " 4.9021692e-05, 4.1371193e-05, -4.8943206e-05], dtype=float32), 1.9870416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_290.wav', 'Magst du Sushi?', 15, array([ 4.5281922e-06, -7.7349956e-05, -9.6111427e-05, ...,\n", + " 6.7945102e-06, 5.8605725e-05, -4.7947608e-05], dtype=float32), 1.5993229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_294.wav', 'Ich hätte warten sollen.', 25, array([ 1.3215349e-05, 2.5886698e-05, 9.2406181e-06, ...,\n", + " 3.3613727e-05, -7.8962090e-05, 3.6267331e-05], dtype=float32), 1.5872083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_300.wav', 'Vielen Dank für den Hinweis.', 29, array([ 1.21899466e-04, 1.44075893e-04, 1.06153289e-04, ...,\n", + " 1.94679887e-04, -1.92022708e-05, -8.20819259e-05], dtype=float32), 1.7326041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_306.wav', 'Her mit dem Zaster!', 19, array([ 9.2032889e-05, -7.7123856e-05, 1.8857928e-06, ...,\n", + " 5.2272848e-05, 1.2463648e-04, -4.8004724e-05], dtype=float32), 1.90221875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_324.wav', 'Moment mal!', 11, array([-9.6486969e-05, -8.5642452e-05, 1.3726056e-05, ...,\n", + " 3.6692109e-05, 2.4882122e-05, -5.4820499e-05], dtype=float32), 1.2721875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_332.wav', 'Lass es sein.', 13, array([-6.2611114e-05, 8.5420121e-05, 1.1575574e-06, ...,\n", + " 1.8824625e-05, 2.6618896e-05, 5.5844474e-05], dtype=float32), 1.4296979166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_334.wav', 'Wir kommen bei ihnen vorbei.', 28, array([ 3.2983281e-04, 5.1712846e-05, -1.6061698e-04, ...,\n", + " 8.1734914e-05, -2.4410097e-05, 1.5291570e-04], dtype=float32), 1.99915625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_350.wav', 'Es fehlt nicht mehr viel.', 25, array([ 3.4581102e-05, -3.2403619e-05, 6.4223466e-05, ...,\n", + " -4.1160070e-05, 2.3247363e-05, 1.4443042e-04], dtype=float32), 1.8537708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_351.wav', 'So entdeckt man Fehler.', 23, array([-1.5804017e-05, -7.4724383e-05, 1.1222719e-05, ...,\n", + " 4.8898462e-05, 3.6749603e-05, -3.3983986e-05], dtype=float32), 1.708375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_356.wav', 'Salve!', 6, array([-1.3447071e-04, 5.3523188e-05, 8.5717998e-05, ...,\n", + " 4.4749868e-05, -5.5393906e-05, 1.0913220e-05], 
dtype=float32), 1.0056354166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_374.wav', 'Angeber!', 8, array([ 4.8461781e-05, 1.5487269e-04, 9.4685849e-05, ...,\n", + " -1.4769383e-04, -1.8351457e-05, -1.8764535e-05], dtype=float32), 1.1146875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_376.wav', 'Wer duckt sich da weg?', 22, array([ 8.9025889e-05, 2.0651723e-04, -8.5901571e-05, ...,\n", + " 8.8148518e-05, 1.3756873e-04, 1.2379605e-04], dtype=float32), 1.6356770833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_16_FINAL/16_396.wav', 'Schlaf schön.', 14, array([ 1.56835347e-04, 2.10795515e-05, 6.19498023e-05, ...,\n", + " -4.29836909e-05, -1.05784595e-04, 4.19116714e-06], dtype=float32), 1.1631458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_22.wav', 'Eindeutig nein.', 15, array([ 1.7040480e-06, -2.4771760e-05, 2.0656289e-05, ...,\n", + " -4.9639581e-05, -6.2789266e-05, -6.4883228e-05], dtype=float32), 1.885)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_26.wav', 'Sie nickte.', 11, array([ 1.3571361e-04, 1.4810856e-04, 1.6444136e-04, ...,\n", + " -8.4158353e-05, -6.3345658e-05, -6.6707049e-05], dtype=float32), 1.3556458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_34.wav', 'Von wegen Rabenmutter!', 22, array([ 3.9614300e-05, 3.0917236e-05, 1.4100775e-05, ...,\n", + " 3.3664131e-05, -3.6520869e-05, -5.6032222e-05], dtype=float32), 1.7171666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_35.wav', 'Woran liegt das?', 16, array([ 1.03992148e-04, 8.12370126e-05, 1.09074477e-04, ...,\n", + " 5.26995609e-05, -2.80062741e-05, -1.37729285e-05], dtype=float32), 1.4718541666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_68.wav', 'Das ist schlecht fürs Geschäft.', 33, array([ 3.3433552e-04, 4.7215325e-04, 3.9332887e-04, ...,\n", + " -3.3291522e-05, -7.3073941e-05, -6.2871884e-05], dtype=float32), 1.975375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_70.wav', 'Das überlege ich mir noch.', 27, array([-2.7926452e-04, -4.7232458e-04, -4.5905521e-04, ...,\n", + " -5.0401053e-05, -7.6573851e-05, -1.9868592e-05], dtype=float32), 1.7688125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_71.wav', 'Oder er behält ihn.', 20, array([-1.95691573e-05, 1.42454119e-05, -1.12822245e-05, ...,\n", + " 6.27729896e-05, 6.37731318e-06, 7.33020497e-05], dtype=float32), 1.7429791666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_76.wav', 'Viel Vergnügen!', 16, array([-1.4641756e-04, -2.3690579e-04, -2.0291538e-04, ...,\n", + " -6.4597036e-05, -3.9596798e-05, -5.9615340e-05], dtype=float32), 1.2975625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_82.wav', 'Sehr schön erklärt.', 21, array([ 2.8675116e-04, 4.4330378e-04, 3.8435950e-04, ...,\n", + " 9.6497361e-06, 3.9338884e-06, -3.2766162e-05], dtype=float32), 1.5880625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_104.wav', 'Du bist nicht fair.', 19, array([ 8.8045017e-05, 1.6864744e-04, 1.3682757e-04, ...,\n", + " -9.7046555e-05, -1.7125324e-04, -8.5282416e-05], dtype=float32), 1.5105833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_119.wav', 'Die in Pulverform.', 18, array([ 2.4460370e-04, 3.1504090e-04, 2.7829470e-04, ...,\n", + " 4.2608990e-05, -1.4765085e-05, -1.9486206e-05], dtype=float32), 1.7817083333333332)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_141.wav', 'Hier machen wir einen Schnitt.', 30, array([ 1.7057944e-04, 2.5346698e-04, 2.6541931e-04, ...,\n", + " -5.5827346e-05, -5.5662604e-05, -4.4612902e-05], dtype=float32), 1.6074166666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_161.wav', 'Ganz und gar nicht!', 19, array([-1.09976885e-04, -1.06159037e-04, -9.40025275e-05, ...,\n", + " 5.14636531e-06, -7.86106375e-06, -1.38592986e-05], dtype=float32), 1.6655208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_162.wav', 'Du schnarchst.', 14, array([ 9.8031691e-05, 1.0789345e-04, 1.0408189e-04, ...,\n", + " 2.8527650e-06, 1.8555178e-05, -1.7833072e-05], dtype=float32), 1.2911041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_175.wav', 'Die in der zweiten Reihe.', 25, array([-7.3503418e-04, -1.0330433e-03, -9.6690352e-04, ...,\n", + " 1.0845856e-04, 9.5128053e-05, 1.3117766e-04], dtype=float32), 1.6590625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_185.wav', 'Viertel nach neun.', 18, array([-0.00025316, -0.00042128, -0.00041847, ..., 0.00012852,\n", + " 0.00010431, 0.00010823], dtype=float32), 1.794625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_214.wav', 'Der hat gut reden!', 18, array([-3.1999915e-04, -4.8188152e-04, -4.3341244e-04, ...,\n", + " 7.4479853e-05, 1.0070496e-04, 9.9988407e-05], dtype=float32), 1.5105833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_230.wav', \"Was gibt's denn?\", 16, array([ 5.3894956e-04, 7.5124111e-04, 6.7086820e-04, ...,\n", + " -4.5820485e-05, -5.6413213e-05, -2.6967809e-05], dtype=float32), 1.6719791666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_233.wav', 'Fahren Sie bitte schneller.', 27, array([ 4.8254660e-04, 7.2192971e-04, 6.9296843e-04, ...,\n", + " -3.3325745e-05, 1.5315249e-05, 3.6237780e-05], dtype=float32), 1.8204583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_243.wav', 'Keine Ursache!', 14, array([-2.2485174e-04, -3.4637007e-04, -2.4121681e-04, ...,\n", + " -5.3969983e-05, -1.2160699e-05, -7.7381246e-06], dtype=float32), 1.2588333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_245.wav', 'Ich glaube, es geht los.', 24, array([-7.1201968e-05, -1.1457155e-04, -8.4426887e-05, ...,\n", + " 6.9712019e-05, 1.4468420e-05, 7.2575887e-05], dtype=float32), 1.8398125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_266.wav', 'Nicht sehr lange.', 17, array([-4.25688399e-04, -5.72862104e-04, -4.54291090e-04, ...,\n", + " 1.15649045e-05, -7.03342175e-06, 9.42021143e-06], dtype=float32), 1.34275)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_268.wav', 'Fahr vorsichtig!', 16, array([ 4.7249952e-05, 6.6685003e-05, 8.1438702e-05, ...,\n", + " -7.7767829e-05, -4.4103599e-05, -3.7954072e-05], dtype=float32), 1.4589375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_278.wav', 'Dann aber mit Fanfare.', 22, array([ 3.0009818e-04, 5.0011458e-04, 4.6210812e-04, ...,\n", + " -1.1364354e-04, -6.8604320e-05, -7.7980949e-05], dtype=float32), 1.975375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_280.wav', 'Habe ich doch!', 14, array([-0.00020733, -0.00032169, -0.00027389, ..., -0.00016337,\n", + " -0.00020018, -0.00013392], dtype=float32), 1.233)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_311.wav', 'Regnet es drauÃ\\x9fen?', 19, array([ 
7.4172771e-04, 9.9716149e-04, 9.2472351e-04, ...,\n", + " -1.0082213e-04, -1.2750884e-04, -8.1061611e-05], dtype=float32), 1.8721041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_316.wav', 'Das ist eine lange Geschichte.', 30, array([ 2.24287433e-04, 1.93610642e-04, 1.16401294e-04, ...,\n", + " -1.26720734e-05, 2.45919164e-05, 5.34417049e-05], dtype=float32), 1.975375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_342.wav', 'Welches Rad?', 12, array([4.8121543e-05, 4.5563989e-05, 2.0835963e-05, ..., 3.9729348e-05,\n", + " 3.7650581e-05, 3.3080996e-05], dtype=float32), 1.6397083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_367.wav', 'Nichts zu danken!', 17, array([-3.7277619e-05, -4.9238584e-05, -7.1403243e-05, ...,\n", + " -3.3696429e-05, 3.0755796e-06, -3.4646106e-05], dtype=float32), 1.5105833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_376.wav', 'Bitte noch einmal!', 18, array([ 0.00030744, 0.00045197, 0.00040104, ..., -0.00010688,\n", + " -0.00015312, -0.00013671], dtype=float32), 1.6267916666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_378.wav', 'Immer in diese Richtung!', 24, array([-4.00174977e-05, 3.99114288e-05, 1.92868242e-06, ...,\n", + " -1.14653565e-04, -7.80621922e-05, -3.85478379e-05], dtype=float32), 1.975375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_380.wav', 'Gefällt dir die Farbe rot?', 27, array([ 3.0378540e-04, 4.3046009e-04, 3.8851614e-04, ...,\n", + " 2.1661093e-05, -2.6406319e-06, -1.4788465e-05], dtype=float32), 1.9495625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_393.wav', 'Gib mir mal die Knarre.', 23, array([-4.2177099e-04, -5.7642709e-04, -4.9111585e-04, ...,\n", + " -8.2453604e-05, -1.6147584e-05, -7.7549201e-05], dtype=float32), 1.8075416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_410.wav', 'Einer geht noch.', 16, array([ 1.5394030e-04, 2.1875372e-04, 2.0080485e-04, ...,\n", + " -5.6117624e-05, -5.4007505e-05, -1.0993878e-05], dtype=float32), 1.3169166666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_418.wav', 'Setzen Sie sich!', 16, array([-2.4320874e-05, -3.2748470e-05, -2.0884192e-05, ...,\n", + " 6.3705025e-05, 1.3131127e-04, 7.7887824e-05], dtype=float32), 1.4976875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_423.wav', 'Es geht ja nicht anders.', 24, array([-2.8326374e-04, -3.8826582e-04, -3.3924755e-04, ...,\n", + " -6.5105633e-05, -6.3098807e-05, -8.6217944e-05], dtype=float32), 1.9495625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_451.wav', 'Kopf hoch!', 10, array([-1.68412909e-04, -1.73757420e-04, -1.55442147e-04, ...,\n", + " -8.23870796e-05, -1.52904060e-04, -1.15380506e-04], dtype=float32), 1.3685625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_458.wav', 'Endlich geht es weiter!', 23, array([-1.5851386e-03, 2.6465717e-03, 5.2893539e-03, ...,\n", + " 3.7729558e-06, 3.5277069e-05, -3.3997758e-06], dtype=float32), 1.70425)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_460.wav', 'Schluss mit der Raserei!', 24, array([ 2.88873882e-04, 4.21624194e-04, 4.14417736e-04, ...,\n", + " -1.55140384e-04, -1.10896304e-04, -8.53765887e-05], dtype=float32), 1.6526041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_470.wav', 'Der Kerl ist dufte.', 19, array([-7.1235799e-04, -1.0205780e-03, -9.3518692e-04, ...,\n", + " 
-1.5202124e-04, -1.4708345e-04, -7.5756463e-05], dtype=float32), 1.9624791666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_475.wav', 'Nicht hauen!', 12, array([-3.9160714e-04, -5.2419491e-04, -4.0734027e-04, ...,\n", + " -3.5391298e-05, -1.9862022e-05, -4.2017076e-05], dtype=float32), 1.613875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_489.wav', 'Davon ist auszugehen.', 21, array([-3.8098158e-05, -1.8117305e-05, -9.3444651e-05, ...,\n", + " -4.6410118e-05, -5.4083579e-05, -6.1566949e-05], dtype=float32), 1.8591875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_490.wav', 'Ã\\x84ndern wir das!', 16, array([-3.1039584e-04, -5.0911406e-04, -3.8009215e-04, ...,\n", + " -1.0358073e-05, 2.3063526e-06, -3.8572562e-05], dtype=float32), 1.4847708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_537.wav', 'Viel hilft viel.', 16, array([-7.0020906e-04, -9.7590697e-04, -8.4232452e-04, ...,\n", + " 2.6748754e-05, 3.9436178e-05, -1.5542679e-05], dtype=float32), 1.5105833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_561.wav', 'Voll abgezogen!', 15, array([0.0009425 , 0.00131688, 0.00114336, ..., 0.00054311, 0.00053014,\n", + " 0.00059172], dtype=float32), 1.304)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_569.wav', 'Was ist Liebe?', 14, array([ 1.7119097e-04, 2.4002905e-04, 1.4028113e-04, ...,\n", + " -1.1777198e-05, 4.3154125e-07, 1.1548834e-05], dtype=float32), 1.3814791666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_613.wav', 'Bitte wenden Sie.', 17, array([-3.2704248e-04, -4.7001868e-04, -4.4811977e-04, ...,\n", + " 3.9887604e-05, 4.2593329e-05, -1.2635800e-05], dtype=float32), 1.5751458333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_618.wav', \"Das spar'n wir uns jetzt.\", 25, array([-2.1968294e-04, -2.5130660e-04, -2.3470224e-04, ...,\n", + " 4.6512545e-05, 1.0168094e-04, 8.9639499e-05], dtype=float32), 1.9882916666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_625.wav', 'Doppelt hält besser.', 21, array([1.0242802e-04, 1.4422902e-04, 1.5433358e-04, ..., 1.8618872e-05,\n", + " 2.6657151e-05, 8.0320706e-06], dtype=float32), 1.3169166666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_645.wav', 'Die beiden werden bestimmt schwer.', 34, array([ 2.2716461e-04, 3.7214963e-04, 3.4043228e-04, ...,\n", + " -7.0017355e-05, -5.9255068e-05, -4.9753759e-05], dtype=float32), 1.975375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_650.wav', 'Dort steppt der Bär.', 21, array([3.4111144e-05, 3.9471229e-06, 1.3943841e-05, ..., 2.8798750e-04,\n", + " 3.4306329e-04, 2.3900693e-04], dtype=float32), 1.8204583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_658.wav', 'Offensichtlich nicht.', 21, array([-6.4643849e-05, -1.4843927e-04, -1.9616121e-04, ...,\n", + " 6.0427959e-05, 2.8176541e-05, 1.0887287e-04], dtype=float32), 1.8462708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_690.wav', 'Ganz sicher.', 12, array([-2.1219352e-04, -2.6916104e-04, -2.2152660e-04, ...,\n", + " -8.3999286e-05, -3.9927592e-05, -1.1057539e-04], dtype=float32), 1.4460208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_734.wav', 'Bin ich ein Mensch?', 19, array([7.3225739e-05, 8.4229468e-05, 6.0397753e-05, ..., 1.4409037e-04,\n", + " 5.4610227e-05, 2.8432718e-05], dtype=float32), 1.8721041666666667)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_737.wav', 'Wohl bekommts.', 14, array([3.8544985e-04, 5.4862851e-04, 4.7615587e-04, ..., 1.1308860e-05,\n", + " 1.5347328e-05, 3.9165672e-05], dtype=float32), 1.5880416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_747.wav', 'So eine will ich auch.', 22, array([-0.00023466, -0.00034498, -0.00035786, ..., 0.00014857,\n", + " 0.00014895, 0.00018565], dtype=float32), 1.9366458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_751.wav', 'Guter Rat ist teuer.', 20, array([-5.8482616e-05, -9.7700511e-05, -1.4372601e-04, ...,\n", + " 8.8569423e-06, 4.0626270e-05, -2.2441051e-05], dtype=float32), 1.885)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_785.wav', 'Noch Fragen?', 12, array([-5.4637530e-05, -9.7329437e-05, -6.5443433e-05, ...,\n", + " 1.3526098e-05, -1.7008400e-05, -2.3395469e-05], dtype=float32), 1.542875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_790.wav', 'Wie tut man das?', 16, array([ 1.4673925e-06, -7.7766053e-06, 2.2737586e-05, ...,\n", + " -2.2371720e-04, -2.6603421e-04, -2.1358255e-04], dtype=float32), 1.6009583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_794.wav', 'Ein billiger Trick.', 19, array([ 2.5642010e-05, 5.9448335e-05, 7.9047953e-05, ...,\n", + " -1.4398795e-05, -2.7475784e-05, -3.0437941e-05], dtype=float32), 1.4912291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_1.wav', 'Woher soll ich sie kennen?', 26, array([-7.0670452e-05, -2.2751169e-04, 3.6274258e-05, ...,\n", + " 6.2137144e-05, -1.4069478e-04, 1.5651318e-04], dtype=float32), 1.865875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_4.wav', 'Wo soll es hingehen?', 20, array([-4.3062766e-05, 6.9635964e-05, 2.7200711e-05, ...,\n", + " 7.3389943e-05, 9.7813630e-05, 7.5023250e-05], dtype=float32), 1.526625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_5.wav', 'Ã\\x84tsch!', 7, array([-5.5343335e-05, -1.0754153e-04, 1.0636374e-04, ...,\n", + " -2.3993191e-04, -1.1428300e-04, -1.9587418e-04], dtype=float32), 1.2964166666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_24.wav', 'Den mit dem Hund.', 17, array([-2.9083933e-05, -4.5743432e-06, -1.1590145e-04, ...,\n", + " -6.4060594e-05, -5.3663935e-06, -6.9100148e-05], dtype=float32), 1.5993333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_42.wav', 'Sieh mal schnell nach!', 22, array([ 1.1187345e-05, -2.7101662e-04, -4.0457569e-05, ...,\n", + " 3.8478026e-04, 1.3185160e-04, 1.9724603e-04], dtype=float32), 1.8295416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_43.wav', 'Zieh Leine!', 11, array([ 1.4326118e-04, 1.4733149e-04, 2.3666536e-04, ...,\n", + " -8.1889502e-06, -2.2159066e-04, -1.0789347e-04], dtype=float32), 1.4115416666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_55.wav', 'Meistens eher nicht.', 20, array([-1.2833555e-04, -4.5777502e-04, -2.9062675e-04, ...,\n", + " 3.7303114e-05, 1.7912805e-04, 9.5502997e-05], dtype=float32), 1.968875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_89.wav', \"Komm Du 'mal hier her!\", 22, array([-2.4593925e-05, -1.5391175e-04, -3.5177112e-05, ...,\n", + " -2.8054212e-05, -8.3761133e-06, -3.3427594e-05], dtype=float32), 1.8840625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_102.wav', 'Keine halben Sachen.', 20, array([-0.00010684, -0.00018609, 
-0.00036967, ..., 0.00014736,\n", + " 0.00013171, 0.00024668], dtype=float32), 1.7810625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_172.wav', 'Zugriff!', 8, array([-1.7191633e-04, -2.6422989e-04, -1.8970467e-04, ...,\n", + " 1.4085844e-05, -6.5849432e-05, -1.2668260e-04], dtype=float32), 1.3994166666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_187.wav', 'Viel SpaÃ\\x9f dabei!', 17, array([ 1.26890489e-04, 4.78873408e-04, 3.36644967e-04, ...,\n", + " -1.14277915e-04, 1.15070587e-04, -4.50995103e-05], dtype=float32), 1.8234791666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_189.wav', 'Krass, oder?', 12, array([ 5.2673863e-06, -3.2042470e-05, 6.3032145e-05, ...,\n", + " 4.9474946e-04, 4.8315409e-04, 3.1584961e-04], dtype=float32), 1.4054791666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_195.wav', 'Hat es geregnet?', 16, array([-1.5500655e-05, -2.4765370e-05, -1.3535780e-04, ...,\n", + " 1.0218658e-04, -7.7519953e-06, 8.1419450e-05], dtype=float32), 1.4539375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_200.wav', 'Die Maschine läuft heiÃ\\x9f.', 26, array([-2.9662095e-05, -1.3571499e-04, -4.9048278e-05, ...,\n", + " 4.0860983e-04, 3.3467117e-04, 2.8713685e-04], dtype=float32), 1.890125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_232.wav', 'Friss ScheiÃ\\x9fe!', 15, array([-3.1462018e-04, -4.3994249e-04, -1.8601233e-04, ...,\n", + " 1.2004693e-04, 6.4006366e-05, 1.4038217e-04], dtype=float32), 1.4539375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_244.wav', 'Wasser marsch!', 14, array([-5.2966818e-05, -1.3111959e-06, -2.3756520e-05, ...,\n", + " -4.7830945e-05, -1.0526282e-04, 5.8504538e-05], dtype=float32), 1.7326041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_255.wav', 'Ein Halbstarker!', 16, array([8.9307170e-05, 4.3556365e-04, 5.6998286e-04, ..., 7.5660588e-05,\n", + " 1.9409347e-04, 7.0803260e-05], dtype=float32), 1.6841458333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_266.wav', 'Stell den Fernseher ab!', 23, array([-6.1183324e-05, -1.4089182e-04, -1.1948228e-04, ...,\n", + " -1.9923897e-04, -1.7150129e-04, -2.3940729e-04], dtype=float32), 1.7568333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_283.wav', 'Kopf oder Zahl?', 15, array([-1.3454640e-04, -4.2848653e-05, -2.3553993e-04, ...,\n", + " -6.3240882e-06, -5.2672884e-05, -1.6467538e-04], dtype=float32), 1.550875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_300.wav', 'Jetzt verstanden?', 17, array([-1.7701862e-04, 3.8073360e-06, 6.6768931e-05, ...,\n", + " 1.5635177e-04, 2.4184166e-04, 2.0308173e-04], dtype=float32), 1.4781666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_305.wav', 'Jeder nur eine Kugel!', 21, array([ 1.0893906e-04, 3.5140860e-05, -8.6934997e-05, ...,\n", + " -1.5842280e-04, -7.1798029e-05, -2.1561602e-05], dtype=float32), 1.9385833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_327.wav', 'Leider nein.', 12, array([ 0.00020925, 0.00038225, 0.00030209, ..., -0.0002834 ,\n", + " -0.00024066, -0.000164 ], dtype=float32), 1.0783333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_345.wav', 'Irgendwas ist anders.', 21, array([-0.00026138, -0.00012453, -0.00022627, ..., -0.00013074,\n", + " -0.00016786, -0.00011485], dtype=float32), 1.9991666666666668)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_371.wav', 'Ein bisschen.', 13, array([ 6.19466882e-05, 1.81855256e-04, 2.56517378e-04, ...,\n", + " 9.61327260e-06, 2.89863237e-05, -1.07233864e-04], dtype=float32), 1.211625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_375.wav', 'Wir sitzen fest.', 16, array([-1.0016920e-05, -5.8360743e-05, -5.3961080e-06, ...,\n", + " -1.4201126e-07, -8.1081940e-05, -1.3083526e-05], dtype=float32), 1.5326875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_424.wav', 'Hat er nicht gesagt.', 20, array([1.4237937e-04, 3.5439979e-04, 4.2451522e-04, ..., 2.9889754e-05,\n", + " 4.3811939e-05, 5.3790947e-05], dtype=float32), 1.6235625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_428.wav', 'Mach das ordentlich!', 20, array([-2.2249017e-04, -3.4736985e-04, -2.4423364e-04, ...,\n", + " -4.8614937e-05, 1.6576583e-04, 1.4303469e-04], dtype=float32), 1.8537708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_430.wav', 'Zurück zum Thema.', 18, array([0.00021488, 0.00048195, 0.00039156, ..., 0.00020808, 0.0002092 ,\n", + " 0.00014525], dtype=float32), 1.7023125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_433.wav', 'Auf mich hört sowieso niemand.', 31, array([-4.4078504e-05, 1.2701395e-04, 1.5659831e-04, ...,\n", + " 3.2407068e-05, 1.3882274e-04, 3.7292095e-06], dtype=float32), 1.8295416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_434.wav', 'Weiter so!', 10, array([ 7.8434707e-05, 2.3782127e-04, 2.0620505e-04, ...,\n", + " -3.0293613e-06, 7.3579846e-05, 2.1203174e-04], dtype=float32), 0.9571666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_437.wav', 'Darüber herrscht Konsens.', 26, array([ 2.0915098e-04, 1.6340525e-04, -4.4762099e-05, ...,\n", + " 3.0228088e-05, -5.6204710e-05, 1.4202976e-04], dtype=float32), 1.9991666666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_440.wav', 'Was ist so schlimm daran?', 25, array([ 5.3402138e-05, -1.7599798e-04, 1.1747003e-04, ...,\n", + " 1.8220089e-04, 2.5114723e-04, 2.9130204e-04], dtype=float32), 1.8053125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_447.wav', 'Brüllend komisch!', 18, array([2.5463186e-04, 3.0699532e-04, 1.7949699e-04, ..., 1.3379526e-04,\n", + " 6.0049937e-05, 4.3341170e-05], dtype=float32), 1.4660416666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_462.wav', 'Sehr einfallsreich!', 19, array([ 1.6625131e-04, 1.4804797e-04, 6.6010347e-05, ...,\n", + " -2.8519373e-05, -1.5197203e-05, -1.2542940e-04], dtype=float32), 1.6356875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_468.wav', 'Einer fehlt hier noch.', 22, array([0.00021585, 0.0002281 , 0.00034421, ..., 0.00031288, 0.00025684,\n", + " 0.00014126], dtype=float32), 1.5448125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_505.wav', 'Wollen wir?', 11, array([-0.000173 , -0.00033364, -0.00012876, ..., 0.00012244,\n", + " 0.00032144, 0.00014797], dtype=float32), 1.029875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_514.wav', 'Und wir singen zusammen!', 24, array([ 0.00028886, 0.00030063, 0.00037314, ..., -0.00011231,\n", + " -0.00017524, -0.00013442], dtype=float32), 1.890125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_541.wav', 'Hier, fang!', 11, array([-9.6539197e-06, 9.8090044e-05, 7.5100412e-05, ...,\n", + " 1.8568999e-04, 3.1414471e-04, 1.8397035e-04], 
dtype=float32), 1.5326875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_548.wav', 'Ignorieren Sie die Warnung nicht.', 33, array([-7.0703449e-05, -2.1341034e-06, -2.6835096e-05, ...,\n", + " 1.0051801e-04, 6.5389222e-06, 2.1216212e-04], dtype=float32), 1.9809791666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_558.wav', 'Nirgends ist ein Ausweg.', 24, array([ 0.0002789 , 0.00025432, 0.00026059, ..., -0.0001307 ,\n", + " -0.00015316, -0.0001602 ], dtype=float32), 1.8295416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_563.wav', 'Er will schmusen.', 17, array([ 8.3865758e-05, -4.9942853e-05, 5.9117421e-05, ...,\n", + " -4.3004973e-05, -1.0278272e-04, -8.9234527e-05], dtype=float32), 1.3146041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_590.wav', 'GrüÃ\\x9f Gott!', 12, array([ 3.8686660e-05, 8.4167688e-05, -4.1444160e-05, ...,\n", + " 7.9078745e-05, 6.6285960e-05, 7.3457479e-05], dtype=float32), 1.1328541666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_591.wav', 'Doch, muss es.', 14, array([-2.7301039e-05, -9.8715776e-05, -5.1679286e-05, ...,\n", + " 1.7480909e-04, 8.8697474e-05, -8.7942986e-05], dtype=float32), 1.5811666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_594.wav', 'Höchst verdächtig!', 20, array([-1.5668831e-04, -1.4814634e-05, 1.2133464e-06, ...,\n", + " 1.1010807e-04, 5.0348262e-05, 3.2340708e-05], dtype=float32), 1.4781666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_597.wav', 'Hat man das schon mal gehört?', 30, array([ 2.9468083e-05, 8.5217485e-05, -1.1223685e-05, ...,\n", + " 1.4429020e-05, -3.4263925e-05, -1.7569761e-04], dtype=float32), 1.708375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_628.wav', 'Habt noch ein wenig Geduld.', 27, array([-3.1721203e-05, -6.6361958e-05, 6.2947714e-05, ...,\n", + " 9.7825025e-05, -1.3173591e-04, 3.6439680e-05], dtype=float32), 1.9143541666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_630.wav', 'Och, Schnucki!', 14, array([-3.5877591e-05, -2.9018152e-04, -1.0041694e-04, ...,\n", + " 1.2557590e-04, 8.4289997e-05, 1.0620209e-04], dtype=float32), 1.4781666666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_654.wav', 'Womit kann ich dienen?', 22, array([-3.79744961e-05, 4.58159229e-05, 5.13197449e-07, ...,\n", + " 5.17356311e-05, 2.12984141e-05, 1.14942064e-04], dtype=float32), 1.6235625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_657.wav', 'Ich bin der Gerichtsvollzieher.', 31, array([ 2.9084453e-05, -2.4720324e-05, 1.8879551e-06, ...,\n", + " -2.5064335e-04, -1.8888044e-04, -4.7750240e-05], dtype=float32), 1.9749166666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_670.wav', 'Gute Nacht zusammen!', 20, array([ 1.36263785e-04, 8.22485454e-05, 1.07259955e-04, ...,\n", + " -1.70976884e-04, -4.60869487e-05, -1.28792832e-04], dtype=float32), 1.7810833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_691.wav', 'Läuft die Waschmaschine noch?', 30, array([-1.7628371e-04, 3.7217360e-05, 5.7620698e-05, ...,\n", + " 4.7630738e-06, -1.4578988e-04, -2.1564976e-05], dtype=float32), 1.7326041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_728.wav', 'Der zweite war nicht mehr so chic.', 34, array([ 3.7413691e-05, 2.5557930e-04, 3.8776739e-06, ...,\n", + " -1.6214621e-04, -2.7943292e-05, -4.3322394e-05], 
dtype=float32), 1.8416458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_748.wav', 'Das Licht wird schwächer.', 26, array([-8.6605805e-06, -9.4557421e-05, -4.0338778e-05, ...,\n", + " -4.2446409e-05, 4.2122399e-05, -6.5777012e-06], dtype=float32), 1.8416458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_789.wav', 'Du hast mich durchschaut.', 25, array([-7.2653616e-05, -5.6117566e-05, -2.1032026e-04, ...,\n", + " -1.6650984e-05, -4.1212854e-05, 1.1137113e-04], dtype=float32), 1.7144375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_797.wav', 'Kennt ihr den Weg?', 18, array([-1.6756072e-04, -1.5301499e-04, -6.5641878e-05, ...,\n", + " 2.0324395e-04, 1.4747797e-04, 2.2508665e-04], dtype=float32), 1.5205833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_835.wav', 'Alles klar bei dir?', 19, array([ 1.1695884e-04, 1.1995935e-05, -1.2846527e-04, ...,\n", + " -1.9988464e-04, -2.4078601e-05, -4.2752044e-06], dtype=float32), 1.4054583333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_18_FINAL/18_841.wav', 'Kommt jemand mit?', 17, array([ 4.9882954e-05, 4.0318602e-05, 1.2408203e-04, ...,\n", + " -1.1336284e-04, -1.6859797e-04, -3.4263285e-05], dtype=float32), 1.6356875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_6.wav', 'Nur vom Hörensagen.', 20, array([ 4.0408637e-04, 5.5643718e-04, 5.7215214e-04, ...,\n", + " -7.1763410e-05, -1.0798458e-04, -3.2582655e-05], dtype=float32), 1.7205)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_14.wav', 'Ich weiÃ\\x9f es nicht mehr.', 24, array([0.00023374, 0.00015971, 0.0001749 , ..., 0.00011659, 0.00024648,\n", + " 0.00010209], dtype=float32), 1.6233125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_17.wav', 'Lass es raus!', 13, array([-3.1531116e-04, -3.3344212e-04, -5.9053692e-04, ...,\n", + " 5.4772248e-05, -1.1641844e-05, -6.8900968e-05], dtype=float32), 1.4902916666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_68.wav', 'Sie müssen mir glauben!', 24, array([ 1.4851260e-04, 2.9638095e-04, 2.5485444e-04, ...,\n", + " -1.8143297e-05, 4.6757654e-05, 4.2184558e-05], dtype=float32), 1.708375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_92.wav', 'Ihre Bestellung, bitte!', 23, array([ 9.8706114e-05, 2.2661808e-04, 1.6781769e-04, ...,\n", + " 5.1173961e-06, -2.6828362e-04, -2.2934456e-04], dtype=float32), 1.4297083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_95.wav', 'Was können Sie mir anbieten?', 29, array([-1.9375395e-04, -3.1588171e-04, -3.9896931e-04, ...,\n", + " 1.0834881e-04, -1.4949654e-05, -1.3323028e-05], dtype=float32), 1.7689583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_100.wav', 'Also nicht missverstehen!', 25, array([-1.1475936e-04, 3.5450608e-05, 5.9234120e-05, ...,\n", + " 7.9908222e-07, -7.6752185e-05, 3.1952815e-05], dtype=float32), 1.9507083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_101.wav', 'Jeder macht mal Fehler.', 23, array([-2.0121370e-05, 3.3358188e-05, 1.4433647e-05, ...,\n", + " 2.5029780e-04, 1.0649080e-04, 2.8118977e-04], dtype=float32), 1.8416458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_107.wav', 'Immer dasselbe mit dir.', 23, array([ 4.04063358e-05, 2.61971072e-05, -1.03683014e-04, ...,\n", + " -2.34830455e-04, -1.33784546e-04, -7.84191070e-05], dtype=float32), 1.8537708333333334)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_113.wav', 'Jetzt erinnere ich mich.', 24, array([ 5.7016779e-05, 9.8553166e-05, 8.2001083e-05, ...,\n", + " 2.6238111e-05, 1.3704958e-05, -8.3586237e-05], dtype=float32), 1.9870416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_131.wav', 'Freiwillige vor!', 16, array([ 5.72854588e-05, 1.07770924e-04, 1.99439557e-04, ...,\n", + " -4.32070919e-05, -3.67913685e-06, 1.42182573e-04], dtype=float32), 1.5300416666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_137.wav', 'Ich lehne ihn sogar ab.', 23, array([ 4.1758478e-05, 1.8570285e-05, 2.1333873e-04, ...,\n", + " 2.0144802e-05, -3.2468499e-05, 4.0363415e-05], dtype=float32), 1.8537708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_148.wav', 'Setz dich!', 10, array([-1.4053716e-04, -1.2715683e-04, -3.6183195e-04, ...,\n", + " 8.8158406e-05, -4.2700492e-05, 1.4811622e-04], dtype=float32), 1.1631458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_159.wav', 'Wie lief die Klausur?', 21, array([4.5470217e-05, 1.4640424e-04, 9.2724607e-05, ..., 1.4090222e-04,\n", + " 1.8730978e-04, 8.1763144e-05], dtype=float32), 1.7931875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_188.wav', 'So viel Zeit muss sein!', 23, array([-7.5860844e-05, -1.8835207e-04, -2.0893685e-04, ...,\n", + " -5.3442498e-05, -6.1138802e-05, -8.8275759e-05], dtype=float32), 1.7810833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_194.wav', 'Zeit fürs Bettchen.', 20, array([-9.7486656e-05, -5.1642677e-05, -8.1966471e-05, ...,\n", + " -7.5118078e-05, -3.0586343e-05, -7.1709837e-05], dtype=float32), 1.6599166666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_236.wav', 'Wir sind gleich da.', 19, array([-6.8177519e-06, 6.7671383e-05, -1.0620675e-04, ...,\n", + " 4.5802376e-06, -7.1226568e-05, -5.8944144e-05], dtype=float32), 1.6622916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_243.wav', 'Herrgott noch mal!', 18, array([ 1.7256364e-04, 1.5818405e-04, 2.4684667e-04, ...,\n", + " -1.7978776e-04, -2.2976559e-05, -3.1599044e-05], dtype=float32), 1.4440208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_262.wav', 'Früher war alles besser.', 25, array([ 1.6410025e-04, 2.0620895e-04, 2.0922835e-04, ...,\n", + " 4.5493864e-05, -7.6417935e-05, 7.0160553e-05], dtype=float32), 1.9385833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_264.wav', 'Wie heiÃ\\x9ft du?', 14, array([ 2.3004458e-04, 3.3690900e-04, 3.8855671e-04, ...,\n", + " -1.7735986e-04, -6.0517366e-05, 1.4090910e-05], dtype=float32), 1.24025)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_267.wav', 'Siehst du?', 10, array([ 8.0912840e-05, 5.0722783e-06, 6.0588944e-05, ...,\n", + " -1.2716564e-04, 2.9675630e-05, -1.6470523e-05], dtype=float32), 1.187375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_269.wav', 'Totgesagte leben länger.', 25, array([-1.0916409e-05, -1.7836766e-05, -5.1411305e-05, ...,\n", + " -1.2148214e-04, -2.2084620e-04, 8.5974034e-06], dtype=float32), 1.7568333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_291.wav', 'Bin ich männlich?', 18, array([-2.0014251e-05, 2.6616051e-05, 1.2375216e-04, ...,\n", + " 1.3375390e-04, 5.5609209e-05, -7.4272582e-05], dtype=float32), 1.4418125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_19_FINAL/19_295.wav', 
'Was war in dem Umschlag?', 24, array([-6.2635612e-05, -4.7769913e-06, -1.3995348e-05, ...,\n", +    "        7.0862757e-06,  9.2074784e-05,  9.0880349e-06], dtype=float32), 1.9507083333333333)\n", +
[... several hundred further (wav_path, transcript, char_count, waveform array, duration_sec) tuples from the Mozilla_DE_Thomas3 batches elided from this cell output: durations fall roughly between 0.75 s and 2.0 s, and several transcripts contain mojibake (e.g. 'Wer weiÃ\x9f das schon.', 'Die Ã\x96ffnung ist dehnbar.', 'Ach du ScheiÃ\x83Â\x9fe!'), indicating encoding problems in the source metadata ...]
+    "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_190.wav', 'Nimm die Maske 
ab!', 18, array([4.6733877e-04, 6.9651386e-04, 5.4769457e-04, ..., 1.6475593e-04,\n", + " 7.5979711e-05, 7.9883583e-05], dtype=float32), 1.2337291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_193.wav', 'Nicht dass ich wüsste.', 23, array([ 0.0001971 , 0.00045662, 0.00023958, ..., -0.00011544,\n", + " -0.00016933, -0.00016841], dtype=float32), 1.5862083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_194.wav', 'Der Tee zieht noch.', 19, array([-0.00024223, -0.00046848, -0.00045602, ..., -0.00014842,\n", + " -0.00016475, -0.00012201], dtype=float32), 1.6390833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_210.wav', 'Tu es für mich!', 16, array([4.4054058e-04, 7.1835978e-04, 6.8089634e-04, ..., 6.5819913e-05,\n", + " 6.3534033e-05, 2.4601215e-04], dtype=float32), 1.5685833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_216.wav', 'Bölken Sie woanders herum!', 27, array([-4.3733866e-04, -5.8234221e-04, -6.0285319e-04, ...,\n", + " -2.0549475e-04, -5.1659747e-05, -6.9836286e-05], dtype=float32), 1.9827708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_217.wav', 'So, so.', 7, array([5.1622407e-04, 8.1000535e-04, 6.2310486e-04, ..., 1.1862206e-04,\n", + " 7.1799346e-05, 3.3523640e-06], dtype=float32), 1.3747291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_220.wav', 'Leicht verdientes Geld.', 23, array([ 1.47327999e-04, 1.87759506e-04, -1.56362767e-05, ...,\n", + " 1.08211556e-04, 8.50987126e-05, -3.97509648e-05], dtype=float32), 1.7360208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_226.wav', 'Wie lautet der Zwischenstand?', 29, array([ 5.1066454e-04, 7.2763517e-04, 6.3450093e-04, ...,\n", + " -8.1010330e-05, -1.8156270e-05, -5.7707053e-05], dtype=float32), 1.9827708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_273.wav', 'Was hat ihn geritten?', 21, array([-3.4532882e-04, -5.6787761e-04, -6.2309759e-04, ...,\n", + " -3.4597360e-05, -1.2706745e-05, -1.1419446e-04], dtype=float32), 1.6214583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_282.wav', 'So nicht, Freundchen!', 21, array([-2.2482723e-03, -3.3393281e-03, -3.0241525e-03, ...,\n", + " 8.9230271e-05, 8.0567042e-05, -1.7856433e-05], dtype=float32), 1.7800833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_287.wav', 'So ein feiner Hund!', 19, array([-0.00024811, -0.00028893, -0.00043056, ..., -0.0001634 ,\n", + " -0.00015287, -0.00012142], dtype=float32), 1.4628333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_297.wav', 'Ah, die Feuerwehr!', 18, array([-5.8479345e-05, 1.3606872e-06, -3.1950235e-04, ...,\n", + " 4.5466539e-04, 4.1461250e-04, 3.1427949e-04], dtype=float32), 1.8329583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_298.wav', 'Nachricht bitte faxen!', 22, array([-3.4957391e-04, -4.1374876e-04, -4.3978900e-04, ...,\n", + " -1.4674234e-04, -2.0285949e-04, -3.0548752e-05], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_330.wav', 'Alter Verwalter!', 16, array([0.00058996, 0.00086262, 0.00074697, ..., 0.00030815, 0.00029123,\n", + " 0.00018931], dtype=float32), 1.8615833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_333.wav', 'Was will man mehr?', 18, array([-8.3821319e-04, -1.1214241e-03, -1.0474359e-03, ...,\n", + " -4.0887986e-05, 1.7188730e-05, 
6.5576496e-05], dtype=float32), 1.3570833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_362.wav', 'Ganz der Papa!', 14, array([ 1.0614250e-06, 1.0387501e-04, 2.6466480e-05, ...,\n", + " -3.6802659e-05, 4.0980707e-05, 7.8629993e-05], dtype=float32), 1.3042291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_368.wav', 'Es geht schon, danke!', 21, array([-3.1714016e-04, -4.7203674e-04, -3.6235168e-04, ...,\n", + " 7.8341058e-05, 4.7649206e-05, 1.9486140e-05], dtype=float32), 1.6919583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_404.wav', 'Notieren Sie sich das.', 22, array([-3.1276091e-04, -4.1585916e-04, -4.4194568e-04, ...,\n", + " -1.9349645e-04, -6.0014678e-05, 2.7422161e-07], dtype=float32), 1.8153333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_416.wav', 'Mach das Licht an!', 18, array([ 7.4020500e-04, 9.9551259e-04, 7.7506527e-04, ...,\n", + " -9.4190882e-06, -5.5277683e-06, 6.0646169e-05], dtype=float32), 1.273375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_418.wav', 'Gebt mir ein O!', 15, array([ 2.55384133e-04, 2.99102190e-04, 3.85188963e-04, ...,\n", + " -6.97520736e-05, -1.12780595e-04, -5.84875634e-05], dtype=float32), 1.5641875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_425.wav', 'Wir haben unsere Vorschriften.', 30, array([-0.0014397 , -0.00206455, -0.00194661, ..., 0.00017973,\n", + " 0.00031227, 0.00029818], dtype=float32), 1.9563125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_428.wav', 'Dort spielt die Musik!', 22, array([0.00064248, 0.00109204, 0.00095334, ..., 0.00016345, 0.00021933,\n", + " 0.00016792], dtype=float32), 1.9386875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_439.wav', 'Runter von der Couch!', 21, array([0.00032077, 0.0003695 , 0.00031393, ..., 0.00016823, 0.00027614,\n", + " 0.00030219], dtype=float32), 1.4716458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_440.wav', 'Geh, Martin. 
Geh!', 17, array([-0.0006147 , -0.00096355, -0.00084441, ..., -0.00019064,\n", + " -0.00014664, -0.0001376 ], dtype=float32), 1.4011458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_442.wav', 'Dann ist doch alles paletti.', 28, array([-0.0003903 , -0.00051721, -0.00051659, ..., 0.00044963,\n", + " 0.00069829, 0.00057605], dtype=float32), 1.7915833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_464.wav', 'Hören Sie erst einmal zu!', 26, array([-2.3209564e-03, -3.7553089e-03, -3.8581355e-03, ...,\n", + " 4.0617133e-06, 6.2217005e-05, 1.8342262e-05], dtype=float32), 1.7977083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_475.wav', 'Ich will die Hände sehen!', 26, array([-1.1517418e-03, -1.5774536e-03, -1.5022659e-03, ...,\n", + " 8.5659660e-05, 1.5909245e-04, 1.0823877e-04], dtype=float32), 1.4804583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_481.wav', 'Du kennst doch Tessa.', 21, array([-9.7565542e-05, -8.4838466e-05, -2.1631434e-04, ...,\n", + " -9.0966016e-05, -9.0894253e-05, -1.5524645e-04], dtype=float32), 1.7624583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_487.wav', 'Angeber und Neidhammel.', 23, array([-4.2524905e-04, -5.5071624e-04, -4.9216941e-04, ...,\n", + " -9.1045105e-05, -3.0268184e-05, -1.0583480e-04], dtype=float32), 1.8593958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_491.wav', 'Können diese Augen lügen?', 27, array([-1.04710832e-03, -1.57430710e-03, -1.43215503e-03, ...,\n", + " 1.43472225e-05, 1.20743534e-05, -1.07111417e-04], dtype=float32), 1.8241458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_499.wav', 'Kann man hier denn nicht lüften?', 33, array([-9.1343711e-04, -1.1802320e-03, -9.9357730e-04, ...,\n", + " 7.8159035e-05, 2.3012167e-04, 3.3637294e-05], dtype=float32), 1.9871666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_500.wav', 'Der Mann ist vom Leben gezeichnet.', 34, array([ 1.06765685e-04, 2.15540877e-05, -9.11364405e-05, ...,\n", + " -5.42830057e-05, -9.09425871e-05, -3.43727625e-05], dtype=float32), 1.7712708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_506.wav', 'Wollen Sie mich aushorchen?', 27, array([ 0.00060325, 0.00087957, 0.00074186, ..., -0.00021219,\n", + " -0.00024823, -0.00017538], dtype=float32), 1.9739375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_531.wav', 'Je eher, desto besser.', 22, array([5.7826861e-04, 7.7570765e-04, 6.1795511e-04, ..., 8.9765228e-05,\n", + " 4.5600675e-05, 1.4581751e-04], dtype=float32), 1.7800833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_538.wav', 'Och, komm schon her!', 20, array([-4.9066258e-04, -7.3491497e-04, -5.5824185e-04, ...,\n", + " 8.5976262e-06, 1.0786976e-04, 1.2791457e-04], dtype=float32), 1.8593958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_542.wav', 'Nimm deine Maske endlich ab!', 28, array([ 5.4343470e-04, 7.2278164e-04, 7.2296784e-04, ...,\n", + " -3.4153378e-05, -3.6221893e-05, -8.8784982e-05], dtype=float32), 1.9386875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_574.wav', 'Wollt ihr mich ärgern?', 23, array([0.00089293, 0.00139316, 0.0012052 , ..., 0.00011375, 0.00022351,\n", + " 0.00014075], dtype=float32), 1.6567083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_599.wav', 'Das ist knorke.', 15, array([-8.0439750e-06, -4.1563135e-06, 
-3.6478632e-05, ...,\n", + " -1.6141655e-04, -8.8675122e-05, -1.2264083e-04], dtype=float32), 1.3394583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_603.wav', 'Suchst du Ã\\x84rger?', 17, array([ 1.8951594e-04, 3.2533749e-04, 2.3231433e-04, ...,\n", + " -1.0691231e-05, -6.9874281e-05, -4.5488341e-05], dtype=float32), 1.6038333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_615.wav', 'Hör nicht auf diese Schwätzer!', 32, array([ 0.00019477, 0.00020745, 0.00017311, ..., 0.00030501,\n", + " -0.00018354, 0.00024707], dtype=float32), 1.9739375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_618.wav', \"Gleich geht's weiter!\", 21, array([-6.4648612e-04, -1.0017229e-03, -9.2825363e-04, ...,\n", + " -4.5593577e-05, -6.6424482e-06, 1.4339538e-05], dtype=float32), 1.4452083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_641.wav', 'Herr, erbarme dich!', 19, array([ 9.9213721e-06, 1.8233144e-05, -3.5843041e-05, ...,\n", + " -5.0301041e-05, -1.3241796e-04, -2.0356404e-04], dtype=float32), 1.7624583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_649.wav', 'Sammelt Holz für das Feuer!', 28, array([-0.00024918, -0.00046716, -0.00041068, ..., 0.00016901,\n", + " 0.0001653 , 0.00017449], dtype=float32), 1.9387083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_692.wav', 'Erst will ich noch duschen.', 27, array([ 2.7669812e-04, 5.0494721e-04, 5.6616898e-04, ...,\n", + " 4.0362014e-05, -7.8570345e-05, 6.2029525e-05], dtype=float32), 1.6082291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_703.wav', 'Was kommt als nächstes?', 24, array([ 5.5248733e-04, 8.9842337e-04, 6.7765010e-04, ...,\n", + " -1.3254551e-04, -9.5152573e-05, -2.1063161e-05], dtype=float32), 1.7977083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_718.wav', 'Setzen, sechs!', 14, array([-1.2044140e-04, -2.0982703e-04, -2.7291384e-04, ...,\n", + " 1.7828704e-04, 9.6640695e-05, 1.3019536e-05], dtype=float32), 1.2689791666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_720.wav', 'Dann nehmen wir meinen Wagen.', 29, array([ 1.2858727e-04, 1.7004457e-04, -5.1648447e-05, ...,\n", + " 2.5735653e-04, 2.8828968e-04, 1.9113944e-04], dtype=float32), 1.9915833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_724.wav', 'Lach mal wieder.', 16, array([2.5169516e-04, 3.1780155e-04, 2.4175562e-04, ..., 1.8466891e-04,\n", + " 9.4025556e-05, 1.4185447e-04], dtype=float32), 1.3570833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_726.wav', 'Lass uns mal Fieber messen!', 27, array([ 4.6217057e-04, 7.1049004e-04, 5.8858085e-04, ...,\n", + " -2.7612457e-06, -4.4886579e-05, -1.3602876e-06], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_733.wav', 'Ja, du hast ja Recht!', 21, array([-0.00065709, -0.00095549, -0.00067059, ..., 0.00023162,\n", + " 0.00042249, 0.00021008], dtype=float32), 1.8241458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_745.wav', 'Kannst du bitte das Licht anlassen?', 35, array([-4.5024044e-05, -6.6272514e-05, -1.4942518e-04, ...,\n", + " -1.0059726e-04, -8.9730158e-05, -4.9335773e-05], dtype=float32), 1.8593958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_7_FINAL/7_755.wav', 'Jetzt wird gefeiert!', 20, array([ 6.5074948e-04, 8.2373072e-04, 6.9322297e-04, ...,\n", + " 2.5613972e-05, -7.3600226e-05, 
9.0847658e-05], dtype=float32), 1.4892708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_21.wav', 'Oh, ein Blechschaden!', 21, array([ 2.7968596e-05, 2.5622614e-05, 5.5850909e-05, ...,\n", + " -3.6388674e-06, -1.3192165e-05, -5.8324472e-06], dtype=float32), 1.7536458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_24.wav', 'Woran erkennt man sie?', 22, array([-1.6248678e-05, -2.0881544e-05, 2.2568598e-05, ...,\n", + " -1.0051125e-06, -4.4804568e-05, -3.8311518e-05], dtype=float32), 1.8770208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_57.wav', 'Wo hast Du den Ludenmantel her?', 31, array([ 9.4084098e-05, 6.2570427e-05, 8.1058839e-05, ...,\n", + " -3.1764132e-05, -4.2468575e-05, -3.3772998e-05], dtype=float32), 1.9915833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_59.wav', 'Bingo!', 6, array([ 4.7897654e-05, 2.7239477e-05, 3.7255515e-05, ...,\n", + " -1.7023414e-05, -2.9687346e-05, -3.9503360e-05], dtype=float32), 1.1456041666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_64.wav', 'Schreibt man das so?', 20, array([-1.6650798e-04, -2.2954465e-04, -2.1082905e-04, ...,\n", + " 5.5576045e-05, 1.4893518e-05, 2.0421723e-05], dtype=float32), 1.7272083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_81.wav', 'Halt mal kurz mein Bier.', 24, array([-8.2688921e-06, -1.1980872e-05, -4.0169580e-06, ...,\n", + " 8.8575485e-05, 1.3926605e-04, 3.6588870e-05], dtype=float32), 1.8417708333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_83.wav', 'Doch Hilfe naht bereits.', 24, array([ 4.9734876e-06, 5.2194659e-06, 1.2122488e-05, ...,\n", + " -1.8982364e-05, -4.2752654e-05, -8.2323677e-05], dtype=float32), 1.98275)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_84.wav', 'Formation einnehmen!', 20, array([8.1898354e-05, 7.4887575e-05, 6.6653323e-05, ..., 7.7452451e-06,\n", + " 2.1070047e-05, 3.0395060e-05], dtype=float32), 1.8682083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_87.wav', 'Holt mich hier raus!', 20, array([ 6.53247334e-05, -2.15428197e-04, -5.42638707e-04, ...,\n", + " -1.15612675e-05, 2.72592151e-05, 1.50995202e-05], dtype=float32), 1.5509583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_88.wav', 'Da vorne kommt mein Ex.', 23, array([-2.0436737e-04, -7.5976342e-05, 9.7310134e-05, ...,\n", + " 8.3587765e-06, -3.2081423e-06, 1.7971579e-05], dtype=float32), 1.8505833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_113.wav', 'Glaube es mir einfach.', 22, array([-2.7944061e-05, 1.0844935e-05, -1.5047234e-05, ...,\n", + " -2.7743961e-05, 2.9569403e-06, -3.5605283e-06], dtype=float32), 1.5333333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_115.wav', 'Was die Leute immer haben!', 26, array([-5.4329084e-05, -8.8018889e-05, -7.1306808e-05, ...,\n", + " 7.3982832e-05, 5.8832418e-05, 6.6730849e-05], dtype=float32), 1.9431041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_124.wav', 'BloÃ\\x9f nicht!', 12, array([1.01506448e-04, 1.75192414e-04, 1.12130554e-04, ...,\n", + " 3.55834927e-05, 4.65009398e-05, 5.75332670e-05], dtype=float32), 1.0310416666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_126.wav', \"Ich tu' immer nur rein.\", 23, array([ 3.21958287e-05, 2.19840458e-05, 1.46883485e-05, ...,\n", + " -8.37586867e-06, -5.43750639e-06, -1.22217643e-05], dtype=float32), 
1.7448333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_138.wav', 'Wann lief diese Sendung?', 24, array([-1.7348650e-05, 1.9956657e-05, 3.1632226e-05, ...,\n", + " 1.5858004e-05, 1.8046559e-05, -4.8364400e-05], dtype=float32), 1.9563333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_143.wav', 'Gehen Sie aus dem Weg!', 22, array([1.80967872e-05, 1.12411635e-05, 1.61865628e-05, ...,\n", + " 6.79703808e-05, 7.41552940e-05, 9.28417285e-05], dtype=float32), 1.3923333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_145.wav', 'Es geht drunter und drüber.', 28, array([ 5.3915655e-06, 8.5220972e-06, -3.3527529e-05, ...,\n", + " -1.0693114e-05, -6.3991156e-06, 1.2663132e-05], dtype=float32), 1.9915833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_149.wav', 'Suchscheinwerfer einschalten!', 29, array([-4.3899286e-06, 1.1313143e-05, -7.2204307e-06, ...,\n", + " -3.3424400e-05, -1.3328722e-05, -2.6314769e-05], dtype=float32), 1.8858333333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_152.wav', 'Ihre Uhr geht vor.', 18, array([ 1.1011517e-05, -3.0811309e-05, -2.2571772e-05, ...,\n", + " 8.1292972e-05, 7.4179443e-05, 7.1086802e-06], dtype=float32), 1.3394791666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_153.wav', 'Wir suchen noch Freiwillige.', 28, array([-5.6182507e-06, -3.0251003e-05, 5.1053936e-05, ...,\n", + " -5.0866500e-05, -1.7348602e-05, -4.6226152e-05], dtype=float32), 1.9298958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_157.wav', 'Halten Sie sofort an!', 21, array([ 2.3082459e-04, 2.3086018e-04, -2.2280088e-05, ...,\n", + " -4.5649995e-05, -3.0157349e-05, -1.7121181e-05], dtype=float32), 1.6501041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_190.wav', 'Zeig uns mal, wo der Hammer hängt!', 35, array([ 4.6486733e-05, 5.3618060e-05, 4.0510302e-05, ...,\n", + " -1.0646369e-04, -7.5534314e-05, -1.2183484e-04], dtype=float32), 1.91225)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_196.wav', 'Da kann man auch parken.', 24, array([-1.0925556e-05, -3.7278984e-05, -1.0163063e-05, ...,\n", + " -6.9978710e-06, -3.4896555e-06, -6.6393928e-05], dtype=float32), 1.7624583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_204.wav', 'Eine gute Stunde ist rum.', 25, array([-2.2296244e-05, -5.8680125e-06, -5.0762057e-05, ...,\n", + " -4.8879232e-05, -8.5942098e-05, -6.8862631e-05], dtype=float32), 1.6214583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_205.wav', 'Oh ja, das fetzt!', 17, array([ 3.4871216e-06, -4.8185248e-06, 1.2310127e-05, ...,\n", + " -1.7998637e-04, -4.5437564e-04, -3.7538476e-04], dtype=float32), 1.5025)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_225.wav', 'Sag das Zauberwort!', 19, array([ 3.0607847e-05, 4.5160428e-05, 1.8997842e-05, ...,\n", + " -1.6968366e-05, 1.1446763e-05, -3.4663015e-05], dtype=float32), 1.6743333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_242.wav', 'Die Arme!', 9, array([4.2858810e-05, 7.1904920e-05, 2.9656387e-05, ..., 5.8210357e-05,\n", + " 4.0901028e-05, 3.2474836e-05], dtype=float32), 0.8636041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_246.wav', 'Schläfst du schon?', 19, array([ 3.2191518e-05, 5.0761428e-05, 4.3220087e-05, ...,\n", + " -4.0423780e-07, 1.7892495e-05, 5.0407853e-06], dtype=float32), 1.1456041666666668)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_247.wav', 'Ja da schau her!', 16, array([-3.9683233e-05, -9.2827155e-05, -5.1356539e-05, ...,\n", + " 8.5207663e-05, 5.3869204e-05, 8.1267404e-05], dtype=float32), 1.3394583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_251.wav', 'Moment, das ging anders.', 24, array([-7.3496245e-05, -9.7117241e-05, -9.9846256e-05, ...,\n", + " -2.2075654e-05, -5.6377292e-05, -3.1324758e-05], dtype=float32), 1.9475208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_254.wav', 'Weiter zum nächsten Kapitel.', 29, array([ 2.7818656e-05, 2.9083269e-05, 2.7292099e-05, ...,\n", + " -1.4497251e-05, 1.6704771e-05, 1.8156856e-05], dtype=float32), 1.7624583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_255.wav', 'Holla die Waldfee!', 18, array([ 3.2722608e-05, -3.4862321e-06, 2.1344584e-05, ...,\n", + " -3.5852513e-06, -1.3345180e-05, 1.8042003e-06], dtype=float32), 1.2777916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_278.wav', 'Lass mich nicht allein.', 23, array([-6.2487576e-05, -5.1307488e-05, 3.3147335e-05, ...,\n", + " -1.3666711e-06, -1.6965050e-05, 1.0842440e-05], dtype=float32), 1.7448333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_283.wav', 'Warst du beim Frisör?', 22, array([-5.1554598e-05, -2.8181448e-05, -2.1276550e-05, ...,\n", + " 5.1014787e-05, 6.0253118e-05, 4.9681836e-05], dtype=float32), 1.60825)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_291.wav', 'Warum bin ich so fröhlich?', 27, array([-9.2893220e-05, -9.0468158e-05, -8.4269959e-05, ...,\n", + " 5.6945123e-06, 2.3743269e-05, -1.5906717e-07], dtype=float32), 1.5862083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_307.wav', 'Das klingt sehr gut.', 20, array([ 8.8375481e-07, 1.4093188e-06, -8.0541049e-06, ...,\n", + " -6.2088387e-05, -3.6809190e-05, -5.5097131e-05], dtype=float32), 1.4804583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_310.wav', 'Kann man das mitessen?', 22, array([-1.8419527e-05, -2.5431269e-05, -8.9255473e-06, ...,\n", + " 2.5581608e-05, 3.7564107e-05, 2.2521937e-05], dtype=float32), 1.5421458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_311.wav', 'Wo liegt das Problem?', 21, array([-1.7069402e-05, 2.2379625e-06, -8.6348446e-06, ...,\n", + " 2.4881610e-05, -2.6925150e-06, 1.8407424e-06], dtype=float32), 1.8065208333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_313.wav', 'Wo kann man sich ausloggen?', 27, array([-2.94713544e-07, -2.60781735e-06, 2.09315767e-05, ...,\n", + " -1.10319825e-05, -5.37709784e-05, -2.63888141e-05], dtype=float32), 1.7888958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_319.wav', 'Wo kommt das nur her?', 21, array([-3.2170439e-05, -2.5212325e-05, -3.7200436e-05, ...,\n", + " -9.3722010e-06, -3.0964005e-05, -1.5780270e-05], dtype=float32), 1.9298958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_337.wav', 'Und wenn man die nicht hat?', 27, array([-4.4960318e-05, 5.2144351e-05, -2.9507015e-05, ...,\n", + " -3.9032249e-05, 3.4188946e-05, -2.3692317e-05], dtype=float32), 1.8329583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_347.wav', 'Sag ihr das bloÃ\\x9f nicht!', 24, array([8.6986920e-06, 4.4441199e-06, 3.0283294e-05, ..., 9.9162316e-05,\n", + " 7.8216704e-05, 9.9542762e-05], dtype=float32), 1.6126458333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_348.wav', 'Wo bleibst du?', 14, array([ 3.3125209e-05, 5.7069548e-05, 3.6280937e-05, ...,\n", + " -2.4643228e-05, -2.7121812e-05, -1.5307731e-05], dtype=float32), 1.2998125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_352.wav', 'Jetzt oder nie!', 15, array([-1.7882267e-05, 1.5871639e-05, -7.5667369e-05, ...,\n", + " -3.7708491e-05, 7.9740630e-06, -7.9073770e-06], dtype=float32), 1.3747291666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_368.wav', 'Hinsetzen und FüÃ\\x9fe hoch!', 26, array([-3.0999392e-05, -7.2621566e-05, -4.7179296e-05, ...,\n", + " -2.5928295e-05, -3.2266624e-05, 1.4868124e-05], dtype=float32), 1.7624583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_394.wav', 'Nimm dir mal eine Pause!', 24, array([-2.7986377e-04, -3.0645030e-04, -2.3860915e-04, ...,\n", + " -3.2176635e-05, -4.1073359e-05, -1.7371191e-05], dtype=float32), 1.9519166666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_427.wav', 'Vom Kinde verschmäht.', 22, array([2.4927745e-05, 5.9401387e-05, 5.5517099e-05, ..., 8.8263223e-05,\n", + " 3.5481713e-05, 1.4234082e-05], dtype=float32), 1.9739375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_435.wav', 'Kann ich dir helfen?', 20, array([-7.2613778e-04, 1.4254064e-03, 4.3165400e-03, ...,\n", + " 9.7870041e-05, 6.2070317e-06, 1.0954802e-04], dtype=float32), 1.3923333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_449.wav', 'Woher will sie das wissen?', 26, array([ 6.6095758e-08, -2.7216944e-05, -1.6521408e-05, ...,\n", + " 3.0345358e-05, -5.6843191e-06, -4.2101074e-05], dtype=float32), 1.7272083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_468.wav', 'Das lass mal meine Sorge sein.', 30, array([-4.7126541e-05, -5.9281327e-05, -3.5599784e-05, ...,\n", + " 2.0367926e-05, 4.0726398e-05, 1.8718367e-05], dtype=float32), 1.9342916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_500.wav', 'Ich habe heute Geburtstag.', 26, array([-5.1001366e-06, 4.8161728e-05, 1.0626727e-05, ...,\n", + " -8.0793325e-05, -6.0714734e-05, -7.9644029e-05], dtype=float32), 1.9387083333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_513.wav', 'Fertig werden!', 14, array([-2.4789182e-05, -1.4137984e-05, -4.8843711e-05, ...,\n", + " 2.4393246e-05, 2.7856760e-05, 6.9619755e-06], dtype=float32), 1.3615)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_525.wav', 'Jeder trauert anders.', 21, array([-6.3906264e-06, -2.4861220e-05, -3.1557371e-05, ...,\n", + " -5.3394677e-05, 5.5594451e-06, -4.3505042e-05], dtype=float32), 1.8593958333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_530.wav', 'Wir sprechen uns später.', 25, array([ 1.9607371e-05, 1.2742041e-05, 5.9507223e-05, ...,\n", + " -1.0580019e-06, -1.0849526e-05, -2.2735680e-05], dtype=float32), 1.5950208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_534.wav', 'Den Schuss nicht hören.', 24, array([1.0162838e-04, 1.3316146e-04, 1.3368837e-04, ..., 5.8495625e-06,\n", + " 7.8353441e-05, 3.3752654e-05], dtype=float32), 1.8726041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_543.wav', 'Wer tut das nicht?', 18, array([-1.5056261e-05, -2.7894443e-05, -8.4756257e-06, ...,\n", + " -4.3981410e-05, -3.8667356e-05, -4.8794256e-05], dtype=float32), 1.5773958333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_547.wav', 'Zu so später Stunde?', 21, array([-1.2750152e-04, 1.9311530e-05, -6.8482601e-05, ...,\n", + " -8.0274267e-06, 3.7486578e-05, -4.1844236e-05], dtype=float32), 1.6478958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_569.wav', 'Ein Zirkus ohne Tiere?', 22, array([-2.8725301e-05, -5.8967784e-05, -4.7625667e-06, ...,\n", + " 5.3123777e-06, -7.1301661e-06, -2.9527286e-05], dtype=float32), 1.8461666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_570.wav', 'Sag schon, was ist drin?', 24, array([ 1.10985304e-04, 5.97430153e-05, 9.55062278e-05, ...,\n", + " 6.52888993e-05, -5.82730863e-05, 6.85385385e-05], dtype=float32), 1.8373541666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_574.wav', 'Wir zählen auf dich.', 21, array([-7.5106524e-05, -9.9009638e-05, -7.9571801e-05, ...,\n", + " 3.8461326e-06, 8.2744657e-05, 5.6746823e-05], dtype=float32), 1.9210833333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_585.wav', 'Das funktioniert auch.', 22, array([ 5.4934342e-05, 1.7679840e-05, -5.7660582e-05, ...,\n", + " 4.9520886e-06, -2.5478117e-05, -6.3567706e-05], dtype=float32), 1.4628333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_589.wav', 'Was drauf?', 10, array([ 1.5172187e-05, 3.5768371e-05, -4.6845405e-05, ...,\n", + " 2.3743922e-05, -3.8076912e-05, 2.2450782e-05], dtype=float32), 1.2072916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_594.wav', 'Einer zur Zeit!', 15, array([-1.9068037e-05, -2.0037192e-05, -8.8215660e-05, ...,\n", + " -1.8433493e-05, -3.3125831e-05, 3.5209345e-05], dtype=float32), 1.4099583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_606.wav', 'Okay, und nun?', 14, array([-1.2366170e-05, 2.3954278e-06, -1.8647337e-05, ...,\n", + " -2.4212586e-06, 6.3337334e-06, -2.5126603e-06], dtype=float32), 1.5597708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_607.wav', 'Das verstehst du noch nicht.', 28, array([ 1.6215906e-04, 2.5805720e-04, 2.2398161e-04, ...,\n", + " -5.9032095e-06, -1.2547288e-06, -1.8913257e-05], dtype=float32), 1.7095833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_609.wav', 'Wie uncool!', 11, array([-1.1241895e-05, -3.2969092e-05, -5.8745212e-05, ...,\n", + " 8.5234688e-06, 1.9909365e-05, 1.7495377e-05], dtype=float32), 1.0927291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_624.wav', 'Setzt dich gerade!', 18, array([-1.7491520e-05, 6.7394591e-05, 5.0117076e-05, ...,\n", + " -2.1143003e-05, -1.6165326e-05, -1.6601503e-05], dtype=float32), 1.3835208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_638.wav', 'Nicht schlecht der Specht!', 26, array([-1.0250892e-05, 1.4861113e-05, -5.1604333e-05, ...,\n", + " 7.6938113e-06, 2.0211788e-05, 4.5162437e-06], dtype=float32), 1.8153333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_647.wav', 'Was haben die vor?', 18, array([ 1.2927976e-06, -4.4330540e-05, -4.2087355e-05, ...,\n", + " 1.2652035e-04, -7.1286093e-05, -1.9011653e-06], dtype=float32), 1.4628333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_648.wav', 'Ich habe gar nichts mitbekommen.', 32, array([ 1.6062468e-05, 4.4314598e-05, 1.1317232e-05, ...,\n", + " -8.4248430e-05, -4.8613791e-05, -4.1891144e-05], dtype=float32), 1.9915833333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_664.wav', 'Je mehr, desto besser.', 22, array([-1.0978662e-05, 2.8232571e-06, -2.7930673e-05, ...,\n", + " 5.0805535e-05, 3.9726485e-05, 6.7175766e-05], dtype=float32), 1.8505833333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_677.wav', 'Da vorne links!', 15, array([ 3.8325859e-05, 3.2421449e-05, 1.5961947e-05, ...,\n", + " 2.6722651e-05, -3.3873417e-05, 3.2344939e-05], dtype=float32), 1.4363958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_682.wav', 'Jetzt mal halblang!', 19, array([-2.0417360e-06, -1.3626728e-05, -2.8990502e-05, ...,\n", + " -2.2435464e-05, -3.3464916e-05, 2.5530893e-05], dtype=float32), 1.4892708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_687.wav', 'Gib uns ein Beispiel!', 21, array([-8.4907850e-05, -5.6986839e-05, 3.7472455e-06, ...,\n", + " -1.4217812e-05, -2.3697576e-05, -2.4605337e-05], dtype=float32), 1.6567083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_689.wav', 'Von wegen!', 10, array([-2.7207323e-05, -6.9836324e-06, -9.1906164e-05, ...,\n", + " 6.5761873e-05, 5.3384709e-05, 3.5098144e-06], dtype=float32), 0.8547916666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_691.wav', 'Das finde ich ziemlich doof.', 28, array([-2.9834633e-05, 5.6474819e-06, -2.5375591e-06, ...,\n", + " -3.2603730e-06, -5.9017879e-05, -9.6670803e-05], dtype=float32), 1.7977083333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_697.wav', 'Das trifft sich gut.', 20, array([ 7.9529818e-06, 3.9593842e-06, 3.0517844e-05, ...,\n", + " -4.2052940e-05, -3.0681629e-05, -2.6093589e-05], dtype=float32), 1.8241458333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_704.wav', 'Jetzt gibt es Zoff.', 19, array([ 1.7251841e-05, 3.0525447e-05, 4.0081544e-05, ...,\n", + " -2.7181366e-05, -6.4996988e-05, -2.0187828e-05], dtype=float32), 1.6655208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_708.wav', 'Liebe ist kein Verbrechen.', 26, array([ 1.3942296e-03, 2.0183886e-03, 1.7392144e-03, ...,\n", + " 4.2136421e-06, 1.5667934e-05, -1.1447505e-05], dtype=float32), 1.8329583333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_725.wav', 'Auch das noch!', 14, array([ 6.9235853e-06, 1.0541713e-05, -6.9821567e-06, ...,\n", + " -6.0647875e-05, -3.7899004e-05, 1.4291401e-05], dtype=float32), 1.2337083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_728.wav', 'Toller Hengst!', 14, array([ 1.5415973e-05, 1.2052349e-05, 2.2745300e-05, ...,\n", + " -5.1455394e-05, -8.6221211e-05, -2.3398878e-05], dtype=float32), 1.1632291666666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_748.wav', 'Reine Gewöhungssache.', 22, array([-7.46887818e-05, 3.63702893e-05, 2.65028193e-05, ...,\n", + " 1.14920855e-04, 8.75776823e-05, 7.50372201e-05], dtype=float32), 1.4452083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_764.wav', 'Siehe weiter unten.', 19, array([ 1.0315010e-04, 1.2668683e-04, 1.3160890e-04, ...,\n", + " 3.5362529e-05, -4.0091851e-05, 3.1800329e-05], dtype=float32), 1.5509583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_778.wav', 'Hilfe ein Ã\\x9cberfall!', 20, array([-9.1011774e-05, -1.6054764e-04, -6.9503607e-05, ...,\n", + " -3.2605390e-06, -1.1628125e-05, -4.9398786e-05], dtype=float32), 1.4011458333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_8_FINAL/8_804.wav', \"Wen wundert's?\", 14, array([ 1.8174978e-05, 1.0757233e-05, 1.4760263e-05, ...,\n", + " -4.7010188e-05, -6.0861544e-06, -1.5782018e-05], dtype=float32), 1.2601666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_19.wav', 'Bis die Schwarte kracht.', 24, array([ 3.15900324e-05, -1.30308879e-04, 3.94875406e-06, ...,\n", + " 3.35644108e-05, 1.02667604e-04, 4.54106703e-05], dtype=float32), 1.7536354166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_26.wav', 'Auch das wäre möglich.', 24, array([-4.5410670e-05, 1.9743770e-06, -1.9743769e-05, ...,\n", + " 4.3436296e-05, -1.9743770e-06, 3.5538786e-05], dtype=float32), 1.5421458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_28.wav', 'Was geht gar nicht?', 19, array([ 1.9743770e-06, 9.8718847e-06, -3.3564411e-05, ...,\n", + " 1.2241138e-04, -4.5410670e-05, 0.0000000e+00], dtype=float32), 1.4065104166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_33.wav', 'Die Geschichte geht anders.', 27, array([-3.3564411e-05, -7.7000703e-05, -8.2923834e-05, ...,\n", + " 3.3564411e-05, -3.9487541e-06, 3.1590032e-05], dtype=float32), 1.5333333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_42.wav', 'Welche SchuhgröÃ\\x9fe?', 20, array([ 0.0000000e+00, -1.1846262e-05, -5.9231311e-06, ...,\n", + " 4.5410670e-05, -3.9487539e-05, 2.9615656e-05], dtype=float32), 1.5685833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_51.wav', 'Mediathek aufrufen!', 19, array([-3.6328536e-04, 1.9941208e-04, -8.4898209e-05, ...,\n", + " 5.9231311e-06, -5.7256933e-05, -4.9359427e-05], dtype=float32), 1.9739479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_54.wav', 'Es tut ihr furchtbar leid.', 26, array([-3.3564411e-05, -4.9359427e-05, 1.1846262e-05, ...,\n", + " 5.1333802e-05, -8.8846966e-05, 5.7256933e-05], dtype=float32), 1.9563229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_62.wav', 'Noch mal von vorne, bitte.', 26, array([ 2.5666901e-05, -2.9615656e-05, -3.7513164e-05, ...,\n", + " 8.6872591e-05, -5.7256933e-05, 6.9103196e-05], dtype=float32), 1.8417604166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_63.wav', 'Oh jemine!', 10, array([-1.7769393e-05, -6.9103196e-05, -3.7513164e-05, ...,\n", + " 5.7256933e-05, 5.1333802e-05, 3.9487539e-05], dtype=float32), 1.20728125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_84.wav', 'Reich mir den mal rüber.', 25, array([-1.5795016e-05, -9.8718847e-06, 6.7128822e-05, ...,\n", + " 0.0000000e+00, -1.2833450e-04, 3.3564411e-05], dtype=float32), 1.7448333333333332)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_101.wav', 'Findest du nicht auch?', 22, array([-7.3051953e-05, -9.8718847e-06, 5.9231311e-06, ...,\n", + " 2.5666901e-05, -5.3308180e-05, 1.1451387e-04], dtype=float32), 1.32184375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_113.wav', 'Alles korrekt.', 14, array([ 1.1846262e-05, 2.9615656e-05, 1.2833450e-04, ...,\n", + " -1.9743769e-05, 2.7641279e-05, -1.7769393e-05], dtype=float32), 1.3923333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_128.wav', 'Alles wird gut.', 15, array([-9.2795723e-05, -3.1590032e-05, 8.2923834e-05, ...,\n", + " 1.3820640e-05, -4.7385049e-05, 1.1846262e-05], dtype=float32), 1.6038333333333334)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_156.wav', 'Würde ich auch machen.', 23, array([-6.1205690e-05, -5.3308180e-05, -5.5282559e-05, ...,\n", + " -9.8718847e-06, -1.1648824e-04, -6.1205690e-05], dtype=float32), 1.3747083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_173.wav', 'Gib Gas!', 8, array([ 3.7513164e-05, 7.1077571e-05, -1.9743770e-06, ...,\n", + " 5.9231312e-05, -3.0405406e-04, 4.5410672e-04], dtype=float32), 1.03984375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_221.wav', 'Weil er es kann.', 16, array([ 3.9487541e-06, -3.1590032e-05, 2.1718148e-05, ...,\n", + " -9.6744472e-05, -3.9487539e-05, -6.3180065e-05], dtype=float32), 1.4011458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_225.wav', 'Was ist der Sinn des Lebens?', 28, array([-4.7385049e-05, -9.0821341e-05, 8.6872591e-05, ...,\n", + " 7.8975081e-06, -1.3820640e-05, -2.0730958e-04], dtype=float32), 1.9563229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_247.wav', 'Es ist kalt.', 12, array([-5.33081802e-05, -1.14513867e-04, 2.36925243e-05, ...,\n", + " -4.34362955e-05, 5.92313108e-06, -1.08590735e-04], dtype=float32), 1.17203125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_259.wav', 'ScheiÃ\\x9f drauf!', 14, array([ 9.8718854e-05, 3.9487541e-06, 5.9231312e-05, ...,\n", + " -3.3564411e-05, -1.7769393e-05, -1.1253949e-04], dtype=float32), 1.19846875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_271.wav', 'Katzen haben sieben Leben.', 26, array([-1.57950162e-05, 7.89750811e-06, -6.12056901e-05, ...,\n", + " -1.04641986e-04, -7.30519532e-05, -5.92313108e-06], dtype=float32), 1.8593854166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_285.wav', 'Nicht so lasch!', 15, array([ 3.3564411e-05, 1.4412952e-04, -8.8846966e-05, ...,\n", + " 5.9231311e-06, -1.4610391e-04, -3.1590032e-05], dtype=float32), 1.4187708333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_309.wav', 'Ich gehe dann mal kicken.', 25, array([-7.8975077e-05, -5.1333802e-05, 2.1718148e-05, ...,\n", + " -9.8718847e-06, 0.0000000e+00, 1.7769393e-05], dtype=float32), 1.6743229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_313.wav', 'Versuchen Sie es später noch einmal!', 37, array([1.7769393e-04, 1.5597578e-04, 7.7000703e-05, ..., 1.7769393e-05,\n", + " 2.5666901e-05, 0.0000000e+00], dtype=float32), 1.9563125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_327.wav', 'Ulrike muss es ja wissen.', 25, array([ 1.3820640e-05, -4.3436296e-05, -2.5666901e-05, ...,\n", + " 8.0949460e-05, 3.1590032e-05, -1.5795016e-05], dtype=float32), 1.5553645833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_329.wav', 'So viele schon?', 15, array([-1.1451387e-04, -9.4770097e-05, 1.3820640e-05, ...,\n", + " -9.8718847e-06, 7.8975081e-06, 3.3564411e-05], dtype=float32), 1.32625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_339.wav', 'Noch nicht.', 11, array([-1.1569849e-03, -1.1234205e-03, -1.1056511e-03, ...,\n", + " -4.1461917e-05, -1.9743770e-06, -2.3692524e-05], dtype=float32), 0.9473229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_351.wav', 'So alt bin ich dann auch wieder nicht.', 38, array([ 1.0957792e-03, 8.6082838e-04, 5.8836438e-04, ...,\n", + " -7.7000703e-05, -1.0661636e-04, -5.3308180e-05], dtype=float32), 1.9100520833333334)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_352.wav', 'Diese Gelegenheit kann man nutzen.', 34, array([ 1.2043700e-04, 1.9743769e-05, 7.5026328e-05, ...,\n", + " 3.1590032e-05, 6.5154440e-05, -5.1333802e-05], dtype=float32), 1.9981770833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_361.wav', 'Bist neidisch, was?', 19, array([ 3.9487539e-05, 1.7769393e-05, -2.9615656e-05, ...,\n", + " -5.9231311e-06, 1.9743770e-06, -3.1590032e-05], dtype=float32), 1.4848645833333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_364.wav', 'Puh, das Quiz ist schwer!', 25, array([-5.9231312e-05, -6.9103196e-05, -8.2923834e-05, ...,\n", + " 1.3623202e-04, 1.3030888e-04, 2.1520710e-04], dtype=float32), 1.9497083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_371.wav', 'Alles klärchen!', 16, array([-3.9487541e-06, 3.3564411e-05, 1.5795016e-05, ...,\n", + " 5.1333802e-05, 6.1205690e-05, 3.5538786e-05], dtype=float32), 1.5068854166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_392.wav', 'Zeig mal dein Piercing!', 23, array([ 5.7256933e-05, 1.3820640e-05, 3.5538786e-05, ...,\n", + " -6.1205690e-05, -9.8718847e-06, 5.5282559e-05], dtype=float32), 1.7712604166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_393.wav', 'Parlieren Sie doch im Park!', 27, array([-7.1077571e-05, -5.3308180e-05, -5.7256933e-05, ...,\n", + " 1.5795015e-04, 1.1253949e-04, 1.0069323e-04], dtype=float32), 1.8505729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_400.wav', 'Hä, was? 400', 13, array([ 3.5538786e-04, 2.7443841e-04, 2.5469463e-04, ...,\n", + " -6.3180065e-05, -1.7769393e-05, -5.9231311e-06], dtype=float32), 0.98696875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_431.wav', 'Tun Sie nicht so überrascht!', 29, array([-2.2310461e-04, -2.6259213e-04, -3.0800281e-04, ...,\n", + " -7.7000703e-05, -1.0661636e-04, -1.1451387e-04], dtype=float32), 1.6743229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_438.wav', 'Was lernen wir daraus?', 22, array([ 3.35644108e-05, -7.70007027e-05, -7.30519532e-05, ...,\n", + " -1.02667604e-04, -8.68725911e-05, -2.76412793e-05], dtype=float32), 1.9078541666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_439.wav', 'Was will sie denn noch?', 23, array([ 5.3308180e-05, 5.7256933e-05, -3.9487539e-05, ...,\n", + " -3.9487541e-06, 5.5282559e-05, 6.9103196e-05], dtype=float32), 1.7624479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_441.wav', 'Ich stecke fest.', 16, array([-1.6584767e-04, -1.5795015e-04, -1.3030888e-04, ...,\n", + " 9.2795723e-05, 7.5026328e-05, 7.5026328e-05], dtype=float32), 1.2976041666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_451.wav', 'Es riecht nach Sonnencreme.', 27, array([-4.1461917e-05, -3.7513164e-05, 2.1718148e-05, ...,\n", + " -2.7641279e-05, -1.0661636e-04, -1.0069323e-04], dtype=float32), 1.7007604166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_452.wav', 'Da entlang!', 11, array([-1.7769393e-05, 5.9231311e-06, 1.7769393e-05, ...,\n", + " -7.8975081e-06, 7.8975081e-06, 0.0000000e+00], dtype=float32), 0.97815625)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_453.wav', 'Tja, Thaddäus!', 15, array([-9.87188541e-05, -1.46103906e-04, -1.24385755e-04, ...,\n", + " 1.02667604e-04, 1.97437703e-06, -2.76412793e-05], dtype=float32), 1.6787395833333334)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_458.wav', 'Das Leben ist voller Ã\\x9cberraschungen.', 37, array([-3.9487539e-05, 1.1846262e-05, -1.3820640e-05, ...,\n", + " 6.1205690e-05, 3.1590032e-05, 1.9743770e-06], dtype=float32), 1.9739375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_460.wav', 'Danke der Nachfrage!460', 23, array([ 8.29238343e-05, 1.16488241e-04, 9.67444721e-05, ...,\n", + " -1.12539492e-04, -1.08590735e-04, -1.42155142e-04], dtype=float32), 1.4275833333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_465.wav', 'Ich will auch so ein Pferd.', 27, array([ 0.0000000e+00, 1.9743770e-06, -3.1590032e-05, ...,\n", + " 7.8975077e-05, -3.9487539e-05, -5.7256933e-05], dtype=float32), 1.5404791666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_473.wav', 'Was hat das zu bedeuten?', 24, array([-1.26360130e-04, -1.08590735e-04, -1.16488241e-04, ...,\n", + " 8.29238343e-05, 2.36925243e-05, -1.57950162e-05], dtype=float32), 1.8241354166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_492.wav', 'Die Narkose wirkt nicht.', 24, array([ 1.7571956e-04, 1.6782204e-04, 7.8975077e-05, ...,\n", + " 3.1590032e-05, -2.1718148e-05, -2.7641279e-05], dtype=float32), 1.965125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_495.wav', 'Dein Bruder ist echt krass drauf.', 33, array([-7.10775712e-05, -3.35644108e-05, -2.17181478e-05, ...,\n", + " 1.16488241e-04, 1.02667604e-04, 7.89750775e-05], dtype=float32), 1.9563125)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_498.wav', 'Das behaupten alle.', 19, array([-1.1253949e-04, -1.1846262e-04, -9.8718854e-05, ...,\n", + " 5.5282559e-05, -1.1846262e-05, 4.5410670e-05], dtype=float32), 1.3658958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_500.wav', 'So einfach ist es nicht.', 24, array([-0.00019349, -0.00019744, -0.00022113, ..., -0.00021521,\n", + " -0.0002231 , -0.00020534], dtype=float32), 1.7976979166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_503.wav', 'Das gibt ihm den Rest.', 22, array([-3.94875406e-06, -6.71288217e-05, -1.20436998e-04, ...,\n", + " 1.04641986e-04, 1.24385755e-04, 1.14513867e-04], dtype=float32), 1.5068958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_507.wav', 'Was fällt euch ein?', 20, array([9.8718854e-05, 9.0821341e-05, 6.7128822e-05, ..., 1.7374518e-04,\n", + " 2.0730958e-04, 1.5795015e-04], dtype=float32), 1.5421458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_511.wav', 'Lass die Glucke in Ruhe!', 24, array([2.6851529e-04, 2.3692525e-04, 8.2923834e-05, ..., 9.2795723e-05,\n", + " 6.3180065e-05, 6.1205690e-05], dtype=float32), 1.4099583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_512.wav', 'Wieso denn das nicht?', 21, array([-3.9487541e-06, -2.5666901e-05, -6.9103196e-05, ...,\n", + " 3.1590032e-05, -1.9743770e-06, 1.3820640e-05], dtype=float32), 1.4804583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_542.wav', 'Na gut, ich komme mit.', 22, array([ 6.3180065e-05, -3.9487541e-06, 4.3436296e-05, ...,\n", + " -6.9103196e-05, -6.5154440e-05, 7.8975081e-06], dtype=float32), 1.8329479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_543.wav', 'Entschuldige dich bei ihr.', 26, array([-9.6744472e-05, -7.8975077e-05, -5.1333802e-05, ...,\n", + " -7.7000703e-05, -1.2241138e-04, -5.9231312e-05], dtype=float32), 1.7624479166666667)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_544.wav', 'Das reimt sich ja.', 18, array([ 1.1056512e-04, 8.4898209e-05, 1.1648824e-04, ...,\n", + " -9.0821341e-05, -1.1451387e-04, -1.1253949e-04], dtype=float32), 1.25134375)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_549.wav', 'Sie kamen, um zu bleiben.', 25, array([ 3.9487539e-05, 7.8975081e-06, 3.3564411e-05, ...,\n", + " 2.1718148e-05, -2.7641279e-05, -9.6744472e-05], dtype=float32), 1.8593854166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_555.wav', 'Sie nimmt kein Blatt vor den Mund.', 34, array([-1.1569849e-03, -7.0287823e-04, -5.3308180e-05, ...,\n", + " 2.5666901e-05, 1.5795016e-05, -1.9743769e-05], dtype=float32), 1.8021041666666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_556.wav', 'Hoffentlich geht es ihm gut.', 28, array([1.0187785e-03, 1.1372411e-03, 1.2616270e-03, ..., 3.5538786e-05,\n", + " 7.8975081e-06, 5.9231312e-05], dtype=float32), 1.9342916666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_557.wav', 'Vergiss deine Schoner nicht!', 28, array([ 3.9487539e-05, -5.5282559e-05, -2.0336083e-04, ...,\n", + " -6.9103196e-05, -7.1077571e-05, -7.1077571e-05], dtype=float32), 1.8461666666666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_590.wav', 'Wenn du schon so fragst!', 24, array([ 1.4610391e-04, 1.4807828e-04, 1.7966831e-04, ...,\n", + " 1.7769393e-05, -4.3436296e-05, -2.7641279e-05], dtype=float32), 1.8329479166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_595.wav', 'Was muss ich einkaufen?', 23, array([ 0.00016387, 0.00012636, 0.00011254, ..., -0.00010464,\n", + " -0.00011649, -0.00010464], dtype=float32), 1.6038229166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_599.wav', 'Der tut nichts!', 15, array([ 9.2795723e-05, 6.1205690e-05, 2.5666901e-05, ...,\n", + " -1.1648824e-04, -9.8718854e-05, -7.8975077e-05], dtype=float32), 1.16321875)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_605.wav', 'Natürlich war es das.', 22, array([3.1590032e-05, 1.9743769e-05, 7.8975077e-05, ..., 1.4610391e-04,\n", + " 1.6782204e-04, 1.4412952e-04], dtype=float32), 1.5157083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_612.wav', 'Sprechen Sie deutsch?', 21, array([ 2.05335207e-04, 1.91514569e-04, 1.57950155e-04, ...,\n", + " 2.96156559e-05, -6.31800649e-05, -1.02667604e-04], dtype=float32), 1.5157083333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_616.wav', 'Gleich hole ich Anne ab.', 24, array([6.3180065e-05, 7.1077571e-05, 1.2636013e-04, ..., 2.1718148e-05,\n", + " 3.1590032e-05, 1.3820640e-05], dtype=float32), 1.4099583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_646.wav', 'Ich bin ganz hin und weg!', 25, array([-3.5143911e-04, -2.5666901e-04, -1.6979642e-04, ...,\n", + " -4.3436296e-05, -6.1205690e-05, 4.3436296e-05], dtype=float32), 1.7800729166666667)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_647.wav', 'Frische Luft tut gut.', 21, array([ 1.04641986e-04, 1.97437703e-06, -7.89750811e-06, ...,\n", + " 8.48982090e-05, 1.38206397e-05, -7.89750811e-06], dtype=float32), 1.6655104166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_652.wav', 'Nein, du Genie!', 15, array([1.3623202e-04, 1.2833450e-04, 1.2833450e-04, ..., 2.7641279e-05,\n", + " 4.5410670e-05, 5.1333802e-05], dtype=float32), 1.5068958333333333)\n", + 
"('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_675.wav', 'Das kann doch wohl nicht wahr sein!', 35, array([-1.9743769e-05, -3.3564411e-05, 3.1590032e-05, ...,\n", + " 9.2795723e-05, 9.6744472e-05, 1.2043700e-04], dtype=float32), 1.9827604166666666)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_684.wav', 'Jedes Kind weiÃ\\x9f das.', 21, array([-1.204370e-04, -1.382064e-04, -9.674447e-05, ..., -1.461039e-04,\n", + " -1.382064e-04, -8.489821e-05], dtype=float32), 1.4716458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_711.wav', 'Die werden ja nicht schlecht.', 29, array([-1.3030888e-04, -1.0069323e-04, -8.2923834e-05, ...,\n", + " -1.3228325e-04, -1.1253949e-04, -9.6744472e-05], dtype=float32), 1.4716458333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_719.wav', 'Das Leben ist schön!', 21, array([ 2.5666901e-05, 4.7385049e-05, 2.9615656e-05, ...,\n", + " -3.3564411e-05, 3.3564411e-05, 7.8975077e-05], dtype=float32), 1.2953958333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_720.wav', 'Was machst du jetzt?', 20, array([-1.5597578e-04, -1.2833450e-04, -1.3425764e-04, ...,\n", + " -5.3308180e-05, -3.9487541e-06, 3.9487541e-06], dtype=float32), 1.4099583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_728.wav', 'Falsche Antwort.', 16, array([-5.1333802e-05, -1.1846262e-05, 9.8718847e-06, ...,\n", + " -9.8718847e-06, -4.7385049e-05, -5.3308180e-05], dtype=float32), 1.3923333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_743.wav', 'Oder wir gehen Burger essen.', 28, array([-2.9615656e-05, -4.7385049e-05, -3.1590032e-05, ...,\n", + " -8.2923834e-05, -5.1333802e-05, 6.3180065e-05], dtype=float32), 1.7007604166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_744.wav', 'Lasst mich allein!', 18, array([-0.00018757, -0.00018757, -0.00024877, ..., -0.00011846,\n", + " -0.00011057, -0.00013031], dtype=float32), 1.7007604166666668)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_745.wav', 'Da sind wir wieder.', 19, array([7.2459638e-04, 7.7395578e-04, 8.3911023e-04, ..., 0.0000000e+00,\n", + " 1.3820640e-05, 1.7769393e-05], dtype=float32), 1.4804583333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_754.wav', 'So weit, so gut.', 16, array([-1.7769393e-05, -3.5538786e-05, 3.5538786e-05, ...,\n", + " 9.8718847e-06, -5.3308180e-05, -4.3436296e-05], dtype=float32), 1.5245208333333333)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_756.wav', 'Alles war voller Qualm.', 23, array([ 6.3180065e-05, 2.9615656e-05, 3.7513164e-05, ...,\n", + " -3.1590032e-05, -3.3564411e-05, 2.1718148e-05], dtype=float32), 1.5333333333333334)\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_9_FINAL/9_765.wav', 'Fick dich!', 10, array([-3.9487539e-05, -8.0949460e-05, -5.7256933e-05, ...,\n", + " 3.9487539e-05, 7.8975077e-05, 9.4770097e-05], dtype=float32), 0.9076666666666666)\n" + ] + } + ], + "source": [ + "# print clips shorter than 2 sec\n", + "for item in data:\n", + " if item[-1] < 2:\n", + " print(item)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "sec_per_chars = []\n", + "for item in data:\n", + " text = item[1]\n", + " dur = item[-1]\n", + " sec_per_char = dur / len(text)\n", + " sec_per_chars.append(sec_per_char)\n", + "# sec_per_char /= len(data)\n", + "# print(sec_per_char)" + ] + }, + { + "cell_type": "code", + 
"execution_count": 17, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " > Average durations per char: 0.07641993439576344\n", + " > STD duration per char: 0.015251748851166484\n" + ] + } + ], + "source": [ + "mean = np.mean(sec_per_chars)\n", + "std = np.std(sec_per_chars)\n", + "print(\" > Average durations per char: \", mean)\n", + "print(\" > STD duration per char: \", std)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "# fit a distribution\n", + "dist = norm(mean, std)\n", + "\n", + "# find irregular instances long or short voice durations\n", + "items =[]\n", + "pdfs = []\n", + "for item in data:\n", + " text = item[1]\n", + " dur = item[-1]\n", + " sec_per_char = dur / len(text)\n", + " pdf = norm.pdf(sec_per_char)\n", + " pdfs.append(pdf)\n", + " items.append(item)\n", + "# if pdf < 0.395:\n", + "# print(item)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6gAAAOFCAYAAABnc8/AAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nOzdd2AUZd4H8O+zm957QgqkUkJCKAmh9x4VEATECqJYALuCYANFLKfevbaznXp3WO70VMTeRVSqCCggQpDeOwRIMu8fWzK7O7M7u9nNzm6+n38gu7Ozz87Ozjy/p/weIUkSiIiIiIiIiPzN4O8CEBEREREREQEMUImIiIiIiEgnGKASERERERGRLjBAJSIiIiIiIl1ggEpERERERES6wACViIiIiIiIdCHE3wWwl5KSIuXm5vq7GEREREREROQDK1euPCBJUqrSc7oLUHNzc7FixQp/F4OIiIiIiIh8QAixTe05DvElIiIiIiIiXWCASkRERERERLrAAJWIiIiIiIh0gQEqERERERER6QIDVCIiIiIiItIFBqhERERERESkCwxQiYiIiIiISBcYoBIREREREZEuMEAlIiIiIiIiXWCASkRERERERLrAAJWIiIiIiIh0gQEqERERERER6QIDVCIiIiIiItIFBqhERERERESkCwxQiYiIiIiISBcYoBIREREREZEuMEAlIiIiIiIiXWCASkRERERERLrAAJWIiIiIiIh0gQEqERERERER6YKmAFUIMUwIsVEIsVkIMdPJdmOEEJIQolz22Czz6zYKIYZ6o9BEREREREQUfEJcbSCEMAJ4GsBgADsALBdCvC9J0q9228UCuBHAT7LHigFMANAeQCaAz4UQrSVJqvPeRyAiIiIiIqJgoKUHtSuAzZIkbZEk6SyANwCMVNhuHoCHAdTIHhsJ4A1Jks5IkrQVwGbz/oiIiIiIiIhsaAlQswBsl/29w/yYlRCiM4AcSZIWu/taIiIiIiIiIsALSZKEEAYAjwO4tRH7uEYIsUIIsWL//v2NLRIREREREREFIC0B6k4AObK/s82PWcQCKAHwtRCiGkA3AO+bEyW5ei0AQJKk5yVJKpckqTw1NdW9T0BERERERERBQUuAuhxAkRAiTwgRBlPSo/ctT0qSdFSSpBRJknIlScoF8COACyRJWmHeboIQIlwIkQegCMAyr38KIiIiIiIiCngus/hKklQrhJgG4BMARgAvS5K0XggxF8AKSZLed/La9UKItwD8CqAWwA3M4EtERERERERKhCRJ/i6DjfLycmnFihX+LgYRERERERH5gBBipSRJ5UrPNTpJEhEREREREZE3MEAlIiIiIiIiXWCASkRERERERLrAAJWIiIiIiIh0gQEqERERERER6QIDVCIiIiIiItIFBqhERERERESkCwxQiYiIiIh8rOZcHWrO1fm7GES6xwCViIiIiMjH2t79MbrM+8zfxSDSPQaoRERERERN4OTZ5tmD+vve4/jvyh3+LgYFCAaoRD6yctthnDpb6+9i6NqBE2eQO3Mx3lq+3d9FISIiIh8Z/MS3uO0/a/xdDAoQDFCbyI7Dp3DhM9/j8Mmzml+zovpQowKc1X8exvTXV6O+XvJ4H/6291gNTpxpOAZfbdiHpX8ccHs/l7+8DDe9sdqt19TW1WN59SEAwJLfDyB35mLkzVqM02fr8P6aXfhxy0HVMu85WoMxzy7FzW/+7HZZ/e273/djy/4TLrc7V1eP2rp6t/YtSZLN+fj4Z5sAAHe8/Yvqa87U1mHf8Rq33qep7bM7T4mIiPTkyw17cbbWvXs2+UbNuTos+b2hLnvk1FkcPXXOo329tGQrcmcuxoY9x7xVPF1ggOqBunoJy6sPofyBz9B6zkc4dbYW63YexaOfbFD98T/3zR9Y9ecRfLB2t+LzP245iDpzxX3P0Rq8tXw7xj73A255cw1OnKnFXf9bi3U7j2ou333vr8foZ5Zi0ZpdOHDyjMPzdS6C1u2HTuG9n3cqPrdu51HsPWYbMLy8ZCt+3n4Es975xa0LYG1dPSRJvSyV87/ABf+3xPr3pFeWY+ILP2HeB7/ik/V7sGHPMVzx8jK8urQa0xauQs05U/A48qklOHrqHL7ZtB8A8O2m/Xj35104cuos1u08ityZi/HQR78BMAVAM15fjRXVh/Dxuj3YdvAkvtq4D4WzP8JFz/2AO//7C95fYzoWkgTMeXcdZry+GhOe/xHTFq7Cwp/+tJavvl5
C5fwvMPVfKwEAa3c0fGfbDp5EzTlTsLVl/wl8tXEfXvxuC06aA5tNe49DkiQcPX0O76yyHQbz/eYD+HWX84tPXb3kcCy3HzqFuYt+tQkKf9lxBC9+twUTnv8BNefqIEmm1207eBIb9xzHZS8tw4C/fAPAFHidqa2z7vf3vcfx2a97Me65H1A0+yMUzv4Im/fZBrNXv7YCT335u2IZ+z76NTrN+wy/7T6GI6fOYv/xhnNz0OPf4MnPN+HCZ763ec3Nb/6Mrg9+YS3DlFeX4+UlWwGYLvJfbdjndqDsbV3nf4Hz/vZdk76nJElevSH9tOUgPrS7Pj3+6UZcZz6XPXXwxBm88O0W1d/57qOnnTbErdx2GFNeXeHxzftsbT3+/dM267Xmm037NTXafbR2N3JnLsYf+09Yr5cHT5xBXb2ErQdO4qO1u/HLjiMuf5eBzlL56ffoV26/dsfhU/jnj9twrOYcth08CcB0jfxt9zGcqa3Dpr3H8cr3W1FXL2HxL7uxbudR/LH/BCrnf449R/XdKEUNmrIBseZcHd5asd1pvUGPjp46h0MaOijeWbVD9Vq3/dApXPbST9Y6gxb15nrB8upDmPzKCsx85xf8uOUgTpypxX9WbHe7A+PPg6dQW1ePpX8cwAvfbkHuzMXWpE/n6urx189/d9mx8s8ft+Hjdcp1YblzdfU4dPIsfvjjoE05D588i+M1nt0PLH7ddQxt7/4Ifx485dbrdh89jZ+3H3G6zbm6epyzq5Os23kUkiThxy0HceMbqzH6maW49KWfsH6XqY7Yce5nKJv7KbYdPInZ/1uLl5dsxeUvL8OBE2ew/dAp3PW/tXhpyVas33UUv+46hl1HTmPjnuMAgIc/3gAAGPbkd/hDQ+dCoBB6+5GXl5dLK1as8HcxnLr85WX41hz4AEDPwmR8v9nUmzaoXTqev6wLDAYBANh64CT+u3I7nv7qD+v2c6raITYiBHe+vRbZiZGYN7IEk15Zjj6tU232ay88xIDf5g7Dol924bwOmTAI4Np/rcT9F5TgndU7MKhdOlqnx2LZ1kMY9/cfrK9bdtdARIWH4Nhp0w+632NfQwDY+MBwnKurx/PfbsFVvfIQEWoEYGrJ6Tr/C5ytrUf1giqHcuTOXAwAWHf/UMSEh2DnkdPoueBL6/MzBhbhb1/8ju/u6I+cpCiH1//zx23o3DIB7TLikH/Xh5jcMw83DS7C2dp6pMSEAwBWbjuEgtQYdJxrSiYwtku2prkLseEhOG538b68eyu89sM2xe3X3DMEZXM/dblfV6b2zcffv9mC7vnJ+EGlZ1WLEaUZOHjiLH7aeghzqtrh1Nk6dGmViEte/AkAMLg4HS9cXo6xzy5FvSRh4dXdIEnAul1HcdFzDd95iEGgVnZBf39aT3TITgDQ8P3JGQTg7D4VFWbE1D4FeOLzTU7LP6hdGj7/bR8AoGteEspbJeJMbT1mDW+L4zW16KQxOUTvohTsPHwaT03sjBHmwG/xjF44evocJr5gOhaf3twHQ574FoDpuC35/QBendwVsREhqJeAf/6wDbOr2mHbwVNYt/MoxnTJBmBqCHjk443o1yYVY7tkIyLUiK827MPbq3bgqYmdsedoDQ6cOIOSrHhNZf1j/wkMNAf0lt/Lym2HcejkWQwuTgdgOubJ0WFYefdgSJKEt1Zsx95jZzB9QCG2HzqNrMRIGM3XDFe2HjiJd1fvRE5SFG77zxrMHdkex2tqcX2/Aghhu4+jp8/h0U82YPaIYkSGGRX3V3OuDgdOnEGvh00ByHs39ERZju25svWhEXjis024pFsrpMdFYM32IyjJiodBAF/8tg9xkaHompeEpX8cQJjRgPLcJOv+r/zHMny9cb91v3uP1eDJzzfh9WXb0TYjFhv2HEfHnAS8e0NPAEDV375DWmw4/n5ZOQwCKJz9kXVfmx8cjhCjY7vq8ZpzmPXOWkzs2hLd8pOx5cAJRIeH4NTZOlz92gps2W8KjrISIrHzyGnrd7V2x1EkRIUiIz4CdfWS9Rr48Mcb8OzXf9i8x8KrK63nnj3L977wpz/x75+2YfGM3tbnTp+tQ0SoATsOn8aqPw9jZMcsm9cePnkWB06cwT+WVmNAmzQYjQKb9hzHQx9twEc39ka7FnHWbc/U1uH3vSeQHBOGFvGRAICvN+5DcWYcwowG/LT1EIrSYnDdv1bhoTGl6NwyETXn6iAEEB5ixKGTZ3Hk1Fnkp8YAAE6drcXkV5bjxy2HEBcRgn9NqcTy6sNokx6LXkUpmPfBr3jJ3BgEAJGhRiybPRBCCIx8agkeu6gMnVom2nye8X//AeeVZeKRjzfgeI3ttXjL/BHIv+tDh+P34OgSzP7fOofHZw5vi9GdspAeF6F43L3hreXbUZmfhFbJ0YrPS5KE+xf9igldc9A2I05xm2DQbf4X2HOsBo+M6YBxFTn48+ApJEaHIjYiFOfq6rH3WA3++eM2/P2bLdgwb5j1t/LJ+j2Y+s+VWDilEj0KU9x6zw/X7sazX/+B88taoKpDJrISIm2e37zvBHKTo2x+85Zz8qUryjGwXbrHn3fVn4fRMTsBWw6cwLGaWnS2O4+VSJKEmnP1OFdfjzCjwXoMauvqYTQICCHw6tJqbNl/AvePLMHxmnPYdvAUUmPDUTn/C5t9bZg3DLX1EmLCQ6yPrdl+BCOfNjXQ9i5KwT+vqrR5jfze/dhFZRhrvqfJnTxTi192HEWoUSAi1Ijz/m8JJvfMQ4+CZEx5zbFu/dCFpRjYLg1xEaGY9c5a9GuTanON+nbTfuSlRCMnKcpa17PUdywsdb03l/+JO99ei6l98jFrRDtrI4IQApIkIW+W7W+/ekEV6uolfLVhn7Vsf53QETHhIdh//AxmvrPWuu2NA4tw8+DW2HusxnosHx9Xht1Ha3BFj1zU1UsIMQjUnKvDoMe/wU2DWmPPsRpU5iWhX5s01NWbGuEt174b31iN937eBQB4ZEwHjO6chcMnzyI2ItTmXmmpT1vuX/mzFqNeAq7pk4826bHWegUAVB84iSWbD2D+h78hPMSAJ8Z3xJX/WI6+rVPxzab9mDeqBHe/a3udm9a/ENf2K0DJvZ84fDdaVOQmYnn1Yevfj48rw6iOWVi9/Qi6tHJ9TvubEGKlJEnlis8xQHXP019txqOfbHS53fQBhTAaBJ78XLknyVc+mN4Le4/V4KpXG47hP66swKRXljtsO6pjJrq0SsTd761HUVoMPrulL37ZcQQXPNXQg/X5LX1RmBaDunoJry6txvllmah48HPr8/NHl6I4Mw6jnm54TVJ0mLWl8KUryvHp+r24bWgbGASw6s8juNp8Ifr8lj4Y9Pi3NmX6z7XdbQItcvTVbf3Q/7Gv3XpNbESIQ2WxuUmPC8eXt/ZDeyc3gv+7uBOmv24aCn5+WSZKMuPQITsB4aEGtE6PxZ6jp3Gmth61dRLKchJwprYOd/73F7xrvtHdOLAI1/YtQLt7PgYAfHN7P9z93nprw1P1giosWrPL+h4x4SE4caYWEytb4u6qYrz2QzXaZMTivZ93YW
j7dNTWS5i2cDW+ub2ftQKt1MAgd+vg1qjMT8bSPw7gpSVbcbymFh1zEnDv+cXoZA5Ypv5zpXV0gRaW4KxHQTLKchLw7Nd/4PahbbD3WI218ef2oW2s18aY8BC8MqkC6XERGPbktzh5tg5PjC/DBWVZKFAIUABTg1e9JKHDfc4bjDY+MAzhIaYKxMpth/HNpv34aO1u/G7uyb+uX4FDcKmkMi8JP201DeHPS4nG1gOmINZoEC5HmNhrmxGL8FAj1qi0rPdvk4qvNpqOd3mrRFzfvwC3vrUG1/crxIMf/uZ03wYBdGmViHmjSnDRcz9Yf8cXds7CT1sOWQNuJePKs/HWClPDXnxkKI6aGym/vb0/Bj3+Dc46GXlwfb8CPKNwHOXHKjEqFKvvGQLA1FN9+mydRw1+V/bIxStLqxWfi4sIwS/3DXV7n3JTXl2BgydNDUKTXzHdfzq3TMDb1/VA3qwPrZ/jeM05vPbDNlzXt8DayLxu51Gc939LkBYbjmWzBzWqHE3peM05xISH4N8//YmV2w5jaPt0ZMRHItQokBkfiTpJQlxEKMru/xTX9rVtfPzbxZ0ww3yNurp3Hl74bqvNvgvTYnDgxBmsvnsw5n/4G174bisGtUvHriOn8fZ1PSBBwsKf/kRyTBhOn61H/7apSIwKswZ0/125A3kpURjzbMO9PsxowNe390OPBV+ia14S5o5sj2FPfocre+QiOToMU3rnIzLMiBmvr8b7a3ZhXHk2xpXnoEurRJytq8cL327B5T1yERcRat3nb7uP4fd9J9C3KBWb9h1HRW4STp2txbwPfsPry/60OZefu7QzhpW0cHpMX/uhGve8tx4A0K5FHD66sTcOnzyLTvM+w4yBRbi8eyuUP/C59RiFGg34bbfnIyyeHN8Re4/VYPO+E9i074Ti9WVQu3QkRYdixsAiPP7ZJryzSnnkm5r8lGhsOXASpVnxWGsepffKpAr8tPUQ9h6twTurdyLUKFCQGoO+bVJtAlOL1ukxuPf89tiy/wTuNh8fua0PjUC7ez5GzTnH643adUaJJ3UfoKHTBAA+vqk3QgwGDHr8G5ttIkIN1vJ9c3s/3PW/tchNjsa/ZSPknPni1r7Whmq9iA4zYv3cYf4uhlMMUL3IVeUwGG1+cLhNLwYRAavuHozOTbRcwLOXdMbw0hbYfugUej/i/lBLiyt75OLDtbux7/gZ1xu7EGoUOFfn3v3DncqImpeuKMeGPcfx1y9+53wqHXjn+h7o3DLRp/fGNfcMQXxUqMPjlt6Uf0+pRE9Z790Hv+xC78JUfLlxL9pnxltHWth7dGwH3P5f0/z39plxWG8ert06PQYlmfEIMQprgG8JUGvO1eHQybOolyTsPVaDLq2SFPf99cZ96FmYglCFHv8dh08hMtSIZPOIITU15+oQZjRYg2Wt9h2rQVe7Hjs9sIw08PRc+fmewZi2cDWWbG6Yu/f0xM7Ysv8E/mLOZ2AZxZSfGm0dOWHxxPgy3PymcpKe7vnJeP2abli38yheXVqNR8Z2sPb8/e2LzRhemuFwHslH8ZApKLef8kP+tXLOIJfXGX9igOpFzTFAJSL/+/LWvta5wUTNyfllmfi/izs5PP7h2t24/t+rEB1mRN82qZg5rB1uWLjK2hPkbfKeGAulaTA/bTmI8c//iKl98zFreDsAwFNf/o6zdRKGFKfjPHNeBflrtx08icOnzqGjeXi9ZUjkxV1b4qELS63bbT90CkaDQKZsOOzOI6dxoqYWbTJisX7XUTy4+Dcs/cPzqSa+Ih9l4S1T++Tj79869uo11oWds/D4uI6s81FAmzuyPS7vnuvvYqhyFqCGKD1IRET6wuCUmquP1u4GFAJUi5Nn6/Dh2j34cO0en5bDPjhVc+CEaYrL9kOnzMlRgMc+3aS6D/nUoWn9C9G2Rax1OY7Xl/2J+aNLrPPLLSMotswfAYPB1MMnzwGhZ94OTgH4JDgFgHdW7cRXG/b5ZN9ETUU+xznQBG7JiYiIKOgZhPIQ121uZuD0hRe/24LxFTmINc99PHGmFhvN2bVdBc2vLq1G6/RYm8Dtqa82O2z32KcbEWo0oGtew3Din3ccQXZCJJ7UGDST+w57mDmciBqPASoRERHpllIypyW/H7Aur+BPDyz+DQ8s/g2RoUZM6pnr1hzre99fj6cndna5nXwVAIspr67QtGwJEVEg4jqoFFQKUpWXCmiMGQMKvb5PIiLSzrJe4Lurd2L7oVO6W5T+9Lk6jxKA3bBwlUfvx+CUiIIZA1SdaJ8ZvOurNaUPb+yt+HiKkyxm4SHOfwa3DGnTqDJpNagRa7o9dlGZF0tCRKQvVX9bgnU7j+KmN3/G6Ge+xwOLnS/PQ0TU3A1om+bvIniMAapO9C5KtVmQnTyjNldpUs9c1df8dUInLJxSqfq8mkXTeilmcLQYUZph/X9OUqTqdhbTFHpq/62xXG3SYzVt522PXVSGR8Z28Mt7E1HzYsl+a0lCREQU6MIUOkm+u6O/V/adEBXmlf34AwNUnbiuXwEeH+e8FywpOnBPNE90yI53+zVK4WnvohRU5CqvVWcioYdsDT0A6Ncm1ebvq3rl2fxdvaAKpS7K540VnORr+7VKjlJ/L3j2ZkqB80cqvdD2vrujP8Z2MS2Wbu+vEzpq2sfknnmuNyIiIqJmxZM6YCDqU5Tq8FhOknp9r7lggOpn1QuqUL2gCvGRoUiPi3C6rdKatUotL8HCkwAvxGjA9f0KbB4bqHGIw2c397H+f/7oUpvn7j6v2O2y1Ms+wFU981CaFY/iRvSSPzq2DO9P66n4nCQBg4vdHyL8wTTHYFRrT778AvrjrIG4c1hb3Ht+Me4Y1gYjO2Zp2od9h/cH03spbrfsroGICjM6PJ4a2zB0W22ucFyE/3PBhRqVe/ZJ34a2d/83JR85Qdr87eJO+GB6L873JyKrzHjXI888MaeqnU/2q9UQu7pap5YJ+OdVXf1UGv0K3ujGT+aNbO+0gr/sroHW/2cn2v74kqLDbJL82M+bvKG/7c179d2DsfruwS7LFIiV495FKa43UnFx15YOj6mM/LVRJBsmK18E3VPyALtFQiQWTe+FitxEm23u0RD4lmaZWhG75iU57UV/aqL6OoGqZYSE5y/rgoVXm4YSv3eDcgAMACvmDLL+X34eA0BGfASu61eAST3zcH0/03n65jXdcGEnbYGqRduMWPRt7diaGBsRal3AXs0Ehe8dAIwG2y9/at98t8qkRzcNKvJ3EZqF8BDHRhGLhKhQxcefuaSLW+/xv+t74LwOLTCuPNut1wWTC8oyUZIVj8vsFpSXN0CRb2m5RxI1pY4tG+759tOpehQke7zfKb211QF+sqvneMvzl5c7PNY9X/3z9CpMwbT+rhvv7Nc8DfR7CgPURrKvqF/WPRcLp1QiIlT50KbJekmVbgh5KQ0Bqn0wY59EJzE6DNEqi/C2zWgItr68tZ/iNno1ojQDr07qqnrDfPu67tj60Aj0Mg9/fVHhx27Pvje2qkML2V/u3Zn/c213TdvJe10t72802J4Xk3rmu
kyQtWh6L2x9aITNfpTIK9PvT+uJPgqBnr16CRjSPgM9ClJQvaAKZeYg0L4HuWVSlE2DSZqL3n4AqMxPtgn6ldh/HqNB4InxjsODhVAOQOXfnNr8Y4MQNje3WcPbOSTG0nKsGsMbw73lbhrU2rs7bAT74e/BxP6USo0NR89CU0ViYFv3e1dLshx/651aJuKpiZ1RlOZ6HrnaaIBvbu/ndlkCQVenUzNIK2fTQyw2PziiCUriyBeNEME2isGujRXPXuJ8aSL7KUp6dHVv5fvGpd0a7vMdsuLx0hXlWHPPEACmnkYLrfk5hpdkYOtDIzCxUrkBW03bjFikx0U4zTPiichQx0bPzIQICIX6S765w+pfUypxpZM8KmoeGRvYyTMZoDaSfe8MYAocf5jZ+JYX+ZDNt6Z2t1bAc5Iivf6j8cSs4W1dbiPvddPKaDDAoHBcJ/XMRfWCKnRplQQhBJ65tDNev7obBhWn49e5Q/HDrAE228eEh2B0pyxcJJsjmZ0YiRsHFuGWwfIKfkP0MK1/ofVzrZgzyKEBIj81WnU+66JpvfC/63tY/85JirIGz5b5qvZzRYUQiucQAJvhvEoXL9v9NPw/MtSIDtkJmnovlYaNA8DEypa4tFtLPDG+DNf1K8Arkypc7stVuZSkxNr2CAshVHuJXTUj2B/GEJXjCgDLZtuel+1a+CfJVFPR0vrqKW8H3wBwcVfHec3e5KxRK1al0Q8Aylsl4qUrKvDpzX1Ue1ABoLOsItWzMBnnl2XikTEd0LqRycwu7OzYIp4RF4FWyd5fXssfHK4X7NVrtE0PDNeURM9oEEh0ck77ii++YhFkJ84lla2w5l5TkPb61d3Q3UXvYYSTkR/uktdptJjcMw+fyqZLKYkND8G1fQscHv/lviF4YFSp9dpqNAgMbJeOePPfl3dvZd3Wvk4kv+bK1dVLEEIgPtL23HaV3ba/wvPPuGgYUNIi3rYx31KXekd2XEd1zFI8Y9+5rgcWzzBNe5I///ktfXD70KZZXcKfGKB6gX23OqDemyOXpTiMtOF1F3bOxsc39cZ3d/Q3De+MMVXcJ1Q4bwma1DMXGeYfxUVdfNfFf35ZpuLj1Quq8ODoEozpnI2UmHC3x9bHRypXEO8cZhsQx0WEWi/UUWEhaGE3XyEhKhRPjO9o08ucEReBmwe3RkFqjMM8AAC4bWgbTDVfOFNiwm16CpfNHohF05TnSAKmILRTS7te7+J0VC+oUvmuneuQ7XjBVTqthhSnoyTTFAC/fGW59eYwqlMWvr29P567VH3IYb2T4OKBUaUY3Skbdw5ri/zUGADAuzf0xFe39dP8GS5QOUe+nzkAj4ztgOxE7YkABheno3PLBJsETPLjIb9hfTijN941D1dWOmbxkaG284olU2Knz2/pq7k8Wl3Xz/FGLO91vHNYW8X5td4yp6odEn2YYG1C1xzVaQTuNGx0adXw28lPiVHdTmsSLzXVC6owqDjd/H07VqRszimF10eEGtE6PdZpYP60rCJTmBqD/7u4E8ZV5LgVzGu9dgfT0Ez7j+KsscBdtw5ujekDCjG8JLh611wRAppT6Dm7H6hp6aVkLk8qjJzxBft6RCBIiQlHfGQoqhdUoXtBssvriNr9xNWSekrs6zSAadpYsso9JcQoXDbEdc1zPjKitcpoktGdsvHjrIHWYN1ZI6Glfjrc3Jtufx+2X30gLyUa388cgOoFVfhh1gDcprC84PCSDFzWrVNBo9EAACAASURBVJXD40osPaUVuUk2wbGlbt5ZdlzVOiASosLQPtMxUVR2YpTDlL9gxADVQ5aTTwLw5a2mSq28w8bg5Mj2aZ2KLq0S8azCXKUrephO/revM7WutM2IsyajiQkPwZb5IxySANlrmRSFEHMBBmlMnPPcpe63DGXERai2KF1S2Qp/MWcldjaPS8ldI5QnsEcoDI1wl/J1QFsNLy02QnVItVbe7m2KDQ/B85eXW3ucB7RNt0le1DI5ylrxT4oOw9MTbb8vdyu3HXMSbIahu5KZEGmdQyuXlRCpmP1XjRCm7/+d63vaJGB6bXKlzTYWxZlxsqBM+UNO7plr7UmXAIzsmIXCNMfAyNkSRa5kxkfgzmFtHSqI8uC4VXKUR5W8R8Z0sF4nXFHrKfeG1umx+F1leGC/NtrXYEuJ0RZEe2s5LtP37VgRklcWXI1cUBNmVL4BuPM9KLXgO6N1vTv59A9nVt09WNMoGS2U8gIosT/eUxV6WTw1tjwbtw5pg/svaO+1fQYCAd+McrCQ92p5wjLE19V9pbyVY6CkRmkElsU1ffJd5jPwFlfv89rkrhjUzvXv1j64cvV19m6dgikKUy+8de3cOG84VqrkP6k3t3K8Nrkr/n6ZeuO45bfubiNURnyENeD72Tz0V8nfJnTE7w8Ox+hOpoa+uAjbYNb+DPnvtd2tHQkt4iMVR7YJITBvVImmKUGWMiZEhWLNvUPwyqQK5CZHWQNUx3273KVT9jlrggEDVA/JW6gsww9CZFGpfQ/qVb3ysGHeMACmH+7b1/VQ7NXoXZRqHsaqfDE2GIRblSZfbGl9hQBGlLbAA6NKGt2rIRcV5rusq764Uc8a3lZzL7GrlkN3RbrR82YQDUOMO7dMwLxRJQFzUVMbstUmI9baWGT/m3N1RgshrC3K8sBhah/bBAr3nt/eYUi9u3P+6l2ceFpGXNgbV5Gjep3QakqvPF1MF7Dnj15By7ng7L21lkveEyW/XlsefmK867lB8lPGUrEUwrT0gnxEhmW7v1zk3flGSdFhqtMPXLFPthanMioGsJ2DaP9u6XGeXZ+UpglYPktqbDhu1tEcbl8zXVuUrz/28+E8acgqa2Sw9/CYDnhwdInLJUX+q7ExDgAursjBZd1aKZ4/RoPA3JHOGynGKAyl98S7TpIOAqb6wItXmAKX6SoZrOMjQx1Wa3D2PQ1rn4Gh7TMw57xim9Fbb17Tzc3Sq3PWAGC59vVpnYqh7ZVHKwjR8BlCZKNv7PfamOqaEAKhKg2FluctSrPikexGXeiBkSXW/88fXaqp979fmzR8fXt/1Q4bTxpDLflQpg8oxMc39dbc+BgoGKB6yHIuSRJgNP/RRnZy2Fc427WI80oPoDPuTgKXs59Ur6ViYPlBXdqtlVstc4lRodZMqvZDA2cM9Ed20sZFrVP7FqC3bB2ry7q1wg39lVv+R5S2wPLZ7s/LtWe5P2i5pjXczIT15tEiIVLzUJXG8nWwYQm67e+ZQsMxkv+OLWaNaOcyaHNnaLL9/qtKWzg874+AbEJFDqZ78HtTKr8WK+YMcjo8XN4I0djAGzD1yDwwqsT1hmaWQClJtrC5/dciD3ymKCT5sCRuU6tAWh7WOkfuv9d2x8NjSjHWPNxXkoD3p/XC9zMHOGybGB2muiyOliQ53uRO0CI/EvLfQXpcuMeNlc4afIQQuLEZZcE2GIRqw6zRIPDO9T2s+Q582dOqJj4yFJdUtoIQwmG+nqe6FyRj3qgSh3mHWj12UQdsme/7pFGW0/Tr2/vb5cUw+eb2fvha4ZrpbCj2c5d1Ufzd
KK1MkO/GaCgLpXLKqa3JLm+Uk49EUwzMPLgf5rr5WRpzy5WvH5+XEo1RCvk+lOoW9v5xZQX+72L3V1+w7LsiNwm/3DcEtw5pg5SYcM3ZiQMFA1QPWS58BmFae/ONa7rhtckNvWiWE8hoEFg0rRfGdHZvuQ1PhHrY2g2Yhk9Gy3rjFk3rpZplLSsh0iGjsDuGlWQg0VwJnNyz4T0qchNtLn6WH/ZVvfKczv3Um3mjSnD7UPWhcd7IWtgQcrr+zq3bylotA23amv09TD6szHKztr/RaTk2Y7uYeiGvUjnXVcvj4nl3so86Sy/vrlEdlef82i9pBQALxnRQrcDNHdker1/tvRZ3wDQESevwcKV5T3JaslS+N60nRrgRTFuuN9eah9MpDc+bPaJhaHZmQqTD57HMPVWrk8h/i64IAZTnJmG8i5wD8gqh2jmvVMnVorG9Y1YaAx9vJLe5ZXBrh8Yqb+072AhhmgunlO9AK28GtZZ1sMfazb+2ny/oiqveKPsy29+ThRCKvYSWuY9yagmB7KdiXdgpC0nRYTajreTnpFKZWyVHK460S4oOQ2VeEh66sNThOXe4us5ayO+3alNzLEkJ1ZITWobaAsDckdobDl2xrMhQlBaLZbOdJyd98fJy3He+6RouP9xqQbUaIYTD6gv2X9/oTlno1yYV05ys7dy/bZpqLhdn5L2w8qHLaquHBKrg+jRN6LWrKnH/Be2twwK65SfbXEjkLbil2fEez2XyhNo7RYYasWhaL6y9T3nc/rr7hzb0EDgp7qB2aXjxCtdLu6hxd/7HBWWZ1iy4vuHfiosn7265wWppk2jotQGGFGdgULt0zPTS3DJf+fmewVglm+Ni/zHnjixp6OV0cSycHaKk6DC8fV0PhwRbrjj7OY/smIkHR9vegOepDCnrXZSCxOgwr/SgVi+owl0qC5APbZ+Bt6Z2t1teqUFhWozNcM7Lu+eqZoq8po//W2kvqXTd+x8XEerWb8vSeGMZ6ms0CIeh2fZD6v93fQ98clNDBdVSQVCrtFv2p+V+oLQPVy9Te97T+49axnJPVbqa4qBQzEfdDE5mDCzyeGhysFKrfnuSNMeXkmPCseTO/g7LnNkHRa6yxLpiGVEgn6plCVycUZov2To9VnHEjX3w9/j4jlh192D0Lkq1BnGeXveNBoE3p3a3jthwRv4e9nUvrYGZloByXEUOru6dp2kUXHxkqNN3tgw91/I7zpb1DKfFOu+BH1ScjitlnSJaKSVWs/QCW47vb3OH2TwfExGCVyZ1RbqGJfncobbKAQD0KUrFHcOCJ7uvvq5OAeDCzlnWOUBX9MhV3c7yu/JkCIW7Xp3c1dry6ExyTBhKs+NVhxoLoa2NubHB9qB26ZpaXX0R09sn0gGgOmm9qbjTdmeZi2LpuclS6Blz3L+lUmyqYL94RbnbQ1S9SUurb0JUmM2F2Nk51/D5lLfxzXkkVIePVpW2sAYyljJd1j3XYSjPmnuG4KUrTFluQ5xlVXOD2u9KCOF0/vPnt/S1zmdx1YDktV41D1gSVmn9SuXf/dKZA1Qb54CGCoelcVGSJNS6SGuaEBVmM7XDcgWVzxmzOf9kjUXOyuopbT2z/gveXCV+sunVMB8rT+YDKi1h4W4vSTD4x5Wm64vakPN/2a0l6WquvJz9Ou3ekp0Y5TDn0p7m37/KlglRYaheUGUzssBZfc66P41vnJscZbNMoD3L0nfOhqJrGaWmpTyW3s/E6DCvNEyrvWeY0YDZVcWIjXBvWLXS7h67qAw3DSrSlhhLYQdqS87I2czxz3A+Re3ZS7uoTvux7MfXU/i0MBgEru8XPNl9GaC66fFxHfG+huGmIUYD/nFlBRZ6eZickr6tU1GSFW8dg98iIdI6/CAjviHzrOXHHmIQqsN3nbEsEt/Y+o39LdDTeSJqws29GJalUWzeW/bm0wcU4b0bejZZRj9vSIuNwNMTO+Pt63rg6Ymd8ffLXPdkuzvvzdvs39WT4eHOSq7Wm9zYCuljF5U5TaZyabdWNq2ma+4ZghkDizDQ1eczlzM+qiH5hSfzUJS4U8G054/5Z3JCOK4ZZ8/d36r8nM9MiHRaeXr5ygrcOaytTRkGasiwafN+5rdLUpkP2i3f1EhgWYC9MT6c0RuZ8RF4fJxs2SUPfuNFCpmr5RbP6KVpTWVvaGzg3slcMb2iR67j+o125/fa+4bgCnPlPVjXFLQ0CKj9tAvs7pFK2ykto6dFQlSoLhOwyVl66JKjwxQbbu6V9ao+e0lnTY073fKT8NKVzpfXemBUCdbfP1S1h3DZXQPx9CWu7wn2Aa587XSLa/oUoHpBFWLCQ1STBvUuSvFZg4Ma5REips+TGhuOmwa11nS8LcvdWJacWTFnkKZ6t/zt57mRqyAQvHtDT69P0fEH36VLJbeXCWisK7rnok16LHoUpmBQu3QMbJdmnVvy8U29kWtezF0IgdlVxfjf6p04cOKszT6UWrABU89FVkIkHlj8m9uVoLKcePRvk4rV24/gyKlzCDUYbN7n+cu6YPzzP7r3YZ1Ii43AK5Mq0NlF65vRIPzaG+QpyzBNteGa9tyZ99YUvN2TYdmbJ5lwnbGfB6VEPucjPirUIYGEYs+FwkNKc0Q90Zie2FhzA1SuQkKd6gVVOFNb51Cp2PTAcJf7/fyWvvht9zFNZVg0vRd2HTmt+rxlXV3LV92/TSq+2rhffYcaTolLKluif5s05CRF4bp+BVhRfcj63OhO2RjaPgPF93yiqfzyt7ttSBt8sn4vzuvQMMfo0m6tMLg4wyujNooz47B0lvM5V0rsz0lXP5v2mfEYVpKBd1bvtD42uWceXv5+q/b31Lid7RI/jo+54k4gHRsRisiwhkbX+y9oj3vfX++wXVWHFlj8y27N+71tSGuUZifgipeXaX6NhUG4Xou0Z2Eyvt98UPG5e88vxv2LfnV4vDGNT+vuH4o1249g5NPf2zx+TZ8CLK9egdbpzhs4fMHSmLX7aI3L7ZxJjgnHggtLNS2HZRlxNaEiBxd0zMTEF35S3O6Na7o7PGY/X9FoEKpL1hWmxdisv+6M5fOlxIRhxRzlZV+8zdvVCCFMnSauRquomdwzDwlRYRhrHmmhdWUCy3UwMSq0Ub2f8nNs3f1D8cjHG/DaD9s82tfCqysRajRomrrlTCB1ujjDHtQgYjAI9DDPSTAahE3ig7YZnmcRntonH3Oqiht64tz88YSHGPGPSV3xzW398cqkCsRHhdrcMNUqIJbW3ZgI99tR+rVJs5k8blnmwBs9F4EmwtxL56010BrNg/uQs3OuYV6fh+XRASGEw7wrJdFhRsxwknTBZQIuJ8e+dXosXri8HPNVhmCHhxg9uoYUpsVoTgSREhPuNGGL5Vqh9bvWst2Do0udrhcdFRaCb2/vb13v2vn7NbxhkXlumjwjsRDCGpx2zU1ymKvcWFd6sGavpcHx4TGlePlK5yMyOrVMQLsWcR6vDezq62jMTzg7MRKXOslMrnTqW6cHQFh7XwHlnih5g5XSEGKLK3vmeTwqSMtw5n9Pcb9nRO1nb3+
81UZgKDXkDi5OR/WCKiREKc+Jc/Zdall33VnGb0C4Pc94dKcs9C5ynLM5oWtLtxqMFozpgB4Frud+Wqy7fyjese/NV/HGNd3cWg7G16Oibhviu+WYwrxULwkxGjCuPMfp0jdKGhruG3cM5T+ZmPCQRi2T2KMgBRW5SejSynE6jqVBw9k1LtiwB5VUWX53KTHhNglDPG3diY8KVWypVJsfM390KUZ3ynIYhuSJthlxeHVyV9dJOoJQckw43rimG0qyfJloyrXU2HDsP35GdR0wZ5zOQVUZwqz0+Hkae52VvHlNN4eefiEEHh5Tih2HlXv95OW23IiHqCwFMrGyJSZWtkTuzMU2jxekRuOP/ScBAOvtEjG4y1XvtdK8KWdJGfTeKOCt4rXUuEyLO9fGt6517GkBTD3Zx2tqUeRBz5SWpEZl2QnYsOe49e9hJRnYuPc4+rROdZksLDk6DC9e4Xz4oru8dQ6lxIRruk7YPuhYhtKseGTIerAs9yd5T+HM4W3x3Dd/KL6Pp0NiAVPwc0HHTFz2kvu9r86o3WN9OR/Z2b61JKVzlvFbvuuK3EQsrz5s/VseDMrL8ISGtSp9wZ3zoZubGd3d/focNpedFkrTH6YNcJ7w6IdZA9D9oS/dK4RZfGQo/nNtd7TNiEWnuZ95tI/GkCePDAQRoUZsfcj3Sx/pCQNUUmXfY2oZgeHtoZRqIsOM6NPa9XISWvX14r4Cjbs3Pq8yny9/Hd8R0eEhiI/S3rsQZjTgbF29pm1Vs/iaH9/0wHDVFPhadFRJvKC0DIglCG+Z1BDYFKbFYMO8YW73Qn58Ux8Uzf7IrdeocXeo31tTuzfZGprDFDIluuKLabONubx5o8J/UZcc3DS4yGYEiFqA4YmreufhzRXbrX9P7ZuPq3rn2byfOuefb9G0Xta5YK7IG16se2/E4fvLuDLXG9lpWK5L/bdRmBYLYI91ioyvTOtfCKNBOJ0n7Wp94MaeJt6ch95UFf+h7TOsAapBmJbNaQq9ClOwZPOBJnkvNd6sij12URn+s2I7Hvpog4s3bfivvKHBk2uUt7OEu8OSwdnTJRN7FaZg2dZDyExQ7n33RU4Hfya48wcGqKRZuXkSvdryE7cObu3+cFzR/H50zVVkmNHtOb+LpvfC1xv3adrWvuHE/v7gKjOkK+4Mp0qNDceLl5dbfzMWngyR9eavw91hUM4y/wLeK9vUvvkY2VH7/EHLd+HvxE6+oi1Y9Iz9d2YQwpoAr7FyU6IUAyylyuu/p3RDt4e+sCubZ2fUgLZpLkfaKI0ekDRMD7ioSzYGyfI5+EpPDUuGpMSoj2aYU9VO9d5s8drkrrjcPDc21CgcE8t58fd093mul23xlNrX9Te7ZHONvT6N7ZKtOKcXMK2eUCebN/n61d2sU4mairu/F4fzXPZ3UnQYpvYtcB2g2nnsojLc9p81yElqfENmU9YEo8ND8OOsgUh28ptyZlr/Qozpko2sBO/kjyBHDFBJs4rcJPw6d6jqGPvpGta/ouZncs9c3PjGz057IPq3SVVca61NRqzNMh7OqK4BqenV3udsXqO/pJvXiZvswVpwSrzVuHR591w331j9qTuGtcGS3/3bs+Ft3jjOcREhGF7iOMRdy661xi1qo2uU8hekxIShNCset8ky6Hr6MR/X0HvqLGuos4q+EPB5cDqhIschuCzLjseaHUe176NrS9XhpPLj/94NPZGZEOl6vnojXaBx3rk7jAaBunpJ9ffg7TUnYyNC0TIpCn8eOqVYFvk8WFeNA7rkST4Iu9/KmM5ZSI0NR2+VBpZr+xaoDof3t8YkqzMYBINTH2OSJHJLYyaAy1nm4fniJkb6MrJjFqoXVCHRyVzG6PAQj1tgp/QyBVv2lRbLIvSF6doCXFf81dHvzREGF3Y29VJe1t3/iRZev7obFl5diXvOK/b4Rq9Uv7q+X2GTLO/lbZas7xd09M018Zf7huLhsR0cjpk7UzZcberyeVnlNsRowKLpvbwy9UIpUY/99UTpXLmhfyEmVOTgkm4Nw/SFsN3WnXOzLNv5PP9IlREUvYt8O/1EngyqLCdBNTj1Vob16gVVqqM1OmlYo1KNJdu5AHCZOVmMsx4wb1w6g2n9XPt7idIn07o6gHyffVunqn7fM4e31f1yQ6RPDFDJIQW6hS8vzAWpMaheUIX2mfFenV9Fzc+c84oVb4ApMeH411WVeGqid9YYDQYlWfGoXlDlNAGJOxpT/+tekIweBSmY3Mv93lzL+7q6dgTS9AHLNdHXSwTYHzKvDiFXOd6WrLZqS2tYy+LFwqTEhKN6QRUynfSSxEeGYsGYDqoNr6mxzhMv2XvPxRrpavdUb9xrnZVS6y3WMqT/yh65jS6PkkXTeuHVyV09fr38c0zta1rfU/7dxXppqHpzIz/HG5OngcibGKASXpnk/IYRSJU8IrleRSlem8/nr19Bc/31PTG+Ydjm+9N64sMZva1/N8U1SU/NZt5sxLMPhuyPZZjR+9WCqX0LcO/5xRhfkeN0O18um+HqGPrq+76mT771/x4u9eiRhVdXWv+vdYm4RHNPdGN6Oe29fV1DVt3S7PhGXY+tPcGyz2H5bOEhBrTNsG1sZ9XFfZbkQWp4TJ3LiDONTkjVuB4rqWOA2owVpZmGPsorJPJ7eFN1bDIANuFR0Dd/nad6/nk4K1uiG9malYzu1LAmZIfsBBSrjPSgxrH/Cr+7sz8Wz7DtCdR6L1DrQQ0LMWBSzzyXa1f64lx/fHxHVOYl2Swd47QMKv/31BD5XHSV4+iNe63l2FmWUpMH+w1DfJueq8zD7lBbUgxQztLvzQYPX685qhezRrTz23vr+V6n1eXdc/HMJZ1xUbnrNY3JOY6HaMaeu6wL1u44qrjsh/xi7OtrBof4OvrnVZ4Pg6Lm5d7zi/HWih3+LoaDXC8NI1bSMMTXZ2+hK95sHLEcs8SoUDw6tsxh7lh6XIRqshlXpWhsMX1xr+mWn4w3pyqvOSvXFPchpaG8iVGh6OHjBDvWjxZgAcDUvvlon9kwr7eqtAX+/u0Wt5Yqc8e8USU4XnPOJ/sOFK56uAPsFGpyBoPAiFLP11unBgxQm7H4yFD0KnKd2p58y5LgI1TWk+3rpBnkvqa+MQthqli6Ck4m9czDJC9l5XWXs7L5MgGa5W1dzd0LlsqUL4Kn9LgIr2eabuwa2f4cTZNmDsp7F6WqNnykxYbjkkrPE4yFGg04V1dn89jqe4Z4vD8lSkVviE9d9WB7dvzzU6Kx5cBJ1xu6adZw2968O4a1xfX9Cq1zmoGG5e+uUpjLPr4iB2t3as+EbEm8RET+xwCVXPJ1naG5D/F9cnxHLFqzC+1aeCfbLAWHRdN64dP1e/xdDEVtM2KxYc9x1ee/nznAaXKaxlKqaIcaBTrleG84IclpC5AbeyWXv35M56YdIpeVEImlMwcgPS4C+4+fMZXH7gMtmz1I077UGhRSY8Ox7aDjkiXuWn33YJytq0fl/IZ1ZO1/E0q3VVe3Wk8bQj6+qQ/q6iW0u+djj16vldEgHHpPLcmwlIztko0576
7zaZmIyDcYoJKNZh4r+kVSdBiu8FHWRPKepv5tlGTFoyTL+bIV/rLw6m7YtFc5QH1kTIcmWx9OXp/+/cERXtprcF8EvdUZmxYbjn3mQM6i0UN8Za+/bUgb9Q19JNPH562WY68lSEyMDkNtXb0bb2z6x1dndlgI05n4Q6Q5oVH3Au0j4f53fQ+MfmYpgIZzzZ3zojEdCn+d0BHLqw95/HpqXnhVIVWcG0rk6Ib+Bf4uglssS0d4U1J0GLrlK8+bG+ciU6s3WIf4+uAS1dK8fuaFTdyDp4U3ErV4a/kwpd5Eb46GUVtXsSkkRYchNiIEc6qKvbpfXy7dZj30Cm/RkP3W8yG+eSnRTdbw5C1eWQdVx9WgmPAQfHFrXzw6toPm13RqmejVxFXuGNkxCw+MKlV9vrkkoiJtGKCSKss8yE4tOWyOSAiB6gVVuH1oW38XRbM5Ve3QIdu362r6gy+rMamx4dj60AhcWtlS82v+clEZ+rXx/bxxbwY47gSTasuUXNqtJUZ29N5cYyEEuuUn4blLu3htn54ICzFg7X1Dcb6TedT/udZ14iWLCRU5LpfvsND6vbgMNmX/1xpkOWuU/uq2flg03fk6r9T0ClJjEBGq7dyyaExDA0NIaioc4kuqBhWn47e5w6zDSIh8xVvV7qrSFoiLDOzL2oSKHHy0Tp9zT/XGZZIkD2tT7vYEjumSjTFd9NfjqsSbPUKW3pD3ft7ltX2+cY32wM+fshO1V/IXjOmABWM6oNfDX7rc1lcJsQDXa1z6A6cVEZGSwK7Jkc8xOCVf8nbd5OlLOnt5j03PUpklsufddR2pqXkz9lT7/pQabR4aU4oBbdNczml31TDji+BZz0No5YIpkG7Igk6kXwxQifwkKTrM30XwO94gySM+nINKjrQuU6LFxV1zsH7XsUbvx98aeyx6FaYgQWkNclkkdOewtmibEYsztXW49l+rPH6vuIjQgOnh15vpAwpx59trkRIT7u+i+Iw7wXcwBeqkbwxQifxg1d2DmflQhvc8cgeTaQSuhy5svqMD2mfGYeeR0wCA4aUZimuqynspr+tnSsj20drdLvcdqIGD3ss9vqIlxldon48eCPTasKf3c4GaFmvIRH6QFB2GmHC2DxE1hqt6FgNZYGrffJu/1RIekXvsh9J+clMfl695ckJHtE6PAdD4c1Pt+2v4fgPjC/ZlsMTfv/fxmFJTYYBKREQBxdmSGsFoaPsMAMC4CveHac4a3s7m74YlR9wvR4DEPH7RJiPW5TZRYSHobM6Kr3YslQJLLce9KQIHVz+3RdN64ZVJFT4vBzUOf8cUCNiFQzb8MfSjfWYcMuMjAmr5DiLyn+ZWv8pJikL1giqv7pM9IY3j7Pi1zYjzeL+BvP54abbzJExKGCz5TwCfatQMMEAlRU1504gOD8HSWQOb7g2JKCh4c13Q5sKTSikrss5d2SPX5m+vZL+X3YSVjr/aEF5rQqsACfx4bvmf/amSmxyFonSVEQEBcl5R4GOASkREAcVSOWfl1n2NCWDUXpOXEo2tB056XKbmRPM56+bJHSgBKenf17f393cRiDgHlZSx4kekH1kJkYjS2CszuDg96CurWtfxC/bj0FSyEyMBAB2yExSf/2B6LyybrTwK5vLujplqm5OcpCibv61zgN3YR1Odx5ae4O75yYrP+6JewN9oYOH3RU2FPahkgxcfIv357o7+mgezvnB5OeYu+hUvf7/Vp2XyJ16mmlZZTgI+vbkPClNjFJ+PDg9BtEpW8vsvaO/LouleelyE4uMu77Uuhvg6bG7+Nz7StLZqqNH9/ocurRK9PtfZFZ9m8eWFgihgMUAlItI5g4E1LSWBnFDGXyzHzN0zqrXanDQXAmW5E3f5fP6zwrmtJbHVYxeV4Z1VO1DmQcIial549SQ9Y4BKREQBResQX3LCx4HjnKp26NIq0afvEYga06ZiHxQnRoXi8KlzABoaApKiwzCld77Da/UqSNsvAoo7jUi+/Lp4LpAcA1QiHbn3dUbqwgAAIABJREFU/GKsqD7s72I0OQYa3qXn7LZvXtMN5+oaV74Qg2n4YmSoF7KlBjGluYRNdWYEUpDkicYu0+Py9bLaulrFffU9Q5A/azHqJUvPuG9r+Hq+rhBRcGGASqQjk3rmYVLPPH8Xo8mwwbT5qVRJwOKODtnxuGVwa0yoyEHX+V94oVTB6eo+jtcSSw8ef3v+4UmIF8wj2dXm6JI+BeuQfdIfBqhEREGmsb07eieEwIyBRf4uhq4tuLAU/dukOTweYp7PHB3O3me/8mSZHycvCsTAoTQr3qNkTkQU/HhlICIiCjITurZUDFo6ZMfjtiGt8cT4jn4oFTWG0hDbwcXpAAKzR9zXMXUgHhMiMmEPKhH5TRCPXPOr5JgwAECceckJIgshBKYNCKze54VTKjHxxZ/8XQyv8PZw3b9d3AkHT5xtmkzfvGAHFU+yoPs0SRKbFEiGPahE5He8LXnXNX3y8fCYUoztnO3vovhVAI56JAU9ClMwpZdpPu2VPXKtjxe3iMMH03v5qVSNo3ZqGs2BZniIY/VMqQIfHmJEZkKkN4tGzQwvk6RH7EElG8GcjIGouQg1GjC+oqW/i0HkdWGywC0nKRIlWYG13qerTLilWfGYMaAQEytbNVGJqLmZXdUOIQYDBrRNwwe/7PZ3cYgUMUAlRex5ICIicu3qPm4sqWPJoqxykxVC4JYhbVRe2nQtyJd2a4my7IQmez9qOmmxEfjLuDJ8/utet1/LuiE1FQaoREREFHD0MGctPS4cWUE4xPaBUaUOj1l6r3OSgu/zEpG+cA4qKeJQXyIKdHoIYMg7zivLBAD0a51qfawpexT1wN/nc0JUGF6ZVIEPpvX2azm0CsSld5qSJ78eX56D/LpIjj2oZIMXCCIKJH1bp+KbTfv9XQzysY45CaheUIV9x2v8XRSvCNRbbT+FtXVdWT57EE6drfVBacgbWO8jPWKASkREAeuVSRUc8dGMyHtw/N2j6AlPTtXIMCMAIDE6zLuFaSKpseEAwv1dDCIKIAxQiYgoYAkh2ANAAcedc7Zv61TMG1WCCztl+a5ARBrwWktNhQEqERERBZwbBxX57b0tCYNaJUX7/L2EELisG5edIf8RgrlJqGkxQCUiv+N9j3yhObb2f3FrX/y665i/i+FzkaFGtGsR57f3T4oOwwuXl6MiN9Gt10ms5ZNO8FwkPWOASkR+0wzjByKfKkiNQUFqjL+L0SwMLk53+zWWkKA5Np40NR5irXikSH+4zAwREREFlEAP8AIxwRMRUVNhDyoRERGRF62cMwiGQI+iiezwlKamwgCViPyGM2DIldgIz29TrEuRvyTHcFkVCh4CvF9T02KASjY4Z578gYEEKfny1r5IiArMtR/JNwK9B4f3WApkHJpOTYUBKikK9EoAEQW+fCb7oSDDJEmkF2wrIT1jkiQiIiIiomaIjSWkRwxQSRGHIRERkV6xTk2uMPDyPh5TaioMUMkGLz5EFCwEL2ikMxJbfykA+fJaOrGyJQAw6zXZYIBKRERE1ITYeEKBy
Bdn7X3nt8eGecNgNPA3QQ2YJImIiIgCSqAGeOw/Jb3wpDPfF+evwSAQYTD6YM8UyNiDSkRERNSEAjO8pmDEc5H0iAEqEfkdexXIF1jxCl6BOpdzWv9C5CRFondRir+LQkSkWwxQichvGEAQkTsC/ZrRrkUcvrtjABKiwvxdlKAXqMPAm0qvohSUZcfj9qFtXG7bJj0WQOD//ihwcA4qERERBRQGH0SNExMegvem9dK07b+mVOK33ccQYmS/FjUNnmlERERERKQoKToMPQs5LJ2aDgNUIiIiIiIi0gUGqGQjQPNOEBE54CjQ4MWvlogoeDFAJUWs2FFTYHsIEREREckxQCUiv2N7CBEREREBDFBJBYf6ElGwKEiN9ncRiIiISCMuM0M2OLSXiIKFEAKvTu6K4hZx/i4KeQmXlyEiCn4MUImIKGj1bZ3q7yIQERGRGzjEl4iIiAILO1KJiIKWpgBVCDFMCLFRCLFZCDFT4flrhRBrhRA/CyGWCCGKzY+HCSH+YX5ujRCin5fLT0RERESkaGj7dH8XgYjc5HKIrxDCCOBpAIMB7ACwXAjxviRJv8o2WyhJ0nPm7S8A8DiAYQCuBgBJkkqFEGkAPhJCVEiSVO/lz0FEREREZLVs9kDER4b6uxhE5CYtPahdAWyWJGmLJElnAbwBYKR8A0mSjsn+jEbD8obFAL40b7MPwBEA5Y0tNBEFFyaNJiJ3cIQvaZEWG4HwEKO/i0FEbtISoGYB2C77e4f5MRtCiBuEEH8AeATADPPDawBcIIQIEULkAegCIKdxRSaiYMFKJhERERHJeS1JkiRJT0uSVADgTgBzzA+/DFNAuwLAkwCWAqizf60Q4hohxAohxIr9+/d7q0hEREREREQUQLQEqDth2+uZbX5MzRsARgGAJEm1kiTdLElSR0mSRgJIALDJ/gWSJD0vSVK5JEnlqalcEoCIiIiIiKg50hKgLgdQJITIE0KEAZgA4H35BkKIItmfVQB+Nz8eJYSINv9/MIBau+RKRERERJpwWgARUfBzmcVXkqRaIcQ0AJ8AMAJ4WZKk9UKIuQBWSJL0PoBpQohBAM4BOAzgCvPL0wB8IoSoh6nX9TJffAjyHonZaoiIiIiIyE9cBqgAIEnShwA+tHvsHtn/b1R5XTWANo0oH/mJYDM1ERERERE1Ma8lSaLgwp5Uago8zYjIE4KtqEREQYsBKtngPZ/8gacdEREREQEMUImIiIiIiEgnGKASERFRQOFoHyKi4MUAlYiIiAIK8yQQEQUvBqhERERERESkCwxQiYiIKKBwiC8RUfBigEpEfsfRekREREQEMEAlIj9iJwgRuYONWUREwY8BKhEREQUUNm4REQUvBqhERERERESkCwxQyQZT9xMRERERkb8wQCUiIqKAILEVlYgo6DFAJRtM3U9ERERERP7CAJWI/IZ9IUTkDsFWVCKioMcAlYj8jlVOItKCQ3wDX1J0GACgLDvBzyUhIr0K8XcBiIiIiNzBntTAlZMUhQ9n9EZhWoy/i0JEOsUAlYiIiIiaTHFmnL+LQEQ6xiG+REREFBA4wJeIKPgxQCUiIqKAwgG+RETBiwEqEfkde0WIiIiICGCASkR+xF4QIiIiIpJjgEpERERERES6wACViIiIiIiIdIEBKtngGuhERKRXvEcREQU/BqhEREQUUAQnsBMRBS0GqGSDN30iIiIiIvIXBqhERERERESkCwxQiYiIKCBIXDWZiCjoMUAlIr9hVZOIPMP5KEREwYoBKhH5HauaROQeNm8REQUrBqhERERERESkCwxQiYiIKMBw3AURUbBigEpERERERES6wACViPyOs8mISBNeLIiIgh4DVCLyGw7SIyJPCF48iIiCFgNUIiIiIiIi0gUGqGRD4vApIiLSKd6iiIiCHwNUIiIiIiIi0gUGqGSD83qIiEiveIsiIgp+DFCJiIiIiIhIFxigEhERUUDgHFQiouDHAJWIiIgCCof6EhEFLwaoROQ37A0hIiIiIjkGqETkd+wNISItuBQaEVHwY4BKREREAYUZ54mIghcDVCIiIiIiItIFBqhE5HcctUdEWsRGhAAARnbM8nNJiIjIV0L8XQAiar44So+I3BEdHoK19w1BdBirL0REwYpXeCIiIgoYsRGh/i4CERH5EIf4kg1mSCQiIiIiIn9hgEpERERERES6wACVbDB1PxERERER+QsDVCIiIiIiItIFBqhERERERESkCwxQiYiIiIiISBcYoBIREREREZEuMEAlIiIiIiIiXWCASkR+w2V3iYiIiEiOASoR+R1XNyIiIiIigAEqERERERER6QQDVCLyOw71JSIiIiKAASoR+RGH9hIRERGRHANUsiGxK4uIiIiIiPyEASoRERERERHpAgNUsiE45pKIiIiIiPyEASoRERERERHpAgNUIiIiIiIi0gUGqERERERERKQLDFCJiIiIiIhIFxigEhERERERkS4wQCUiIiIiIiJdYIBKREREREREusAAlYj8RvJ3AYiIiIhIVxigEpHfCX8XgIiIiIh0gQEqEfkde1KJiIiICGCASkR+xJ5TIiIiIpJjgEpERERERES6wACViIiIiIiIdIEBKhEREREREekCA1QiIiIiIiLSBQaoREREREREpAsMUImIiIiIiEgXGKASERERERGRLjBAJSIiIiIiIl1ggEpERERERES6wACViIiIiIiIdIEBKhH5jeTvAhARERGRrjBAJSIiIiIiIl1ggEpEfiP8XQAiIiIi0hUGqGTjjqFtAQDxkaF+LgkRERERETU3If4uAOnLuIocjKvI8XcxiIiIiIioGWIPKhEREREREekCA1QiIiIiIiLSBQaoREREREREpAsMUImIiIiIiEgXGKASERERERGRLjBAJSIiIiIiIl1ggEpERERERES6wACViIiIiIiIdIEBKhEREREREekCA1QiIiIiIiLSBQaoROQ37TPjAAAjSlr4uSREREREpAch/i4AETVf+akx2DJ/BAwG4e+iEBEREZEOsAeViPyKwSkRERERWWgKUIUQw4QQG4UQm4UQMxWev1YIsVYI8bMQYokQotj8eKgQ4lXzc78JIWZ5+wMQERERERFRcHAZoAohjACeBjAcQDGAiy0BqMxCSZJKJUnqCOARAI+bH78IQLgkSaUAugCYKoTI9VLZiYiIiIiIKIho6UHtCmCzJElbJEk6C+ANACPlG0iSdEz2ZzQAyfIUgGghRAiASABnAci3JSIiIiIiIgKgLUlSFv6/vfsPtvyu6zv+enfTiMQqULdWkyhLZm0nSifQbaBTpRZSDMYxccZpg4zG2g6TSoZU7ECsTEbzn2knto47Ykbj1B9xtWjbHbs2ta2tQ6fSrBoIAWM2AUkyWFaIpJTWJPLuH+e7cLjdm72wd7nve/fxmDmz3+/n+z2Xz+GT7+597jn3u8mja/uPJXnZxpOq6g1J3pTkwiSvXIbfnlXMfijJc5N8b3d/9GwmDAAAwN60bTdJ6u7D3X1ZkrckeesyfGWSP03yFUkOJPm+qnrRxudW1eur6nhVHT958uR2
TQkAAIBdZCuB+niSS9f2L1nGNnMkyXXL9rcn+ffd/XR3fzjJf0tyaOMTuvvO7j7U3Yf279+/tZkDAACwp2wlUO9NcrCqDlTVhUmuT3J0/YSqOri2e02Sh5btD2b5uG9VXZTk5Ul+72wnDQAAwN5zxp9B7e5nquqmJPck2Zfkru5+oKpuS3K8u48muamqrkrydJInktywPP1wkp+uqgeSVJKf7u53n4sXAgAAwO62lZskpbuPJTm2YezWte2bN3nex7P6p2YAAADgWW3bTZIAAADgbAhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBG2FKhVdXVVPVhVJ6rqltMcv7Gq7q+q+6rqHVV1+TL+umXs1OOTVXXFdr8IAAAAdr8zBmpV7UtyOMlrklye5LWnAnTN3d394u6+IsntSe5Iku7++e6+Yhn/jiTv7+77tvUVAAAAsCds5R3UK5Oc6O5HuvupJEeSXLt+Qnc/ubZ7UZI+zdd57fJcAAAA+P9csIVzLk7y6Nr+Y0letvGkqnpDkjcluTDJK0/zdf5uNoQtAAAAnLJtN0nq7sPdfVmStyR56/qxqnpZkk9093tO99yqen1VHa+q4ydPntyuKQEAALCLbCVQH09y6dr+JcvYZo4kuW7D2PVJfmGzJ3T3nd19qLsP7d+/fwtTAgAAYK/ZSqDem+RgVR2oqguzis2j6ydU1cG13WuSPLR27M8k+Tvx86cAAAA8izP+DGp3P1NVNyW5J8m+JHd19wNVdVuS4919NMlNVXVVkqeTPJHkhrUv8Yokj3b3I9s/fQAAAPaKrdwkKd19LMmxDWO3rm3f/CzP/S9JXv45zg8AAIDzxLbdJAkAAADOhkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMMKWArWqrq6qB6vqRFXdcprjN1bV/VV1X1W9o6ouXzv2V6rqv1fVA8s5z9nOFwAAAMDecMZArap9SQ4neU2Sy5O8dj1AF3d394u7+4oktye5Y3nuBUl+LsmN3f01Sb4hydPbN30AAAD2iq28g3plkhPd/Uh3P5XkSJJr10/o7ifXdi9K0sv2q5O8u7vftZz3ke7+07OfNgAAAHvNVgL14iSPru0/tox9hqp6Q1U9nNU7qG9chr86SVfVPVX1O1X15rOdMAAAAHvTtt0kqbsPd/dlSd6S5K3L8AVJvi7J65Zfv7WqXrXxuVX1+qo6XlXHT548uV1TAgAAYBfZSqA+nuTStf1LlrHNHEly3bL9WJLf7O4/6u5PJDmW5KUbn9Ddd3b3oe4+tH///q3NHAAAgD1lK4F6b5KDVXWgqi5Mcn2So+snVNXBtd1rkjy0bN+T5MVV9dzlhkl/M8l7z37aAAAA7DUXnOmE7n6mqm7KKjb3Jbmrux+oqtuSHO/uo0luqqqrsrpD7xNJblie+0RV3ZFV5HaSY939787RawEAAGAXO2OgJkl3H8vq47nrY7eubd/8LM/9uaz+qRkAAADY1LbdJAkAAADOhkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhC
oAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGGFLgVpVV1fVg1V1oqpuOc3xG6vq/qq6r6reUVWXL+MvrKr/s4zfV1Vv2+4XAAAAwN5wwZlOqKp9SQ4n+dtJHktyb1Ud7e73rp12d3e/bTn/W5LckeTq5djD3X3F9k4bAACAvWYr76BemeREdz/S3U8lOZLk2vUTuvvJtd2LkvT2TREAAIDzwVYC9eIkj67tP7aMfYaqekNVPZzk9iRvXDt0oKp+t6r+a1V9/VnNFgAAgD1r226S1N2Hu/uyJG9J8tZl+ENJvrK7X5LkTUnurqov3vjcqnp9VR2vquMnT57crikBAACwi2wlUB9Pcuna/iXL2GaOJLkuSbr7T7r7I8v2byd5OMlXb3xCd9/Z3Ye6+9D+/fu3OncAAAD2kK0E6r1JDlbVgaq6MMn1SY6un1BVB9d2r0ny0DK+f7nJUqrqRUkOJnlkOyYOAADA3nLGu/h29zNVdVOSe5LsS3JXdz9QVbclOd7dR5PcVFVXJXk6yRNJblie/ookt1XV00k+meTG7v7ouXghAAAA7G5nDNQk6e5jSY5tGLt1bfvmTZ73y0l++WwmCAAAwPlh226SBAAAAGdDoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYYUuBWlVXV9WDVXWiqm45zfEbq+r+qrqvqt5RVZdvOP6VVfXxqvrH2zVxAAAA9pYzBmpV7UtyOMlrklye5LUbAzTJ3d394u6+IsntSe7YcPyOJL+2DfMFAABgj9rKO6hXJjnR3Y9091NJjiS5dv2E7n5ybfeiJH1qp6quS/L+JA+c/XQBAADYq7YSqBcneXRt/7Fl7DNU1Ruq6uGs3kF94zL2RUnekuSHnu1/oKpeX1XHq+r4yZMntzp3AAAA9pBtu0lSdx/u7suyCtK3LsM/mORHuvvjZ3jund19qLsP7d+/f7umBAAAwC5ywRbOeTzJpWv7lyxjmzmS5MeX7Zcl+baquj3J85J8sqr+b3f/2OcyWQAAAPaurQTqvUkOVtWBrML0+iTfvn5CVR3s7oeW3WuSPJQk3f31a+f8YJKPi1MAAABO54yB2t3PVNVNSe5Jsi+9xAB9AAAMgklEQVTJXd39QFXdluR4dx9NclNVXZXk6SRPJLnhXE4aAACAvWcr76Cmu48lObZh7Na17Zu38DV+8LOdHAAAAOePbbtJEgAAAJwNgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwwpYCtaqurqoHq+pEVd1ymuM3VtX9VXVfVb2jqi5fxq9cxu6rqndV1bdu9wsAAABgbzhjoFbVviSHk7wmyeVJXnsqQNfc3d0v7u4rktye5I5l/D1JDi3jVyf5iaq6YNtmDwAAwJ6xlXdQr0xyorsf6e6nkhxJcu36Cd395NruRUl6Gf9Edz+zjD/n1DgAAABstJV3My9O8uja/mNJXrbxpKp6Q5I3JbkwySvXxl+W5K4kX5XkO9aCFQAAAD5l226S1N2Hu/uyJG9J8ta18Xd299ck+WtJvr+qnrPxuVX1+qo6XlXHT548uV1TAgAAYB
fZSqA+nuTStf1LlrHNHEly3cbB7n5fko8n+drTHLuzuw9196H9+/dvYUoAAADsNVsJ1HuTHKyqA1V1YZLrkxxdP6GqDq7tXpPkoWX8wKmbIlXVVyX5y0k+sA3zBgAAYI8548+gdvczVXVTknuS7EtyV3c/UFW3JTne3UeT3FRVVyV5OskTSW5Ynv51SW6pqqeTfDLJ93T3H52LFwIAAMDutqV/8qW7jyU5tmHs1rXtmzd53s8m+dmzmSAAAADnh227SRIAAACcDYEKAADACAIVAACAEQQqAAAAIwhUAAAARtjSXXwB4Nl85Quem6+9+It3ehoAwC4nUAE4a7/55r+101MAAPYAH/EFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAIAhUAAIARBCoAAAAjCFQAAABGEKgAAACMIFABAAAYQaACAAAwgkAFAABgBIEKAADACAIVAACAEQQqAAAAIwhUAAAARhCoAAAAjCBQAQAAGEGgAgAAMIJABQAAYASBCgAAwAgCFQAAgBEEKgAAACMIVAAAAEYQqAAAAIxQ3b3Tc/gMVXUyyR/s9DzO4EuT/NFOT4LPibXbvazd7mXtdi9rt3tZu93N+u1e1m5rvqq795/uwLhA3Q2q6nh3H9rpefDZs3a7l7Xbvazd7mXtdi9rt7tZv93L2p09H/EFAABgBIEKAADACAL1c3PnTk+Az5m1272s3e5l7XYva7d7WbvdzfrtXtbuLPkZVAAAAEbwDioAAAAjCNTPQlVdXVUPVtWJqrplp+dDUlWXVtVvVNV7q+qBqrp5GX9BVf16VT20/Pr8Zbyq6keXNXx3Vb107WvdsJz/UFXdsFOv6XxTVfuq6ner6leX/QNV9c5ljX6xqi5cxr9g2T+xHH/h2tf4/mX8war6xp15JeeXqnpeVb29qn6vqt5XVX/ddbc7VNX3Lr9fvqeqfqGqnuO6m6uq7qqqD1fVe9bGtu1aq6q/WlX3L8/50aqqz+8r3Ls2Wbt/uvy++e6q+tdV9by1Y6e9pjb7/nOz65azd7q1Wzv2fVXVVfWly77rbrt1t8cWHkn2JXk4yYuSXJjkXUku3+l5ne+PJF+e5KXL9p9L8vtJLk9ye5JblvFbkvzwsv1NSX4tSSV5eZJ3LuMvSPLI8uvzl+3n7/TrOx8eSd6U5O4kv7rs/1KS65fttyX5h8v29yR527J9fZJfXLYvX67HL0hyYLlO9+3069rrjyT/Msk/WLYvTPI81938R5KLk7w/yRcu+7+U5Ltcd3MfSV6R5KVJ3rM2tm3XWpL/sZxby3Nfs9Ovea88Nlm7Vye5YNn+4bW1O+01lWf5/nOz69bj3KzdMn5pknuS/EGSL13GXHfb/PAO6tZdmeREdz/S3U8lOZLk2h2e03mvuz/U3b+zbP+vJO/L6huwa7P6BjrLr9ct29cm+Zle+a0kz6uqL0/yjUl+vbs/2t1PJPn1JFd/Hl/KeamqLklyTZKfXPYrySuTvH05ZePanVrTtyd51XL+tUmOdPefdPf7k5zI6nrlHKmqL8nqD++fSpLufqq7/ziuu93igiRfWFUXJHlukg/FdTdWd/9mko9uGN6Wa2059sXd/Vu9+q75Z9a+FmfpdGvX3f+hu59Zdn8rySXL9mbX1Gm//zzDn5ecpU2uuyT5kSRvTrJ+Ex/X3TYTqFt3cZJH1/YfW8YYYvno2UuSvDPJl3X3h5ZDf5jky5btzdbR+u6Mf57Vb/SfXPb/fJI/XvvDe30dPrVGy/GPLedbu8+/A0lOJvnpWn08+yer6qK47sbr7seT/LMkH8wqTD+W5Lfjuttttutau3jZ3jjO58d3Z/XuWfLZr92z/XnJOVBV1yZ5vLvfteGQ626bCVT2hKr6oiS/nOQfdfeT68eWv51yu+phquqbk3y4u397p+fCZ+2CrD769OPd/ZIk/zurjxl+iutupuVnFa/N6i8ZviLJRfGu9a7mWtudquoHkjyT5Od3ei6cWVU9N8k/SXLrTs/lfCBQt+7xrD53fsolyxg7rKr+bFZx+vPd/SvL8P9cPkKR5dcPL+ObraP1/fz7G0m+pao+kNVHll6Z5F9k9dGYC5Zz1tfhU2u0HP+SJB+JtdsJjyV5rLvfuey/Patgdd3Nd1WS93f3ye5+OsmvZHUtuu52l+261h7Ppz9iuj7OOVRV35Xkm5O8bvkLhuSzX7uPZPPrlu13WVZ/sfeu5fuWS5L8TlX9xbjutp1A3bp7kxxc7ph2YVY3izi6w3M67y0/g/FTSd7X3XesHTqa5NTd0m5I8m/Xxr9zuePay5N8bPmY1D1JXl1Vz1/eYXj1MsY50t3f392XdPcLs7qe/nN3vy7JbyT5tuW0jWt3ak2/bTm/l/Hra3W30QNJDmZ18wHOke7+wySPVtVfWoZeleS9cd3tBh9M8vKqeu7y++eptXPd7S7bcq0tx56sqpcv/z1859rX4hyoqquz+tGWb+nuT6wd2uyaOu33n8t1uNl1yzbr7vu7+y909wuX71sey+omnX8Y1932O9d3YdpLj6zu0vX7Wd1N7Qd2ej4enSRfl9VHm96d5L7l8U1Z/WzGf0ryUJL/mOQFy/mV5PCyhvcnObT2tb47q5sSnEjy93b6tZ1PjyTfkE/fxfdFWf2hfCLJv0ryBcv4c5b9E8vxF609/weWNX0w7oT3+VqzK5IcX669f5PVHQpdd7vgkeSHkvxekvck+dms7hrquhv6SPILWf288NNZfVP897fzWktyaPlv4eEkP5akdvo175XHJmt3IqufSzz1Pcvb1s4/7TWVTb7/3Oy69Tg3a7fh+Afy6bv4uu62+VHL/0kAAACwo3zEFwAAgBEEKgAAACMIVAAAAEYQqAAAAIwgUAEAABhBoAIAADCCQAUAAGAEgQoAAMAI/w+a7XNaKksz5AAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# plot pdf values too see outliers\n", + "plt.figure(figsize=[16,16])\n", + "plt.plot(pdfs)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_11_FINAL/11_227.wav', 'Q-R-S-T-U-V-W-X-Y-Z macht es komplett!', 38, array([-4.0032621e-04, -3.3042193e-04, -3.4537757e-04, ...,\n", + " 7.7704317e-06, 2.7401828e-05, 7.1041533e-05], dtype=float32), 11.323739583333333) 0.38161673291429454\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_14_FINAL/14_496.wav', 'Ist der Kuli blau?', 18, array([ 1.2363373e-05, -3.6298752e-05, 2.1456377e-05, ...,\n", + " 3.9692618e-06, -6.7328816e-05, -9.5399046e-05], dtype=float32), 5.530666666666667) 0.38054811432758695\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_17_FINAL/17_426.wav', 'H-I-J-K-L-M-N-O-P!', 18, array([ 4.7872534e-05, -3.4164757e-05, -2.1835160e-04, ...,\n", + " -4.3899294e-05, -7.5021897e-05, -3.4489829e-05], dtype=float32), 11.167979166666667) 0.32909346861901806\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_2_FINAL/2_119.wav', 'Kann ich mich irgendwie revanchieren?', 37, array([-5.1586820e-05, -9.1837741e-05, -9.9342957e-05, ...,\n", + " -1.4234778e-04, -1.2327779e-04, -1.4810068e-04], dtype=float32), 9.728) 0.3853891360487213\n", + "('/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_5_FINAL/5_41.wav', 'Ja, eben.', 9, array([ 8.6438486e-05, 1.5554321e-04, 1.1511238e-04, ...,\n", + " -1.3761004e-05, -2.3534812e-05, -5.6318945e-06], dtype=float32), 2.1033333333333335) 0.38819509492217963\n" + ] + } + ], + "source": [ + "# print outliers\n", + "threshold = 0.39\n", + "for item, pdf in zip(items, pdfs):\n", + " if pdf < threshold:\n", + " print(item, pdf)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from IPython.display import Audio\n", + "Audio(\"/home/erogol/Data/Mozilla_DE_Thomas3/BATCH_2_FINAL/2_119.wav\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "Collapsed": "false" + }, + "source": [ + "### Plot Dataset Statistics" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXAAAAEICAYAAABGaK+TAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAd00lEQVR4nO3dfZRcdZ3n8fcnnQYqQekgGaQbMIyyYZERoj2zODoji2gQETIehoeVGRBczuyZGcTBsERnFWedhTkRhTnj4GRUUEEEMUZkxQzrw3F1EDexwYCQgeEpdHholQaBRjrJd/+4t5LqSlV1Pde9XZ/XOTnpulV977dvuj518/397r2KCMzMLH/m9boAMzNrjgPczCynHOBmZjnlADczyykHuJlZTjnAzcxyygE+x0g6RtJjPdr2JZKu7cW25zJJSySFpPnp41slndWmdYek17RjXXVur221mwO8KyQ9LOm4rK2rxTp69kHR7yLiHRHxhV7XMZtKH+h5qT0vHOBm1rDi/wastxzgHSbpS8DBwDclPSfponT50ZL+VdKkpLskHZMu/31Jv5B0UPr4SElPSzqs2rpm2f6wpK9JmpD0kKTzS567RNKNkr4o6deS7pE0WvL86yWNpc99VdINkj4uaSFwKzCc1vGcpOH02/aotr6yuq6S9ImyZd+Q9Ffp1/9d0ni6ns2S3lplPddI+sf0v+bPSfqRpFdKuiLdb/dJWlbn/vg9Sben/yaPS/oHSXuUPB+S/kzS/elrPi1JVeqquq7ylki67PuS3pd+PSDpE+nvwYPAO8vWXfraeZL+WtIjkp5K9/0+lWpKX78yrWerpHOqrTd9fLakH5b9/H8u6X7g/nTZlZK2SHpW0kZJf5AuPx74EHBa+u9yVyO1l+yjsyQ9mu6LD1f7ufpWRPhPh/8ADwPHlTweAX4JnEDyIfq29PHi9Pm/Bb4LFIBNwF9UW1eFbR0DPJZ+PQ/YCHwE2AP4beBBYHn6/CXAi2kdA8ClwI/T5/YAHgHeDwwC7wZeAj5evp2SbVddX4U6/xDYAih9vAiYAoaBpelzw+lzS4BXV1nPNcAvgDcAe6X77SHgT9MaPg58r8798QbgaGB+us17gQtKthXALcAQyQfpBHB8lbqqrit9HMD8ktd/H3hf+vWfAfcBBwH7At8rfX3Za88BHkh/lr2BtcCXqtR0PPAkcASwEPhyut7XlK83fXw28MOyn/+2tKZCuuxM4BXpz3kh8ASwV8nvw7VlNdRVe8k++meS98GRwG+A/9jr93OW/vgIvDfOBL4VEd+KiB0RcRuwgST4IPnF3wf4CTAOfLrJ7fwuyYfC30TESxHxIMkb4vSS1/wwrWM78CWSNwrsCp+/j4jpiFib1jObausr939J3qB/kD4+Bbg9IrYC24E9gcMlDUbEwxHx7zW2+fWI2BgRLwJfB16MiC+mNdwAFI/Aa+6PdB0/johtEfEw8E/AW8q2dVlETEbEoyTBelSlgupcVzWnAldExJaI+BXJB2E17wE+GREPRsRzwCrgdFVucZwKXB0Rd0fE8yS/Z426NCJ+FRFTABFxbUT8Mv05Lyf5d1ta57rqqf1jETEVEXcBd1H996kvOcB741XAH6f/vZ6UNAm8GTgAICKmSY4sjwAuj/SQpMntDJdt50PA/iWveaLk6xeAvdI30DAwXrbtLXVss9r6ZkjX+xXgjHTRfwGuS597ALiAJGCekvSVkhZNJU+WfD1V4fHe6dc194ek/yDpFklPSHoW+F/AfrP8fHtTQZ3rqmaYmfv6kVleW/r8IyQfvPtXeW29661mxu+ApA9KulfSM+n+3IfGfs7Zaq9rf/crB3h3lAfwFpL/Kg6V/FkYEZcBSBoBPgpcDVwuac8a66plC/BQ2XZeFhEnzPqd8DgwUtbjPajJOqq5HjhF0quA/wR8befKI74cEW8mCd0A/q4N25ttf1xF0ro4NCJeThLuFXvcdai1rufTvxeUvP6VJV8/zsx9fXCN7Wwl2Uelr93GzA+xetf7fI2ainb+u6f97otIjuwXRcQQ8Ay7fs7Zfkcaqd0qcIB3x5Mkfb6ia4F3SVqeDljtpWRa3oFpYF4DfA44l+RN9z9rrKuWnwC/VjIgWEi3dYSk363je28naWX8haT5kk4Gfq+sjlfUGjCbTUSMkfSvPwusj4hJAElLJR2bfnC9SHIUvaPZ7ZSYbX+8DHgWeE7SYcB/a2FbVdcVERMkrbEz0xrOAV5d8r03Auenvw+LgItrbOd64AOSDpG0N8mR/g0Rsa3Ca28EzpZ0uKQFJAcJpe4E3i1pgZK54efW8TNuIxkLmC/pI8DLS55/ElgiqVrONFK7VeAA745Lgb9O/9v+wYjYApxMclQ2QXJkuJLk3+N84LeA/5G2Gd4LvLc4ul++rlobTXvAJ5L0aR9iV1jOGroR8RLJwOW5wCRJ3/4WkoEkIuI+kjfgg2kttVoctXwZOC79u2hP4LK03idI9seqJte/Ux3744MkrZxfk/TGb2hhc7Ot67+S/Jv/Engt8K8lz/0zsJ6k5/tTksG9aj5PMtbwA5Kf6UXgLyu9MCJuBa4gGeh9IP271KdIBqqfBL5A2tKqYT3wbeDfSNofLzKzxfLV9O9fSvppK7VbZcUZAGazknQH8JmIuLrXtZiZj8CtBklvUTKner6S059fR3LEZWYZ4LOprJalJH3ThSTzpU+JiMd7W5KZFbmFYmaWU26hmJnlVFdbKPvtt18sWbKkm5s0M8u9jRs3/iIiFpcv72qAL1myhA0bNnRzk2ZmuSep4lmzbqGYmeWUA9zMLKcc4GZmOeUANzPLKQe4mVlO+UxMM+updWPjrF6/ma2TUwwPFVi5fCkrlo30uqxccICbWc+sGxtn1dpNTE1vB2B8copVazcBOMTr4BaKmfXM6vWbd4Z30dT0dlav39yjivLFAW5mPbN1cqqh5TaTA9zMemZ4qNDQcpvJAW5mPbNy+VIKgwMzlhUGB1i5vN4b2/c3D2KaWc8UByo9C6U5DnAz66kVy0Yc2E2atYUi6fOSnpJ0d8my1ZLuk/QzSV+XNNTZMs3MrFw9PfBrgOPLlt0GHBERryO5I3XLdww3M7PGzBrgEfED4Fdly/4lIralD38MHNiB2szMrIZ2zEI5B7i1DesxM7MGtBTgkj4MbAOuq/Ga8yRtkLRhYmKilc2ZmVmJpgNc0tnAicB7osat7SNiTUSMRsTo4sW73dLNzMya1NQ0QknHAxcBb4mIF9pbkpmZ1aOeaYTXA7cDSyU9Julc4B+AlwG3SbpT0mc6XKeZmZWZ9Qg8Is6osPhzHajFzMwa4GuhmJnllAPczCynHOBmZjnlADczyykHuJlZTjnAzcxyygFuZpZTDnAzs5xygJuZ5ZQD3MwspxzgZmY55QA3M8spB7iZWU45wM3McsoBbmaWU03dkcfMzOqzbmyc1es3s3VyiuGhAiuXL2XFspG2rNsBbmbWIevGxlm1dhNT09sBGJ+cYtXaTQBtCXG3UMzMOmT1+s07w7toano7q9
dvbsv6HeBmZh2ydXKqoeWNcoCbmXXI8FChoeWNcoCbmXXIyuVLKQwOzFhWGBxg5fKlbVm/BzHNzDqkOFDpWShmZjm0YtlI2wK7nFsoZmY55QA3M8spB7iZWU7NGuCSPi/pKUl3lyzbV9Jtku5P/17U2TLNzKxcPUfg1wDHly27GPhORBwKfCd9bGZmXTRrgEfED4BflS0+GfhC+vUXgBVtrsvMzGbRbA98/4h4PP36CWD/ai+UdJ6kDZI2TExMNLk5MzMr1/IgZkQEEDWeXxMRoxExunjx4lY3Z2ZmqWYD/ElJBwCkfz/VvpLMzKwezQb4zcBZ6ddnAd9oTzlmZlaveqYRXg/cDiyV9Jikc4HLgLdJuh84Ln1sZmZdNOu1UCLijCpPvbXNtZiZWQN8JqaZWU45wM3McsoBbmaWUw5wM7Oc8g0dzKyr1o2Nd+wONf3GAW5mXbNubJxVazcxNb0dgPHJKVat3QTgEG+CA9zM2ma2o+vV6zfvDO+iqentrF6/2QHeBAe4mbVFPUfXWyenKn5vteVWmwcxzawtah1dFw0PFSp+b7XlVpsD3Mzaop6j65XLl1IYHJjxfGFwgJXLl9a1jXVj47zpsu9yyMX/mzdd9l3WjY03X/Ac4BaKmbXF8FCB8QohXnp0XWylNDMLxQOgu3OAm1lbrFy+dEbAQuWj6xXLRpoKXA+A7s4BbmZtUevouh1zvz0AujsHuJm1TaWj63a1Pupp0fQbD2KaWUfVMzulHq0OgM5FPgI3s45qV+ujlQHQucoBbmYd1c7WR7MDoHOVWyhm1lGdaH14PnjCR+Bm1lHtbn14PvguDnAz67h2tj48H3wXt1DMLFc8H3wXH4GbWSbUe7KP54Pv4gA3s6YUA3d8cooBie0RjDTZ326kr13vKfv9cOcfB7iZNWTd2DiX3HwPk1PTO5dtjwCaH1BspK9dz6Bovwx0OsDNrG7lwVhJvQOKpUfIUeU11frasw2K9stAZ0sBLukDwPuAADYB742IF9tRmJllT6VgrKQYvNXaGPV8EEDzfe1+GehsOsAljQDnA4dHxJSkG4HTgWvaVJuZZUy9ATg8VKjZxqjng6CVk336ZaCz1WmE84GCpPnAAmBr6yWZWVbVE4DF4K3Vxqj1QSBgZKjApe/+nabbHf1y4aumAzwixoFPAI8CjwPPRMS/lL9O0nmSNkjaMDEx0XylZtZzlYIRYJ6Sv0uDt1Ybo9oHwchQgYcueyc/uvjYlnrVK5aNcOm7f4eRoUJbPhCyqpUWyiLgZOAQYBL4qqQzI+La0tdFxBpgDcDo6Gi1sQozy4FGTouv1caodypgq7W2M7CzOC2xlUHM44CHImICQNJa4PeBa2t+l5nlWj3BuG5snOd/s2235cWQztulYbM6LbGVAH8UOFrSAmAKeCuwoS1VmVnm1HsEWm2GyaIFg3z0Xa/d+T15ujRsVqclNh3gEXGHpJuAnwLbgDHSVomZZU8rLYBGjkCrzTBZsMf83AR2uaxOS2xpFkpEfDQiDouIIyLiTyLiN+0qzMzapxjA4+lJM8UArvc62o3cFi2rYdeKaoOuvZ6W6KsRmvWBVu9L2UgoZzXsWpHVaYkOcLM+0OpRcSOhnNWwa0VWpyX6WihmfaDeMxMr9cmBmjNKyuVthkm9sjjoqojuTc0eHR2NDRs8UcWs2+qZGVLpNYMDgoDpHVH1+6zzJG2MiNHy5T4CN+sDxaAtvwzs0y9M84Eb7uSCG+7ceU3vUtPbKx/g5XlGyVziHrhZjjVyd/YVy0ZYuOfux2zFiC4P71ryPKNkLvERuFlO1Ts3u57rbjcqzzNK5hIHuFlOzXZ24LqxcT72zXt4+oXpKmuYXaUeeN5nlMwlDnCznKo1NbDeGybMZnp7sGjBIBHwzNT0nJlRMlc4wM1yqtrUwHkSH/vmPXWFt2DWtsrTL0xTGBzgU6cd5eDOGA9imuVUtWtzb4+YtW0yMlTg4cveyadOO4oBadZtNXLWpnWPA9wsp4pnB9YTwKVKe9grlo1w+alHVvwgKOeZJ9njADfLsRXLRtjRwPS/ocLgbqeAl58mXu0DwTNPssc9cLMcKp0aOK/CCTjlBiQuP/XIqj3s0tPEKw2AeuZJNjnAzXKmPGDrOQFnR0TdA5Bz9Vomc5ED3Cxnqt0wodKp8EWNtj+yeOEm250D3CzjStsl+xQGZ1zLpNSOCK447Si3P/qIA9wsY8oD+/mXtu28qFS18IbkKNvtj/7iADfLkPL+dq3ALlU+NdCB3R8c4GYZsW5snAtvvKuhqwIWZeHuMNZ9DnCzHls3Nr7bdbobMVLSOrH+4gA364Fin3t8cqqu65FU4wHK/uYAN+uy8j53o+G9aMEgky/4yoDmADfrqlb63JC0S3508bFtrsryygFu1gXtuLmC2yVWrqWLWUkaknSTpPsk3Svpje0qzGyuKLZMGgnvRQsGOfPog3deYGpkqOCZJrabVo/ArwS+HRGnSNoDWNCGmsxyrfREnOGhAs//ZtusN1coDmSOuK9tDWg6wCXtA/whcDZARLwEvNSesszyqdKNhmcz25UCzapppYVyCDABXC1pTNJnJS0sf5Gk8yRtkLRhYmKihc2ZZV+1C01VUxgccHhb01oJ8PnA64GrImIZ8DxwcfmLImJNRIxGxOjixYtb2JxZtq0bG6/riLuo0s0VzBrRSg/8MeCxiLgjfXwTFQLcbK6acTKOoJGZgVf4BsHWBk0HeEQ8IWmLpKURsRl4K/Dz9pVmlh3lVwh8adt2XpjesfP5RsLbp75bu7Q6C+UvgevSGSgPAu9tvSSzbGn2CoGVeC63tVNLAR4RdwKjbarFLDMavedkNUOFQRbuOd/X5raO8JmYZmWauedkJYXBAS456bUObOsYB7hZmUanAlayaMEgH32Xw9s6ywFufa/ee07WUpyF4jMprZsc4NbXmh2g9CVdLQsc4NbXmmmX+JKulhUtXY3QLO+2NnDmJHgaoGWLj8Ct75SeQVmPAYkdEW6XWOY4wK1vNHPz4MLggK9XYpnlALc5r5G74QgY8gCl5YQD3Oa08lkm9Rj7yNs7WJFZ+zjAbU4o7WsPpKe+DzRxCvzwUKFDFZq1nwPccq/aqe+NhrdnmFjeOMAt19aNjXPhjXc1fb2SeYIdPoPScsoBbrkz40YKJDcDbtRQYdAXmrLcc4BbrpS3SxoNbx9p21ziALfcaKVd4vncNhc5wC2zWm2VFGeh+Kjb5ioHuGVSs60SH2lbP3GAW+Y02iopHp37SNv6jQPcMqHZdsmAxOWnHunQtr7kALeec7vErDkOcOuJRi/pWs73nDRzgFuXNXJlwErc5zbbxQFuHdfqdEC3Sswqc4BbRzXb3/bMErPZtRzgkgaADcB4RJzYekk2VzR75qRnlpjVpx1H4O8H7gVe3oZ12RzQSp/b7RKz+rUU4JIOBN4J/C3wV22pyHKrmXtOAkgQvqSrWcNaPQK/ArgIeFm1F0g6DzgP4OCDD25xc5ZVjdy6zP1ts/ZoOsAlnQg8FREbJR1T7XURsQZYAzA6OtrcVfctc4ozS
7ZOTu28CXA9/7jub5u1TytH4G8CTpJ0ArAX8HJJ10bEme0pzbKoUn+73l63+9tm7TWv2W+MiFURcWBELAFOB77r8J7bim2SZgYnhwqDDm+zNvM8cKvJp7ybZVdbAjwivg98vx3rst5rxz0n3es26zwfgdsMrd5zEtzrNusWB7jNsHr95rqmApZaMDiPPeYP8MzUNMOeGmjWNQ5w22nd2HhDvW63Scx6ywHex2b0utOzIevlNolZ7znA+0TpiTf7FAaZ3r6D51/a1SqpFd7FgUzf5d0sWxzgfaB8YLLRa5V86rSjHNZmGdT0iTyWH80MTBaNDBUc3mYZ5QCf4xodmCxVGBxg5fKlba7IzNrFLZQ5qtlLuxYNFQa55CSfQWmWZQ7wOWC2Acp6+JrcZvnjAM+5ZgcoF6WXgPWJN2b55QDPsWbvOTkyVOBHFx/boarMrFsc4DnUSn/bA5Nmc4cDPEdaHZj0pV3N5hYHeA60cpd38IwSs7nKAZ4xpdcnKZ663sg1uQU771HpAUqzuc0BnhGVjrKLg5P1hrcvMGXWXxzgGVA+FbAZ7m+b9R8HeI81OxWwyMFt1r8c4D3QjntOemDSzBzgXVSpz93I4GTgU93NbBcHeBc0Ow3QoW1mtTjAO6CVW5WB7zVpZvVxgLdJtb52o+HtqYBmVi8HeIsqnd7e6KCkWyVm1oymA1zSQcAXgf1J8mdNRFzZrsLyoB3ztz2bxMya1coR+Dbgwoj4qaSXARsl3RYRP29TbZnW6vxtB7eZtarpAI+Ix4HH069/LeleYATIfYCX3uGm/HoirV5YyifemFm7KJo8gpyxEmkJ8APgiIh4tuy584DzAA4++OA3PPLIIy1vr5MqtUUG54m995rP0y9M133iTfF1xQtSub9tZs2StDEiRsuXtzyIKWlv4GvABeXhDRARa4A1AKOjo61/WnTY6vWbd+tpT++InUfc9fwAPso2s25oKcAlDZKE93URsbY9JfXW1smppr/X87fNrJtamYUi4HPAvRHxyfaV1H2lPe95acujUZ6/bWbd1soR+JuAPwE2SbozXfahiPhW62V1XrUTb5oJb88oMbNeaGUWyg9Jxupyp3yg0ifemFke9eWZmJUGKuvlAUozy4q+CfDSlkmjBL6/pJllzpwP8ErXKmnEyFCBH118bJurMjNr3ZwO8FavVVIYHGDl8qVtrsrMrD3mdIA30uu+4rSjdn5PpVPozcyyJpcBXn6tkv982GK+d98E45NTO09dH2hgPvfIUGFnUDuwzSwvchfg5W2R8ckprv3xozufL4Z2veHtNomZ5VXuAryVKYBFxduceR63meVZ7gK8lWuVOLDNbC7JfICX97v3KQw2NSXQ0wHNbK7JdIBX6nc3w31uM5uLMh3gzfS7fSMFM+sXmQ7w2frdQ4VBFu453/O2zawvZTrAh4cKNdsmz0xNc+dH397FiszMsmNerwuoZeXypRQGB6o+PzxU6GI1ZmbZkukj8GI7pNJd4D0waWb9LtNH4JCE+NhH3s4Vpx3FyFABkUwJ9O3LzKzfZfoIvNSKZSMObDOzEpk/Ajczs8oc4GZmOeUANzPLKQe4mVlOOcDNzHJKUeeND9qyMWkCeKRk0X7AL7pWQOvyVi/kr+a81Qv5qzlv9UL+am53va+KiMXlC7sa4LttXNoQEaM9K6BBeasX8ldz3uqF/NWct3ohfzV3q163UMzMcsoBbmaWU70O8DU93n6j8lYv5K/mvNUL+as5b/VC/mruSr097YGbmVnzen0EbmZmTXKAm5nlVM8CXNLxkjZLekDSxb2qoxpJB0n6nqSfS7pH0vvT5ftKuk3S/enfi3pdaylJA5LGJN2SPj5E0h3pfr5B0h69rrGUpCFJN0m6T9K9kt6Y5X0s6QPp78Pdkq6XtFfW9rGkz0t6StLdJcsq7lMl/j6t/WeSXp+RelenvxM/k/R1SUMlz61K690saXm3661Wc8lzF0oKSfuljzu2j3sS4JIGgE8D7wAOB86QdHgvaqlhG3BhRBwOHA38eVrjxcB3IuJQ4Dvp4yx5P3BvyeO/Az4VEa8BngbO7UlV1V0JfDsiDgOOJKk9k/tY0ghwPjAaEUcAA8DpZG8fXwMcX7as2j59B3Bo+uc84Kou1VjqGnav9zbgiIh4HfBvwCqA9D14OvDa9Hv+Mc2TbruG3WtG0kHA24FHSxZ3bh9HRNf/AG8E1pc8XgWs6kUtDdT8DeBtwGbggHTZAcDmXtdWUuOBJG/OY4FbAJGcDTa/0n7v9R9gH+Ah0sH0kuWZ3MfACLAF2JfkWvq3AMuzuI+BJcDds+1T4J+AMyq9rpf1lj33R8B16dczsgJYD7wxC/s4XXYTyYHIw8B+nd7HvWqhFN8IRY+lyzJJ0hJgGXAHsH9EPJ4+9QSwf4/KquQK4CJgR/r4FcBkRGxLH2dtPx8CTABXp22fz0paSEb3cUSMA58gObp6HHgG2Ei293FRtX2ah/fiOcCt6deZrVfSycB4RNxV9lTHavYg5iwk7Q18DbggIp4tfS6Sj9NMzMOUdCLwVERs7HUtDZgPvB64KiKWAc9T1i7J2D5eBJxM8sEzDCykwn+jsy5L+3Q2kj5M0s68rte11CJpAfAh4CPd3G6vAnwcOKjk8YHpskyRNEgS3tdFxNp08ZOSDkifPwB4qlf1lXkTcJKkh4GvkLRRrgSGJBVvnZe1/fwY8FhE3JE+vokk0LO6j48DHoqIiYiYBtaS7Pcs7+Oiavs0s+9FSWcDJwLvST90ILv1vprkg/2u9D14IPBTSa+kgzX3KsD/H3BoOnq/B8mgxM09qqUiSQI+B9wbEZ8seepm4Kz067NIeuM9FxGrIuLAiFhCsj+/GxHvAb4HnJK+LDP1AkTEE8AWSUvTRW8Ffk5G9zFJ6+RoSQvS349ivZndxyWq7dObgT9NZ0ocDTxT0mrpGUnHk7QDT4qIF0qeuhk4XdKekg4hGRj8SS9qLBURmyLityJiSfoefAx4ffo73rl93Ivmf/phegLJ6PK/Ax/uVR016nszyX8zfwbcmf45gaSv/B3gfuD/APv2utYKtR8D3JJ+/dskv+APAF8F9ux1fWW1HgVsSPfzOmBRlvcx8DHgPuBu4EvAnlnbx8D1JD366TRIzq22T0kGuj+dvg83kcywyUK9D5D0jYvvvc+UvP7Dab2bgXdkZR+XPf8wuwYxO7aPfSq9mVlOeRDTzCynHOBmZjnlADczyykHuJlZTjnAzcxyygFuZpZTDnAzs5z6//Sn0SdRnW0yAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.title(\"text length vs mean audio duration\")\n", + "plt.scatter(list(text_vs_avg.keys()), list(text_vs_avg.values()))" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAEICAYAAABGaK+TAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAepElEQVR4nO3dfZRcdZ3n8fcnnQYqoHSQiKQhhlE3jqAQbWdxcUYXdYIOAuM4iCsrCG7O7M74tBg30RmBWWdhTkTBM44O4wMqiKDEiPgQWdHj6CBuYsCAkIEBJHR4iEqjkI50ku/+cW8l1ZV6rttd91Z9Xuf0SdW9t+/91i9V3/r19/e79yoiMDOz4pnT6wDMzKwzTuBmZgXlBG5mVlBO4GZmBeUEbmZWUE7gZmYF5QReUJJeKenBHh37AklX9uLYM6G6LSXdIemVPQypKUmLJYWkuenzb0k6K6N9h6TnZrGvFo+XWeyDxgk8Q5Lul/TqvO2ryzh69kXRKxFxdER8v9dxtCMiXhsRn+t1HM3U+vIvSux55ARuZpko/zVgs8cJPCOSvgAsAr4u6QlJ70uXHy/pXyVNSLqt/Ke5pP8k6ZeSjkyfHyvpMUnPr7evJsdfKOk6Sdsk3SfpnRXrLpB0raTPS/ptWiIYq1j/Ykkb03VflnSNpA9JOhD4FrAwjeMJSQvTX9uv3v6q4vqEpA9XLfuapP+ZPv5fksbT/WyW9Ko6+7lC0j+mf24/IelHkp4l6dK03e6StLTF9iil+3tM0s+Bl1Yda89fP5L+QNLN6f/fQ5L+QdJ+FduGpL+QdHe6zcclqc5rqLuv6pJIuuz7kt6ePh6S9OH0PXMv8CdV+67cdo6kv5b0C0mPpv9PB9eKKd1+RRrPVknn1Ntv+vxsST+sev1/Kelu4O502WWStkj6jaQNkv4wXX4S8H7gTen/4W3txF7RRmdJeiBtiw/Ue10DISL8k9EPcD/w6orno8CvgNeRfFm+Jn2+IF3/d8BNQAnYBPxVvX3VONYrgQfTx3OADcAHgf2A3wPuBZal6y8AdqRxDAEXAT9O1+0H/AJ4FzAMvAF4CvhQ9XEqjl13fzXi/CNgC6D0+XxgElgILEnXLUzXLQaeU2c/VwC/BF4CHJC2233AW9MYPgR8r8X2uBj4F+AQ4Ejg9srXWNn26fGOB+am8d0JvLti2wBuAEZIvnS3ASfVeQ1195U+D2BuxfbfB96ePv4L4K403kOA71VuX7XtOcA96es+CFgDfKFOTCcBjwDHAAcCX0z3+9zq/abPzwZ+WPX6b0xjKqXLzgSekb7O84CHgQMq3jtXVsXQUuwVbfTPJJ+ZY4HfAb/f689+r37cA59ZZwLfjIhvRsTuiLgRWE+S+CB5Mx8M/AQYBz7e4XFeSvKl8LcR8VRE3EvyJj+jYpsfpnHsAr5A8uaHvQnlYxExFRFr0niaqbe/av9C8qH7w/T5G4GbI2IrsAvYH3iBpOGIuD8i/r3BMb8aERsiYgfwVWBHRHw+jeEaoNwDb9YepwN/FxG/jogtwMfqHTA93o8jYmdE3A/8E/CKqs0ujoiJiHiAJLEe18W+6jkduDQitkTEr0m+NOt5C/CRiLg3Ip4AVgFnqHaJ43TgsxFxe0Q8SfKebNdFaVtOAkTElRHxq/R1XkLyf7ykxX21EvuFETEZEbcBt1H/vdf3nMBn1rOBP0//ZJ6QNAG8HDgcICKmSHqWxwCXRNrN6PA4C6uO837gsIptHq54vB04IP1QLATGq469pYVj1tvfNOl+vwS8OV30X4Cr0nX3AO8mSRqPSvpSRYmmlkcqHk/WeH5Q+rhZeyxk+mv8Rb0DSvoPkm6Q9LCk3wD/Bzi0arPqtjiIGlrcVz0tx5xuW7n+FyRf0ofV2bbV/dYz7f0i6b2S7pT0eNr2B9Pe62wWe0vtPQicwLNVnYC3kPz5N1Lxc2BEXAwgaRQ4H/gscImk/Rvsq5EtwH1Vx3laRLyu6W/CQ8BoVd32yA7jqOdq4I2Sng38R+C6PTuP+GJEvJwk6Qbw9xkcr1l7PMT017iowb4+QVK6eF5EPJ3ki6BmjbsFjfb1ZPrvvIrtn1XxuJ2Yt5K0Z+W2O5n+hdfqfp9sEFPZnvdIWu9+H0nPfn5EjACPs/d1Nns/tRP7wHMCz9YjJLW7siuB10talg5CHaBkWt4RacK8Avg0cC7JB+l/N9hXIz8BfqtkQLCUHusYSS9t+ptwM0kp468kzZV0KvAHVXE8o9EgWDMRsZGkfv0pYF1ETABIWiLpxPSLawdJL3p3p8ep0Kw9rgVWSZov6QjgHQ329TTgN8ATkp4P/Pcu4qq7r4jYRlJGOzON9xzgORW/ey3wzvS9Mx9Y2eA4VwPvkXSUpINIevrXRMTOGtteC5wt6QWS5pF0KCrdCrxB0jwlc8PPbeE17iQZC5gr6YPA0yvWPwIsllQv97QT+8BzAs/WRcBfp3+2vzetr55K0tPaRtIzXEHS7u8Engn8TVpmeBvwtvKIffW+Gh00rQGfTFJ7vY+9ybJp0o2Ip0gGLs8FJkjq9jeQDA4REXeRfKjuTWNpVOJo5IvAq9N/y/YnGVD8Jcmfxc8kqXl2pYX2uJDkT/P7gO+Q1PDreS9J2ee3JHX0a7oIrdm+/hvJ++NXwNHAv1as+2dgHUnN96ckg3v1fIbkNf2A5DXuoM6XVER8C7iUZFD4nvTfSh8lGdR+BPgcafmrgXXAt4F/I2njHUwvsXw5/fdXkn7aTey2d2aA2R6SbgE+GRGf7XUsZlafe+CGpFcomVM9V8kpzS8i6UWZWY75zCmDZIrXtSTzgO8F3hgRD/U2JDNrxiUUM7OCcgnFzKygZrWEcuihh8bixYtn85BmZoW3YcOGX0bEgurls5rAFy9ezPr162fzkGZmhSep5hmyLqGYmRWUE7iZWUE5gZuZFZQTuJlZQTmBm5kVlM/ENLOeWrtxnNXrNrN1YpKFIyVWLFvCaUtHex1WITiBm1nPrN04zqo1m5ic2gXA+
MQkq9ZsAnASb4FLKGbWM6vXbd6TvMsmp3axet3mHkVULE7gZtYzWycm21pu0zmBm1nPLBwptbXcpnMCN7OeWbFsCaXhoWnLSsNDrFjW6k3sB5sHMc2sZ8oDlZ6F0hkncDPrqdOWjjphd6hpCUXSZyQ9Kun2imWrJd0l6WeSvippZGbDNDOzaq3UwK8ATqpadiNwTES8iOTu013fSdzMzNrTNIFHxA+AX1ct+05E7Eyf/hg4YgZiMzOzBrKYhXIO8K0M9mNmZm3oKoFL+gCwE7iqwTbLJa2XtH7btm3dHM7MzCp0nMAlnQ2cDLwlGtzaPiIuj4ixiBhbsGCfW7qZmVmHOppGKOkk4H3AKyJie7YhmZlZK1qZRng1cDOwRNKDks4F/gF4GnCjpFslfXKG4zQzsypNe+AR8eYaiz89A7GYmVkbfC0UM7OCcgI3MysoJ3Azs4JyAjczKygncDOzgnICNzMrKCdwM7OCcgI3MysoJ3Azs4JyAjczKygncDOzgnICNzMrKCdwM7OCcgI3MysoJ3Azs4Lq6I48ZmbWmrUbx1m9bjNbJyZZOFJixbIlnLZ0NJN9O4Gbmc2QtRvHWbVmE5NTuwAYn5hk1ZpNAJkkcZdQzMxmyOp1m/ck77LJqV2sXrc5k/07gZuZzZCtE5NtLW+XE7iZ2QxZOFJqa3m7nMDNzGbIimVLKA0PTVtWGh5ixbIlmezfg5hmZjOkPFDpWShmZgV02tLRzBJ2NZdQzMwKygnczKygmiZwSZ+R9Kik2yuWHSLpRkl3p//On9kwzcysWis98CuAk6qWrQS+GxHPA76bPjczs1nUNIFHxA+AX1ctPhX4XPr4c8BpGcdlZmZNdFoDPywiHkofPwwcllE8ZmbWoq4HMSMigKi3XtJySeslrd+2bVu3hzMzs1SnCfwRSYcDpP8+Wm/DiLg8IsYiYmzBggUdHs7MzKp1msCvB85KH58FfC2bcMzMrFWtTCO8GrgZWCLpQUnnAhcDr5F0N/Dq9LmZmc2ipqfSR8Sb66x6VcaxmJlZG3wmpplZQTmBm5kVlBO4mVlBOYGbmRWUE7iZWUH5hg5mNqvWbhyfsTvUDBoncDObNWs3jrNqzSYmp3YBMD4xyao1mwCcxDvgBG5mmWnWu169bvOe5F02ObWL1es2O4F3wAnczDLRSu9668Rkzd+tt9wa8yCmmWWiUe+6bOFIqebv1ltujTmBm1kmWuldr1i2hNLw0LT1peEhVixb0tIx1m4c54SLb+Kold/ghItvYu3G8c4D7gMuoZhZJhaOlBivkcQre9flUkons1A8ALovJ3Azy8SKZUumJVio3bs+beloRwnXA6D7cgI3s0w06l1nMffbA6D7cgI3s8zU6l1nVfpopUQzaDyIaWYzqpXZKa3odgC0H7kHbmYzKqvSRzcDoP3KCdzMZlSWpY9OB0D7lUsoZjajZqL04fngCffAzWxGZV368HzwvZzAzWzGZVn68HzwvZzAzSwXWp0r7vnge7kGbmY9Vy6LjE9MEuwti9SqbfuCWHs5gZtZV7IYUGxnrnirg6KDMNDpEoqZdSyrAcV2yiKtDIoOykBnVwlc0nuAtwMBbALeFhE7sgjMzPIvqwHFdueKNxsUHZSBzo5LKJJGgXcCYxFxDDAEnJFVYGaWf1kNKGY9V3xQBjq7LaHMBUqSpoB5wNbuQzKzomjWc251ZknWc8UH5cJXHSfwiBiX9GHgAWAS+E5EfCezyMwslyqT8sGlYYaHxNSu2LO+3HNuVoeuldx/tPLETGJs9drkRddxApc0HzgVOAqYAL4s6cyIuLJqu+XAcoBFixZ1EaqZ9dLajeNc+PU7eGz71J5lE5NTDM8R8+cNM7F9alrP+YSLb2o4s2QmBxkH5cJXiojmW9X6RenPgZMi4tz0+VuB4yPif9T7nbGxsVi/fn1HxzOz3qnuTVcbkrjk9GOnJcijVn6DWtlF1C9xjI6UMuuFZy2Lm1J0StKGiBirXt5NDfwB4HhJ80hKKK8CnJ3N+lCtWR2VdkXsUx6ZI7GrRgdx4UipcIOMeZ2W2PEslIi4BfgK8FOSKYRzgMszisvMcqSVxFouj5STXa3kXa5DF+1syqxuSpG1rmahRMT5wPkZxWJmPVavTFCv5FFt68Rk3d76kMRFb3jhnh5rkQYZ8/oXg0+lNzOg8fVIas3TrqVReWR3xJ7kfdrSUS56wwsZHSkhktp3ZXLPm7z+xeBT6c0MaFwmKA8slnvnI/OGeWLHTqZ27zt9cPW6zS3NwS7S3XXyOi3RCdzMgPrlgPGJSU64+KY9ZZWPvum4uvO4i1geaUVepyU6gZsZUH9qn2DP8urZF7NxVmVe5PEvho7ngXfC88DNeqfZPOZac70FNedy53m+dj+qNw/cg5hmA6DZDRPKyX1yahdDEpAk6Xrdu17PvrCEE7jZAGg0QFmZ3CE5Kadcsx7N6ewLSziBmxVYq3edaTRAecH1d9RN7llf5tWy5QRuVlBZ3EcSkgtS1TI+Mcl7rrmVA4bnMFIaLsR87UHjBG5WUN3eR7IVATy2fYrf7dzNR990HD9aeaKTd454GqFZQTU7vbs8MDk+MclQnQtLtaofb0fWD9wDNyuoRqd31xqY7JZnnuSPE7hZQdUrizz5u51c+PV9Bya75Zkn+eMSilkBVc7bniOouCRJ3UHJVo2Uhvndzt19dSp8v3IP3KxgqssjuzuojoyOlLj0TcfVnCJ4wSlHF+pKgYPMPXCzgml2d5xmyr3pZtcsccLOPydws4JpZzCxPPuk/O9ojSTtRF1cTuBmBdPq3XFg72nxLoH0J9fAzXKm2enxtWafDA+JkdJwzf3l4d6NNjPcAzfLkVp3P1/x5du48Ot3MLF9ioNLw0jsuWpgdVnkqJXfqHkFQc/h7k/ugZvlSK0ByqndwWPbpwiSKYKPbU+mCVZeNbBcHsnrvRttZjiBm/VAvTJJuz3l6vKIrx44WFxCMZtltcok77nmVt59za0d7a8y6ffr7cysNidws1lWq0zSzZVKiny3d+uOSyhmsyzLAUWXRwZbVz1wSSPAp4BjSDoR50TEzVkEZtZPKi/t2o3S8BwOGB5iYvuUyyPWdQnlMuDbEfFGSfsB8zKIyayv1Lrbe6uGJHZHOFlbTR0ncEkHA38EnA0QEU8BT2UTlln/6PTaJT6D0prppgd+FLAN+KykY4ENwLsi4slMIjMrqHK5pDwLpJWyyWi6Xb1rlpjV0k0Cnwu8GHhHRNwi6TJgJfA3lRtJWg4sB1i0aFEXhzPLv1pTBEXjWSajIyV+tPLEWYnP+ks3s1AeBB6MiFvS518hSejTRMTlETEWEWMLFizo4nBm+bZ24zjnXXtbW1MEPYvEutFxDzwiHpa0RdKSiNgMvAr4eXahmRXD2o3jXHD9HS3fCafcI3eZxLrV7SyUdwBXpTNQ7gXe1n1IZvlWWeMemTfMEzt2MtXGbXEWumRiGekqgUfErcBYRrGY5V51jbt8Yal2+MqAlhWfSm9WQ/VMknKpo9vbmYGvDGjZcQI3q9Lomtzt9LiH5wgEU7v2
llc8aGlZcgI3q9Lomtytmj9vmPNff/Se/fnKgDYTnMDNqnRTox6SuOT0Y6claSdsmylO4DbwKuvdB5eGaXrmTR0+9d1mmxO4DbTqenerc7kBRkrDHLj/XJdHrGecwG2gdXOhqQtOOdoJ23rKCdwGWif17vIApZO39ZoTuA2cypr3nPTqf434mtyWV07gNjBqXbOkWfL2wKTlmRO4DYRW74rjW5ZZkTiBW1+pvPdk+eYI8+cNM7F9qqWZgYccuL8vNGWF4QRufWHtxvF9TnUvl0faOYPSF5qyInECt8Lr5qbB1XyhKSuSbu7IY5YLWVwhEHyhKSseJ3ArtLUbx1u6aXC1IYkzj1/E6EgJkdwdx7NNrGhcQrHCqDVAqQ7246mB1i+cwK0Qquvc5QHKRjNLJIhIzpyMgMcnPTXQ+osTuOVOrZ72UAtnTFa69E3HOUlb33MCt1yp19NuJ3mPjpScvG0geBDTcqXbGSWeSWKDxD1wy4XKskm7yvdfGHV92waME7j1XDcn4jhp2yBzArdZVWuAshOjIyVfs8QGnhO4zZp6A5SN1Lo9pevcZomuE7ikIWA9MB4RJ3cfkvWbTuvb5V525Q0YPI/bbK8seuDvAu4Enp7BvqzPdFrfruxln7Z01AnbrIauphFKOgL4E+BT2YRj/WTtxnHOu/a2tpP3kORT3c1a0G0P/FLgfcDTMojF+kSta3O3ytcpMWtdxwlc0snAoxGxQdIrG2y3HFgOsGjRok4PZwVQ656TjVSfJu8pgWbt6aYHfgJwiqTXAQcAT5d0ZUScWblRRFwOXA4wNjbW2Zwxy712at3uZZtlo+MEHhGrgFUAaQ/8vdXJ2/pPt/ecdH3bLDueB24tqzePu9Vat3veZtnKJIFHxPeB72exL8uXyjnYc7o4c3KkNMwFpxzt5G2WIffAra5OzpysNn/eMOe/3onbbCY4gds+urkyYNmQxCWnH+vEbTaDnMBtmm6uDFjmWrfZ7HACtz3KZ062WyrxPSfNesMJ3Do+c9KXdDXrLSfwAdbumZOVfElXs95zAh8QldMBDy4NM7VrN08+1VqduzQ8xJ+9ZJTv3bXNl3Q1yxEn8AFQPTDZTo/bZ06a5ZcTeJ/rdGASPJvELO+cwPtUN/Vt8JmTZkXgBN5nurkWN/jMSbMicQLvA5VnTta6CXAr3OM2Kx4n8AKrVSZpJXkLGEkvAesZJWbF5QReQN3O3/bApFl/cAIvkG4HJl3fNusvTuAF0O3ApOvbZv3JCTznOrk6YHkg0zcJNutvTuA5U+uek+1wmcRscDiB50StMkk7yduJ22zwOIH3mE+8MbNOOYH3gE+8MbMsOIHPolq97XaTtwcmzazMCXwWdFsmAZ+AY2b7cgKfAdNKJIIOruQKeDqgmTXmBJ6x6nnbnSRv17fNrBUdJ3BJRwKfBw4j6SheHhGXZRVY0VT2utvlnraZdaKbHvhO4LyI+KmkpwEbJN0YET/PKLZC8I0TzKxXOk7gEfEQ8FD6+LeS7gRGgYFI4L4+iZn1WiY1cEmLgaXALVnsL8984o2Z5UXXCVzSQcB1wLsj4jc11i8HlgMsWrSo28P1RDcn3pSvZ+L6tpllrasELmmYJHlfFRFram0TEZcDlwOMjY11OKGud/aZVdLi73netpnNtG5moQj4NHBnRHwku5B6r5sZJeAyiZnNjm564CcA/xXYJOnWdNn7I+Kb3YfVO51cf7vMA5NmNpu6mYXyQ5IpzH1j7cZxzrv2travwe3EbWa9MLBnYpbLJFsnJjm4NMzUrt08+VRrvW6feGNmeTCQCby6TNLOSThDEpecfqyTtpn13MAk8Moe95wOblUGnlliZvkyEAm8usfdSfJ2ucTM8qaQCbyyN72wSWLtdGAS3OM2s3wrXAKv7k2PT0yyas2mPesrE/t/fv4Crtsw3lHy9swSM8u7wiXw1es27zNHe3JqFxd+/Q52TO2eltiv+vEDLZ85OX/eMBPbp5r26M3M8qJwCXxrnbMja11cqtXkPTpS4kcrT+wiKjOz2Ten1wG0a+FIKdP9lYaHWLFsSab7NDObDbnvgVcPWJbr2p2c6g4wPCQO3G8uj0+6XGJmxZbrBF5rwPK6DeP82UtGufLHD7S9P08FNLN+kusEXm/A8upbtjB/3nBbN1VwndvM+k2ua+D1Bix3RfDEjp0MD7V2LS3Xuc2sH+U6gTcasJzaHRy431xG022GlCTz0ZESZx6/iNGREkqf+2QcM+tHuS6hrFi2pOG1uR+fnOLW8/94lqMyM8uHXPfAT1s6ykVveOGe3nW1rKcUmpkVSa4TOCRJ/JLTj6U0PDRtuevaZjbocl1CKSvXr1u9gJWZ2SAoRAKHJIk7YZuZ7ZX7EoqZmdXmBG5mVlBO4GZmBeUEbmZWUE7gZmYFpejgdmMdH0zaBvyiYtGhwC9nLYDuFS1eKF7MRYsXihdz0eKF4sWcdbzPjogF1QtnNYHvc3BpfUSM9SyANhUtXihezEWLF4oXc9HiheLFPFvxuoRiZlZQTuBmZgXV6wR+eY+P366ixQvFi7lo8ULxYi5avFC8mGcl3p7WwM3MrHO97oGbmVmHnMDNzAqqZwlc0kmSNku6R9LKXsVRj6QjJX1P0s8l3SHpXenyQyTdKOnu9N/5vY61kqQhSRsl3ZA+P0rSLWk7XyNpv17HWEnSiKSvSLpL0p2SXpbnNpb0nvT9cLukqyUdkLc2lvQZSY9Kur1iWc02VeJjaew/k/TinMS7On1P/EzSVyWNVKxblca7WdKy2Y63XswV686TFJIOTZ/PWBv3JIFLGgI+DrwWeAHwZkkv6EUsDewEzouIFwDHA3+ZxrgS+G5EPA/4bvo8T94F3Fnx/O+Bj0bEc4HHgHN7ElV9lwHfjojnA8eSxJ7LNpY0CrwTGIuIY4Ah4Azy18ZXACdVLavXpq8Fnpf+LAc+MUsxVrqCfeO9ETgmIl4E/BuwCiD9DJ4BHJ3+zj+m+WS2XcG+MSPpSOCPgQcqFs9cG0fErP8ALwPWVTxfBazqRSxtxPw14DXAZuDwdNnhwOZex1YR4xEkH84TgRsAkZwNNrdWu/f6BzgYuI90ML1ieS7bGBgFtgCHkFxL/wZgWR7bGFgM3N6sTYF/At5ca7texlu17k+Bq9LH03IFsA54WR7aOF32FZKOyP3AoTPdxr0qoZQ/CGUPpstySdJiYClwC3BYRDyUrnoYOKxHYdVyKfA+YHf6/BnARETsTJ/nrZ2PArYBn03LPp+SdCA5beOIGAc+TNK7egh4HNhAvtu4rF6bFuGzeA7wrfRxbuOVdCowHhG3Va2asZg9iNmEpIOA64B3R8RvKtdF8nWai3mYkk4GHo2IDb2OpQ1zgRcDn4iIpcCTVJVLctbG84FTSb54FgIHUuPP6LzLU5s2I+kDJOXMq3odSyOS5gHvBz44m8ftVQIfB46seH5EuixXJA2TJO+rImJNuvgRSYen6w8HHu1VfFVOAE6RdD/wJZIyymXAiKTyrfPy1s4PAg9GxC3p86+QJPS8tvGrgfsiYltETAFrSNo9z21cVq9
Nc/tZlHQ2cDLwlvRLB/Ib73NIvthvSz+DRwA/lfQsZjDmXiXw/wc8Lx29349kUOL6HsVSkyQBnwbujIiPVKy6HjgrfXwWSW285yJiVUQcERGLSdrzpoh4C/A94I3pZrmJFyAiHga2SFqSLnoV8HNy2sYkpZPjJc1L3x/leHPbxhXqten1wFvTmRLHA49XlFp6RtJJJOXAUyJie8Wq64EzJO0v6SiSgcGf9CLGShGxKSKeGRGL08/gg8CL0/f4zLVxL4r/6Zfp60hGl/8d+ECv4mgQ38tJ/sz8GXBr+vM6krryd4G7gf8LHNLrWGvE/krghvTx75G8we8Bvgzs3+v4qmI9DliftvNaYH6e2xi4ELgLuB34ArB/3toYuJqkRj+VJpJz67UpyUD3x9PP4SaSGTZ5iPcekrpx+bP3yYrtP5DGuxl4bV7auGr9/ewdxJyxNvap9GZmBeVBTDOzgnICNzMrKCdwM7OCcgI3MysoJ3Azs4JyAjczKygncDOzgvr/34J4JquheD8AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.title(\"text length vs median audio duration\")\n", + "plt.scatter(list(text_vs_median.keys()), list(text_vs_median.values()))" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3df5QlZX3n8fdnepqhZ3RtlEnC9DAOybKwIgra/lqSFX8giAYImhWM2ZhgOJsNG00MyRATRZMcxiWJmrNEQ4xhYwxo1IwTQckP8CRrlNCTARUEMwF0psEwKk0SZpSeme/+UXXHO7er6tb9Xbfu53XOHPreqlv13KLv9z79ref5PooIzMxs/K0adQPMzKw/HNDNzGrCAd3MrCYc0M3MasIB3cysJhzQzcxqwgHdxoKkMyXtGdG5r5T0J6M4t1knHNCta5IekPTSqh2rx3aM7IujpR0bJX1M0jckPSrpS5JeL+mHJP17+u8xSdH0+N8lbZL0GUnflvRvkv5V0g5JWyStGfX7ssFyQDerpg8Cu4GnAk8Bfhz4l4j4u4h4QkQ8ATgl3Xe28VxEfC197rKIeCJwHPBm4CLgJkka7tuwYXJAt65I+iCwCfiLtGf4S+nzz5f095KWJN0p6cz0+f+S9jaPTx8/U9Ijkk7OO1ab829Ie7B7Jd0v6eeatl0p6SOS/jjtpd4lab5p+7Mk7Uy3/ZmkD0v6DUnrgE8BG5p6vBvSlx2Vd7yWdr1X0m+1PPcJSb+Q/vzLkhbT49wr6SU5b/E5wHUR8VhEHIiInRHxqXbXpVX6+s8A5wEvAF7R6TFsjESE//lfV/+AB4CXNj2eA74JnEvSWTgrfbw+3f6bwC3ADPBFkl5k5rEyznUmsCf9eRWwA3grcBTw/cB9wNnp9iuBb6ftmAKuAj6fbjsK+CrwRmAauBB4HPiN1vM0nTv3eBnt/K8kPWulj48B9gMbgJPSbRvSbZuBH8g5zl8DnyXpWW/K2WczEMDqluc/A7whY/+/Bd456t8b/xvcP/fQrZ9eB9wUETdFxKGI+CtggSQQQhIYnwT8A7AIXNPleZ5D8iXxjoh4PCLuA/6AJPg1/L+0HQdJ0hfPTJ9/PrAa+N2IWI6Ij6ftaSfveK3+jiTI/lD6+NXA5yLiQeAgsAZ4mqTpiHggIv455zg/mh7r14D7Jd0h6Tkl2lnkQeDJPR7DKswB3frpqcCPpumWJUlLwA+S5HGJiGXgOuDpwG9HRLeV4Z5KkhZpPs+vAN/btM/Xm37eBxwtaTVJT3mx5dy7S5wz73hHSI97A3Bx+tRrgQ+l23YBbyL5YntY0g1NKZ3W4zwSEVsi4pT0fd0BbOsxBz4HfKuH11vFOaBbL1oD8m7ggxEx2/RvXURsBZA0B7wN+CPgt1tGXXQS3HcD97ec54kRcW7bV8JDwFxLYDy+y3bkuR54taSnAs8DPnb44BF/GhE/SPKlFMA72x0sIr4B/BbJl1FXPez03sWzSXr9VlMO6NaLfyHJXzf8CfDDks6WNCXp6HQY4MY0gF4H/CFwCUlg/fWCYxX5B+Df0huMM+m5nl4yJfE5ktTHZZJWSzofeG5LO54i6Ukl27JCROwEvgG8H7g5IpYAJJ0k6cXpF9m3SXLrh7KOIemd6XtaLemJwM8AuyLim520RdJaSS8EPkFy3W7q9n1Z9TmgWy+uAn41TXv8YkTsBs4nSX/sJelJX07ye/ZzwPcAv5amJX4S+ElJP5R1rKKTpnnsVwKnAffz3eDZNghHxOMkN0IvAZZI8v6fBL6Tbr+HpId9X9qWzJRICX8KvDT9b8MaYGva3q+TXI8rcl6/FvjztI33kfToz+vg/P9H0r+RfEG9m+SvhHMiIvMLxOpB0XUa06weJN0GvC8i/mjUbTHrhXvoNnEkvVDS96XpjJ8AngF8etTtMuvVirv0ZhPgJOAjwDqSdMarI+Kh0TbJrHdOuZiZ1YRTLmZmNTGylMuxxx4bmzdvHtXpzczG0o4dO74REeuzto0soG/evJmFhYVRnd7MbCxJ+mreNqdczMxqwgHdzKwmHNDNzGrCAd3MrCYc0M3MasIzRc0m0Ladi1x98708uLSfDbMzXH72SVxw+tyom2U9ckA3mzDbdi5yxce/yP7lgwAsLu3nio9/EcBBfcw55WI2Ya6++d7Dwbxh//JBrr753hG1yPrFAd1swjy4tL+j5218OKCbTZgNszMdPW/jwwHdbMJcfvZJzExPHfHczPQUl5990ohaZP3im6JmE6Zx49OjXOrHAd1sAl1w+pwDeA21TblI+oCkhyV9KWf7j0n6gqQvSvp7Sc/sfzPNzKydMjn064BzCrbfD7wwIk4Ffh24tg/tMjOzDrVNuUTE30raXLD975sefh7Y2HuzzMysU/0e5XIJ8Km8jZIulbQgaWHv3r19PrWZ2WTrW0CX9CKSgP7LeftExLURMR8R8+vXZ66gZGZmXerLKBdJzwDeD7w8Ir7Zj2OamVlneu6hS9oEfBz48Yj4Su9NMjOzbrTtoUu6HjgTOFbSHuBtwDRARLwPeCvwFOD3JAEciIj5QTV42Fxm1Kw9f06qocwol4vbbH8D8Ia+tahCXGbUrD1/TqrDtVwKuMyoWXv+nFSHA3oBlxk1a8+fk+pwQC/gMqNm7flzUh0O6AVcZtSsPX9OqsPVFgu4zKhZe/6cVIciYiQnnp+fj4WFhZGc28xsXEnakTc03CkXM7OacMrFzLrmCUXV4oBuZl3xhKLqccrFzLriCUXV44BuZl3xhKLqcUA3s654QlH1OKCbWVc8oah6fFPUzLriCUXV44BuZl274PQ5B/AKcUA3qwmPCTcHdLMa8JhwA98UNasFjwk3cEA3qwWPCTdwQDerBY8JN3BAN6sFjwk38E1Rs1rwmHCDEgFd0geAVwIPR8TTM7YLeA9wLrAPeH1E/GO/G2pmxTwm3MqkXK4DzinY/nLgxPTfpcB7e2+WmZl1qm1Aj4i/Bb5VsMv5wB9H4
vPArKTj+tVAMzMrpx83ReeA3U2P96TPrSDpUkkLkhb27t3bh1ObmVnDUEe5RMS1ETEfEfPr168f5qnNzGqvHwF9ETi+6fHG9DkzMxuifgxb3A5cJukG4HnAoxHxUB+Oa2ZdcqGuyVRm2OL1wJnAsZL2AG8DpgEi4n3ATSRDFneRDFv8yUE11szac6GuydU2oEfExW22B/CzfWuRmfWkqFCXA3q9eeq/Wc24UNfkckA3qxkX6ppcDuhmNeNCXZPLxbnMasaFuiaXA7pZDblQ12RyysXMrCYc0M3MasIB3cysJhzQzcxqwgHdzKwmPMrFbMhcOMsGxQHdbIhcOMsGySkXsyEqKpxl1isHdLMhcuEsGyQHdLMhcuEsGyQHdLMhcuEsGyTfFDUbIhfOskFyQDcbsKxhip/d8uJRN2sFD6ccfw7oZgM0LsMUx6WdVswB3awP8nq3VVvfc1zaad1xQDfrUVHvtuwwxbxA2880SD/aadXmgG7WheZAu0riYMQR2xu92w2zMyxmBMXmYYp5gXbhq9/iYzsW+5YGKeqFl2mnVZ+HLZp1qBGAF5f2E7AimDc8uLS/1DDFvEB7/W27+zqrtKgX7uGU9VAqoEs6R9K9knZJ2pKxfZOkWyXtlPQFSef2v6lm1ZAVgLNsmJ3hgtPnuOrCU5mbnUHA3OwMV1146hE97LxAW/RF0Y2iSU1l2mnV1zblImkKuAY4C9gD3C5pe0Tc3bTbrwIfiYj3SnoacBOweQDtNRu5MgG1uXfbbn3PvHTHVEYqp7E/dD7M8PKzTzoitdNpO636yvTQnwvsioj7IuJx4Abg/JZ9AvgP6c9PAh7sXxPNqiWvpzslddW7zUp3TK8SR0+v/Hg2AnBr2qeRX9+2czH3PO6F11+Zm6JzwO6mx3uA57XscyXwl5L+F7AOeGnWgSRdClwKsGnTpk7balYJeT3dboNj6+zRJ81M89jjB3js8SPTOrMz01x53ilccPocZ2y9pathhu6F11u/bopeDFwXERuBc4EPSlpx7Ii4NiLmI2J+/fr1fTq12XANoqd7welzfHbLi7l/6ytYt2Y1ywdXplrWrVl9+BweZmhZyvTQF4Hjmx5vTJ9rdglwDkBEfE7S0cCxwMP9aKRZ1Qyyp9suWG/buZg5VBI8zHDSlemh3w6cKOkESUcBFwHbW/b5GvASAEn/GTga2NvPhpqN2radi5yx9RZO2HIjZ2y9pTBf3Yui0SiN3HlWMPcwQ2sb0CPiAHAZcDPwZZLRLHdJeoek89Ld3gz8tKQ7geuB10fkjLkyG0Pd3ITsVtGY8Lwhk1OSb3BauZmiEXETyVDE5ufe2vTz3cAZ/W2aWXUMs9ZJUYndn//wHZmvORSR2Q5XUJwsnvpvVsKwb0Lm5eg7maLvCoqTx1P/zUqoytJxeWPW9z1+YEVu3wtSTx4HdLMSqlLrpHXI5OzMNAge2be8IrfvoY2TxykXsxKqtHRcczrmjK23sLR/+YjtrqA4uRzQzUqq4izLol74u15zWmHtlgbfOK0PB3SzMVbUCy/zV4VvnNaLA7rZGOu1gqKXnqsXB3SzMdZrbt83TuvFAd1szPWS2/eN03pxQLeJNsobglW4GdkuZWPjRaMquTI/Px8LCwsjOXfdVSFQ9GoY76H1hmDDMWunedsPnzLQa5Z17l5qqvfalnH/fZkkknZExHzWNvfQa6YOoxaG9R7yCl09sm954Nds2Dcji4J2mZSNg/548EzRmqnDdO9hvYeiG3+DvmbDvBnZa6XIYVaatN44oNdMHUYtDOs9tLvxt7i0f2D1z4dZG6bXL8g6dBImhVMuNVOHUQudvodu0gHbdi7y2HcOFO4jONyOsmmforY0b5tdO830KrF86Lv3sGamp3jRyes5Y+stfU1t9PoFWYdOwqRwD71mqlJEqhedvIesdMCbPnwHp739L3N71I3XtNZAaSagdbhAu15pUWqiddsj+5ZBSXGtxrqkr3r2HB/bsdj31Eavfw1UpdKkteeAXjODWMB42Dp5D3k3Npf2L+cGw6JVfxrnyxv7VdQrLUpNZG1bPhisW7Oa+7e+gs9ueTG33rN3IKmNXr/k69BJmBROudRQFYtIdarseyhzY7P1OHmvORTB/VtfASRVDDtNXXWTmmjeNqjURq+zSatUadKKOaDbWMvLtzdkBcMyOfpuJty0O267cw7y/kevX/J16CRMAqdcbKxs27l4xMiTF528fkU6oFlWMCyTQugmdVV03DLndGrDeuUeuo2NrAlHH9uxyKuePceNX3goudHYYt/jB9i2c/GIQFw2hdBpr7TMcYu2ObVhvfLUfxsbeXntudkZPrvlxWzbuciV2+9aMXqleUr9uM54HNd2W/956r9VWtlg1e6m4QWnz3H1zffmLskGjGVZhDqUc7DhKJVDl3SOpHsl7ZK0JWef/ybpbkl3SfrT/jbTxk1rrrvdmPAyY6/LjIcuCvrjOuNxXNttw9e2hy5pCrgGOAvYA9wuaXtE3N20z4nAFcAZEfGIpO8ZVIOtmlpnQf77tw8cngVZ1KPspEhVmZEnRSNFyg4LrFp6wzM1rawyPfTnArsi4r6IeBy4ATi/ZZ+fBq6JiEcAIuLh/jbTqixrFmTzlHbI71F2EqzKjDzJGikC8Nh3DjC7djrzXM09/Ky/GC7/6J2c9va/HEhNlzJGNVOz7F9ZVh1lcuhzwO6mx3uA57Xs858AJH0WmAKujIhPtx5I0qXApQCbNm3qpr1WYFQ9y7yZl626HRMOK9/bu15zWuZ7azz39r+464hRL0v7l5leJaanxPLBI+unNPfw82Z0NvLyo8hfj2IRCuftx1O/xqGvBk4EzgQuBv5A0mzrThFxbUTMR8T8+vXr+3Rqg9GWOC37p38jSDd6fpu33MhDj658bWuw6vS9XXD6HGuPWtlXWT4UrDtqdWEPv8x7GXb+ehTlHNrl7d17r6YyPfRF4PimxxvT55rtAW6LiGXgfklfIQnwt/elldbWKFdvbzdbE74bpFt7fi2ZGWZnprnyvFNWjN3u9L3lBeZH9y9zx9te1tN7KTr+oAx7pmZRKsy99+oq00O/HThR0gmSjgIuAra37LONpHeOpGNJUjD39bGd1sYob5xl5a2np3REJcFGj7JdembdmtVHBIVtOxdzA+zi0v7cHmJefjmgsEeZl4NvVfdKg0V5e4+6qa62PfSIOCDpMuBmkvz4ByLiLknvABYiYnu67WWS7gYOApdHxDcH2XA70ijroHcyw7HdF0zz9kZPsEhzCqa5LVl554aiHmXre2kdsQNHpoSqNiKmX4ry9j//4TsyX+NRN6PnmaI1UaVFh4vkzfZsmJI4FMGG2Rke+86BwprlrRozRhsawTbvfK3758kL2uNyzbuV977bzdi1wSqaKeqAXiPj0FvMCoL9Ijhc/rbZCVtuzKxvnrd/WZMa2Or+RVZ1nvo/ITq5cTao4N963BedvJ5b79l7xHmuuvDUwz3nKYmDEYf/W0bevkV530GkoyZ1wo+LiFWXA/oE6nSUQnPqohFM5zI+xFnH/ZPPf+3w9sZ5
rrrw1BU92BO23Fiq7TPTU4eXais7LntQ47iLvijG4a+lXrg+ejU55TKBOkkVFKVIpleJJxy9mqV9yx3lvLPOk9emY9ZOs/ao1Zn5604C5iACbF7qIe8LxykJ6wenXOwInaQKioYZLh+Kw7Mxy4zdblhMxzK3TtnPCo5v++FTMoNgN7XK+x1M81IPo5wTMGh1/8tj3DmgT6BOcsqDyge3pnjGNS+b9UVR12F9nlBUfQ7oE6iTnHLZmZOdyuqx1iUvO8o5AYNU57886sJrik6grNogr3p2MouzdeZl2ZmTDcesnT7iuK97fn4RtnHvseap69qgkzqqZ5y4hz6hmnvDZf6ULpqg05CX8771nr217LHmGdf0UTt1/cujThzQLfdP6Td/5E6gOPg3HLN2OvcG5ijKv45aXdJHzSbx/+O4cUC33D+ZD0b05eZlXXusk8b/H6vP49CtbX2Vuk9lNxsnRePQfVN0zAxiYYF2Nz5908tsPLiHXmFZdVFaZyBC9qIQnZ6j3Q3PrKn+ZjZ8nilacVmz74AVI08+9PmvZVYNXNq/3NUEj04qH3oSiVn1uYc+YllBdXpVUgCrdXm2dvJy3Z3WtS6qfOh8utlouYdeYZmrzHcayVNZue6sMeZv+vAdvP0v7jpch6XVoQgEmX8NOJ9uVl0O6CPWzwCZNcEjr7jWI/uWc4N24zieRDKZXIBrfHmUy4j1K0DmTfAo+sIIklV7so5T1+nrVqzxF93i0v4j1mvtx2gqGzz30Iesufczu3aa7/SwFFvz+ptZvahtOxdZ1WYloCDJi+f1xtxTmywuwDXeHNAHrDWAN68gn5XDnplexYFDwfLB9nn0QxG5a2I2elrtlnUruslZx+nrVswFuMabA/oAtd6QzLsJ2ezJ69YcXiSh0TPOWwmoaKmzooUpGpxCsVajKsDlvH1/lAroks4B3gNMAe+PiK05+70K+CjwnIiYuDGJrb+U+x4/0PHq9g8u7V/RM85b6uxFJ6/PrZJY1KMS+ENjmUZRgMsLZ/RP24AuaQq4BjgL2APcLml7RNzdst8TgTcCtw2ioVWX9UvZjayeUDdLneX1tDyO3IqMogCX8/b9U6aH/lxgV0TcByDpBuB84O6W/X4deCdweV9bOCbKpDjaKeoJdbrU2btec5pLnVpXhn3vxHn7/ikzbHEO2N30eE/63GGSngUcHxE39rFtAzOIAldlf/mmp8TszDQiqSHe+Llo1aA8eXnNDbMzmasSedV5q6Ki32PrTM83RSWtAn4HeH2JfS8FLgXYtCl/abJBGlS+Li/FMTszzbo1q1lc2s+UxPLBYN2a1SuKaXXTrnb5To9SsXHghTP6p0xAXwSOb3q8MX2u4YnA04HPSAL4PmC7pPNab4xGxLXAtZDUcumh3V3rR74uqwrivscPrNhvZnqKK887BVhZaCtribdO2+UFB6wO/HvcP22Lc0laDXwFeAlJIL8deG1E3JWz/2eAX2w3ymVQxbmKhj9t27nIm3LyzoLcMd2txy9ToXBmehVHT0+xtG85d3JP8w3KE7bcmDkNv2y7zGwy9FScKyIOSLoMuJlk2OIHIuIuSe8AFiJie3+b272itAVwxM+tGvm6duNhy978/PbyIfYvHwLIndzTnHf3Arxm1qtalc/NKwc7V1BsCpLUyFUXngqQ2ftuXkAiryfdjeYeet5Yc9/INLNmE1M+t9vhT41g/uaP3JnZm25eQCKvJ92p1ps+ziOaWa9qFdDbpS2Keu/t6p40blBm3ZFvlVeWtl0xLY9KMbNe1Cqgtxv+lLetbF68MS0f4Mrtd2XWV5mdmeaVzzxuxdqfTp+Y2aDVKqCXSVtkbcubcdmq0dNv9KSLbqDOP/XJTp+Y2VDV6qZot/JupjZzD9vMqqDopqhXLILM1XmmV4lj1k572ryZjY1apVy6VXaEiWs2m1mVOaCn2o0wcc1mM6s6p1xKKqq1YmZWBQ7oJblms5lV3USnXDrJibvWiplV3cT20Bs58cWl/QTfzYnnLSqRNRLGNZvNrEomNqB3mhP3CkBmVnUTkXLJSq10kxN3rRUzq7LaB/S84Yaza6d5ZN/KWizOiZvZuKp9yiUvtRKBc+JmVitjH9C37VzkjK23cMKWGzlj6y0rbmrmpVAe3b/snLiZ1cpYp1zazd7ctnMxdz3PDbMzzombWa2MdUBvN1Ilb9EKp1bMrI7GOuVSNFIlb9GKKcmpFTOrpbEO6HkjUjbMzuQG+0MRDuZmVktjHdCLZm8WBXszG752Axisd2OdQ29Xx7xofVEzGx6Xnx6OUgFd0jnAe4Ap4P0RsbVl+y8AbwAOAHuBn4qIr/a5rZnyRqqUXbTCzAavaACDP5P90zagS5oCrgHOAvYAt0vaHhF3N+22E5iPiH2Sfgb438BrBtHgTnhYotnobdu5mLtmr8tP91eZHvpzgV0RcR+ApBuA84HDAT0ibm3a//PA6/rZyCxeDs6s+hqpljy+p9VfZW6KzgG7mx7vSZ/LcwnwqawNki6VtCBpYe/eveVb2aLT0rdmNhp5w4fB97QGoa+jXCS9DpgHrs7aHhHXRsR8RMyvX7++6/N4OTiz8VCUUvF8kP4rE9AXgeObHm9MnzuCpJcCbwHOi4jv9Kd52fJ+SRaX9ntIlFmF5KVU5tLSG9ZfZQL67cCJkk6QdBRwEbC9eQdJpwO/TxLMH+5/M49UlHdzCsasOrzS13C1DegRcQC4DLgZ+DLwkYi4S9I7JJ2X7nY18ATgzyTdIWl7zuH6IuuXpJVTMGaj55W+hkuRUbxqGObn52NhYaHr1zePcsl7BwLu3/qKrs9hZqPhUWz5JO2IiPmsbWM7U7R5jPkZW2/JHOfqIVFm48ezSrs31rVcGpynM6sPj2Lr3tj20Jt5mr9ZfXSzgLslahHQwdP8zepiw+yMU6hdqkXKxczqwynU7tWmh25m9eAUavcc0M2scpxC7Y5TLmZmNeGAbmZWEw7oZmY14YBuZlYTY3VT1PUdzMzyjU1Ad30HM7NiY5NycX0HM7NiYxPQXd/BzKzY2AT0vDoOru9gZpYYm4Du+g5mZsXG5qao6zuYmRUbm4AOru9gZlZkbFIuZmZWzAHdzKwmHNDNzGrCAd3MrCZK3RSVdA7wHmAKeH9EbG3Zvgb4Y+DZwDeB10TEA/1tqpmNg0bNpcWl/UxJHIxgrmVUWt4+Lzp5Pbfes7fUSLbm2k5PmplGgqV9y5mv63cdqG6PN+h6VIqI4h2kKeArwFnAHuB24OKIuLtpn/8JPCMi/oeki4AfiYjXFB13fn4+FhYWem2/mVVIa82lZjPTU1x14akAufvkvaY16BWdp/V1WfvmHbeMbo/Xr3ZI2hER81nbyqRcngvsioj7IuJx4Abg/JZ9zgf+b/rzR4GXSFLpFppZLWTVXGpo1F4q2ifvNZ2cp/V1/a4D1e3xhlGPqkxAnwN2Nz3ekz6XuU9EHAAeBZ7SeiBJl0pakLSwd+/e7lpsZpXVrrbSg0v7O66/lLV/mWM09ul3HahujzeMelRDvSkaEddGxHxEzK9fv36YpzazIWh
XW2nD7EzH9Zey9i9zjMY+/a4D1e3xhlGPqkxAXwSOb3q8MX0ucx9Jq4EnkdwcNbMJklVzqaFRe6lon7zXdHKe1tf1uw5Ut8cbRj2qMqNcbgdOlHQCSeC+CHhtyz7bgZ8APge8Grgl2t1tNbPaaa65VDTKJW+fsqNcWms7FY1y6XcdqG6PN4x6VG1HuQBIOhd4N8mwxQ9ExG9KegewEBHbJR0NfBA4HfgWcFFE3Fd0TI9yMTPrXNEol1Lj0CPiJuCmlufe2vTzt4Ef7aWRZmbWG88UNTOrCQd0M7OacEA3M6sJB3Qzs5ooNcplICeW9gJfbXrqWOAbI2lM98atzePWXhi/Nru9gzdube53e58aEZkzM0cW0FtJWsgbilNV49bmcWsvjF+b3d7BG7c2D7O9TrmYmdWEA7qZWU1UKaBfO+oGdGHc2jxu7YXxa7PbO3jj1uahtbcyOXQzM+tNlXroZmbWAwd0M7OaqERAl3SOpHsl7ZK0ZdTtaSXpeEm3Srpb0l2S3pg+/2RJfyXpn9L/HjPqtjaTNCVpp6RPpo9PkHRbep0/LOmoUbexmaRZSR+VdI+kL0t6QZWvsaSfT38fviTpeklHV+0aS/qApIclfanpucxrqsTvpm3/gqRnVajNV6e/F1+Q9OeSZpu2XZG2+V5JZ1ehvU3b3iwpJB2bPh7oNR55QE8Xob4GeDnwNOBiSU8bbatWOAC8OSKeBjwf+Nm0jVuAv4mIE4G/SR9XyRuBLzc9fifwroj4j8AjwCUjaVW+9wCfjoiTgWeStL2S11jSHPBzwHxEPJ2ktPRFVO8aXwec0/Jc3jV9OXBi+u9S4L1DamOr61jZ5r8Cnh4RzyBZtP4KgPRzeBFwSvqa30tjyjBdx8r2Iul44GXA15qeHuw1joiR/gNeANzc9PgK4IpRt6tNmz8BnAXcCxyXPncccO+o29bUxo0kH9YXA58ERDJbbXXWdR/1P5JVru4nvVHf9Hwlr7yx4c4AAALeSURBVDHfXUf3ySRlqD8JnF3FawxsBr7U7poCvw9cnLXfqNvcsu1HgA+lPx8RL4CbgRdUob3AR0k6Jg8Axw7jGo+8h065RagrQ9JmkoU8bgO+NyIeSjd9HfjeETUry7uBXwIOpY+fAixFsog3VO86nwDsBf4oTRO9X9I6KnqNI2IR+C2S3tdDJAuj76Da17gh75qOy2fxp4BPpT9Xss2SzgcWI+LOlk0DbW8VAvrYkPQE4GPAmyLiX5u3RfJ1W4kxoJJeCTwcETtG3ZYOrAaeBbw3Ik4HHqMlvVKxa3wMcD7JF9EGYB0Zf3ZXXZWuaRmS3kKSAv3QqNuSR9Ja4FeAt7bbt9+qENDLLEI9cpKmSYL5hyLi4+nT/yLpuHT7ccDDo2pfizOA8yQ9ANxAknZ5DzCbLuIN1bvOe4A9EXFb+vijJAG+qtf4pcD9EbE3IpaBj5Nc9ypf44a8a1rpz6Kk1wOvBH4s/SKCarb5B0i+6O9MP4MbgX+U9H0MuL1VCOiHF6FORwRcRLLodGVIEvCHwJcj4neaNjUWxyb97yeG3bYsEXFFRGyMiM0k1/OWiPgx4FaSRbyhQu0FiIivA7slNZZAfwlwNxW9xiSpludLWpv+fjTaW9lr3CTvmm4H/ns6EuP5wKNNqZmRknQOSQrxvIjY17RpO3CRpDVKFrI/EfiHUbSxISK+GBHfExGb08/gHuBZ6e/4YK/xKG54ZNxQOJfkzvU/A28ZdXsy2veDJH+WfgG4I/13Lkle+m+AfwL+GnjyqNua0fYzgU+mP38/yS/7LuDPgDWjbl9LW08DFtLrvA04psrXGHg7cA/wJZJF0tdU7RoD15Pk+JdJAssledeU5Mb5Nenn8IskI3iq0uZdJLnnxufvfU37vyVt873Ay6vQ3pbtD/Ddm6IDvcae+m9mVhNVSLmYmVkfOKCbmdWEA7qZWU04oJuZ1YQDuplZTTigm5nVhAO6mVlN/H+kdrVI+Iu3mgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.title(\"text length vs STD\")\n", + "plt.scatter(list(text_vs_std.keys()), list(text_vs_std.values()))" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEICAYAAACktLTqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3df5xcdX3v8dc7mwUX8LIgW4QlkVQpCCIJbBUb2wL+iICVFBWwatHSm95efVRaSg3aW/FHH8SiYn3cFi+KgooCFRop0CIl2F65oiaG35AaJZisAYKygGaFTfK5f5wzydnJObuzs/PjzMz7+XjsY2fOOXPmu2dmP/Od7/fz/X4VEZiZWXeZ0+4CmJlZ4zm4m5l1IQd3M7Mu5OBuZtaFHNzNzLqQg7uZWRdycLeGk3SCpE1teu4LJX2lHc89U5L+VdLZ7S6HdScH9x4haYOk15btXLMsR9s+RIpI+jtJy9LbGyTtW3RsRJwcEVfO8vneJenbszmHdScHd7PGOg5YLWkImIiIp9pdIOtNDu49QNKXgfnAv0j6haS/SrcfL+n/SRqTdLekE9LtvyXpCUnz0vvHSHpS0hFF55rm+Q+WdJ2kLZIelvRnmX0XSrpW0pckPSPpfkkjmf3HSlqb7vsnSddI+pikvYF/BQ5Oy/ELSQenD9uj6HxV5bpU0ieqtn1D0l+kt98vaTQ9zzpJr5nm7xRwFHAfMAKsneb4b0n64/T2uyR9W9In0mv9sKSTM8e+S9KP07I8LOntkl4KfBZ4Vfr3j6XHnppes6clbZR0YeY8h0oKSWdL+kn6On8ws79P0gck/Sh9rjWZ98ERkm6V9PP0epyRedwpkh5IHzMq6S+n+tutBSLCPz3wA2wAXpu5Pwz8DDiF5EP+den9oXT/3wKrgAHgXuC9RefKea4TgE3p7TnAGuBvgD2AXwd+DCxJ918I/CotRx9wEXBnum8P4BHgfUA/cDrwHPCx6ufJPHfh+XLK+TvARkDp/f2AceBg4PB038HpvkOBFxec5zBgDHga2Jbe/lV6rjHgnQWP+xbwx+ntdwETwH9Py/2nwE8BAXun5z48PfYg4KjM476dc/2PTq/9y4HHgKWZvyOAz6Wv7THAs8BL0/3np6/34elzHwO8IC3DRuDdwFxgEfAEcGT6uM3Ab2eu47Htfs/3+o9r7r3rHcDNEXFzROyIiFuB1SRBEZIguS/wPWAU+Ic6n+c3ST4wPhIRz0XEj0kCy1mZY76dlmM78GWSgAJwPEkg+UxETETE9Wl5plN0vmr/lyTQ/XZ6/y3AdyLip8B2YE/gSEn9EbEhIn6Ud5KI+GFEDAKfAc4jCW7/BbwkIgYj4ss1lBngkYj4XFruK0mC+IHpvh3AyyQNRMTmiLi/6CQR8a2IuDd9Xe8Bvgb8btVhH46I8Yi4G7ibXdfoj4G/joh1kbg7In4GvBHYEBFfjIhtEbEWuA54a/q4ifRa/beIeDIiflDj32xN4uDeu14EvDVtkhlLv9K/miSgEBETwBXAy4BPRkS9M8y9iKTpJPs8H2BX0AJ4NHN7K/A8SXNJatCjVc+9sYbnLDrfJOl5rwbelm76A+CqdN964FySD7nHJV2dafaZpNK0BVwAfISklv1S4H5JX6+hvLuVOyK2pjf3iYhfAmcC/wPYLOkmSUcUnUTSKyXdnjaDPZU+7oCi5yK5Rvukt+cBeR9iLwJeWfU6vh14Ybr/zSQVg0ck/YekV9XyB1vzOLj3jurgvBH4clqzrPzsHRErACQNAx8Cvgh8UtKeU5xrKhuBh6ue5/kRccq0j0y+6g+nbdkV8+osR5GvAW+R9CLglSS10eTkEV+NiFeTBLYAPp53goj4LeAI4IcRsS/w18DH07/1LQ0oIxFxS0S8juTD9yGSbz+Qfw2+CtwAzEvL81mSJpZabAReXLD9P6pex30i4k/T8n0/Ik4Dfg1YCVxb699mzeHg3jseI2nvrvgK8HuSlqSdaM9Tklp4SBpMrwAuB84hCbIfneJcU/ke8EzaOTmQPtfLJP1mDY/9DknzyHslzZV0GvCKqnK8QFOkG04nbV54Avg8cEtEVDolD5d0UvqhVmk/3zHFqY5jVwfqsSRNXA0h6UBJp6WdyM8Cv8iU5THgEEl7ZB7yfODnEfErSa8g+UZSq88DH5V0mBIvl/QC4EbgNyS9U1J/+vObkl4qaY+0g3ff9Bvf00x9rawFHNx7x0XAX6dfqf8yIjYCp5E0kWwhqZmdT/Ke+DOSGtj/Spsu3g28W9Jv551rqidN24/fCCwEHmZXIJ02IEfEcySdqOeQdEy+gyTIPJvuf4ik5v3jtCy5zSY1+Crw2vR3xZ7AirS8j5JcjwumOMdxQKWd+ViSTuRGmQP8BUkH689J2s//NN23CrgfeFTSE+m2/wl8RNIzJB3ZM6lFfyo9/pskQfpyYCAingFeT9JX8lOSa/JxkusE8E5gg6SnSZqB3j7zP9MaqZIlYNYRJH0X+GxEfLHdZTErM9fcrdQk/a6kF6bNMmeTpPb9W7vLZVZ2u2UQmJXM4STNBHuT5Me/JSI2t7dIZuU3bbOMpOcB/0nStjYX+HpEfEjSApI0sheQtC++MyKeSzugvkTSBvkz4MyI2NC8P8HMzKrV0izzLHBSRBxD0in2BknHk3SmXBIRLwGeJOn0Iv39ZLr9EgrSx8zMrHlm1KEqaS/g2yQ99TcBL4yIbemAhQsjYomkW9Lb30kHjjxKMkKx8IkOOOCAOPTQQ2fzd5iZ9Zw1a9Y8ERFDeftqanOX1EfS9PISkmHoPwLGImJbesgmkrlKSH9vBEgD/1MkTTdPVJ1zGbAMYP78+axe3bC0YDOzniDpkaJ9NWXLRMT2iFgIHEIyiKRw6HOtIuKyiBiJiJGhodwPHjMzq9OMUiHT0Xu3A68CBjPzdRxCMrkU6e/KFKFzSQar/KwhpTUzs5pMG9wlDUkaTG8PkEwN+yBJkK/Mm3E28I309g3pfdL9q2Yx6ZSZmdWhljb3g4Ar03b3OcC1EXGjp
AeAqyV9jGROjcvT4y8HvixpPclQ6bPyTmpmZs0zbXBP54NelLP9x0yexKmy/VfsmuPZzMzawCNUbaeVa0e5+JZ1/HRsnIMHBzh/yeEsXTQ8/QPNrHQc3A1IAvsF19/L+MR2AEbHxrng+nsBHODNOpCDe48pqp1ffMu6nYG9YnxiOxffss7B3awDObj3iJVrR/nwv9zPk1sndm7L1s5/Ojae+7ii7WZWbp7ytwdUmlyygb2iUjs/eHAg97FF282s3Bzce0Bek0vWT8fGOX/J4Qz0903aPtDfx/lLDm928cysCdws06UqbeujNTSrzEnXn77o9KOdLWPWJRzcu1B15st0tkdwwfX3ctHpR3PH8pMaWg5/WJi1h5tlutB0zTB5Km3vjVL5gBkdGyfY1Xm7cu3otI81s9lzcO9C9Wa4NDIzZqrUSjNrPgf3LjRdhktf2sY+08fNhFMrzdrLwb0L5WW+VAz09/G2V85remaMUyvN2svBvQstXTTMRacfzXAaSCs19eHBAS46/Wg+tvTonfuV2d7Izs6iD5hfPrvN7e5mLTCjNVSbZWRkJLzMXnnMNsslm4Y5R7Cj6i020N+324eJM2vMZk7SmogYydvnmrtNMtssl+zjYffADrt3rDqzxqzxnOduk0w3gVh1DfvEI4a4/aEtO+9vfW5bTWmY2Y5VT1pm1ngO7jZJUTbL6Ng4iz7yzd0mHvvKnT+ZdL9W2Y5VZ9aYNZ6bZWySomwWQe7EY/WozsxxZo1Z4zm42yRFWS6z6Xbv7xODA/2FmTmetMys8dwsY5NUgu6FN9zP2Hh9NfXBgX723nNuzZkvlX3OljFrHKdCdpFGphMuXrFqRm3oFXlpjmbWHE6F7AGNTiespTNzcKCfdxw/f9JgqUqWi9MYzdrLwb0LrFw7ynnX3t3Qibqm6swcHhzg02cu5K4PvZ6PLT16Z5v59vRboPPUzdrPbe4drlJj317QvFZLDTyvOef8JYfvNid8UZOL89TNysc19w433dzt06UTFjXnADXPP+M8dbPycc29w00VQGtJJ5yq1n3H8pNqqnkfPDiQ2/nqPHWz9pm25i5pnqTbJT0g6X5J70u3XyhpVNJd6c8pmcdcIGm9pHWSljTzD+h1RQG0T6opa6URtW7nqZuVTy01923AeRHxA0nPB9ZIujXdd0lEfCJ7sKQjgbOAo4CDgX+X9BsRMbN132ySvHZxgK3Pbdvt2JmkIzai1u08dbPymTa4R8RmYHN6+xlJDwJT/deeBlwdEc8CD0taD7wC+E4DytuTqhe8Hh0b5/x/uhsEE9snd6QODvRz4ZuOqjmwFnWczrTWvXTRsIO5WYnMqENV0qHAIuC76ab3SrpH0hck7ZduGwY2Zh62iZwPA0nLJK2WtHrLli0zLngvyWsXn9gRuwV2gL33nDujIJtd2KNZC3eYWevV3KEqaR/gOuDciHha0qXAR0mmHfko8Engj2o9X0RcBlwGyQjVmRS618yk/bueDBXXus26T03BXVI/SWC/KiKuB4iIxzL7PwfcmN4dBeZlHn5Ius3qsHLtKHOkwjz2ap2QoeJVl8yar5ZsGQGXAw9GxKcy2w/KHPb7wH3p7RuAsyTtKWkBcBjwvcYVuXdMNUCpf47o79OkbZ2QoeJVl8xao5aa+2LgncC9ku5Kt30AeJukhSTNMhuAPwGIiPslXQs8QJJp8x5nytSnaIBSn8TFbz1m5zGdVAMuyqs/95q7uPiWdTtXdhodG6cv/cYy3CF/m1mZeFbIEluw/KbCedSHBwc6KqhXTPU3TcWzTZrtzrNCdqipVkVqd7PGyrWjLF6xigXLb2LxilU1PX+l/6Aes5kEzawXObiXWN7IT7H7qkitDnz1tJtPN8FZLTxXjVntHNxLLC8HvSg0tjLwTTUfzUweM1OdkAlkVhaeOKxk8tIE71h+0s79RSsktTLw1TMfzWw/fDohE8isTFxzL5FamjvKMElX0QfJVB8wRfsGB/onfTOpXtkJPGrWrB6uuZdILYtelGGSrnrmoyl6zEzmwTGz2jm4l0itzR3tni6gng+YMnwomfUSB/cS6aRFL+r5gGnWh5KnMzDbndvcS6QM7emdxtMZmOVzzb1E3HRRm2xNPW9SNS/ObebgXjrtbk8vu+qFS4oGRXnAk/U6N8tYR6l1MFQZ+ynMWsnB3TpKLTVy91OYObhbh6mlRu4BT2YO7qVVz6yLvSAvoyhreHDAgd0Md6iWUnWnYSW9D+jJwFWdx/7m44a58e7NjI1PTDrOzTFmu7jmXkL1zLrYrfLy2K9bM8qFbzqKT5+5cNK8NG6OMdvFNfcSqmfWxW411QfdHctPcjA3K+DgXkKdNA1Bs9X7QecpCazXuVmmhDwNwS5FH2hzpMJOZk9JYObgXkp5KzD1antyUXbM9ojCgO0+CzM3y5SWpyFIVK7BedfeXdMcMivXjuY2aUFv9llY73LN3Upv6aJhdtQwh0ylOaZIL/ZZWO9ycLeOUMvSflPNO9OrfRbWuxzcrSPU0sk8VbNLr/ZZWO+aNrhLmifpdkkPSLpf0vvS7ftLulXSD9Pf+6XbJekzktZLukfSsc3+I6z71dLJXFS795QE1otq6VDdBpwXET+Q9HxgjaRbgXcBt0XECknLgeXA+4GTgcPSn1cCl6a/zWZluk7mehbuNutW09bcI2JzRPwgvf0M8CAwDJwGXJkediWwNL19GvClSNwJDEo6qOElN6viFFKzXWaUCinpUGAR8F3gwIjYnO56FDgwvT0MbMw8bFO6bXNmG5KWAcsA5s+fP8Nim+VzCqlZouYOVUn7ANcB50bE09l9ERFAfq5agYi4LCJGImJkaGhoJg81M7Np1BTcJfWTBParIuL6dPNjleaW9Pfj6fZRYF7m4Yek28zMrEVqyZYRcDnwYER8KrPrBuDs9PbZwDcy2/8wzZo5Hngq03xjZmYtUEub+2LgncC9ku5Kt30AWAFcK+kc4BHgjHTfzcApwHpgK/DuhpbYzMymNW1wj4hvAyrY/Zqc4wN4zyzLZdYQnvrXepUnDmsjB57m8nKF1ss8/UCbeM7x5vPUv9bLHNzbpCjwnHvNXSxescpBvgG8XKH1Mgf3NpkqwLgW3xi1zCRp1q0c3NtkugDj5oPZ83KF1ssc3NukaPm4LDcfzI7nmrFe5myZNqkEmItvWVe4LJybD2bPc81Yr3LNvY2WLhrmjuUn8ekzF7r5wMwayjX3EsjW4p3z3hweU2C9xsG9JNx80DwezGS9yMG9DVyLbK2pBjP5ulu3cnBvMdciW8+DmawXuUO1xTwkvvU8mMl6kYN7i7kW2XpFg5lOPGKIxStWsWD5TZ7ywbqOm2Va7ODBgdy8dtcimycvG+nEI4a4bs2om8esazm4t9j5Sw6f1OYOzmlvhepspMUrVrmT1bqag3sLVGfHvPm4YW5/aIuzZdpk5drRwlHBbh6zbuHg3mR52THXrRn1HCdtUnk9isyRWLD8Jn/oWsdzh2qTOTumXPJej6ztEV48xbqCg3uTOTumXGZy3f0hbJ3Mwb3JnGNdLjO97v4Q
tk7l4N5kXjCiXIpej8GB/tzj/SFsncodqk1WPW97nzTp67477FqrKOf9xrs373Zs9kPY8wFZp3Fwb4FKEPCcMuWQzXmvzmaq2G+vfj70e0exdNGw5wOyjuRmmRZx1kw5FWXP7LXH3Em1fL921mkc3FvEWTPlVMvr4tfOOtG0wV3SFyQ9Lum+zLYLJY1Kuiv9OSWz7wJJ6yWtk7SkWQXvNM6aKadaXhe/dtaJaqm5XwG8IWf7JRGxMP25GUDSkcBZwFHpY/5RUl/OY7veyrWjk2YcPPGIIWfNlFAt2UzOeLJONG1wj4j/BH5e4/lOA66OiGcj4mFgPfCKWZSvI1U64EbHxneOdrxuzShvPm6Y4cEBBAwPDngKghJYumiYi04/esrXpZZjzMpmNtky75X0h8Bq4LyIeBIYBu7MHLMp3bYbScuAZQDz58+fRTHKp6gD7vaHtnDH8pPaVCorUj1jZOVbV3Xao4O5dZJ6O1QvBV4MLAQ2A5+c6Qki4rKIGImIkaGhoTqLUU7ugOtced+6PMeMdaK6gntEPBYR2yNiB/A5djW9jALzMocekm7rKe6A61xOe7RuUVdwl3RQ5u7vA5VMmhuAsyTtKWkBcBjwvdkVsfO4A65z+VuXdYtp29wlfQ04AThA0ibgQ8AJkhYCAWwA/gQgIu6XdC3wALANeE9EFM+v2qXyhrh7uHpn8DKI1i0UEe0uAyMjI7F69ep2F8MsdzqCgf4+Z8dYKUlaExEjefs8t4xZhr91WbdwcDer4rRH6wYO7mZ18BTAVnYO7mYz5CmArRN4VkizGXIuvHUCB/cGq54wzCMbu09Rzvvo2LhfcysNB/cG8tD13jBVzrtfcysLB/cG8tf13pA3AjnLr7mVgTtUG8hD17tPdVbMiUcMcftDWxif2E6fxPaCQYB+za3dHNwbyEPXu0teVsxX7vzJzv3bIxDJHBzV/Jpbu7lZpoE8YVh3KVo8OysAVW3za25l4Jp7A3noeneptWklSFZn8mtuZeLg3mAeut49iprZqg0PDuSusOVRrNZObpYxKzBdVgwUN8E4LdbazcG9QTx4qfvkLYz9juPn17RQttNird3cLNMAnmuke9XbzOa0WGs319wbwLU0q+Z1dK3dHNwbwLU0q+a0WGs3B/cGcC3NquW113upPmslt7nXKZvmtu9AP/19YmL7rrGKrqX1ruoUyEvOXOigbi3n4F6H6g7UsfEJ+ueI/fbqZ2zrhHOae5g7160sHNzrkNeBOrEj2GuPuaz9m9e3qVRWBlN1rju4Wyu5zb0O7kC1In5vWFk4uNfBHahWxO8NKwsH9zo4zc3yrFw7yi+f3bbbdr83rB2mDe6SviDpcUn3ZbbtL+lWST9Mf++Xbpekz0haL+keScc2s/Dt4jQ3q1bpSB0bn5i0fb+9+v3esLaopUP1CuB/A1/KbFsO3BYRKyQtT++/HzgZOCz9eSVwafq763j2R8sqmvt9rz3m+n1ibTFtzT0i/hP4edXm04Ar09tXAksz278UiTuBQUkHNaqwZmXljlQrm3pTIQ+MiM3p7UeBA9Pbw8DGzHGb0m2bqSJpGbAMYP78+XUWo/08Z7eBl1i08pl1h2pEBPnLSE73uMsiYiQiRoaGhmZbjLbwnN1W4U52K5t6g/tjleaW9Pfj6fZRYF7muEPSbV3Js0FahTvZrWzqbZa5ATgbWJH+/kZm+3slXU3SkfpUpvmm67id1bKKOtnddGftMG1wl/Q14ATgAEmbgA+RBPVrJZ0DPAKckR5+M3AKsB7YCry7CWUuDbez2nQ814y1y7TBPSLeVrDrNTnHBvCe2RaqU5y/5PBJ/7jgdlabzHPNWLt44rBZqPxz+iu3FXHTnbWLg/sseTCTTcVNd9YunltmBlauHWXxilUsWH4Ti1escsqjTcspktYuDu41ck671aM6RXJwoJ/n9c/hz6+5yxUEayoH9xo5p93qtXTRMHcsP4lLzlzIs9t28OTWCVcQrOkc3GvkjjGbLVcQrJUc3GvkRRhstlxBsFZycK+RO8ZstooqAnMkd9JbwzkVcgrVw8bffNwwtz+0xTntVpe8QW8A2yOZd8+jV62RHNwL5A0bv27NqCeDsrpVD3qbI+0M7BUevWqN4maZAu78smaoZM48vOJUdkT+TNlug7dGcHAv4M4vazZ30lszObgX8D+eNdPKtaNsfW7bbtvdSW+N4uBewNkx1iyV/pwnt05M2j440O8+HWsYd6gW8IyP1ix5/TkAe+85d9L7y4t82GwoCjp1WmlkZCRWr17d7mKYtcSC5TcVLjoskqa/E48Y4ro1o7utFeCavWVJWhMRI3n73Cxj1mJT9dtU5py56s6fOFvLZsXB3azF8vpzqhXV7J2tZbVym7tZi1X358ykYdTZWlYrB3ezNsiu4LV4xarc1ZqqOVvLZsLNMjm84pK1Ui3NNIA7U21GXHOvkjenjCdzsmbKNtMU1eCHBwf8/rMZcc29iueUsXaozDnz6TMXevCcNYRr7lU8p4y1Uy2D5zy4yWrh4F7l4MGB3K/GzlKwVsl2tlZzs6HVysG9St6CCv1zxNbntrFg+U2uKVnLZWvqngPeajWr4C5pA/AMsB3YFhEjkvYHrgEOBTYAZ0TEk7MrZutUfy3ed6CfXz63beckT64pWStV19SrA3uFmw2tWiM6VE+MiIWZ+Q2WA7dFxGHAben90sumP158yzrOX3I4D684lb33nMvE9vyaklmzFU0yVs3NhlatGc0ypwEnpLevBL4FvL8Jz9MwU7VjuoPV2qmW95mzaSzPbGvuAXxT0hpJy9JtB0bE5vT2o8CBeQ+UtEzSakmrt2zZMstizM5U6Y9etMPaabr32fDggAc3Wa7ZBvdXR8SxwMnAeyT9TnZnJPMJ5zYSRsRlETESESNDQ0OzLMbsTFU796Id1k5TjV6tvA8d2C3PrJplImI0/f24pH8GXgE8JumgiNgs6SDg8QaUs6mmSn/0oh3WTpX32XnX3p2bJXPhDff7vWm56l6sQ9LewJyIeCa9fSvwEeA1wM8iYoWk5cD+EfFXU52r3Yt1VLe5gxdGsHKZaoGPrP45Yp/nzWVs64SDfQ+YarGO2dTcDwT+WVLlPF+NiH+T9H3gWknnAI8AZ8ziOVrCtXMru6Jvl9UmdoTTdg3wMntmHSHv22WthgcHuGP5SU0olbVbs2ruZtYied8ut2YG103Fabu9ycHdrENUzzlTa23eabu9qeeDu2fYs05VNFVGdkS103Z7V88F92wwr/5ncAeUdZpKbb7yvh4bn6AvnVxs2JWVntZTi3VUvsaOposSj41PeN4Y63jZ9zUkk4t5gJP1VHCvdRImd0BZJ/HqYZanp4J7rUHbHVDWSTy5neXpqeBeS9B2B5R1Gk9uZ3l6JrivXDvK1ue2TXlMn+QpB6zj5E0ull09bPGKVaxcO9qm0lm79ES2TK35wDsiHNit43j1MMvTE9MPLF6xqqZ5OTxM27rBVO/34cEBTjxiiNsf2uKxHV1gqukHeqJZxqvZWC+Z6v0+OjbOV+78yc504NGxcc695i4
WfvibbrrpMj0R3Is6lvokhFezse5ST0fq2PgEF1x/rwN8F+mJ4F60mtInzziGh1ecyh3LT3Jgt64x1epNU3FufHfpiQ5Vz9duvST7fq+lrynLufHdoyc6VM161Uznge+T+OQZx7ji0yF6bj53z/RolphqDdY82yOcNtkluq7mXlRTGRzo58I3HeU3rPWkvP+L/jnJ7JE7ckLAfnv1s9cec11BKrmeSoUsmhzM2QDWy5YuGuai049meHBgZ4bYxW89hqK63ZNbJyalS2b/d1auHWXxilUe/VpyXdcsM1WHUCUbwDUQ60XVKzlB7Z2u2Uya7DcAj34tr66ruU+X4+tsALNdZpI2WRnwlDe98LnX3MWhrsmXStcF9+nerJ4pz2yXbHNNI4yOjXP+1+9m4Ye/6WabNuu64F55s+63V/9u+zzFgNnuli4a5o7lJ6EGnW9iezA2PpHbXm+t0xVt7pXUx9Gx8UnrR5768oM8QZJZjQ4eHJjxoKdazLSvy6nMjdGxqZDZgC4g768Y6O/znDFmNZrpgKeZEPDwilMLK2KVAJ6bstkn9t5jLk+NTzjYV5kqFbJpwV3SG4C/B/qAz0fEiqJjZxrcZ/Im9DS+ZrXL1poH9+rnF7/axkROInx/nyDI3VdkjsjNqQcKK2h58ipt2XLvO9CPBGNb8z8MGv3NoN7zNaIcLR+hKqkP+AfgdcAm4PuSboiIBxpx/loXugZnx5jNRHW65FQ1bZjZ/DVTfQ7MpIpZ3cxTXdkbG5/YeWx1qmb1sbNN5az3fI0uR55mtbm/AlgfET8GkHQ1cBrQkOA+k4Dt7Biz+uXlxlfvh+Y26eTJxoDpKnvZD4O8Y2cz/qXe8zW6HHmalS0zDGzM3N+UbmuIWgO2s2PMWiNvBGwzZWNALZW9yjFFx9b7Db/e8zW6HHnalgopaZmk1ZJWb9myZUaPzctlr6Rx9Sm55QU4zFqrkkFBxWIAAAWuSURBVFJZWSOhWQG+utJWS2WvckzRsfV+w6/3fI0uR55mBfdRYF7m/iHptp0i4rKIGImIkaGhoRmdPK+WcMmZC9mw4lR+dNEpbPACHGZtN92AQlX9rsXgQP9ulbbpnif7YVC0cE+93/DrPV+jy5GnWW3u3wcOk7SAJKifBfxBI59gurZAM2uv6kVD8lIfobjTttaFvKsX45kqW6bRC/fUe75WLCDUzFTIU4BPk6RCfiEi/rboWC/WYWY2c21ZrCMibgZubtb5zcysWNfNLWNmZg7uZmZdycHdzKwLObibmXWhUswKKWkL8Ehm0wHAE20qTr06rcydVl7ovDK7vM3XaWVudHlfFBG5A4VKEdyrSVpdlN5TVp1W5k4rL3RemV3e5uu0MreyvG6WMTPrQg7uZmZdqKzB/bJ2F6AOnVbmTisvdF6ZXd7m67Qyt6y8pWxzNzOz2Slrzd3MzGbBwd3MrAuVLrhLeoOkdZLWS1re7vJUkzRP0u2SHpB0v6T3pdv3l3SrpB+mv/drd1mzJPVJWivpxvT+AknfTa/zNZL2aHcZsyQNSvq6pIckPSjpVWW+xpL+PH0/3Cfpa5KeV7ZrLOkLkh6XdF9mW+41VeIzadnvkXRsicp8cfq+uEfSP0sazOy7IC3zOklLylDezL7zJIWkA9L7Tb3GpQrumYW1TwaOBN4m6cj2lmo324DzIuJI4HjgPWkZlwO3RcRhwG3p/TJ5H/Bg5v7HgUsi4iXAk8A5bSlVsb8H/i0ijgCOISl7Ka+xpGHgz4CRiHgZyTTXZ1G+a3wF8IaqbUXX9GTgsPRnGXBpi8pY7Qp2L/OtwMsi4uXAfwEXAKT/h2cBR6WP+cc0prTSFexeXiTNA14P/CSzubnXOCJK8wO8Crglc/8C4IJ2l2uaMn8DeB2wDjgo3XYQsK7dZcuU8RCSf9yTgBtJFr95Apibd93b/QPsCzxM2uGf2V7Ka8yuNYP3J5lG+0ZgSRmvMXAocN901xT4P8Db8o5rd5mr9v0+cFV6e1K8AG4BXlWG8gJfJ6mkbAAOaMU1LlXNnSYvrN1okg4FFgHfBQ6MiM3prkeBA9tUrDyfBv4K2JHefwEwFhHb0vtlu84LgC3AF9OmpM9L2puSXuOIGAU+QVIr2ww8Bayh3Ne4ouiadsr/4h8B/5reLmWZJZ0GjEbE3VW7mlresgX3jiFpH+A64NyIeDq7L5KP4VLkmEp6I/B4RKxpd1lmYC5wLHBpRCwCfklVE0zJrvF+wGkkH0oHA3uT89W87Mp0TWsh6YMkzaRXtbssRSTtBXwA+JtWP3fZgvu0C2uXgaR+ksB+VURcn25+TNJB6f6DgMfbVb4qi4E3SdoAXE3SNPP3wKCkykpcZbvOm4BNEfHd9P7XSYJ9Wa/xa4GHI2JLREwA15Nc9zJf44qia1rq/0VJ7wLeCLw9/VCCcpb5xSQf+nen/4OHAD+Q9EKaXN6yBfedC2unmQVnATe0uUyTSBJwOfBgRHwqs+sG4Oz09tkkbfFtFxEXRMQhEXEoyfVcFRFvB24H3pIeVpryAkTEo8BGSZWl4F8DPEBJrzFJc8zxkvZK3x+V8pb2GmcUXdMbgD9MMzqOB57KNN+0laQ3kDQzvikitmZ23QCcJWlPSQtIOiq/144yVkTEvRHxaxFxaPo/uAk4Nn2PN/cat6ODZJrOiFNIesB/BHyw3eXJKd+rSb663gPclf6cQtKOfRvwQ+Dfgf3bXdacsp8A3Jje/nWSN/564J+APdtdvqqyLgRWp9d5JbBfma8x8GHgIeA+4MvAnmW7xsDXSPoEJkiCzDlF15Sk0/0f0v/De0kygcpS5vUkbdWV/7/PZo7/YFrmdcDJZShv1f4N7OpQbeo19vQDZmZdqGzNMmZm1gAO7mZmXcjB3cysCzm4m5l1IQd3M7Mu5OBuZtaFHNzNzLrQ/wfXr4XGNVPn1wAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.title(\"text length vs # instances\")\n", + "plt.scatter(list(text_len_counter.keys()), list(text_len_counter.values()))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "Collapsed": "false" + }, + "source": [ + "### Check words frequencies" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [ + "w_count_df = pd.DataFrame.from_dict(w_count, orient='index')\n", + "w_count_df.sort_values(0, ascending=False, inplace=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "Collapsed": "false", + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
0
die3066
der2362
das1794
ist1767
nicht1467
......
wertvollsten,1
blutgruppe1
gelenkschmerzen1
entgeltbefreiung1
anrã¼cken.1
\n", + "

27102 rows × 1 columns

\n", + "
" + ], + "text/plain": [ + " 0\n", + "die 3066\n", + "der 2362\n", + "das 1794\n", + "ist 1767\n", + "nicht 1467\n", + "... ...\n", + "wertvollsten, 1\n", + "blutgruppe 1\n", + "gelenkschmerzen 1\n", + "entgeltbefreiung 1\n", + "anrã¼cken. 1\n", + "\n", + "[27102 rows x 1 columns]" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "w_count_df" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "18" + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# check a certain word\n", + "w_count_df.at['auto', 0]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "Collapsed": "false" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/erogol/miniconda3/lib/python3.7/site-packages/matplotlib-3.2.0rc3-py3.7-linux-x86_64.egg/matplotlib/backends/backend_agg.py:214: RuntimeWarning: Glyph 159 missing from current font.\n", + " font.set_text(s, 0.0, flags=flags)\n", + "/home/erogol/miniconda3/lib/python3.7/site-packages/matplotlib-3.2.0rc3-py3.7-linux-x86_64.egg/matplotlib/backends/backend_agg.py:214: RuntimeWarning: Glyph 156 missing from current font.\n", + " font.set_text(s, 0.0, flags=flags)\n", + "/home/erogol/miniconda3/lib/python3.7/site-packages/matplotlib-3.2.0rc3-py3.7-linux-x86_64.egg/matplotlib/backends/backend_agg.py:183: RuntimeWarning: Glyph 159 missing from current font.\n", + " font.set_text(s, 0, flags=flags)\n", + "/home/erogol/miniconda3/lib/python3.7/site-packages/matplotlib-3.2.0rc3-py3.7-linux-x86_64.egg/matplotlib/backends/backend_agg.py:183: RuntimeWarning: Glyph 156 missing from current font.\n", + " font.set_text(s, 0, flags=flags)\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAG5CAYAAACDRzPnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nOy9e7hdVXX3/xm5h6tcAkKCXCOCFblEtGq13kCxBa0Vwb6Vqr9ivdVq1R+2r0Xb+mp98V61pRVFCwK2KgiIIoIKKCQgJBAICYRAQkhC7vecnDPeP8aYrnV29t5nn+SEc5L9/TzPfvbea80111xzzTnGnGPMi7k7QgghupdRw50AIYQQw4sUgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5Y4Y7Ae048MAD/YgjjhjuZAghxC7FXXfd9ZS7T+o0/IhWBEcccQQzZswY7mQIIcQuhZktGEx4mYaEEKLLkSIQQoguR4pACCG6nBHtIxBCiOGip6eHhQsXsmnTpuFOSksmTJjAlClTGDt27A7FI0UghBBNWLhwIXvvvTdHHHEEZjbcydkGd2f58uUsXLiQI488cofikmlICCGasGnTJg444IARqQQAzIwDDjhgSHosUgRCCNGCkaoECkOVPikCIYQYwdxwww0ce+yxHHPMMXzmM5/ZKfeQj0AIITrgiAuuG9L4Hv3M6wcM09vby3vf+15uvPFGpkyZwgte8ALOPPNMjj/++CFNy4A9AjObYGZ3mtm9Zna/mX0yjx9pZneY2Twzu9LMxuXx8fl/Xp4/ohbXx/L4HDM7fUifRAghdjPuvPNOjjnmGI466ijGjRvHOeecw9VXXz3k9+nENLQZeKW7Px84EXitmb0I+BfgC+5+DLASeGeGfyewMo9/IcNhZscD5wDPBV4LfM3MRg/lwwghxO7EokWLOOyww373f8qUKSxatGjI7zOgIvBgXf4dmx8HXgn8dx6/FHhD/j4r/5PnX2Xh0TgLuMLdN7v7fGAecOqQPIUQQojtpiNnsZmNNrN7gKXAjcDDwCp335pBFgKT8/dk4HGAPL8aOKB+vMk1QgghGpg8eTKPP16JzYULFzJ58tCLzY4Ugbv3uvuJwBSiFf+cIU9JYmbnm9kMM5uxbNmynXUbIYQY8bzgBS9g7ty5zJ8/ny1btnDFFVdw5plnDvl9BjV81N1XATcDvw88w8zKqKMpQDFcLQIOA8jz+wLL68ebXFO/x8XuPs3dp02a1PFy2kIIsdsxZswY/vVf/5XTTz+d4447jrPPPpvnPve5Q3+fgQKY2SSgx91XmdlE4DWEA/hm4E+BK4DzgOLKvib//zrP/9zd3cyuAS43s88DhwJTgTuH+HmEEGKn0Mlwz53BGWecwRlnnLFT79HJPIJDgEtzhM8o4Cp3v9bMZgNXmNk/A78FvpHhvwF8x8zmASuIkUK4+/1mdhUwG9gKvNfde4f2cYQQQgyWARWBu88ETmpy/BGajPpx903Am1vE9SngU4NPphBCiJ2FlpgQQoguR4pACCFa4O7DnYS2DFX6pAiEEKIJEyZMYPny5SNWGZT9CCZMmLDDcWnROSGEaMKUKVNYuHAhI3k+U9mhbEeRIhBCiCaMHTt2h3f+2lWQaUgIIbocKQIhhOhypAiEEKLLkSIQQoguR4pACCG6HCkCIYTocqQIhBCiy5EiEEKILkeKQAghuhwpAiGE6HKkCIQQosuRIhBCiC5HikAIIbocKQIhhOhypAiEEKLLkSIQQoguR4pACCG6HCkCIYTocqQIhBCiy5EiEEKILmdARWBmh5nZzWY228zuN7MP5PFPmNkiM7snP2fUrvmYmc0zszlmdnrt+Gvz2Dwzu2DnPJIQQojBMKaDMFuBv3X3u81sb+AuM7sxz33B3S+qBzaz44FzgOcChwI/M7Nn5+mvAq8BFgLTzewad589FA8ihBBi+xhQEbj7YmBx/l5rZg8Ak9tcchZwhbtvBuab2Tzg1Dw3z90fATCzKzKsFIEQQgwjg/IRmNkRwEnAHXnofWY208wuMbP98thk4PHaZQvzWKvjjfc438xmmNmMZcuWDSZ5QgghtoOOFYGZ7QX8D/A37r4G+DpwNHAi0WP43FAkyN0vdvdp7j5t0qRJQxGlEEKINnTiI8DMxhJK4DJ3/z6Auy+pnf8P4Nr8uwg4rHb5lDxGm+NCCCGGiU5GDRnwDeABd/987fghtWBvBO7L39cA55jZeDM7EpgK3AlMB6aa2ZFmNo5wKF8zNI8hhBBie+mkR/AS4M+BWWZ2Tx77O+BcMzsRcOBR4F0A7n6/mV1FOIG3Au91914AM3sf8BNgNHCJu98/hM8ihBBiOzB3H+40tGTatGk+Y8aM4U6GEELsUpjZXe4+rdPwmlkshBBdjhSBEEJ0OVIEQgjR5UgRCCFElyNFIIQQXY4UgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5UgRCCNHlSBEIIUSXI0UghBBdjhSBEEJ0OVIEQgjR5UgRCCFElyNFIIQQXY4UgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5UgRCCNHlSBEIIUSXI0UghBBdzoCKwMwOM7ObzWy2md1vZh/I4/ub2Y1mNje/98vjZmZfNrN5ZjbTzE6uxXVehp9rZuftvMcSQgjRKZ30CLYCf+vuxwMvAt5rZscDFwA3uftU4Kb8D/A6YGp+zge+DqE4gAuBFwKnAhcW5SGEEGL4GFARuPtid787f68FHgAmA2cBl2awS4E35O+zgG978BvgGWZ2CHA6cKO7r3D3lcCNwGuH9GmEEEIMmkH5CMzsCOAk4A7gYHdfnKeeBA7O35OBx2uXLcxjrY433uN8M5thZjOWLVs2mOQJIYTYDjpWBGa2F/A/wN+4+5r6OXd3wIciQe5+sbtPc/dpkyZNGooohRBCtKEjRWBmYwklcJm7fz8PL0mTD/m9NI8vAg6rXT4lj7U6LoQQYhjpZNSQAd8AHnD3z9dOXQOUkT/nAVfXjr8tRw+9CFidJqSfAKeZ2X7pJD4tjwkhhBhGxnQQ5iXAnwOzzOyePPZ3wGeAq8zsncAC4Ow8dz1wBjAP2AC8HcDdV5jZPwHTM9w/uvuKIXkKIYQQ242FeX9kMm3aNJ8xY8ZwJ0MIIXYpzOwud5/WaXjNLBZCiC5HikAIIbocKQIhhOhypAiEEKLLkSIQQoguR4pACCG6HCkCIYTocqQIhBCiy5EiEEKILkeKQAghuhwpAiGE6HKkCIQQosuRIhBCiC5HikAIIbocKQIhhOhypAiEEKLLkSIQQoguR4pACCG6HCkCIYTocqQIhBCiy5EiEEKILkeKQAghuhwpAiGE6HKkCIQQossZUBGY2SVmttTM7qsd+4SZLTKze/JzRu3cx8xsnpnNMbPTa8dfm8fmmdkFQ/8oQgghtodOegTfAl7b5PgX3P3E/FwPYGbHA+cAz81rvmZmo81sNPBV4HXA8cC5GVYIIcQwM2agAO7+SzM7osP4zgKucPfNwHwzmw
ecmufmufsjAGZ2RYadPegUCyGEGFJ2xEfwPjObmaaj/fLYZODxWpiFeazVcSGEEMPM9iqCrwNHAycCi4HPDVWCzOx8M5thZjOWLVs2VNEKIYRowXYpAndf4u697t4H/AeV+WcRcFgt6JQ81up4s7gvdvdp7j5t0qRJ25M8IYQQg2C7FIGZHVL7+0agjCi6BjjHzMab2ZHAVOBOYDow1cyONLNxhEP5mu1PthBCiKFiQGexmX0X+EPgQDNbCFwI/KGZnQg48CjwLgB3v9/MriKcwFuB97p7b8bzPuAnwGjgEne/f8ifRgghxKAxdx/uNLRk2rRpPmPGjOFOhhBC7FKY2V3uPq3T8JpZLIQQXY4UgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5UgRCCNHlSBEIIUSXI0UghBBdjhSBEEJ0OVIEQgjR5UgRCCFElyNFIIQQXY4UgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5UgRCCNHlSBEIIUSXI0UghBBdjhSBEEJ0OVIEQgjR5UgRCCFElyNFIIQQXc6AisDMLjGzpWZ2X+3Y/mZ2o5nNze/98riZ2ZfNbJ6ZzTSzk2vXnJfh55rZeTvncYQQQgyWTnoE3wJe23DsAuAmd58K3JT/AV4HTM3P+cDXIRQHcCHwQuBU4MKiPIQQQgwvAyoCd/8lsKLh8FnApfn7UuANtePf9uA3wDPM7BDgdOBGd1/h7iuBG9lWuQghhBgGttdHcLC7L87fTwIH5+/JwOO1cAvzWKvjQgghhpkddha7uwM+BGkBwMzON7MZZjZj2bJlQxWtEEKIFmyvIliSJh/ye2keXwQcVgs3JY+1Or4N7n6xu09z92mTJk3azuQJIYTolO1VBNcAZeTPecDVteNvy9FDLwJWpwnpJ8BpZrZfOolPy2NCCCGGmTEDBTCz7wJ/CBxoZguJ0T+fAa4ys3cCC4CzM/j1wBnAPGAD8HYAd19hZv8ETM9w/+jujQ5oIYQQw4CFiX9kMm3aNJ8xY8ZwJ0MIIXYpzOwud5/WaXjNLBZCiC5HikAIIbocKQIhhOhypAiEEKLLkSIQQoguR4pACCG6HCkCIYTocqQIhBCiy5EiEEKILkeKQAghuhwpAiGE6HKkCIQQosuRIhBCiC5HikAIIbocKQIhhOhypAiEEKLLkSIQQoguR4pACCG6HCkCIYTocqQIhBCiy5EiEEKILkeKQAghuhwpAiGE6HKkCIQQosuRIhBCiC5nhxSBmT1qZrPM7B4zm5HH9jezG81sbn7vl8fNzL5sZvPMbKaZnTwUDyCEEGLHGIoewSvc/UR3n5b/LwBucvepwE35H+B1wNT8nA98fQjuLYQQYgfZGaahs4BL8/elwBtqx7/twW+AZ5jZITvh/kIIIQbBjioCB35qZneZ2fl57GB3X5y/nwQOzt+Tgcdr1y7MY/0ws/PNbIaZzVi2bNkOJk8IIcRAjNnB61/q7ovM7CDgRjN7sH7S3d3MfDARuvvFwMUA06ZNG9S1QgghBs8O9QjcfVF+LwV+AJwKLCkmn/xemsEXAYfVLp+Sx4QQQgwj260IzGxPM9u7/AZOA+4DrgHOy2DnAVfn72uAt+XooRcBq2smJCGEEMPEjpiGDgZ+YGYlnsvd/QYzmw5cZWbvBBYAZ2f464EzgHnABuDtO3BvIYQQQ8R2KwJ3fwR4fpPjy4FXNTnuwHu3935CCCF2DppZLIQQXY4UgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5UgRCCNHlSBEIIUSXI0UghBBdjhSBEEJ0OVIEQgjR5UgRCCFElyNFIIQQXY4UgRBCdDlSBEII0eVIEQghRJcjRSCEEF2OFIEQQnQ5UgRCCNHlSBEIIUSXI0UghBBdjhSBEEJ0OVIEQgjR5UgRCCFEl7NLKIIjLrhuuJMghBC7LU+7IjCz15rZHDObZ2YXdHqdlIEQQuwcnlZFYGajga8CrwOOB841s+M7vV7KQAghhp4xT/P9TgXmufsjAGZ2BXAWMHuwEdWVwqOfef1QpU8IIbqOp1sRTAYer/1fCLywHsDMzgfOz79uZtbv/L9sG2mzY0II0cWcMpjAT7ciGBB3vxi4GMDMfJiTI4QQuz1Pt7N4EXBY7f+UPCaEEGKYeLoVwXRgqpkdaWbjgHOAa57mNAghhKjxtJqG3H2rmb0P+AkwGrjE3e9vc8l84IinI21CCNGtmLvM8EII0c3sEjOLhRBC7DykCIQQossZUYrAzEaZ2VvM7CIzG187foCZfbYcy3D7mNkz8/9oM/tBk/g+UDv/rfz9cTP7oJm9xMzenJ978twX8/vN+T3RzI7N33uZ2a8ybaeZ2Yvz+J/l90sGeLb9zez325z/gybHLjSz59avy/SObgg32swuG+D+ozPt1i5ck+tGlWvy99n5e4/8HmNmV5rZ3h3Gd2QnxzqMa4yZXdTk+MkWHFY79uZ8n79XP5bf4xvjaIjvnWZ2Qu3/S8xsekMcn8x7HFkL93Yzu87Mjhoo3xuuG21ml+V1Jb/NzJ5lZmeXslbqQUlTu/gb0v7mJsf+LO/7wVoaLjKzPzazUbWwe9TLxGBpqNejsy7Wjz2vsZ43XlcLW8+zcs3RpUy0uKZep0fV0jDRzF7eLO52x9o85wc6PPa7PG9yblSRM7Vje3SahsEwYnwEWZA/DpwG1AuZN/xvRz1sebDtKrA7SOO9G5+hMZ3bm8Y+Qpk3ix/ax9vqvtub362OO9ADjAO25vHRQG+mvTHdfU2O9+X3ZmB8/m8c6FCu6zSNzcKVsCuB/Qd5fWPYZtf1EXkwpnbOamF783wzxdTJ89Xj6QEmUOXp6CbXNtKb4QZbd3qAsXkfa7hfPU31NNTff3m/xrbloXzq1/ZQ5eFG4jlLPKU+NNL4LI3vp1zbmM/lfW2pnR9dC1N/f63ibleG6ufq+b857zGuxXWFkhcOLAOuAw4C9gZWATe7+5cGiGNEKYIngYOHOx1CCLGbMdbdt7YLMJJMQ1ICQggx9Kwxs9PaBRhJiuAXw50AIYTYDfk94LPtAowYReDuf0jYxYQQQgwRZbXndowYRZA+grajN4QQQgwOM/slcHi7MCNCEVisO3TAcKdDCCF2Qw4Ezm0XYEQoAvoPxxJCCDF07APc0i7AiBC+7r6RaiyxEEKIoWMP4D/aBRgRiiD5zXAnQAghdkP2BU5uF2Ak7VC2L9UsPiGEEEPDLJrPtv4dI6JHYGYTgGOREhBCiKFmErCgXYCRInjHUq1HI4QQYuj4EbC4XYAR0SNw97XATcOdDiGE2A2Z4u6fbBdgJC06V1bRE0IIMXQ48Bfu/u1WAUZEjyC5gQEcGkIIIQZND/C1dgFGkiK4meHZO0AIIXZnFjPAfhQjSRGcM9wJEEKI3ZCVDGBtGRE2eTPbF9hvuNMhhBC7IR8jdj9ryUhyFi8htlgTQggxdKxw97aLeo4k09BHCKeGEEKIoWP/gQKMpB5BH7HZskxEQggxhLh724E4I6lH8Cix3pAQQoiho9fMXt0uwIhQBGb2DGLBu
RGRHiGE2I3oBT7fLsBIEbzrgGuADcOdECGE2M3YAuzdLsCIUATuvhX4BAOskCeEEGLQ7AHc2i7AiHEWA5jZr4EXMMAsOCGEEB3jwGhvI+xHRI+gxumMvDQJIcSuzkfanRwxPQIzmwhcD9wLfGCYkyOEELsTy939wFYnR5IiGAc8CeyDTENCCDFUrAT2dPfxrQKMGDOMu28B7ieGkQohhBgabiVGDrVkRCw6B2Bm1xHp0XaVQggxdLwaeFu7ACPJNDTe3Teb2chIkBBC7CbsSktM3Gxm/wBcOdwJEUKI3Qkzm9nu/IgxDRHdlwV0sFKeEEKIQfHZdidHTI/A3TcAp7n7aLR3sRBCDCUfbndyxPgICma2J7H2kBBCiKFhk7tPbHVyxPQIajyf2KCm7dZqQgghOmbX2KqyYGaLgYOBtl5uIYQQHbEW2MPdW/qER2KP4HI0qUwIIYaKHw8UYCQqghOAh4Y7EUIIsZvwcuD2dgFGlCIws08DXybWzxZCCLHjrHD3l7ULMKIUAfAyYC4webgTIoQQuwnPHijASFME04Fvoy0rhRBiqBhtZh9tF2BEjRoys1OAvYCby6FhTI4QQuwO9AEL3P2oVgFGlCIomNkCYghpy/WzhRBCdMQWYIu7t9zAfqSZhgrjgLHDnQghhNgNeCcDjMQcSYvOYWb3EzuUwchVUkIIsStxKtCyNwAjT9ieAXwJmIgWnhNCiKHgjUDL/Yph5PoI9gBWED4CR05jIYTYXrYCK939oFYBRlqPoHAH1Qb2UgJCCLH9fB/Y2C7ASFUEfwTMAG5DcwqEEGJHeCNwUbsAI8o0ZGbPILQXhINjFTGCaF+0qb0QQmwP69oNHYWR1yNYB9zt7q8kViH9OnA3MeO4bzgTJoQQuyh3DxRgRPUIAMxsL3dfZ2azgWOA+cAhDDD8SQghRFMedvdj2gUYUfMIANy9bFP5uvy+H61GKoQQ28vSgQKMNNNQnaOJLSvPJXwFy5F5SAghBst+ZjazXYAR1yOo8XLCZ/AxYD9iLOwGYlG6Qh8jW5kJIcRw8yFgdrsAI85H0IiZzSJGDD0bCX4hhBgsM4DvuftnWwUY0ULVzPYnzEP3AouAn+apHkIpbAWWDU/qhBBil+Cvgb9oF2DE9QjM7Dp3f33+ng8cRKxEup5wGo8jFEAxa20CJgxDUoUQYldgPTDX3U9qFWAk9gj+BMDM1gIHAL3EchNjiOUmHiOGk0KsQyQlIIQQrXEG8BGMREVwE4C77+3u+7j7PsCP87OSWFf7LKJX8BXgKuAp4mFHVvdGCCGGn72AF7ULMOJMQ80ws38mhP8hRK+gl1Bie2YQ9QqEEGJbnBh9ucLdj2gVaCT2CJpxFrHcxJ7EXgVPEUNKx9NcCTSutNebHyGE6DZuJqwpLdlVFMEm4DJgHnAdMeu4BziSmDX3FGEqghhN1LjN5WiqZa1BJiQhRHewnmg0/6xdoF1FEUwnTEJfBU4EfggsAVYTD7ov1bOMYuCJcmWPAykEIcTuzCpgX3f/SLtAu4oi2Ad4M3AL8BrgPHc/HHgl8HFiN7NWz9JO2GvTGyHE7swedNDg3VWcxa8A/iA/RwO/BZ4D3Am8mvAT7E+YhXqAu4AXE4K+CHvNShZCdBtrgbe4+4/bBdolFAGAmY0GXgC8Avgrwm/wb8A/EopgDNWcg3b0Eo6TA6kmpmlSmhBid2W+ux/VLsAuoQjM7CZixNCvgV8BtxI7mY1z91MzzCrCV1AfHVSUQpljUHoEnfYOHJmPhBC7Ln3uPlDjeJcxlcwEtgC/B5yQ368BbjOzfzWzPwDWEC38rYQyqO91bPR/1k6fW0pACLHbs0v0CApmtjexeNKHgWcCt+cpB16Yv8cRSmMCIcgXAIcTyqE+mmgtMQ9hoL2QG3sTQgixK7E2V2hoyS6hCMzsfYSj+BTgUcI89CvgJCrzzX75+53AecBnCYdysf2vJMxLRfCvo9rb4Ang0NotZRISQuwObHT3AXd43FUUwYcJwX+Xu2+tHb8cmAZcQwjuPwImAQ8Tgv0uYt7BZOBJ+gv7VtQzpCiDZk7ozUSPQgghRio97j6Q1WNE71D2O9z9ohanpgAnl32OzexC4D5i/4JTgIXA6wnhXl+xtAj4+nLW0F/gb6DaK7luFirXj29yTAghRhKrOgm0q9u9DyJa5oUewndwJbFhzbOJdYeMqqVfF9hFCZRzjaOK+ppcU1ci5fvp7FaN/C6cEGKksNfAQXaRHkEbLgPuMLOr8/8fAw8Q8wJ+RuxdMJfY+P6DxIJ1fUTLfxxVS76xNT+Kbc0+9SGn9Z7E052H6nkIITphEzDRzC5397e2C7hL+AjaYWbTgJfk39uAHxF+ggXEOkQHlqD5vQ/Ri9hA+Ax6gfuB51ONENpAKIp2trUetl3cTgghRgpluf757n50u4C7vCJoxMzeDVwPfJL+ZpQzCIF/ALFq6U157EFCWRRH8iZildN9gMNKtC1u11c7tzNb6s38GloyQwjRCQPOLN7VTUPN2NvdF5jZzfRXBM8jdjM7AvgI0dp/DHguIVDXE0NNy/aXU4hWf9kis+5ILoK5mJk2E4qjkaFyIjfzawxWCcihLUT3sYBYpbktu6MieCC/ZwN/B7y2du6LxAzkrwA/B24g9jMoO52NJpTAMfm/+BG2ED2FvTL8MwgfQlEMrZRA6ZoNZct9c6ZrsEJdSkCI7mMFA6+/tvuZhgpmNodo+a+gGv2zFbi7YS7CQmBvQmjvSSiKvQhB3zgbuU4fIew3Am8CPk30LsYzuNb3YFvqq/Pe+w3iGiFE93Kvu5/YLsDu2CMoLHP3a1qdNLNZhBBeS/gHijAuzuU+YjbyXsRoo/VUPQeoWvkTieWwR9N8uOlA1MM2KoX6/6XEcNm7gZcNEGdZTbU+6a3E1ckKrUKIXZ8fEX7Q9w0UcHfuEbwKOJdwCv9uroG7fz/PH14L/lfExjfPohKYZURQH+E8nkAojF8DpxLDUa8Hrnb3E83sMcL09DlCEG8iTEiNDLYHUBzSmzMNg3ESl2ub3a+s1gph+tIsaSF2P3oJc7m7+wmtAu3OPYK3E2sNjaVqqTuxfDXuvqAENLO3EstQrCWEYx9hUlpIOJdXAfcQtvl5xGii+4nF7Caa2bOIzC7bwZWhp8VH0GxCWjvqyqII8gmEwL6DGC7bbke2ZhPoGkce7Z2/pQSE2H1xYumdtuzOiuAF7n5sh2EPJDa5OYFYu2gGIWzXEstM/B4xN+Ea4C+J3dCK0F9B7I+wlGoWX+OSFHW2UPU2jMqMU2+9N1Mcvfm5kViArxVGNdqpmL72bghTzsHAq68KIXZdZtcbva3YnU1D3wT+r7vP7iDsauAthK3/48ROaC+m6klsBE4GrgVOA45195/Wrj+Q6CnsRWv7e7PF7LaHeou/WTybCUUzKn+PIhTDE8RoqGamJQ0tFWL3w4FNu83qo9uDmT1A7G88nxCIRgs7mZkto3IS1ymKYIG7H2VmPwbe7O7rzOw57v6gmZ2cYZ4JnAX8L8KB3ChY68K2leCtz1uoX1f2QyhDWevDR9s5
mLdmnBObhFtG9HLKPIgBC4sQYpeh1Pct7j6g6Xd3VgSHNzverJuUYf+BEPzPI1r2KwkT0Z2EKWYr4Ud4PuGAfgkxF+FPiPWMAH6fEOTtRuWUrTQHO3KntOQHGvWzIcMWM1Wjz6AMid2S4SZ0EKcQYtejD/ixuw/oI9htFcFgMbM/aTg0AXglcCbhE3hni0tPBT5G9AQ+SSxhUZzEm6h2SxtHzFHYk4HXKGrWY3iI6HXs3eRc47XFPPRzYvnt4+g//HV0httE5YgWQuw+OLAYmAM8udsvOjdUpE8BYqTRMwmBvg5YQmTk61pcN9PdTzCz7xC9hB5gKtVksx6qSWatzC+9hKDeg+h5bCCUT2/GMZYQ2vuyrYmpcQntwmZi8tkkYBbwVeDrVMtnj6O/2UmIoUI+p5HBEsLkvWCgRed251FDg8Ld3w6td0PLc1OJGcTHU7WiJ+f3QrPXMxIAACAASURBVOBgwky0AfgNMcz02Xl+U7kV21aS0VTLVIyhEsyjqUw844mu3nqqUUBGCPs985oNhNIYk+EPynAnAP+ev+ujhAbyWQixPagsjQyuJSwVWwcKqJbgtnwHOJ+YlYeZHW9mxSz0TapW9SuAbwMLzOzfiVFHDxPDO/ckegcHUy1rXRapW1e7V2nNb6X/Bjt1Yd1LmJe2UPUaoHJk70tlappA9CCKH+QpKlPRBsJxPq/JM9/eIi+EELsmfcA7CNlw/kCBpQi25VvAT6iWpX4I+Jv8PdHdbyJMagvc/ROEYP8JcDoh0I8nhPEoYp/kNXm85PXehFCHquU0mhDWRWj/kMqZ+9UMdx/R+l/REF+99TWK6FlMyf+fJ0xTVwCvy99lOdrNtetfwshjMDbLkWLfHCnpeDroGzhIS56ufOqm99GKX7j7LwYKJEWwLQe6+1VkQU/zUBnps9nMRgFzzex9ZvZGYE93/767zyWcMzPz2wjTzL6EqWYsoSCKk7aOEQpiNdEb+MP83gK8nxD+Mwkh/yuit7KW/j4Car+Lye/v8/d5wC8IM1V556UXsSG/yzPuCI3Cwdk2jZ2yves1PV300X/G+nClY7jYEdnxdOXT7vQ+BluHyhL5t3QSWIpgW9ab2QFkxpvZi6g2vv8hYZr5a+AU4M8JIVv4lrt/jBhtNBV4EbHkdSmQvyWUwFwqAdmT59bmuaVET2IrIfyvI4awHg2sdfc3ld8ZZiGx/tEyYktOJ5bEmE0sUPckoVB+UEvnKqp3X0xNQzF8tLE8NZsp/XSyoy3Cnjbn6suL14fnis7ZnfNse56t3TWDrUNlPtIfdxJYo4YayAliXyGWlbiPGHUzmphL8GOitd7vpbj7irz2bnc/2czWEkJiLNsOFXVCgJfj9+a9lhKK4nTCnDSRykFc1ityYnbwbJrvSdBHCP35RG/kv4lexoeBy4lF+OpzERonp42U+QR9hOlqYgdhmzm6Oz3Wjq15zWia71U92LjLexwo7K4wkqsuNNot1Q7tF0nUIIUqf5YRPf7B7DXSKm+LX9GAXncfcAN7KYIGzOzNhM3/MGKfgRcSC8z9MWFfX0T/JR4ceC+x3OvZwJVEL2ED0dqeQwi1l9L6xdV3PNtE1UpfS3TtZhF7HbyM6E30EGsiFSE1qiGOIuy3UAnTIuR/TUx8683wjesi7eyK2ek9vksoru2Jvz5p7+naQnQgOl011mvfRSHU3+tg4xss21sG7gIeJ+bdjGQltrNozLed8X4GG6cTimDAvdW78YUNxMfdfQ2x8csrgK8BL3f344BL3P0odz+y/k2s4zODEOJ3EYJ8NSGQ7so4oNrMBirH8FrCB7A4w3+cUDbLiF7DR4FHqfZanpBhZtG/terEqKL1wBuJQvmjvLYsWAeVEniMalG6Yop6OlpnnZhRnFDCW6kc64OJfwzbLtWxMyhKp5PWVLO61sz0VExpo+ivyDptsXnDd6fsqJ/jFOANPP0ypfSwh7tF26x3Plga/X2Nz1TWDWsM2yqeNZ2mQ4pgW4rAfD3wH+5+HTmc093f3ewCd7/X3S8Fjs7vLYS5p5doyV+Svx8i7PhLiKGdPYT55ivE3gWriPH+vyD8Edfl7/9FzGx+F7EHwr3ESqhPAT8lXvw6qq02f0a10NwzqBRMb97rvXmfh4GbCWU1Ou//dDCQacSI9Gxix0anNIsbIh86ibdeGcs6T71EPhUF0Ec1MuxxmlfgVulo11JrjKfRZNeq7lrDd6cMNvw8KiVdxql3mqdQCbTVwMX5+/4Orq3nSRke/XQo/XY0e9+l/NaVVDMlXT/WuOpws2cazNyvvQgz8YBoQtm2LMp5Aa8B/sXMxtO5wpxrZk7Y9lcQZpkTCeHsxNDS1Rnf/lRDRo+l0vbLqVq15b6TgAfz9wJiOGvZ7+BYokKuIGZEjyVa+b1EC62siPoiwlx1NaFQNhE9jF/muXVET2Rfhr5SlS5tJ/bi8j0buI0ws62i+SY/regh8qG02Es5rw/XbYcTFehI+gvWMht7Qu05yogvCDPe2Wy7v0OzSt7MlEAt3nuIvTBabUm6s8x49Z5BK58IhK9qFZHP9xHlvOzn0Y7y7KOIsvxrorG0lXje+q56rdLXS7zjA/N7uBu0zd5DWbplC9uWv2ZloY+By+VKojz0EY24fYi5Ss3uXfLproGTLx/BNpjZHsSG97Pcfa6ZHQI8r77sdJtrD8if5xGt8fMJH0MxVYwiWo9OCOh1xEY5LyZ2Nvt7YpIawJ8Ro5L64e63m9kXiJFLG6jMPp5x307lq5hJtLIuJwrZyrzmMKJ38pxMw5G1dO1IpSp7LZSCvi6fs7FydyLESpgyZ6LTdPUSSm4DUQle2+F1jfcuprJ2jaWSxuIP2kBU/vqzFZ9MWeupkwrfjMZ82ESVpyuJhkVx/jdLYyNFWQ50zzFU/qVWgwk2UjV2BlN+bgReTeWvGgkDFQainp/lvZd8X0u19W1p9LRTplCZi9fRXInW73cfYWloPN6M5YS14adlV8Z2SBHsZHJv5D8lRhsdRfgLjiFs/5OJoal7EzOazyQmfznhfD4FeDnRUtqT0P6PufvnzWwesfLpG4D/ndc8CTxC7JzWQ7RqDyO24BxNCPu1ed+LCXPTeOAi4AL6j2ZqpF7wdqZTeR6hmLZXKBST2KH096FA/9ZuvaXWTHhtJnpIhxOCahz9K3V9I6GiAK8EzmmTti2Ekv7hYB6oIf07I9+LqaxZfrW6X8njucRQ6RKeQaZxNZGve1H1uiDeY/GVNI6cazX6ql16hyrv6rsOlvJQGjqbCKVY78WtIJR0ncZyNCqvK72I+vHtVeRFufy3u79joIeSIhhCzGw9/U0co6kK9np338fMfo9ooc8lTB//P2FbLgvTvYlo7Z9LzB/YRDiGnehh/Duh7d9FVMBFxFDRJwjfwzcJJfImYrnsVYQwO4oYRno7MeP4P/K6LxIFc0+iV/LRIcqORjNQo5mmFaWb/FtiM6DSEq63TBvDNxMIQzEEczGhqFvdA6oK3LjnQyfpaqzs9WuaPWuz/Spa3assZ7IPndEYR7OeWBEue1P19hYTZfllxMC
EKbRv3W+l6l1sIebB9BJzY8p7hupZy33LSLhSr9YQdabUm0abepnwN4b2e3c30uxdDMRG+g913h6lU8w+7Si9/9G07kH0C+/uHZn/pQiGEDO7mUpAbCUqxkXA/yUcxl8jWvWja+GeICrRb6jWCPoJMVzV6hvp1FY6LQvjQdj3/5IYpnoSITw3Ei2tMwjfwlsJJTARuCHj/zKhFDYQDul/Ilo2+9NfiHdSqJu1cEpFLbb6jcS+zi9gWwFYnzdQhOXjhJA4jNgK9NwO0lEo8ylWEIJ8oFZi/RkXET2mOYQCfhPhlCyVrq6U+gjH/XTCXv2GvK9Ttdg2Zfh9iXc9imqC4mCFTjFZjSEaAwfSX8h1Gk/ZHnVjXr8nzffXbhdfMYtsploZt6yF1eqaXqLXeijVmliH0/mmSFuJ+TaHUvnXRjOwiWtnUAYPlPLQmG8LiJ78dELJ7U/7XkzJ/3Z7iC8j6shiotdcGlZF6RY/U1EWa4C3dGTWliIYOsxsAiE43kQICIgX/WpiHsFGojCXzWCeJJygE/P3rFp0BxNmnM8Ro4DeA7zC3d/Q5L7N/Bp/Tpgpvk+08svKpM8nHMRHEIrqL4nexZt3PAf6KZBiNy29pAEntdTiKEJ0M/33Smg2TrveSi7HlhDDbcewbRe6KJqSrn0Iod+4v/Vy4n3tT1Sq0vWvT/Ir6ZiX9z+E8L0cR7zPQ4je38mEku4l3v94+ptUSrrKMxZ/zyhCSHfamm3GYCcJ1udgbCLyrlxfemujiFnr5Zn6qIRXD/1NTPX7l1FgexCCchyhzFYRAyLq76quJIvA66FSNosIBX10xlPyiYbrW9HqfDPHbXFO05AfjQ7+TuJvpN6IaudPKI2AsVS9qjW07xX0Afe6+8ltwgDD723f3fgh0ZI/nljXZx1RcJcTL20C0a0em7+PJLqDEwgzzedqnzKz+HpCqF4EvNrM1jTe1N031NY7wt0XE0tP9xHCZjPhmNubWFRvMlEpDyWcTzOIgvaaDp+zsfWwlcrpVSru2Py+mVBCxYfxVJs45xPO8u9mnNOpRhC1a9WX1uETxKiof6PqcTS2FkvlLRsEOeGzqdOb6TyIqJjFvDeXaJE+AXyDqkV6WH7GEsJxD8IUN5Hww5yU148iykIv8V7qcyTqwr6MRBpH/1Z6MY040Vsqz1LMIGuoBHndDFWf67A5w24lHNklXHl/92Uaewjh/ET+LjNVy9yGU4gWav0ZHq09Z0lDXaBOoDKhHE6VvyX991GVjx6qjZO2UCmlfTNfjiR6l/tnnGXE3YeotmjdQjQKmtFOSDcqzjIse0LDuWI6K0vDLCJ8fCX+kueN63jV/y/K7/WZ7tV53VNUjaIS38b8vTjP3dAk7f+T331Ez7+zBoS76zNEH+C+/J5ICN5zgUuBLxGV7seEGebmfNHFRroyX+CXidVG/5vwD8wnBLptR1rm5PfXM/6VhBCYmYXuRqoCXIRDEdiNn77ad1nMqlH41z9l7HRPFtoN+XknMaLqU2yrOEq8PYRA21JL88oW9/GM/+NUlWYzlf15HZWZqPF5yn3KM2/OdH8kwxdhUn/+xus3Ek73OYSS+HHt/IYMsyzj2pr5fXst7zZQKbF2nx76K4HGT31Ow6aGdzDQZzPVSrfrqBYz7CXKylaiHNbfkxOK6j7C+d1Ti6uTZ5mZcd1GNdJmQX7X87r+DKXsbSb8R4cTkyLvA/6LUChrCBNsKQOlTHWSD63u2Sz966jmjrSKZwOhhJbRf4HInoZwZW7K1hbne4k66sAdRO+pVb1rfI6jiH1VBpYXwy08d6cPIfRfQrRSDs8C+6/A24mW1X8Rrd31VL2FYqe9nxCS7yCEypeyoowjnL7LgP/VYTquI5zGxxNbaX6DMC+VAlQqfhGGfVnA7iQqZCcFrR5mC2HT31o7NzcLcVkcrwicJR1UtA1Ey6i0Er+acTSr1MVGfQkxYqqPqjXVScVfmfe8jMpctzHjPTzT/8uGexeBu5Qw2T2R/2/Oc2Xxv7oy6clneopKSC3LclAEfRHi9SHBj2V66kqrE0H/WyoFVE93T+34glq66vct915SS2sflTmtj+hZLsjyNpMwf30gP+sJk9g1+U7qaS4t/TKrvjz75syLgZTJpszvPyTq21PEBMkzmzzrYBRA4zWl19R4bAPV/iB1YV3P33LtZpqX2Xqc5Z2uYtsGS+NnI1FmNhOm5162fc7SCNpM9NBOkCJ4+hVBeZHlRWypv5QMMzlf0MOEsJxLmGe+X4vnXmJC0bcIBTEd+DuiG/mhDtIxnnDMNlbu3ixMC7MC/RVhtnhV3uszRCWut6RaVah2FW15pnddfkrh3DDAdfWKUQTjcirFWVc0vYRwXEQs1X028J/EKqv35LlbGipIqTibMv/r5+qtyOIEnkXl1ymC7Kk8dxvhiP90xlUEe6PgaOz5rG3431iJByO0Sr6WXk/ZGrUo0Mb7t/usBi7Mz370F0zNekR9VMuTrCHK6QoqZdZ43eZafKUXsIRKOZRGya1UyvmJjOMjVA2mFQ1xNvZWW+X9YJRAp+FLubiaqnw8mzBb/SNVb7a8q8Z7lF6rA5+oPWOrtJTysSw/q6h6vSXM3cA/A1cMRnbJWTyEmNnhLU5dSzhL96PaVrLYEFcQ9v+PEwV/H8IOOpowoZxJ2MjnAe8Gvu7unxwgHbcC/4dqyv51xJIZuPuCDHM3Uem/S1TASRm2k+Fm9Zm7sK0dsiw5MIdKYHzN3S8zs15C2LdyHpc4H8l4DyME7zGEY30OUSFmEQp0TyKvJuZ9Smv8eCKfixOzbrcuTrfi+Kx/l7AriEr2PcKWfRSx0N94Ir8WEEMlP0n4hiYBZxGK6V+JSj2aMKscmun7dMb/nlo8k/M5S56WvCytzz2pRpLcRtjFN1NNHjw50zaf8E1MyLw7iOhNjqOalV1a38WJ/0vCsf0sQoAvzjQcCvwtMcrtPwnz5EH0n8U6NvNnccZTllf/AnAVsd7VaqLMjyF6NvtluIMy7DMzXx7PNCym8pk1c3B7Xj+RaOnOMrOZmdcHEGWlNBBuJObZlJFN7WYrl7g7safXnd/lmvq1fYTwH081abTRkf1wPsPBxDt8IsM8r/YMZQBEme/SS+ThEXmPUn4bZykXpXMPcJm7f6mDZ1KPYCf1DN4LPKP2/0uEAFtCdJ2XEb6CXxDmmNIiXZT/pxHj+x8kuvhjCUFzxyDS8C/EjNZvEa3YbxLOpSuIoacPEDNv9yW69n15/01EBf4G0bVfSrVNZml91FthzVoumzL++wjzyrWEoPoV/Vs2axriW0FUou/ntZ/Mc0szz+r3LC2sLUSrtI+YmV1atbMJAdjMP1COfZAQPu8kFOJaQtiuye9ZtTTX710cs/cRe0/sn+94//zMzWfZTFTIUjmX5bOsperhFL9G8RcUE1gPIVQ35rnFVK3/0hqd3pDnddPSPKLnuSzf3axMc8nHHqKsradyPm4lhNJG4LZaL7feoi0OzYcz304kBOMeGX4d1bDkqZkXfYRp8hEqQTaDqidQ8nURlc28/p7L4oNriDLck/
[... base64-encoded PNG output of the frequency bar-plot cell elided ...]\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# fequency bar plot - it takes time!!\n", + "w_count_df.plot.bar()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "Collapsed": "false" + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/dataset_analysis/AnalyzeDataset.ipynb b/notebooks/dataset_analysis/AnalyzeDataset.ipynb similarity index 99% rename from dataset_analysis/AnalyzeDataset.ipynb rename to notebooks/dataset_analysis/AnalyzeDataset.ipynb index 3ed54ded..62870fdc 100644 --- a/dataset_analysis/AnalyzeDataset.ipynb +++ b/notebooks/dataset_analysis/AnalyzeDataset.ipynb @@ -27,7 +27,7 @@ "from multiprocessing import Pool\n", "from matplotlib import pylab as plt\n", "from collections import Counter\n", - "from TTS.datasets.preprocess import *\n", + "from TTS.tts.datasets.preprocess import *\n", "%matplotlib inline" ] }, diff --git a/dataset_analysis/CheckDatasetSNR.ipynb b/notebooks/dataset_analysis/CheckDatasetSNR.ipynb similarity index 100% rename from dataset_analysis/CheckDatasetSNR.ipynb rename to notebooks/dataset_analysis/CheckDatasetSNR.ipynb diff --git a/dataset_analysis/README.md b/notebooks/dataset_analysis/README.md similarity index 100% rename from dataset_analysis/README.md rename to notebooks/dataset_analysis/README.md diff --git a/dataset_analysis/analyze.py b/notebooks/dataset_analysis/analyze.py similarity index 100% rename from dataset_analysis/analyze.py rename to notebooks/dataset_analysis/analyze.py diff --git a/run_tests.sh b/run_tests.sh new file mode 100755 index 00000000..e5e46476 --- /dev/null +++ b/run_tests.sh @@ -0,0 +1,10 @@ +# tests +nosetests tests -x + +# runtime tests +./tests/test_server_package.sh +./tests/test_tts_train.sh +./tests/test_vocoder_train.sh + +# linter check +cardboardlinter --refspec master \ No newline at end of file diff --git a/server/README.md b/server/README.md deleted file mode 100644 index 3c65c961..00000000 --- a/server/README.md +++ /dev/null @@ -1,47 +0,0 @@ -## TTS example web-server - -You'll need a model package (Zip file, includes TTS Python wheel, model files, server configuration, and optional nginx/uwsgi configs). Publicly available models are listed [here](https://github.com/mozilla/TTS/wiki/Released-Models#simple-packaging---self-contained-package-that-runs-an-http-api-for-a-pre-trained-tts-model). - -Instructions below are based on a Ubuntu 18.04 machine, but it should be simple to adapt the package names to other distros if needed. Python 3.6 is recommended, as some of the dependencies' versions predate Python 3.7 and will force building from source, which requires extra dependencies and is not guaranteed to work. - -#### Development server: - -##### Using server.py -If you have the environment set already for TTS, then you can directly call ```server.py```. - -##### Using .whl -1. apt-get install -y espeak libsndfile1 python3-venv -2. python3 -m venv /tmp/venv -3. source /tmp/venv/bin/activate -4. pip install -U pip setuptools wheel -5. 
pip install -U https//example.com/url/to/python/package.whl -6. python -m TTS.server.server - -You can now open http://localhost:5002 in a browser - -#### Running with nginx/uwsgi: - -1. apt-get install -y uwsgi uwsgi-plugin-python3 nginx espeak libsndfile1 python3-venv -2. python3 -m venv /tmp/venv -3. source /tmp/venv/bin/activate -4. pip install -U pip setuptools wheel -5. pip install -U https//example.com/url/to/python/package.whl -6. curl -LO https://github.com/reuben/TTS/releases/download/t2-ljspeech-mold/t2-ljspeech-mold-nginx-uwsgi.zip -7. unzip *-nginx-uwsgi.zip -8. cp tts_site_nginx /etc/nginx/sites-enabled/default -9. service nginx restart -10. uwsgi --ini uwsgi.ini - -You can now open http://localhost:80 in a browser (edit the port in /etc/nginx/sites-enabled/tts_site_nginx). -Configure number of workers (number of requests that will be processed in parallel) by editing the `uwsgi.ini` file, specifically the `processes` setting. - -#### Creating a server package with an embedded model - -[setup.py](../setup.py) was extended with two new parameters when running the `bdist_wheel` command: - -- `--checkpoint ` - path to model checkpoint file you want to embed in the package -- `--model_config ` - path to corresponding config.json file for the checkpoint - -To create a package, run `python setup.py bdist_wheel --checkpoint /path/to/checkpoint --model_config /path/to/config.json`. - -A Python `.whl` file will be created in the `dist/` folder with the checkpoint and config embedded in it. diff --git a/server/__init__.py b/server/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/server/conf.json b/server/conf.json deleted file mode 100644 index 00045365..00000000 --- a/server/conf.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "tts_path":"/media/erogol/data_ssd/Models/libri_tts/5049/", // tts model root folder - "tts_file":"best_model.pth.tar", // tts checkpoint file - "tts_config":"config.json", // tts config.json file - "tts_speakers": null, // json file listing speaker ids. null if no speaker embedding. - "vocoder_config":null, - "vocoder_file": null, - "wavernn_lib_path": null, // Rootpath to wavernn project folder to be imported. If this is null, model uses GL for speech synthesis. - "wavernn_path":null, // wavernn model root path - "wavernn_file":null, // wavernn checkpoint file name - "wavernn_config": null, // wavernn config file - "is_wavernn_batched":true, - "port": 5002, - "use_cuda": true, - "debug": true -} diff --git a/server/server.py b/server/server.py deleted file mode 100644 index bd23ea9c..00000000 --- a/server/server.py +++ /dev/null @@ -1,86 +0,0 @@ -#!flask/bin/python -import argparse -import os - -from flask import Flask, request, render_template, send_file -from TTS.server.synthesizer import Synthesizer - - -def create_argparser(): - def convert_boolean(x): - return x.lower() in ['true', '1', 'yes'] - - parser = argparse.ArgumentParser() - parser.add_argument('--tts_checkpoint', type=str, help='path to TTS checkpoint file') - parser.add_argument('--tts_config', type=str, help='path to TTS config.json file') - parser.add_argument('--tts_speakers', type=str, help='path to JSON file containing speaker ids, if speaker ids are used in the model') - parser.add_argument('--wavernn_lib_path', type=str, default=None, help='path to WaveRNN project folder to be imported. 
If this is not passed, model uses Griffin-Lim for synthesis.') - parser.add_argument('--wavernn_checkpoint', type=str, default=None, help='path to WaveRNN checkpoint file.') - parser.add_argument('--wavernn_config', type=str, default=None, help='path to WaveRNN config file.') - parser.add_argument('--is_wavernn_batched', type=convert_boolean, default=False, help='true to use batched WaveRNN.') - parser.add_argument('--vocoder_config', type=str, default=None, help='path to TTS.vocoder config file.') - parser.add_argument('--vocoder_checkpoint', type=str, default=None, help='path to TTS.vocoder checkpoint file.') - parser.add_argument('--port', type=int, default=5002, help='port to listen on.') - parser.add_argument('--use_cuda', type=convert_boolean, default=False, help='true to use CUDA.') - parser.add_argument('--debug', type=convert_boolean, default=False, help='true to enable Flask debug mode.') - return parser - - -synthesizer = None - -embedded_models_folder = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'model') - -embedded_tts_folder = os.path.join(embedded_models_folder, 'tts') -tts_checkpoint_file = os.path.join(embedded_tts_folder, 'checkpoint.pth.tar') -tts_config_file = os.path.join(embedded_tts_folder, 'config.json') - -embedded_vocoder_folder = os.path.join(embedded_models_folder, 'vocoder') -vocoder_checkpoint_file = os.path.join(embedded_vocoder_folder, 'checkpoint.pth.tar') -vocoder_config_file = os.path.join(embedded_vocoder_folder, 'config.json') - -# These models are soon to be deprecated -embedded_wavernn_folder = os.path.join(embedded_models_folder, 'wavernn') -wavernn_checkpoint_file = os.path.join(embedded_wavernn_folder, 'checkpoint.pth.tar') -wavernn_config_file = os.path.join(embedded_wavernn_folder, 'config.json') - -args = create_argparser().parse_args() - -# If these were not specified in the CLI args, use default values with embedded model files -if not args.tts_checkpoint and os.path.isfile(tts_checkpoint_file): - args.tts_checkpoint = tts_checkpoint_file -if not args.tts_config and os.path.isfile(tts_config_file): - args.tts_config = tts_config_file - -if not args.vocoder_checkpoint and os.path.isfile(vocoder_checkpoint_file): - args.vocoder_checkpoint = vocoder_checkpoint_file -if not args.vocoder_config and os.path.isfile(vocoder_config_file): - args.vocoder_config = vocoder_config_file - -if not args.wavernn_checkpoint and os.path.isfile(wavernn_checkpoint_file): - args.wavernn_checkpoint = wavernn_checkpoint_file -if not args.wavernn_config and os.path.isfile(wavernn_config_file): - args.wavernn_config = wavernn_config_file - -synthesizer = Synthesizer(args) - -app = Flask(__name__) - -@app.route('/') -def index(): - return render_template('index.html') - - -@app.route('/api/tts', methods=['GET']) -def tts(): - text = request.args.get('text') - print(" > Model input: {}".format(text)) - data = synthesizer.tts(text) - return send_file(data, mimetype='audio/wav') - - -def main(): - app.run(debug=args.debug, host='0.0.0.0', port=args.port) - - -if __name__ == '__main__': - main() diff --git a/server/synthesizer.py b/server/synthesizer.py deleted file mode 100644 index 0f743d87..00000000 --- a/server/synthesizer.py +++ /dev/null @@ -1,194 +0,0 @@ -import io -import sys -import time - -import numpy as np -import torch -import yaml -import pysbd - -from TTS.utils.audio import AudioProcessor -from TTS.utils.io import load_config -from TTS.utils.generic_utils import setup_model -from TTS.utils.speakers import load_speaker_mapping -from 
TTS.vocoder.utils.generic_utils import setup_generator -# pylint: disable=unused-wildcard-import -# pylint: disable=wildcard-import -from TTS.utils.synthesis import * - -from TTS.utils.text import make_symbols, phonemes, symbols - - -class Synthesizer(object): - def __init__(self, config): - self.wavernn = None - self.vocoder_model = None - self.config = config - print(config) - self.seg = self.get_segmenter("en") - self.use_cuda = self.config.use_cuda - if self.use_cuda: - assert torch.cuda.is_available(), "CUDA is not availabe on this machine." - self.load_tts(self.config.tts_checkpoint, self.config.tts_config, - self.config.use_cuda) - if self.config.vocoder_checkpoint: - self.load_vocoder(self.config.vocoder_checkpoint, self.config.vocoder_config, self.config.use_cuda) - if self.config.wavernn_lib_path: - self.load_wavernn(self.config.wavernn_lib_path, self.config.wavernn_checkpoint, - self.config.wavernn_config, self.config.use_cuda) - - @staticmethod - def get_segmenter(lang): - return pysbd.Segmenter(language=lang, clean=True) - - def load_tts(self, tts_checkpoint, tts_config, use_cuda): - # pylint: disable=global-statement - global symbols, phonemes - - print(" > Loading TTS model ...") - print(" | > model config: ", tts_config) - print(" | > checkpoint file: ", tts_checkpoint) - - self.tts_config = load_config(tts_config) - self.use_phonemes = self.tts_config.use_phonemes - self.ap = AudioProcessor(**self.tts_config.audio) - - if 'characters' in self.tts_config.keys(): - symbols, phonemes = make_symbols(**self.tts_config.characters) - - if self.use_phonemes: - self.input_size = len(phonemes) - else: - self.input_size = len(symbols) - # TODO: fix this for multi-speaker model - load speakers - if self.config.tts_speakers is not None: - self.tts_speakers = load_speaker_mapping(self.config.tts_speakers) - num_speakers = len(self.tts_speakers) - else: - num_speakers = 0 - self.tts_model = setup_model(self.input_size, num_speakers=num_speakers, c=self.tts_config) - # load model state - cp = torch.load(tts_checkpoint, map_location=torch.device('cpu')) - # load the model - self.tts_model.load_state_dict(cp['model']) - if use_cuda: - self.tts_model.cuda() - self.tts_model.eval() - self.tts_model.decoder.max_decoder_steps = 3000 - if 'r' in cp: - self.tts_model.decoder.set_r(cp['r']) - print(f" > model reduction factor: {cp['r']}") - - def load_vocoder(self, model_file, model_config, use_cuda): - self.vocoder_config = load_config(model_config) - self.vocoder_model = setup_generator(self.vocoder_config) - self.vocoder_model.load_state_dict(torch.load(model_file, map_location="cpu")["model"]) - self.vocoder_model.remove_weight_norm() - self.vocoder_model.inference_padding = 0 - self.vocoder_config = load_config(model_config) - - if use_cuda: - self.vocoder_model.cuda() - self.vocoder_model.eval() - - def load_wavernn(self, lib_path, model_file, model_config, use_cuda): - # TODO: set a function in wavernn code base for model setup and call it here. - sys.path.append(lib_path) # set this if WaveRNN is not installed globally - #pylint: disable=import-outside-toplevel - from WaveRNN.models.wavernn import Model - print(" > Loading WaveRNN model ...") - print(" | > model config: ", model_config) - print(" | > model file: ", model_file) - self.wavernn_config = load_config(model_config) - # This is the default architecture we use for our models. 
- # You might need to update it - self.wavernn = Model( - rnn_dims=512, - fc_dims=512, - mode=self.wavernn_config.mode, - mulaw=self.wavernn_config.mulaw, - pad=self.wavernn_config.pad, - use_aux_net=self.wavernn_config.use_aux_net, - use_upsample_net=self.wavernn_config.use_upsample_net, - upsample_factors=self.wavernn_config.upsample_factors, - feat_dims=80, - compute_dims=128, - res_out_dims=128, - res_blocks=10, - hop_length=self.ap.hop_length, - sample_rate=self.ap.sample_rate, - ).cuda() - - check = torch.load(model_file, map_location="cpu") - self.wavernn.load_state_dict(check['model']) - if use_cuda: - self.wavernn.cuda() - self.wavernn.eval() - - def save_wav(self, wav, path): - # wav *= 32767 / max(1e-8, np.max(np.abs(wav))) - wav = np.array(wav) - self.ap.save_wav(wav, path) - - def split_into_sentences(self, text): - return self.seg.segment(text) - - def tts(self, text, speaker_id=None): - start_time = time.time() - wavs = [] - sens = self.split_into_sentences(text) - print(sens) - speaker_id = id_to_torch(speaker_id) - if speaker_id is not None and self.use_cuda: - speaker_id = speaker_id.cuda() - - for sen in sens: - # preprocess the given text - inputs = text_to_seqvec(sen, self.tts_config) - inputs = numpy_to_torch(inputs, torch.long, cuda=self.use_cuda) - inputs = inputs.unsqueeze(0) - # synthesize voice - _, postnet_output, _, _ = run_model_torch(self.tts_model, inputs, self.tts_config, False, speaker_id, None) - if self.vocoder_model: - # use native vocoder model - vocoder_input = postnet_output[0].transpose(0, 1).unsqueeze(0) - wav = self.vocoder_model.inference(vocoder_input) - if self.use_cuda: - wav = wav.cpu().numpy() - else: - wav = wav.numpy() - wav = wav.flatten() - elif self.wavernn: - # use 3rd paty wavernn - vocoder_input = None - if self.tts_config.model == "Tacotron": - vocoder_input = torch.FloatTensor(self.ap.out_linear_to_mel(linear_spec=postnet_output.T).T).T.unsqueeze(0) - else: - vocoder_input = postnet_output[0].transpose(0, 1).unsqueeze(0) - if self.use_cuda: - vocoder_input.cuda() - wav = self.wavernn.generate(vocoder_input, batched=self.config.is_wavernn_batched, target=11000, overlap=550) - else: - # use GL - if self.use_cuda: - postnet_output = postnet_output[0].cpu() - else: - postnet_output = postnet_output[0] - postnet_output = postnet_output.numpy() - wav = inv_spectrogram(postnet_output, self.ap, self.tts_config) - - # trim silence - wav = trim_silence(wav, self.ap) - - wavs += list(wav) - wavs += [0] * 10000 - - out = io.BytesIO() - self.save_wav(wavs, out) - - # compute stats - process_time = time.time() - start_time - audio_time = len(wavs) / self.tts_config.audio['sample_rate'] - print(f" > Processing time: {process_time}") - print(f" > Real-time factor: {process_time / audio_time}") - return out diff --git a/server/templates/index.html b/server/templates/index.html deleted file mode 100644 index 45b874a9..00000000 --- a/server/templates/index.html +++ /dev/null @@ -1,111 +0,0 @@ - - - - - - - - - - - Mozilla - Text2Speech engine - - - - - - - - - - Fork me on GitHub - - - - - -
[markup of the deleted server/templates/index.html demo page was stripped during extraction; only the "Mozilla TTS" heading text and bare diff markers survive]
- - - - - - - diff --git a/setup.py b/setup.py index 3f02dd09..260aa20f 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ args, unknown_args = parser.parse_known_args() # Remove our arguments from argv so that setuptools doesn't see them sys.argv = [sys.argv[0]] + unknown_args -version = '0.0.3' +version = '0.0.4' # Adapted from https://github.com/pytorch/pytorch cwd = os.path.dirname(os.path.abspath(__file__)) @@ -112,6 +112,8 @@ setup( name='TTS', version=version, url='https://github.com/mozilla/TTS', + author='Eren Gölge', + author_email='egolge@mozilla.com', description='Text to Speech with Deep Learning', license='MPL-2.0', entry_points={ @@ -119,11 +121,7 @@ setup( 'tts-server = TTS.server.server:main' ] }, - package_dir={'': 'tts_namespace'}, - packages=find_packages('tts_namespace'), - package_data={ - 'TTS': package_data, - }, + packages=find_packages(include=['TTS*']), project_urls={ 'Documentation': 'https://github.com/mozilla/TTS/wiki', 'Tracker': 'https://github.com/mozilla/TTS/issues', diff --git a/speaker_encoder/README.md b/speaker_encoder/README.md deleted file mode 100644 index b6f541f8..00000000 --- a/speaker_encoder/README.md +++ /dev/null @@ -1,18 +0,0 @@ -### Speaker Encoder - -This is an implementation of https://arxiv.org/abs/1710.10467. This model can be used for voice and speaker embedding. - -With the code here you can generate d-vectors for both multi-speaker and single-speaker TTS datasets, then visualise and explore them along with the associated audio files in an interactive chart. - -Below is an example showing embedding results of various speakers. You can generate the same plot with the provided notebook as demonstrated in [this video](https://youtu.be/KW3oO7JVa7Q). - -![](umap.png) - -Download a pretrained model from [Released Models](https://github.com/mozilla/TTS/wiki/Released-Models) page. - -To run the code, you need to follow the same flow as in TTS. - -- Define 'config.json' for your needs. Note that, audio parameters should match your TTS model. -- Example training call ```python speaker_encoder/train.py --config_path speaker_encoder/config.json --data_path ~/Data/Libri-TTS/train-clean-360``` -- Generate embedding vectors ```python speaker_encoder/compute_embeddings.py --use_cuda true /model/path/best_model.pth.tar model/config/path/config.json dataset/path/ output_path``` . This code parses all .wav files at the given dataset path and generates the same folder structure under the output path with the generated embedding files. -- Watch training on Tensorboard as in TTS diff --git a/speaker_encoder/__init__.py b/speaker_encoder/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/speaker_encoder/compute_embeddings.py b/speaker_encoder/compute_embeddings.py deleted file mode 100644 index bfa377e3..00000000 --- a/speaker_encoder/compute_embeddings.py +++ /dev/null @@ -1,88 +0,0 @@ -import argparse -import glob -import os - -import numpy as np -from tqdm import tqdm - -import torch -from TTS.speaker_encoder.model import SpeakerEncoder -from TTS.utils.audio import AudioProcessor -from TTS.utils.generic_utils import load_config - -parser = argparse.ArgumentParser( - description='Compute embedding vectors for each wav file in a dataset. 
') -parser.add_argument( - 'model_path', - type=str, - help='Path to model outputs (checkpoint, tensorboard etc.).') -parser.add_argument( - 'config_path', - type=str, - help='Path to config file for training.', -) -parser.add_argument( - 'data_path', - type=str, - help='Data path for wav files - directory or CSV file') -parser.add_argument( - 'output_path', - type=str, - help='path for training outputs.') -parser.add_argument( - '--use_cuda', type=bool, help='flag to set cuda.', default=False -) -parser.add_argument( - '--separator', type=str, help='Separator used in file if CSV is passed for data_path', default='|' -) -args = parser.parse_args() - - -c = load_config(args.config_path) -ap = AudioProcessor(**c['audio']) - -data_path = args.data_path -split_ext = os.path.splitext(data_path) -sep = args.separator - -if len(split_ext) > 0 and split_ext[1].lower() == '.csv': - # Parse CSV - print(f'CSV file: {data_path}') - with open(data_path) as f: - wav_path = os.path.join(os.path.dirname(data_path), 'wavs') - wav_files = [] - print(f'Separator is: {sep}') - for line in f: - components = line.split(sep) - if len(components) != 2: - print("Invalid line") - continue - wav_file = os.path.join(wav_path, components[0] + '.wav') - #print(f'wav_file: {wav_file}') - if os.path.exists(wav_file): - wav_files.append(wav_file) - print(f'Count of wavs imported: {len(wav_files)}') -else: - # Parse all wav files in data_path - wav_path = data_path - wav_files = glob.glob(data_path + '/**/*.wav', recursive=True) - -output_files = [wav_file.replace(wav_path, args.output_path).replace( - '.wav', '.npy') for wav_file in wav_files] - -for output_file in output_files: - os.makedirs(os.path.dirname(output_file), exist_ok=True) - -model = SpeakerEncoder(**c.model) -model.load_state_dict(torch.load(args.model_path)['model']) -model.eval() -if args.use_cuda: - model.cuda() - -for idx, wav_file in enumerate(tqdm(wav_files)): - mel_spec = ap.melspectrogram(ap.load_wav(wav_file)).T - mel_spec = torch.FloatTensor(mel_spec[None, :, :]) - if args.use_cuda: - mel_spec = mel_spec.cuda() - embedd = model.compute_embedding(mel_spec) - np.save(output_files[idx], embedd.detach().cpu().numpy()) diff --git a/speaker_encoder/config.json b/speaker_encoder/config.json deleted file mode 100644 index 0d0f8f68..00000000 --- a/speaker_encoder/config.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "run_name": "libritts_360-half", - "run_description": "train speaker encoder for libritts 360", - "audio": { - // Audio processing parameters - "num_mels": 40, // size of the mel spec frame. - "num_freq": 1025, // number of stft frequency levels. Size of the linear spectogram frame. - "sample_rate": 16000, // DATASET-RELATED: wav sample-rate. If different than the original data, it is resampled. - "frame_length_ms": 50, // stft window length in ms. - "frame_shift_ms": 12.5, // stft window hop-lengh in ms. - "preemphasis": 0.98, // pre-emphasis to reduce spec noise and make it more structured. If 0.0, no -pre-emphasis. - "min_level_db": -100, // normalization range - "ref_level_db": 20, // reference level db, theoretically 20db is the sound of air. - // Normalization parameters - "signal_norm": true, // normalize the spec values in range [0, 1] - "symmetric_norm": true, // move normalization to range [-1, 1] - "max_norm": 4, // scale normalization to range [-max_norm, max_norm] or [0, max_norm] - "clip_norm": true, // clip normalized values into the range. - "mel_fmin": 0.0, // minimum freq level for mel-spec. ~50 for male and ~95 for female voices. 
Tune for dataset!! - "mel_fmax": 8000.0, // maximum freq level for mel-spec. Tune for dataset!! - "do_trim_silence": false // enable trimming of slience of audio as you load it. LJspeech (false), TWEB (false), Nancy (true) - }, - "reinit_layers": [], - "grad_clip": 3.0, // upper limit for gradients for clipping. - "epochs": 1000, // total number of epochs to train. - "lr": 0.0001, // Initial learning rate. If Noam decay is active, maximum learning rate. - "lr_decay": false, // if true, Noam learning rate decaying is applied through training. - "warmup_steps": 4000, // Noam decay steps to increase the learning rate from 0 to "lr" - "tb_model_param_stats": false, // true, plots param stats per layer on tensorboard. Might be memory consuming, but good for debugging. - "steps_plot_stats": 10, // number of steps to plot embeddings. - "num_speakers_in_batch": 32, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'. - "wd": 0.000001, // Weight decay weight. - "checkpoint": true, // If true, it saves checkpoints per "save_step" - "save_step": 1000, // Number of training steps expected to save traning stats and checkpoints. - "print_step": 1, // Number of steps to log traning on console. - "output_path": "/media/erogol/data_ssd/Models/libri_tts/speaker_encoder/", // DATASET-RELATED: output path for all training outputs. - "num_loader_workers": 0, // number of training data loader processes. Don't set it too big. 4-8 are good values. - "model": { - "input_dim": 40, - "proj_dim": 128, - "lstm_dim": 384, - "num_lstm_layers": 3 - }, - "datasets": - [ - { - "name": "libri_tts", - "path": "/home/erogol/Data/Libri-TTS/train-clean-360/", - "meta_file_train": null, - "meta_file_val": null - }, - { - "name": "libri_tts", - "path": "/home/erogol/Data/Libri-TTS/train-clean-100/", - "meta_file_train": null, - "meta_file_val": null - } - ] -} \ No newline at end of file diff --git a/speaker_encoder/dataset.py b/speaker_encoder/dataset.py deleted file mode 100644 index 913b7a6d..00000000 --- a/speaker_encoder/dataset.py +++ /dev/null @@ -1,123 +0,0 @@ -import numpy as np -import torch -import random -from torch.utils.data import Dataset - - -class MyDataset(Dataset): - def __init__(self, ap, meta_data, voice_len=1.6, num_speakers_in_batch=64, - num_utter_per_speaker=10, skip_speakers=False, verbose=False): - """ - Args: - ap (TTS.utils.AudioProcessor): audio processor object. - meta_data (list): list of dataset instances. - seq_len (int): voice segment length in seconds. - verbose (bool): print diagnostic information. 
- """ - self.items = meta_data - self.sample_rate = ap.sample_rate - self.voice_len = voice_len - self.seq_len = int(voice_len * self.sample_rate) - self.num_speakers_in_batch = num_speakers_in_batch - self.num_utter_per_speaker = num_utter_per_speaker - self.skip_speakers = skip_speakers - self.ap = ap - self.verbose = verbose - self.__parse_items() - if self.verbose: - print("\n > DataLoader initialization") - print(f" | > Number of instances : {len(self.items)}") - print(f" | > Sequence length: {self.seq_len}") - print(f" | > Num speakers: {len(self.speakers)}") - - def load_wav(self, filename): - audio = self.ap.load_wav(filename) - return audio - - def load_data(self, idx): - text, wav_file, speaker_name = self.items[idx] - wav = np.asarray(self.load_wav(wav_file), dtype=np.float32) - mel = self.ap.melspectrogram(wav).astype("float32") - # sample seq_len - - assert text.size > 0, self.items[idx][1] - assert wav.size > 0, self.items[idx][1] - - sample = { - "mel": mel, - "item_idx": self.items[idx][1], - "speaker_name": speaker_name, - } - return sample - - def __parse_items(self): - """ - Find unique speaker ids and create a dict mapping utterances from speaker id - """ - speakers = list({item[-1] for item in self.items}) - self.speaker_to_utters = {} - self.speakers = [] - for speaker in speakers: - speaker_utters = [item[1] for item in self.items if item[2] == speaker] - if len(speaker_utters) < self.num_utter_per_speaker and self.skip_speakers: - print( - f" [!] Skipped speaker {speaker}. Not enough utterances {self.num_utter_per_speaker} vs {len(speaker_utters)}." - ) - else: - self.speakers.append(speaker) - self.speaker_to_utters[speaker] = speaker_utters - - def __len__(self): - return int(1e10) - - def __sample_speaker(self): - speaker = random.sample(self.speakers, 1)[0] - if self.num_utter_per_speaker > len(self.speaker_to_utters[speaker]): - utters = random.choices( - self.speaker_to_utters[speaker], k=self.num_utter_per_speaker - ) - else: - utters = random.sample( - self.speaker_to_utters[speaker], self.num_utter_per_speaker - ) - return speaker, utters - - def __sample_speaker_utterances(self, speaker): - """ - Sample all M utterances for the given speaker. 
- """ - feats = [] - labels = [] - for _ in range(self.num_utter_per_speaker): - # TODO:dummy but works - while True: - if len(self.speaker_to_utters[speaker]) > 0: - utter = random.sample(self.speaker_to_utters[speaker], 1)[0] - else: - self.speakers.remove(speaker) - speaker, _ = self.__sample_speaker() - continue - wav = self.load_wav(utter) - if wav.shape[0] - self.seq_len > 0: - break - self.speaker_to_utters[speaker].remove(utter) - - offset = random.randint(0, wav.shape[0] - self.seq_len) - mel = self.ap.melspectrogram(wav[offset : offset + self.seq_len]) - feats.append(torch.FloatTensor(mel)) - labels.append(speaker) - return feats, labels - - def __getitem__(self, idx): - speaker, _ = self.__sample_speaker() - return speaker - - def collate_fn(self, batch): - labels = [] - feats = [] - for speaker in batch: - feats_, labels_ = self.__sample_speaker_utterances(speaker) - labels.append(labels_) - feats.extend(feats_) - feats = torch.stack(feats) - return feats.transpose(1, 2), labels diff --git a/speaker_encoder/generic_utils.py b/speaker_encoder/generic_utils.py deleted file mode 100644 index c568d129..00000000 --- a/speaker_encoder/generic_utils.py +++ /dev/null @@ -1,41 +0,0 @@ -import os -import datetime -import torch - - -def save_checkpoint(model, optimizer, model_loss, out_path, - current_step, epoch): - checkpoint_path = 'checkpoint_{}.pth.tar'.format(current_step) - checkpoint_path = os.path.join(out_path, checkpoint_path) - print(" | | > Checkpoint saving : {}".format(checkpoint_path)) - - new_state_dict = model.state_dict() - state = { - 'model': new_state_dict, - 'optimizer': optimizer.state_dict() if optimizer is not None else None, - 'step': current_step, - 'epoch': epoch, - 'GE2Eloss': model_loss, - 'date': datetime.date.today().strftime("%B %d, %Y"), - } - torch.save(state, checkpoint_path) - - -def save_best_model(model, optimizer, model_loss, best_loss, out_path, - current_step): - if model_loss < best_loss: - new_state_dict = model.state_dict() - state = { - 'model': new_state_dict, - 'optimizer': optimizer.state_dict(), - 'step': current_step, - 'GE2Eloss': model_loss, - 'date': datetime.date.today().strftime("%B %d, %Y"), - } - best_loss = model_loss - bestmodel_path = 'best_model.pth.tar' - bestmodel_path = os.path.join(out_path, bestmodel_path) - print("\n > BEST MODEL ({0:.5f}) : {1:}".format( - model_loss, bestmodel_path)) - torch.save(state, bestmodel_path) - return best_loss \ No newline at end of file diff --git a/speaker_encoder/loss.py b/speaker_encoder/loss.py deleted file mode 100644 index ab290547..00000000 --- a/speaker_encoder/loss.py +++ /dev/null @@ -1,121 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F - - -# adapted from https://github.com/cvqluu/GE2E-Loss -class GE2ELoss(nn.Module): - def __init__(self, init_w=10.0, init_b=-5.0, loss_method="softmax"): - """ - Implementation of the Generalized End-to-End loss defined in https://arxiv.org/abs/1710.10467 [1] - Accepts an input of size (N, M, D) - where N is the number of speakers in the batch, - M is the number of utterances per speaker, - and D is the dimensionality of the embedding vector (e.g. 
d-vector) - Args: - - init_w (float): defines the initial value of w in Equation (5) of [1] - - init_b (float): definies the initial value of b in Equation (5) of [1] - """ - super(GE2ELoss, self).__init__() - # pylint: disable=E1102 - self.w = nn.Parameter(torch.tensor(init_w)) - # pylint: disable=E1102 - self.b = nn.Parameter(torch.tensor(init_b)) - self.loss_method = loss_method - - assert self.loss_method in ["softmax", "contrast"] - - if self.loss_method == "softmax": - self.embed_loss = self.embed_loss_softmax - if self.loss_method == "contrast": - self.embed_loss = self.embed_loss_contrast - - # pylint: disable=R0201 - def calc_new_centroids(self, dvecs, centroids, spkr, utt): - """ - Calculates the new centroids excluding the reference utterance - """ - excl = torch.cat((dvecs[spkr, :utt], dvecs[spkr, utt + 1 :])) - excl = torch.mean(excl, 0) - new_centroids = [] - for i, centroid in enumerate(centroids): - if i == spkr: - new_centroids.append(excl) - else: - new_centroids.append(centroid) - return torch.stack(new_centroids) - - def calc_cosine_sim(self, dvecs, centroids): - """ - Make the cosine similarity matrix with dims (N,M,N) - """ - cos_sim_matrix = [] - for spkr_idx, speaker in enumerate(dvecs): - cs_row = [] - for utt_idx, utterance in enumerate(speaker): - new_centroids = self.calc_new_centroids( - dvecs, centroids, spkr_idx, utt_idx - ) - # vector based cosine similarity for speed - cs_row.append( - torch.clamp( - torch.mm( - utterance.unsqueeze(1).transpose(0, 1), - new_centroids.transpose(0, 1), - ) - / (torch.norm(utterance) * torch.norm(new_centroids, dim=1)), - 1e-6, - ) - ) - cs_row = torch.cat(cs_row, dim=0) - cos_sim_matrix.append(cs_row) - return torch.stack(cos_sim_matrix) - - # pylint: disable=R0201 - def embed_loss_softmax(self, dvecs, cos_sim_matrix): - """ - Calculates the loss on each embedding $L(e_{ji})$ by taking softmax - """ - N, M, _ = dvecs.shape - L = [] - for j in range(N): - L_row = [] - for i in range(M): - L_row.append(-F.log_softmax(cos_sim_matrix[j, i], 0)[j]) - L_row = torch.stack(L_row) - L.append(L_row) - return torch.stack(L) - - # pylint: disable=R0201 - def embed_loss_contrast(self, dvecs, cos_sim_matrix): - """ - Calculates the loss on each embedding $L(e_{ji})$ by contrast loss with closest centroid - """ - N, M, _ = dvecs.shape - L = [] - for j in range(N): - L_row = [] - for i in range(M): - centroids_sigmoids = torch.sigmoid(cos_sim_matrix[j, i]) - excl_centroids_sigmoids = torch.cat( - (centroids_sigmoids[:j], centroids_sigmoids[j + 1 :]) - ) - L_row.append( - 1.0 - - torch.sigmoid(cos_sim_matrix[j, i, j]) - + torch.max(excl_centroids_sigmoids) - ) - L_row = torch.stack(L_row) - L.append(L_row) - return torch.stack(L) - - def forward(self, dvecs): - """ - Calculates the GE2E loss for an input of dimensions (num_speakers, num_utts_per_speaker, dvec_feats) - """ - centroids = torch.mean(dvecs, 1) - cos_sim_matrix = self.calc_cosine_sim(dvecs, centroids) - torch.clamp(self.w, 1e-6) - cos_sim_matrix = self.w * cos_sim_matrix + self.b - L = self.embed_loss(dvecs, cos_sim_matrix) - return L.mean() diff --git a/speaker_encoder/model.py b/speaker_encoder/model.py deleted file mode 100644 index b3bd71ff..00000000 --- a/speaker_encoder/model.py +++ /dev/null @@ -1,88 +0,0 @@ -import torch -from torch import nn - - -class LSTMWithProjection(nn.Module): - def __init__(self, input_size, hidden_size, proj_size): - super().__init__() - self.input_size = input_size - self.hidden_size = hidden_size - self.proj_size = proj_size - self.lstm = 
nn.LSTM(input_size, hidden_size, batch_first=True) - self.linear = nn.Linear(hidden_size, proj_size, bias=False) - - def forward(self, x): - self.lstm.flatten_parameters() - o, (_, _) = self.lstm(x) - return self.linear(o) - - -class SpeakerEncoder(nn.Module): - def __init__(self, input_dim, proj_dim=256, lstm_dim=768, num_lstm_layers=3): - super().__init__() - layers = [] - layers.append(LSTMWithProjection(input_dim, lstm_dim, proj_dim)) - for _ in range(num_lstm_layers - 1): - layers.append(LSTMWithProjection(proj_dim, lstm_dim, proj_dim)) - self.layers = nn.Sequential(*layers) - self._init_layers() - - def _init_layers(self): - for name, param in self.layers.named_parameters(): - if "bias" in name: - nn.init.constant_(param, 0.0) - elif "weight" in name: - nn.init.xavier_normal_(param) - - def forward(self, x): - # TODO: implement state passing for lstms - d = self.layers(x) - d = torch.nn.functional.normalize(d[:, -1], p=2, dim=1) - return d - - def inference(self, x): - d = self.layers.forward(x) - d = torch.nn.functional.normalize(d[:, -1], p=2, dim=1) - return d - - def compute_embedding(self, x, num_frames=160, overlap=0.5): - """ - Generate embeddings for a batch of utterances - x: 1xTxD - """ - num_overlap = int(num_frames * overlap) - max_len = x.shape[1] - embed = None - cur_iter = 0 - for offset in range(0, max_len, num_frames - num_overlap): - cur_iter += 1 - end_offset = min(x.shape[1], offset + num_frames) - frames = x[:, offset:end_offset] - if embed is None: - embed = self.inference(frames) - else: - embed += self.inference(frames) - return embed / cur_iter - - def batch_compute_embedding(self, x, seq_lens, num_frames=160, overlap=0.5): - """ - Generate embeddings for a batch of utterances - x: BxTxD - """ - num_overlap = num_frames * overlap - max_len = x.shape[1] - embed = None - num_iters = seq_lens / (num_frames - num_overlap) - cur_iter = 0 - for offset in range(0, max_len, num_frames - num_overlap): - cur_iter += 1 - end_offset = min(x.shape[1], offset + num_frames) - frames = x[:, offset:end_offset] - if embed is None: - embed = self.inference(frames) - else: - embed[cur_iter <= num_iters, :] += self.inference( - frames[cur_iter <= num_iters, :, :] - ) - return embed / num_iters - diff --git a/speaker_encoder/notebooks/PlotUmapLibriTTS.ipynb b/speaker_encoder/notebooks/PlotUmapLibriTTS.ipynb deleted file mode 100644 index 159f040c..00000000 --- a/speaker_encoder/notebooks/PlotUmapLibriTTS.ipynb +++ /dev/null @@ -1,325 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Overview\n", - "\n", - "This notebook can be used with both a single or multi- speaker corpus and allows the interactive plotting of speaker embeddings linked to underlying audio (see instructions in the repo's speaker_embedding directory)\n", - "\n", - "Depending on the directory structure used for your corpus, you may need to adjust handling of **speaker_to_utter** and **locations**." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import glob\n", - "import random\n", - "import numpy as np\n", - "import torch\n", - "import umap\n", - "\n", - "from TTS.speaker_encoder.model import SpeakerEncoder\n", - "from TTS.utils.audio import AudioProcessor\n", - "from TTS.utils.generic_utils import load_config\n", - "\n", - "from bokeh.io import output_notebook, show\n", - "from bokeh.plotting import figure\n", - "from bokeh.models import HoverTool, ColumnDataSource, BoxZoomTool, ResetTool, OpenURL, TapTool\n", - "from bokeh.transform import factor_cmap, factor_mark\n", - "from bokeh.palettes import Category10" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For larger sets of speakers, you can use **Category20**, but you need to change it in the **pal** variable too\n", - "\n", - "List of Bokeh palettes here: http://docs.bokeh.org/en/1.4.0/docs/reference/palettes.html\n", - "\n", - "**NB:** if you have problems with other palettes, first see https://stackoverflow.com/questions/48333820/why-do-some-bokeh-palettes-raise-a-valueerror-when-used-in-factor-cmap" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "output_notebook()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You should also adjust all the path constants to point at the relevant locations for you locally" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "MODEL_RUN_PATH = \"/media/erogol/data_ssd/Models/libri_tts/speaker_encoder/libritts_360-half-October-31-2019_04+54PM-19d2f5f/\"\n", - "MODEL_PATH = MODEL_RUN_PATH + \"best_model.pth.tar\"\n", - "CONFIG_PATH = MODEL_RUN_PATH + \"config.json\"\n", - "\n", - "# My single speaker locations\n", - "#EMBED_PATH = \"/home/neil/main/Projects/TTS3/embeddings/neil14/\"\n", - "#AUDIO_PATH = \"/home/neil/data/Projects/NeilTTS/neil14/wavs/\"\n", - "\n", - "# My multi speaker locations\n", - "EMBED_PATH = \"/home/erogol/Data/Libri-TTS/train-clean-360-embed_128/\"\n", - "AUDIO_PATH = \"/home/erogol/Data/Libri-TTS/train-clean-360/\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!ls -1 $MODEL_RUN_PATH" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "CONFIG = load_config(CONFIG_PATH)\n", - "ap = AudioProcessor(**CONFIG['audio'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Bring in the embeddings created by **compute_embeddings.py**" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "embed_files = glob.glob(EMBED_PATH+\"/**/*.npy\", recursive=True)\n", - "print(f'Embeddings found: {len(embed_files)}')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Check that we did indeed find an embedding" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "embed_files[0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Process the speakers\n", - "\n", - "Assumes count of **speaker_paths** corresponds to number of speakers (so a corpus in just one directory would be treated like a single speaker and the multiple directories of LibriTTS are treated as distinct speakers)" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "speaker_paths = list(set([os.path.dirname(os.path.dirname(embed_file)) for embed_file in embed_files]))\n", - "speaker_to_utter = {}\n", - "for embed_file in embed_files:\n", - " speaker_path = os.path.dirname(os.path.dirname(embed_file))\n", - " try:\n", - " speaker_to_utter[speaker_path].append(embed_file)\n", - " except:\n", - " speaker_to_utter[speaker_path]=[embed_file]\n", - "print(f'Speaker count: {len(speaker_paths)}')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Set up the embeddings\n", - "\n", - "Adjust the number of speakers to select and the number of utterances from each speaker and they will be randomly sampled from the corpus" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "embeds = []\n", - "labels = []\n", - "locations = []\n", - "\n", - "# single speaker \n", - "#num_speakers = 1\n", - "#num_utters = 1000\n", - "\n", - "# multi speaker\n", - "num_speakers = 10\n", - "num_utters = 20\n", - "\n", - "\n", - "speaker_idxs = np.random.choice(range(len(speaker_paths)), num_speakers, replace=False )\n", - "\n", - "for speaker_num, speaker_idx in enumerate(speaker_idxs):\n", - " speaker_path = speaker_paths[speaker_idx]\n", - " speakers_utter = speaker_to_utter[speaker_path]\n", - " utter_idxs = np.random.randint(0, len(speakers_utter) , num_utters)\n", - " for utter_idx in utter_idxs:\n", - " embed_path = speaker_to_utter[speaker_path][utter_idx]\n", - " embed = np.load(embed_path)\n", - " embeds.append(embed)\n", - " labels.append(str(speaker_num))\n", - " locations.append(embed_path.replace(EMBED_PATH, '').replace('.npy','.wav'))\n", - "embeds = np.concatenate(embeds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Load embeddings with UMAP" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "model = umap.UMAP()\n", - "projection = model.fit_transform(embeds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Interactively charting the data in Bokeh\n", - "\n", - "Set up various details for Bokeh to plot the data\n", - "\n", - "You can use the regular Bokeh [tools](http://docs.bokeh.org/en/1.4.0/docs/user_guide/tools.html?highlight=tools) to explore the data, with reset setting it back to normal\n", - "\n", - "Once you have started the local server (see cell below) you can then click on plotted points which will open a tab to play the audio for that point, enabling easy exploration of your corpus\n", - "\n", - "File location in the tooltip is given relative to **AUDIO_PATH**" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "source_wav_stems = ColumnDataSource(\n", - " data=dict(\n", - " x = projection.T[0].tolist(),\n", - " y = projection.T[1].tolist(),\n", - " desc=locations,\n", - " label=labels\n", - " )\n", - " )\n", - "\n", - "hover = HoverTool(\n", - " tooltips=[\n", - " (\"file\", \"@desc\"),\n", - " (\"speaker\", \"@label\"),\n", - " ]\n", - " )\n", - "\n", - "# optionally consider adding these to the tooltips if you want additional detail\n", - "# for the coordinates: (\"(x,y)\", \"($x, $y)\"),\n", - "# for the index of the embedding / wav file: (\"index\", \"$index\"),\n", - "\n", - "factors = list(set(labels))\n", - "pal_size = max(len(factors), 3)\n", - "pal = 
Category10[pal_size]\n", - "\n", - "p = figure(plot_width=600, plot_height=400, tools=[hover,BoxZoomTool(), ResetTool(), TapTool()])\n", - "\n", - "\n", - "p.circle('x', 'y', source=source_wav_stems, color=factor_cmap('label', palette=pal, factors=factors),)\n", - "\n", - "url = \"http://localhost:8000/@desc\"\n", - "taptool = p.select(type=TapTool)\n", - "taptool.callback = OpenURL(url=url)\n", - "\n", - "show(p)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Local server to serve wav files from corpus\n", - "\n", - "This is required so that when you click on a data point the hyperlink associated with it will be served the file locally.\n", - "\n", - "There are other ways to serve this if you prefer and you can also run the commands manually on the command line\n", - "\n", - "The server will continue to run until stopped. To stop it simply interupt the kernel (ie square button or under Kernel menu)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%cd $AUDIO_PATH\n", - "%pwd\n", - "!python -m http.server" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/speaker_encoder/requirements.txt b/speaker_encoder/requirements.txt deleted file mode 100644 index a486cc45..00000000 --- a/speaker_encoder/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -umap-learn -numpy>=1.17.0 diff --git a/speaker_encoder/train.py b/speaker_encoder/train.py deleted file mode 100644 index f74d0880..00000000 --- a/speaker_encoder/train.py +++ /dev/null @@ -1,252 +0,0 @@ -import argparse -import os -import sys -import time -import traceback - -import torch -from torch.utils.data import DataLoader -from TTS.datasets.preprocess import load_meta_data -from TTS.speaker_encoder.dataset import MyDataset -from TTS.speaker_encoder.loss import GE2ELoss -from TTS.speaker_encoder.model import SpeakerEncoder -from TTS.speaker_encoder.visual import plot_embeddings -from TTS.speaker_encoder.generic_utils import save_best_model -from TTS.utils.audio import AudioProcessor -from TTS.utils.generic_utils import (create_experiment_folder, get_git_branch, - remove_experiment_folder, set_init_dict) -from TTS.utils.io import load_config, copy_config_file -from TTS.utils.training import check_update, NoamLR -from TTS.utils.tensorboard_logger import TensorboardLogger -from TTS.utils.radam import RAdam - -torch.backends.cudnn.enabled = True -torch.backends.cudnn.benchmark = True -torch.manual_seed(54321) -use_cuda = torch.cuda.is_available() -num_gpus = torch.cuda.device_count() -print(" > Using CUDA: ", use_cuda) -print(" > Number of GPUs: ", num_gpus) - - -def setup_loader(ap, is_val=False, verbose=False): - if is_val: - loader = None - else: - dataset = MyDataset(ap, - meta_data_eval if is_val else meta_data_train, - voice_len=1.6, - num_utter_per_speaker=10, - skip_speakers=False, - verbose=verbose) - # sampler = DistributedSampler(dataset) if num_gpus > 1 else None - loader = DataLoader(dataset, - batch_size=c.num_speakers_in_batch, - shuffle=False, - num_workers=c.num_loader_workers, - collate_fn=dataset.collate_fn) - return loader - - -def train(model, 
criterion, optimizer, scheduler, ap, global_step): - data_loader = setup_loader(ap, is_val=False, verbose=True) - model.train() - epoch_time = 0 - best_loss = float('inf') - avg_loss = 0 - end_time = time.time() - for _, data in enumerate(data_loader): - start_time = time.time() - - # setup input data - inputs = data[0] - loader_time = time.time() - end_time - global_step += 1 - - # setup lr - if c.lr_decay: - scheduler.step() - optimizer.zero_grad() - - # dispatch data to GPU - if use_cuda: - inputs = inputs.cuda(non_blocking=True) - # labels = labels.cuda(non_blocking=True) - - # forward pass model - outputs = model(inputs) - - # loss computation - loss = criterion( - outputs.view(c.num_speakers_in_batch, - outputs.shape[0] // c.num_speakers_in_batch, -1)) - loss.backward() - grad_norm, _ = check_update(model, c.grad_clip) - optimizer.step() - - step_time = time.time() - start_time - epoch_time += step_time - - avg_loss = 0.01 * loss.item( - ) + 0.99 * avg_loss if avg_loss != 0 else loss.item() - current_lr = optimizer.param_groups[0]['lr'] - - if global_step % c.steps_plot_stats == 0: - # Plot Training Epoch Stats - train_stats = { - "GE2Eloss": avg_loss, - "lr": current_lr, - "grad_norm": grad_norm, - "step_time": step_time - } - tb_logger.tb_train_epoch_stats(global_step, train_stats) - figures = { - # FIXME: not constant - "UMAP Plot": plot_embeddings(outputs.detach().cpu().numpy(), - 10), - } - tb_logger.tb_train_figures(global_step, figures) - - if global_step % c.print_step == 0: - print( - " | > Step:{} Loss:{:.5f} AvgLoss:{:.5f} GradNorm:{:.5f} " - "StepTime:{:.2f} LoaderTime:{:.2f} LR:{:.6f}".format( - global_step, loss.item(), avg_loss, grad_norm, step_time, - loader_time, current_lr), - flush=True) - - # save best model - best_loss = save_best_model(model, optimizer, avg_loss, best_loss, - OUT_PATH, global_step) - - end_time = time.time() - return avg_loss, global_step - - -def main(args): # pylint: disable=redefined-outer-name - # pylint: disable=global-variable-undefined - global meta_data_train - global meta_data_eval - - ap = AudioProcessor(**c.audio) - model = SpeakerEncoder(input_dim=40, - proj_dim=128, - lstm_dim=384, - num_lstm_layers=3) - optimizer = RAdam(model.parameters(), lr=c.lr) - criterion = GE2ELoss(loss_method='softmax') - - if args.restore_path: - checkpoint = torch.load(args.restore_path) - try: - # TODO: fix optimizer init, model.cuda() needs to be called before - # optimizer restore - # optimizer.load_state_dict(checkpoint['optimizer']) - if c.reinit_layers: - raise RuntimeError - model.load_state_dict(checkpoint['model']) - except KeyError: - print(" > Partial model initialization.") - model_dict = model.state_dict() - model_dict = set_init_dict(model_dict, checkpoint, c) - model.load_state_dict(model_dict) - del model_dict - for group in optimizer.param_groups: - group['lr'] = c.lr - print(" > Model restored from step %d" % checkpoint['step'], - flush=True) - args.restore_step = checkpoint['step'] - else: - args.restore_step = 0 - - if use_cuda: - model = model.cuda() - criterion.cuda() - - if c.lr_decay: - scheduler = NoamLR(optimizer, - warmup_steps=c.warmup_steps, - last_epoch=args.restore_step - 1) - else: - scheduler = None - - num_params = count_parameters(model) - print("\n > Model has {} parameters".format(num_params), flush=True) - - # pylint: disable=redefined-outer-name - meta_data_train, meta_data_eval = load_meta_data(c.datasets) - - global_step = args.restore_step - train_loss, global_step = train(model, criterion, optimizer, scheduler, 
ap, - global_step) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument( - '--restore_path', - type=str, - help='Path to model outputs (checkpoint, tensorboard etc.).', - default=0) - parser.add_argument( - '--config_path', - type=str, - help='Path to config file for training.', - ) - parser.add_argument('--debug', - type=bool, - default=True, - help='Do not verify commit integrity to run training.') - parser.add_argument( - '--data_path', - type=str, - default='', - help='Defines the data path. It overwrites config.json.') - parser.add_argument('--output_path', - type=str, - help='path for training outputs.', - default='') - parser.add_argument('--output_folder', - type=str, - default='', - help='folder name for training outputs.') - args = parser.parse_args() - - # setup output paths and read configs - c = load_config(args.config_path) - _ = os.path.dirname(os.path.realpath(__file__)) - if args.data_path != '': - c.data_path = args.data_path - - if args.output_path == '': - OUT_PATH = os.path.join(_, c.output_path) - else: - OUT_PATH = args.output_path - - if args.output_folder == '': - OUT_PATH = create_experiment_folder(OUT_PATH, c.run_name, args.debug) - else: - OUT_PATH = os.path.join(OUT_PATH, args.output_folder) - - new_fields = {} - if args.restore_path: - new_fields["restore_path"] = args.restore_path - new_fields["github_branch"] = get_git_branch() - copy_config_file(args.config_path, os.path.join(OUT_PATH, 'config.json'), - new_fields) - - LOG_DIR = OUT_PATH - tb_logger = TensorboardLogger(LOG_DIR) - - try: - main(args) - except KeyboardInterrupt: - remove_experiment_folder(OUT_PATH) - try: - sys.exit(0) - except SystemExit: - os._exit(0) # pylint: disable=protected-access - except Exception: # pylint: disable=broad-except - remove_experiment_folder(OUT_PATH) - traceback.print_exc() - sys.exit(1) diff --git a/speaker_encoder/umap.png b/speaker_encoder/umap.png deleted file mode 100644 index ca8aefea..00000000 Binary files a/speaker_encoder/umap.png and /dev/null differ diff --git a/speaker_encoder/visual.py b/speaker_encoder/visual.py deleted file mode 100644 index 68c48f12..00000000 --- a/speaker_encoder/visual.py +++ /dev/null @@ -1,46 +0,0 @@ -import umap -import numpy as np -import matplotlib -import matplotlib.pyplot as plt - -matplotlib.use("Agg") - - -colormap = ( - np.array( - [ - [76, 255, 0], - [0, 127, 70], - [255, 0, 0], - [255, 217, 38], - [0, 135, 255], - [165, 0, 165], - [255, 167, 255], - [0, 255, 255], - [255, 96, 38], - [142, 76, 0], - [33, 0, 127], - [0, 0, 0], - [183, 183, 183], - ], - dtype=np.float, - ) - / 255 -) - - -def plot_embeddings(embeddings, num_utter_per_speaker): - embeddings = embeddings[: 10 * num_utter_per_speaker] - model = umap.UMAP() - projection = model.fit_transform(embeddings) - num_speakers = embeddings.shape[0] // num_utter_per_speaker - ground_truth = np.repeat(np.arange(num_speakers), num_utter_per_speaker) - colors = [colormap[i] for i in ground_truth] - - fig, ax = plt.subplots(figsize=(16, 10)) - _ = ax.scatter(projection[:, 0], projection[:, 1], c=colors) - plt.gca().set_aspect("equal", "datalim") - plt.title("UMAP projection") - plt.tight_layout() - plt.savefig("umap") - return fig diff --git a/synthesize.py b/synthesize.py deleted file mode 100644 index 18048c2f..00000000 --- a/synthesize.py +++ /dev/null @@ -1,182 +0,0 @@ -# pylint: disable=redefined-outer-name, unused-argument -import os -import time -import argparse -import torch -import json -import string - -from TTS.utils.synthesis 
import synthesis -from TTS.utils.generic_utils import setup_model -from TTS.utils.io import load_config -from TTS.utils.text.symbols import make_symbols, symbols, phonemes -from TTS.utils.audio import AudioProcessor - - -def tts(model, - vocoder_model, - C, - VC, - text, - ap, - ap_vocoder, - use_cuda, - batched_vocoder, - speaker_id=None, - figures=False): - t_1 = time.time() - use_vocoder_model = vocoder_model is not None - waveform, alignment, _, postnet_output, stop_tokens, _ = synthesis( - model, text, C, use_cuda, ap, speaker_id, style_wav=False, - truncated=False, enable_eos_bos_chars=C.enable_eos_bos_chars, - use_griffin_lim=(not use_vocoder_model), do_trim_silence=True) - - if C.model == "Tacotron" and use_vocoder_model: - postnet_output = ap.out_linear_to_mel(postnet_output.T).T - # correct if there is a scale difference b/w two models - if use_vocoder_model: - postnet_output = ap._denormalize(postnet_output) - postnet_output = ap_vocoder._normalize(postnet_output) - vocoder_input = torch.FloatTensor(postnet_output.T).unsqueeze(0) - waveform = vocoder_model.generate( - vocoder_input.cuda() if use_cuda else vocoder_input, - batched=batched_vocoder, - target=8000, - overlap=400) - print(" > Run-time: {}".format(time.time() - t_1)) - return alignment, postnet_output, stop_tokens, waveform - - -if __name__ == "__main__": - - global symbols, phonemes - - parser = argparse.ArgumentParser() - parser.add_argument('text', type=str, help='Text to generate speech.') - parser.add_argument('config_path', - type=str, - help='Path to model config file.') - parser.add_argument( - 'model_path', - type=str, - help='Path to model file.', - ) - parser.add_argument( - 'out_path', - type=str, - help='Path to save final wav file. Wav file will be names as the text given.', - ) - parser.add_argument('--use_cuda', - type=bool, - help='Run model on CUDA.', - default=False) - parser.add_argument( - '--vocoder_path', - type=str, - help= - 'Path to vocoder model file. If it is not defined, model uses GL as vocoder. Please make sure that you installed vocoder library before (WaveRNN).', - default="", - ) - parser.add_argument('--vocoder_config_path', - type=str, - help='Path to vocoder model config file.', - default="") - parser.add_argument( - '--batched_vocoder', - type=bool, - help="If True, vocoder model uses faster batch processing.", - default=True) - parser.add_argument('--speakers_json', - type=str, - help="JSON file for multi-speaker model.", - default="") - parser.add_argument( - '--speaker_id', - type=int, - help="target speaker_id if the model is multi-speaker.", - default=None) - args = parser.parse_args() - - if args.vocoder_path != "": - assert args.use_cuda, " [!] Enable cuda for vocoder." 
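
For reference, the Griffin-Lim-only path of this removed script reduces to the sketch below. The config and checkpoint paths are placeholders, the import paths are the pre-refactor ones the script itself uses, and a default (non-custom) character set is assumed:

```python
# Hedged sketch of synthesize.py without a neural vocoder:
# text -> spectrogram -> Griffin-Lim waveform.
import torch

from TTS.utils.audio import AudioProcessor
from TTS.utils.generic_utils import setup_model
from TTS.utils.io import load_config
from TTS.utils.synthesis import synthesis
from TTS.utils.text.symbols import phonemes, symbols

C = load_config("config.json")                               # placeholder config path
C.forward_attn_mask = True
ap = AudioProcessor(**C.audio)

num_chars = len(phonemes) if C.use_phonemes else len(symbols)
model = setup_model(num_chars, 0, C)                         # 0 -> single-speaker model
cp = torch.load("best_model.pth.tar", map_location="cpu")    # placeholder checkpoint
model.load_state_dict(cp["model"])
model.decoder.set_r(cp["r"])
model.eval()

waveform, alignment, _, postnet_output, stop_tokens, _ = synthesis(
    model, "Hello world.", C, False, ap, None, style_wav=False, truncated=False,
    enable_eos_bos_chars=C.enable_eos_bos_chars, use_griffin_lim=True,
    do_trim_silence=True)
ap.save_wav(waveform, "hello_world.wav")
```
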
- from WaveRNN.models.wavernn import Model as VocoderModel - - # load the config - C = load_config(args.config_path) - C.forward_attn_mask = True - - # load the audio processor - ap = AudioProcessor(**C.audio) - - # if the vocabulary was passed, replace the default - if 'characters' in C.keys(): - symbols, phonemes = make_symbols(**C.characters) - - # load speakers - if args.speakers_json != '': - speakers = json.load(open(args.speakers_json, 'r')) - num_speakers = len(speakers) - else: - num_speakers = 0 - - # load the model - num_chars = len(phonemes) if C.use_phonemes else len(symbols) - model = setup_model(num_chars, num_speakers, C) - cp = torch.load(args.model_path) - model.load_state_dict(cp['model']) - model.eval() - if args.use_cuda: - model.cuda() - model.decoder.set_r(cp['r']) - - # load vocoder model - if args.vocoder_path != "": - VC = load_config(args.vocoder_config_path) - ap_vocoder = AudioProcessor(**VC.audio) - bits = 10 - vocoder_model = VocoderModel(rnn_dims=512, - fc_dims=512, - mode=VC.mode, - mulaw=VC.mulaw, - pad=VC.pad, - upsample_factors=VC.upsample_factors, - feat_dims=VC.audio["num_mels"], - compute_dims=128, - res_out_dims=128, - res_blocks=10, - hop_length=ap.hop_length, - sample_rate=ap.sample_rate, - use_aux_net=True, - use_upsample_net=True) - - check = torch.load(args.vocoder_path) - vocoder_model.load_state_dict(check['model']) - vocoder_model.eval() - if args.use_cuda: - vocoder_model.cuda() - else: - vocoder_model = None - VC = None - ap_vocoder = None - - # synthesize voice - print(" > Text: {}".format(args.text)) - _, _, _, wav = tts(model, - vocoder_model, - C, - VC, - args.text, - ap, - ap_vocoder, - args.use_cuda, - args.batched_vocoder, - speaker_id=args.speaker_id, - figures=False) - - # save the results - file_name = args.text.replace(" ", "_") - file_name = file_name.translate( - str.maketrans('', '', string.punctuation.replace('_', ''))) + '.wav' - out_path = os.path.join(args.out_path, file_name) - print(" > Saving output to {}".format(out_path)) - ap.save_wav(wav, out_path) diff --git a/tests/generic_utils_text.py b/tests/generic_utils_text.py index 228df2df..19c48647 100644 --- a/tests/generic_utils_text.py +++ b/tests/generic_utils_text.py @@ -1,8 +1,8 @@ import unittest import torch as T -from TTS.utils.generic_utils import save_checkpoint, save_best_model -from TTS.layers.tacotron import Prenet +from TTS.tts.utils.generic_utils import save_checkpoint, save_best_model +from TTS.tts.layers.tacotron import Prenet OUT_PATH = '/tmp/test.pth.tar' diff --git a/tests/test_config.json b/tests/inputs/test_config.json similarity index 100% rename from tests/test_config.json rename to tests/inputs/test_config.json diff --git a/config.json b/tests/inputs/test_train_config.json similarity index 83% rename from config.json rename to tests/inputs/test_train_config.json index 23868a33..e43903ce 100644 --- a/config.json +++ b/tests/inputs/test_train_config.json @@ -1,149 +1,151 @@ -{ - "model": "Tacotron2", - "run_name": "ljspeech-ddc-bn", - "run_description": "tacotron2 with ddc and batch-normalization", - - // AUDIO PARAMETERS - "audio":{ - // stft parameters - "fft_size": 1024, // number of stft frequency levels. Size of the linear spectogram frame. - "win_length": 1024, // stft window length in ms. - "hop_length": 256, // stft window hop-lengh in ms. - "frame_length_ms": null, // stft window length in ms.If null, 'win_length' is used. - "frame_shift_ms": null, // stft window hop-lengh in ms. If null, 'hop_length' is used. 
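
Despite the "in ms" wording in the comments, `win_length` and `hop_length` above are sample counts (the `frame_length_ms` / `frame_shift_ms` keys are the millisecond variants). Converted at the configured 22050 Hz they correspond to roughly a 46 ms window and a 12 ms hop:

```python
# Convert the sample-domain STFT settings above into milliseconds.
sample_rate, win_length, hop_length = 22050, 1024, 256
print(1000 * win_length / sample_rate)  # ~46.4 ms analysis window
print(1000 * hop_length / sample_rate)  # ~11.6 ms hop
```
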
- - // Audio processing parameters - "sample_rate": 22050, // DATASET-RELATED: wav sample-rate. - "preemphasis": 0.0, // pre-emphasis to reduce spec noise and make it more structured. If 0.0, no -pre-emphasis. - "ref_level_db": 20, // reference level db, theoretically 20db is the sound of air. - - // Silence trimming - "do_trim_silence": true,// enable trimming of slience of audio as you load it. LJspeech (true), TWEB (false), Nancy (true) - "trim_db": 60, // threshold for timming silence. Set this according to your dataset. - - // Griffin-Lim - "power": 1.5, // value to sharpen wav signals after GL algorithm. - "griffin_lim_iters": 60,// #griffin-lim iterations. 30-60 is a good range. Larger the value, slower the generation. - - // MelSpectrogram parameters - "num_mels": 80, // size of the mel spec frame. - "mel_fmin": 0.0, // minimum freq level for mel-spec. ~50 for male and ~95 for female voices. Tune for dataset!! - "mel_fmax": 8000.0, // maximum freq level for mel-spec. Tune for dataset!! - "spec_gain": 20.0, - - // Normalization parameters - "signal_norm": true, // normalize spec values. Mean-Var normalization if 'stats_path' is defined otherwise range normalization defined by the other params. - "min_level_db": -100, // lower bound for normalization - "symmetric_norm": true, // move normalization to range [-1, 1] - "max_norm": 4.0, // scale normalization to range [-max_norm, max_norm] or [0, max_norm] - "clip_norm": true, // clip normalized values into the range. - "stats_path": null // DO NOT USE WITH MULTI_SPEAKER MODEL. scaler stats file computed by 'compute_statistics.py'. If it is defined, mean-std based notmalization is used and other normalization params are ignored - }, - - // VOCABULARY PARAMETERS - // if custom character set is not defined, - // default set in symbols.py is used - // "characters":{ - // "pad": "_", - // "eos": "~", - // "bos": "^", - // "characters": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!'(),-.:;? ", - // "punctuations":"!'(),-.:;? ", - // "phonemes":"iyɨʉɯuɪʏʊeøɘəɵɤoɛœɜɞʌɔæɐaɶɑɒᵻʘɓǀɗǃʄǂɠǁʛpbtdʈɖcɟkɡqɢʔɴŋɲɳnɱmʙrʀⱱɾɽɸβfvθðszʃʒʂʐçʝxɣχʁħʕhɦɬɮʋɹɻjɰlɭʎʟˈˌːˑʍwɥʜʢʡɕʑɺɧɚ˞ɫ" - // }, - - // DISTRIBUTED TRAINING - "distributed":{ - "backend": "nccl", - "url": "tcp:\/\/localhost:54321" - }, - - "reinit_layers": [], // give a list of layer names to restore from the given checkpoint. If not defined, it reloads all heuristically matching layers. - - // TRAINING - "batch_size": 32, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'. - "eval_batch_size":16, - "r": 7, // Number of decoder frames to predict per iteration. Set the initial values if gradual training is enabled. - "gradual_training": [[0, 7, 64], [1, 5, 64], [50000, 3, 32], [130000, 2, 32], [290000, 1, 32]], //set gradual training steps [first_step, r, batch_size]. If it is null, gradual training is disabled. For Tacotron, you might need to reduce the 'batch_size' as you proceeed. - "loss_masking": true, // enable / disable loss masking against the sequence padding. - "ga_alpha": 10.0, // weight for guided attention loss. If > 0, guided attention is enabled. - - // VALIDATION - "run_eval": true, - "test_delay_epochs": 10, //Until attention is aligned, testing only wastes computation time. - "test_sentences_file": null, // set a file to load sentences to be used for testing. If it is null then we use default english sentences. - - // OPTIMIZER - "noam_schedule": false, // use noam warmup and lr schedule. 
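
The `gradual_training` entry above is a list of `[first_step, r, batch_size]` triplets. The exact lookup lives in the training code, so the following is only an illustration of how the active pair is resolved for a given global step, assuming the entries are sorted by `first_step`:

```python
# Illustration only: resolve (r, batch_size) from a gradual_training schedule.
def gradual_values(schedule, global_step):
    r, batch_size = schedule[0][1], schedule[0][2]
    for first_step, new_r, new_batch_size in schedule:
        if global_step >= first_step:
            r, batch_size = new_r, new_batch_size
    return r, batch_size

schedule = [[0, 7, 64], [1, 5, 64], [50000, 3, 32], [130000, 2, 32], [290000, 1, 32]]
print(gradual_values(schedule, 60000))  # -> (3, 32)
```
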
- "grad_clip": 1.0, // upper limit for gradients for clipping. - "epochs": 1000, // total number of epochs to train. - "lr": 0.0001, // Initial learning rate. If Noam decay is active, maximum learning rate. - "wd": 0.000001, // Weight decay weight. - "warmup_steps": 4000, // Noam decay steps to increase the learning rate from 0 to "lr" - "seq_len_norm": false, // Normalize eash sample loss with its length to alleviate imbalanced datasets. Use it if your dataset is small or has skewed distribution of sequence lengths. - - // TACOTRON PRENET - "memory_size": -1, // ONLY TACOTRON - size of the memory queue used fro storing last decoder predictions for auto-regression. If < 0, memory queue is disabled and decoder only uses the last prediction frame. - "prenet_type": "bn", // "original" or "bn". - "prenet_dropout": false, // enable/disable dropout at prenet. - - // TACOTRON ATTENTION - "attention_type": "original", // 'original' or 'graves' - "attention_heads": 4, // number of attention heads (only for 'graves') - "attention_norm": "sigmoid", // softmax or sigmoid. - "windowing": false, // Enables attention windowing. Used only in eval mode. - "use_forward_attn": false, // if it uses forward attention. In general, it aligns faster. - "forward_attn_mask": false, // Additional masking forcing monotonicity only in eval mode. - "transition_agent": false, // enable/disable transition agent of forward attention. - "location_attn": true, // enable_disable location sensitive attention. It is enabled for TACOTRON by default. - "bidirectional_decoder": false, // use https://arxiv.org/abs/1907.09006. Use it, if attention does not work well with your dataset. - "double_decoder_consistency": true, // use DDC explained here https://erogol.com/solving-attention-problems-of-tts-models-with-double-decoder-consistency-draft/ - "ddc_r": 7, // reduction rate for coarse decoder. - - // STOPNET - "stopnet": true, // Train stopnet predicting the end of synthesis. - "separate_stopnet": true, // Train stopnet seperately if 'stopnet==true'. It prevents stopnet loss to influence the rest of the model. It causes a better model, but it trains SLOWER. - - // TENSORBOARD and LOGGING - "print_step": 25, // Number of steps to log training on console. - "tb_plot_step": 100, // Number of steps to plot TB training figures. - "print_eval": false, // If True, it prints intermediate loss values in evalulation. - "save_step": 10000, // Number of training steps expected to save traninpg stats and checkpoints. - "checkpoint": true, // If true, it saves checkpoints per "save_step" - "tb_model_param_stats": false, // true, plots param stats per layer on tensorboard. Might be memory consuming, but good for debugging. - - // DATA LOADING - "text_cleaner": "phoneme_cleaners", - "enable_eos_bos_chars": false, // enable/disable beginning of sentence and end of sentence chars. - "num_loader_workers": 4, // number of training data loader processes. Don't set it too big. 4-8 are good values. - "num_val_loader_workers": 4, // number of evaluation data loader processes. - "batch_group_size": 0, //Number of batches to shuffle after bucketing. - "min_seq_len": 6, // DATASET-RELATED: minimum text length to use in training - "max_seq_len": 153, // DATASET-RELATED: maximum text length - - // PATHS - "output_path": "/home/erogol/Models/LJSpeech/", - - // PHONEMES - "phoneme_cache_path": "/media/erogol/data_ssd2/mozilla_us_phonemes_3", // phoneme computation is slow, therefore, it caches results in the given folder. 
- "use_phonemes": true, // use phonemes instead of raw characters. It is suggested for better pronounciation. - "phoneme_language": "en-us", // depending on your target language, pick one from https://github.com/bootphon/phonemizer#languages - - // MULTI-SPEAKER and GST - "use_speaker_embedding": false, // use speaker embedding to enable multi-speaker learning. - "style_wav_for_test": null, // path to style wav file to be used in TacotronGST inference. - "use_gst": false, // TACOTRON ONLY: use global style tokens - - // DATASETS - "datasets": // List of datasets. They all merged and they get different speaker_ids. - [ - { - "name": "ljspeech", - "path": "/home/erogol/Data/LJSpeech-1.1/", - "meta_file_train": "metadata.csv", - "meta_file_val": null - } - ] - -} - +{ + "model": "Tacotron2", + "run_name": "test_sample_dataset_run", + "run_description": "sample dataset test run", + + // AUDIO PARAMETERS + "audio":{ + // stft parameters + "fft_size": 1024, // number of stft frequency levels. Size of the linear spectogram frame. + "win_length": 1024, // stft window length in ms. + "hop_length": 256, // stft window hop-lengh in ms. + "frame_length_ms": null, // stft window length in ms.If null, 'win_length' is used. + "frame_shift_ms": null, // stft window hop-lengh in ms. If null, 'hop_length' is used. + + // Audio processing parameters + "sample_rate": 22050, // DATASET-RELATED: wav sample-rate. + "preemphasis": 0.0, // pre-emphasis to reduce spec noise and make it more structured. If 0.0, no -pre-emphasis. + "ref_level_db": 20, // reference level db, theoretically 20db is the sound of air. + + // Silence trimming + "do_trim_silence": true,// enable trimming of slience of audio as you load it. LJspeech (true), TWEB (false), Nancy (true) + "trim_db": 60, // threshold for timming silence. Set this according to your dataset. + + // Griffin-Lim + "power": 1.5, // value to sharpen wav signals after GL algorithm. + "griffin_lim_iters": 60,// #griffin-lim iterations. 30-60 is a good range. Larger the value, slower the generation. + + // MelSpectrogram parameters + "num_mels": 80, // size of the mel spec frame. + "mel_fmin": 0.0, // minimum freq level for mel-spec. ~50 for male and ~95 for female voices. Tune for dataset!! + "mel_fmax": 8000.0, // maximum freq level for mel-spec. Tune for dataset!! + "spec_gain": 20.0, + + // Normalization parameters + "signal_norm": true, // normalize spec values. Mean-Var normalization if 'stats_path' is defined otherwise range normalization defined by the other params. + "min_level_db": -100, // lower bound for normalization + "symmetric_norm": true, // move normalization to range [-1, 1] + "max_norm": 4.0, // scale normalization to range [-max_norm, max_norm] or [0, max_norm] + "clip_norm": true, // clip normalized values into the range. + "stats_path": null // DO NOT USE WITH MULTI_SPEAKER MODEL. scaler stats file computed by 'compute_statistics.py'. If it is defined, mean-std based notmalization is used and other normalization params are ignored + }, + + // VOCABULARY PARAMETERS + // if custom character set is not defined, + // default set in symbols.py is used + // "characters":{ + // "pad": "_", + // "eos": "~", + // "bos": "^", + // "characters": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!'(),-.:;? ", + // "punctuations":"!'(),-.:;? 
", + // "phonemes":"iyɨʉɯuɪʏʊeøɘəɵɤoɛœɜɞʌɔæɐaɶɑɒᵻʘɓǀɗǃʄǂɠǁʛpbtdʈɖcɟkɡqɢʔɴŋɲɳnɱmʙrʀⱱɾɽɸβfvθðszʃʒʂʐçʝxɣχʁħʕhɦɬɮʋɹɻjɰlɭʎʟˈˌːˑʍwɥʜʢʡɕʑɺɧɚ˞ɫ" + // }, + + // DISTRIBUTED TRAINING + "distributed":{ + "backend": "nccl", + "url": "tcp:\/\/localhost:54321" + }, + + "reinit_layers": [], // give a list of layer names to restore from the given checkpoint. If not defined, it reloads all heuristically matching layers. + + // TRAINING + "batch_size": 1, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'. + "eval_batch_size":1, + "r": 7, // Number of decoder frames to predict per iteration. Set the initial values if gradual training is enabled. + "gradual_training": [[0, 7, 4]], //set gradual training steps [first_step, r, batch_size]. If it is null, gradual training is disabled. For Tacotron, you might need to reduce the 'batch_size' as you proceeed. + "loss_masking": true, // enable / disable loss masking against the sequence padding. + "ga_alpha": 10.0, // weight for guided attention loss. If > 0, guided attention is enabled. + + // VALIDATION + "run_eval": true, + "test_delay_epochs": 0, //Until attention is aligned, testing only wastes computation time. + "test_sentences_file": null, // set a file to load sentences to be used for testing. If it is null then we use default english sentences. + + // OPTIMIZER + "noam_schedule": false, // use noam warmup and lr schedule. + "grad_clip": 1.0, // upper limit for gradients for clipping. + "epochs": 1, // total number of epochs to train. + "lr": 0.0001, // Initial learning rate. If Noam decay is active, maximum learning rate. + "wd": 0.000001, // Weight decay weight. + "warmup_steps": 4000, // Noam decay steps to increase the learning rate from 0 to "lr" + "seq_len_norm": false, // Normalize eash sample loss with its length to alleviate imbalanced datasets. Use it if your dataset is small or has skewed distribution of sequence lengths. + + // TACOTRON PRENET + "memory_size": -1, // ONLY TACOTRON - size of the memory queue used fro storing last decoder predictions for auto-regression. If < 0, memory queue is disabled and decoder only uses the last prediction frame. + "prenet_type": "bn", // "original" or "bn". + "prenet_dropout": false, // enable/disable dropout at prenet. + + // TACOTRON ATTENTION + "attention_type": "original", // 'original' or 'graves' + "attention_heads": 4, // number of attention heads (only for 'graves') + "attention_norm": "sigmoid", // softmax or sigmoid. + "windowing": false, // Enables attention windowing. Used only in eval mode. + "use_forward_attn": false, // if it uses forward attention. In general, it aligns faster. + "forward_attn_mask": false, // Additional masking forcing monotonicity only in eval mode. + "transition_agent": false, // enable/disable transition agent of forward attention. + "location_attn": true, // enable_disable location sensitive attention. It is enabled for TACOTRON by default. + "bidirectional_decoder": false, // use https://arxiv.org/abs/1907.09006. Use it, if attention does not work well with your dataset. + "double_decoder_consistency": true, // use DDC explained here https://erogol.com/solving-attention-problems-of-tts-models-with-double-decoder-consistency-draft/ + "ddc_r": 7, // reduction rate for coarse decoder. + + // STOPNET + "stopnet": true, // Train stopnet predicting the end of synthesis. + "separate_stopnet": true, // Train stopnet seperately if 'stopnet==true'. 
It prevents stopnet loss to influence the rest of the model. It causes a better model, but it trains SLOWER. + + // TENSORBOARD and LOGGING + "print_step": 1, // Number of steps to log training on console. + "tb_plot_step": 100, // Number of steps to plot TB training figures. + "print_eval": false, // If True, it prints intermediate loss values in evalulation. + "save_step": 10000, // Number of training steps expected to save traninpg stats and checkpoints. + "checkpoint": true, // If true, it saves checkpoints per "save_step" + "tb_model_param_stats": false, // true, plots param stats per layer on tensorboard. Might be memory consuming, but good for debugging. + + // DATA LOADING + "text_cleaner": "phoneme_cleaners", + "enable_eos_bos_chars": false, // enable/disable beginning of sentence and end of sentence chars. + "num_loader_workers": 4, // number of training data loader processes. Don't set it too big. 4-8 are good values. + "num_val_loader_workers": 4, // number of evaluation data loader processes. + "batch_group_size": 0, //Number of batches to shuffle after bucketing. + "min_seq_len": 6, // DATASET-RELATED: minimum text length to use in training + "max_seq_len": 153, // DATASET-RELATED: maximum text length + + // PATHS + "output_path": "tests/train_outputs/", + + // PHONEMES + "phoneme_cache_path": "tests/train_outputs/phoneme_cache/", // phoneme computation is slow, therefore, it caches results in the given folder. + "use_phonemes": true, // use phonemes instead of raw characters. It is suggested for better pronounciation. + "phoneme_language": "en-us", // depending on your target language, pick one from https://github.com/bootphon/phonemizer#languages + + // MULTI-SPEAKER and GST + "use_speaker_embedding": false, // use speaker embedding to enable multi-speaker learning. + "style_wav_for_test": null, // path to style wav file to be used in TacotronGST inference. + "use_gst": false, // TACOTRON ONLY: use global style tokens + + // DATASETS + "train_portion": 0.1, // dataset portion used for training. It is mainly for internal experiments. + "eval_portion": 0.1, // dataset portion used for training. It is mainly for internal experiments. + "datasets": // List of datasets. They all merged and they get different speaker_ids. + [ + { + "name": "ljspeech", + "path": "tests/data/ljspeech/", + "meta_file_train": "metadata.csv", + "meta_file_val": "metadata.csv" + } + ] + +} + diff --git a/vocoder/tests/test_config.json b/tests/inputs/test_vocoder_audio_config.json similarity index 100% rename from vocoder/tests/test_config.json rename to tests/inputs/test_vocoder_audio_config.json diff --git a/vocoder/configs/multiband_melgan_config.json b/tests/inputs/test_vocoder_multiband_melgan_config.json similarity index 89% rename from vocoder/configs/multiband_melgan_config.json rename to tests/inputs/test_vocoder_multiband_melgan_config.json index a89d43bb..c0f552a4 100644 --- a/vocoder/configs/multiband_melgan_config.json +++ b/tests/inputs/test_vocoder_multiband_melgan_config.json @@ -31,7 +31,7 @@ "symmetric_norm": true, // move normalization to range [-1, 1] "max_norm": 4.0, // scale normalization to range [-max_norm, max_norm] or [0, max_norm] "clip_norm": true, // clip normalized values into the range. - "stats_path": "/home/erogol/Data/LJSpeech-1.1/scale_stats.npy" // DO NOT USE WITH MULTI_SPEAKER MODEL. scaler stats file computed by 'compute_statistics.py'. 
If it is defined, mean-std based notmalization is used and other normalization params are ignored + "stats_path": null // DO NOT USE WITH MULTI_SPEAKER MODEL. scaler stats file computed by 'compute_statistics.py'. If it is defined, mean-std based notmalization is used and other normalization params are ignored }, // DISTRIBUTED TRAINING @@ -90,7 +90,7 @@ }, // DATASET - "data_path": "/home/erogol/Data/LJSpeech-1.1/wavs/", + "data_path": "tests/data/ljspeech/wavs/", "feature_path": null, "seq_len": 16384, "pad_short": 2000, @@ -101,7 +101,7 @@ "reinit_layers": [], // give a list of layer names to restore from the given checkpoint. If not defined, it reloads all heuristically matching layers. // TRAINING - "batch_size": 64, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'. + "batch_size": 4, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'. // VALIDATION "run_eval": true, @@ -109,7 +109,7 @@ "test_sentences_file": null, // set a file to load sentences to be used for testing. If it is null then we use default english sentences. // OPTIMIZER - "epochs": 10000, // total number of epochs to train. + "epochs": 1, // total number of epochs to train. "wd": 0.0, // Weight decay weight. "gen_clip_grad": -1, // Generator gradient clipping threshold. Apply gradient clipping if > 0 "disc_clip_grad": -1, // Discriminator gradient clipping threshold. @@ -127,7 +127,7 @@ "lr_disc": 1e-4, // TENSORBOARD and LOGGING - "print_step": 25, // Number of steps to log traning on console. + "print_step": 1, // Number of steps to log traning on console. "print_eval": false, // If True, it prints loss values for each step in eval run. "save_step": 25000, // Number of training steps expected to plot training stats on TB and save model checkpoints. "checkpoint": true, // If true, it saves checkpoints per "save_step" @@ -139,6 +139,6 @@ "eval_split_size": 10, // PATHS - "output_path": "/home/erogol/Models/LJSpeech/" + "output_path": "tests/outputs/train_outputs/" } diff --git a/tests/outputs/dummy_model_config.json b/tests/outputs/dummy_model_config.json deleted file mode 100644 index 36fac3e5..00000000 --- a/tests/outputs/dummy_model_config.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "run_name": "mozilla-no-loc-fattn-stopnet-sigmoid-loss_masking", - "run_description": "using forward attention, with original prenet, loss masking,separate stopnet, sigmoid. Compare this with 4817. Pytorch DPP", - - "audio":{ - // Audio processing parameters - "num_mels": 80, // size of the mel spec frame. - "fft_size": 1024, // number of stft frequency levels. Size of the linear spectogram frame. - "sample_rate": 22050, // DATASET-RELATED: wav sample-rate. If different than the original data, it is resampled. - "hop_length": 256, - "win_length": 1024, - "preemphasis": 0.98, // pre-emphasis to reduce spec noise and make it more structured. If 0.0, no -pre-emphasis. - "min_level_db": -100, // normalization range - "ref_level_db": 20, // reference level db, theoretically 20db is the sound of air. - "power": 1.5, // value to sharpen wav signals after GL algorithm. - "griffin_lim_iters": 60,// #griffin-lim iterations. 30-60 is a good range. Larger the value, slower the generation. 
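
The JSON-with-comments configs touched in this patch, including the relocated `tests/inputs/test_vocoder_multiband_melgan_config.json` above, are read with `load_config`, and their `audio` block feeds `AudioProcessor` directly. A small sketch (output values are illustrative):

```python
# Load one of the relocated test configs and build its audio processor.
from TTS.utils.audio import AudioProcessor
from TTS.utils.io import load_config

C = load_config("tests/inputs/test_vocoder_multiband_melgan_config.json")
ap = AudioProcessor(**C.audio)
print(ap.sample_rate, C.batch_size, C.epochs)  # e.g. 22050 4 1
```
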
- // Normalization parameters - "signal_norm": true, // normalize the spec values in range [0, 1] - "symmetric_norm": false, // move normalization to range [-1, 1] - "max_norm": 1, // scale normalization to range [-max_norm, max_norm] or [0, max_norm] - "clip_norm": true, // clip normalized values into the range. - "mel_fmin": 0.0, // minimum freq level for mel-spec. ~50 for male and ~95 for female voices. Tune for dataset!! - "mel_fmax": 8000.0, // maximum freq level for mel-spec. Tune for dataset!! - "do_trim_silence": true // enable trimming of slience of audio as you load it. LJspeech (false), TWEB (false), Nancy (true) - }, - - "distributed":{ - "backend": "nccl", - "url": "tcp:\/\/localhost:54321" - }, - - "reinit_layers": [], - - "model": "Tacotron2", // one of the model in models/ - "grad_clip": 1, // upper limit for gradients for clipping. - "epochs": 1000, // total number of epochs to train. - "lr": 0.0001, // Initial learning rate. If Noam decay is active, maximum learning rate. - "lr_decay": false, // if true, Noam learning rate decaying is applied through training. - "warmup_steps": 4000, // Noam decay steps to increase the learning rate from 0 to "lr" - "windowing": false, // Enables attention windowing. Used only in eval mode. - "memory_size": 5, // ONLY TACOTRON - memory queue size used to queue network predictions to feed autoregressive connection. Useful if r < 5. - "attention_norm": "sigmoid", // softmax or sigmoid. Suggested to use softmax for Tacotron2 and sigmoid for Tacotron. - "prenet_type": "original", // ONLY TACOTRON2 - "original" or "bn". - "prenet_dropout": true, // ONLY TACOTRON2 - enable/disable dropout at prenet. - "use_forward_attn": true, // ONLY TACOTRON2 - if it uses forward attention. In general, it aligns faster. - "forward_attn_mask": false, - "attention_type": "original", - "attention_heads": 5, - "bidirectional_decoder": false, - "transition_agent": false, // ONLY TACOTRON2 - enable/disable transition agent of forward attention. - "location_attn": false, // ONLY TACOTRON2 - enable_disable location sensitive attention. It is enabled for TACOTRON by default. - "loss_masking": true, // enable / disable loss masking against the sequence padding. - "enable_eos_bos_chars": false, // enable/disable beginning of sentence and end of sentence chars. - "stopnet": true, // Train stopnet predicting the end of synthesis. - "separate_stopnet": true, // Train stopnet seperately if 'stopnet==true'. It prevents stopnet loss to influence the rest of the model. It causes a better model, but it trains SLOWER. - "tb_model_param_stats": false, // true, plots param stats per layer on tensorboard. Might be memory consuming, but good for debugging. - "use_gst": false, - "double_decoder_consistency": true, // use DDC explained here https://erogol.com/solving-attention-problems-of-tts-models-with-double-decoder-consistency-draft/ - "ddc_r": 7, // reduction rate for coarse decoder. - - "batch_size": 32, // Batch size for training. Lower values than 32 might cause hard to learn attention. - "eval_batch_size":16, - "r": 1, // Number of frames to predict for step. - "wd": 0.000001, // Weight decay weight. - "checkpoint": true, // If true, it saves checkpoints per "save_step" - "save_step": 1000, // Number of training steps expected to save traning stats and checkpoints. - "print_step": 10, // Number of steps to log traning on console. - "batch_group_size": 0, //Number of batches to shuffle after bucketing. 
- - "run_eval": true, - "test_delay_epochs": 5, //Until attention is aligned, testing only wastes computation time. - "test_sentences_file": null, // set a file to load sentences to be used for testing. If it is null then we use default english sentences. - "data_path": "/media/erogol/data_ssd/Data/Mozilla/", // DATASET-RELATED: can overwritten from command argument - "meta_file_train": "metadata_train.txt", // DATASET-RELATED: metafile for training dataloader. - "meta_file_val": "metadata_val.txt", // DATASET-RELATED: metafile for evaluation dataloader. - "dataset": "mozilla", // DATASET-RELATED: one of TTS.dataset.preprocessors depending on your target dataset. Use "tts_cache" for pre-computed dataset by extract_features.py - "min_seq_len": 0, // DATASET-RELATED: minimum text length to use in training - "max_seq_len": 150, // DATASET-RELATED: maximum text length - "output_path": "../keep/", // DATASET-RELATED: output path for all training outputs. - "num_loader_workers": 4, // number of training data loader processes. Don't set it too big. 4-8 are good values. - "num_val_loader_workers": 4, // number of evaluation data loader processes. - "phoneme_cache_path": "mozilla_us_phonemes", // phoneme computation is slow, therefore, it caches results in the given folder. - "use_phonemes": false, // use phonemes instead of raw characters. It is suggested for better pronounciation. - "phoneme_language": "en-us", // depending on your target language, pick one from https://github.com/bootphon/phonemizer#languages - "text_cleaner": "phoneme_cleaners", - "use_speaker_embedding": false // whether to use additional embeddings for separate speakers -} - diff --git a/tests/symbols_tests.py b/tests/symbols_tests.py index 4c32c7d6..87ec4a8a 100644 --- a/tests/symbols_tests.py +++ b/tests/symbols_tests.py @@ -1,6 +1,6 @@ import unittest -from TTS.utils.text import phonemes +from TTS.tts.utils.text import phonemes class SymbolsTest(unittest.TestCase): def test_uniqueness(self): #pylint: disable=no-self-use diff --git a/tests/test_audio.py b/tests/test_audio.py index 4b8ee276..a4d69de5 100644 --- a/tests/test_audio.py +++ b/tests/test_audio.py @@ -1,7 +1,7 @@ import os import unittest -from TTS.tests import get_tests_path, get_tests_input_path, get_tests_output_path +from tests import get_tests_path, get_tests_input_path, get_tests_output_path from TTS.utils.audio import AudioProcessor from TTS.utils.io import load_config @@ -10,7 +10,7 @@ OUT_PATH = os.path.join(get_tests_output_path(), "audio_tests") WAV_FILE = os.path.join(get_tests_input_path(), "example_1.wav") os.makedirs(OUT_PATH, exist_ok=True) -conf = load_config(os.path.join(TESTS_PATH, 'test_config.json')) +conf = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) # pylint: disable=protected-access diff --git a/tests/test_demo_server.py b/tests/test_demo_server.py index 5404304b..e8a86094 100644 --- a/tests/test_demo_server.py +++ b/tests/test_demo_server.py @@ -4,10 +4,11 @@ import unittest import torch as T from TTS.server.synthesizer import Synthesizer -from TTS.tests import get_tests_input_path, get_tests_output_path -from TTS.utils.text.symbols import make_symbols, phonemes, symbols -from TTS.utils.generic_utils import setup_model -from TTS.utils.io import load_config, save_checkpoint +from tests import get_tests_input_path, get_tests_output_path +from TTS.tts.utils.text.symbols import make_symbols, phonemes, symbols +from TTS.tts.utils.generic_utils import setup_model +from TTS.tts.utils.io import save_checkpoint +from 
TTS.utils.io import load_config class DemoServerTest(unittest.TestCase): diff --git a/speaker_encoder/tests.py b/tests/test_encoder.py similarity index 97% rename from speaker_encoder/tests.py rename to tests/test_encoder.py index 039833fc..c713a1f1 100644 --- a/speaker_encoder/tests.py +++ b/tests/test_encoder.py @@ -2,12 +2,13 @@ import os import unittest import torch as T +from tests import get_tests_path, get_tests_input_path from TTS.speaker_encoder.model import SpeakerEncoder from TTS.speaker_encoder.loss import GE2ELoss from TTS.utils.io import load_config -file_path = os.path.dirname(os.path.realpath(__file__)) + "/../tests/" +file_path = get_tests_input_path() c = load_config(os.path.join(file_path, "test_config.json")) diff --git a/tests/test_layers.py b/tests/test_layers.py index d7c8829f..e9a36e35 100644 --- a/tests/test_layers.py +++ b/tests/test_layers.py @@ -1,9 +1,9 @@ import unittest import torch as T -from TTS.layers.tacotron import Prenet, CBHG, Decoder, Encoder -from TTS.layers.losses import L1LossMasked -from TTS.utils.generic_utils import sequence_mask +from TTS.tts.layers.tacotron import Prenet, CBHG, Decoder, Encoder +from TTS.tts.layers.losses import L1LossMasked +from TTS.tts.utils.generic_utils import sequence_mask # pylint: disable=unused-variable diff --git a/tests/test_loader.py b/tests/test_loader.py index 52d24c7a..978b29b7 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -4,18 +4,18 @@ import shutil import torch import numpy as np +from tests import get_tests_path, get_tests_input_path, get_tests_output_path from torch.utils.data import DataLoader from TTS.utils.io import load_config from TTS.utils.audio import AudioProcessor -from TTS.datasets import TTSDataset -from TTS.datasets.preprocess import ljspeech +from TTS.tts.datasets import TTSDataset +from TTS.tts.datasets.preprocess import ljspeech #pylint: disable=unused-variable -file_path = os.path.dirname(os.path.realpath(__file__)) -OUTPATH = os.path.join(file_path, "outputs/loader_tests/") +OUTPATH = os.path.join(get_tests_output_path(), "loader_tests/") os.makedirs(OUTPATH, exist_ok=True) -c = load_config(os.path.join(file_path, 'test_config.json')) +c = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) ok_ljspeech = os.path.exists(c.data_path) DATA_EXIST = True diff --git a/tests/test_preprocessors.py b/tests/test_preprocessors.py index 993ee495..56f79402 100644 --- a/tests/test_preprocessors.py +++ b/tests/test_preprocessors.py @@ -1,8 +1,8 @@ import unittest import os -from TTS.tests import get_tests_input_path +from tests import get_tests_input_path -from TTS.datasets.preprocess import common_voice +from TTS.tts.datasets.preprocess import common_voice class TestPreprocessors(unittest.TestCase): diff --git a/__init__.py b/tests/test_stft_torch.py similarity index 100% rename from __init__.py rename to tests/test_stft_torch.py diff --git a/tests/test_tacotron2_model.py b/tests/test_tacotron2_model.py index ae9f20a2..41b5f039 100644 --- a/tests/test_tacotron2_model.py +++ b/tests/test_tacotron2_model.py @@ -1,14 +1,14 @@ -import os import copy -import torch +import os import unittest -import numpy as np -from torch import optim -from torch import nn +import torch +from tests import get_tests_input_path +from torch import nn, optim + +from TTS.tts.layers.losses import MSELossMasked +from TTS.tts.models.tacotron2 import Tacotron2 from TTS.utils.io import load_config -from TTS.layers.losses import MSELossMasked -from TTS.models.tacotron2 import Tacotron2 #pylint: 
disable=unused-variable @@ -16,8 +16,7 @@ torch.manual_seed(1) use_cuda = torch.cuda.is_available() device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") -file_path = os.path.dirname(os.path.realpath(__file__)) -c = load_config(os.path.join(file_path, 'test_config.json')) +c = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) class TacotronTrainTest(unittest.TestCase): diff --git a/tf/tests/test_tacotron2_tf_model.py b/tests/test_tacotron2_tf_model.py similarity index 95% rename from tf/tests/test_tacotron2_tf_model.py rename to tests/test_tacotron2_tf_model.py index 03db194a..472c1ebf 100644 --- a/tf/tests/test_tacotron2_tf_model.py +++ b/tests/test_tacotron2_tf_model.py @@ -5,9 +5,11 @@ import numpy as np import tensorflow as tf tf.get_logger().setLevel('INFO') +from tests import get_tests_path, get_tests_input_path, get_tests_output_path + from TTS.utils.io import load_config -from TTS.tf.models.tacotron2 import Tacotron2 -from TTS.tf.utils.tflite import convert_tacotron2_to_tflite, load_tflite_model +from TTS.tts.tf.models.tacotron2 import Tacotron2 +from TTS.tts.tf.utils.tflite import convert_tacotron2_to_tflite, load_tflite_model #pylint: disable=unused-variable @@ -15,8 +17,7 @@ torch.manual_seed(1) use_cuda = torch.cuda.is_available() device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") -file_path = os.path.dirname(os.path.realpath(__file__)).replace('/tf/', '/') -c = load_config(os.path.join(file_path, 'test_config.json')) +c = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) class TacotronTFTrainTest(unittest.TestCase): diff --git a/tests/test_tacotron_model.py b/tests/test_tacotron_model.py index 2bbb3c8d..f8d4a4d7 100644 --- a/tests/test_tacotron_model.py +++ b/tests/test_tacotron_model.py @@ -1,13 +1,14 @@ -import os import copy -import torch +import os import unittest -from torch import optim -from torch import nn +import torch +from tests import get_tests_input_path +from torch import nn, optim + +from TTS.tts.layers.losses import L1LossMasked +from TTS.tts.models.tacotron import Tacotron from TTS.utils.io import load_config -from TTS.layers.losses import L1LossMasked -from TTS.models.tacotron import Tacotron #pylint: disable=unused-variable @@ -15,8 +16,7 @@ torch.manual_seed(1) use_cuda = torch.cuda.is_available() device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") -file_path = os.path.dirname(os.path.realpath(__file__)) -c = load_config(os.path.join(file_path, 'test_config.json')) +c = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) def count_parameters(model): diff --git a/tests/test_text_processing.py b/tests/test_text_processing.py index 93edabe7..992f0a17 100644 --- a/tests/test_text_processing.py +++ b/tests/test_text_processing.py @@ -3,12 +3,12 @@ import os # pylint: disable=wildcard-import # pylint: disable=unused-import import unittest -from TTS.utils.text import * -from TTS.tests import get_tests_path +from tests import get_tests_input_path +from TTS.tts.utils.text import * +from tests import get_tests_path from TTS.utils.io import load_config -TESTS_PATH = get_tests_path() -conf = load_config(os.path.join(TESTS_PATH, 'test_config.json')) +conf = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) def test_phoneme_to_sequence(): text = "Recent research at Harvard has shown meditating for as little as 8 weeks can actually increase, the grey matter in the parts of the brain responsible for emotional regulation and learning!" 
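
A standalone round-trip with the relocated text utilities exercised by this test; the cleaner list and language below are assumptions based on the test's defaults, and phonemization requires a working phonemizer/espeak install:

```python
# Hedged sketch: characters -> phoneme ID sequence -> phoneme string.
from TTS.tts.utils.text import phoneme_to_sequence, sequence_to_phoneme

text = "Be a voice, not an echo."
seq = phoneme_to_sequence(text, ["phoneme_cleaners"], "en-us")  # assumed cleaner/lang
print(sequence_to_phoneme(seq))
```
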
@@ -19,7 +19,7 @@ def test_phoneme_to_sequence(): sequence_with_params = phoneme_to_sequence(text, text_cleaner, lang, tp=conf.characters) text_hat_with_params = sequence_to_phoneme(sequence, tp=conf.characters) gt = "ɹiːsənt ɹɪsɜːtʃ æt hɑːɹvɚd hɐz ʃoʊn mɛdᵻteɪɾɪŋ fɔːɹ æz lɪɾəl æz eɪt wiːks kæn æktʃuːəli ɪnkɹiːs, ðə ɡɹeɪ mæɾɚɹ ɪnðə pɑːɹts ʌvðə bɹeɪn ɹɪspɑːnsəbəl fɔːɹ ɪmoʊʃənəl ɹɛɡjuːleɪʃən ænd lɜːnɪŋ!" - assert text_hat == text_hat_with_params == gt + assert text_hat == text_hat_with_params == gt # multiple punctuations text = "Be a voice, not an! echo?" diff --git a/datasets/__init__.py b/tests/test_train_tts.py similarity index 100% rename from datasets/__init__.py rename to tests/test_train_tts.py diff --git a/tests/test_tts_train.sh b/tests/test_tts_train.sh new file mode 100755 index 00000000..55379a1e --- /dev/null +++ b/tests/test_tts_train.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +BASEDIR=$(dirname "$0") +echo "$BASEDIR" +# run training +CUDA_VISIBLE_DEVICES="" python TTS/bin/train_tts.py --config_path $BASEDIR/inputs/test_train_config.json +# find the training folder +LATEST_FOLDER=$(ls $BASEDIR/train_outputs/| sort | tail -1) +echo $LATEST_FOLDER +# continue the previous training +CUDA_VISIBLE_DEVICES="" python TTS/bin/train_tts.py --continue_path $BASEDIR/train_outputs/$LATEST_FOLDER +# remove all the outputs +rm -rf $BASEDIR/train_outputs/ diff --git a/vocoder/tests/test_datasets.py b/tests/test_vocoder_datasets.py similarity index 93% rename from vocoder/tests/test_datasets.py rename to tests/test_vocoder_datasets.py index 43d0d3de..2a487d9a 100644 --- a/vocoder/tests/test_datasets.py +++ b/tests/test_vocoder_datasets.py @@ -1,24 +1,24 @@ import os + import numpy as np +from tests import get_tests_path, get_tests_input_path, get_tests_output_path from torch.utils.data import DataLoader -from TTS.vocoder.datasets.gan_dataset import GANDataset -from TTS.vocoder.datasets.preprocess import load_wav_data from TTS.utils.audio import AudioProcessor from TTS.utils.io import load_config - +from TTS.vocoder.datasets.gan_dataset import GANDataset +from TTS.vocoder.datasets.preprocess import load_wav_data file_path = os.path.dirname(os.path.realpath(__file__)) -OUTPATH = os.path.join(file_path, "../../tests/outputs/loader_tests/") +OUTPATH = os.path.join(get_tests_output_path(), "loader_tests/") os.makedirs(OUTPATH, exist_ok=True) -C = load_config(os.path.join(file_path, 'test_config.json')) +C = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) -test_data_path = os.path.join(file_path, "../../tests/data/ljspeech/") +test_data_path = os.path.join(get_tests_path(), "data/ljspeech/") ok_ljspeech = os.path.exists(test_data_path) - def gan_dataset_case(batch_size, seq_len, hop_len, conv_pad, return_segments, use_noise_augment, use_cache, num_workers): ''' run dataloader with given parameters and check conditions ''' ap = AudioProcessor(**C.audio) diff --git a/vocoder/tests/test_losses.py b/tests/test_vocoder_losses.py similarity index 85% rename from vocoder/tests/test_losses.py rename to tests/test_vocoder_losses.py index 68e42e89..965e68ad 100644 --- a/vocoder/tests/test_losses.py +++ b/tests/test_vocoder_losses.py @@ -1,11 +1,11 @@ import os + import torch +from tests import get_tests_input_path, get_tests_output_path, get_tests_path -from TTS.vocoder.layers.losses import TorchSTFT, STFTLoss, MultiScaleSTFTLoss - -from TTS.tests import get_tests_path, get_tests_input_path, get_tests_output_path from TTS.utils.audio import AudioProcessor from TTS.utils.io import 
load_config +from TTS.vocoder.layers.losses import MultiScaleSTFTLoss, STFTLoss, TorchSTFT TESTS_PATH = get_tests_path() @@ -14,8 +14,7 @@ os.makedirs(OUT_PATH, exist_ok=True) WAV_FILE = os.path.join(get_tests_input_path(), "example_1.wav") -file_path = os.path.dirname(os.path.realpath(__file__)) -C = load_config(os.path.join(file_path, 'test_config.json')) +C = load_config(os.path.join(get_tests_input_path(), 'test_config.json')) ap = AudioProcessor(**C.audio) @@ -53,9 +52,3 @@ def test_multiscale_stft_loss(): loss_m, loss_sc = stft_loss(wav, torch.rand_like(wav)) assert loss_sc < 1.0 assert loss_m + loss_sc > 0 - - - - - - diff --git a/vocoder/tests/test_melgan_discriminator.py b/tests/test_vocoder_melgan_discriminator.py similarity index 100% rename from vocoder/tests/test_melgan_discriminator.py rename to tests/test_vocoder_melgan_discriminator.py diff --git a/vocoder/tests/test_melgan_generator.py b/tests/test_vocoder_melgan_generator.py similarity index 100% rename from vocoder/tests/test_melgan_generator.py rename to tests/test_vocoder_melgan_generator.py diff --git a/vocoder/tests/test_pqmf.py b/tests/test_vocoder_pqmf.py similarity index 90% rename from vocoder/tests/test_pqmf.py rename to tests/test_vocoder_pqmf.py index a26bdd59..8924fea8 100644 --- a/vocoder/tests/test_pqmf.py +++ b/tests/test_vocoder_pqmf.py @@ -4,7 +4,7 @@ import torch import soundfile as sf from librosa.core import load -from TTS.tests import get_tests_path, get_tests_input_path +from tests import get_tests_path, get_tests_input_path from TTS.vocoder.layers.pqmf import PQMF diff --git a/vocoder/tests/test_rwd.py b/tests/test_vocoder_rwd.py similarity index 100% rename from vocoder/tests/test_rwd.py rename to tests/test_vocoder_rwd.py diff --git a/tests/test_vocoder_train.sh b/tests/test_vocoder_train.sh new file mode 100755 index 00000000..b4a9b9fa --- /dev/null +++ b/tests/test_vocoder_train.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +BASEDIR=$(dirname "$0") +echo "$BASEDIR" +# create run dir +mkdir $BASEDIR/train_outputs +# run training +CUDA_VISIBLE_DEVICES="" python TTS/bin/train_vocoder.py --config_path $BASEDIR/inputs/test_vocoder_multiband_melgan_config.json +# find the training folder +LATEST_FOLDER=$(ls $BASEDIR/outputs/train_outputs/| sort | tail -1) +echo $LATEST_FOLDER +# continue the previous training +CUDA_VISIBLE_DEVICES="" python TTS/bin/train_vocoder.py --continue_path $BASEDIR/outputs/train_outputs/$LATEST_FOLDER +# remove all the outputs +rm -rf $BASEDIR/train_outputs/ diff --git a/tf/README.md b/tf/README.md deleted file mode 100644 index 0f9d58e9..00000000 --- a/tf/README.md +++ /dev/null @@ -1,20 +0,0 @@ -## Utilities to Convert Models to Tensorflow2 -Here there are experimental utilities to convert trained Torch models to Tensorflow (2.2>=). - -Converting Torch models to TF enables all the TF toolkit to be used for better deployment and device specific optimizations. - -Note that we do not plan to share training scripts for Tensorflow in near future. But any contribution in that direction would be more than welcome. - -To see how you can use TF model at inference, check the notebook. - -This is an experimental release. If you encounter an error, please put an issue or in the best send a PR but you are mostly on your own. - - -### Converting a Model -- Run ```convert_tacotron2_torch_to_tf.py --torch_model_path /path/to/torch/model.pth.tar --config_path /path/to/model/config.json --output_path /path/to/output/tf/model``` with the right arguments. 
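For reference, the conversion workflow this README documents is implemented by the two scripts removed later in this diff, `tf/convert_tacotron2_torch_to_tf.py` and `tf/convert_tacotron2_tflite.py`. A sketch of the full Torch to TF to TF-Lite chain, using only the flags those scripts define and the pre-refactor paths (all paths are placeholders; where these utilities live after this refactor is not shown here):

```
# Hypothetical end-to-end conversion; script names and flags are taken from the
# deleted tf/convert_tacotron2_torch_to_tf.py and tf/convert_tacotron2_tflite.py
# shown later in this diff. Paths are placeholders.

# 1. Torch checkpoint -> TF checkpoint
python tf/convert_tacotron2_torch_to_tf.py \
    --torch_model_path /path/to/torch/model.pth.tar \
    --config_path /path/to/model/config.json \
    --output_path /path/to/tf/model

# 2. TF checkpoint -> TF-Lite binary
python tf/convert_tacotron2_tflite.py \
    --tf_model /path/to/tf/model \
    --config_path /path/to/model/config.json \
    --output_path /path/to/model.tflite
```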
- -### Known issues ans limitations -- We use a custom model load/save mechanism which enables us to store model related information with models weights. (Similar to Torch). However, it is prone to random errors. -- Current TF model implementation is slightly slower than Torch model. Hopefully, it'll get better with improving TF support for eager mode and ```tf.function```. -- TF implementation of Tacotron2 only supports regular Tacotron2 as in the paper. -- You can only convert models trained after TF model implementation since model layers has been updated in Torch model. diff --git a/tf/__init__.py b/tf/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tf/convert_tacotron2_tflite.py b/tf/convert_tacotron2_tflite.py deleted file mode 100644 index fc46cc79..00000000 --- a/tf/convert_tacotron2_tflite.py +++ /dev/null @@ -1,37 +0,0 @@ -# Convert Tensorflow Tacotron2 model to TF-Lite binary - -import argparse - -from TTS.utils.io import load_config -from TTS.utils.text.symbols import symbols, phonemes -from TTS.tf.utils.generic_utils import setup_model -from TTS.tf.utils.io import load_checkpoint -from TTS.tf.utils.tflite import convert_tacotron2_to_tflite - - -parser = argparse.ArgumentParser() -parser.add_argument('--tf_model', - type=str, - help='Path to target torch model to be converted to TF.') -parser.add_argument('--config_path', - type=str, - help='Path to config file of torch model.') -parser.add_argument('--output_path', - type=str, - help='path to tflite output binary.') -args = parser.parse_args() - -# Set constants -CONFIG = load_config(args.config_path) - -# load the model -c = CONFIG -num_speakers = 0 -num_chars = len(phonemes) if c.use_phonemes else len(symbols) -model = setup_model(num_chars, num_speakers, c, enable_tflite=True) -model.build_inference() -model = load_checkpoint(model, args.tf_model) -model.decoder.set_max_decoder_steps(1000) - -# create tflite model -tflite_model = convert_tacotron2_to_tflite(model, output_path=args.output_path) \ No newline at end of file diff --git a/tf/convert_tacotron2_torch_to_tf.py b/tf/convert_tacotron2_torch_to_tf.py deleted file mode 100644 index dfc42250..00000000 --- a/tf/convert_tacotron2_torch_to_tf.py +++ /dev/null @@ -1,210 +0,0 @@ -# %% -import sys -sys.path.append('/home/erogol/Projects') -import os -os.environ['CUDA_VISIBLE_DEVICES'] = '' -# %% -import argparse -import numpy as np -import torch -import tensorflow as tf -from fuzzywuzzy import fuzz - -from TTS.utils.text.symbols import phonemes, symbols -from TTS.utils.generic_utils import setup_model -from TTS.utils.io import load_config -from TTS.tf.models.tacotron2 import Tacotron2 -from TTS.tf.utils.convert_torch_to_tf_utils import compare_torch_tf, tf_create_dummy_inputs, transfer_weights_torch_to_tf, convert_tf_name -from TTS.tf.utils.generic_utils import save_checkpoint - -parser = argparse.ArgumentParser() -parser.add_argument('--torch_model_path', - type=str, - help='Path to target torch model to be converted to TF.') -parser.add_argument('--config_path', - type=str, - help='Path to config file of torch model.') -parser.add_argument('--output_path', - type=str, - help='path to output file including file name to save TF model.') -args = parser.parse_args() - -# load model config -config_path = args.config_path -c = load_config(config_path) -num_speakers = 0 - -# init torch model -num_chars = len(phonemes) if c.use_phonemes else len(symbols) -model = setup_model(num_chars, num_speakers, c) -checkpoint = torch.load(args.torch_model_path, - 
map_location=torch.device('cpu')) -state_dict = checkpoint['model'] -model.load_state_dict(state_dict) - -# init tf model -model_tf = Tacotron2(num_chars=num_chars, - num_speakers=num_speakers, - r=model.decoder.r, - postnet_output_dim=c.audio['num_mels'], - decoder_output_dim=c.audio['num_mels'], - attn_type=c.attention_type, - attn_win=c.windowing, - attn_norm=c.attention_norm, - prenet_type=c.prenet_type, - prenet_dropout=c.prenet_dropout, - forward_attn=c.use_forward_attn, - trans_agent=c.transition_agent, - forward_attn_mask=c.forward_attn_mask, - location_attn=c.location_attn, - attn_K=c.attention_heads, - separate_stopnet=c.separate_stopnet, - bidirectional_decoder=c.bidirectional_decoder) - -# set initial layer mapping - these are not captured by the below heuristic approach -# TODO: set layer names so that we can remove these manual matching -common_sufix = '/.ATTRIBUTES/VARIABLE_VALUE' -var_map = [ - ('embedding/embeddings:0', 'embedding.weight'), - ('encoder/lstm/forward_lstm/lstm_cell_1/kernel:0', - 'encoder.lstm.weight_ih_l0'), - ('encoder/lstm/forward_lstm/lstm_cell_1/recurrent_kernel:0', - 'encoder.lstm.weight_hh_l0'), - ('encoder/lstm/backward_lstm/lstm_cell_2/kernel:0', - 'encoder.lstm.weight_ih_l0_reverse'), - ('encoder/lstm/backward_lstm/lstm_cell_2/recurrent_kernel:0', - 'encoder.lstm.weight_hh_l0_reverse'), - ('encoder/lstm/forward_lstm/lstm_cell_1/bias:0', - ('encoder.lstm.bias_ih_l0', 'encoder.lstm.bias_hh_l0')), - ('encoder/lstm/backward_lstm/lstm_cell_2/bias:0', - ('encoder.lstm.bias_ih_l0_reverse', 'encoder.lstm.bias_hh_l0_reverse')), - ('attention/v/kernel:0', 'decoder.attention.v.linear_layer.weight'), - ('decoder/linear_projection/kernel:0', - 'decoder.linear_projection.linear_layer.weight'), - ('decoder/stopnet/kernel:0', 'decoder.stopnet.1.linear_layer.weight') -] - -# %% -# get tf_model graph -mel_pred = model_tf.build_inference() - -# get tf variables -tf_vars = model_tf.weights - -# match variable names with fuzzy logic -torch_var_names = list(state_dict.keys()) -tf_var_names = [we.name for we in model_tf.weights] -for tf_name in tf_var_names: - # skip re-mapped layer names - if tf_name in [name[0] for name in var_map]: - continue - tf_name_edited = convert_tf_name(tf_name) - ratios = [ - fuzz.ratio(torch_name, tf_name_edited) - for torch_name in torch_var_names - ] - max_idx = np.argmax(ratios) - matching_name = torch_var_names[max_idx] - del torch_var_names[max_idx] - var_map.append((tf_name, matching_name)) - -# %% -# print variable match -from pprint import pprint -pprint(var_map) -pprint(torch_var_names) - -# pass weights -tf_vars = transfer_weights_torch_to_tf(tf_vars, dict(var_map), state_dict) - -# Compare TF and TORCH models -# %% -# check embedding outputs -model.eval() -input_ids = torch.randint(0, 24, (1, 128)).long() - -o_t = model.embedding(input_ids) -o_tf = model_tf.embedding(input_ids.detach().numpy()) -assert abs(o_t.detach().numpy() - - o_tf.numpy()).sum() < 1e-5, abs(o_t.detach().numpy() - - o_tf.numpy()).sum() - -# compare encoder outputs -oo_en = model.encoder.inference(o_t.transpose(1, 2)) -ooo_en = model_tf.encoder(o_t.detach().numpy(), training=False) -assert compare_torch_tf(oo_en, ooo_en) < 1e-5 - -#pylint: disable=redefined-builtin -# compare decoder.attention_rnn -inp = torch.rand([1, 768]) -inp_tf = inp.numpy() -model.decoder._init_states(oo_en, mask=None) #pylint: disable=protected-access -output, cell_state = model.decoder.attention_rnn(inp) -states = model_tf.decoder.build_decoder_initial_states(1, 512, 128) -output_tf, 
memory_state = model_tf.decoder.attention_rnn(inp_tf, - states[2], - training=False) -assert compare_torch_tf(output, output_tf).mean() < 1e-5 - -query = output -inputs = torch.rand([1, 128, 512]) -query_tf = query.detach().numpy() -inputs_tf = inputs.numpy() - -# compare decoder.attention -model.decoder.attention.init_states(inputs) -processes_inputs = model.decoder.attention.preprocess_inputs(inputs) -loc_attn, proc_query = model.decoder.attention.get_location_attention( - query, processes_inputs) -context = model.decoder.attention(query, inputs, processes_inputs, None) - -attention_states = model_tf.decoder.build_decoder_initial_states(1, 512, 128)[-1] -model_tf.decoder.attention.process_values(tf.convert_to_tensor(inputs_tf)) -loc_attn_tf, proc_query_tf = model_tf.decoder.attention.get_loc_attn(query_tf, attention_states) -context_tf, attention, attention_states = model_tf.decoder.attention(query_tf, attention_states, training=False) - -assert compare_torch_tf(loc_attn, loc_attn_tf).mean() < 1e-5 -assert compare_torch_tf(proc_query, proc_query_tf).mean() < 1e-5 -assert compare_torch_tf(context, context_tf) < 1e-5 - -# compare decoder.decoder_rnn -input = torch.rand([1, 1536]) -input_tf = input.numpy() -model.decoder._init_states(oo_en, mask=None) #pylint: disable=protected-access -output, cell_state = model.decoder.decoder_rnn( - input, [model.decoder.decoder_hidden, model.decoder.decoder_cell]) -states = model_tf.decoder.build_decoder_initial_states(1, 512, 128) -output_tf, memory_state = model_tf.decoder.decoder_rnn(input_tf, - states[3], - training=False) -assert abs(input - input_tf).mean() < 1e-5 -assert compare_torch_tf(output, output_tf).mean() < 1e-5 - -# compare decoder.linear_projection -input = torch.rand([1, 1536]) -input_tf = input.numpy() -output = model.decoder.linear_projection(input) -output_tf = model_tf.decoder.linear_projection(input_tf, training=False) -assert compare_torch_tf(output, output_tf) < 1e-5 - -# compare decoder outputs -model.decoder.max_decoder_steps = 100 -model_tf.decoder.set_max_decoder_steps(100) -output, align, stop = model.decoder.inference(oo_en) -states = model_tf.decoder.build_decoder_initial_states(1, 512, 128) -output_tf, align_tf, stop_tf = model_tf.decoder(ooo_en, states, training=False) -assert compare_torch_tf(output.transpose(1, 2), output_tf) < 1e-4 - -# compare the whole model output -outputs_torch = model.inference(input_ids) -outputs_tf = model_tf(tf.convert_to_tensor(input_ids.numpy())) -print(abs(outputs_torch[0].numpy()[:, 0] - outputs_tf[0].numpy()[:, 0]).mean()) -assert compare_torch_tf(outputs_torch[2][:, 50, :], - outputs_tf[2][:, 50, :]) < 1e-5 -assert compare_torch_tf(outputs_torch[0], outputs_tf[0]) < 1e-4 - -# %% -# save tf model -save_checkpoint(model_tf, None, checkpoint['step'], checkpoint['epoch'], - checkpoint['r'], args.output_path) -print(' > Model conversion is successfully completed :).') diff --git a/tf/layers/common_layers.py b/tf/layers/common_layers.py deleted file mode 100644 index f2353a93..00000000 --- a/tf/layers/common_layers.py +++ /dev/null @@ -1,285 +0,0 @@ -import tensorflow as tf -from tensorflow import keras -from tensorflow.python.ops import math_ops -# from tensorflow_addons.seq2seq import BahdanauAttention - - -class Linear(keras.layers.Layer): - def __init__(self, units, use_bias, **kwargs): - super(Linear, self).__init__(**kwargs) - self.linear_layer = keras.layers.Dense(units, use_bias=use_bias, name='linear_layer') - self.activation = keras.layers.ReLU() - - def call(self, x): - """ - 
shapes: - x: B x T x C - """ - return self.activation(self.linear_layer(x)) - - -class LinearBN(keras.layers.Layer): - def __init__(self, units, use_bias, **kwargs): - super(LinearBN, self).__init__(**kwargs) - self.linear_layer = keras.layers.Dense(units, use_bias=use_bias, name='linear_layer') - self.batch_normalization = keras.layers.BatchNormalization(axis=-1, momentum=0.90, epsilon=1e-5, name='batch_normalization') - self.activation = keras.layers.ReLU() - - def call(self, x, training=None): - """ - shapes: - x: B x T x C - """ - out = self.linear_layer(x) - out = self.batch_normalization(out, training=training) - return self.activation(out) - - -class Prenet(keras.layers.Layer): - def __init__(self, - prenet_type, - prenet_dropout, - units, - bias, - **kwargs): - super(Prenet, self).__init__(**kwargs) - self.prenet_type = prenet_type - self.prenet_dropout = prenet_dropout - self.linear_layers = [] - if prenet_type == "bn": - self.linear_layers += [LinearBN(unit, use_bias=bias, name=f'linear_layer_{idx}') for idx, unit in enumerate(units)] - elif prenet_type == "original": - self.linear_layers += [Linear(unit, use_bias=bias, name=f'linear_layer_{idx}') for idx, unit in enumerate(units)] - else: - raise RuntimeError(' [!] Unknown prenet type.') - if prenet_dropout: - self.dropout = keras.layers.Dropout(rate=0.5) - - def call(self, x, training=None): - """ - shapes: - x: B x T x C - """ - for linear in self.linear_layers: - if self.prenet_dropout: - x = self.dropout(linear(x), training=training) - else: - x = linear(x) - return x - - -def _sigmoid_norm(score): - attn_weights = tf.nn.sigmoid(score) - attn_weights = attn_weights / tf.reduce_sum(attn_weights, axis=1, keepdims=True) - return attn_weights - - -class Attention(keras.layers.Layer): - """TODO: implement forward_attention - TODO: location sensitive attention - TODO: implement attention windowing """ - def __init__(self, attn_dim, use_loc_attn, loc_attn_n_filters, - loc_attn_kernel_size, use_windowing, norm, use_forward_attn, - use_trans_agent, use_forward_attn_mask, **kwargs): - super(Attention, self).__init__(**kwargs) - self.use_loc_attn = use_loc_attn - self.loc_attn_n_filters = loc_attn_n_filters - self.loc_attn_kernel_size = loc_attn_kernel_size - self.use_windowing = use_windowing - self.norm = norm - self.use_forward_attn = use_forward_attn - self.use_trans_agent = use_trans_agent - self.use_forward_attn_mask = use_forward_attn_mask - self.query_layer = tf.keras.layers.Dense(attn_dim, use_bias=False, name='query_layer/linear_layer') - self.inputs_layer = tf.keras.layers.Dense(attn_dim, use_bias=False, name=f'{self.name}/inputs_layer/linear_layer') - self.v = tf.keras.layers.Dense(1, use_bias=True, name='v/linear_layer') - if use_loc_attn: - self.location_conv1d = keras.layers.Conv1D( - filters=loc_attn_n_filters, - kernel_size=loc_attn_kernel_size, - padding='same', - use_bias=False, - name='location_layer/location_conv1d') - self.location_dense = keras.layers.Dense(attn_dim, use_bias=False, name='location_layer/location_dense') - if norm == 'softmax': - self.norm_func = tf.nn.softmax - elif norm == 'sigmoid': - self.norm_func = _sigmoid_norm - else: - raise ValueError("Unknown value for attention norm type") - - def init_states(self, batch_size, value_length): - states = [] - if self.use_loc_attn: - attention_cum = tf.zeros([batch_size, value_length]) - attention_old = tf.zeros([batch_size, value_length]) - states = [attention_cum, attention_old] - if self.use_forward_attn: - alpha = tf.concat([ - tf.ones([batch_size, 
1]), - tf.zeros([batch_size, value_length])[:, :-1] + 1e-7 - ], 1) - states.append(alpha) - return tuple(states) - - def process_values(self, values): - """ cache values for decoder iterations """ - #pylint: disable=attribute-defined-outside-init - self.processed_values = self.inputs_layer(values) - self.values = values - - def get_loc_attn(self, query, states): - """ compute location attention, query layer and - unnorm. attention weights""" - attention_cum, attention_old = states[:2] - attn_cat = tf.stack([attention_old, attention_cum], axis=2) - - processed_query = self.query_layer(tf.expand_dims(query, 1)) - processed_attn = self.location_dense(self.location_conv1d(attn_cat)) - score = self.v( - tf.nn.tanh(self.processed_values + processed_query + - processed_attn)) - score = tf.squeeze(score, axis=2) - return score, processed_query - - def get_attn(self, query): - """ compute query layer and unnormalized attention weights """ - processed_query = self.query_layer(tf.expand_dims(query, 1)) - score = self.v(tf.nn.tanh(self.processed_values + processed_query)) - score = tf.squeeze(score, axis=2) - return score, processed_query - - def apply_score_masking(self, score, mask): #pylint: disable=no-self-use - """ ignore sequence paddings """ - padding_mask = tf.expand_dims(math_ops.logical_not(mask), 2) - # Bias so padding positions do not contribute to attention distribution. - score -= 1.e9 * math_ops.cast(padding_mask, dtype=tf.float32) - return score - - def apply_forward_attention(self, alignment, alpha): #pylint: disable=no-self-use - # forward attention - fwd_shifted_alpha = tf.pad(alpha[:, :-1], ((0, 0), (1, 0)), constant_values=0.0) - # compute transition potentials - new_alpha = ((1 - 0.5) * alpha + 0.5 * fwd_shifted_alpha + 1e-8) * alignment - # renormalize attention weights - new_alpha = new_alpha / tf.reduce_sum(new_alpha, axis=1, keepdims=True) - return new_alpha - - def update_states(self, old_states, scores_norm, attn_weights, new_alpha=None): - states = [] - if self.use_loc_attn: - states = [old_states[0] + scores_norm, attn_weights] - if self.use_forward_attn: - states.append(new_alpha) - return tuple(states) - - def call(self, query, states): - """ - shapes: - query: B x D - """ - if self.use_loc_attn: - score, _ = self.get_loc_attn(query, states) - else: - score, _ = self.get_attn(query) - - # TODO: masking - # if mask is not None: - # self.apply_score_masking(score, mask) - # attn_weights shape == (batch_size, max_length, 1) - - # normalize attention scores - scores_norm = self.norm_func(score) - attn_weights = scores_norm - - # apply forward attention - new_alpha = None - if self.use_forward_attn: - new_alpha = self.apply_forward_attention(attn_weights, states[-1]) - attn_weights = new_alpha - - # update states tuple - # states = (cum_attn_weights, attn_weights, new_alpha) - states = self.update_states(states, scores_norm, attn_weights, new_alpha) - - # context_vector shape after sum == (batch_size, hidden_size) - context_vector = tf.matmul(tf.expand_dims(attn_weights, axis=2), self.values, transpose_a=True, transpose_b=False) - context_vector = tf.squeeze(context_vector, axis=1) - return context_vector, attn_weights, states - - -# def _location_sensitive_score(processed_query, keys, processed_loc, attention_v, attention_b): -# dtype = processed_query.dtype -# num_units = keys.shape[-1].value or array_ops.shape(keys)[-1] -# return tf.reduce_sum(attention_v * tf.tanh(keys + processed_query + processed_loc + attention_b), [2]) - - -# class 
LocationSensitiveAttention(BahdanauAttention): -# def __init__(self, -# units, -# memory=None, -# memory_sequence_length=None, -# normalize=False, -# probability_fn="softmax", -# kernel_initializer="glorot_uniform", -# dtype=None, -# name="LocationSensitiveAttention", -# location_attention_filters=32, -# location_attention_kernel_size=31): - -# super(LocationSensitiveAttention, -# self).__init__(units=units, -# memory=memory, -# memory_sequence_length=memory_sequence_length, -# normalize=normalize, -# probability_fn='softmax', ## parent module default -# kernel_initializer=kernel_initializer, -# dtype=dtype, -# name=name) -# if probability_fn == 'sigmoid': -# self.probability_fn = lambda score, _: self._sigmoid_normalization(score) -# self.location_conv = keras.layers.Conv1D(filters=location_attention_filters, kernel_size=location_attention_kernel_size, padding='same', use_bias=False) -# self.location_dense = keras.layers.Dense(units, use_bias=False) -# # self.v = keras.layers.Dense(1, use_bias=True) - -# def _location_sensitive_score(self, processed_query, keys, processed_loc): -# processed_query = tf.expand_dims(processed_query, 1) -# return tf.reduce_sum(self.attention_v * tf.tanh(keys + processed_query + processed_loc), [2]) - -# def _location_sensitive(self, alignment_cum, alignment_old): -# alignment_cat = tf.stack([alignment_cum, alignment_old], axis=2) -# return self.location_dense(self.location_conv(alignment_cat)) - -# def _sigmoid_normalization(self, score): -# return tf.nn.sigmoid(score) / tf.reduce_sum(tf.nn.sigmoid(score), axis=-1, keepdims=True) - -# # def _apply_masking(self, score, mask): -# # padding_mask = tf.expand_dims(math_ops.logical_not(mask), 2) -# # # Bias so padding positions do not contribute to attention distribution. -# # score -= 1.e9 * math_ops.cast(padding_mask, dtype=tf.float32) -# # return score - -# def _calculate_attention(self, query, state): -# alignment_cum, alignment_old = state[:2] -# processed_query = self.query_layer( -# query) if self.query_layer else query -# processed_loc = self._location_sensitive(alignment_cum, alignment_old) -# score = self._location_sensitive_score( -# processed_query, -# self.keys, -# processed_loc) -# alignment = self.probability_fn(score, state) -# alignment_cum = alignment_cum + alignment -# state[0] = alignment_cum -# state[1] = alignment -# return alignment, state - -# def compute_context(self, alignments): -# expanded_alignments = tf.expand_dims(alignments, 1) -# context = tf.matmul(expanded_alignments, self.values) -# context = tf.squeeze(context, [1]) -# return context - -# # def call(self, query, state): -# # alignment, next_state = self._calculate_attention(query, state) -# # return alignment, next_state diff --git a/tf/layers/tacotron2.py b/tf/layers/tacotron2.py deleted file mode 100644 index 7cef9eac..00000000 --- a/tf/layers/tacotron2.py +++ /dev/null @@ -1,300 +0,0 @@ -import tensorflow as tf -from tensorflow import keras -from TTS.tf.utils.tf_utils import shape_list -from TTS.tf.layers.common_layers import Prenet, Attention -# from tensorflow_addons.seq2seq import AttentionWrapper - - -class ConvBNBlock(keras.layers.Layer): - def __init__(self, filters, kernel_size, activation, **kwargs): - super(ConvBNBlock, self).__init__(**kwargs) - self.convolution1d = keras.layers.Conv1D(filters, kernel_size, padding='same', name='convolution1d') - self.batch_normalization = keras.layers.BatchNormalization(axis=2, momentum=0.90, epsilon=1e-5, name='batch_normalization') - self.dropout = keras.layers.Dropout(rate=0.5, 
name='dropout') - self.activation = keras.layers.Activation(activation, name='activation') - - def call(self, x, training=None): - o = self.convolution1d(x) - o = self.batch_normalization(o, training=training) - o = self.activation(o) - o = self.dropout(o, training=training) - return o - - -class Postnet(keras.layers.Layer): - def __init__(self, output_filters, num_convs, **kwargs): - super(Postnet, self).__init__(**kwargs) - self.convolutions = [] - self.convolutions.append(ConvBNBlock(512, 5, 'tanh', name='convolutions_0')) - for idx in range(1, num_convs - 1): - self.convolutions.append(ConvBNBlock(512, 5, 'tanh', name=f'convolutions_{idx}')) - self.convolutions.append(ConvBNBlock(output_filters, 5, 'linear', name=f'convolutions_{idx+1}')) - - def call(self, x, training=None): - o = x - for layer in self.convolutions: - o = layer(o, training=training) - return o - - -class Encoder(keras.layers.Layer): - def __init__(self, output_input_dim, **kwargs): - super(Encoder, self).__init__(**kwargs) - self.convolutions = [] - for idx in range(3): - self.convolutions.append(ConvBNBlock(output_input_dim, 5, 'relu', name=f'convolutions_{idx}')) - self.lstm = keras.layers.Bidirectional(keras.layers.LSTM(output_input_dim // 2, return_sequences=True, use_bias=True), name='lstm') - - def call(self, x, training=None): - o = x - for layer in self.convolutions: - o = layer(o, training=training) - o = self.lstm(o) - return o - - -class Decoder(keras.layers.Layer): - #pylint: disable=unused-argument - def __init__(self, frame_dim, r, attn_type, use_attn_win, attn_norm, prenet_type, - prenet_dropout, use_forward_attn, use_trans_agent, use_forward_attn_mask, - use_location_attn, attn_K, separate_stopnet, speaker_emb_dim, enable_tflite, **kwargs): - super(Decoder, self).__init__(**kwargs) - self.frame_dim = frame_dim - self.r_init = tf.constant(r, dtype=tf.int32) - self.r = tf.constant(r, dtype=tf.int32) - self.output_dim = r * self.frame_dim - self.separate_stopnet = separate_stopnet - self.enable_tflite = enable_tflite - - # layer constants - self.max_decoder_steps = tf.constant(1000, dtype=tf.int32) - self.stop_thresh = tf.constant(0.5, dtype=tf.float32) - - # model dimensions - self.query_dim = 1024 - self.decoder_rnn_dim = 1024 - self.prenet_dim = 256 - self.attn_dim = 128 - self.p_attention_dropout = 0.1 - self.p_decoder_dropout = 0.1 - - self.prenet = Prenet(prenet_type, - prenet_dropout, - [self.prenet_dim, self.prenet_dim], - bias=False, - name='prenet') - self.attention_rnn = keras.layers.LSTMCell(self.query_dim, use_bias=True, name='attention_rnn', ) - self.attention_rnn_dropout = keras.layers.Dropout(0.5) - - # TODO: implement other attn options - self.attention = Attention(attn_dim=self.attn_dim, - use_loc_attn=True, - loc_attn_n_filters=32, - loc_attn_kernel_size=31, - use_windowing=False, - norm=attn_norm, - use_forward_attn=use_forward_attn, - use_trans_agent=use_trans_agent, - use_forward_attn_mask=use_forward_attn_mask, - name='attention') - self.decoder_rnn = keras.layers.LSTMCell(self.decoder_rnn_dim, use_bias=True, name='decoder_rnn') - self.decoder_rnn_dropout = keras.layers.Dropout(0.5) - self.linear_projection = keras.layers.Dense(self.frame_dim * r, name='linear_projection/linear_layer') - self.stopnet = keras.layers.Dense(1, name='stopnet/linear_layer') - - - def set_max_decoder_steps(self, new_max_steps): - self.max_decoder_steps = tf.constant(new_max_steps, dtype=tf.int32) - - def set_r(self, new_r): - self.r = tf.constant(new_r, dtype=tf.int32) - self.output_dim = self.frame_dim * 
new_r - - def build_decoder_initial_states(self, batch_size, memory_dim, memory_length): - zero_frame = tf.zeros([batch_size, self.frame_dim]) - zero_context = tf.zeros([batch_size, memory_dim]) - attention_rnn_state = self.attention_rnn.get_initial_state(batch_size=batch_size, dtype=tf.float32) - decoder_rnn_state = self.decoder_rnn.get_initial_state(batch_size=batch_size, dtype=tf.float32) - attention_states = self.attention.init_states(batch_size, memory_length) - return zero_frame, zero_context, attention_rnn_state, decoder_rnn_state, attention_states - - def step(self, prenet_next, states, - memory_seq_length=None, training=None): - _, context_next, attention_rnn_state, decoder_rnn_state, attention_states = states - attention_rnn_input = tf.concat([prenet_next, context_next], -1) - attention_rnn_output, attention_rnn_state = \ - self.attention_rnn(attention_rnn_input, - attention_rnn_state, training=training) - attention_rnn_output = self.attention_rnn_dropout(attention_rnn_output, training=training) - context, attention, attention_states = self.attention(attention_rnn_output, attention_states, training=training) - decoder_rnn_input = tf.concat([attention_rnn_output, context], -1) - decoder_rnn_output, decoder_rnn_state = \ - self.decoder_rnn(decoder_rnn_input, decoder_rnn_state, training=training) - decoder_rnn_output = self.decoder_rnn_dropout(decoder_rnn_output, training=training) - linear_projection_input = tf.concat([decoder_rnn_output, context], -1) - output_frame = self.linear_projection(linear_projection_input, training=training) - stopnet_input = tf.concat([decoder_rnn_output, output_frame], -1) - stopnet_output = self.stopnet(stopnet_input, training=training) - output_frame = output_frame[:, :self.r * self.frame_dim] - states = (output_frame[:, self.frame_dim * (self.r - 1):], context, attention_rnn_state, decoder_rnn_state, attention_states) - return output_frame, stopnet_output, states, attention - - def decode(self, memory, states, frames, memory_seq_length=None): - B, _, _ = shape_list(memory) - num_iter = shape_list(frames)[1] // self.r - # init states - frame_zero = tf.expand_dims(states[0], 1) - frames = tf.concat([frame_zero, frames], axis=1) - outputs = tf.TensorArray(dtype=tf.float32, size=num_iter) - attentions = tf.TensorArray(dtype=tf.float32, size=num_iter) - stop_tokens = tf.TensorArray(dtype=tf.float32, size=num_iter) - # pre-computes - self.attention.process_values(memory) - prenet_output = self.prenet(frames, training=True) - step_count = tf.constant(0, dtype=tf.int32) - - def _body(step, memory, prenet_output, states, outputs, stop_tokens, attentions): - prenet_next = prenet_output[:, step] - output, stop_token, states, attention = self.step(prenet_next, - states, - memory_seq_length) - outputs = outputs.write(step, output) - attentions = attentions.write(step, attention) - stop_tokens = stop_tokens.write(step, stop_token) - return step + 1, memory, prenet_output, states, outputs, stop_tokens, attentions - _, memory, _, states, outputs, stop_tokens, attentions = \ - tf.while_loop(lambda *arg: True, - _body, - loop_vars=(step_count, memory, prenet_output, - states, outputs, stop_tokens, attentions), - parallel_iterations=32, - swap_memory=True, - maximum_iterations=num_iter) - - outputs = outputs.stack() - attentions = attentions.stack() - stop_tokens = stop_tokens.stack() - outputs = tf.transpose(outputs, [1, 0, 2]) - attentions = tf.transpose(attentions, [1, 0, 2]) - stop_tokens = tf.transpose(stop_tokens, [1, 0, 2]) - stop_tokens = 
tf.squeeze(stop_tokens, axis=2) - outputs = tf.reshape(outputs, [B, -1, self.frame_dim]) - return outputs, stop_tokens, attentions - - def decode_inference(self, memory, states): - B, _, _ = shape_list(memory) - # init states - outputs = tf.TensorArray(dtype=tf.float32, size=0, clear_after_read=False, dynamic_size=True) - attentions = tf.TensorArray(dtype=tf.float32, size=0, clear_after_read=False, dynamic_size=True) - stop_tokens = tf.TensorArray(dtype=tf.float32, size=0, clear_after_read=False, dynamic_size=True) - - # pre-computes - self.attention.process_values(memory) - - # iter vars - stop_flag = tf.constant(False, dtype=tf.bool) - step_count = tf.constant(0, dtype=tf.int32) - - def _body(step, memory, states, outputs, stop_tokens, attentions, stop_flag): - frame_next = states[0] - prenet_next = self.prenet(frame_next, training=False) - output, stop_token, states, attention = self.step(prenet_next, - states, - None, - training=False) - stop_token = tf.math.sigmoid(stop_token) - outputs = outputs.write(step, output) - attentions = attentions.write(step, attention) - stop_tokens = stop_tokens.write(step, stop_token) - stop_flag = tf.greater(stop_token, self.stop_thresh) - stop_flag = tf.reduce_all(stop_flag) - return step + 1, memory, states, outputs, stop_tokens, attentions, stop_flag - - cond = lambda step, m, s, o, st, a, stop_flag: tf.equal(stop_flag, tf.constant(False, dtype=tf.bool)) - _, memory, states, outputs, stop_tokens, attentions, stop_flag = \ - tf.while_loop(cond, - _body, - loop_vars=(step_count, memory, states, outputs, - stop_tokens, attentions, stop_flag), - parallel_iterations=32, - swap_memory=True, - maximum_iterations=self.max_decoder_steps) - - outputs = outputs.stack() - attentions = attentions.stack() - stop_tokens = stop_tokens.stack() - - outputs = tf.transpose(outputs, [1, 0, 2]) - attentions = tf.transpose(attentions, [1, 0, 2]) - stop_tokens = tf.transpose(stop_tokens, [1, 0, 2]) - stop_tokens = tf.squeeze(stop_tokens, axis=2) - outputs = tf.reshape(outputs, [B, -1, self.frame_dim]) - return outputs, stop_tokens, attentions - - def decode_inference_tflite(self, memory, states): - """Inference with TF-Lite compatibility. 
It assumes - batch_size is 1""" - # init states - # dynamic_shape is not supported in TFLite - outputs = tf.TensorArray(dtype=tf.float32, - size=self.max_decoder_steps, - element_shape=tf.TensorShape( - [self.output_dim]), - clear_after_read=False, - dynamic_size=False) - # stop_flags = tf.TensorArray(dtype=tf.bool, - # size=self.max_decoder_steps, - # element_shape=tf.TensorShape( - # []), - # clear_after_read=False, - # dynamic_size=False) - attentions = () - stop_tokens = () - - # pre-computes - self.attention.process_values(memory) - - # iter vars - stop_flag = tf.constant(False, dtype=tf.bool) - step_count = tf.constant(0, dtype=tf.int32) - - def _body(step, memory, states, outputs, stop_flag): - frame_next = states[0] - prenet_next = self.prenet(frame_next, training=False) - output, stop_token, states, _ = self.step(prenet_next, - states, - None, - training=False) - stop_token = tf.math.sigmoid(stop_token) - stop_flag = tf.greater(stop_token, self.stop_thresh) - stop_flag = tf.reduce_all(stop_flag) - # stop_flags = stop_flags.write(step, tf.logical_not(stop_flag)) - - outputs = outputs.write(step, tf.reshape(output, [-1])) - return step + 1, memory, states, outputs, stop_flag - - cond = lambda step, m, s, o, stop_flag: tf.equal(stop_flag, tf.constant(False, dtype=tf.bool)) - step_count, memory, states, outputs, stop_flag = \ - tf.while_loop(cond, - _body, - loop_vars=(step_count, memory, states, outputs, - stop_flag), - parallel_iterations=32, - swap_memory=True, - maximum_iterations=self.max_decoder_steps) - - - outputs = outputs.stack() - outputs = tf.gather(outputs, tf.range(step_count)) # pylint: disable=no-value-for-parameter - outputs = tf.expand_dims(outputs, axis=[0]) - outputs = tf.transpose(outputs, [1, 0, 2]) - outputs = tf.reshape(outputs, [1, -1, self.frame_dim]) - return outputs, stop_tokens, attentions - - - def call(self, memory, states, frames=None, memory_seq_length=None, training=False): - if training: - return self.decode(memory, states, frames, memory_seq_length) - if self.enable_tflite: - return self.decode_inference_tflite(memory, states) - return self.decode_inference(memory, states) diff --git a/tf/models/tacotron2.py b/tf/models/tacotron2.py deleted file mode 100644 index 70d725e2..00000000 --- a/tf/models/tacotron2.py +++ /dev/null @@ -1,108 +0,0 @@ -import tensorflow as tf -from tensorflow import keras - -from TTS.tf.layers.tacotron2 import Encoder, Decoder, Postnet -from TTS.tf.utils.tf_utils import shape_list - - -#pylint: disable=too-many-ancestors -class Tacotron2(keras.models.Model): - def __init__(self, - num_chars, - num_speakers, - r, - postnet_output_dim=80, - decoder_output_dim=80, - attn_type='original', - attn_win=False, - attn_norm="softmax", - attn_K=4, - prenet_type="original", - prenet_dropout=True, - forward_attn=False, - trans_agent=False, - forward_attn_mask=False, - location_attn=True, - separate_stopnet=True, - bidirectional_decoder=False, - enable_tflite=False): - super(Tacotron2, self).__init__() - self.r = r - self.decoder_output_dim = decoder_output_dim - self.postnet_output_dim = postnet_output_dim - self.bidirectional_decoder = bidirectional_decoder - self.num_speakers = num_speakers - self.speaker_embed_dim = 256 - self.enable_tflite = enable_tflite - - self.embedding = keras.layers.Embedding(num_chars, 512, name='embedding') - self.encoder = Encoder(512, name='encoder') - # TODO: most of the decoder args have no use at the momment - self.decoder = Decoder(decoder_output_dim, - r, - attn_type=attn_type, - use_attn_win=attn_win, 
- attn_norm=attn_norm, - prenet_type=prenet_type, - prenet_dropout=prenet_dropout, - use_forward_attn=forward_attn, - use_trans_agent=trans_agent, - use_forward_attn_mask=forward_attn_mask, - use_location_attn=location_attn, - attn_K=attn_K, - separate_stopnet=separate_stopnet, - speaker_emb_dim=self.speaker_embed_dim, - name='decoder', - enable_tflite=enable_tflite) - self.postnet = Postnet(postnet_output_dim, 5, name='postnet') - - @tf.function(experimental_relax_shapes=True) - def call(self, characters, text_lengths=None, frames=None, training=None): - if training: - return self.training(characters, text_lengths, frames) - if not training: - return self.inference(characters) - raise RuntimeError(' [!] Set model training mode True or False') - - def training(self, characters, text_lengths, frames): - B, T = shape_list(characters) - embedding_vectors = self.embedding(characters, training=True) - encoder_output = self.encoder(embedding_vectors, training=True) - decoder_states = self.decoder.build_decoder_initial_states(B, 512, T) - decoder_frames, stop_tokens, attentions = self.decoder(encoder_output, decoder_states, frames, text_lengths, training=True) - postnet_frames = self.postnet(decoder_frames, training=True) - output_frames = decoder_frames + postnet_frames - return decoder_frames, output_frames, attentions, stop_tokens - - def inference(self, characters): - B, T = shape_list(characters) - embedding_vectors = self.embedding(characters, training=False) - encoder_output = self.encoder(embedding_vectors, training=False) - decoder_states = self.decoder.build_decoder_initial_states(B, 512, T) - decoder_frames, stop_tokens, attentions = self.decoder(encoder_output, decoder_states, training=False) - postnet_frames = self.postnet(decoder_frames, training=False) - output_frames = decoder_frames + postnet_frames - print(output_frames.shape) - return decoder_frames, output_frames, attentions, stop_tokens - - @tf.function( - experimental_relax_shapes=True, - input_signature=[ - tf.TensorSpec([1, None], dtype=tf.int32), - ],) - def inference_tflite(self, characters): - B, T = shape_list(characters) - embedding_vectors = self.embedding(characters, training=False) - encoder_output = self.encoder(embedding_vectors, training=False) - decoder_states = self.decoder.build_decoder_initial_states(B, 512, T) - decoder_frames, stop_tokens, attentions = self.decoder(encoder_output, decoder_states, training=False) - postnet_frames = self.postnet(decoder_frames, training=False) - output_frames = decoder_frames + postnet_frames - print(output_frames.shape) - return decoder_frames, output_frames, attentions, stop_tokens - - def build_inference(self, ): - # TODO: issue https://github.com/PyCQA/pylint/issues/3613 - input_ids = tf.random.uniform(shape=[1, 4], maxval=10, dtype=tf.int32) #pylint: disable=unexpected-keyword-arg - self(input_ids) - diff --git a/tf/notebooks/Benchmark-TTS_tf.ipynb b/tf/notebooks/Benchmark-TTS_tf.ipynb deleted file mode 100644 index 4a21ae17..00000000 --- a/tf/notebooks/Benchmark-TTS_tf.ipynb +++ /dev/null @@ -1,714 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "Collapsed": "false" - }, - "source": [ - "This is to test TTS tensorflow models with benchmark sentences.\n", - "\n", - "Before running this script please DON'T FORGET: \n", - "- to set file paths.\n", - "- to download related models.\n", - " - Sample TF model: https://www.dropbox.com/sh/3b1fat5oxqab6yn/AADDlNs-9-r7ASbVnFYx3RHHa?dl=0\n", - "- download or clone related repos, linked below.\n", - "- 
setup the repositories. ```python setup.py install```\n", - "- to checkout right commit versions (given next to the model in the models page).\n", - "- to set the file paths below.\n", - "\n", - "Repositories:\n", - "- TTS: https://github.com/mozilla/TTS\n", - "- PWGAN: https://github.com/erogol/ParallelWaveGAN (if you like to use a vocoder model)\n", - "\n", - "Known Issues:\n", - "- To load the model second time you need to restart the notebook kernel. \n", - "- Some of the advance methods are not yet implemented for Tensorflow." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false", - "scrolled": true - }, - "outputs": [], - "source": [ - "%load_ext autoreload\n", - "%autoreload 2\n", - "import os\n", - "\n", - "# you may need to change this depending on your system\n", - "os.environ['CUDA_VISIBLE_DEVICES']='1'\n", - "\n", - "import sys\n", - "import io\n", - "import torch \n", - "import tensorflow as tf\n", - "print(tf.config.list_physical_devices('GPU'))\n", - "\n", - "import time\n", - "import json\n", - "import yaml\n", - "import numpy as np\n", - "from collections import OrderedDict\n", - "import matplotlib.pyplot as plt\n", - "plt.rcParams[\"figure.figsize\"] = (16,5)\n", - "\n", - "import librosa\n", - "import librosa.display\n", - "\n", - "from TTS.tf.models.tacotron2 import Tacotron2\n", - "from TTS.tf.utils.generic_utils import setup_model, load_checkpoint\n", - "from TTS.utils.audio import AudioProcessor\n", - "from TTS.utils.io import load_config\n", - "from TTS.utils.synthesis import synthesis\n", - "from TTS.utils.visual import visualize\n", - "\n", - "import IPython\n", - "from IPython.display import Audio\n", - "\n", - "%matplotlib inline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "def tts(model, text, CONFIG, use_cuda, ap, use_gl, figures=True):\n", - " t_1 = time.time()\n", - " waveform, alignment, mel_spec, mel_postnet_spec, stop_tokens, inputs = synthesis(model, text, CONFIG, use_cuda, ap, None, None, False, CONFIG.enable_eos_bos_chars, use_gl, backend=BACKEND)\n", - " if CONFIG.model == \"Tacotron\" and not use_gl:\n", - " # coorect the normalization differences b/w TTS and the Vocoder.\n", - " mel_postnet_spec = ap.out_linear_to_mel(mel_postnet_spec.T).T\n", - " print(mel_postnet_spec.shape)\n", - " print(\"max- \", mel_postnet_spec.max(), \" -- min- \", mel_postnet_spec.min())\n", - " if not use_gl:\n", - " waveform = vocoder_model.inference(torch.FloatTensor(mel_postnet_spec.T).unsqueeze(0))\n", - " mel_postnet_spec = ap._denormalize(mel_postnet_spec.T).T\n", - " if use_cuda and not use_gl:\n", - " waveform = waveform.cpu()\n", - " waveform = waveform.numpy()\n", - " waveform = waveform.squeeze()\n", - " rtf = (time.time() - t_1) / (len(waveform) / ap.sample_rate)\n", - " print(waveform.shape)\n", - " print(\" > Run-time: {}\".format(time.time() - t_1))\n", - " print(\" > Real-time factor: {}\".format(rtf))\n", - " if figures: \n", - " visualize(alignment, mel_postnet_spec, stop_tokens, text, ap.hop_length, CONFIG, ap._denormalize(mel_spec.T).T) \n", - " IPython.display.display(Audio(waveform, rate=CONFIG.audio['sample_rate'], normalize=True)) \n", - " os.makedirs(OUT_FOLDER, exist_ok=True)\n", - " file_name = text.replace(\" \", \"_\").replace(\".\",\"\") + \".wav\"\n", - " out_path = os.path.join(OUT_FOLDER, file_name)\n", - " ap.save_wav(waveform, out_path)\n", - " return alignment, mel_postnet_spec, stop_tokens, 
waveform" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "# Set constants\n", - "ROOT_PATH = '../torch_model/'\n", - "MODEL_PATH = ROOT_PATH + '/tts_tf_checkpoint_360000.pkl'\n", - "CONFIG_PATH = ROOT_PATH + '/config.json'\n", - "OUT_FOLDER = '/home/erogol/Dropbox/AudioSamples/benchmark_samples/'\n", - "CONFIG = load_config(CONFIG_PATH)\n", - "# Run FLAGs\n", - "use_cuda = True # use the available GPU (only for torch)\n", - "# Set the vocoder\n", - "use_gl = True # use GL if True\n", - "BACKEND = 'tf' # set the backend for inference " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false", - "scrolled": true - }, - "outputs": [], - "source": [ - "from TTS.utils.text.symbols import symbols, phonemes, make_symbols\n", - "from TTS.tf.utils.convert_torch_to_tf_utils import tf_create_dummy_inputs\n", - "c = CONFIG\n", - "num_speakers = 0\n", - "r = 1\n", - "num_chars = len(phonemes) if c.use_phonemes else len(symbols)\n", - "model = setup_model(num_chars, num_speakers, c)\n", - "\n", - "# before loading weights you need to run the model once to generate the variables\n", - "input_ids, input_lengths, mel_outputs, mel_lengths = tf_create_dummy_inputs()\n", - "mel_pred = model(input_ids, training=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false", - "scrolled": true - }, - "outputs": [], - "source": [ - "model = load_checkpoint(model, MODEL_PATH)\n", - "# model = tf.function(model, experimental_relax_shapes=True)\n", - "ap = AudioProcessor(**CONFIG.audio) " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "# wrapper class to use tf.function\n", - "class ModelInference(tf.keras.Model):\n", - " def __init__(self, model):\n", - " super(ModelInference, self).__init__()\n", - " self.model = model\n", - " \n", - " @tf.function(input_signature=[tf.TensorSpec(shape=(None, None), dtype=tf.int32)])\n", - " def call(self, characters):\n", - " return self.model(characters, training=False)\n", - " \n", - "model = ModelInference(model)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "# LOAD WAVERNN\n", - "if use_gl == False:\n", - " from parallel_wavegan.models import ParallelWaveGANGenerator, MelGANGenerator\n", - " \n", - " vocoder_model = MelGANGenerator(**VOCODER_CONFIG[\"generator_params\"])\n", - " vocoder_model.load_state_dict(torch.load(VOCODER_MODEL_PATH, map_location=\"cpu\")[\"model\"][\"generator\"])\n", - " vocoder_model.remove_weight_norm()\n", - " ap_vocoder = AudioProcessor(**VOCODER_CONFIG['audio']) \n", - " if use_cuda:\n", - " vocoder_model.cuda()\n", - " vocoder_model.eval();\n", - " print(count_parameters(vocoder_model))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "Collapsed": "false" - }, - "source": [ - "### Comparision with https://mycroft.ai/blog/available-voices/" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Bill got in the habit of asking himself “Is that thought true?” and if he wasn’t absolutely certain it was, he just let it go.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "markdown", - 
"metadata": { - "Collapsed": "false" - }, - "source": [ - "### https://espnet.github.io/icassp2020-tts/" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"The Commission also recommends\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"As a result of these studies, the planning document submitted by the Secretary of the Treasury to the Bureau of the Budget on August thirty-one.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"The FBI now transmits information on all defectors, a category which would, of course, have included Oswald.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"they seem unduly restrictive in continuing to require some manifestation of animus against a Government official.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"and each agency given clear understanding of the assistance which the Secret Service expects.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "Collapsed": "false" - }, - "source": [ - "### Other examples" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Be a voice, not an echo.\" # 'echo' is not in training set. \n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"It took me quite a long time to develop a voice, and now that I have it I'm not going to be silent.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"The human voice is the most perfect instrument of all.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"I'm sorry Dave. 
I'm afraid I can't do that.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"This cake is great. It's so delicious and moist.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "Collapsed": "false" - }, - "source": [ - "### Comparison with https://keithito.github.io/audio-samples/" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Generative adversarial network or variational auto-encoder.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Scientists at the CERN laboratory say they have discovered a new particle.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Here’s a way to measure the acute emotional intelligence that has never gone out of style.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"President Trump met with other leaders at the Group of 20 conference.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"The buses aren't the problem, they actually provide a solution.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "Collapsed": "false" - }, - "source": [ - "### Comparison with https://google.github.io/tacotron/publications/tacotron/index.html" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Generative adversarial network or variational auto-encoder.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Basilar membrane and otolaryngology are not auto-correlations.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \" He has read the whole thing.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - 
"Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"He reads books.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Thisss isrealy awhsome.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"This is your internet browser, Firefox.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"This is your internet browser Firefox.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"The quick brown fox jumps over the lazy dog.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Does the quick brown fox jump over the lazy dog?\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Eren, how are you?\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "Collapsed": "false" - }, - "source": [ - "### Hard Sentences" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Encouraged, he started with a minute a day.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"His meditation consisted of “body scanning” which involved focusing his mind and energy on each section of the body from head to toe .\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Recent research at Harvard has shown meditating for as little as 8 weeks can actually increase the grey matter in the parts of the brain responsible for emotional regulation and learning . 
\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"If he decided to watch TV he really watched it.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sentence = \"Often we try to bring about change through sheer effort and we put all of our energy into a new initiative .\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "# for twb dataset\n", - "sentence = \"In our preparation for Easter, God in his providence offers us each year the season of Lent as a sacramental sign of our conversion.\"\n", - "align, spec, stop_tokens, wav = tts(model, sentence, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "wavs = []\n", - "model.eval()\n", - "model.decoder.prenet.eval()\n", - "model.decoder.max_decoder_steps = 2000\n", - "# model.decoder.prenet.train()\n", - "speaker_id = None\n", - "sentence = '''This is App Store Optimization report.\n", - "The first tab on the report is App Details. App details report is updated weekly and Datetime column shows the latest report update date. The widget displays the app icon, respective app version, visual assets on the store, app description, latest app update date on the Appstore/Google PlayStore and what’s new section.\n", - "In App Details tab, you can see not only your app but all Delivery Hero apps since we think it can be inspiring to see the other apps, their description and screenshots. \n", - "Product name is the actual app name on the AppStore or Google Play Store.\n", - "Screenshot URLs column display the actual screenshots on the store for the current version. No resizing is done. If you click on the screenshot, you can see it in full-size.\n", - "Current release date show the latest app update date when the query is run. Here we see that Appetito24 Android is updated to app version 4.6.3.2 on 28th of March.\n", - "If the description is too long, clarisights is not able to display the full description; however, if you select description and current_release_date cells to copy and paste it to a text editor, you'll see the full description.\n", - "If you scroll down in the widget, you can see the older app versions for the same apps. 
Or you can filter Datetime to see a specific timeframe and the apps’ Store presence back then.\n", - "You can also filter for a specific app using Product Name.\n", - "If the description is too long, clarisights is not able to display the full description; however, if you select description and current_release_date cells to copy and paste it to a text editor, you'll see the full description.\n", - "'''\n", - "\n", - "for s in sentence.split('\\n'):\n", - " print(s)\n", - " align, spec, stop_tokens, wav = tts(model, s, CONFIG, use_cuda, ap, use_gl=use_gl, figures=True)\n", - " wavs = np.concatenate([wavs, np.zeros(int(ap.sample_rate * 0.5)), wav])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tf/tests/__init__.py b/tf/tests/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/tf/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/tf/tests/test_layers_tf.py b/tf/tests/test_layers_tf.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tf/utils/convert_torch_to_tf_utils.py b/tf/utils/convert_torch_to_tf_utils.py deleted file mode 100644 index e9e1e8a3..00000000 --- a/tf/utils/convert_torch_to_tf_utils.py +++ /dev/null @@ -1,81 +0,0 @@ -import numpy as np -import tensorflow as tf - - -def tf_create_dummy_inputs(): - """ Create dummy inputs for TF Tacotron2 model """ - batch_size = 4 - max_input_length = 32 - max_mel_length = 128 - pad = 1 - n_chars = 24 - input_ids = tf.random.uniform([batch_size, max_input_length + pad], maxval=n_chars, dtype=tf.int32) - input_lengths = np.random.randint(0, high=max_input_length+1 + pad, size=[batch_size]) - input_lengths[-1] = max_input_length - input_lengths = tf.convert_to_tensor(input_lengths, dtype=tf.int32) - mel_outputs = tf.random.uniform(shape=[batch_size, max_mel_length + pad, 80]) - mel_lengths = np.random.randint(0, high=max_mel_length+1 + pad, size=[batch_size]) - mel_lengths[-1] = max_mel_length - mel_lengths = tf.convert_to_tensor(mel_lengths, dtype=tf.int32) - return input_ids, input_lengths, mel_outputs, mel_lengths - - -def compare_torch_tf(torch_tensor, tf_tensor): - """ Compute the average absolute difference b/w torch and tf tensors """ - return abs(torch_tensor.detach().numpy() - tf_tensor.numpy()).mean() - - -def convert_tf_name(tf_name): - """ Convert certain patterns in TF layer names to Torch patterns """ - tf_name_tmp = tf_name - tf_name_tmp = tf_name_tmp.replace(':0', '') - tf_name_tmp = tf_name_tmp.replace('/forward_lstm/lstm_cell_1/recurrent_kernel', '/weight_hh_l0') - tf_name_tmp = tf_name_tmp.replace('/forward_lstm/lstm_cell_2/kernel', '/weight_ih_l1') - tf_name_tmp = tf_name_tmp.replace('/recurrent_kernel', '/weight_hh') - tf_name_tmp = tf_name_tmp.replace('/kernel', '/weight') - tf_name_tmp = tf_name_tmp.replace('/gamma', '/weight') - tf_name_tmp = tf_name_tmp.replace('/beta', '/bias') - tf_name_tmp = tf_name_tmp.replace('/', '.') - return tf_name_tmp - - -def transfer_weights_torch_to_tf(tf_vars, var_map_dict, state_dict): - """ Transfer weigths from torch state_dict 
to TF variables """ - print(" > Passing weights from Torch to TF ...") - for tf_var in tf_vars: - torch_var_name = var_map_dict[tf_var.name] - print(f' | > {tf_var.name} <-- {torch_var_name}') - # if tuple, it is a bias variable - if not isinstance(torch_var_name, tuple): - torch_layer_name = '.'.join(torch_var_name.split('.')[-2:]) - torch_weight = state_dict[torch_var_name] - if 'convolution1d/kernel' in tf_var.name or 'conv1d/kernel' in tf_var.name: - # out_dim, in_dim, filter -> filter, in_dim, out_dim - numpy_weight = torch_weight.permute([2, 1, 0]).detach().cpu().numpy() - elif 'lstm_cell' in tf_var.name and 'kernel' in tf_var.name: - numpy_weight = torch_weight.transpose(0, 1).detach().cpu().numpy() - # if variable is for bidirectional lstm and it is a bias vector there - # needs to be pre-defined two matching torch bias vectors - elif '_lstm/lstm_cell_' in tf_var.name and 'bias' in tf_var.name: - bias_vectors = [value for key, value in state_dict.items() if key in torch_var_name] - assert len(bias_vectors) == 2 - numpy_weight = bias_vectors[0] + bias_vectors[1] - elif 'rnn' in tf_var.name and 'kernel' in tf_var.name: - numpy_weight = torch_weight.transpose(0, 1).detach().cpu().numpy() - elif 'rnn' in tf_var.name and 'bias' in tf_var.name: - bias_vectors = [value for key, value in state_dict.items() if torch_var_name[:-2] in key] - assert len(bias_vectors) == 2 - numpy_weight = bias_vectors[0] + bias_vectors[1] - elif 'linear_layer' in torch_layer_name and 'weight' in torch_var_name: - numpy_weight = torch_weight.transpose(0, 1).detach().cpu().numpy() - else: - numpy_weight = torch_weight.detach().cpu().numpy() - assert np.all(tf_var.shape == numpy_weight.shape), f" [!] weight shapes does not match: {tf_var.name} vs {torch_var_name} --> {tf_var.shape} vs {numpy_weight.shape}" - tf.keras.backend.set_value(tf_var, numpy_weight) - return tf_vars - - -def load_tf_vars(model_tf, tf_vars): - for tf_var in tf_vars: - model_tf.get_layer(tf_var.name).set_weights(tf_var) - return model_tf diff --git a/tf/utils/generic_utils.py b/tf/utils/generic_utils.py deleted file mode 100644 index 1fea4cbb..00000000 --- a/tf/utils/generic_utils.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import datetime -import importlib -import pickle -import numpy as np -import tensorflow as tf - - -def save_checkpoint(model, optimizer, current_step, epoch, r, output_path, **kwargs): - state = { - 'model': model.weights, - 'optimizer': optimizer, - 'step': current_step, - 'epoch': epoch, - 'date': datetime.date.today().strftime("%B %d, %Y"), - 'r': r - } - state.update(kwargs) - pickle.dump(state, open(output_path, 'wb')) - - -def load_checkpoint(model, checkpoint_path): - checkpoint = pickle.load(open(checkpoint_path, 'rb')) - chkp_var_dict = {var.name: var.numpy() for var in checkpoint['model']} - tf_vars = model.weights - for tf_var in tf_vars: - layer_name = tf_var.name - try: - chkp_var_value = chkp_var_dict[layer_name] - except KeyError: - class_name = list(chkp_var_dict.keys())[0].split("/")[0] - layer_name = f"{class_name}/{layer_name}" - chkp_var_value = chkp_var_dict[layer_name] - - tf.keras.backend.set_value(tf_var, chkp_var_value) - if 'r' in checkpoint.keys(): - model.decoder.set_r(checkpoint['r']) - return model - - -def sequence_mask(sequence_length, max_len=None): - if max_len is None: - max_len = sequence_length.max() - batch_size = sequence_length.size(0) - seq_range = np.empty([0, max_len], dtype=np.int8) - seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len) - if 
sequence_length.is_cuda: - seq_range_expand = seq_range_expand.cuda() - seq_length_expand = ( - sequence_length.unsqueeze(1).expand_as(seq_range_expand)) - # B x T_max - return seq_range_expand < seq_length_expand - - -# @tf.custom_gradient -def check_gradient(x, grad_clip): - x_normed = tf.clip_by_norm(x, grad_clip) - grad_norm = tf.norm(grad_clip) - return x_normed, grad_norm - - -def count_parameters(model, c): - try: - return model.count_params() - except RuntimeError: - input_dummy = tf.convert_to_tensor(np.random.rand(8, 128).astype('int32')) - input_lengths = np.random.randint(100, 129, (8, )) - input_lengths[-1] = 128 - input_lengths = tf.convert_to_tensor(input_lengths.astype('int32')) - mel_spec = np.random.rand(8, 2 * c.r, - c.audio['num_mels']).astype('float32') - mel_spec = tf.convert_to_tensor(mel_spec) - speaker_ids = np.random.randint( - 0, 5, (8, )) if c.use_speaker_embedding else None - _ = model(input_dummy, input_lengths, mel_spec, speaker_ids=speaker_ids) - return model.count_params() - - -def setup_model(num_chars, num_speakers, c, enable_tflite=False): - print(" > Using model: {}".format(c.model)) - MyModel = importlib.import_module('TTS.tf.models.' + c.model.lower()) - MyModel = getattr(MyModel, c.model) - if c.model.lower() in "tacotron": - raise NotImplementedError(' [!] Tacotron model is not ready.') - # tacotron2 - model = MyModel(num_chars=num_chars, - num_speakers=num_speakers, - r=c.r, - postnet_output_dim=c.audio['num_mels'], - decoder_output_dim=c.audio['num_mels'], - attn_type=c.attention_type, - attn_win=c.windowing, - attn_norm=c.attention_norm, - prenet_type=c.prenet_type, - prenet_dropout=c.prenet_dropout, - forward_attn=c.use_forward_attn, - trans_agent=c.transition_agent, - forward_attn_mask=c.forward_attn_mask, - location_attn=c.location_attn, - attn_K=c.attention_heads, - separate_stopnet=c.separate_stopnet, - bidirectional_decoder=c.bidirectional_decoder, - enable_tflite=enable_tflite) - return model diff --git a/tf/utils/io.py b/tf/utils/io.py deleted file mode 100644 index 78a56de4..00000000 --- a/tf/utils/io.py +++ /dev/null @@ -1,42 +0,0 @@ -import pickle -import datetime -import tensorflow as tf - - -def save_checkpoint(model, optimizer, current_step, epoch, r, output_path, **kwargs): - state = { - 'model': model.weights, - 'optimizer': optimizer, - 'step': current_step, - 'epoch': epoch, - 'date': datetime.date.today().strftime("%B %d, %Y"), - 'r': r - } - state.update(kwargs) - pickle.dump(state, open(output_path, 'wb')) - - -def load_checkpoint(model, checkpoint_path): - checkpoint = pickle.load(open(checkpoint_path, 'rb')) - chkp_var_dict = {var.name: var.numpy() for var in checkpoint['model']} - tf_vars = model.weights - for tf_var in tf_vars: - layer_name = tf_var.name - try: - chkp_var_value = chkp_var_dict[layer_name] - except KeyError: - class_name = list(chkp_var_dict.keys())[0].split("/")[0] - layer_name = f"{class_name}/{layer_name}" - chkp_var_value = chkp_var_dict[layer_name] - - tf.keras.backend.set_value(tf_var, chkp_var_value) - if 'r' in checkpoint.keys(): - model.decoder.set_r(checkpoint['r']) - return model - - -def load_tflite_model(tflite_path): - tflite_model = tf.lite.Interpreter(model_path=tflite_path) - tflite_model.allocate_tensors() - return tflite_model - diff --git a/tf/utils/tf_utils.py b/tf/utils/tf_utils.py deleted file mode 100644 index 558936d5..00000000 --- a/tf/utils/tf_utils.py +++ /dev/null @@ -1,8 +0,0 @@ -import tensorflow as tf - - -def shape_list(x): - """Deal with dynamic shape in tensorflow 
cleanly.""" - static = x.shape.as_list() - dynamic = tf.shape(x) - return [dynamic[i] if s is None else s for i, s in enumerate(static)] diff --git a/tf/utils/tflite.py b/tf/utils/tflite.py deleted file mode 100644 index 5e684b30..00000000 --- a/tf/utils/tflite.py +++ /dev/null @@ -1,31 +0,0 @@ -import tensorflow as tf - - -def convert_tacotron2_to_tflite(model, - output_path=None, - experimental_converter=True): - """Convert Tensorflow Tacotron2 model to TFLite. Save a binary file if output_path is - provided, else return TFLite model.""" - - concrete_function = model.inference_tflite.get_concrete_function() - converter = tf.lite.TFLiteConverter.from_concrete_functions( - [concrete_function]) - converter.experimental_new_converter = experimental_converter - converter.optimizations = [tf.lite.Optimize.DEFAULT] - converter.target_spec.supported_ops = [ - tf.lite.OpsSet.TFLITE_BUILTINS, tf.lite.OpsSet.SELECT_TF_OPS - ] - tflite_model = converter.convert() - print(f'Tflite Model size is {len(tflite_model) / (1024.0 * 1024.0)} MBs.') - if output_path is not None: - # same model binary if outputpath is provided - with open(output_path, 'wb') as f: - f.write(tflite_model) - return None - return tflite_model - - -def load_tflite_model(tflite_path): - tflite_model = tf.lite.Interpreter(model_path=tflite_path) - tflite_model.allocate_tensors() - return tflite_model \ No newline at end of file diff --git a/train.py b/train.py deleted file mode 100644 index 189a6baa..00000000 --- a/train.py +++ /dev/null @@ -1,641 +0,0 @@ -import argparse -import os -import sys -import glob -import time -import traceback - -import numpy as np -import torch -from torch.utils.data import DataLoader - -from TTS.datasets.TTSDataset import MyDataset -from distribute import (DistributedSampler, apply_gradient_allreduce, - init_distributed, reduce_tensor) -from TTS.layers.losses import TacotronLoss -from TTS.utils.audio import AudioProcessor -from TTS.utils.generic_utils import (count_parameters, create_experiment_folder, remove_experiment_folder, - get_git_branch, set_init_dict, - setup_model, KeepAverage, check_config) -from TTS.utils.io import (save_best_model, save_checkpoint, - load_config, copy_config_file) -from TTS.utils.training import (NoamLR, check_update, adam_weight_decay, - gradual_training_scheduler, set_weight_decay, - setup_torch_training_env) -from TTS.utils.tensorboard_logger import TensorboardLogger -from TTS.utils.console_logger import ConsoleLogger -from TTS.utils.speakers import load_speaker_mapping, save_speaker_mapping, \ - get_speakers -from TTS.utils.synthesis import synthesis -from TTS.utils.text.symbols import make_symbols, phonemes, symbols -from TTS.utils.visual import plot_alignment, plot_spectrogram -from TTS.datasets.preprocess import load_meta_data -from TTS.utils.radam import RAdam -from TTS.utils.measures import alignment_diagonal_score - - -use_cuda, num_gpus = setup_torch_training_env(True, False) - - -def setup_loader(ap, r, is_val=False, verbose=False): - if is_val and not c.run_eval: - loader = None - else: - dataset = MyDataset( - r, - c.text_cleaner, - compute_linear_spec=True if c.model.lower() == 'tacotron' else False, - meta_data=meta_data_eval if is_val else meta_data_train, - ap=ap, - tp=c.characters if 'characters' in c.keys() else None, - batch_group_size=0 if is_val else c.batch_group_size * - c.batch_size, - min_seq_len=c.min_seq_len, - max_seq_len=c.max_seq_len, - phoneme_cache_path=c.phoneme_cache_path, - use_phonemes=c.use_phonemes, - 
phoneme_language=c.phoneme_language, - enable_eos_bos=c.enable_eos_bos_chars, - verbose=verbose) - sampler = DistributedSampler(dataset) if num_gpus > 1 else None - loader = DataLoader( - dataset, - batch_size=c.eval_batch_size if is_val else c.batch_size, - shuffle=False, - collate_fn=dataset.collate_fn, - drop_last=False, - sampler=sampler, - num_workers=c.num_val_loader_workers - if is_val else c.num_loader_workers, - pin_memory=False) - return loader - - -def format_data(data): - if c.use_speaker_embedding: - speaker_mapping = load_speaker_mapping(OUT_PATH) - - # setup input data - text_input = data[0] - text_lengths = data[1] - speaker_names = data[2] - linear_input = data[3] if c.model in ["Tacotron"] else None - mel_input = data[4] - mel_lengths = data[5] - stop_targets = data[6] - avg_text_length = torch.mean(text_lengths.float()) - avg_spec_length = torch.mean(mel_lengths.float()) - - if c.use_speaker_embedding: - speaker_ids = [ - speaker_mapping[speaker_name] for speaker_name in speaker_names - ] - speaker_ids = torch.LongTensor(speaker_ids) - else: - speaker_ids = None - - # set stop targets view, we predict a single stop token per iteration. - stop_targets = stop_targets.view(text_input.shape[0], - stop_targets.size(1) // c.r, -1) - stop_targets = (stop_targets.sum(2) > - 0.0).unsqueeze(2).float().squeeze(2) - - # dispatch data to GPU - if use_cuda: - text_input = text_input.cuda(non_blocking=True) - text_lengths = text_lengths.cuda(non_blocking=True) - mel_input = mel_input.cuda(non_blocking=True) - mel_lengths = mel_lengths.cuda(non_blocking=True) - linear_input = linear_input.cuda(non_blocking=True) if c.model in ["Tacotron"] else None - stop_targets = stop_targets.cuda(non_blocking=True) - if speaker_ids is not None: - speaker_ids = speaker_ids.cuda(non_blocking=True) - return text_input, text_lengths, mel_input, mel_lengths, linear_input, stop_targets, speaker_ids, avg_text_length, avg_spec_length - - -def train(model, criterion, optimizer, optimizer_st, scheduler, - ap, global_step, epoch): - data_loader = setup_loader(ap, model.decoder.r, is_val=False, - verbose=(epoch == 0)) - model.train() - epoch_time = 0 - keep_avg = KeepAverage() - if use_cuda: - batch_n_iter = int( - len(data_loader.dataset) / (c.batch_size * num_gpus)) - else: - batch_n_iter = int(len(data_loader.dataset) / c.batch_size) - end_time = time.time() - c_logger.print_train_start() - for num_iter, data in enumerate(data_loader): - start_time = time.time() - - # format data - text_input, text_lengths, mel_input, mel_lengths, linear_input, stop_targets, speaker_ids, avg_text_length, avg_spec_length = format_data(data) - loader_time = time.time() - end_time - - global_step += 1 - - # setup lr - if c.noam_schedule: - scheduler.step() - optimizer.zero_grad() - if optimizer_st: - optimizer_st.zero_grad() - - # forward pass model - if c.bidirectional_decoder or c.double_decoder_consistency: - decoder_output, postnet_output, alignments, stop_tokens, decoder_backward_output, alignments_backward = model( - text_input, text_lengths, mel_input, mel_lengths, speaker_ids=speaker_ids) - else: - decoder_output, postnet_output, alignments, stop_tokens = model( - text_input, text_lengths, mel_input, mel_lengths, speaker_ids=speaker_ids) - decoder_backward_output = None - alignments_backward = None - - # set the alignment lengths wrt reduction factor for guided attention - if mel_lengths.max() % model.decoder.r != 0: - alignment_lengths = (mel_lengths + (model.decoder.r - (mel_lengths.max() % model.decoder.r))) // 
model.decoder.r - else: - alignment_lengths = mel_lengths // model.decoder.r - - # compute loss - loss_dict = criterion(postnet_output, decoder_output, mel_input, - linear_input, stop_tokens, stop_targets, - mel_lengths, decoder_backward_output, - alignments, alignment_lengths, alignments_backward, - text_lengths) - - # backward pass - loss_dict['loss'].backward() - optimizer, current_lr = adam_weight_decay(optimizer) - grad_norm, _ = check_update(model, c.grad_clip, ignore_stopnet=True) - optimizer.step() - - # compute alignment error (the lower the better ) - align_error = 1 - alignment_diagonal_score(alignments) - loss_dict['align_error'] = align_error - - # backpass and check the grad norm for stop loss - if c.separate_stopnet: - loss_dict['stopnet_loss'].backward() - optimizer_st, _ = adam_weight_decay(optimizer_st) - grad_norm_st, _ = check_update(model.decoder.stopnet, 1.0) - optimizer_st.step() - else: - grad_norm_st = 0 - - step_time = time.time() - start_time - epoch_time += step_time - - # aggregate losses from processes - if num_gpus > 1: - loss_dict['postnet_loss'] = reduce_tensor(loss_dict['postnet_loss'].data, num_gpus) - loss_dict['decoder_loss'] = reduce_tensor(loss_dict['decoder_loss'].data, num_gpus) - loss_dict['loss'] = reduce_tensor(loss_dict['loss'] .data, num_gpus) - loss_dict['stopnet_loss'] = reduce_tensor(loss_dict['stopnet_loss'].data, num_gpus) if c.stopnet else loss_dict['stopnet_loss'] - - # detach loss values - loss_dict_new = dict() - for key, value in loss_dict.items(): - if isinstance(value, (int, float)): - loss_dict_new[key] = value - else: - loss_dict_new[key] = value.item() - loss_dict = loss_dict_new - - # update avg stats - update_train_values = dict() - for key, value in loss_dict.items(): - update_train_values['avg_' + key] = value - update_train_values['avg_loader_time'] = loader_time - update_train_values['avg_step_time'] = step_time - keep_avg.update_values(update_train_values) - - # print training progress - if global_step % c.print_step == 0: - c_logger.print_train_step(batch_n_iter, num_iter, global_step, - avg_spec_length, avg_text_length, - step_time, loader_time, current_lr, - loss_dict, keep_avg.avg_values) - - if args.rank == 0: - # Plot Training Iter Stats - # reduce TB load - if global_step % c.tb_plot_step == 0: - iter_stats = { - "lr": current_lr, - "grad_norm": grad_norm, - "grad_norm_st": grad_norm_st, - "step_time": step_time - } - iter_stats.update(loss_dict) - tb_logger.tb_train_iter_stats(global_step, iter_stats) - - if global_step % c.save_step == 0: - if c.checkpoint: - # save model - save_checkpoint(model, optimizer, global_step, epoch, model.decoder.r, OUT_PATH, - optimizer_st=optimizer_st, - model_loss=loss_dict['postnet_loss']) - - # Diagnostic visualizations - const_spec = postnet_output[0].data.cpu().numpy() - gt_spec = linear_input[0].data.cpu().numpy() if c.model in [ - "Tacotron", "TacotronGST" - ] else mel_input[0].data.cpu().numpy() - align_img = alignments[0].data.cpu().numpy() - - figures = { - "prediction": plot_spectrogram(const_spec, ap), - "ground_truth": plot_spectrogram(gt_spec, ap), - "alignment": plot_alignment(align_img), - } - - if c.bidirectional_decoder or c.double_decoder_consistency: - figures["alignment_backward"] = plot_alignment(alignments_backward[0].data.cpu().numpy()) - - tb_logger.tb_train_figures(global_step, figures) - - # Sample audio - if c.model in ["Tacotron", "TacotronGST"]: - train_audio = ap.inv_spectrogram(const_spec.T) - else: - train_audio = ap.inv_melspectrogram(const_spec.T) - 
tb_logger.tb_train_audios(global_step, - {'TrainAudio': train_audio}, - c.audio["sample_rate"]) - end_time = time.time() - - # print epoch stats - c_logger.print_train_epoch_end(global_step, epoch, epoch_time, keep_avg) - - # Plot Epoch Stats - if args.rank == 0: - epoch_stats = {"epoch_time": epoch_time} - epoch_stats.update(keep_avg.avg_values) - tb_logger.tb_train_epoch_stats(global_step, epoch_stats) - if c.tb_model_param_stats: - tb_logger.tb_model_weights(model, global_step) - return keep_avg.avg_values, global_step - - -@torch.no_grad() -def evaluate(model, criterion, ap, global_step, epoch): - data_loader = setup_loader(ap, model.decoder.r, is_val=True) - model.eval() - epoch_time = 0 - keep_avg = KeepAverage() - c_logger.print_eval_start() - if data_loader is not None: - for num_iter, data in enumerate(data_loader): - start_time = time.time() - - # format data - text_input, text_lengths, mel_input, mel_lengths, linear_input, stop_targets, speaker_ids, _, _ = format_data(data) - assert mel_input.shape[1] % model.decoder.r == 0 - - # forward pass model - if c.bidirectional_decoder or c.double_decoder_consistency: - decoder_output, postnet_output, alignments, stop_tokens, decoder_backward_output, alignments_backward = model( - text_input, text_lengths, mel_input, speaker_ids=speaker_ids) - else: - decoder_output, postnet_output, alignments, stop_tokens = model( - text_input, text_lengths, mel_input, speaker_ids=speaker_ids) - decoder_backward_output = None - alignments_backward = None - - # set the alignment lengths wrt reduction factor for guided attention - if mel_lengths.max() % model.decoder.r != 0: - alignment_lengths = (mel_lengths + (model.decoder.r - (mel_lengths.max() % model.decoder.r))) // model.decoder.r - else: - alignment_lengths = mel_lengths // model.decoder.r - - # compute loss - loss_dict = criterion(postnet_output, decoder_output, mel_input, - linear_input, stop_tokens, stop_targets, - mel_lengths, decoder_backward_output, - alignments, alignment_lengths, alignments_backward, - text_lengths) - - # step time - step_time = time.time() - start_time - epoch_time += step_time - - # compute alignment score - align_error = 1 - alignment_diagonal_score(alignments) - loss_dict['align_error'] = align_error - - # aggregate losses from processes - if num_gpus > 1: - loss_dict['postnet_loss'] = reduce_tensor(loss_dict['postnet_loss'].data, num_gpus) - loss_dict['decoder_loss'] = reduce_tensor(loss_dict['decoder_loss'].data, num_gpus) - if c.stopnet: - loss_dict['stopnet_loss'] = reduce_tensor(loss_dict['stopnet_loss'].data, num_gpus) - - # detach loss values - loss_dict_new = dict() - for key, value in loss_dict.items(): - if isinstance(value, (int, float)): - loss_dict_new[key] = value - else: - loss_dict_new[key] = value.item() - loss_dict = loss_dict_new - - # update avg stats - update_train_values = dict() - for key, value in loss_dict.items(): - update_train_values['avg_' + key] = value - keep_avg.update_values(update_train_values) - - if c.print_eval: - c_logger.print_eval_step(num_iter, loss_dict, keep_avg.avg_values) - - if args.rank == 0: - # Diagnostic visualizations - idx = np.random.randint(mel_input.shape[0]) - const_spec = postnet_output[idx].data.cpu().numpy() - gt_spec = linear_input[idx].data.cpu().numpy() if c.model in [ - "Tacotron", "TacotronGST" - ] else mel_input[idx].data.cpu().numpy() - align_img = alignments[idx].data.cpu().numpy() - - eval_figures = { - "prediction": plot_spectrogram(const_spec, ap), - "ground_truth": plot_spectrogram(gt_spec, ap), - 
"alignment": plot_alignment(align_img) - } - - # Sample audio - if c.model in ["Tacotron", "TacotronGST"]: - eval_audio = ap.inv_spectrogram(const_spec.T) - else: - eval_audio = ap.inv_melspectrogram(const_spec.T) - tb_logger.tb_eval_audios(global_step, {"ValAudio": eval_audio}, - c.audio["sample_rate"]) - - # Plot Validation Stats - - if c.bidirectional_decoder or c.double_decoder_consistency: - align_b_img = alignments_backward[idx].data.cpu().numpy() - eval_figures['alignment2'] = plot_alignment(align_b_img) - tb_logger.tb_eval_stats(global_step, keep_avg.avg_values) - tb_logger.tb_eval_figures(global_step, eval_figures) - - if args.rank == 0 and epoch > c.test_delay_epochs: - if c.test_sentences_file is None: - test_sentences = [ - "It took me quite a long time to develop a voice, and now that I have it I'm not going to be silent.", - "Be a voice, not an echo.", - "I'm sorry Dave. I'm afraid I can't do that.", - "This cake is great. It's so delicious and moist.", - "Prior to November 22, 1963." - ] - else: - with open(c.test_sentences_file, "r") as f: - test_sentences = [s.strip() for s in f.readlines()] - - # test sentences - test_audios = {} - test_figures = {} - print(" | > Synthesizing test sentences") - speaker_id = 0 if c.use_speaker_embedding else None - style_wav = c.get("style_wav_for_test") - for idx, test_sentence in enumerate(test_sentences): - try: - wav, alignment, decoder_output, postnet_output, stop_tokens, inputs = synthesis( - model, - test_sentence, - c, - use_cuda, - ap, - speaker_id=speaker_id, - style_wav=style_wav, - truncated=False, - enable_eos_bos_chars=c.enable_eos_bos_chars, #pylint: disable=unused-argument - use_griffin_lim=True, - do_trim_silence=False) - - file_path = os.path.join(AUDIO_PATH, str(global_step)) - os.makedirs(file_path, exist_ok=True) - file_path = os.path.join(file_path, - "TestSentence_{}.wav".format(idx)) - ap.save_wav(wav, file_path) - test_audios['{}-audio'.format(idx)] = wav - test_figures['{}-prediction'.format(idx)] = plot_spectrogram( - postnet_output, ap) - test_figures['{}-alignment'.format(idx)] = plot_alignment( - alignment) - except: - print(" !! Error creating Test Sentence -", idx) - traceback.print_exc() - tb_logger.tb_test_audios(global_step, test_audios, - c.audio['sample_rate']) - tb_logger.tb_test_figures(global_step, test_figures) - return keep_avg.avg_values - - -# FIXME: move args definition/parsing inside of main? -def main(args): # pylint: disable=redefined-outer-name - # pylint: disable=global-variable-undefined - global meta_data_train, meta_data_eval, symbols, phonemes - # Audio processor - ap = AudioProcessor(**c.audio) - if 'characters' in c.keys(): - symbols, phonemes = make_symbols(**c.characters) - - # DISTRUBUTED - if num_gpus > 1: - init_distributed(args.rank, num_gpus, args.group_id, - c.distributed["backend"], c.distributed["url"]) - num_chars = len(phonemes) if c.use_phonemes else len(symbols) - - # load data instances - meta_data_train, meta_data_eval = load_meta_data(c.datasets) - - # parse speakers - if c.use_speaker_embedding: - speakers = get_speakers(meta_data_train) - if args.restore_path: - prev_out_path = os.path.dirname(args.restore_path) - speaker_mapping = load_speaker_mapping(prev_out_path) - assert all([speaker in speaker_mapping - for speaker in speakers]), "As of now you, you cannot " \ - "introduce new speakers to " \ - "a previously trained model." 
- else: - speaker_mapping = {name: i for i, name in enumerate(speakers)} - save_speaker_mapping(OUT_PATH, speaker_mapping) - num_speakers = len(speaker_mapping) - print("Training with {} speakers: {}".format(num_speakers, - ", ".join(speakers))) - else: - num_speakers = 0 - - model = setup_model(num_chars, num_speakers, c) - - params = set_weight_decay(model, c.wd) - optimizer = RAdam(params, lr=c.lr, weight_decay=0) - if c.stopnet and c.separate_stopnet: - optimizer_st = RAdam(model.decoder.stopnet.parameters(), - lr=c.lr, - weight_decay=0) - else: - optimizer_st = None - - # setup criterion - criterion = TacotronLoss(c, stopnet_pos_weight=10.0, ga_sigma=0.4) - - if args.restore_path: - checkpoint = torch.load(args.restore_path, map_location='cpu') - try: - # TODO: fix optimizer init, model.cuda() needs to be called before - # optimizer restore - # optimizer.load_state_dict(checkpoint['optimizer']) - if c.reinit_layers: - raise RuntimeError - model.load_state_dict(checkpoint['model']) - except: - print(" > Partial model initialization.") - model_dict = model.state_dict() - model_dict = set_init_dict(model_dict, checkpoint['model'], c) - model.load_state_dict(model_dict) - del model_dict - for group in optimizer.param_groups: - group['lr'] = c.lr - print(" > Model restored from step %d" % checkpoint['step'], - flush=True) - args.restore_step = checkpoint['step'] - else: - args.restore_step = 0 - - if use_cuda: - model.cuda() - criterion.cuda() - - # DISTRIBUTED - if num_gpus > 1: - model = apply_gradient_allreduce(model) - - if c.noam_schedule: - scheduler = NoamLR(optimizer, - warmup_steps=c.warmup_steps, - last_epoch=args.restore_step - 1) - else: - scheduler = None - - num_params = count_parameters(model) - print("\n > Model has {} parameters".format(num_params), flush=True) - - if 'best_loss' not in locals(): - best_loss = float('inf') - - global_step = args.restore_step - for epoch in range(0, c.epochs): - c_logger.print_epoch_start(epoch, c.epochs) - # set gradual training - if c.gradual_training is not None: - r, c.batch_size = gradual_training_scheduler(global_step, c) - c.r = r - model.decoder.set_r(r) - if c.bidirectional_decoder: - model.decoder_backward.set_r(r) - print("\n > Number of output frames:", model.decoder.r) - train_avg_loss_dict, global_step = train(model, criterion, optimizer, - optimizer_st, scheduler, ap, - global_step, epoch) - eval_avg_loss_dict = evaluate(model, criterion, ap, global_step, epoch) - c_logger.print_epoch_end(epoch, eval_avg_loss_dict) - target_loss = train_avg_loss_dict['avg_postnet_loss'] - if c.run_eval: - target_loss = eval_avg_loss_dict['avg_postnet_loss'] - best_loss = save_best_model(target_loss, best_loss, model, optimizer, global_step, epoch, c.r, - OUT_PATH) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument( - '--continue_path', - type=str, - help='Training output folder to continue a previous training. If it is used, "config_path" is ignored.', - default='', - required='--config_path' not in sys.argv) - parser.add_argument( - '--restore_path', - type=str, - help='Model file to be restored. 
Use to finetune a model.', - default='') - parser.add_argument( - '--config_path', - type=str, - help='Path to config file for training.', - required='--continue_path' not in sys.argv - ) - parser.add_argument('--debug', - type=bool, - default=False, - help='Do not verify commit integrity to run training.') - - # DISTRUBUTED - parser.add_argument( - '--rank', - type=int, - default=0, - help='DISTRIBUTED: process rank for distributed training.') - parser.add_argument('--group_id', - type=str, - default="", - help='DISTRIBUTED: process group id.') - args = parser.parse_args() - - if args.continue_path != '': - args.output_path = args.continue_path - args.config_path = os.path.join(args.continue_path, 'config.json') - list_of_files = glob.glob(args.continue_path + "/*.pth.tar") # * means all if need specific format then *.csv - latest_model_file = max(list_of_files, key=os.path.getctime) - args.restore_path = latest_model_file - print(f" > Training continues for {args.restore_path}") - - # setup output paths and read configs - c = load_config(args.config_path) - check_config(c) - _ = os.path.dirname(os.path.realpath(__file__)) - - OUT_PATH = args.continue_path - if args.continue_path == '': - OUT_PATH = create_experiment_folder(c.output_path, c.run_name, args.debug) - - AUDIO_PATH = os.path.join(OUT_PATH, 'test_audios') - - c_logger = ConsoleLogger() - - if args.rank == 0: - os.makedirs(AUDIO_PATH, exist_ok=True) - new_fields = {} - if args.restore_path: - new_fields["restore_path"] = args.restore_path - new_fields["github_branch"] = get_git_branch() - copy_config_file(args.config_path, - os.path.join(OUT_PATH, 'config.json'), new_fields) - os.chmod(AUDIO_PATH, 0o775) - os.chmod(OUT_PATH, 0o775) - - LOG_DIR = OUT_PATH - tb_logger = TensorboardLogger(LOG_DIR, model_name='TTS') - - # write model desc to tensorboard - tb_logger.tb_add_text('model-description', c['run_description'], 0) - - try: - main(args) - except KeyboardInterrupt: - remove_experiment_folder(OUT_PATH) - try: - sys.exit(0) - except SystemExit: - os._exit(0) # pylint: disable=protected-access - except Exception: # pylint: disable=broad-except - remove_experiment_folder(OUT_PATH) - traceback.print_exc() - sys.exit(1) diff --git a/tts_namespace/README.md b/tts_namespace/README.md deleted file mode 100644 index c5b2ddbf..00000000 --- a/tts_namespace/README.md +++ /dev/null @@ -1,29 +0,0 @@ -This folder contains a symlink called TTS to the parent folder: - - lrwxr-xr-x TTS -> .. - -This is used to appease the distribute/setuptools gods. When the project was -initially set up, the repository folder itself was considered a namespace, and -development was done with `sys.path` hacks. This means if you tried to install -TTS, `setup.py` would see the packages `models`, `utils`, `layers`... instead of - `TTS.models`, `TTS.utils`... - -Installing TTS would then pollute the package namespace with generic names like -those above. In order to make things installable in both install and development -modes (`pip install /path/to/TTS` and `pip install -e /path/to/TTS`), we needed -to add an additional 'TTS' namespace to avoid this pollution. A virtual redirect -using `packages_dir` in `setup.py` is not enough because it breaks the editable -installation, which can only handle the simplest of `package_dir` redirects. - -Our solution is to use a symlink in order to add the extra `TTS` namespace. In -`setup.py`, we only look for packages inside `tts_namespace` (this folder), -which contains a symlink called TTS pointing to the repository root. 
The final -result is that `setuptools.find_packages` will find `TTS.models`, `TTS.utils`... - -With this hack, `pip install -e` will then add a symlink to the `tts_namespace` -in your `site-packages` folder, which works properly. It's important not to add -anything else in this folder because it will pollute the package namespace when -installing the project. - -This does not work if you check out your project on a filesystem that does not -support symlinks. \ No newline at end of file diff --git a/tts_namespace/TTS b/tts_namespace/TTS deleted file mode 120000 index a96aa0ea..00000000 --- a/tts_namespace/TTS +++ /dev/null @@ -1 +0,0 @@ -.. \ No newline at end of file diff --git a/utils/.generic_utils.py.swo b/utils/.generic_utils.py.swo deleted file mode 100644 index ab1b3870..00000000 Binary files a/utils/.generic_utils.py.swo and /dev/null differ diff --git a/utils/.model.py.swp b/utils/.model.py.swp deleted file mode 100644 index 24a8152e..00000000 Binary files a/utils/.model.py.swp and /dev/null differ diff --git a/utils/__init__.py b/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/utils/audio.py b/utils/audio.py deleted file mode 100644 index 5b5b5059..00000000 --- a/utils/audio.py +++ /dev/null @@ -1,356 +0,0 @@ -import librosa -import soundfile as sf -import numpy as np -import scipy.io -import scipy.signal - -from TTS.utils.data import StandardScaler - - -class AudioProcessor(object): - def __init__(self, - sample_rate=None, - num_mels=None, - min_level_db=None, - frame_shift_ms=None, - frame_length_ms=None, - hop_length=None, - win_length=None, - ref_level_db=None, - fft_size=1024, - power=None, - preemphasis=0.0, - signal_norm=None, - symmetric_norm=None, - max_norm=None, - mel_fmin=None, - mel_fmax=None, - spec_gain=20, - stft_pad_mode='reflect', - clip_norm=True, - griffin_lim_iters=None, - do_trim_silence=False, - trim_db=60, - do_sound_norm=False, - stats_path=None, - **_): - - print(" > Setting up Audio Processor...") - # setup class attributed - self.sample_rate = sample_rate - self.num_mels = num_mels - self.min_level_db = min_level_db or 0 - self.frame_shift_ms = frame_shift_ms - self.frame_length_ms = frame_length_ms - self.ref_level_db = ref_level_db - self.fft_size = fft_size - self.power = power - self.preemphasis = preemphasis - self.griffin_lim_iters = griffin_lim_iters - self.signal_norm = signal_norm - self.symmetric_norm = symmetric_norm - self.mel_fmin = mel_fmin or 0 - self.mel_fmax = mel_fmax - self.spec_gain = float(spec_gain) - self.stft_pad_mode = 'reflect' - self.max_norm = 1.0 if max_norm is None else float(max_norm) - self.clip_norm = clip_norm - self.do_trim_silence = do_trim_silence - self.trim_db = trim_db - self.do_sound_norm = do_sound_norm - self.stats_path = stats_path - # setup stft parameters - if hop_length is None: - # compute stft parameters from given time values - self.hop_length, self.win_length = self._stft_parameters() - else: - # use stft parameters from config file - self.hop_length = hop_length - self.win_length = win_length - assert min_level_db != 0.0, " [!] min_level_db is 0" - assert self.win_length <= self.fft_size, " [!] 
win_length cannot be larger than fft_size" - members = vars(self) - for key, value in members.items(): - print(" | > {}:{}".format(key, value)) - # create spectrogram utils - self.mel_basis = self._build_mel_basis() - self.inv_mel_basis = np.linalg.pinv(self._build_mel_basis()) - # setup scaler - if stats_path: - mel_mean, mel_std, linear_mean, linear_std, _ = self.load_stats(stats_path) - self.setup_scaler(mel_mean, mel_std, linear_mean, linear_std) - self.signal_norm = True - self.max_norm = None - self.clip_norm = None - self.symmetric_norm = None - - ### setting up the parameters ### - def _build_mel_basis(self, ): - if self.mel_fmax is not None: - assert self.mel_fmax <= self.sample_rate // 2 - return librosa.filters.mel( - self.sample_rate, - self.fft_size, - n_mels=self.num_mels, - fmin=self.mel_fmin, - fmax=self.mel_fmax) - - def _stft_parameters(self, ): - """Compute necessary stft parameters with given time values""" - factor = self.frame_length_ms / self.frame_shift_ms - assert (factor).is_integer(), " [!] frame_shift_ms should divide frame_length_ms" - hop_length = int(self.frame_shift_ms / 1000.0 * self.sample_rate) - win_length = int(hop_length * factor) - return hop_length, win_length - - ### normalization ### - def _normalize(self, S): - """Put values in [0, self.max_norm] or [-self.max_norm, self.max_norm]""" - #pylint: disable=no-else-return - S = S.copy() - if self.signal_norm: - # mean-var scaling - if hasattr(self, 'mel_scaler'): - if S.shape[0] == self.num_mels: - return self.mel_scaler.transform(S.T).T - elif S.shape[0] == self.fft_size / 2: - return self.linear_scaler.transform(S.T).T - else: - raise RuntimeError(' [!] Mean-Var stats does not match the given feature dimensions.') - # range normalization - S -= self.ref_level_db # discard certain range of DB assuming it is air noise - S_norm = ((S - self.min_level_db) / (-self.min_level_db)) - if self.symmetric_norm: - S_norm = ((2 * self.max_norm) * S_norm) - self.max_norm - if self.clip_norm: - S_norm = np.clip(S_norm, -self.max_norm, self.max_norm) - return S_norm - else: - S_norm = self.max_norm * S_norm - if self.clip_norm: - S_norm = np.clip(S_norm, 0, self.max_norm) - return S_norm - else: - return S - - def _denormalize(self, S): - """denormalize values""" - #pylint: disable=no-else-return - S_denorm = S.copy() - if self.signal_norm: - # mean-var scaling - if hasattr(self, 'mel_scaler'): - if S_denorm.shape[0] == self.num_mels: - return self.mel_scaler.inverse_transform(S_denorm.T).T - elif S_denorm.shape[0] == self.fft_size / 2: - return self.linear_scaler.inverse_transform(S_denorm.T).T - else: - raise RuntimeError(' [!] 
Mean-Var stats does not match the given feature dimensions.') - if self.symmetric_norm: - if self.clip_norm: - S_denorm = np.clip(S_denorm, -self.max_norm, self.max_norm) - S_denorm = ((S_denorm + self.max_norm) * -self.min_level_db / (2 * self.max_norm)) + self.min_level_db - return S_denorm + self.ref_level_db - else: - if self.clip_norm: - S_denorm = np.clip(S_denorm, 0, self.max_norm) - S_denorm = (S_denorm * -self.min_level_db / - self.max_norm) + self.min_level_db - return S_denorm + self.ref_level_db - else: - return S_denorm - - ### Mean-STD scaling ### - def load_stats(self, stats_path): - stats = np.load(stats_path, allow_pickle=True).item() #pylint: disable=unexpected-keyword-arg - mel_mean = stats['mel_mean'] - mel_std = stats['mel_std'] - linear_mean = stats['linear_mean'] - linear_std = stats['linear_std'] - stats_config = stats['audio_config'] - # check all audio parameters used for computing stats - skip_parameters = ['griffin_lim_iters', 'stats_path', 'do_trim_silence', 'ref_level_db', 'power'] - for key in stats_config.keys(): - if key in skip_parameters: - continue - assert stats_config[key] == self.__dict__[key],\ - f" [!] Audio param {key} does not match the value used for computing mean-var stats. {stats_config[key]} vs {self.__dict__[key]}" - return mel_mean, mel_std, linear_mean, linear_std, stats_config - - # pylint: disable=attribute-defined-outside-init - def setup_scaler(self, mel_mean, mel_std, linear_mean, linear_std): - self.mel_scaler = StandardScaler() - self.mel_scaler.set_stats(mel_mean, mel_std) - self.linear_scaler = StandardScaler() - self.linear_scaler.set_stats(linear_mean, linear_std) - - ### DB and AMP conversion ### - # pylint: disable=no-self-use - def _amp_to_db(self, x): - return self.spec_gain * np.log10(np.maximum(1e-5, x)) - - # pylint: disable=no-self-use - def _db_to_amp(self, x): - return np.power(10.0, x / self.spec_gain) - - ### Preemphasis ### - def apply_preemphasis(self, x): - if self.preemphasis == 0: - raise RuntimeError(" [!] Preemphasis is set 0.0.") - return scipy.signal.lfilter([1, -self.preemphasis], [1], x) - - def apply_inv_preemphasis(self, x): - if self.preemphasis == 0: - raise RuntimeError(" [!] 
Preemphasis is set 0.0.") - return scipy.signal.lfilter([1], [1, -self.preemphasis], x) - - ### SPECTROGRAMs ### - def _linear_to_mel(self, spectrogram): - return np.dot(self.mel_basis, spectrogram) - - def _mel_to_linear(self, mel_spec): - return np.maximum(1e-10, np.dot(self.inv_mel_basis, mel_spec)) - - def spectrogram(self, y): - if self.preemphasis != 0: - D = self._stft(self.apply_preemphasis(y)) - else: - D = self._stft(y) - S = self._amp_to_db(np.abs(D)) - return self._normalize(S) - - def melspectrogram(self, y): - if self.preemphasis != 0: - D = self._stft(self.apply_preemphasis(y)) - else: - D = self._stft(y) - S = self._amp_to_db(self._linear_to_mel(np.abs(D))) - return self._normalize(S) - - def inv_spectrogram(self, spectrogram): - """Converts spectrogram to waveform using librosa""" - S = self._denormalize(spectrogram) - S = self._db_to_amp(S) - # Reconstruct phase - if self.preemphasis != 0: - return self.apply_inv_preemphasis(self._griffin_lim(S**self.power)) - return self._griffin_lim(S**self.power) - - def inv_melspectrogram(self, mel_spectrogram): - '''Converts melspectrogram to waveform using librosa''' - D = self._denormalize(mel_spectrogram) - S = self._db_to_amp(D) - S = self._mel_to_linear(S) # Convert back to linear - if self.preemphasis != 0: - return self.apply_inv_preemphasis(self._griffin_lim(S**self.power)) - return self._griffin_lim(S**self.power) - - def out_linear_to_mel(self, linear_spec): - S = self._denormalize(linear_spec) - S = self._db_to_amp(S) - S = self._linear_to_mel(np.abs(S)) - S = self._amp_to_db(S) - mel = self._normalize(S) - return mel - - ### STFT and ISTFT ### - def _stft(self, y): - return librosa.stft( - y=y, - n_fft=self.fft_size, - hop_length=self.hop_length, - win_length=self.win_length, - pad_mode=self.stft_pad_mode, - ) - - def _istft(self, y): - return librosa.istft( - y, hop_length=self.hop_length, win_length=self.win_length) - - def _griffin_lim(self, S): - angles = np.exp(2j * np.pi * np.random.rand(*S.shape)) - S_complex = np.abs(S).astype(np.complex) - y = self._istft(S_complex * angles) - for _ in range(self.griffin_lim_iters): - angles = np.exp(1j * np.angle(self._stft(y))) - y = self._istft(S_complex * angles) - return y - - def compute_stft_paddings(self, x, pad_sides=1): - '''compute right padding (final frame) or both sides padding (first and final frames) - ''' - assert pad_sides in (1, 2) - pad = (x.shape[0] // self.hop_length + 1) * self.hop_length - x.shape[0] - if pad_sides == 1: - return 0, pad - return pad // 2, pad // 2 + pad % 2 - - ### Audio Processing ### - def find_endpoint(self, wav, threshold_db=-40, min_silence_sec=0.8): - window_length = int(self.sample_rate * min_silence_sec) - hop_length = int(window_length / 4) - threshold = self._db_to_amp(threshold_db) - for x in range(hop_length, len(wav) - window_length, hop_length): - if np.max(wav[x:x + window_length]) < threshold: - return x + hop_length - return len(wav) - - def trim_silence(self, wav): - """ Trim silent parts with a threshold and 0.01 sec margin """ - margin = int(self.sample_rate * 0.01) - wav = wav[margin:-margin] - return librosa.effects.trim( - wav, top_db=self.trim_db, frame_length=self.win_length, hop_length=self.hop_length)[0] - - @staticmethod - def sound_norm(x): - return x / abs(x).max() * 0.9 - - ### save and load ### - def load_wav(self, filename, sr=None): - if sr is None: - x, sr = sf.read(filename) - else: - x, sr = librosa.load(filename, sr=sr) - if self.do_trim_silence: - try: - x = self.trim_silence(x) - except ValueError: - 
print(f' [!] File cannot be trimmed for silence - {filename}') - assert self.sample_rate == sr, "%s vs %s"%(self.sample_rate, sr) - if self.do_sound_norm: - x = self.sound_norm(x) - return x - - def save_wav(self, wav, path): - wav_norm = wav * (32767 / max(0.01, np.max(np.abs(wav)))) - scipy.io.wavfile.write(path, self.sample_rate, wav_norm.astype(np.int16)) - - @staticmethod - def mulaw_encode(wav, qc): - mu = 2 ** qc - 1 - # wav_abs = np.minimum(np.abs(wav), 1.0) - signal = np.sign(wav) * np.log(1 + mu * np.abs(wav)) / np.log(1. + mu) - # Quantize signal to the specified number of levels. - signal = (signal + 1) / 2 * mu + 0.5 - return np.floor(signal,) - - @staticmethod - def mulaw_decode(wav, qc): - """Recovers waveform from quantized values.""" - mu = 2 ** qc - 1 - x = np.sign(wav) / mu * ((1 + mu) ** np.abs(wav) - 1) - return x - - - @staticmethod - def encode_16bits(x): - return np.clip(x * 2**15, -2**15, 2**15 - 1).astype(np.int16) - - @staticmethod - def quantize(x, bits): - return (x + 1.) * (2**bits - 1) / 2 - - @staticmethod - def dequantize(x, bits): - return 2 * x / (2**bits - 1) - 1 diff --git a/utils/console_logger.py b/utils/console_logger.py deleted file mode 100644 index 85d5b376..00000000 --- a/utils/console_logger.py +++ /dev/null @@ -1,95 +0,0 @@ -import datetime -from TTS.utils.io import AttrDict - - -tcolors = AttrDict({ - 'OKBLUE': '\033[94m', - 'HEADER': '\033[95m', - 'OKGREEN': '\033[92m', - 'WARNING': '\033[93m', - 'FAIL': '\033[91m', - 'ENDC': '\033[0m', - 'BOLD': '\033[1m', - 'UNDERLINE': '\033[4m' -}) - - -class ConsoleLogger(): - def __init__(self): - # TODO: color code for value changes - # use these to compare values between iterations - self.old_train_loss_dict = None - self.old_epoch_loss_dict = None - self.old_eval_loss_dict = None - - # pylint: disable=no-self-use - def get_time(self): - now = datetime.datetime.now() - return now.strftime("%Y-%m-%d %H:%M:%S") - - def print_epoch_start(self, epoch, max_epoch): - print("\n{}{} > EPOCH: {}/{}{}".format(tcolors.UNDERLINE, tcolors.BOLD, - epoch, max_epoch, tcolors.ENDC), - flush=True) - - def print_train_start(self): - print(f"\n{tcolors.BOLD} > TRAINING ({self.get_time()}) {tcolors.ENDC}") - - def print_train_step(self, batch_steps, step, global_step, avg_spec_length, - avg_text_length, step_time, loader_time, lr, - loss_dict, avg_loss_dict): - indent = " | > " - print() - log_text = "{} --> STEP: {}/{} -- GLOBAL_STEP: {}{}\n".format( - tcolors.BOLD, step, batch_steps, global_step, tcolors.ENDC) - for key, value in loss_dict.items(): - # print the avg value if given - if f'avg_{key}' in avg_loss_dict.keys(): - log_text += "{}{}: {:.5f} ({:.5f})\n".format(indent, key, value, avg_loss_dict[f'avg_{key}']) - else: - log_text += "{}{}: {:.5f} \n".format(indent, key, value) - log_text += f"{indent}avg_spec_len: {avg_spec_length}\n{indent}avg_text_len: {avg_text_length}\n{indent}"\ - f"step_time: {step_time:.2f}\n{indent}loader_time: {loader_time:.2f}\n{indent}lr: {lr:.5f}" - print(log_text, flush=True) - - # pylint: disable=unused-argument - def print_train_epoch_end(self, global_step, epoch, epoch_time, - print_dict): - indent = " | > " - log_text = f"\n{tcolors.BOLD} --> TRAIN PERFORMANCE -- EPOCH TIME: {epoch_time:.2f} sec -- GLOBAL_STEP: {global_step}{tcolors.ENDC}\n" - for key, value in print_dict.items(): - log_text += "{}{}: {:.5f}\n".format(indent, key, value) - print(log_text, flush=True) - - def print_eval_start(self): - print(f"{tcolors.BOLD} > EVALUATION {tcolors.ENDC}\n") - - def 
print_eval_step(self, step, loss_dict, avg_loss_dict): - indent = " | > " - print() - log_text = f"{tcolors.BOLD} --> STEP: {step}{tcolors.ENDC}\n" - for key, value in loss_dict.items(): - # print the avg value if given - if f'avg_{key}' in avg_loss_dict.keys(): - log_text += "{}{}: {:.5f} ({:.5f})\n".format(indent, key, value, avg_loss_dict[f'avg_{key}']) - else: - log_text += "{}{}: {:.5f} \n".format(indent, key, value) - print(log_text, flush=True) - - def print_epoch_end(self, epoch, avg_loss_dict): - indent = " | > " - log_text = " {}--> EVAL PERFORMANCE{}\n".format( - tcolors.BOLD, tcolors.ENDC) - for key, value in avg_loss_dict.items(): - # print the avg value if given - color = tcolors.FAIL - sign = '+' - diff = 0 - if self.old_eval_loss_dict is not None: - diff = value - self.old_eval_loss_dict[key] - if diff <= 0: - color = tcolors.OKGREEN - sign = '' - log_text += "{}{}:{} {:.5f} {}({}{:.5f})\n".format(indent, key, color, value, tcolors.ENDC, sign, diff) - self.old_eval_loss_dict = avg_loss_dict - print(log_text, flush=True) diff --git a/utils/data.py b/utils/data.py deleted file mode 100644 index a83325cb..00000000 --- a/utils/data.py +++ /dev/null @@ -1,77 +0,0 @@ -import numpy as np - - -def _pad_data(x, length): - _pad = 0 - assert x.ndim == 1 - return np.pad( - x, (0, length - x.shape[0]), mode='constant', constant_values=_pad) - - -def prepare_data(inputs): - max_len = max((len(x) for x in inputs)) - return np.stack([_pad_data(x, max_len) for x in inputs]) - - -def _pad_tensor(x, length): - _pad = 0. - assert x.ndim == 2 - x = np.pad( - x, [[0, 0], [0, length - x.shape[1]]], - mode='constant', - constant_values=_pad) - return x - - -def prepare_tensor(inputs, out_steps): - max_len = max((x.shape[1] for x in inputs)) - remainder = max_len % out_steps - pad_len = max_len + (out_steps - remainder) if remainder > 0 else max_len - return np.stack([_pad_tensor(x, pad_len) for x in inputs]) - - -def _pad_stop_target(x, length): - _pad = 0. - assert x.ndim == 1 - return np.pad( - x, (0, length - x.shape[0]), mode='constant', constant_values=_pad) - - -def prepare_stop_target(inputs, out_steps): - """ Pad row vectors with 1. 
""" - max_len = max((x.shape[0] for x in inputs)) - remainder = max_len % out_steps - pad_len = max_len + (out_steps - remainder) if remainder > 0 else max_len - return np.stack([_pad_stop_target(x, pad_len) for x in inputs]) - - -def pad_per_step(inputs, pad_len): - return np.pad( - inputs, [[0, 0], [0, 0], [0, pad_len]], - mode='constant', - constant_values=0.0) - - -# pylint: disable=attribute-defined-outside-init -class StandardScaler(): - - def set_stats(self, mean, scale): - self.mean_ = mean - self.scale_ = scale - - def reset_stats(self): - delattr(self, 'mean_') - delattr(self, 'scale_') - - def transform(self, X): - X = np.asarray(X) - X -= self.mean_ - X /= self.scale_ - return X - - def inverse_transform(self, X): - X = np.asarray(X) - X *= self.scale_ - X += self.mean_ - return X - diff --git a/utils/generic_utils.py b/utils/generic_utils.py deleted file mode 100644 index c806bdf3..00000000 --- a/utils/generic_utils.py +++ /dev/null @@ -1,362 +0,0 @@ -import os -import glob -import torch -import shutil -import datetime -import subprocess -import importlib -import numpy as np -from collections import Counter - - -def get_git_branch(): - try: - out = subprocess.check_output(["git", "branch"]).decode("utf8") - current = next(line for line in out.split("\n") - if line.startswith("*")) - current.replace("* ", "") - except subprocess.CalledProcessError: - current = "inside_docker" - return current - - -def get_commit_hash(): - """https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script""" - # try: - # subprocess.check_output(['git', 'diff-index', '--quiet', - # 'HEAD']) # Verify client is clean - # except: - # raise RuntimeError( - # " !! Commit before training to get the commit hash.") - try: - commit = subprocess.check_output( - ['git', 'rev-parse', '--short', 'HEAD']).decode().strip() - # Not copying .git folder into docker container - except subprocess.CalledProcessError: - commit = "0000000" - print(' > Git Hash: {}'.format(commit)) - return commit - - -def create_experiment_folder(root_path, model_name, debug): - """ Create a folder with the current date and time """ - date_str = datetime.datetime.now().strftime("%B-%d-%Y_%I+%M%p") - if debug: - commit_hash = 'debug' - else: - commit_hash = get_commit_hash() - output_folder = os.path.join( - root_path, model_name + '-' + date_str + '-' + commit_hash) - os.makedirs(output_folder, exist_ok=True) - print(" > Experiment folder: {}".format(output_folder)) - return output_folder - - -def remove_experiment_folder(experiment_path): - """Check folder if there is a checkpoint, otherwise remove the folder""" - - checkpoint_files = glob.glob(experiment_path + "/*.pth.tar") - if not checkpoint_files: - if os.path.exists(experiment_path): - shutil.rmtree(experiment_path, ignore_errors=True) - print(" ! Run is removed from {}".format(experiment_path)) - else: - print(" ! Run is kept in {}".format(experiment_path)) - - -def count_parameters(model): - r"""Count number of trainable parameters in a network""" - return sum(p.numel() for p in model.parameters() if p.requires_grad) - - -def split_dataset(items): - is_multi_speaker = False - speakers = [item[-1] for item in items] - is_multi_speaker = len(set(speakers)) > 1 - eval_split_size = 500 if len(items) * 0.01 > 500 else int( - len(items) * 0.01) - assert eval_split_size > 0, " [!] You do not have enough samples to train. You need at least 100 samples." 
- np.random.seed(0) - np.random.shuffle(items) - if is_multi_speaker: - items_eval = [] - # most stupid code ever -- Fix it ! - while len(items_eval) < eval_split_size: - speakers = [item[-1] for item in items] - speaker_counter = Counter(speakers) - item_idx = np.random.randint(0, len(items)) - if speaker_counter[items[item_idx][-1]] > 1: - items_eval.append(items[item_idx]) - del items[item_idx] - return items_eval, items - return items[:eval_split_size], items[eval_split_size:] - - -# from https://gist.github.com/jihunchoi/f1434a77df9db1bb337417854b398df1 -def sequence_mask(sequence_length, max_len=None): - if max_len is None: - max_len = sequence_length.data.max() - batch_size = sequence_length.size(0) - seq_range = torch.arange(0, max_len).long() - seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len) - if sequence_length.is_cuda: - seq_range_expand = seq_range_expand.to(sequence_length.device) - seq_length_expand = ( - sequence_length.unsqueeze(1).expand_as(seq_range_expand)) - # B x T_max - return seq_range_expand < seq_length_expand - - -def set_init_dict(model_dict, checkpoint_state, c): - # Partial initialization: if there is a mismatch with new and old layer, it is skipped. - for k, v in checkpoint_state.items(): - if k not in model_dict: - print(" | > Layer missing in the model definition: {}".format(k)) - # 1. filter out unnecessary keys - pretrained_dict = { - k: v - for k, v in checkpoint_state.items() if k in model_dict - } - # 2. filter out different size layers - pretrained_dict = { - k: v - for k, v in pretrained_dict.items() - if v.numel() == model_dict[k].numel() - } - # 3. skip reinit layers - if c.reinit_layers is not None: - for reinit_layer_name in c.reinit_layers: - pretrained_dict = { - k: v - for k, v in pretrained_dict.items() - if reinit_layer_name not in k - } - # 4. overwrite entries in the existing state dict - model_dict.update(pretrained_dict) - print(" | > {} / {} layers are restored.".format(len(pretrained_dict), - len(model_dict))) - return model_dict - - -def setup_model(num_chars, num_speakers, c): - print(" > Using model: {}".format(c.model)) - MyModel = importlib.import_module('TTS.models.' 
+ c.model.lower()) - MyModel = getattr(MyModel, c.model) - if c.model.lower() in "tacotron": - model = MyModel(num_chars=num_chars, - num_speakers=num_speakers, - r=c.r, - postnet_output_dim=int(c.audio['fft_size'] / 2 + 1), - decoder_output_dim=c.audio['num_mels'], - gst=c.use_gst, - memory_size=c.memory_size, - attn_type=c.attention_type, - attn_win=c.windowing, - attn_norm=c.attention_norm, - prenet_type=c.prenet_type, - prenet_dropout=c.prenet_dropout, - forward_attn=c.use_forward_attn, - trans_agent=c.transition_agent, - forward_attn_mask=c.forward_attn_mask, - location_attn=c.location_attn, - attn_K=c.attention_heads, - separate_stopnet=c.separate_stopnet, - bidirectional_decoder=c.bidirectional_decoder, - double_decoder_consistency=c.double_decoder_consistency, - ddc_r=c.ddc_r) - elif c.model.lower() == "tacotron2": - model = MyModel(num_chars=num_chars, - num_speakers=num_speakers, - r=c.r, - postnet_output_dim=c.audio['num_mels'], - decoder_output_dim=c.audio['num_mels'], - gst=c.use_gst, - attn_type=c.attention_type, - attn_win=c.windowing, - attn_norm=c.attention_norm, - prenet_type=c.prenet_type, - prenet_dropout=c.prenet_dropout, - forward_attn=c.use_forward_attn, - trans_agent=c.transition_agent, - forward_attn_mask=c.forward_attn_mask, - location_attn=c.location_attn, - attn_K=c.attention_heads, - separate_stopnet=c.separate_stopnet, - bidirectional_decoder=c.bidirectional_decoder, - double_decoder_consistency=c.double_decoder_consistency, - ddc_r=c.ddc_r) - return model - -class KeepAverage(): - def __init__(self): - self.avg_values = {} - self.iters = {} - - def __getitem__(self, key): - return self.avg_values[key] - - def items(self): - return self.avg_values.items() - - def add_value(self, name, init_val=0, init_iter=0): - self.avg_values[name] = init_val - self.iters[name] = init_iter - - def update_value(self, name, value, weighted_avg=False): - if name not in self.avg_values: - # add value if not exist before - self.add_value(name, init_val=value) - else: - # else update existing value - if weighted_avg: - self.avg_values[name] = 0.99 * self.avg_values[name] + 0.01 * value - self.iters[name] += 1 - else: - self.avg_values[name] = self.avg_values[name] * \ - self.iters[name] + value - self.iters[name] += 1 - self.avg_values[name] /= self.iters[name] - - def add_values(self, name_dict): - for key, value in name_dict.items(): - self.add_value(key, init_val=value) - - def update_values(self, value_dict): - for key, value in value_dict.items(): - self.update_value(key, value) - - -def _check_argument(name, c, enum_list=None, max_val=None, min_val=None, restricted=False, val_type=None, alternative=None): - if alternative in c.keys() and c[alternative] is not None: - return - if restricted: - assert name in c.keys(), f' [!] {name} not defined in config.json' - if name in c.keys(): - if max_val: - assert c[name] <= max_val, f' [!] {name} is larger than max value {max_val}' - if min_val: - assert c[name] >= min_val, f' [!] {name} is smaller than min value {min_val}' - if enum_list: - assert c[name].lower() in enum_list, f' [!] {name} is not a valid value' - if val_type: - assert isinstance(c[name], val_type) or c[name] is None, f' [!] 
{name} has wrong type - {type(c[name])} vs {val_type}' - - -def check_config(c): - _check_argument('model', c, enum_list=['tacotron', 'tacotron2'], restricted=True, val_type=str) - _check_argument('run_name', c, restricted=True, val_type=str) - _check_argument('run_description', c, val_type=str) - - # AUDIO - _check_argument('audio', c, restricted=True, val_type=dict) - - # audio processing parameters - _check_argument('num_mels', c['audio'], restricted=True, val_type=int, min_val=10, max_val=2056) - _check_argument('fft_size', c['audio'], restricted=True, val_type=int, min_val=128, max_val=4058) - _check_argument('sample_rate', c['audio'], restricted=True, val_type=int, min_val=512, max_val=100000) - _check_argument('frame_length_ms', c['audio'], restricted=True, val_type=float, min_val=10, max_val=1000, alternative='win_length') - _check_argument('frame_shift_ms', c['audio'], restricted=True, val_type=float, min_val=1, max_val=1000, alternative='hop_length') - _check_argument('preemphasis', c['audio'], restricted=True, val_type=float, min_val=0, max_val=1) - _check_argument('min_level_db', c['audio'], restricted=True, val_type=int, min_val=-1000, max_val=10) - _check_argument('ref_level_db', c['audio'], restricted=True, val_type=int, min_val=0, max_val=1000) - _check_argument('power', c['audio'], restricted=True, val_type=float, min_val=1, max_val=5) - _check_argument('griffin_lim_iters', c['audio'], restricted=True, val_type=int, min_val=10, max_val=1000) - - # vocabulary parameters - _check_argument('characters', c, restricted=False, val_type=dict) - _check_argument('pad', c['characters'] if 'characters' in c.keys() else {}, restricted='characters' in c.keys(), val_type=str) - _check_argument('eos', c['characters'] if 'characters' in c.keys() else {}, restricted='characters' in c.keys(), val_type=str) - _check_argument('bos', c['characters'] if 'characters' in c.keys() else {}, restricted='characters' in c.keys(), val_type=str) - _check_argument('characters', c['characters'] if 'characters' in c.keys() else {}, restricted='characters' in c.keys(), val_type=str) - _check_argument('phonemes', c['characters'] if 'characters' in c.keys() else {}, restricted='characters' in c.keys(), val_type=str) - _check_argument('punctuations', c['characters'] if 'characters' in c.keys() else {}, restricted='characters' in c.keys(), val_type=str) - - # normalization parameters - _check_argument('signal_norm', c['audio'], restricted=True, val_type=bool) - _check_argument('symmetric_norm', c['audio'], restricted=True, val_type=bool) - _check_argument('max_norm', c['audio'], restricted=True, val_type=float, min_val=0.1, max_val=1000) - _check_argument('clip_norm', c['audio'], restricted=True, val_type=bool) - _check_argument('mel_fmin', c['audio'], restricted=True, val_type=float, min_val=0.0, max_val=1000) - _check_argument('mel_fmax', c['audio'], restricted=True, val_type=float, min_val=500.0) - _check_argument('spec_gain', c['audio'], restricted=True, val_type=float, min_val=1, max_val=100) - _check_argument('do_trim_silence', c['audio'], restricted=True, val_type=bool) - _check_argument('trim_db', c['audio'], restricted=True, val_type=int) - - # training parameters - _check_argument('batch_size', c, restricted=True, val_type=int, min_val=1) - _check_argument('eval_batch_size', c, restricted=True, val_type=int, min_val=1) - _check_argument('r', c, restricted=True, val_type=int, min_val=1) - _check_argument('gradual_training', c, restricted=False, val_type=list) - _check_argument('loss_masking', c, 
restricted=True, val_type=bool) - # _check_argument('grad_accum', c, restricted=True, val_type=int, min_val=1, max_val=100) - - # validation parameters - _check_argument('run_eval', c, restricted=True, val_type=bool) - _check_argument('test_delay_epochs', c, restricted=True, val_type=int, min_val=0) - _check_argument('test_sentences_file', c, restricted=False, val_type=str) - - # optimizer - _check_argument('noam_schedule', c, restricted=False, val_type=bool) - _check_argument('grad_clip', c, restricted=True, val_type=float, min_val=0.0) - _check_argument('epochs', c, restricted=True, val_type=int, min_val=1) - _check_argument('lr', c, restricted=True, val_type=float, min_val=0) - _check_argument('wd', c, restricted=True, val_type=float, min_val=0) - _check_argument('warmup_steps', c, restricted=True, val_type=int, min_val=0) - _check_argument('seq_len_norm', c, restricted=True, val_type=bool) - - # tacotron prenet - _check_argument('memory_size', c, restricted=True, val_type=int, min_val=-1) - _check_argument('prenet_type', c, restricted=True, val_type=str, enum_list=['original', 'bn']) - _check_argument('prenet_dropout', c, restricted=True, val_type=bool) - - # attention - _check_argument('attention_type', c, restricted=True, val_type=str, enum_list=['graves', 'original']) - _check_argument('attention_heads', c, restricted=True, val_type=int) - _check_argument('attention_norm', c, restricted=True, val_type=str, enum_list=['sigmoid', 'softmax']) - _check_argument('windowing', c, restricted=True, val_type=bool) - _check_argument('use_forward_attn', c, restricted=True, val_type=bool) - _check_argument('forward_attn_mask', c, restricted=True, val_type=bool) - _check_argument('transition_agent', c, restricted=True, val_type=bool) - _check_argument('transition_agent', c, restricted=True, val_type=bool) - _check_argument('location_attn', c, restricted=True, val_type=bool) - _check_argument('bidirectional_decoder', c, restricted=True, val_type=bool) - _check_argument('double_decoder_consistency', c, restricted=True, val_type=bool) - _check_argument('ddc_r', c, restricted='double_decoder_consistency' in c.keys(), min_val=1, max_val=7, val_type=int) - - # stopnet - _check_argument('stopnet', c, restricted=True, val_type=bool) - _check_argument('separate_stopnet', c, restricted=True, val_type=bool) - - # tensorboard - _check_argument('print_step', c, restricted=True, val_type=int, min_val=1) - _check_argument('tb_plot_step', c, restricted=True, val_type=int, min_val=1) - _check_argument('save_step', c, restricted=True, val_type=int, min_val=1) - _check_argument('checkpoint', c, restricted=True, val_type=bool) - _check_argument('tb_model_param_stats', c, restricted=True, val_type=bool) - - # dataloading - # pylint: disable=import-outside-toplevel - from TTS.utils.text import cleaners - _check_argument('text_cleaner', c, restricted=True, val_type=str, enum_list=dir(cleaners)) - _check_argument('enable_eos_bos_chars', c, restricted=True, val_type=bool) - _check_argument('num_loader_workers', c, restricted=True, val_type=int, min_val=0) - _check_argument('num_val_loader_workers', c, restricted=True, val_type=int, min_val=0) - _check_argument('batch_group_size', c, restricted=True, val_type=int, min_val=0) - _check_argument('min_seq_len', c, restricted=True, val_type=int, min_val=0) - _check_argument('max_seq_len', c, restricted=True, val_type=int, min_val=10) - - # paths - _check_argument('output_path', c, restricted=True, val_type=str) - - # multi-speaker gst - _check_argument('use_speaker_embedding', 
c, restricted=True, val_type=bool) - _check_argument('style_wav_for_test', c, restricted=True, val_type=str) - _check_argument('use_gst', c, restricted=True, val_type=bool) - - # datasets - checking only the first entry - _check_argument('datasets', c, restricted=True, val_type=list) - for dataset_entry in c['datasets']: - _check_argument('name', dataset_entry, restricted=True, val_type=str) - _check_argument('path', dataset_entry, restricted=True, val_type=str) - _check_argument('meta_file_train', dataset_entry, restricted=True, val_type=str) - _check_argument('meta_file_val', dataset_entry, restricted=True, val_type=str) diff --git a/utils/io.py b/utils/io.py deleted file mode 100644 index faf00195..00000000 --- a/utils/io.py +++ /dev/null @@ -1,78 +0,0 @@ -import os -import json -import re -import torch -import datetime - - -class AttrDict(dict): - def __init__(self, *args, **kwargs): - super(AttrDict, self).__init__(*args, **kwargs) - self.__dict__ = self - - -def load_config(config_path): - config = AttrDict() - with open(config_path, "r") as f: - input_str = f.read() - input_str = re.sub(r'\\\n', '', input_str) - input_str = re.sub(r'//.*\n', '\n', input_str) - data = json.loads(input_str) - config.update(data) - return config - - -def copy_config_file(config_file, out_path, new_fields): - config_lines = open(config_file, "r").readlines() - # add extra information fields - for key, value in new_fields.items(): - if isinstance(value, str): - new_line = '"{}":"{}",\n'.format(key, value) - else: - new_line = '"{}":{},\n'.format(key, value) - config_lines.insert(1, new_line) - config_out_file = open(out_path, "w") - config_out_file.writelines(config_lines) - config_out_file.close() - - -def load_checkpoint(model, checkpoint_path, use_cuda=False): - state = torch.load(checkpoint_path, map_location=torch.device('cpu')) - model.load_state_dict(state['model']) - if use_cuda: - model.cuda() - # set model stepsize - if 'r' in state.keys(): - model.decoder.set_r(state['r']) - return model, state - - -def save_model(model, optimizer, current_step, epoch, r, output_path, **kwargs): - new_state_dict = model.state_dict() - state = { - 'model': new_state_dict, - 'optimizer': optimizer.state_dict() if optimizer is not None else None, - 'step': current_step, - 'epoch': epoch, - 'date': datetime.date.today().strftime("%B %d, %Y"), - 'r': r - } - state.update(kwargs) - torch.save(state, output_path) - - -def save_checkpoint(model, optimizer, current_step, epoch, r, output_folder, **kwargs): - file_name = 'checkpoint_{}.pth.tar'.format(current_step) - checkpoint_path = os.path.join(output_folder, file_name) - print(" > CHECKPOINT : {}".format(checkpoint_path)) - save_model(model, optimizer, current_step, epoch, r, checkpoint_path, **kwargs) - - -def save_best_model(target_loss, best_loss, model, optimizer, current_step, epoch, r, output_folder, **kwargs): - if target_loss < best_loss: - file_name = 'best_model.pth.tar' - checkpoint_path = os.path.join(output_folder, file_name) - print(" > BEST MODEL : {}".format(checkpoint_path)) - save_model(model, optimizer, current_step, epoch, r, checkpoint_path, model_loss=target_loss, **kwargs) - best_loss = target_loss - return best_loss diff --git a/utils/measures.py b/utils/measures.py deleted file mode 100644 index 01d25695..00000000 --- a/utils/measures.py +++ /dev/null @@ -1,18 +0,0 @@ -import torch - - -def alignment_diagonal_score(alignments, binary=False): - """ - Compute how diagonal alignment predictions are. 
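    The returned score is the batch mean of each encoder step's maximum attention weight, so sharp, near-monotonic alignments score close to 1.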
It is useful - to measure the alignment consistency of a model - Args: - alignments (torch.Tensor): batch of alignments. - binary (bool): if True, ignore scores and consider attention - as a binary mask. - Shape: - alignments : batch x decoder_steps x encoder_steps - """ - maxs = alignments.max(dim=1)[0] - if binary: - maxs[maxs > 0] = 1 - return maxs.mean(dim=1).mean(dim=0).item() diff --git a/utils/radam.py b/utils/radam.py deleted file mode 100644 index 4724b705..00000000 --- a/utils/radam.py +++ /dev/null @@ -1,97 +0,0 @@ -# from https://github.com/LiyuanLucasLiu/RAdam - -import math -import torch -from torch.optim.optimizer import Optimizer, required - - -class RAdam(Optimizer): - - def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0, degenerated_to_sgd=True): - if lr < 0.0: - raise ValueError("Invalid learning rate: {}".format(lr)) - if eps < 0.0: - raise ValueError("Invalid epsilon value: {}".format(eps)) - if not 0.0 <= betas[0] < 1.0: - raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) - if not 0.0 <= betas[1] < 1.0: - raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) - - self.degenerated_to_sgd = degenerated_to_sgd - if isinstance(params, (list, tuple)) and len(params) > 0 and isinstance(params[0], dict): - for param in params: - if 'betas' in param and (param['betas'][0] != betas[0] or param['betas'][1] != betas[1]): - param['buffer'] = [[None, None, None] for _ in range(10)] - defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, buffer=[[None, None, None] for _ in range(10)]) - super(RAdam, self).__init__(params, defaults) - - def __setstate__(self, state): - super(RAdam, self).__setstate__(state) - - def step(self, closure=None): - - loss = None - if closure is not None: - loss = closure() - - for group in self.param_groups: - - for p in group['params']: - if p.grad is None: - continue - grad = p.grad.data.float() - if grad.is_sparse: - raise RuntimeError('RAdam does not support sparse gradients') - - p_data_fp32 = p.data.float() - - state = self.state[p] - - if len(state) == 0: - state['step'] = 0 - state['exp_avg'] = torch.zeros_like(p_data_fp32) - state['exp_avg_sq'] = torch.zeros_like(p_data_fp32) - else: - state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32) - state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32) - - exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] - beta1, beta2 = group['betas'] - - exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2) - exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1) - - state['step'] += 1 - buffered = group['buffer'][int(state['step'] % 10)] - if state['step'] == buffered[0]: - N_sma, step_size = buffered[1], buffered[2] - else: - buffered[0] = state['step'] - beta2_t = beta2 ** state['step'] - N_sma_max = 2 / (1 - beta2) - 1 - N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t) - buffered[1] = N_sma - - # more conservative since it's an approximated value - if N_sma >= 5: - step_size = math.sqrt((1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (N_sma_max - 2)) / (1 - beta1 ** state['step']) - elif self.degenerated_to_sgd: - step_size = 1.0 / (1 - beta1 ** state['step']) - else: - step_size = -1 - buffered[2] = step_size - - # more conservative since it's an approximated value - if N_sma >= 5: - if group['weight_decay'] != 0: - p_data_fp32.add_(p_data_fp32, alpha=-group['weight_decay'] * group['lr']) - denom = exp_avg_sq.sqrt().add_(group['eps']) - 
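                    # variance-rectified update: 'step_size' (computed above from N_sma) scales the Adam-style step; for N_sma < 5 the 'elif' branch below falls back to a plain SGD-style step when degenerated_to_sgd is set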
p_data_fp32.addcdiv_(exp_avg, denom, value=-step_size * group['lr']) - p.data.copy_(p_data_fp32) - elif step_size > 0: - if group['weight_decay'] != 0: - p_data_fp32.add_(p_data_fp32, alpha=-group['weight_decay'] * group['lr']) - p_data_fp32.add_(exp_avg, alpha=-step_size * group['lr']) - p.data.copy_(p_data_fp32) - - return loss diff --git a/utils/speakers.py b/utils/speakers.py deleted file mode 100644 index 8aa612a8..00000000 --- a/utils/speakers.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -import json - -from TTS.datasets.preprocess import get_preprocessor_by_name - - -def make_speakers_json_path(out_path): - """Returns conventional speakers.json location.""" - return os.path.join(out_path, "speakers.json") - - -def load_speaker_mapping(out_path): - """Loads speaker mapping if already present.""" - try: - with open(make_speakers_json_path(out_path)) as f: - return json.load(f) - except FileNotFoundError: - return {} - - -def save_speaker_mapping(out_path, speaker_mapping): - """Saves speaker mapping if not yet present.""" - speakers_json_path = make_speakers_json_path(out_path) - with open(speakers_json_path, "w") as f: - json.dump(speaker_mapping, f, indent=4) - - -def get_speakers(items): - """Returns a sorted, unique list of speakers in a given dataset.""" - speakers = {e[2] for e in items} - return sorted(speakers) diff --git a/utils/synthesis.py b/utils/synthesis.py deleted file mode 100644 index ce76b0ec..00000000 --- a/utils/synthesis.py +++ /dev/null @@ -1,231 +0,0 @@ -import pkg_resources -installed = {pkg.key for pkg in pkg_resources.working_set} #pylint: disable=not-an-iterable -if 'tensorflow' in installed or 'tensorflow-gpu' in installed: - import tensorflow as tf -import torch -import numpy as np -from .text import text_to_sequence, phoneme_to_sequence - - -def text_to_seqvec(text, CONFIG): - text_cleaner = [CONFIG.text_cleaner] - # text ot phonemes to sequence vector - if CONFIG.use_phonemes: - seq = np.asarray( - phoneme_to_sequence(text, text_cleaner, CONFIG.phoneme_language, - CONFIG.enable_eos_bos_chars, - tp=CONFIG.characters if 'characters' in CONFIG.keys() else None), - dtype=np.int32) - else: - seq = np.asarray(text_to_sequence(text, text_cleaner, tp=CONFIG.characters if 'characters' in CONFIG.keys() else None), dtype=np.int32) - return seq - - -def numpy_to_torch(np_array, dtype, cuda=False): - if np_array is None: - return None - tensor = torch.as_tensor(np_array, dtype=dtype) - if cuda: - return tensor.cuda() - return tensor - - -def numpy_to_tf(np_array, dtype): - if np_array is None: - return None - tensor = tf.convert_to_tensor(np_array, dtype=dtype) - return tensor - - -def compute_style_mel(style_wav, ap): - style_mel = ap.melspectrogram( - ap.load_wav(style_wav)).expand_dims(0) - return style_mel - - -def run_model_torch(model, inputs, CONFIG, truncated, speaker_id=None, style_mel=None): - if CONFIG.use_gst: - decoder_output, postnet_output, alignments, stop_tokens = model.inference( - inputs, style_mel=style_mel, speaker_ids=speaker_id) - else: - if truncated: - decoder_output, postnet_output, alignments, stop_tokens = model.inference_truncated( - inputs, speaker_ids=speaker_id) - else: - decoder_output, postnet_output, alignments, stop_tokens = model.inference( - inputs, speaker_ids=speaker_id) - return decoder_output, postnet_output, alignments, stop_tokens - - -def run_model_tf(model, inputs, CONFIG, truncated, speaker_id=None, style_mel=None): - if CONFIG.use_gst and style_mel is not None: - raise NotImplementedError(' [!] 
GST inference not implemented for TF') - if truncated: - raise NotImplementedError(' [!] Truncated inference not implemented for TF') - if speaker_id is not None: - raise NotImplementedError(' [!] Multi-Speaker not implemented for TF') - # TODO: handle multispeaker case - decoder_output, postnet_output, alignments, stop_tokens = model( - inputs, training=False) - return decoder_output, postnet_output, alignments, stop_tokens - - -def run_model_tflite(model, inputs, CONFIG, truncated, speaker_id=None, style_mel=None): - if CONFIG.use_gst and style_mel is not None: - raise NotImplementedError(' [!] GST inference not implemented for TfLite') - if truncated: - raise NotImplementedError(' [!] Truncated inference not implemented for TfLite') - if speaker_id is not None: - raise NotImplementedError(' [!] Multi-Speaker not implemented for TfLite') - # get input and output details - input_details = model.get_input_details() - output_details = model.get_output_details() - # reshape input tensor for the new input shape - model.resize_tensor_input(input_details[0]['index'], inputs.shape) - model.allocate_tensors() - detail = input_details[0] - # input_shape = detail['shape'] - model.set_tensor(detail['index'], inputs) - # run the model - model.invoke() - # collect outputs - decoder_output = model.get_tensor(output_details[0]['index']) - postnet_output = model.get_tensor(output_details[1]['index']) - # tflite model only returns feature frames - return decoder_output, postnet_output, None, None - - -def parse_outputs_torch(postnet_output, decoder_output, alignments, stop_tokens): - postnet_output = postnet_output[0].data.cpu().numpy() - decoder_output = decoder_output[0].data.cpu().numpy() - alignment = alignments[0].cpu().data.numpy() - stop_tokens = stop_tokens[0].cpu().numpy() - return postnet_output, decoder_output, alignment, stop_tokens - - -def parse_outputs_tf(postnet_output, decoder_output, alignments, stop_tokens): - postnet_output = postnet_output[0].numpy() - decoder_output = decoder_output[0].numpy() - alignment = alignments[0].numpy() - stop_tokens = stop_tokens[0].numpy() - return postnet_output, decoder_output, alignment, stop_tokens - - -def parse_outputs_tflite(postnet_output, decoder_output): - postnet_output = postnet_output[0] - decoder_output = decoder_output[0] - return postnet_output, decoder_output - - -def trim_silence(wav, ap): - return wav[:ap.find_endpoint(wav)] - - -def inv_spectrogram(postnet_output, ap, CONFIG): - if CONFIG.model.lower() in ["tacotron"]: - wav = ap.inv_spectrogram(postnet_output.T) - else: - wav = ap.inv_melspectrogram(postnet_output.T) - return wav - - -def id_to_torch(speaker_id): - if speaker_id is not None: - speaker_id = np.asarray(speaker_id) - speaker_id = torch.from_numpy(speaker_id).unsqueeze(0) - return speaker_id - - -# TODO: perform GL with pytorch for batching -def apply_griffin_lim(inputs, input_lens, CONFIG, ap): - '''Apply griffin-lim to each sample iterating throught the first dimension. - Args: - inputs (Tensor or np.Array): Features to be converted by GL. First dimension is the batch size. - input_lens (Tensor or np.Array): 1D array of sample lengths. - CONFIG (Dict): TTS config. - ap (AudioProcessor): TTS audio processor. - ''' - wavs = [] - for idx, spec in enumerate(inputs): - wav_len = (input_lens[idx] * ap.hop_length) - ap.hop_length # inverse librosa padding - wav = inv_spectrogram(spec, ap, CONFIG) - # assert len(wav) == wav_len, f" [!] 
wav lenght: {len(wav)} vs expected: {wav_len}" - wavs.append(wav[:wav_len]) - return wavs - - -def synthesis(model, - text, - CONFIG, - use_cuda, - ap, - speaker_id=None, - style_wav=None, - truncated=False, - enable_eos_bos_chars=False, #pylint: disable=unused-argument - use_griffin_lim=False, - do_trim_silence=False, - backend='torch'): - """Synthesize voice for the given text. - - Args: - model (TTS.models): model to synthesize. - text (str): target text - CONFIG (dict): config dictionary to be loaded from config.json. - use_cuda (bool): enable cuda. - ap (TTS.utils.audio.AudioProcessor): audio processor to process - model outputs. - speaker_id (int): id of speaker - style_wav (str): Uses for style embedding of GST. - truncated (bool): keep model states after inference. It can be used - for continuous inference at long texts. - enable_eos_bos_chars (bool): enable special chars for end of sentence and start of sentence. - do_trim_silence (bool): trim silence after synthesis. - backend (str): tf or torch - """ - # GST processing - style_mel = None - if CONFIG.model == "TacotronGST" and style_wav is not None: - style_mel = compute_style_mel(style_wav, ap) - # preprocess the given text - inputs = text_to_seqvec(text, CONFIG) - # pass tensors to backend - if backend == 'torch': - speaker_id = id_to_torch(speaker_id) - style_mel = numpy_to_torch(style_mel, torch.float, cuda=use_cuda) - inputs = numpy_to_torch(inputs, torch.long, cuda=use_cuda) - inputs = inputs.unsqueeze(0) - elif backend == 'tf': - # TODO: handle speaker id for tf model - style_mel = numpy_to_tf(style_mel, tf.float32) - inputs = numpy_to_tf(inputs, tf.int32) - inputs = tf.expand_dims(inputs, 0) - elif backend == 'tflite': - style_mel = numpy_to_tf(style_mel, tf.float32) - inputs = numpy_to_tf(inputs, tf.int32) - inputs = tf.expand_dims(inputs, 0) - # synthesize voice - if backend == 'torch': - decoder_output, postnet_output, alignments, stop_tokens = run_model_torch( - model, inputs, CONFIG, truncated, speaker_id, style_mel) - postnet_output, decoder_output, alignment, stop_tokens = parse_outputs_torch( - postnet_output, decoder_output, alignments, stop_tokens) - elif backend == 'tf': - decoder_output, postnet_output, alignments, stop_tokens = run_model_tf( - model, inputs, CONFIG, truncated, speaker_id, style_mel) - postnet_output, decoder_output, alignment, stop_tokens = parse_outputs_tf( - postnet_output, decoder_output, alignments, stop_tokens) - elif backend == 'tflite': - decoder_output, postnet_output, alignment, stop_tokens = run_model_tflite( - model, inputs, CONFIG, truncated, speaker_id, style_mel) - postnet_output, decoder_output = parse_outputs_tflite( - postnet_output, decoder_output) - # convert outputs to numpy - # plot results - wav = None - if use_griffin_lim: - wav = inv_spectrogram(postnet_output, ap, CONFIG) - # trim silence - if do_trim_silence: - wav = trim_silence(wav, ap) - return wav, alignment, decoder_output, postnet_output, stop_tokens, inputs diff --git a/utils/tensorboard_logger.py b/utils/tensorboard_logger.py deleted file mode 100644 index cbf68ad6..00000000 --- a/utils/tensorboard_logger.py +++ /dev/null @@ -1,81 +0,0 @@ -import traceback -from tensorboardX import SummaryWriter - - -class TensorboardLogger(object): - def __init__(self, log_dir, model_name): - self.model_name = model_name - self.writer = SummaryWriter(log_dir) - self.train_stats = {} - self.eval_stats = {} - - def tb_model_weights(self, model, step): - layer_num = 1 - for name, param in model.named_parameters(): - if 
param.numel() == 1: - self.writer.add_scalar( - "layer{}-{}/value".format(layer_num, name), - param.max(), step) - else: - self.writer.add_scalar( - "layer{}-{}/max".format(layer_num, name), - param.max(), step) - self.writer.add_scalar( - "layer{}-{}/min".format(layer_num, name), - param.min(), step) - self.writer.add_scalar( - "layer{}-{}/mean".format(layer_num, name), - param.mean(), step) - self.writer.add_scalar( - "layer{}-{}/std".format(layer_num, name), - param.std(), step) - self.writer.add_histogram( - "layer{}-{}/param".format(layer_num, name), param, step) - self.writer.add_histogram( - "layer{}-{}/grad".format(layer_num, name), param.grad, step) - layer_num += 1 - - def dict_to_tb_scalar(self, scope_name, stats, step): - for key, value in stats.items(): - self.writer.add_scalar('{}/{}'.format(scope_name, key), value, step) - - def dict_to_tb_figure(self, scope_name, figures, step): - for key, value in figures.items(): - self.writer.add_figure('{}/{}'.format(scope_name, key), value, step) - - def dict_to_tb_audios(self, scope_name, audios, step, sample_rate): - for key, value in audios.items(): - try: - self.writer.add_audio('{}/{}'.format(scope_name, key), value, step, sample_rate=sample_rate) - except: - traceback.print_exc() - - def tb_train_iter_stats(self, step, stats): - self.dict_to_tb_scalar(f"{self.model_name}_TrainIterStats", stats, step) - - def tb_train_epoch_stats(self, step, stats): - self.dict_to_tb_scalar(f"{self.model_name}_TrainEpochStats", stats, step) - - def tb_train_figures(self, step, figures): - self.dict_to_tb_figure(f"{self.model_name}_TrainFigures", figures, step) - - def tb_train_audios(self, step, audios, sample_rate): - self.dict_to_tb_audios(f"{self.model_name}_TrainAudios", audios, step, sample_rate) - - def tb_eval_stats(self, step, stats): - self.dict_to_tb_scalar(f"{self.model_name}_EvalStats", stats, step) - - def tb_eval_figures(self, step, figures): - self.dict_to_tb_figure(f"{self.model_name}_EvalFigures", figures, step) - - def tb_eval_audios(self, step, audios, sample_rate): - self.dict_to_tb_audios(f"{self.model_name}_EvalAudios", audios, step, sample_rate) - - def tb_test_audios(self, step, audios, sample_rate): - self.dict_to_tb_audios(f"{self.model_name}_TestAudios", audios, step, sample_rate) - - def tb_test_figures(self, step, figures): - self.dict_to_tb_figure(f"{self.model_name}_TestFigures", figures, step) - - def tb_add_text(self, title, text, step): - self.writer.add_text(title, text, step) diff --git a/utils/text/__init__.py b/utils/text/__init__.py deleted file mode 100644 index 41aa6778..00000000 --- a/utils/text/__init__.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- - -import re -from packaging import version -import phonemizer -from phonemizer.phonemize import phonemize -from TTS.utils.text import cleaners -from TTS.utils.text.symbols import make_symbols, symbols, phonemes, _phoneme_punctuations, _bos, \ - _eos - -# Mappings from symbol to numeric ID and vice versa: -_symbol_to_id = {s: i for i, s in enumerate(symbols)} -_id_to_symbol = {i: s for i, s in enumerate(symbols)} - -_phonemes_to_id = {s: i for i, s in enumerate(phonemes)} -_id_to_phonemes = {i: s for i, s in enumerate(phonemes)} - -# Regular expression matching text enclosed in curly braces: -_CURLY_RE = re.compile(r'(.*?)\{(.+?)\}(.*)') - -# Regular expression matching punctuations, ignoring empty space -PHONEME_PUNCTUATION_PATTERN = r'['+_phoneme_punctuations+']+' - - -def text2phone(text, language): - ''' - Convert graphemes to phonemes. 
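    Phonemization is delegated to the 'phonemizer' package (espeak backend); '|' is used as the phone separator so punctuation can be re-inserted afterwards.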
- ''' - seperator = phonemizer.separator.Separator(' |', '', '|') - #try: - punctuations = re.findall(PHONEME_PUNCTUATION_PATTERN, text) - if version.parse(phonemizer.__version__) < version.parse('2.1'): - ph = phonemize(text, separator=seperator, strip=False, njobs=1, backend='espeak', language=language) - ph = ph[:-1].strip() # skip the last empty character - # phonemizer does not tackle punctuations. Here we do. - # Replace \n with matching punctuations. - if punctuations: - # if text ends with a punctuation. - if text[-1] == punctuations[-1]: - for punct in punctuations[:-1]: - ph = ph.replace('| |\n', '|'+punct+'| |', 1) - ph = ph + punctuations[-1] - else: - for punct in punctuations: - ph = ph.replace('| |\n', '|'+punct+'| |', 1) - elif version.parse(phonemizer.__version__) >= version.parse('2.1'): - ph = phonemize(text, separator=seperator, strip=False, njobs=1, backend='espeak', language=language, preserve_punctuation=True) - # this is a simple fix for phonemizer. - # https://github.com/bootphon/phonemizer/issues/32 - if punctuations: - for punctuation in punctuations: - ph = ph.replace(f"| |{punctuation} ", f"|{punctuation}| |").replace(f"| |{punctuation}", f"|{punctuation}| |") - ph = ph[:-3] - else: - raise RuntimeError(" [!] Use 'phonemizer' version 2.1 or older.") - - return ph - - -def pad_with_eos_bos(phoneme_sequence, tp=None): - # pylint: disable=global-statement - global _phonemes_to_id, _bos, _eos - if tp: - _bos = tp['bos'] - _eos = tp['eos'] - _, _phonemes = make_symbols(**tp) - _phonemes_to_id = {s: i for i, s in enumerate(_phonemes)} - - return [_phonemes_to_id[_bos]] + list(phoneme_sequence) + [_phonemes_to_id[_eos]] - - -def phoneme_to_sequence(text, cleaner_names, language, enable_eos_bos=False, tp=None): - # pylint: disable=global-statement - global _phonemes_to_id - if tp: - _, _phonemes = make_symbols(**tp) - _phonemes_to_id = {s: i for i, s in enumerate(_phonemes)} - - sequence = [] - clean_text = _clean_text(text, cleaner_names) - to_phonemes = text2phone(clean_text, language) - if to_phonemes is None: - print("!! After phoneme conversion the result is None. -- {} ".format(clean_text)) - # iterate by skipping empty strings - NOTE: might be useful to keep it to have a better intonation. - for phoneme in filter(None, to_phonemes.split('|')): - sequence += _phoneme_to_sequence(phoneme) - # Append EOS char - if enable_eos_bos: - sequence = pad_with_eos_bos(sequence, tp=tp) - return sequence - - -def sequence_to_phoneme(sequence, tp=None): - # pylint: disable=global-statement - '''Converts a sequence of IDs back to a string''' - global _id_to_phonemes - result = '' - if tp: - _, _phonemes = make_symbols(**tp) - _id_to_phonemes = {i: s for i, s in enumerate(_phonemes)} - - for symbol_id in sequence: - if symbol_id in _id_to_phonemes: - s = _id_to_phonemes[symbol_id] - result += s - return result.replace('}{', ' ') - - -def text_to_sequence(text, cleaner_names, tp=None): - '''Converts a string of text to a sequence of IDs corresponding to the symbols in the text. - - The text can optionally have ARPAbet sequences enclosed in curly braces embedded - in it. For example, "Turn left on {HH AW1 S S T AH0 N} Street." 
- - Args: - text: string to convert to a sequence - cleaner_names: names of the cleaner functions to run the text through - - Returns: - List of integers corresponding to the symbols in the text - ''' - # pylint: disable=global-statement - global _symbol_to_id - if tp: - _symbols, _ = make_symbols(**tp) - _symbol_to_id = {s: i for i, s in enumerate(_symbols)} - - sequence = [] - # Check for curly braces and treat their contents as ARPAbet: - while text: - m = _CURLY_RE.match(text) - if not m: - sequence += _symbols_to_sequence(_clean_text(text, cleaner_names)) - break - sequence += _symbols_to_sequence( - _clean_text(m.group(1), cleaner_names)) - sequence += _arpabet_to_sequence(m.group(2)) - text = m.group(3) - return sequence - - -def sequence_to_text(sequence, tp=None): - '''Converts a sequence of IDs back to a string''' - # pylint: disable=global-statement - global _id_to_symbol - if tp: - _symbols, _ = make_symbols(**tp) - _id_to_symbol = {i: s for i, s in enumerate(_symbols)} - - result = '' - for symbol_id in sequence: - if symbol_id in _id_to_symbol: - s = _id_to_symbol[symbol_id] - # Enclose ARPAbet back in curly braces: - if len(s) > 1 and s[0] == '@': - s = '{%s}' % s[1:] - result += s - return result.replace('}{', ' ') - - -def _clean_text(text, cleaner_names): - for name in cleaner_names: - cleaner = getattr(cleaners, name) - if not cleaner: - raise Exception('Unknown cleaner: %s' % name) - text = cleaner(text) - return text - - -def _symbols_to_sequence(syms): - return [_symbol_to_id[s] for s in syms if _should_keep_symbol(s)] - - -def _phoneme_to_sequence(phons): - return [_phonemes_to_id[s] for s in list(phons) if _should_keep_phoneme(s)] - - -def _arpabet_to_sequence(text): - return _symbols_to_sequence(['@' + s for s in text.split()]) - - -def _should_keep_symbol(s): - return s in _symbol_to_id and s not in ['~', '^', '_'] - - -def _should_keep_phoneme(p): - return p in _phonemes_to_id and p not in ['~', '^', '_'] diff --git a/utils/text/cleaners.py b/utils/text/cleaners.py deleted file mode 100644 index f0a66f57..00000000 --- a/utils/text/cleaners.py +++ /dev/null @@ -1,123 +0,0 @@ -''' -Cleaners are transformations that run over the input text at both training and eval time. - -Cleaners can be selected by passing a comma-delimited list of cleaner names as the "cleaners" -hyperparameter. Some cleaners are English-specific. You'll typically want to use: - 1. "english_cleaners" for English text - 2. "transliteration_cleaners" for non-English text that can be transliterated to ASCII using - the Unidecode library (https://pypi.python.org/pypi/Unidecode) - 3. "basic_cleaners" if you do not want to transliterate (in this case, you should also update - the symbols in symbols.py to match your data). -''' - -import re -from unidecode import unidecode -from .number_norm import normalize_numbers - -# Regular expression matching whitespace: -_whitespace_re = re.compile(r'\s+') - -# List of (regular expression, replacement) pairs for abbreviations: -_abbreviations = [(re.compile('\\b%s\\.' 
% x[0], re.IGNORECASE), x[1]) - for x in [ - ('mrs', 'misess'), - ('mr', 'mister'), - ('dr', 'doctor'), - ('st', 'saint'), - ('co', 'company'), - ('jr', 'junior'), - ('maj', 'major'), - ('gen', 'general'), - ('drs', 'doctors'), - ('rev', 'reverend'), - ('lt', 'lieutenant'), - ('hon', 'honorable'), - ('sgt', 'sergeant'), - ('capt', 'captain'), - ('esq', 'esquire'), - ('ltd', 'limited'), - ('col', 'colonel'), - ('ft', 'fort'), - ]] - - -def expand_abbreviations(text): - for regex, replacement in _abbreviations: - text = re.sub(regex, replacement, text) - return text - - -def expand_numbers(text): - return normalize_numbers(text) - - -def lowercase(text): - return text.lower() - - -def collapse_whitespace(text): - return re.sub(_whitespace_re, ' ', text).strip() - - -def convert_to_ascii(text): - return unidecode(text) - - -def remove_aux_symbols(text): - text = re.sub(r'[\<\>\(\)\[\]\"]+', '', text) - return text - - -def replace_symbols(text): - text = text.replace(';', ',') - text = text.replace('-', ' ') - text = text.replace(':', ',') - text = text.replace('&', 'and') - return text - - -def basic_cleaners(text): - '''Basic pipeline that lowercases and collapses whitespace without transliteration.''' - text = lowercase(text) - text = collapse_whitespace(text) - return text - - -def transliteration_cleaners(text): - '''Pipeline for non-English text that transliterates to ASCII.''' - text = convert_to_ascii(text) - text = lowercase(text) - text = collapse_whitespace(text) - return text - - -# TODO: elaborate it -def basic_turkish_cleaners(text): - '''Pipeline for Turkish text''' - text = text.replace("I", "ı") - text = lowercase(text) - text = collapse_whitespace(text) - return text - - -def english_cleaners(text): - '''Pipeline for English text, including number and abbreviation expansion.''' - text = convert_to_ascii(text) - text = lowercase(text) - text = expand_numbers(text) - text = expand_abbreviations(text) - text = replace_symbols(text) - text = remove_aux_symbols(text) - text = collapse_whitespace(text) - return text - - -def phoneme_cleaners(text): - '''Pipeline for phonemes mode, including number and abbreviation expansion.''' - text = convert_to_ascii(text) - text = expand_numbers(text) - text = expand_abbreviations(text) - text = replace_symbols(text) - text = remove_aux_symbols(text) - text = collapse_whitespace(text) - return text diff --git a/utils/text/cmudict.py b/utils/text/cmudict.py deleted file mode 100644 index c0f23406..00000000 --- a/utils/text/cmudict.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- - -import re - -VALID_SYMBOLS = [ - 'AA', 'AA0', 'AA1', 'AA2', 'AE', 'AE0', 'AE1', 'AE2', 'AH', 'AH0', 'AH1', - 'AH2', 'AO', 'AO0', 'AO1', 'AO2', 'AW', 'AW0', 'AW1', 'AW2', 'AY', 'AY0', - 'AY1', 'AY2', 'B', 'CH', 'D', 'DH', 'EH', 'EH0', 'EH1', 'EH2', 'ER', 'ER0', - 'ER1', 'ER2', 'EY', 'EY0', 'EY1', 'EY2', 'F', 'G', 'HH', 'IH', 'IH0', - 'IH1', 'IH2', 'IY', 'IY0', 'IY1', 'IY2', 'JH', 'K', 'L', 'M', 'N', 'NG', - 'OW', 'OW0', 'OW1', 'OW2', 'OY', 'OY0', 'OY1', 'OY2', 'P', 'R', 'S', 'SH', - 'T', 'TH', 'UH', 'UH0', 'UH1', 'UH2', 'UW', 'UW0', 'UW1', 'UW2', 'V', 'W', - 'Y', 'Z', 'ZH' -] - - -class CMUDict: - '''Thin wrapper around CMUDict data. 
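    Entries map upper-case words to one or more ARPAbet pronunciations; lookup() returns None for unknown words.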
http://www.speech.cs.cmu.edu/cgi-bin/cmudict''' - - def __init__(self, file_or_path, keep_ambiguous=True): - if isinstance(file_or_path, str): - with open(file_or_path, encoding='latin-1') as f: - entries = _parse_cmudict(f) - else: - entries = _parse_cmudict(file_or_path) - if not keep_ambiguous: - entries = { - word: pron - for word, pron in entries.items() if len(pron) == 1 - } - self._entries = entries - - def __len__(self): - return len(self._entries) - - def lookup(self, word): - '''Returns list of ARPAbet pronunciations of the given word.''' - return self._entries.get(word.upper()) - - @staticmethod - def get_arpabet(word, cmudict, punctuation_symbols): - first_symbol, last_symbol = '', '' - if word and word[0] in punctuation_symbols: - first_symbol = word[0] - word = word[1:] - if word and word[-1] in punctuation_symbols: - last_symbol = word[-1] - word = word[:-1] - arpabet = cmudict.lookup(word) - if arpabet is not None: - return first_symbol + '{%s}' % arpabet[0] + last_symbol - return first_symbol + word + last_symbol - - -_alt_re = re.compile(r'\([0-9]+\)') - - -def _parse_cmudict(file): - cmudict = {} - for line in file: - if line and (line[0] >= 'A' and line[0] <= 'Z' or line[0] == "'"): - parts = line.split(' ') - word = re.sub(_alt_re, '', parts[0]) - pronunciation = _get_pronunciation(parts[1]) - if pronunciation: - if word in cmudict: - cmudict[word].append(pronunciation) - else: - cmudict[word] = [pronunciation] - return cmudict - - -def _get_pronunciation(s): - parts = s.strip().split(' ') - for part in parts: - if part not in VALID_SYMBOLS: - return None - return ' '.join(parts) diff --git a/utils/text/number_norm.py b/utils/text/number_norm.py deleted file mode 100644 index 7b539bff..00000000 --- a/utils/text/number_norm.py +++ /dev/null @@ -1,71 +0,0 @@ -""" from https://github.com/keithito/tacotron """ - -import inflect -import re - -_inflect = inflect.engine() -_comma_number_re = re.compile(r'([0-9][0-9\,]+[0-9])') -_decimal_number_re = re.compile(r'([0-9]+\.[0-9]+)') -_pounds_re = re.compile(r'£([0-9\,]*[0-9]+)') -_dollars_re = re.compile(r'\$([0-9\.\,]*[0-9]+)') -_ordinal_re = re.compile(r'[0-9]+(st|nd|rd|th)') -_number_re = re.compile(r'[0-9]+') - - -def _remove_commas(m): - return m.group(1).replace(',', '') - - -def _expand_decimal_point(m): - return m.group(1).replace('.', ' point ') - - -def _expand_dollars(m): - match = m.group(1) - parts = match.split('.') - if len(parts) > 2: - return match + ' dollars' # Unexpected format - dollars = int(parts[0]) if parts[0] else 0 - cents = int(parts[1]) if len(parts) > 1 and parts[1] else 0 - if dollars and cents: - dollar_unit = 'dollar' if dollars == 1 else 'dollars' - cent_unit = 'cent' if cents == 1 else 'cents' - return '%s %s, %s %s' % (dollars, dollar_unit, cents, cent_unit) - elif dollars: - dollar_unit = 'dollar' if dollars == 1 else 'dollars' - return '%s %s' % (dollars, dollar_unit) - elif cents: - cent_unit = 'cent' if cents == 1 else 'cents' - return '%s %s' % (cents, cent_unit) - else: - return 'zero dollars' - - -def _expand_ordinal(m): - return _inflect.number_to_words(m.group(0)) - - -def _expand_number(m): - num = int(m.group(0)) - if 1000 < num < 3000: - if num == 2000: - return 'two thousand' - if 2000 < num < 2010: - return 'two thousand ' + _inflect.number_to_words(num % 100) - if num % 100 == 0: - return _inflect.number_to_words(num // 100) + ' hundred' - return _inflect.number_to_words(num, - andword='', - zero='oh', - group=2).replace(', ', ' ') - return _inflect.number_to_words(num, 
andword='') - - -def normalize_numbers(text): - text = re.sub(_comma_number_re, _remove_commas, text) - text = re.sub(_pounds_re, r'\1 pounds', text) - text = re.sub(_dollars_re, _expand_dollars, text) - text = re.sub(_decimal_number_re, _expand_decimal_point, text) - text = re.sub(_ordinal_re, _expand_ordinal, text) - text = re.sub(_number_re, _expand_number, text) - return text diff --git a/utils/text/symbols.py b/utils/text/symbols.py deleted file mode 100644 index 544277c5..00000000 --- a/utils/text/symbols.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -''' -Defines the set of symbols used in text input to the model. - -The default is a set of ASCII characters that works well for English or text that has been run -through Unidecode. For other data, you can modify _characters. See TRAINING_DATA.md for details. -''' -def make_symbols(characters, phonemes, punctuations='!\'(),-.:;? ', pad='_', eos='~', bos='^'):# pylint: disable=redefined-outer-name - ''' Function to create symbols and phonemes ''' - _phonemes_sorted = sorted(list(phonemes)) - - # Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters): - _arpabet = ['@' + s for s in _phonemes_sorted] - - # Export all symbols: - _symbols = [pad, eos, bos] + list(characters) + _arpabet - _phonemes = [pad, eos, bos] + list(_phonemes_sorted) + list(punctuations) - - return _symbols, _phonemes - -_pad = '_' -_eos = '~' -_bos = '^' -_characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!\'(),-.:;? ' -_punctuations = '!\'(),-.:;? ' -_phoneme_punctuations = '.!;:,?' - -# Phonemes definition -_vowels = 'iyɨʉɯuɪʏʊeøɘəɵɤoɛœɜɞʌɔæɐaɶɑɒᵻ' -_non_pulmonic_consonants = 'ʘɓǀɗǃʄǂɠǁʛ' -_pulmonic_consonants = 'pbtdʈɖcɟkɡqɢʔɴŋɲɳnɱmʙrʀⱱɾɽɸβfvθðszʃʒʂʐçʝxɣχʁħʕhɦɬɮʋɹɻjɰlɭʎʟ' -_suprasegmentals = 'ˈˌːˑ' -_other_symbols = 'ʍwɥʜʢʡɕʑɺɧ' -_diacrilics = 'ɚ˞ɫ' -_phonemes = _vowels + _non_pulmonic_consonants + _pulmonic_consonants + _suprasegmentals + _other_symbols + _diacrilics - -symbols, phonemes = make_symbols(_characters, _phonemes, _punctuations, _pad, _eos, _bos) - -# Generate ALIEN language -# from random import shuffle -# shuffle(phonemes) - -if __name__ == '__main__': - print(" > TTS symbols {}".format(len(symbols))) - print(symbols) - print(" > TTS phonemes {}".format(len(phonemes))) - print(phonemes) diff --git a/utils/training.py b/utils/training.py deleted file mode 100644 index 9046f9e0..00000000 --- a/utils/training.py +++ /dev/null @@ -1,108 +0,0 @@ -import torch -import numpy as np - - -def setup_torch_training_env(cudnn_enable, cudnn_benchmark): - torch.backends.cudnn.enabled = cudnn_enable - torch.backends.cudnn.benchmark = cudnn_benchmark - torch.manual_seed(54321) - use_cuda = torch.cuda.is_available() - num_gpus = torch.cuda.device_count() - print(" > Using CUDA: ", use_cuda) - print(" > Number of GPUs: ", num_gpus) - return use_cuda, num_gpus - - -def check_update(model, grad_clip, ignore_stopnet=False): - r'''Check model gradient against unexpected jumps and failures''' - skip_flag = False - if ignore_stopnet: - grad_norm = torch.nn.utils.clip_grad_norm_([param for name, param in model.named_parameters() if 'stopnet' not in name], grad_clip) - else: - grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip) - # compatibility with different torch versions - if isinstance(grad_norm, float): - if np.isinf(grad_norm): - print(" | > Gradient is INF !!") - skip_flag = True - else: - if torch.isinf(grad_norm): - print(" | > Gradient is INF !!") - skip_flag = True - return 
grad_norm, skip_flag - - -def lr_decay(init_lr, global_step, warmup_steps): - r'''from https://github.com/r9y9/tacotron_pytorch/blob/master/train.py''' - warmup_steps = float(warmup_steps) - step = global_step + 1. - lr = init_lr * warmup_steps**0.5 * np.minimum(step * warmup_steps**-1.5, - step**-0.5) - return lr - - -def adam_weight_decay(optimizer): - """ - Custom weight decay operation, not effecting grad values. - """ - for group in optimizer.param_groups: - for param in group['params']: - current_lr = group['lr'] - weight_decay = group['weight_decay'] - factor = -weight_decay * group['lr'] - param.data = param.data.add(param.data, - alpha=factor) - return optimizer, current_lr - -# pylint: disable=dangerous-default-value -def set_weight_decay(model, weight_decay, skip_list={"decoder.attention.v", "rnn", "lstm", "gru", "embedding"}): - """ - Skip biases, BatchNorm parameters, rnns. - and attention projection layer v - """ - decay = [] - no_decay = [] - for name, param in model.named_parameters(): - if not param.requires_grad: - continue - - if len(param.shape) == 1 or any([skip_name in name for skip_name in skip_list]): - no_decay.append(param) - else: - decay.append(param) - return [{ - 'params': no_decay, - 'weight_decay': 0. - }, { - 'params': decay, - 'weight_decay': weight_decay - }] - - -# pylint: disable=protected-access -class NoamLR(torch.optim.lr_scheduler._LRScheduler): - def __init__(self, optimizer, warmup_steps=0.1, last_epoch=-1): - self.warmup_steps = float(warmup_steps) - super(NoamLR, self).__init__(optimizer, last_epoch) - - def get_lr(self): - step = max(self.last_epoch, 1) - return [ - base_lr * self.warmup_steps**0.5 * - min(step * self.warmup_steps**-1.5, step**-0.5) - for base_lr in self.base_lrs - ] - - -def gradual_training_scheduler(global_step, config): - """Setup the gradual training schedule wrt number - of active GPUs""" - num_gpus = torch.cuda.device_count() - if num_gpus == 0: - num_gpus = 1 - new_values = None - # we set the scheduling wrt num_gpus - for values in config.gradual_training: - if global_step * num_gpus >= values[0]: - new_values = values - return new_values[1], new_values[2] diff --git a/utils/visual.py b/utils/visual.py deleted file mode 100644 index b4ebec9a..00000000 --- a/utils/visual.py +++ /dev/null @@ -1,93 +0,0 @@ -import torch -import librosa -import matplotlib -matplotlib.use('Agg') -import matplotlib.pyplot as plt -from TTS.utils.text import phoneme_to_sequence, sequence_to_phoneme - - -def plot_alignment(alignment, info=None, fig_size=(16, 10), title=None): - if isinstance(alignment, torch.Tensor): - alignment_ = alignment.detach().cpu().numpy().squeeze() - else: - alignment_ = alignment - fig, ax = plt.subplots(figsize=fig_size) - im = ax.imshow( - alignment_.T, aspect='auto', origin='lower', interpolation='none') - fig.colorbar(im, ax=ax) - xlabel = 'Decoder timestep' - if info is not None: - xlabel += '\n\n' + info - plt.xlabel(xlabel) - plt.ylabel('Encoder timestep') - # plt.yticks(range(len(text)), list(text)) - plt.tight_layout() - if title is not None: - plt.title(title) - return fig - - -def plot_spectrogram(spectrogram, ap=None, fig_size=(16, 10)): - if isinstance(spectrogram, torch.Tensor): - spectrogram_ = spectrogram.detach().cpu().numpy().squeeze().T - else: - spectrogram_ = spectrogram.T - if ap is not None: - spectrogram_ = ap._denormalize(spectrogram_) # pylint: disable=protected-access - fig = plt.figure(figsize=fig_size) - plt.imshow(spectrogram_, aspect="auto", origin="lower") - plt.colorbar() - 
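    # the figure is returned (the 'Agg' backend never shows it) so callers such as the TensorBoard logger can save or log it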
plt.tight_layout() - return fig - - -def visualize(alignment, postnet_output, stop_tokens, text, hop_length, CONFIG, decoder_output=None, output_path=None, figsize=(8, 24)): - if decoder_output is not None: - num_plot = 4 - else: - num_plot = 3 - - label_fontsize = 16 - fig = plt.figure(figsize=figsize) - - plt.subplot(num_plot, 1, 1) - plt.imshow(alignment.T, aspect="auto", origin="lower", interpolation=None) - plt.xlabel("Decoder timestamp", fontsize=label_fontsize) - plt.ylabel("Encoder timestamp", fontsize=label_fontsize) - # compute phoneme representation and back - if CONFIG.use_phonemes: - seq = phoneme_to_sequence(text, [CONFIG.text_cleaner], CONFIG.phoneme_language, CONFIG.enable_eos_bos_chars, tp=CONFIG.characters if 'characters' in CONFIG.keys() else None) - text = sequence_to_phoneme(seq, tp=CONFIG.characters if 'characters' in CONFIG.keys() else None) - print(text) - plt.yticks(range(len(text)), list(text)) - plt.colorbar() - # plot stopnet predictions - plt.subplot(num_plot, 1, 2) - plt.plot(range(len(stop_tokens)), list(stop_tokens)) - # plot postnet spectrogram - plt.subplot(num_plot, 1, 3) - librosa.display.specshow(postnet_output.T, sr=CONFIG.audio['sample_rate'], - hop_length=hop_length, x_axis="time", y_axis="linear", - fmin=CONFIG.audio['mel_fmin'], - fmax=CONFIG.audio['mel_fmax']) - - plt.xlabel("Time", fontsize=label_fontsize) - plt.ylabel("Hz", fontsize=label_fontsize) - plt.tight_layout() - plt.colorbar() - - if decoder_output is not None: - plt.subplot(num_plot, 1, 4) - librosa.display.specshow(decoder_output.T, sr=CONFIG.audio['sample_rate'], - hop_length=hop_length, x_axis="time", y_axis="linear", - fmin=CONFIG.audio['mel_fmin'], - fmax=CONFIG.audio['mel_fmax']) - plt.xlabel("Time", fontsize=label_fontsize) - plt.ylabel("Hz", fontsize=label_fontsize) - plt.tight_layout() - plt.colorbar() - - if output_path: - print(output_path) - fig.savefig(output_path) - plt.close() diff --git a/vocoder/README.md b/vocoder/README.md deleted file mode 100644 index e3baf1f9..00000000 --- a/vocoder/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# Mozilla TTS Vocoders (Experimental) - -We provide different vocoder implementations here, which can be combined with our TTS models to enable a "FASTER THAN REAL-TIME" end-to-end TTS stack. - -Currently, there are implementations of the following models. - -- Melgan -- MultiBand-Melgan -- GAN-TTS (Discriminator Only) - -It is also very easy to adapt different vocoder models, as we provide a flexible and modular (but not too modular) framework here. - -## Training a model - -An example Colab notebook training MelGAN with the LJSpeech dataset will be linked here soon. - -In order to train a new model, you need to collect all your wav files under a common parent folder and set this path in the `data_path` field of `config.json`. - -You need to define the other relevant parameters in your `config.json` and then start training with the following command from the Mozilla TTS root path, where '0' is the id of the GPU you wish to use. - -```CUDA_VISIBLE_DEVICES='0' python vocoder/train.py --config_path path/to/config.json``` - -Example config files can be found under the `vocoder/configs/` folder. - -You can continue a previous training run with the following command. - -```CUDA_VISIBLE_DEVICES='0' python vocoder/train.py --continue_path path/to/your/model/folder``` - -You can fine-tune a pre-trained model with the following command.
- -```CUDA_VISIBLE_DEVICES='0' python vocoder/train.py --restore_path path/to/your/model.pth.tar``` - -Restoring a model starts a new training run in a different output folder; it only restores model weights from the given checkpoint file. Continuing a training run, however, resumes from the same conditions where the previous run left off. - -You can also follow your training runs on TensorBoard as you do with our TTS models. - -## Acknowledgement -Thanks to @kan-bayashi for his [repository](https://github.com/kan-bayashi/ParallelWaveGAN), which was the starting point of our work. diff --git a/vocoder/__init__.py b/vocoder/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/vocoder/configs/multiband-melgan_and_rwd_config.json b/vocoder/configs/multiband-melgan_and_rwd_config.json deleted file mode 100644 index 0b751854..00000000 --- a/vocoder/configs/multiband-melgan_and_rwd_config.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "run_name": "multiband-melgan-rwd", - "run_description": "multiband melgan with random window discriminator from https://arxiv.org/pdf/1909.11646.pdf", - - // AUDIO PARAMETERS - "audio":{ - // stft parameters - "num_freq": 513, // number of stft frequency levels. Size of the linear spectrogram frame. - "win_length": 1024, // stft window length in samples. - "hop_length": 256, // stft window hop length in samples. - "frame_length_ms": null, // stft window length in ms. If null, 'win_length' is used. - "frame_shift_ms": null, // stft window hop length in ms. If null, 'hop_length' is used. - - // Audio processing parameters - "sample_rate": 22050, // DATASET-RELATED: wav sample rate. If different from the original data, it is resampled. - "preemphasis": 0.0, // pre-emphasis to reduce spec noise and make it more structured. If 0.0, no pre-emphasis is applied. - "ref_level_db": 20, // reference level dB; theoretically 20 dB is the sound of air. - - // Silence trimming - "do_trim_silence": true,// enable trimming of silence in audio as you load it. LJSpeech (false), TWEB (false), Nancy (true) - "trim_db": 60, // threshold for trimming silence. Set this according to your dataset. - - // Griffin-Lim - "power": 1.5, // value to sharpen wav signals after GL algorithm. - "griffin_lim_iters": 60,// number of Griffin-Lim iterations. 30-60 is a good range. The larger the value, the slower the generation. - - // MelSpectrogram parameters - "num_mels": 80, // size of the mel spec frame. - "mel_fmin": 0.0, // minimum freq level for mel-spec. ~50 for male and ~95 for female voices. Tune for dataset!! - "mel_fmax": 8000.0, // maximum freq level for mel-spec. Tune for dataset!! - - // Normalization parameters - "signal_norm": true, // normalize spec values. Mean-variance normalization if 'stats_path' is defined, otherwise range normalization defined by the other params. - "min_level_db": -100, // lower bound for normalization - "symmetric_norm": true, // move normalization to range [-1, 1] - "max_norm": 4.0, // scale normalization to range [-max_norm, max_norm] or [0, max_norm] - "clip_norm": true, // clip normalized values into the range. - "stats_path": null // DO NOT USE WITH MULTI_SPEAKER MODEL. scaler stats file computed by 'compute_statistics.py'.
If it is defined, mean-std based notmalization is used and other normalization params are ignored - }, - - // DISTRIBUTED TRAINING - // "distributed":{ - // "backend": "nccl", - // "url": "tcp:\/\/localhost:54321" - // }, - - // MODEL PARAMETERS - "use_pqmf": true, - - // LOSS PARAMETERS - "use_stft_loss": true, - "use_subband_stft_loss": true, - "use_mse_gan_loss": true, - "use_hinge_gan_loss": false, - "use_feat_match_loss": false, // use only with melgan discriminators - - // loss weights - "stft_loss_weight": 0.5, - "subband_stft_loss_weight": 0.5, - "mse_G_loss_weight": 2.5, - "hinge_G_loss_weight": 2.5, - "feat_match_loss_weight": 25, - - // multiscale stft loss parameters - "stft_loss_params": { - "n_ffts": [1024, 2048, 512], - "hop_lengths": [120, 240, 50], - "win_lengths": [600, 1200, 240] - }, - - // subband multiscale stft loss parameters - "subband_stft_loss_params":{ - "n_ffts": [384, 683, 171], - "hop_lengths": [30, 60, 10], - "win_lengths": [150, 300, 60] - }, - - "target_loss": "avg_G_loss", // loss value to pick the best model to save after each epoch - - // DISCRIMINATOR - "discriminator_model": "random_window_discriminator", - "discriminator_model_params":{ - "uncond_disc_donwsample_factors": [8, 4], - "cond_disc_downsample_factors": [[8, 4, 2, 2, 2], [8, 4, 2, 2], [8, 4, 2], [8, 4], [4, 2, 2]], - "cond_disc_out_channels": [[128, 128, 256, 256], [128, 256, 256], [128, 256], [256], [128, 256]], - "window_sizes": [512, 1024, 2048, 4096, 8192] - }, - "steps_to_start_discriminator": 200000, // steps required to start GAN trainining.1 - - // GENERATOR - "generator_model": "multiband_melgan_generator", - "generator_model_params": { - "upsample_factors":[8, 4, 2], - "num_res_blocks": 4 - }, - - // DATASET - "data_path": "/home/erogol/Data/LJSpeech-1.1/wavs/", - "seq_len": 16384, - "pad_short": 2000, - "conv_pad": 0, - "use_noise_augment": false, - "use_cache": true, - - "reinit_layers": [], // give a list of layer names to restore from the given checkpoint. If not defined, it reloads all heuristically matching layers. - - // TRAINING - "batch_size": 64, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'. - - // VALIDATION - "run_eval": true, - "test_delay_epochs": 10, //Until attention is aligned, testing only wastes computation time. - "test_sentences_file": null, // set a file to load sentences to be used for testing. If it is null then we use default english sentences. - - // OPTIMIZER - "noam_schedule": false, // use noam warmup and lr schedule. - "warmup_steps_gen": 4000, // Noam decay steps to increase the learning rate from 0 to "lr" - "warmup_steps_disc": 4000, - "epochs": 10000, // total number of epochs to train. - "wd": 0.0, // Weight decay weight. - "gen_clip_grad": -1, // Generator gradient clipping threshold. Apply gradient clipping if > 0 - "disc_clip_grad": -1, // Discriminator gradient clipping threshold. - "lr_scheduler_gen": "MultiStepLR", // one of the schedulers from https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate - "lr_scheduler_gen_params": { - "gamma": 0.5, - "milestones": [100000, 200000, 300000, 400000, 500000, 600000] - }, - "lr_scheduler_disc": "MultiStepLR", // one of the schedulers from https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate - "lr_scheduler_disc_params": { - "gamma": 0.5, - "milestones": [100000, 200000, 300000, 400000, 500000, 600000] - }, - "lr_gen": 1e-4, // Initial learning rate. 
If Noam decay is active, maximum learning rate. - "lr_disc": 1e-4, - - // TENSORBOARD and LOGGING - "print_step": 25, // Number of steps to log training on console. - "print_eval": false, // If True, it prints loss values for each step in the eval run. - "save_step": 25000, // Number of training steps between plotting training stats on TB and saving model checkpoints. - "checkpoint": true, // If true, it saves checkpoints per "save_step" - "tb_model_param_stats": false, // If true, plots param stats per layer on tensorboard. Might be memory consuming, but good for debugging. - - // DATA LOADING - "num_loader_workers": 4, // number of training data loader processes. Don't set it too big. 4-8 are good values. - "num_val_loader_workers": 4, // number of evaluation data loader processes. - "eval_split_size": 10, - - // PATHS - "output_path": "/home/erogol/Models/LJSpeech/" -} - diff --git a/vocoder/datasets/__init__.py b/vocoder/datasets/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/vocoder/datasets/gan_dataset.py b/vocoder/datasets/gan_dataset.py deleted file mode 100644 index af23fbf2..00000000 --- a/vocoder/datasets/gan_dataset.py +++ /dev/null @@ -1,127 +0,0 @@ -import os -import glob -import torch -import random -import numpy as np -from torch.utils.data import Dataset -from multiprocessing import Manager - - -class GANDataset(Dataset): - """ - GAN Dataset searches for all the wav files under the root path, - converts them to acoustic features on the fly and returns - random segments of (audio, feature) pairs. - """ - def __init__(self, - ap, - items, - seq_len, - hop_len, - pad_short, - conv_pad=2, - is_training=True, - return_segments=True, - use_noise_augment=False, - use_cache=False, - verbose=False): - - self.ap = ap - self.item_list = items - self.compute_feat = not isinstance(items[0], (tuple, list)) - self.seq_len = seq_len - self.hop_len = hop_len - self.pad_short = pad_short - self.conv_pad = conv_pad - self.is_training = is_training - self.return_segments = return_segments - self.use_cache = use_cache - self.use_noise_augment = use_noise_augment - self.verbose = verbose - - assert seq_len % hop_len == 0, " [!] seq_len has to be a multiple of hop_len."
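# Worked example, assuming the hyperparameters from the multiband-melgan config above (seq_len=16384, hop_len=256, conv_pad=0):
# feat_frame_len = 16384 // 256 + 2 * 0 = 64 mel frames, and the matching audio window sliced in load_item() is
# 64 * 256 = 16384 samples, which is why seq_len has to be a multiple of hop_len.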
- self.feat_frame_len = seq_len // hop_len + (2 * conv_pad) - - # map G and D instances - self.G_to_D_mappings = list(range(len(self.item_list))) - self.shuffle_mapping() - - # cache acoustic features - if use_cache: - self.create_feature_cache() - - def create_feature_cache(self): - self.manager = Manager() - self.cache = self.manager.list() - self.cache += [None for _ in range(len(self.item_list))] - - @staticmethod - def find_wav_files(path): - return glob.glob(os.path.join(path, '**', '*.wav'), recursive=True) - - def __len__(self): - return len(self.item_list) - - def __getitem__(self, idx): - """ Return different items for Generator and Discriminator and - cache acoustic features """ - if self.return_segments: - idx2 = self.G_to_D_mappings[idx] - item1 = self.load_item(idx) - item2 = self.load_item(idx2) - return item1, item2 - item1 = self.load_item(idx) - return item1 - - def shuffle_mapping(self): - random.shuffle(self.G_to_D_mappings) - - def load_item(self, idx): - """ load (audio, feat) couple """ - if self.compute_feat: - # compute features from wav - wavpath = self.item_list[idx] - # print(wavpath) - - if self.use_cache and self.cache[idx] is not None: - audio, mel = self.cache[idx] - else: - audio = self.ap.load_wav(wavpath) - - if len(audio) < self.seq_len + self.pad_short: - audio = np.pad(audio, (0, self.seq_len + self.pad_short - len(audio)), \ - mode='constant', constant_values=0.0) - - mel = self.ap.melspectrogram(audio) - else: - - # load precomputed features - wavpath, feat_path = self.item_list[idx] - - if self.use_cache and self.cache[idx] is not None: - audio, mel = self.cache[idx] - else: - audio = self.ap.load_wav(wavpath) - mel = np.load(feat_path) - - # correct the audio length wrt padding applied in stft - audio = np.pad(audio, (0, self.hop_len), mode="edge") - audio = audio[:mel.shape[-1] * self.hop_len] - assert mel.shape[-1] * self.hop_len == audio.shape[-1], f' [!] 
{mel.shape[-1] * self.hop_len} vs {audio.shape[-1]}' - - audio = torch.from_numpy(audio).float().unsqueeze(0) - mel = torch.from_numpy(mel).float().squeeze(0) - - if self.return_segments: - max_mel_start = mel.shape[1] - self.feat_frame_len - mel_start = random.randint(0, max_mel_start) - mel_end = mel_start + self.feat_frame_len - mel = mel[:, mel_start:mel_end] - - audio_start = mel_start * self.hop_len - audio = audio[:, audio_start:audio_start + - self.seq_len] - - if self.use_noise_augment and self.is_training and self.return_segments: - audio = audio + (1 / 32768) * torch.randn_like(audio) - return (mel, audio) diff --git a/vocoder/datasets/preprocess.py b/vocoder/datasets/preprocess.py deleted file mode 100644 index be60c13a..00000000 --- a/vocoder/datasets/preprocess.py +++ /dev/null @@ -1,37 +0,0 @@ -import glob -import os -from pathlib import Path - -import numpy as np - - -def find_wav_files(data_path): - wav_paths = glob.glob(os.path.join(data_path, '**', '*.wav'), recursive=True) - return wav_paths - - -def find_feat_files(data_path): - feat_paths = glob.glob(os.path.join(data_path, '**', '*.npy'), recursive=True) - return feat_paths - - -def load_wav_data(data_path, eval_split_size): - wav_paths = find_wav_files(data_path) - np.random.seed(0) - np.random.shuffle(wav_paths) - return wav_paths[:eval_split_size], wav_paths[eval_split_size:] - - -def load_wav_feat_data(data_path, feat_path, eval_split_size): - wav_paths = sorted(find_wav_files(data_path)) - feat_paths = sorted(find_feat_files(feat_path)) - assert len(wav_paths) == len(feat_paths) - for wav, feat in zip(wav_paths, feat_paths): - wav_name = Path(wav).stem - feat_name = Path(feat).stem - assert wav_name == feat_name - - items = list(zip(wav_paths, feat_paths)) - np.random.seed(0) - np.random.shuffle(items) - return items[:eval_split_size], items[eval_split_size:] diff --git a/vocoder/layers/__init__.py b/vocoder/layers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/vocoder/layers/losses.py b/vocoder/layers/losses.py deleted file mode 100644 index 431f7f45..00000000 --- a/vocoder/layers/losses.py +++ /dev/null @@ -1,309 +0,0 @@ -import torch - -from torch import nn -from torch.nn import functional as F - - -class TorchSTFT(): - def __init__(self, n_fft, hop_length, win_length, window='hann_window'): - """ Torch based STFT operation """ - self.n_fft = n_fft - self.hop_length = hop_length - self.win_length = win_length - self.window = getattr(torch, window)(win_length) - - def __call__(self, x): - # B x D x T x 2 - o = torch.stft(x, - self.n_fft, - self.hop_length, - self.win_length, - self.window, - center=True, - pad_mode="reflect", # compatible with audio.py - normalized=False, - onesided=True) - M = o[:, :, :, 0] - P = o[:, :, :, 1] - return torch.sqrt(torch.clamp(M ** 2 + P ** 2, min=1e-8)) - - -################################# -# GENERATOR LOSSES -################################# - - -class STFTLoss(nn.Module): - """ Single scale STFT Loss """ - def __init__(self, n_fft, hop_length, win_length): - super(STFTLoss, self).__init__() - self.n_fft = n_fft - self.hop_length = hop_length - self.win_length = win_length - self.stft = TorchSTFT(n_fft, hop_length, win_length) - - def forward(self, y_hat, y): - y_hat_M = self.stft(y_hat) - y_M = self.stft(y) - # magnitude loss - loss_mag = F.l1_loss(torch.log(y_M), torch.log(y_hat_M)) - # spectral convergence loss - loss_sc = torch.norm(y_M - y_hat_M, p="fro") / torch.norm(y_M, p="fro") - return loss_mag, loss_sc - -class 
MultiScaleSTFTLoss(torch.nn.Module): - """ Multi scale STFT loss """ - def __init__(self, - n_ffts=(1024, 2048, 512), - hop_lengths=(120, 240, 50), - win_lengths=(600, 1200, 240)): - super(MultiScaleSTFTLoss, self).__init__() - self.loss_funcs = torch.nn.ModuleList() - for n_fft, hop_length, win_length in zip(n_ffts, hop_lengths, win_lengths): - self.loss_funcs.append(STFTLoss(n_fft, hop_length, win_length)) - - def forward(self, y_hat, y): - N = len(self.loss_funcs) - loss_sc = 0 - loss_mag = 0 - for f in self.loss_funcs: - lm, lsc = f(y_hat, y) - loss_mag += lm - loss_sc += lsc - loss_sc /= N - loss_mag /= N - return loss_mag, loss_sc - - -class MultiScaleSubbandSTFTLoss(MultiScaleSTFTLoss): - """ Multiscale STFT loss for multi band model outputs """ - # pylint: disable=no-self-use - def forward(self, y_hat, y): - y_hat = y_hat.view(-1, 1, y_hat.shape[2]) - y = y.view(-1, 1, y.shape[2]) - return super().forward(y_hat.squeeze(1), y.squeeze(1)) - - -class MSEGLoss(nn.Module): - """ Mean Squared Generator Loss """ - # pylint: disable=no-self-use - def forward(self, score_real): - loss_fake = F.mse_loss(score_real, score_real.new_ones(score_real.shape)) - return loss_fake - - -class HingeGLoss(nn.Module): - """ Hinge Discriminator Loss """ - # pylint: disable=no-self-use - def forward(self, score_real): - # TODO: this might be wrong - loss_fake = torch.mean(F.relu(1. - score_real)) - return loss_fake - - -################################## -# DISCRIMINATOR LOSSES -################################## - - -class MSEDLoss(nn.Module): - """ Mean Squared Discriminator Loss """ - def __init__(self,): - super(MSEDLoss, self).__init__() - self.loss_func = nn.MSELoss() - - # pylint: disable=no-self-use - def forward(self, score_fake, score_real): - loss_real = self.loss_func(score_real, score_real.new_ones(score_real.shape)) - loss_fake = self.loss_func(score_fake, score_fake.new_zeros(score_fake.shape)) - loss_d = loss_real + loss_fake - return loss_d, loss_real, loss_fake - - -class HingeDLoss(nn.Module): - """ Hinge Discriminator Loss """ - # pylint: disable=no-self-use - def forward(self, score_fake, score_real): - loss_real = torch.mean(F.relu(1. - score_real)) - loss_fake = torch.mean(F.relu(1. 
+ score_fake)) - loss_d = loss_real + loss_fake - return loss_d, loss_real, loss_fake - - -class MelganFeatureLoss(nn.Module): - def __init__(self,): - super(MelganFeatureLoss, self).__init__() - self.loss_func = nn.L1Loss() - - # pylint: disable=no-self-use - def forward(self, fake_feats, real_feats): - loss_feats = 0 - for fake_feat, real_feat in zip(fake_feats, real_feats): - loss_feats += self.loss_func(fake_feat, real_feat) - loss_feats /= len(fake_feats) + len(real_feats) - return loss_feats - - -##################################### -# LOSS WRAPPERS -##################################### - - -def _apply_G_adv_loss(scores_fake, loss_func): - """ Compute G adversarial loss function - and normalize values """ - adv_loss = 0 - if isinstance(scores_fake, list): - for score_fake in scores_fake: - fake_loss = loss_func(score_fake) - adv_loss += fake_loss - adv_loss /= len(scores_fake) - else: - fake_loss = loss_func(scores_fake) - adv_loss = fake_loss - return adv_loss - - -def _apply_D_loss(scores_fake, scores_real, loss_func): - """ Compute D loss func and normalize loss values """ - loss = 0 - real_loss = 0 - fake_loss = 0 - if isinstance(scores_fake, list): - # multi-scale loss - for score_fake, score_real in zip(scores_fake, scores_real): - total_loss, real_loss, fake_loss = loss_func(score_fake=score_fake, score_real=score_real) - loss += total_loss - real_loss += real_loss - fake_loss += fake_loss - # normalize loss values with number of scales - loss /= len(scores_fake) - real_loss /= len(scores_real) - fake_loss /= len(scores_fake) - else: - # single scale loss - total_loss, real_loss, fake_loss = loss_func(scores_fake, scores_real) - loss = total_loss - return loss, real_loss, fake_loss - - -################################## -# MODEL LOSSES -################################## - - -class GeneratorLoss(nn.Module): - def __init__(self, C): - """ Compute Generator Loss values depending on training - configuration """ - super(GeneratorLoss, self).__init__() - assert not(C.use_mse_gan_loss and C.use_hinge_gan_loss),\ - " [!] Cannot use HingeGANLoss and MSEGANLoss together." 
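# Example composition, assuming the flags and weights of the multiband-melgan config above (STFT, subband STFT and MSE GAN losses enabled, feature matching disabled):
# G_loss = 0.5 * (stft_mg + stft_sc) + 0.5 * (subband_mg + subband_sc) + 2.5 * mse_adv,
# where the adversarial term presumably only contributes once the trainer starts passing discriminator scores (after 'steps_to_start_discriminator' steps).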
- - self.use_stft_loss = C.use_stft_loss - self.use_subband_stft_loss = C.use_subband_stft_loss - self.use_mse_gan_loss = C.use_mse_gan_loss - self.use_hinge_gan_loss = C.use_hinge_gan_loss - self.use_feat_match_loss = C.use_feat_match_loss - - self.stft_loss_weight = C.stft_loss_weight - self.subband_stft_loss_weight = C.subband_stft_loss_weight - self.mse_gan_loss_weight = C.mse_G_loss_weight - self.hinge_gan_loss_weight = C.hinge_G_loss_weight - self.feat_match_loss_weight = C.feat_match_loss_weight - - if C.use_stft_loss: - self.stft_loss = MultiScaleSTFTLoss(**C.stft_loss_params) - if C.use_subband_stft_loss: - self.subband_stft_loss = MultiScaleSubbandSTFTLoss(**C.subband_stft_loss_params) - if C.use_mse_gan_loss: - self.mse_loss = MSEGLoss() - if C.use_hinge_gan_loss: - self.hinge_loss = HingeGLoss() - if C.use_feat_match_loss: - self.feat_match_loss = MelganFeatureLoss() - - def forward(self, y_hat=None, y=None, scores_fake=None, feats_fake=None, feats_real=None, y_hat_sub=None, y_sub=None): - gen_loss = 0 - adv_loss = 0 - return_dict = {} - - # STFT Loss - if self.use_stft_loss: - stft_loss_mg, stft_loss_sc = self.stft_loss(y_hat.squeeze(1), y.squeeze(1)) - return_dict['G_stft_loss_mg'] = stft_loss_mg - return_dict['G_stft_loss_sc'] = stft_loss_sc - gen_loss += self.stft_loss_weight * (stft_loss_mg + stft_loss_sc) - - # subband STFT Loss - if self.use_subband_stft_loss: - subband_stft_loss_mg, subband_stft_loss_sc = self.subband_stft_loss(y_hat_sub, y_sub) - return_dict['G_subband_stft_loss_mg'] = subband_stft_loss_mg - return_dict['G_subband_stft_loss_sc'] = subband_stft_loss_sc - gen_loss += self.subband_stft_loss_weight * (subband_stft_loss_mg + subband_stft_loss_sc) - - # multiscale MSE adversarial loss - if self.use_mse_gan_loss and scores_fake is not None: - mse_fake_loss = _apply_G_adv_loss(scores_fake, self.mse_loss) - return_dict['G_mse_fake_loss'] = mse_fake_loss - adv_loss += self.mse_gan_loss_weight * mse_fake_loss - - # multiscale Hinge adversarial loss - if self.use_hinge_gan_loss and scores_fake is not None: - hinge_fake_loss = _apply_G_adv_loss(scores_fake, self.hinge_loss) - return_dict['G_hinge_fake_loss'] = hinge_fake_loss - adv_loss += self.hinge_gan_loss_weight * hinge_fake_loss - - # Feature Matching Loss - if self.use_feat_match_loss and feats_fake is not None: - feat_match_loss = self.feat_match_loss(feats_fake, feats_real) - return_dict['G_feat_match_loss'] = feat_match_loss - adv_loss += self.feat_match_loss_weight * feat_match_loss - return_dict['G_loss'] = gen_loss + adv_loss - return_dict['G_gen_loss'] = gen_loss - return_dict['G_adv_loss'] = adv_loss - return return_dict - - -class DiscriminatorLoss(nn.Module): - """ Compute Discriminator Loss values depending on training - configuration """ - def __init__(self, C): - super(DiscriminatorLoss, self).__init__() - assert not(C.use_mse_gan_loss and C.use_hinge_gan_loss),\ - " [!] Cannot use HingeGANLoss and MSEGANLoss together."
- - self.use_mse_gan_loss = C.use_mse_gan_loss - self.use_hinge_gan_loss = C.use_hinge_gan_loss - - if C.use_mse_gan_loss: - self.mse_loss = MSEDLoss() - if C.use_hinge_gan_loss: - self.hinge_loss = HingeDLoss() - - def forward(self, scores_fake, scores_real): - loss = 0 - return_dict = {} - - if self.use_mse_gan_loss: - mse_D_loss, mse_D_real_loss, mse_D_fake_loss = _apply_D_loss( - scores_fake=scores_fake, - scores_real=scores_real, - loss_func=self.mse_loss) - return_dict['D_mse_gan_loss'] = mse_D_loss - return_dict['D_mse_gan_real_loss'] = mse_D_real_loss - return_dict['D_mse_gan_fake_loss'] = mse_D_fake_loss - loss += mse_D_loss - - if self.use_hinge_gan_loss: - hinge_D_loss, hinge_D_real_loss, hinge_D_fake_loss = _apply_D_loss( - scores_fake=scores_fake, - scores_real=scores_real, - loss_func=self.hinge_loss) - return_dict['D_hinge_gan_loss'] = hinge_D_loss - return_dict['D_hinge_gan_real_loss'] = hinge_D_real_loss - return_dict['D_hinge_gan_fake_loss'] = hinge_D_fake_loss - loss += hinge_D_loss - - return_dict['D_loss'] = loss - return return_dict \ No newline at end of file diff --git a/vocoder/layers/melgan.py b/vocoder/layers/melgan.py deleted file mode 100644 index 58c12a2e..00000000 --- a/vocoder/layers/melgan.py +++ /dev/null @@ -1,45 +0,0 @@ -from torch import nn -from torch.nn.utils import weight_norm - - -class ResidualStack(nn.Module): - def __init__(self, channels, num_res_blocks, kernel_size): - super(ResidualStack, self).__init__() - - assert (kernel_size - 1) % 2 == 0, " [!] kernel_size has to be odd." - base_padding = (kernel_size - 1) // 2 - - self.blocks = nn.ModuleList() - for idx in range(num_res_blocks): - layer_kernel_size = kernel_size - layer_dilation = layer_kernel_size**idx - layer_padding = base_padding * layer_dilation - self.blocks += [nn.Sequential( - nn.LeakyReLU(0.2), - nn.ReflectionPad1d(layer_padding), - weight_norm( - nn.Conv1d(channels, - channels, - kernel_size=kernel_size, - dilation=layer_dilation, - bias=True)), - nn.LeakyReLU(0.2), - weight_norm( - nn.Conv1d(channels, channels, kernel_size=1, bias=True)), - )] - - self.shortcuts = nn.ModuleList([ - weight_norm(nn.Conv1d(channels, channels, kernel_size=1, - bias=True)) for i in range(num_res_blocks) - ]) - - def forward(self, x): - for block, shortcut in zip(self.blocks, self.shortcuts): - x = shortcut(x) + block(x) - return x - - def remove_weight_norm(self): - for block, shortcut in zip(self.blocks, self.shortcuts): - nn.utils.remove_weight_norm(block[2]) - nn.utils.remove_weight_norm(block[4]) - nn.utils.remove_weight_norm(shortcut) diff --git a/vocoder/layers/pqmf.py b/vocoder/layers/pqmf.py deleted file mode 100644 index ef5a3507..00000000 --- a/vocoder/layers/pqmf.py +++ /dev/null @@ -1,56 +0,0 @@ -import numpy as np -import torch -import torch.nn.functional as F - -from scipy import signal as sig - - -# adapted from -# https://github.com/kan-bayashi/ParallelWaveGAN/tree/master/parallel_wavegan -class PQMF(torch.nn.Module): - def __init__(self, N=4, taps=62, cutoff=0.15, beta=9.0): - super(PQMF, self).__init__() - - self.N = N - self.taps = taps - self.cutoff = cutoff - self.beta = beta - - QMF = sig.firwin(taps + 1, cutoff, window=('kaiser', beta)) - H = np.zeros((N, len(QMF))) - G = np.zeros((N, len(QMF))) - for k in range(N): - constant_factor = (2 * k + 1) * (np.pi / - (2 * N)) * (np.arange(taps + 1) - - ((taps - 1) / 2)) - phase = (-1)**k * np.pi / 4 - H[k] = 2 * QMF * np.cos(constant_factor + phase) - - G[k] = 2 * QMF * np.cos(constant_factor - phase) - - H = torch.from_numpy(H[:, 
None, :]).float() - G = torch.from_numpy(G[None, :, :]).float() - - self.register_buffer("H", H) - self.register_buffer("G", G) - - updown_filter = torch.zeros((N, N, N)).float() - for k in range(N): - updown_filter[k, k, 0] = 1.0 - self.register_buffer("updown_filter", updown_filter) - self.N = N - - self.pad_fn = torch.nn.ConstantPad1d(taps // 2, 0.0) - - def forward(self, x): - return self.analysis(x) - - def analysis(self, x): - return F.conv1d(x, self.H, padding=self.taps // 2, stride=self.N) - - def synthesis(self, x): - x = F.conv_transpose1d(x, - self.updown_filter * self.N, - stride=self.N) - x = F.conv1d(x, self.G, padding=self.taps // 2) - return x diff --git a/vocoder/layers/qmf.dat b/vocoder/layers/qmf.dat deleted file mode 100644 index 17eab137..00000000 --- a/vocoder/layers/qmf.dat +++ /dev/null @@ -1,640 +0,0 @@ - 0.0000000e+000 - -5.5252865e-004 - -5.6176926e-004 - -4.9475181e-004 - -4.8752280e-004 - -4.8937912e-004 - -5.0407143e-004 - -5.2265643e-004 - -5.4665656e-004 - -5.6778026e-004 - -5.8709305e-004 - -6.1327474e-004 - -6.3124935e-004 - -6.5403334e-004 - -6.7776908e-004 - -6.9416146e-004 - -7.1577365e-004 - -7.2550431e-004 - -7.4409419e-004 - -7.4905981e-004 - -7.6813719e-004 - -7.7248486e-004 - -7.8343323e-004 - -7.7798695e-004 - -7.8036647e-004 - -7.8014496e-004 - -7.7579773e-004 - -7.6307936e-004 - -7.5300014e-004 - -7.3193572e-004 - -7.2153920e-004 - -6.9179375e-004 - -6.6504151e-004 - -6.3415949e-004 - -5.9461189e-004 - -5.5645764e-004 - -5.1455722e-004 - -4.6063255e-004 - -4.0951215e-004 - -3.5011759e-004 - -2.8969812e-004 - -2.0983373e-004 - -1.4463809e-004 - -6.1733441e-005 - 1.3494974e-005 - 1.0943831e-004 - 2.0430171e-004 - 2.9495311e-004 - 4.0265402e-004 - 5.1073885e-004 - 6.2393761e-004 - 7.4580259e-004 - 8.6084433e-004 - 9.8859883e-004 - 1.1250155e-003 - 1.2577885e-003 - 1.3902495e-003 - 1.5443220e-003 - 1.6868083e-003 - 1.8348265e-003 - 1.9841141e-003 - 2.1461584e-003 - 2.3017255e-003 - 2.4625617e-003 - 2.6201759e-003 - 2.7870464e-003 - 2.9469448e-003 - 3.1125421e-003 - 3.2739613e-003 - 3.4418874e-003 - 3.6008268e-003 - 3.7603923e-003 - 3.9207432e-003 - 4.0819753e-003 - 4.2264269e-003 - 4.3730720e-003 - 4.5209853e-003 - 4.6606461e-003 - 4.7932561e-003 - 4.9137604e-003 - 5.0393023e-003 - 5.1407354e-003 - 5.2461166e-003 - 5.3471681e-003 - 5.4196776e-003 - 5.4876040e-003 - 5.5475715e-003 - 5.5938023e-003 - 5.6220643e-003 - 5.6455197e-003 - 5.6389200e-003 - 5.6266114e-003 - 5.5917129e-003 - 5.5404364e-003 - 5.4753783e-003 - 5.3838976e-003 - 5.2715759e-003 - 5.1382275e-003 - 4.9839688e-003 - 4.8109469e-003 - 4.6039530e-003 - 4.3801862e-003 - 4.1251642e-003 - 3.8456408e-003 - 3.5401247e-003 - 3.2091886e-003 - 2.8446758e-003 - 2.4508540e-003 - 2.0274176e-003 - 1.5784683e-003 - 1.0902329e-003 - 5.8322642e-004 - 2.7604519e-005 - -5.4642809e-004 - -1.1568136e-003 - -1.8039473e-003 - -2.4826724e-003 - -3.1933778e-003 - -3.9401124e-003 - -4.7222596e-003 - -5.5337211e-003 - -6.3792293e-003 - -7.2615817e-003 - -8.1798233e-003 - -9.1325330e-003 - -1.0115022e-002 - -1.1131555e-002 - -1.2185000e-002 - -1.3271822e-002 - -1.4390467e-002 - -1.5540555e-002 - -1.6732471e-002 - -1.7943338e-002 - -1.9187243e-002 - -2.0453179e-002 - -2.1746755e-002 - -2.3068017e-002 - -2.4416099e-002 - -2.5787585e-002 - -2.7185943e-002 - -2.8607217e-002 - -3.0050266e-002 - -3.1501761e-002 - -3.2975408e-002 - -3.4462095e-002 - -3.5969756e-002 - -3.7481285e-002 - -3.9005368e-002 - -4.0534917e-002 - -4.2064909e-002 - -4.3609754e-002 - -4.5148841e-002 - -4.6684303e-002 - -4.8216572e-002 - 
-4.9738576e-002 - -5.1255616e-002 - -5.2763075e-002 - -5.4245277e-002 - -5.5717365e-002 - -5.7161645e-002 - -5.8591568e-002 - -5.9983748e-002 - -6.1345517e-002 - -6.2685781e-002 - -6.3971590e-002 - -6.5224711e-002 - -6.6436751e-002 - -6.7607599e-002 - -6.8704383e-002 - -6.9763024e-002 - -7.0762871e-002 - -7.1700267e-002 - -7.2568258e-002 - -7.3362026e-002 - -7.4100364e-002 - -7.4745256e-002 - -7.5313734e-002 - -7.5800836e-002 - -7.6199248e-002 - -7.6499217e-002 - -7.6709349e-002 - -7.6817398e-002 - -7.6823001e-002 - -7.6720492e-002 - -7.6505072e-002 - -7.6174832e-002 - -7.5730576e-002 - -7.5157626e-002 - -7.4466439e-002 - -7.3640601e-002 - -7.2677464e-002 - -7.1582636e-002 - -7.0353307e-002 - -6.8966401e-002 - -6.7452502e-002 - -6.5769067e-002 - -6.3944481e-002 - -6.1960278e-002 - -5.9816657e-002 - -5.7515269e-002 - -5.5046003e-002 - -5.2409382e-002 - -4.9597868e-002 - -4.6630331e-002 - -4.3476878e-002 - -4.0145828e-002 - -3.6641812e-002 - -3.2958393e-002 - -2.9082401e-002 - -2.5030756e-002 - -2.0799707e-002 - -1.6370126e-002 - -1.1762383e-002 - -6.9636862e-003 - -1.9765601e-003 - 3.2086897e-003 - 8.5711749e-003 - 1.4128883e-002 - 1.9883413e-002 - 2.5822729e-002 - 3.1953127e-002 - 3.8277657e-002 - 4.4780682e-002 - 5.1480418e-002 - 5.8370533e-002 - 6.5440985e-002 - 7.2694330e-002 - 8.0137293e-002 - 8.7754754e-002 - 9.5553335e-002 - 1.0353295e-001 - 1.1168269e-001 - 1.2000780e-001 - 1.2850029e-001 - 1.3715518e-001 - 1.4597665e-001 - 1.5496071e-001 - 1.6409589e-001 - 1.7338082e-001 - 1.8281725e-001 - 1.9239667e-001 - 2.0212502e-001 - 2.1197359e-001 - 2.2196527e-001 - 2.3206909e-001 - 2.4230169e-001 - 2.5264803e-001 - 2.6310533e-001 - 2.7366340e-001 - 2.8432142e-001 - 2.9507167e-001 - 3.0590986e-001 - 3.1682789e-001 - 3.2781137e-001 - 3.3887227e-001 - 3.4999141e-001 - 3.6115899e-001 - 3.7237955e-001 - 3.8363500e-001 - 3.9492118e-001 - 4.0623177e-001 - 4.1756969e-001 - 4.2891199e-001 - 4.4025538e-001 - 4.5159965e-001 - 4.6293081e-001 - 4.7424532e-001 - 4.8552531e-001 - 4.9677083e-001 - 5.0798175e-001 - 5.1912350e-001 - 5.3022409e-001 - 5.4125534e-001 - 5.5220513e-001 - 5.6307891e-001 - 5.7385241e-001 - 5.8454032e-001 - 5.9511231e-001 - 6.0557835e-001 - 6.1591099e-001 - 6.2612427e-001 - 6.3619801e-001 - 6.4612697e-001 - 6.5590163e-001 - 6.6551399e-001 - 6.7496632e-001 - 6.8423533e-001 - 6.9332824e-001 - 7.0223887e-001 - 7.1094104e-001 - 7.1944626e-001 - 7.2774489e-001 - 7.3582118e-001 - 7.4368279e-001 - 7.5131375e-001 - 7.5870808e-001 - 7.6586749e-001 - 7.7277809e-001 - 7.7942875e-001 - 7.8583531e-001 - 7.9197358e-001 - 7.9784664e-001 - 8.0344858e-001 - 8.0876950e-001 - 8.1381913e-001 - 8.1857760e-001 - 8.2304199e-001 - 8.2722753e-001 - 8.3110385e-001 - 8.3469374e-001 - 8.3797173e-001 - 8.4095414e-001 - 8.4362383e-001 - 8.4598185e-001 - 8.4803158e-001 - 8.4978052e-001 - 8.5119715e-001 - 8.5230470e-001 - 8.5310209e-001 - 8.5357206e-001 - 8.5373856e-001 - 8.5357206e-001 - 8.5310209e-001 - 8.5230470e-001 - 8.5119715e-001 - 8.4978052e-001 - 8.4803158e-001 - 8.4598185e-001 - 8.4362383e-001 - 8.4095414e-001 - 8.3797173e-001 - 8.3469374e-001 - 8.3110385e-001 - 8.2722753e-001 - 8.2304199e-001 - 8.1857760e-001 - 8.1381913e-001 - 8.0876950e-001 - 8.0344858e-001 - 7.9784664e-001 - 7.9197358e-001 - 7.8583531e-001 - 7.7942875e-001 - 7.7277809e-001 - 7.6586749e-001 - 7.5870808e-001 - 7.5131375e-001 - 7.4368279e-001 - 7.3582118e-001 - 7.2774489e-001 - 7.1944626e-001 - 7.1094104e-001 - 7.0223887e-001 - 6.9332824e-001 - 6.8423533e-001 - 6.7496632e-001 - 6.6551399e-001 - 6.5590163e-001 - 6.4612697e-001 - 
6.3619801e-001 - 6.2612427e-001 - 6.1591099e-001 - 6.0557835e-001 - 5.9511231e-001 - 5.8454032e-001 - 5.7385241e-001 - 5.6307891e-001 - 5.5220513e-001 - 5.4125534e-001 - 5.3022409e-001 - 5.1912350e-001 - 5.0798175e-001 - 4.9677083e-001 - 4.8552531e-001 - 4.7424532e-001 - 4.6293081e-001 - 4.5159965e-001 - 4.4025538e-001 - 4.2891199e-001 - 4.1756969e-001 - 4.0623177e-001 - 3.9492118e-001 - 3.8363500e-001 - 3.7237955e-001 - 3.6115899e-001 - 3.4999141e-001 - 3.3887227e-001 - 3.2781137e-001 - 3.1682789e-001 - 3.0590986e-001 - 2.9507167e-001 - 2.8432142e-001 - 2.7366340e-001 - 2.6310533e-001 - 2.5264803e-001 - 2.4230169e-001 - 2.3206909e-001 - 2.2196527e-001 - 2.1197359e-001 - 2.0212502e-001 - 1.9239667e-001 - 1.8281725e-001 - 1.7338082e-001 - 1.6409589e-001 - 1.5496071e-001 - 1.4597665e-001 - 1.3715518e-001 - 1.2850029e-001 - 1.2000780e-001 - 1.1168269e-001 - 1.0353295e-001 - 9.5553335e-002 - 8.7754754e-002 - 8.0137293e-002 - 7.2694330e-002 - 6.5440985e-002 - 5.8370533e-002 - 5.1480418e-002 - 4.4780682e-002 - 3.8277657e-002 - 3.1953127e-002 - 2.5822729e-002 - 1.9883413e-002 - 1.4128883e-002 - 8.5711749e-003 - 3.2086897e-003 - -1.9765601e-003 - -6.9636862e-003 - -1.1762383e-002 - -1.6370126e-002 - -2.0799707e-002 - -2.5030756e-002 - -2.9082401e-002 - -3.2958393e-002 - -3.6641812e-002 - -4.0145828e-002 - -4.3476878e-002 - -4.6630331e-002 - -4.9597868e-002 - -5.2409382e-002 - -5.5046003e-002 - -5.7515269e-002 - -5.9816657e-002 - -6.1960278e-002 - -6.3944481e-002 - -6.5769067e-002 - -6.7452502e-002 - -6.8966401e-002 - -7.0353307e-002 - -7.1582636e-002 - -7.2677464e-002 - -7.3640601e-002 - -7.4466439e-002 - -7.5157626e-002 - -7.5730576e-002 - -7.6174832e-002 - -7.6505072e-002 - -7.6720492e-002 - -7.6823001e-002 - -7.6817398e-002 - -7.6709349e-002 - -7.6499217e-002 - -7.6199248e-002 - -7.5800836e-002 - -7.5313734e-002 - -7.4745256e-002 - -7.4100364e-002 - -7.3362026e-002 - -7.2568258e-002 - -7.1700267e-002 - -7.0762871e-002 - -6.9763024e-002 - -6.8704383e-002 - -6.7607599e-002 - -6.6436751e-002 - -6.5224711e-002 - -6.3971590e-002 - -6.2685781e-002 - -6.1345517e-002 - -5.9983748e-002 - -5.8591568e-002 - -5.7161645e-002 - -5.5717365e-002 - -5.4245277e-002 - -5.2763075e-002 - -5.1255616e-002 - -4.9738576e-002 - -4.8216572e-002 - -4.6684303e-002 - -4.5148841e-002 - -4.3609754e-002 - -4.2064909e-002 - -4.0534917e-002 - -3.9005368e-002 - -3.7481285e-002 - -3.5969756e-002 - -3.4462095e-002 - -3.2975408e-002 - -3.1501761e-002 - -3.0050266e-002 - -2.8607217e-002 - -2.7185943e-002 - -2.5787585e-002 - -2.4416099e-002 - -2.3068017e-002 - -2.1746755e-002 - -2.0453179e-002 - -1.9187243e-002 - -1.7943338e-002 - -1.6732471e-002 - -1.5540555e-002 - -1.4390467e-002 - -1.3271822e-002 - -1.2185000e-002 - -1.1131555e-002 - -1.0115022e-002 - -9.1325330e-003 - -8.1798233e-003 - -7.2615817e-003 - -6.3792293e-003 - -5.5337211e-003 - -4.7222596e-003 - -3.9401124e-003 - -3.1933778e-003 - -2.4826724e-003 - -1.8039473e-003 - -1.1568136e-003 - -5.4642809e-004 - 2.7604519e-005 - 5.8322642e-004 - 1.0902329e-003 - 1.5784683e-003 - 2.0274176e-003 - 2.4508540e-003 - 2.8446758e-003 - 3.2091886e-003 - 3.5401247e-003 - 3.8456408e-003 - 4.1251642e-003 - 4.3801862e-003 - 4.6039530e-003 - 4.8109469e-003 - 4.9839688e-003 - 5.1382275e-003 - 5.2715759e-003 - 5.3838976e-003 - 5.4753783e-003 - 5.5404364e-003 - 5.5917129e-003 - 5.6266114e-003 - 5.6389200e-003 - 5.6455197e-003 - 5.6220643e-003 - 5.5938023e-003 - 5.5475715e-003 - 5.4876040e-003 - 5.4196776e-003 - 5.3471681e-003 - 5.2461166e-003 - 5.1407354e-003 - 5.0393023e-003 - 4.9137604e-003 - 
4.7932561e-003 - 4.6606461e-003 - 4.5209853e-003 - 4.3730720e-003 - 4.2264269e-003 - 4.0819753e-003 - 3.9207432e-003 - 3.7603923e-003 - 3.6008268e-003 - 3.4418874e-003 - 3.2739613e-003 - 3.1125421e-003 - 2.9469448e-003 - 2.7870464e-003 - 2.6201759e-003 - 2.4625617e-003 - 2.3017255e-003 - 2.1461584e-003 - 1.9841141e-003 - 1.8348265e-003 - 1.6868083e-003 - 1.5443220e-003 - 1.3902495e-003 - 1.2577885e-003 - 1.1250155e-003 - 9.8859883e-004 - 8.6084433e-004 - 7.4580259e-004 - 6.2393761e-004 - 5.1073885e-004 - 4.0265402e-004 - 2.9495311e-004 - 2.0430171e-004 - 1.0943831e-004 - 1.3494974e-005 - -6.1733441e-005 - -1.4463809e-004 - -2.0983373e-004 - -2.8969812e-004 - -3.5011759e-004 - -4.0951215e-004 - -4.6063255e-004 - -5.1455722e-004 - -5.5645764e-004 - -5.9461189e-004 - -6.3415949e-004 - -6.6504151e-004 - -6.9179375e-004 - -7.2153920e-004 - -7.3193572e-004 - -7.5300014e-004 - -7.6307936e-004 - -7.7579773e-004 - -7.8014496e-004 - -7.8036647e-004 - -7.7798695e-004 - -7.8343323e-004 - -7.7248486e-004 - -7.6813719e-004 - -7.4905981e-004 - -7.4409419e-004 - -7.2550431e-004 - -7.1577365e-004 - -6.9416146e-004 - -6.7776908e-004 - -6.5403334e-004 - -6.3124935e-004 - -6.1327474e-004 - -5.8709305e-004 - -5.6778026e-004 - -5.4665656e-004 - -5.2265643e-004 - -5.0407143e-004 - -4.8937912e-004 - -4.8752280e-004 - -4.9475181e-004 - -5.6176926e-004 - -5.5252865e-004 diff --git a/vocoder/models/__init__.py b/vocoder/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/vocoder/models/melgan_discriminator.py b/vocoder/models/melgan_discriminator.py deleted file mode 100644 index 3847babb..00000000 --- a/vocoder/models/melgan_discriminator.py +++ /dev/null @@ -1,78 +0,0 @@ -import numpy as np -from torch import nn -from torch.nn.utils import weight_norm - - -class MelganDiscriminator(nn.Module): - def __init__(self, - in_channels=1, - out_channels=1, - kernel_sizes=(5, 3), - base_channels=16, - max_channels=1024, - downsample_factors=(4, 4, 4, 4)): - super(MelganDiscriminator, self).__init__() - self.layers = nn.ModuleList() - - layer_kernel_size = np.prod(kernel_sizes) - layer_padding = (layer_kernel_size - 1) // 2 - - # initial layer - self.layers += [ - nn.Sequential( - nn.ReflectionPad1d(layer_padding), - weight_norm( - nn.Conv1d(in_channels, - base_channels, - layer_kernel_size, - stride=1)), nn.LeakyReLU(0.2, inplace=True)) - ] - - # downsampling layers - layer_in_channels = base_channels - for downsample_factor in downsample_factors: - layer_out_channels = min(layer_in_channels * downsample_factor, - max_channels) - layer_kernel_size = downsample_factor * 10 + 1 - layer_padding = (layer_kernel_size - 1) // 2 - layer_groups = layer_in_channels // 4 - self.layers += [ - nn.Sequential( - weight_norm( - nn.Conv1d(layer_in_channels, - layer_out_channels, - kernel_size=layer_kernel_size, - stride=downsample_factor, - padding=layer_padding, - groups=layer_groups)), - nn.LeakyReLU(0.2, inplace=True)) - ] - layer_in_channels = layer_out_channels - - # last 2 layers - layer_padding1 = (kernel_sizes[0] - 1) // 2 - layer_padding2 = (kernel_sizes[1] - 1) // 2 - self.layers += [ - nn.Sequential( - weight_norm( - nn.Conv1d(layer_out_channels, - layer_out_channels, - kernel_size=kernel_sizes[0], - stride=1, - padding=layer_padding1)), - nn.LeakyReLU(0.2, inplace=True), - ), - weight_norm( - nn.Conv1d(layer_out_channels, - out_channels, - kernel_size=kernel_sizes[1], - stride=1, - padding=layer_padding2)), - ] - - def forward(self, x): - feats = [] - for layer in self.layers: - x = layer(x) - 
feats.append(x) - return x, feats diff --git a/vocoder/models/melgan_generator.py b/vocoder/models/melgan_generator.py deleted file mode 100644 index 01b52ea8..00000000 --- a/vocoder/models/melgan_generator.py +++ /dev/null @@ -1,98 +0,0 @@ -import torch -from torch import nn -from torch.nn.utils import weight_norm - -from TTS.vocoder.layers.melgan import ResidualStack - - -class MelganGenerator(nn.Module): - def __init__(self, - in_channels=80, - out_channels=1, - proj_kernel=7, - base_channels=512, - upsample_factors=(8, 8, 2, 2), - res_kernel=3, - num_res_blocks=3): - super(MelganGenerator, self).__init__() - - # assert model parameters - assert (proj_kernel - - 1) % 2 == 0, " [!] proj_kernel should be an odd number." - - # setup additional model parameters - base_padding = (proj_kernel - 1) // 2 - act_slope = 0.2 - self.inference_padding = 2 - - # initial layer - layers = [] - layers += [ - nn.ReflectionPad1d(base_padding), - weight_norm( - nn.Conv1d(in_channels, - base_channels, - kernel_size=proj_kernel, - stride=1, - bias=True)) - ] - - # upsampling layers and residual stacks - for idx, upsample_factor in enumerate(upsample_factors): - layer_in_channels = base_channels // (2**idx) - layer_out_channels = base_channels // (2**(idx + 1)) - layer_filter_size = upsample_factor * 2 - layer_stride = upsample_factor - layer_output_padding = upsample_factor % 2 - layer_padding = upsample_factor // 2 + layer_output_padding - layers += [ - nn.LeakyReLU(act_slope), - weight_norm( - nn.ConvTranspose1d(layer_in_channels, - layer_out_channels, - layer_filter_size, - stride=layer_stride, - padding=layer_padding, - output_padding=layer_output_padding, - bias=True)), - ResidualStack( - channels=layer_out_channels, - num_res_blocks=num_res_blocks, - kernel_size=res_kernel - ) - ] - - layers += [nn.LeakyReLU(act_slope)] - - # final layer - layers += [ - nn.ReflectionPad1d(base_padding), - weight_norm( - nn.Conv1d(layer_out_channels, - out_channels, - proj_kernel, - stride=1, - bias=True)), - nn.Tanh() - ] - self.layers = nn.Sequential(*layers) - - def forward(self, c): - return self.layers(c) - - def inference(self, c): - c = c.to(self.layers[1].weight.device) - c = torch.nn.functional.pad( - c, - (self.inference_padding, self.inference_padding), - 'replicate') - return self.layers(c) - - def remove_weight_norm(self): - for _, layer in enumerate(self.layers): - if len(layer.state_dict()) != 0: - try: - nn.utils.remove_weight_norm(layer) - except ValueError: - layer.remove_weight_norm() - diff --git a/vocoder/models/melgan_multiscale_discriminator.py b/vocoder/models/melgan_multiscale_discriminator.py deleted file mode 100644 index dbcc1f30..00000000 --- a/vocoder/models/melgan_multiscale_discriminator.py +++ /dev/null @@ -1,41 +0,0 @@ -from torch import nn - -from TTS.vocoder.models.melgan_discriminator import MelganDiscriminator - - -class MelganMultiscaleDiscriminator(nn.Module): - def __init__(self, - in_channels=1, - out_channels=1, - num_scales=3, - kernel_sizes=(5, 3), - base_channels=16, - max_channels=1024, - downsample_factors=(4, 4, 4), - pooling_kernel_size=4, - pooling_stride=2, - pooling_padding=1): - super(MelganMultiscaleDiscriminator, self).__init__() - - self.discriminators = nn.ModuleList([ - MelganDiscriminator(in_channels=in_channels, - out_channels=out_channels, - kernel_sizes=kernel_sizes, - base_channels=base_channels, - max_channels=max_channels, - downsample_factors=downsample_factors) - for _ in range(num_scales) - ]) - - self.pooling = 
nn.AvgPool1d(kernel_size=pooling_kernel_size, stride=pooling_stride, padding=pooling_padding, count_include_pad=False) - - - def forward(self, x): - scores = list() - feats = list() - for disc in self.discriminators: - score, feat = disc(x) - scores.append(score) - feats.append(feat) - x = self.pooling(x) - return scores, feats \ No newline at end of file diff --git a/vocoder/models/multiband_melgan_generator.py b/vocoder/models/multiband_melgan_generator.py deleted file mode 100644 index 15e7426e..00000000 --- a/vocoder/models/multiband_melgan_generator.py +++ /dev/null @@ -1,39 +0,0 @@ -import torch - -from TTS.vocoder.models.melgan_generator import MelganGenerator -from TTS.vocoder.layers.pqmf import PQMF - - -class MultibandMelganGenerator(MelganGenerator): - def __init__(self, - in_channels=80, - out_channels=4, - proj_kernel=7, - base_channels=384, - upsample_factors=(2, 8, 2, 2), - res_kernel=3, - num_res_blocks=3): - super(MultibandMelganGenerator, - self).__init__(in_channels=in_channels, - out_channels=out_channels, - proj_kernel=proj_kernel, - base_channels=base_channels, - upsample_factors=upsample_factors, - res_kernel=res_kernel, - num_res_blocks=num_res_blocks) - self.pqmf_layer = PQMF(N=4, taps=62, cutoff=0.15, beta=9.0) - - def pqmf_analysis(self, x): - return self.pqmf_layer.analysis(x) - - def pqmf_synthesis(self, x): - return self.pqmf_layer.synthesis(x) - - @torch.no_grad() - def inference(self, cond_features): - cond_features = cond_features.to(self.layers[1].weight.device) - cond_features = torch.nn.functional.pad( - cond_features, - (self.inference_padding, self.inference_padding), - 'replicate') - return self.pqmf_synthesis(self.layers(cond_features)) diff --git a/vocoder/models/random_window_discriminator.py b/vocoder/models/random_window_discriminator.py deleted file mode 100644 index 3efd395e..00000000 --- a/vocoder/models/random_window_discriminator.py +++ /dev/null @@ -1,225 +0,0 @@ -import numpy as np -from torch import nn - - -class GBlock(nn.Module): - def __init__(self, in_channels, cond_channels, downsample_factor): - super(GBlock, self).__init__() - - self.in_channels = in_channels - self.cond_channels = cond_channels - self.downsample_factor = downsample_factor - - self.start = nn.Sequential( - nn.AvgPool1d(downsample_factor, stride=downsample_factor), - nn.ReLU(), - nn.Conv1d(in_channels, in_channels * 2, kernel_size=3, padding=1)) - self.lc_conv1d = nn.Conv1d(cond_channels, - in_channels * 2, - kernel_size=1) - self.end = nn.Sequential( - nn.ReLU(), - nn.Conv1d(in_channels * 2, - in_channels * 2, - kernel_size=3, - dilation=2, - padding=2)) - self.residual = nn.Sequential( - nn.Conv1d(in_channels, in_channels * 2, kernel_size=1), - nn.AvgPool1d(downsample_factor, stride=downsample_factor)) - - def forward(self, inputs, conditions): - outputs = self.start(inputs) + self.lc_conv1d(conditions) - outputs = self.end(outputs) - residual_outputs = self.residual(inputs) - outputs = outputs + residual_outputs - - return outputs - - -class DBlock(nn.Module): - def __init__(self, in_channels, out_channels, downsample_factor): - super(DBlock, self).__init__() - - self.in_channels = in_channels - self.downsample_factor = downsample_factor - self.out_channels = out_channels - - self.donwsample_layer = nn.AvgPool1d(downsample_factor, - stride=downsample_factor) - self.layers = nn.Sequential( - nn.ReLU(), - nn.Conv1d(in_channels, out_channels, kernel_size=3, padding=1), - nn.ReLU(), - nn.Conv1d(out_channels, - out_channels, - kernel_size=3, - dilation=2, - 
padding=2)) - self.residual = nn.Sequential( - nn.Conv1d(in_channels, out_channels, kernel_size=1), ) - - def forward(self, inputs): - if self.downsample_factor > 1: - outputs = self.layers(self.donwsample_layer(inputs))\ - + self.donwsample_layer(self.residual(inputs)) - else: - outputs = self.layers(inputs) + self.residual(inputs) - return outputs - - -class ConditionalDiscriminator(nn.Module): - def __init__(self, - in_channels, - cond_channels, - downsample_factors=(2, 2, 2), - out_channels=(128, 256)): - super(ConditionalDiscriminator, self).__init__() - - assert len(downsample_factors) == len(out_channels) + 1 - - self.in_channels = in_channels - self.cond_channels = cond_channels - self.downsample_factors = downsample_factors - self.out_channels = out_channels - - self.pre_cond_layers = nn.ModuleList() - self.post_cond_layers = nn.ModuleList() - - # layers before condition features - self.pre_cond_layers += [DBlock(in_channels, 64, 1)] - in_channels = 64 - for (i, channel) in enumerate(out_channels): - self.pre_cond_layers.append( - DBlock(in_channels, channel, downsample_factors[i])) - in_channels = channel - - # condition block - self.cond_block = GBlock(in_channels, cond_channels, - downsample_factors[-1]) - - # layers after condition block - self.post_cond_layers += [ - DBlock(in_channels * 2, in_channels * 2, 1), - DBlock(in_channels * 2, in_channels * 2, 1), - nn.AdaptiveAvgPool1d(1), - nn.Conv1d(in_channels * 2, 1, kernel_size=1), - ] - - def forward(self, inputs, conditions): - batch_size = inputs.size()[0] - outputs = inputs.view(batch_size, self.in_channels, -1) - for layer in self.pre_cond_layers: - outputs = layer(outputs) - outputs = self.cond_block(outputs, conditions) - for layer in self.post_cond_layers: - outputs = layer(outputs) - - return outputs - - -class UnconditionalDiscriminator(nn.Module): - def __init__(self, - in_channels, - base_channels=64, - downsample_factors=(8, 4), - out_channels=(128, 256)): - super(UnconditionalDiscriminator, self).__init__() - - self.downsample_factors = downsample_factors - self.in_channels = in_channels - self.downsample_factors = downsample_factors - self.out_channels = out_channels - - self.layers = nn.ModuleList() - self.layers += [DBlock(self.in_channels, base_channels, 1)] - in_channels = base_channels - for (i, factor) in enumerate(downsample_factors): - self.layers.append(DBlock(in_channels, out_channels[i], factor)) - in_channels *= 2 - self.layers += [ - DBlock(in_channels, in_channels, 1), - DBlock(in_channels, in_channels, 1), - nn.AdaptiveAvgPool1d(1), - nn.Conv1d(in_channels, 1, kernel_size=1), - ] - - def forward(self, inputs): - batch_size = inputs.size()[0] - outputs = inputs.view(batch_size, self.in_channels, -1) - for layer in self.layers: - outputs = layer(outputs) - return outputs - - -class RandomWindowDiscriminator(nn.Module): - """Random Window Discriminator as described in - http://arxiv.org/abs/1909.11646""" - def __init__(self, - cond_channels, - hop_length, - uncond_disc_donwsample_factors=(8, 4), - cond_disc_downsample_factors=((8, 4, 2, 2, 2), (8, 4, 2, 2), - (8, 4, 2), (8, 4), (4, 2, 2)), - cond_disc_out_channels=((128, 128, 256, 256), (128, 256, 256), - (128, 256), (256, ), (128, 256)), - window_sizes=(512, 1024, 2048, 4096, 8192)): - - super(RandomWindowDiscriminator, self).__init__() - self.cond_channels = cond_channels - self.window_sizes = window_sizes - self.hop_length = hop_length - self.base_window_size = self.hop_length * 2 - self.ks = [ws // self.base_window_size for ws in window_sizes] - 
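# Worked example, assuming the multiband-melgan config above (hop_length=256, window_sizes=(512, 1024, 2048, 4096, 8192)):
# base_window_size = 2 * 256 = 512, so ks = [1, 2, 4, 8, 16]; the asserts below then require
# np.prod(cond_disc_downsample_factors[i]) == 256 // ks[i], e.g. 8 * 4 * 2 * 2 * 2 = 256 for the 512-sample window.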
- # check arguments - assert len(cond_disc_downsample_factors) == len( - cond_disc_out_channels) == len(window_sizes) - for ws in window_sizes: - assert ws % hop_length == 0 - - for idx, cf in enumerate(cond_disc_downsample_factors): - assert np.prod(cf) == hop_length // self.ks[idx] - - # define layers - self.unconditional_discriminators = nn.ModuleList([]) - for k in self.ks: - layer = UnconditionalDiscriminator( - in_channels=k, - base_channels=64, - downsample_factors=uncond_disc_donwsample_factors) - self.unconditional_discriminators.append(layer) - - self.conditional_discriminators = nn.ModuleList([]) - for idx, k in enumerate(self.ks): - layer = ConditionalDiscriminator( - in_channels=k, - cond_channels=cond_channels, - downsample_factors=cond_disc_downsample_factors[idx], - out_channels=cond_disc_out_channels[idx]) - self.conditional_discriminators.append(layer) - - def forward(self, x, c): - scores = [] - feats = [] - # unconditional pass - for (window_size, layer) in zip(self.window_sizes, - self.unconditional_discriminators): - index = np.random.randint(x.shape[-1] - window_size) - - score = layer(x[:, :, index:index + window_size]) - scores.append(score) - - # conditional pass - for (window_size, layer) in zip(self.window_sizes, - self.conditional_discriminators): - frame_size = window_size // self.hop_length - lc_index = np.random.randint(c.shape[-1] - frame_size) - sample_index = lc_index * self.hop_length - x_sub = x[:, :, - sample_index:(lc_index + frame_size) * self.hop_length] - c_sub = c[:, :, lc_index:lc_index + frame_size] - - score = layer(x_sub, c_sub) - scores.append(score) - return scores, feats diff --git a/vocoder/notebooks/Untitled.ipynb b/vocoder/notebooks/Untitled.ipynb deleted file mode 100644 index ce49d6fa..00000000 --- a/vocoder/notebooks/Untitled.ipynb +++ /dev/null @@ -1,678 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "#function example with several unknowns (variables) for optimization\n", - "#Gerald Schuller, Nov. 2016\n", - "import numpy as np\n", - "\n", - "def functionexamp(x):\n", - " #x: array with 2 variables\n", - " \n", - " y=np.sin(x[0])+np.cos(x[1])\n", - " return y" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " fun: -1.9999999999888387\n", - " jac: array([4.7236681e-06, 0.0000000e+00])\n", - " message: 'Optimization terminated successfully.'\n", - " nfev: 12\n", - " nit: 2\n", - " njev: 3\n", - " status: 0\n", - " success: True\n", - " x: array([-1.5707916 , -3.14159265])\n" - ] - } - ], - "source": [ - "#Optimization example, see also:\n", - "#https://docs.scipy.org/doc/scipy-0.18.1/reference/optimize.html\n", - "#Gerald Schuller, Nov. 
2016\n", - "#run it with \"python optimizationExample.py\" in a termina shell\n", - "#or type \"ipython\" in a termina shell and copy lines below:\n", - "\n", - "import numpy as np\n", - "import scipy.optimize as optimize\n", - "\n", - "#Example for 2 unknowns, args: function-name, starting point, method:\n", - "xmin = optimize.minimize(functionexamp, [-1.0, -3.0], method='CG')\n", - "print(xmin)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "function [p,passedge] = opt_filter(filtorder,N)\n", - "\n", - "% opt_filter Create Lowpass Prototype Filter for the Pseudo-QMF \n", - "% Filter Bank with N Subbands\n", - "%\n", - "% Adapted from the paper by C. D. Creusere and S. K. Mitra, titled \n", - "% \"A simple method for designing high-quality prototype filters for \n", - "% M-band pseudo-QMF banks,\" IEEE Trans. Signal Processing,vol. 43, \n", - "% pp. 1005-1007, Apr. 1995 and the book by S. K. Mitra titled \"\n", - "% Digital Signal Processing: A Computer-Based Approach, McGraw-Hill, 2001\n", - "%\n", - "% Arguments:\n", - "% filtorder Filter order (i.e., filter length - 1)\n", - "% N Number of subbands\n", - "\n", - "stopedge = 1/N; % Stopband edge fixed at (1/N)pi\n", - "passedge = 1/(4*N); % Start value for passband edge\n", - "tol = 0.000001; % Tolerance\n", - "step = 0.1*passedge; % Step size for searching the passband edge\n", - "way = -1; % Search direction, increase or reduce the passband edge\n", - "tcost = 0; % Current error calculated with the cost function\n", - "pcost = 10; % Previous error calculated with the cost function\n", - "flag = 0; % Set to 1 to stop the search\n", - "\n", - "while flag == 0\n", - " \n", - "% Design the lowpass filter using Parks-McClellan algorithm\n", - " \n", - " p = remez(filtorder,[0,passedge,stopedge,1],[1,1,0,0],[5,1]);\n", - " \n", - "% Calculates the cost function according to Eq. (2.36)\n", - "\n", - " P = fft(p,4096);\n", - " OptRange = floor(2048/N); % 0 to pi/N\n", - " phi = zeros(OptRange,1); % Initialize to zeros\n", - "\n", - "% Compute the flatness in the range from 0 to pi/N\n", - "\n", - "\tfor k = 1:OptRange\n", - " phi(k) = abs(P(OptRange-k+2))^2 + abs(P(k))^2;\n", - "\tend\n", - "\ttcost = max(abs(phi - ones(max(size(phi)),1)));\n", - " \t\n", - "\tif tcost > pcost % If search in wrong direction\n", - "\t\tstep = step/2; % Reduce step size by half \n", - "\t\tway = -way; % Change the search direction \n", - "\tend\n", - "\t\n", - "\tif abs(pcost - tcost) < tol % If improvement is below tol \n", - "\t\tflag = 1; % Stop the search \n", - "\tend\n", - "\t\n", - "\tpcost = tcost;\n", - "\tpassedge = passedge + way*step; % Adjust the passband edge\n", - " \n", - "end" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "sig.remez" - ] - }, - { - "cell_type": "code", - "execution_count": 101, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "0.0125" - ] - }, - "execution_count": 101, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "1 / 4. 
/ 20.0" - ] - }, - { - "cell_type": "code", - "execution_count": 90, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "ename": "ValueError", - "evalue": "Band edges should be less than 1/2 the sampling frequency", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mremez\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m64\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0;36m16.0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0;36m4.0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m~/miniconda3/lib/python3.7/site-packages/scipy/signal/fir_filter_design.py\u001b[0m in \u001b[0;36mremez\u001b[0;34m(numtaps, bands, desired, weight, Hz, type, maxiter, grid_density, fs)\u001b[0m\n\u001b[1;32m 854\u001b[0m \u001b[0mbands\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbands\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcopy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 855\u001b[0m return sigtools._remez(numtaps, bands, desired, weight, tnum, fs,\n\u001b[0;32m--> 856\u001b[0;31m maxiter, grid_density)\n\u001b[0m\u001b[1;32m 857\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 858\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mValueError\u001b[0m: Band edges should be less than 1/2 the sampling frequency" - ] - } - ], - "source": [ - "p = sig.remez(65, [0, 1/16.0, 1/4.0, 1], [1, 0], [5, 1])\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "def create_pqmf_filter(filter_len=64, N=4):\n", - " stop_edge = 1 / N\n", - " pass_edge = 1 / (4 * N)\n", - " tol = 1e-8\n", - " cutoff = 0.1 * pass_edge\n", - " cost = 0\n", - " cost_prev = float('inf')\n", - " \n", - " p = sig.remez(filter_len, [0, pass_edge, stop_edge, 1], [1, 1, 0, 0], [5, 1])\n", - " \n", - " P = sig.freqz(p, workN=2048)\n", - " opt_range = 2048 // N\n", - " phi = np.zeros(opt_range)\n", - " \n", - " H = np.abs(P)\n", - " phi = H[opt_range + 2] \n", - " for i in range(opt_range):\n", - " phi[i] = abs(P(opt_range - i + 2)) ** 2 + abs(P[i]) ** 2" - ] - }, - { - "cell_type": "code", - "execution_count": 81, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "import scipy as sp\n", - "import scipy.signal as sig\n", - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "\n", - "\n", - "def optimfuncQMF(x):\n", - " \"\"\"Optimization function for a PQMF Filterbank\n", - " x: coefficients to optimize (first half of prototype h 
because of symmetry)\n", - " err: resulting total error\n", - " \"\"\"\n", - " K = ntaps * N \n", - " h = np.append(x, np.flipud(x))\n", - " cutoff = 0.15\n", - " \n", - "# breakpoint()\n", - " f, H_im = sig.freqz(h, worN=K)\n", - " H = np.abs(H_im) #only keeping the real part\n", - " \n", - " posfreq = np.square(H[0:K//N])\n", - " \n", - " #Negative frequencies are symmetric around 0:\n", - " negfreq = np.flipud(np.square(H[0:K//N]))\n", - " \n", - " #Sum of magnitude squared frequency responses should be closed to unity (or N)\n", - " unitycond = np.sum(np.abs(posfreq + negfreq - 2*(N*N)*np.ones(K//N)))/K\n", - " \n", - " #plt.plot(posfreq+negfreq)\n", - " \n", - " #High attenuation after the next subband:\n", - " att = np.sum(np.abs(H[int(cutoff*K//N):]))/K\n", - " \n", - " #Total (weighted) error:\n", - " err = unitycond + 100*att\n", - " return err" - ] - }, - { - "cell_type": "code", - "execution_count": 85, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "(32,)" - ] - }, - "execution_count": 85, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xmin.shape" - ] - }, - { - "cell_type": "code", - "execution_count": 86, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "8.684549400499243\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAEWCAYAAABIVsEJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3dd3xUVfr48c+TRgglhSQQEkLvHQMIoqCAYEVX17Wsoqvr6vbid7/6dVdd19113aJbv/74KnaxF0SFRQTpJbQQauikkUYSEtLz/P6YC44xIT035Xm/XvPK3Dvnzjz3zmSeOefce46oKsYYY0x9+bgdgDHGmLbJEogxxpgGsQRijDGmQSyBGGOMaRBLIMYYYxrEEogxxpgGsQRiWpyIrBKRe87z+LMi8uuWjKmtEJFYESkQEd8Gbn9URGY1dVytiYjsFpEZbsfREVgC6eCcL5Qi50spXUReFJGuLfj6d4rIWu91qnqfqv62GV6rn4ios68Fzr4/6PW4iMh/iUiSc0yOi8jvRSTAq8yLznPMq/LcTzvr7/Tarwqv1yoQkX9WE9MtIrK3yrrlNax7UFWPq2pXVa1oosPSpETkMREpq7Lfv2zG13tRRJ7wXqeqI1V1VXO9pvmSJRADcI2qdgXGAeOBh1yOp7mFOPt7C/CIiMx11v8duBe4A+gGXAHMAt6osv0BpwwAIuIH3AQcqlJug/Nlf/b2w2piWQ0ME5EIr+caC3Susm6KU7YteLPKfj/ldkCmeVgCMeeoajqwDE8iAUBELhSR9SKSKyI7vZsGnF/Zh0XktIgcEZHbnPWPicirXuXO/vL38349ERkOPAtMcX6p5jrrz/2qFJEZIpIsIr8QkQwRSRORu7yeo4eIfCQi+SKyRUSeqFqjOc/+bgB2A6NEZDDwfeA2Vd2gquWquhu4AbhKRKZ7bfoRME1EQp3luUACkF6X160SQwpwGLjEWTXBiemLKut8gC1Vj6XTHPhbEVnnvA//EZFwr+Nzu4gcE5FsEXnY+7VFpJOIPCMiqc7tGRHp5Dz2hYjc4Ny/yHnNq5zlmSKyoz77Wdtnog77Mc3rc3jC+ezdC9wG/NL5/HzklD3XTFfLPp73s2VqZwnEnCMiMXh+dR90lqOBj4EngDDgAeBdEYkQkS54frFfoardgKlAvb5UVHUvcB9f/lIPqaFoLyAYiAbuBv7l9eX9L6DQKTPfudVlX0VELgJGAtuBmUCyqm6uEuMJYCNwudfqYuBD4GZn+Q7g5bq8bg1W82WyuARYA6ytsm6jqpbVsP2twF1AJBCA531CREYA/wvcDvQGegAxXts9DFyI5wfDWGAS8CvnsS+AGc796Xw1yU13Hm9qNe1HX+BT4B9AhBPvDlVdALwGPOV8fq6p5jnPt49w/s+WqYUlEAPwgYicBk4AGcCjzvpvA5+o6ieqWqmqy4F44Ern8Uo8v947q2qa84u9OZQBj6tqmap+AhQAQ8XTkXwD8KiqnlHVPcBLdXi+LCAHeA54UFVXAOFAWg3l0/B8cXl7GbhDRELwfKF+UM12Fzq/mM/eLqzh+b1rGxfjSSBrqqw73xf2C6p6QFWLgLf4sgZ5I7BEVVeragnwazzv2Vm34TmuGaqaCfwGT7I5G9PZWtclwB+8lmtLIDdV2e/e5ylbl/24FfhMVRc5n4FsVa3rj5Xz7SPU8Nmq43N3eJZADMB1Ti1iBjAMz5cpQF/gm95fBsA0IEpVC4Fv4alBpInIxyIyrJniy1bVcq/lM0BXPF/qfngS31ne92sSrqqhqjpcVf/urMsComooH+U8fo6qrnVe/2E8X9JF1Wy3UVVDvG4ba3j+1cAY55fvhXhqZPuAKGfdNM7f/+HddHb22ICn1nHueDjvWbZX2d7AMa/lY846gA3AEBHpieeL/GWgj9OsNKmWeN6qst+p5ylbl/3ow9f7l+rqfPsINX+2TB1YAjHnqOoXwIvAn51VJ4BXqnwZdFHVJ53yy1R1Np4v2H3A/znbFQJBXk/d63wv24iQM4Fyvtos06eBz/U5ni/ISd4rRaQPni/1VdVs8yrwCxrXfIWqHgZS8XTgH1fVAuehDc66rnia0eorDa/jISJBeJqxzkrF8yPhrFhnHap6BtgK/ARI
VNVSYD3wc+CQqn4lodZBfT4TVZ0ABtbwWG2fnxr30TSeJRBT1TPAbBEZi+cL8hoRmSMiviIS6HQ8xohITxGZ5/SFlOCp+p9tHtkBXCKeaxaCOf9ZXSeBGPE6VbaunFNZ3wMeE5EgpwZ0Ry2b1fRcB/B06L8mnhMHfEVkJPAuni/Oz6rZ7O/AbJrm7Kg1eL6c13itW+usi6+hhlObd4CrnQ7oAOBxvvo/vwj4ldOnFQ48guc9P+sL4Id82Vy1qspyfdTnM1HVa8AsEblJRPzEc+LE2eatk8CA82xb2z6aRrAEYr7CaSd+GXjE6UCeB/wPnl/7J4D/wvO58cHz5ZaKpz9hOnC/8xzLgTfxnJm0FVhynpf8HM9ZR+kiUt9fteD5QgvG0/zxCp4vjJIGPM/Z53oOzxfMGSART5PHdapaWbWwquao6gptmkl1vsDTeex9BtkaZ12DEpTTJ/UD4HU8tZFTQLJXkSfw9GklALuAbc4675i6eb1+1eX6xFKfz0TVbY/j6Xf7BZ7P2g48HeIAzwMjnCbW6vqhattH0whiE0qZ9kRE/gj0UtU6nY1Vy3P9BrgeuERVcxsdnDHtjNVATJsmIsNEZIxzWu4kPKdivt8Uz62qjwIL8PSBGGOqsBqIadNEZCKeZqveeNrDFwBPNlGzkjHmPCyBGGOMaRBrwjLGGNMgfrUXaT/Cw8O1X79+bodhjDFtytatW7NUtepoDB0rgfTr14/4+Hi3wzDGmDZFRI5Vt96asIwxxjSIJRBjjDENYgnEGGNMg1gCMcYY0yCWQIwxxjSIJRBjjDENYgnEGGNMg1gCaSGf7krjUGZB7QWNMcZRWaks2nycnMJSt0OpliWQesg8XcI/ViRRWFJee2Ev6w5mcf9r23hscXNNGW6MaY9WJ2Xy0Hu7eODtndR33MLP9pxk2e702gs2giWQevj1B4n8ZfkB/rXyYJ23KSgp55fvJOAjsCYpi+PZZ5oxQmNMe7Jo83F8BD7fl8G721LqvF1KbhE/XLSNH72+vVlbPiyB1NHn+06ydHc6Ed068dyaI3VOBH/4ZC+peUX845YJ+Ai8seV4M0dqjGkPMvKL+WxvBndP68/EfqH85qPdpOcV12nbJz/dhyp08vfh1x8k1rv2UleWQOqgqLSCRz7czaDIrrx3/1T8fIXffbLnvNsUl1Xwt8+SeG3Tce6+qD9XjYnismE9eSs+mbKKr82OiqqycO0RLvvLKo5mFTbXrhhjWomH39/Fbc9tJON09Unh7a3JVFQqt0yK5akbx1JWUcm9r8SzP/30eZ9385EcPtqZyvemD+SXc4ex/lA2i3emNscuWAKpi398nkTyqSKeuG4UfcKC+MGlg1i2+ySrD2R+rWxeURkfbE9h7jOrefqzA1w1JooH5gwF4NbJfcgqKGHF3pNf2aasopL/eT+Rx5fs4XBmIb/+8Ku/GMqrSTjGmLbF+/941f4MXtt0nHUHs7nun+vYk5r/lbJnO8+nDOjBgIiu9A/vwtM3jeN4zhmu/PsaHv9oD/vS879WsyirqOTxJbuJCg7k/ukDuXVSLGNjgvntkj3kFZU1+T65OhqviMwF/gb4As+p6pNVHu8EvAxcAGQD31LVo85jD+GZvrQC+LGqLmuuOCsqlZviYrhwQA8A7p7Wn3e2JvP917ax4I4LmDownMSUPJ5atp/1B7Mor1QGhHfh5e9M4pIhX46APH1IJFHBgfzj84McyiykslLZd/I0246dIi2vmO/PGEh41048vmQPSxLSmD40gh8v2s7BjAL+87NLCAqo/e3al57PvrTTXDc+urkOhzEd3omcM6xJyuLWybF1Kv/vVQd5dtUh/nbLeKYM6MEjH+5mQEQX/vzNsXz/1W1c9+91jOsTwtiYYEK7BJCRX0LyqSJ+OXfYuee4YnQUkwf04E/L9vHC+iMsXHeEmNDO3DGlL3dd1J/yCuX+17aSmJLPv26dQOcAXwB+d/1o/uudBLIKSgju7N+kx8G1GQlFxBc4AMwGkoEtwC2quserzPeBMap6n4jcDFyvqt8SkRF4pjGdhGcq08+AIapacb7XjIuL04YO566qiMi55fS8Ym5/fhPHss8wZ1QvPk5IJaxLAN+M68Os4ZGM6xOKr4987XmeW3OYJz7ee245OqQzY/sEc+3Y3swdFUVFpXLdv9aRnl9McGd/jmQVUlGp/M+Vw7j3koG1xnn785tYfyib7Y/Mpntg035YjDEeD723i0Wbj7P0pxczrFf385bNO1PGtD9+TlFZBZWqTOofxsbDObx+z2SmDgonI7+Y//3iENuP57InLZ/Sck9NJTYsiOU/v4ROfr5fe86M/GJW7MtgSUIq6w5mM6RnV4IC/EhIzuV314/mlklfTWxVv7/qS0S2qmpc1fVu1kAmAQdV9TCAiLwBzAO8OxfmAY85998B/imeozAPeENVS4AjInLQeb4NzRVs1YPfKziQt++bwl0vbmFJQiq3X9iXX1w+tNYMf8/FA7hzaj8qVFGFQP+vfjh8fYTfXT+Kef9aR2l5Ja/ePZl/rzrIgtWHuf3Cfud+VVQn43Qx6w5mUamw4VA2c0b2avgOG2NqtPagp/n6wx2pDJt7/gTy/LojnC4p5937p/LsF4dYvuck14+PZuqgcAAiuwfy6DUjAU8zV3ml4usj+PlIjV/6kd0DuWVSLLdMimX5npM8tng3R7PO8K9bJ3DF6KivlW9M8jgfNxNINHDCazkZmFxTGVUtF5E8oIezfmOVbVu8zSYkKIA3751CZkEJ0SGd67ydn6/PeQ/8mJgQXr/nQmJCO9MnLAh/X+HGZzfw2qZj3HPxgHPlSsorOJhRwMjewQB8nJBGpYK/r7DuYJYlEGOawfHsM5zIKcLfV1i8I5X/unwoPj5CVkEJRaUV9AkLOlc2r6iMF9YdYc7InlzQN5T/9+0L+M+ek0wbHF7tc/v5+lBNheO8Zo/oycWDwzl1ppSo4Lp/DzWFdt+JLiL3iki8iMRnZn6907uxAvx86pU86mrKwB7nPohx/cK4aFAPnv3i8LmLGHPPlHL785u56u9rWZLgOcPiwx2pjIjqzrRB4axNymrymIwxsMapfXzvkoGk5Bax7fgpikor+OazG5j99Bf8x+vivefXHuF0cTk/njkYAB8fYe6oXnTt1LS/3QP9fVs8eYC7NZAUoI/XcoyzrroyySLiBwTj6Uyvy7YAqOoCYAF4+kCaJHIX/GTmEG76fxuY/qeV3DIplk92pXEip4i+PYL49QeJ9OoeyI4TuTx0xTD8fH1YuX8PKblFzZLcjOnI1iZlERUcyH0zBvLc2sN8sCOFj3elcSSrkEGRXfneq1v53iUD2Xkilw2Hs5kzsue5VoL2xs0ayBZgsIj0F5EA4GZgcZUyi4H5zv0bgc/V0+u/GLhZRDqJSH9gMLC5heJ2xaT+YSz67oWMiQnhH58fJKuglFfunsTz8+MoLK3gzhe2IALXjuvNxU71eG1S09e4jOnIKiqV9YeymTYonK6d/Jg1vCfvbUvhhXVHmT+lLx/9cBqzhvfk2S8OcTS7kAevGMZ
fbhrndtjNxrUaiNOn8UNgGZ7TeBeq6m4ReRyIV9XFwPPAK04neQ6eJINT7i08He7lwA9qOwOrPZgysAdTBvbgWHYhAX4+56qsD1w+hN9/so/J/cOICu5Mr+5KZLdOrD2Yzbcm1u00Q2NM7RJT8sgrKjvXhzFvXDRLEtLo1yOI/75iGJ0DfHn22xeQkJzLqOhg/H3bdy+Bq9eBqOonwCdV1j3idb8Y+GYN2/4O+F2zBthK9e3R5SvLd08bQGpuMZeP6Al4zriYNiicVQcyqaxUfKo5ndgYU39rD3r6Fi9yzqCaPiSCm+Ji+PaFfc9dp+XrI4yPDXUtxpbUvtNjB+HrIzx27chzpwUCTBscTk5hKTuTc12MzJj2Q1VZuS+D4VHdCe/aCfCcRPPUjWMZExPicnTusATSTs0YGklIkD8/eWNHjWPtGGPq7unlB4g/dopv2CgP51gCaafCugTwwp0TyTxdwp0Lt5Bf3PTj4BjTUby0/ih///wgN8XFcM/F/d0Op9WwBNKOjY8N5dnbL+DAydP86v1Et8Mxpk3acSKXxz7azewRPfn99aOb7arutsgSSDs3fUgEt0/py9LEdKuFGNMA725NppOfD09/axx+7fysqvqyo9EBXDu2N6UVlfxn98naCxtjzimvqOSTXWnMHN6zya8ebw8sgXQA4/qEEBPamY+8JpU5klVIcVm7v3TGmHo7mFFwbtK3DYezyS4s5ZoxvV2OqnWyBNIBiAjXjO3NuoNZ5BSWsu34KWb/9Qu++3I8lZVtdnQXY5rcqv0ZzPrrF/zirZ2oKkt2ptG1kx8zhkbUvnEHZAmkg7h6TBTllcpb8Sf40evb6eTnw5qkLBauO+J2aMa0CpmnS3jg7Z10CfBl8c5UXt98nE8T07h8RM+vTbtgPCyBdBAjorozIKILT366j5P5xbxyz2Rmj+jJH5fuIzElz+3wjHFVZaXywNs7OV1czjv3T2XKgB48/H4i+cXlXDPWmq9qYgmkgxCRc+24D8wZyoTYUP54wxjCugTwwNs7vza3sjEdyXvbU/jiQCa/umo4w6O688zN4wjrEkBIkP+5YUvM19lpBR3I3Rf3p194EPPGeq6kDesSwM9mDeHB93axKyWvww7HYMxbW04wKLIr376wLwA9uwfy6t2TKSgpJ8DPfmfXxI5MB9I90J/rx8d8ZXDFK0ZF4e8rfLgj9TxbGtN+peQWsfloDvPG9v7KRYIjendnUv8wFyNr/SyBdHDBQf7MGBrJkoRUKuyMLNMBLXFOb792nPV11JclEMO1Y3tzMr+EzUdy3A7FmBa3eGcqY/uEfG2aBFM7SyCGWcN7EuScupieV8z8hZt5Ysket8MyplnsSc3n8qe/4P3tyRzMKGB3aj7X2plWDWKd6IbOAb7MHtGTJQmpLN+TTlZBKauTMvnGhBhG9O7udnjGNKnff7KXAycL+NmbOxkQ3gURz3VSpv6sBmIAmDeuN6eLywkJCuDd+6fQPdCfp5btczssY5rUmqRM1h7M4n+uHMb3LhnA4axCpgzoQc/ugW6H1iZZDcQAcOnQSF68ayIT+4XRpZMf358xkD98uo8Nh7KZMrCH2+EZ02iVlcofl+4jOqQz86f2o5OfL3NG9aKXJY8Gc6UGIiJhIrJcRJKcv9VOICwi850ySSIy31kXJCIfi8g+EdktIk+2bPTtk4gwY2gkXZwRR+dP7UdUcCBPLt1n42WZduHjXWkkpuTz89lD6OTnGZpkQmwovUM6uxxZ2+VWE9aDwApVHQyscJa/QkTCgEeBycAk4FGvRPNnVR0GjAcuEpErWibsjiPQ35efzx7CzhO5vLj+qNvhGNMoWQUl/OajPQyP6s51NiVtk3ErgcwDXnLuvwRcV02ZOcByVc1R1VPAcmCuqp5R1ZUAqloKbANiWiDmDufGC2KYNTySJz/dx960fLfDMaZBVJVfvpNAfnEZf71pLL4+NqNgU3ErgfRU1TTnfjrQs5oy0cAJr+VkZ905IhICXIOnFlMtEblXROJFJD4zM7NxUXcwIsIfbxhDcJA/P1603eYPMW3SyxuO8fm+DB66YhjDo+yswqbUbAlERD4TkcRqbvO8y6lnFL96N7KLiB+wCPi7qh6uqZyqLlDVOFWNi4iwMf3rq0fXTvz1prEkZRSwYHWNh9mYVimnsJTff7KXGUMjuHNqP7fDaXea7SwsVZ1V02MiclJEolQ1TUSigIxqiqUAM7yWY4BVXssLgCRVfaYJwjXncfHgCKYNCuedrcn86LJBXxkvyJjWbElCKiXllfz33GH2uW0GbjVhLQbmO/fnAx9WU2YZcLmIhDqd55c76xCRJ4Bg4KctEKsBrh8fzfGcM2w7fsrtUIyps/e2pTCsVzdrumombiWQJ4HZIpIEzHKWEZE4EXkOQFVzgN8CW5zb46qaIyIxwMPACGCbiOwQkXvc2ImOZM6oXgT6+/DethS3QzGmTg5nFrDjRC7fmGBnXTUXVy4kVNVsYGY16+OBe7yWFwILq5RJBqwu2sK6dvJjzsheLElI45FrRpw7j96Y1uqD7Sn4CMwbZwmkudhQJqbOrh8fTV5RGSv32dlspnVTVd7fkcJFg8JtmJJmZAnE1Nm0QeGEd+3E+9uT3Q7FmPOKP3aKEzlFXG8XDTYrSyCmzvx8fbhmbBQr92dSUFLudjjG1GjJzlQC/X2YM7KX26G0a5ZATL1cOTqK0vJKVu6r7sxrY9xXWaks232S6UMizo3tZpqHJRBTLxNiQwnv2omlu9PdDsWYau1MziU9v5i5o6z20dwsgZh68fURLh/Zk5X7MmxoE9MqLU1Mx99XuGxYdSMkmaZkCcTU2xWjenGmtII1SVluh2LMV6gqS3enM3VgOMGd/d0Op92zBGLq7cIBPege6MfSRGvGMq3LvvTTHMs+Y81XLcQSiKk3f18fZo3oyWd7T1JWUel2OMac82liOiIwe4Q1X7UESyCmQa4YFUVeURnffm4TH+1MpbTcEolxz8GM0/zmo90sXHuEif3CCO/aye2QOgRLIKZBZg6L5OErh5OSW8SPFm3ngbd3uh2S6aC2Hz/FnGfW8OrGY8wYGsGT3xjtdkgdhiUQ0yA+PsJ3LxnA6v+6lDum9OXjXWlk5Be7HZbpgF7ZcIwgf1/WPXgZ/7x1AgMiurodUodhCcQ0io+PcNdF/amoVN7eakOcmJaVd6aMj3elMW98byK72ZhXLc0SiGm0/uFdmNw/jLfiT1BZWe/JJY1psA93plBSXsnNE2PdDqVDsgRimsTNk/pwLPsMGw9nux2K6SBUlUWbTzCyd3dGRQe7HU6HZAnENIkrRkXRPdCPRVtOuB2K6SASkvPYm5bPzZOs9uEWG2nMNIlAf1+uHx/Nq5uOU1BcxsWDI7hlUiydA2ziKdO0Nh/JYfHOFFbszSDQ34d543q7HVKHZQnENJmfzR6CiLD6QCYr9+/hWHYhv5k3yu2wTDtyNKuQmxdsINDfl6kDe3Db5L50D7QhS9xiCcQ0mZCgAB67diQAP3ljO+9tS+HBK4ZbLcQ0mTe2nEBE+PwXM+gVbG
dduc21PhARCROR5SKS5PwNraHcfKdMkojMr+bxxSKS2PwRm/q4dVIsp0vK+Sgh1e1QTDtRWl7JO1tPcNmwSEserYSbnegPAitUdTCwwln+ChEJAx4FJgOTgEe9E42IfAMoaJlwTX1M6h/GoMiuLNp83O1QTDuxfM9JsgpKuXWydZq3Fm4mkHnAS879l4DrqikzB1iuqjmqegpYDswFEJGuwM+BJ1ogVlNPIsItk2LZfjyXvWn5bodj2oHXNx8jOqQzlwyOcDsU43AzgfRU1TTnfjpQ3fCZ0YD3eaHJzjqA3wJ/Ac6c70VE5F4RiReR+MzMzEaGbOrjhgnRBPj58Pomq4WYxjmaVci6g9ncPLEPvj7idjjG0awJREQ+E5HEam7zvMupqgJ1voRZRMYBA1X1/drKquoCVY1T1biICPvl0pJCggK4Zkxv3txygs1HctwOx7RRpeWVPPTeLgJ8fbhpYh+3wzFemjWBqOosVR1Vze1D4KSIRAE4fzOqeYoUwPsTE+OsmwLEichRYC0wRERWNee+mIb59dXDiQntzL2vxHMkq9DtcEwbo6o8/P4uNhzO5o83jqZnd+s8b03cbMJaDJw9q2o+8GE1ZZYBl4tIqNN5fjmwTFX/V1V7q2o/YBpwQFVntEDMpp5CggJ44a6JCPCdF7eQX1zmdkimDVmw+jBvb03mxzMHc/34GLfDMVW4mUCeBGaLSBIwy1lGROJE5DkAVc3B09exxbk97qwzbUjfHl34920XcCSrkPe3pbgdjmkjissq+MfnB5k1PJKfzRrsdjimGq5dSKiq2cDMatbHA/d4LS8EFp7neY4CdrlzKzdlYA+G9OzKxwlpzJ/az+1wTBuw+kAmBSXl3DGlHyLWcd4a2WCKpsVcPaY3W47lkJ5nE0+Z2i1JSCM0yJ+pA3u4HYqpgSUQ02KuGhOFKnyyK632wqZDKyqt4LO9J5k7Kgo/X/uaaq3snTEtZmBEV4ZHdWeJDW9iarFqfwZnSiu4ZkyU26GY87AEYlrU1WOi2HY8l5TcIrdDMa3YkoQ0wrsGMKl/mNuhmPOwBGJa1NXOL8pPrRnL1OBMaTkr9p3kCmu+avXs3TEtqm+PLozs3Z2lieluh2JaqdUHsiguq+TK0dZ81dpZAjEtbuawSLYdP0XumVK3QzGt0Mp9GXQL9COuX7UzPJhWxBKIaXEzhkVSqbA6KcvtUEwro6qs3J/BJYMj8Lfmq1bP3iHT4sbGhBDWJYBV+6ob/sx0ZHvS8sk4XcKlwyLdDsXUgSUQ0+J8fYTpQyJYdSCTiso6D8JsOoCVzo+K6UNs5Oy2wBKIccWMoRHkFJaSkJzrdiimFVm5P5MxMcFEdOvkdiimDiyBGFdMHxKBj3i+MIwBOFVYyvbjp5gx1Jqv2gpLIMYVIUEBTIgNPddkYczqpEwqFS6z/o82wxKIcc2lwyLZlZLH1mM2Qn9HV1GpvLrxGOFdAxgTHex2OKaOLIEY19wxpS99wjrz0zd3cNommurQnv3iEFuOnuKhK4bjY3OetxmWQIxrugX688y3xpFyqohHF+92Oxzjkp0ncnl6+QGuHhPFNyZEux2OqQdLIMZVF/QN40eXDea9bSl8tuek2+GYFlZZqfzsrR1EduvE764bbRNHtTGWQIzrfnTZILoH+vH5futQ72iOZhdyOLOQH80cTHCQv9vhmHqqcwIRkaCmelERCROR5SKS5PytdtAbEZnvlEkSkfle6wNEZIGIHBCRfSJyQ1PFZlqen68Po6KD2Z2S53YopoUlpuYDntEJTNtTawIRkakisgfY5yyPFZF/N/J1HwRWqOpgYIWzXPV1w4BHgcnAJOBRr0TzMJChqkOAEcAXjYBN6I0AAB+3SURBVIzHuGxUdDB7009TVlHpdiimBe1OySPA14fBPbu6HYppgLrUQJ4G5gDZAKq6E7ikka87D3jJuf8ScF01ZeYAy1U1R1VPAcuBuc5j3wH+4MRTqao2Kl8bN7J3d0rLKzmYUeB2KKYFJabmMSyqmw2c2EbV6V1T1RNVVlU08nV7qurZGYXSgZ7VlIkGvF83GYgWkbN13d+KyDYReVtEqtseABG5V0TiRSQ+M9Ouem6tRjnn/u+yZqwOQ1VJTMlnZG+77qOtqksCOSEiUwEVEX8ReQDYW9tGIvKZiCRWc5vnXU5VFajPiHp+QAywXlUnABuAP9dUWFUXqGqcqsZFRNgAba1V/x5d6BLga/0gHUjyqSLyisoYFd3d7VBMA/nVocx9wN/w1AhSgP8AP6htI1WdVdNjInJSRKJUNU1EooDqTr9JAWZ4LccAq/A0pZ0B3nPWvw3cXetemFbNx0cY0bv7uU5V0/7tTvX8WBhlNZA2q9YaiKpmqeptqtpTVSNV9duqmt3I110MnD2raj7wYTVllgGXi0io03l+ObDMqbF8xJfJZSawp5HxmFZgZO9g9qTm2xDvHURiSj6+PsLQXt3cDsU0UK01EBF5gWqamFT1O4143SeBt0TkbuAYcJPzWnHAfap6j6rmiMhvgS3ONo+r6tlBk/4beEVEngEygbsaEYtpJUZFB/Pi+qMcySpgUKR9qbR3ial5DI7sSqC/r9uhmAaqSxPWEq/7gcD1QGpjXtSpwcysZn08cI/X8kJgYTXljtH4M8FMK3O2LTwxJZ/YsC5sOZrD1IE97OrkduRQZgF+PkJsWBCJKXk2dHsbV2sCUdV3vZdFZBGwttkiMh3WoIiudPLzYfmek7yw7gg7k/N47o44Zo2o8SQ708bc/eIWUvOK+dGlg8gqKGVUb+tAb8sacvL1YMB+Npgm5+frw7Co7ny8K43DWYUE+Pmw/lBju9tMa5GaW8TR7DN0D/TnL8sPAF+evm3aprr0gZzG0wcizt90PH0QxjS5GyZEE9LZnyeuG8Uv30lg0xFLIO3F2ffyxbsm8sWBTNYkZVoCaePq0oRlvZmmxdwxpR93TOkHwKT+Yfz98yTyisoI7mwD7bV1mw7n0C3Qj+FR3RkVHcwPLh3kdkimkWpMICIy4Xwbquq2pg/HmC9NHhCGroD4oznMHG79IG3dpiM5TOoXhq9NGNVunK8G8pfzPKbAZU0cizFfMSE2lABfHzYfsQTS1mXkF3Mkq5BbJvVxOxTThGpMIKp6aUsGYkxVgf6+jO0TzMYjNmd6W7fJeQ8n9+/hciSmKdXlOhBEZBSeYdMDz65T1ZebKyhjzprUP4xnvzhMQUk5XTvV6eNqWqFNR7LpEuDLSDttt12py3wgjwL/cG6XAk8B1zZzXMYAnl+sFZXK1mOn3A7FNMKmwzlc0C8MPxu2vV2py7t5I56rxtNV9S5gLGDn3pkWcUHfUHx9hI2H7XTetiqroISkjAIm9w9zOxTTxOqSQIpVtRIoF5HueEbOtZ4w0yK6dPJj2qBwXtt4jMzTJW6HYxrgHyuSEIGZw+364/amxgQiIv8SkWnAZmcSp/8DtgLb8MzBYUyLeOSaERSXVfL4Eht0ua3ZdvwUL288xvwp/RjWy/o/2pvz9UoeAP4E9AYKgUXAbKC7qia0QGzGADAwois/vGwQf
11+gG+Mj+bSYfZLti0oq6jkoXd30at7IA/MGep2OKYZ1FgDUdW/qeoUPKPeZuMZFXcpcL2IDG6h+IwB4L7pAxkc2ZVffZBIVoE1ZbUFf/ssif0nT/PbeaPsDLp2qi4TSh1T1T+q6njgFuA6YF+zR2aMlwA/H/78zbFkF5Ywf+Fm8ovL3A7JnMfLG47yz5UHuSkuxkZTbsfqchqvn4hcIyKvAZ8C+4FvNHtkxlQxtk8Iz377Ag6cPM09L8ZTVFrhdkimGh9sT+GRD3cza3hPfn/9aLfDMc3ofJ3os0VkIZAMfBf4GBioqjeranVT0BrT7GYMjeTpb41jy7Ec/rkyye1wTBUn84v55TsJXDggjH/eOt6u+2jnzvfuPgSsB4ar6rWq+rqqFrZQXMbU6OoxvZk7shevbjxOYUm52+EYLy+uP0p5ZSVP3TDWpqrtAM7XiX6Zqj6nqnYJsGl17rl4AHlFZbwdf8LtUIyjoKSc1zYeY+6oXsT2CHI7HNMCXKtfikiYiCwXkSTnb2gN5eY7ZZJEZL7X+ltEZJeIJIjIUhEJb7nojdsu6BvKBX1DeX7dESoq1e1wDPDWlhPkF5fz3YsHuB2KaSFuNlA+CKxQ1cHACmf5K0QkDHgUmAxMAh4VkVAR8QP+BlyqqmOABOCHLRa5aRW+e3F/TuQUsWx3utuhdHjlFZUsXHeEuL6hjI+t9regaYfcTCDzgJec+y/hOT24qjnAclXNcZrSlgNz8UyvK0AXERGgO5Da/CGb1mT2iF707RHEgtWHUbVaiJuW7k4n+VQR373Eah8diZsJpKeqpjn304HqThaPBrwbuZOBaFUtA+4HduFJHCOA56t7ERG5V0TiRSQ+MzOzyYI37vP1Ee6e1p8dJ3JttF4XqSr/t/ow/XoEMcsm/upQmjWBiMhnIpJYzW2edzn1/Hys809IEfHHk0DG4xlqJQHPWWNfo6oLVDVOVeMiIiIavjOmVbrxghhCgvxZsPqw26F0WFuOnmJnch53XzzApqvtYJp1fAFVnVXTYyJyUkSiVDVNRKLwjPJbVQoww2s5BlgFjHOe/5DzXG9RTR+Kaf+CAvy4/cK+/HPlQY5kFdI/vIvbIXU4/7fmMKFB/tw4IcbtUEwLc7MJazFw9qyq+UB1FycuAy53Os5DgcuddSnACBE5W6WYDext5nhNK3X7lL74+/jw/FqrhbS0Q5kFfLb3JLdf2JfOAXbdR0fjZgJ5EpgtIknALGcZEYkTkecAVDUH+C2wxbk97nSopwK/AVaLSAKeGsnvXdgH0wpEdgvk+vHRvB2fTE5hqdvhdCgL1x7B39eH26f0czsU4wLXhshU1Ww8Mx1WXR8P3OO1vBDPSMBVyz0LPNucMZq24+ZJfXgz/gQbDmVz1Zgot8PpMD7fl8HsET2J6NbJ7VCMC2ygGtMujOjdnQBfHxJSct0OpcPIOF1MWl4x4/uEuB2KcYklENMudPLzZVhUN3Yl57kdSoeRmOI51mNiLIF0VJZATLsxKjqYXSl5dlFhC9mVnI8IjOxtU9V2VJZATLsxJjqY08XlHMs+43YoHcKulFwGRnSli8022GFZAjHtxqjoYAASUqwZqyUkJOcx2jnmpmOyBGLajSE9uxHg53Oubd40n5P5xWScLrEE0sFZAjHtRoCfD8OjupOQbGdiNbezJyuMibEE0pFZAjHtyujo7iSm5FNpc4Q0q4SUPHzEc/q06bgsgZh2ZUx0CAUl5RzNttmXm1NiSh6DIrsSFGAd6B2ZJRDTrox2mlR2WT9Is1FVpwPdrv/o6CyBmHZlcGRXOvn5sP249YM0l9S8YrIKShgdbc1XHZ0lENOu+Pn6cPHgCD7YkUJBSbnb4bRLr248hghcMsTm1+noLIGYdueHlw0i90wZr2485nYo7U7umVJeXn+Uq0ZHMSCiq9vhGJdZAjHtzrg+IVw8OJzn1hymqLTC7XDalYXrjlJYWsEPLxvkdiimFbAEYtqlH88cTFZBKa9vPu52KO1GfnEZL6w7wpyRPRnWy/o/jCUQ005N7BfGhQPC+H9fHKK8otLtcNqF1zYe53RxOT+6bLDboZhWwhKIabfmT+lHxukSttkZWU3i08Q0JsSGnBtzzBhLIKbdmjY4HD8fYeX+DLdDafMyT5eQkJzHzOE93Q7FtCKuJBARCROR5SKS5PwNraHcUhHJFZElVdb3F5FNInJQRN4UkYCWidy0Jd0C/ZnYL4yV+yyBNNYqJwnPGGqn7povuVUDeRBYoaqDgRXOcnX+BNxezfo/Ak+r6iDgFHB3s0Rp2rxLh0WwL/00qblFbofSpq3an0lkt06MiLLOc/MltxLIPOAl5/5LwHXVFVLVFcBp73UiIsBlwDu1bW/MpUMjAc8XIMCTn+7jwXcT3AypTYg/msO1/1xLSm4RZRWVrE7K5NKhkXj+/YzxcCuB9FTVNOd+OlCfhtUeQK6qnr3MOBmIrqmwiNwrIvEiEp+ZmdmwaE2bNSiyKzGhnVm5P4NPdqXx7BeHeDP+BFkFJW6H1qq9vvk4Ccl5/PSN7Ww5ksPp4nIuHRbpdlimlWm2BCIin4lIYjW3ed7l1DOBdbONva2qC1Q1TlXjIiKs/bajEREuHRrJ2qQsHnw3gT5hnVGFz/acdDu0VqusopIVezPoE9aZLUdP8Yu3d+LvK1w0qIfboZlWptkSiKrOUtVR1dw+BE6KSBSA87c+vZzZQIiInB1HOgZIadroTXty6bAIisoqqFR49e7J9AnrzNLd6W6H1WptOpxDXlEZv7pqBNePjyYtr5iJ/cLoFujvdmimlXGrCWsxMN+5Px/4sK4bOjWWlcCNDdnedDxTBoQzITaEp24cQ98eXZgzohfrD2aTX1zmdmit0rLd6QT6+3DJ4AgenzeSCweEccukWLfDMq2QWwnkSWC2iCQBs5xlRCRORJ47W0hE1gBvAzNFJFlE5jgP/TfwcxE5iKdP5PkWjd60KZ0DfHnv+xdx5egoAOaO6kVpRaWd3luNykpl2e50ZgyJpHOAL90C/Xnj3ilcM7a326GZVsiV6cRUNRuYWc36eOAer+WLa9j+MDCp2QI07dqE2FDCu3biP7tPMm9cjedfdEg7knPJOF3CnFF2waCpnV2JbjocHx/h8pE9Wbk/g+IyG63X27LEdPx8hMuGWQIxtbMEYjqkOSN7caa0grVJWW6H0mqoepqvpgzsQXBn6zA3tbMEYjqkKQN60C3Qj2V2NtY5B04WcDT7DHNH9XI7FNNGWAIxHVKAnw8zh0Xy2d6TNty7Y2liOiIwe4Q1X5m6sQRiOqy5o3px6kwZm4/muB1Kq7BsdzoXxIYS2S3Q7VBMG2EJxHRYlwyJoJOfD8sSrRnrRM4Z9qTlM2ekNV+ZurMEYjqsoAA/pg+JYNnuk1RWNttoOm3C2b4gSyCmPiyBmA5tzshepOcXk5CS53YorlqamM7wqO7E9ghyOxTThlgCMR3azOGR+PkInyam1V64ncrIL2br8VPMtdqHqSdLIKZDCwkKYMbQSN7dmkxJece8qHDR5hOowrXjbLgSUz+WQEyHN39q
X7IKSvlkV8erhZRVVPLapmNMHxJB//Aubodj2hhLIKbDmzYonAERXXhx/TG3Q2lxSxPTyThdwp1T+7kdimmDLIGYDk9EmD+lHztP5LLjRK7b4bSolzccJTYsiOlDbLI1U3+WQIwBvjEhmi4Bvry8/qjbobSY3al5bDl6ijum9MXHx+Y6N/VnCcQYoFugPzdeEMNHCansSc13O5xmp6r8edl+Ovv78s0L+rgdjmmjLIEY4/jxzMGEBgXwo0XbOFNa7nY4zerF9UdZuT+TX84dSnCQjbxrGsYSiDGOHl078fS3xnE4q5DHP9rjdjjNJjEljz98so9ZwyOt89w0iiszEhrTWl00KJz7pw/k36sOkXyqiLh+ocwbF93mT3Etr6jk/e0pbDqSw6r9GYR28eepG8ciYn0fpuEsgRhTxc9mD6FClS/2Z/K3FUm8vuk4a//7MgL82m6FfcGawzy1dD9hXQKYEBvKT2YOJqxLgNthmTbOlf8IEQkTkeUikuT8Da2h3FIRyRWRJVXWvyYi+0UkUUQWiog14pom4+/rw0NXDGfpTy9h4fyJZJwuadMXGZZVVPLy+mNMGxTO1l/N4rn5cYyOCXY7LNMOuPWT6kFghaoOBlY4y9X5E3B7NetfA4YBo4HOwD3NEaQx04dEMDCiCwvXHUG1bY7Y+2liOun5xdw9rb81WZkm5VYCmQe85Nx/CbiuukKqugI4Xc36T9QBbAZimitQ07H5+Ah3XtSfhOQ8th475XY4DbJw7REGhHexiwVNk3MrgfRU1bNtAulAg+bQdJqubgeWnqfMvSISLyLxmZmZDXkZ08HdMCGa7oF+LFx3xO1Q6m3b8VPsOJHLnRf1s4sFTZNrtgQiIp85fRRVb/O8yzm1iIa2DfwbWK2qa2oqoKoLVDVOVeMiIuwXmKm/oAA/bpkcy9LEdJJPnXE7nHpZuPYI3QL9uGGCVdJN02u2BKKqs1R1VDW3D4GTIhIF4PzNqO/zi8ijQATw86aN3Jivu2NKP0SEVza0nQEXU3OL+DQxnVsmxdKlk51waZqeW01Yi4H5zv35wIf12VhE7gHmALeoamUTx2bM10SHdGbuqF4s2nycwpK2cZX6yxuOoarcMaWv26GYdsqtBPIkMFtEkoBZzjIiEiciz50tJCJrgLeBmSKSLCJznIeexdNvskFEdojIIy0bvumIvnNRf/KLy3lvW7LbodTqTGk5izYfZ+6oXsSE2jS1pnm4Uq9V1WxgZjXr4/E6JVdVL65he6uPmxY3ITaEsX1CeGHdUW6b3LpHsH1/ewp5RWV856L+bodi2rG2e2mtMS1MRPjORf04nFXI5/vq3W3XYsoqKnlh3VFGRwdzQd9qr9E1pklYAjGmHq4YFUVMaGfuf20rf/hkL6eLy9wO6StW7s9g7jOrOZhRwPemD7ALB02zsgRiTD0E+Pnw3vencv34aBasOcwVf1tDUWmF22EB8OwXh7jrhS1UKjw/P46rx/R2OyTTzlkCMaaeIrsF8tSNY1l450SSTxXxVvwJt0OisKSc/111iOlDIlj200uYObxB1+YaUy+WQIxpoEuHRnJB31D+b81hyisqUVUeW7ybxxbvbvbXTkzJ45p/rOVIViEAizYfJ6+ojJ/MGtymRw02bYt90oxphPumDyT5VBEf70rjlY3HeHH9Ud7YcpyS8uZt1vpoZyq7UvK475Wt5BWV8fzaI0zqH8aEWOs0Ny3HEogxjTBzWCSDI7vy1NL9/HbJHqJDOlNcVsmO47nnyvxr5UHWH8xq8GuUlFfwyIeJnMj5chiV9YeyiQoO5EDGaa771zrS8oq5f/rARu2LMfVlCcSYRvDxEe69ZAApuUX0DunMG/deiI/AukPZAKTkFvGnZft5bm3DB2LcevQUL284xgvrjgKQd6aMxNQ8bp4YywOXD+VIViHDenVjxlAb6820LLsgz5hGmjcumhM5Z7h2XG/6hAUxOjqYDYeyYPYQPknwDDq9/fgpVLVBp9VuO+4ZRv6TXWn86qrhbDicjSpMHdSDC5wmq4sGhdspu6bFWQ3EmEYK8PPh55cPZVBkNwCmDgpn+/FcCkvKWZKQCsCpM2UczW7YSL7bneaw9Pxi4o+dYsOhLIICfBkbE4KPj/CDSwcxrk9I0+yMMfVgCcSYJjZ1YA/KK5X3tiWzMzmPb0yIBjy1kPpSVbafyOXK0b3o5OfDkoRU1h/KZmK/MDvbyrjOPoHGNLG4vmH4+wp//s8BAH46cwhdO/mda4oqLCnnp29sZ+uxnK9tW1peyUPv7WLF3pMAHMs+Q05hKdMGRXDZsEg+2J5CUkYBUwf2aLkdMqYGlkCMaWKdA3wZHxtKXlEZ4/qEENsjiHF9Qs41RX2wI4UPdqRy78tbSc0t+sq2T3y8h0Wbj/O7T/Y6tQ9P0pnQN4Srx/Qmv9gzlPzUgeEtu1PGVMMSiDHN4GwN4eoxUQCMjw1hX/ppzpSW8/qm48SGBVFSXsl9r26luMxzzchb8Sd4ecMxRvbuzuHMQjYezmHbsVy6BPgyOLIblw2LJCjAl+6Bfozo3d21fTPmLDsLy5hmcO3Y3mw4lM28cZ7+j/GxIVRUKq9uPMbu1Hx+O28kvYI7892X45nzzGqCO/uzL+00Fw3qwf+7PY6pf1jB65uPcySrgLF9QvD1EToH+HLf9IFUquLbioeSNx2HJRBjmsGAiK68+b0p55bH9/Gcbvv08iQ6+/syb3w03QP9+eMNo1mamA7A1WOj+NVVI+jayY9vTIjhtU3HqFS+coHgj2cObtkdMeY8LIEY0wJCuwTQP7wLR7IK+VZcH7oH+gPwrYmxfGti7NfK3zY5lhfXHwU8tRdjWiPrAzGmhZxNBLdM/nrCqGpwz25M7BfqbGfjW5nWyZUaiIiEAW8C/YCjwE2q+rWT5EVkKXAhsFZVr67m8b8D31HVrs0asDFN4K6p/enfowtjY4LrVP5/rvRcdR7WJaCZIzOmYdyqgTwIrFDVwcAKZ7k6fwJur+4BEYkD7KeZaTNGxwTzo5mD6zzkyPjYUL4/Y1AzR2VMw7mVQOYBLzn3XwKuq66Qqq4ATlddLyK+eJLLL5srQGOMMefnVgLpqappzv10oL7Tp/0QWOz1HMYYY1pYs/WBiMhnQK9qHnrYe0FVVUS0Hs/bG/gmMKOO5e8F7gWIja2989IYY0zdNFsCUdVZNT0mIidFJEpV00QkCsiox1OPBwYBB5225CAROaiq1TYWq+oCYAFAXFxcnROVMcaY83OrCWsxMN+5Px/4sK4bqurHqtpLVfupaj/gTE3JwxhjTPNxK4E8CcwWkSRglrOMiMSJyHNnC4nIGuBtYKaIJIvIHFeiNcYY8zWuXAeiqtnAzGrWxwP3eC1fXIfnsmtAjDHGBXYlujHGmAYR1Y7TrywimcCxBm4eDmQ1YTgtzeJ3l8XvLou/cfqqakTVlR0qgTSGiMSrapzbcTSUxe8ui99dFn/zsCYsY4wxDWIJxBhjTINYAqm7BW4H0EgWv7ssfndZ/M3A+kCMMcY0iNVAjDHGNIglEGOMMQ1iCaQORGSuiOw
XkYMiUtPkV62CiPQRkZUiskdEdovIT5z1YSKyXESSnL+tejIuEfEVke0issRZ7i8im5z34E0RabXT9IlIiIi8IyL7RGSviExpS8dfRH7mfHYSRWSRiAS25uMvIgtFJENEEr3WVXu8xePvzn4kiMgE9yI/F2t18f/J+fwkiMj7IhLi9dhDTvz73R7eyRJILZzJq/4FXAGMAG4RkRHuRnVe5cAvVHUEnumAf+DEW9dZIFuLnwB7vZb/CDztDJx5Crjblajq5m/AUlUdBozFsx9t4viLSDTwYyBOVUcBvsDNtO7j/yIwt8q6mo73FcBg53Yv8L8tFOP5vMjX418OjFLVMcAB4CEA53/5ZmCks82/ne8oV1gCqd0k4KCqHlbVUuANPDMqtkqqmqaq25z7p/F8eUVTx1kgWwMRiQGuAp5zlgW4DHjHKdJq4xeRYOAS4HkAVS1V1Vza0PHHM0ZeZxHxA4KANFrx8VfV1UBOldU1He95wMvqsREIcaaUcE118avqf1S13FncCMQ49+cBb6hqiaoeAQ7i+Y5yhSWQ2kUDJ7yWk511rZ6I9MMzf8omGj8LZEt6Bs90xZXOcg8g1+sfqjW/B/2BTOAFpwnuORHpQhs5/qqaAvwZOI4nceQBW2k7x/+smo53W/x//g7wqXO/VcVvCaSdEpGuwLvAT1U13/sx9Zy73SrP3xaRq4EMVd3qdiwN5AdMAP5XVccDhVRprmrlxz8Uz6/c/kBvoAtfb15pU1rz8a6NiDyMp1n6NbdjqY4lkNqlAH28lmOcda2WiPjjSR6vqep7zuqTZ6vqDZgFsiVdBFwrIkfxNBdehqdPIcRpUoHW/R4kA8mquslZfgdPQmkrx38WcERVM1W1DHgPz3vSVo7/WTUd7zbz/ywidwJXA7fplxfstar4LYHUbgsw2DkLJQBPB9Zil2OqkdNf8DywV1X/6vVQg2eBbEmq+pCqxjizTd4MfK6qtwErgRudYq05/nTghIgMdVbNBPbQRo4/nqarC0UkyPksnY2/TRx/LzUd78XAHc7ZWBcCeV5NXa2GiMzF04x7raqe8XpoMXCziHQSkf54TgbY7EaMAKiq3Wq5AVfiORPiEPCw2/HUEus0PNX1BGCHc7sSTz/CCiAJ+AwIczvWOuzLDGCJc38Ann+Ug3hmqezkdnzniXscEO+8Bx8AoW3p+AO/AfYBicArQKfWfPyBRXj6a8rw1ADvrul4A4LnrMpDwC48Z5u1xvgP4unrOPs//KxX+Yed+PcDV7gZuw1lYowxpkGsCcsYY0yDWAIxxhjTIJZAjDHGNIglEGOMMQ1iCcQYY0yDWAIxppFE5GFn9NoEEdkhIpOb8bVWiUhccz2/MfXhV3sRY0xNRGQKnquFJ6hqiYiEA61mqHNjmpPVQIxpnCggS1VLAFQ1S1VTReQREdnizKmxwLmq+2wN4mkRiXfmCpkoIu8581Y84ZTp58wF8ZpT5h0RCar6wiJyuYhsEJFtIvK2M/6ZMS3GEogxjfMfoI+IHBCRf4vIdGf9P1V1onrm1OiMp5ZyVqmqxgHP4hli4wfAKOBOEenhlBkK/FtVhwP5wPe9X9Sp6fwKmKWqE/Bc+f7z5tlFY6pnCcSYRlDVAuACPJMTZQJvOoPgXerM4LcLz4CQI702OzuW2i5gt3rmcCkBDvPlQHknVHWdc/9VPEPUeLsQzwRn60RkB57xnvo26c4ZUwvrAzGmkVS1AlgFrHISxveAMXjGWTohIo8BgV6blDh/K73un10++z9ZdYyhqssCLFfVWxq9A8Y0kNVAjGkEERkqIoO9Vo3DM8gdQJbTL3Hj17esVazTQQ9wK7C2yuMbgYtEZJATRxcRGdKA1zGmwawGYkzjdAX+ISIheCb+OYinOSsXz2i26XimBKiv/Xjms1+IZzj1r8zdraqZTlPZIhHp5Kz+FZ5Ro41pETYarzGtjDMV8RKnA96YVsuasIwxxjSI1UCMMcY0iNVAjDHGNIglEGOMMQ1iCcQYY0yDWAIxxhjTIJZAjDHGNMj/B22bEVUzrx2PAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEWCAYAAACNJFuYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nOy9d5wcd33//3pvL7d7e10n3alasmS5YFs2NjYY3DAYMNWhBAgJIbSQQvmFEAIJIZhOiGkGvgEDMRhjSmyDK7Zxt2TZlmVJVtcVXW/b6+f3x2c+s5+Zndlyt3t70n2ej8c97m52duazszOfd39/iDEGhUKhUChkHM0egEKhUCiWHko4KBQKhaIEJRwUCoVCUYISDgqFQqEoQQkHhUKhUJSghINCoVAoSlDCQWELEd1PRO8t8/p3iejTizmm5Q4R/Z6I3l3H45X9jhXLFyUcThCI6AgRJYkoRkQjRPQjImpZxPP/BRE9JG9jjL2fMfa5BpxrLREx7bPGtM/+T9LrREQfJ6L92jU5RkT/SUQeaZ8face4xnTsr2vb/0L6XHnpXDEiut5mXPdr7z3LtP3X2vaX1/M6WMEYexVj7MfS2B+q9J5GUOk7Upz4KOFwYvFaxlgLgBcBOBvAJ5s8nkYT0T7v2wD8KxFdpW3/JoD3AXgXgBCAVwG4HMDPTe9/QdsHAEBELgDXAjho2u9RxliL9PPhMmMyH7MDwIUAxmv9cCcJ4jt6M4BPE9EVzR6Qoj4o4XACwhgbAXAnuJAAABDRBUT0CBHNENEzsharaZiHiChKRIeJ6B3a9s8S0U+l/YQ26JLPR0RbAHwXwIWaljijbf8REf2H9vfLiWiQiD5KRGNEdJyI3iMdo4OI/o+I5ojoSSL6j2q1XsbYowB2AzidiDYC+CCAdzDGHmWM5RhjuwG8CcDVRHSJ9Nb/A3AxEbVp/18F4FkAI9Wc14afAfgzInJq/78NwK8BZKTPej4RPap9F8eJ6HqTVXMlEe0jolki+jYRPSBcO8IaIKKvENG09n29Snrv/UT03jLficFNZLYuiOgKItqrnft6ACR/OCL6SyLao537TiJaU81FYYxtB/+O5HvS8lia5fd17T6ZI6JdRHS69tqPiLsr79bu1wfkMRDRS7T7Z1b7/RLTtfkcET2svfcuIurUXvMR0U+JaFL7Xp4koh7ttVYi+qH2XQ1p96b4fpctSjicgBBRH7i2fED7fxWA2wH8B4B2AB8D8Csi6iKiILim/SrGWAjASwA8Xcv5GGN7ALwfRQ07YrPrCgCtAFYB+CsA35Im5m8BiGv7vFv7qeazEhFdBGArgJ0ALgMwyBh7wjTGAQCPAbhS2pwC8FsAb9X+fxeAG6s5bxmGATwvncfqmHkA/wCgE9yquAxcoEGbrG4Bt/o6AOwD/05kXqxt7wTwJQA/JCLDJF7Dd6KjnftWAP+iHfsggIuk168B8M8A3gigC8CfANxU6bjaey8AcDqK92S5Y10J4GUANoHfL9cCmJQO9w4An9PG+DS4QAYRtYPf598Ev3ZfA3A7cetN8HYA7wHQDcAD/iwA/H5rBdCvvff9AJLaaz8CkANwCrhFfiWAZR+HUcLhxOI3RBQFMABgDMBntO1/DuAOxtgdjLECY+xuANsBvFp7vQCudfsZY8c1TbsRZAH8O2Msyxi7A0AMwKmaFvYmAJ9hjCUYY88D+HEVx5sAMAXgBwD+iTF2L/iEcdxm/+PgE5HMjQDeRUQRAJcA+I3F+y7QtEnxc0GFcYljbgZ3qzwqv8gY28EYe0yzao4A+J52boB/J7sZY7cyxnLgE53ZkjnKGPs+YywPfp16AfRUGFM1iHPfwhjLAviG6dzvB/AFxtgebWz/CeBFFayHCSJKAngUwLdRvL7ljpUFdwduBkDaPvJ3ejtj7EHGWBrAp8Cto34AVwPYzxj7iXZtbwKwF8Brpff+D2PsBcZYEsDNKFoyWXChcApjLK99R3Oa9fBqAH/PGIszxsYAfB1FhWLZooTDicXrNe3/5eAPVqe2fQ2At8gTHICLAfQyxuIA/gz8YT1ORLdrk1ojmNQmAkECQAv4hO0CF2oC+W87OhljbYyxLYyxb2rbJsAnSyt6tdd1GGMPaef/FIDbtEnDzGOMsYj081iFcd0K4FIAHwbwE/OLRLSJiG4jnjgwBz4xiu9qJaTPznjny0HTIUak1xPan/VIPrA6t/w9rAHwX9I9NAXudlpV5pid2tg+Cn5fuisdizF2H4Drwa3JMSK6gYjC0jHlMca0967Ufo6azn/UND5Z2In7D+Df050Afk5Ew0T0JSJya+N0gz8bYqzfA7c8ljVKOJyAMMYeADeFv6JtGgDwE9MEF2SMXaftfydj7ArwyXMvgO9r74sDCEiHXlHutAsY8ji42d4nbeuf57HuA9BPROfLGzXN8gIA91u856fgk9dCXUoA9An79wA+AAvhAOA74Nd5I2MsDO5eEW6h45Cug+Yu6is5QpVDsdhW7js9Dum6a+eWv4cBAH9juo/8jLFHyg6Ca+JfA3fjfbCaYzHGvskYOxfAaeDupY9Lh5TH2ALuKh3WfsxWzGoAQ+XGp50vyxj7N8bYaeBuvNeAuwQHAKTBFRExzjBjbGulY57sKOFw4vINAFcQT6v8KYDXEtEricipBd9eTkR9RNRDRNdosYc0uKunoB3jaQAvI6LVRNSK8tlPowD6SAqsVovmHrkVwGeJKKBZLu+q8Da7Y70AHoj9GfEgvJOItgL4FYBHANxj8bZvArgCwIPzOacN/wzgEs1tZCYEYA5ATPusH5Beux3AGUT0euKB/w+hvFAuh9V38jSAN2rX+RTw2I987q1E9Ebt3B8xnfu7AD6pXU8RqH1LDeO5DsAniMhX7lhEdB4RvVjT3OPgQqUgHefVRHSx9rk+B27ZDQC4A8AmIno7EbmI6M/AhcttlQZGRK8gojM0F+ccuJupoLmz7gLwVSIKE5GDiDaQMbFhWaKEwwkKY2wcXBP+V+3BEQHAcXBt6OPg368DwD+Ca11T4L7vD2jHuBvAL8AzeHag/EN2H3g2yggRTZTZz44PgwcER8C17ZvAhdV8+DB4HOKn4K6D58DdC69njBXMOzPGphhj92pulLrAGBvWXFZWfAw8MBoFt9J+Ib1vAsBbwAPNk+CT23bM71pYfSdfB8+cGgWPV/zM4tzXaefeCOBh6fVfA/giuOtlDvy66plSVXA7gGkAf13hWGHw6zIN/r1NAviydJz/BY+nTQE4FzymBsbYJLjG/1HtPZ8A8Brtc1ViBXgiwByAPQAeQNHqexd48Pp5bUy3wN51uWygOj4vCkXVENEXAaxgjC242peI/g3AGwC8jDE2s+DBLSJE5ACPObyDMfbHZo+n2RDRj8Cz0f6l2WNZ7ijL
QbEoENFmIjpTS009H9zd8et6HJsx9hkAN4DHHJY8mvsvQkReFOMRlYLgCsWi4qq8i0JRF0LgrqSV4C6Pr4LXINQFxphly4slyoXgrhPhyni9TRaVQtE0lFtJoVAoFCUot5JCoVAoSmiaW0nLS78RvPKTAbiBMfZfWon8LwCsBXAEwLWMselyx+rs7GRr165t6HgVCoXiZGPHjh0TjDFzVwEATXQrEVEveAXvU0QUAk+lfD2AvwAwxRi7jngL4DbG2P9X7ljbtm1j27dvb/iYFQqF4mSCiHYwxrZZvdY0t5LW4+cp7e8oeO7xKvB8fdF358fgAkOhUCgUi8iSiDkQ0VrwboiPA+iRmnCNwKbhGBG9j4i2E9H28fHl2kpfoVAoGkPThYPWO+VX4F0R5+TXtIpWS78XY+wGxtg2xti2ri5Ll5lCoVAo5klThYPWW+VXAH7GGLtV2zyqxSNEXGKsWeNTKBSK5UrThIPWEfKHAPZoHR0Fv0NxIZh3o46FUgqFQqGojmZWSF8E4J0AdhGRWJnsn8Gbgt1MRH8F3pTr2iaNT6FQKJYtTRMOWkdLsnn5ssUci0KhUCiMND0g3UwOjMXwtbtfwEP7J6DaiCgUCkWRZd14b8/xOVx/334UGHD1mb34+rUvgse1rOWlQqFQAFjmwuG1Z63EFaf14IcPHcaX79yHVr8b//mGM5o9LIVCoWg6y1o4AIDP7cSHXnEK5lJZfO+BQ7j6jF5cdEpn5TcqFArFSYzyoWj8w+Wb0Nfmx5f+sFfFHxQKxbJHCQcNn9uJv3nZejwzOIudAyfUSpMKhUJRd5RwkHjjOX0I+Vz40cNHmj0UhUKhaCpKOEgEvS68+dw+/P6544imss0ejkKhUDQNJRxMvPqMXmTzDA+8oDq9KhSK5YsSDibOWd2GjqAHd+0ebfZQFAqFomko4WDC6SBctqUbf9w7hkyu0OzhKBQKRVNQwsGCSzf3IJrOYdeQylpSKBTLEyUcLNi2tg0A8OSR6SaPRKFQKJqDEg4WdLZ4sb4ziO1Hppo9FIVCoWgKSjjYsG1tG3YcnUahoKqlFQrF8kMJBxu2rWnHdCKLQxOxZg9FoVAoFh0lHGw4Z00EAPD0wGyTR6JQKBSLjxIONqztCMLjcmDfyFyzh6JQKBSLjhIONricDmzsbsHekWizh6JQKBSLjhIOZTh1RQj7lHBQKBTLECUcyrB5RQhj0TSm45lmD0WhUCgWFSUcynDqijAAKNeSQqFYdijhUIbNK0IAoILSCoVi2aGEQxm6Q16EvC4cnog3eygKhUKxqCjhUAYiQn97AMemEs0eikKhUCwqSjhUYLUSDgqFYhmihEMF+tv9GJxOqh5LCoViWaGEQwVWtweQzhUwHks3eygKhUKxaCxZ4UBEVxHRPiI6QET/1Kxx9LcHAAADyrWkUCiWEUtSOBCRE8C3ALwKwGkA3kZEpzVjLEI4qLiDQqFYTixJ4QDgfAAHGGOHGGMZAD8HcE0zBrIq4geREg4KhWJ5sVSFwyoAA9L/g9o2HSJ6HxFtJ6Lt4+PjDRuIz+3EirBPCQeFQrGsWKrCoSKMsRsYY9sYY9u6uroaeq5VET+Oz6Qaeg6FQqFYSixV4TAEoF/6v0/b1hR6Wn0YnVPCQaFQLB+WqnB4EsBGIlpHRB4AbwXwu2YNpiekhINCoVheLEnhwBjLAfgwgDsB7AFwM2Nsd7PGs6LVi3gmj2gq26whKBQKxaLiavYA7GCM3QHgjmaPAwB6wj4AwOhcCiGfu8mjUSgUisazJC2HpYYQDiOzqkpaoVAsD5RwqIKukBcAMBlXwkGhUCwPlHCogs4gFw4TMbVcqEKhWB4o4VAFYb8LLgdhUjXfUygUywQlHKqAiNDR4sGkshwUCsUyQQmHKukIelXMQaFQLBuUcKiSjhaPijkoFIplgxIOVdLZ4sWEijkoFIplghIOVRIJuDGbUBXSCoVieaCEQ5W0+t2IpnPI5QvNHopCoVA0HCUcqiTi520z5lK5Jo9EoVAoGo8SDlXSGuDCYSahgtIKheLkRwmHKon4PQCA2aSKOygUipMfJRyqJKy5lWaUcFAoFMsAJRyqJKK5lVTGkkKhWA4o4VAlrZrloNxKCoViOaCEQ5Uo4aBQKJYTSjhUidvpgMflQDytUlkVCsXJjxIONdDidSGmhINCoVgGKOFQA0GvU1kOCoViWaCEQw0EPS7E0vlmD0OhUCgajhIONdDidSGRUZaDQqE4+VHCoQaCXpdyKykUimWBEg41EPQ6VUBaoVAsC5RwqIGgx4W4ijkoFIplgBIONaDcSgqFYrmghEMNtHhdiGdyYIw1eygKhULRUJRwqIGg14UCA1JZtRqcQqE4uWmKcCCiLxPRXiJ6loh+TUQR6bVPEtEBItpHRK9sxvjsCHqdAIC4SmdVKBQnOc2yHO4GcDpj7EwALwD4JAAQ0WkA3gpgK4CrAHybiJxNGmMJHie/XJmcshwUCsXJTVOEA2PsLsaYUL8fA9Cn/X0NgJ8zxtKMscMADgA4vxljtMLjUsJBoVAsD5ZCzOEvAfxe+3sVgAHptUFtWwlE9D4i2k5E28fHxxs8RI4uHPJKOCgUipObhgkHIrqHiJ6z+LlG2udTAHIAflbr8RljNzDGtjHGtnV1ddVz6LYIt1JaBaQVipOWe54fxY2PHkEqu7xrmlyNOjBj7PJyrxPRXwB4DYDLWDE3dAhAv7Rbn7ZtSVC0HJb3TaNQnKzc9MQxfPLWXQCAB18Yx/fftQ1E1ORRNYdmZStdBeATAF7HGEtIL/0OwFuJyEtE6wBsBPBEM8ZohRAOaRVzUChOOpKZPL70h7148bp2fOzKTbhnzxgeOjDR7GE1jWbFHK4HEAJwNxE9TUTfBQDG2G4ANwN4HsAfAHyIMbZk1HSvCkgrFCct9+wZxXQii49cthHve9kGdAQ9+PmTA5XfeJLSMLdSORhjp5R57fMAPr+Iw6kaj5Nn1S4n4ZAvMDDG4HIuhdwFhaJx/OG5EXSHvLhgfQecDsLVZ/biF08OIJnJw+9ZMhn1i4Z64mtguWUrpbJ5vP37j+GCL9yLZwdnmj0chaJhMMbw6KFJXLyxE04HjzFcurkb6VwBO45ON3l0zUEJhxpYbnUOv316CI8fnsJELIOv3vVCs4dTNQ8fmMBf/ehJ7B6ebfZQFCcIB8ZimIpncMH6Dn3beWvb4XIQHjm4POMOSjjUwHKLOfxm5zDWdwbxkcs24oEXxjEZSzd7SBVJZvJ4/0934N69Y/jbm3aiUFBNEhWV2TXEFYmz+/VOPgh6Xdi6MoynB5an1ayEQw0sJ7dSRjOnLz+tBy/b2AkA2H4CmNcPvDCGaCqHa7f14dB4HI8emmz2kBQnAHuOz8HjcmBdZ9CwfeuqVjw3NFv3TsyMsSXf3VkJhxpYTm6lvSNzyOQLeFF/BGf0tcLjcmD7kamGnzeTK+Cvb9yOq77x4LzWznjghXGEfS58+jWnwe0kPPjC4lTPK05s9hyP4tSeUEnixekrWzGXymFgKlm3c82lsnjDtx/B6Z+5E/ftHa3bceu
NEg41oFdILwPh8MwgN7PP7GuF1+XEpp4W7BuNNfy89+wZxd3Pj2LvSBQ3Pnq05vc/fzyKrStbEfK5cfbqNjy8TP3Fito4OB7Dxu6Wku2be0MAgBdGo3U713fuP4hnBmfgdBA+/stnl2wlthIONSB3ZU1l80veLFwIh8ZjCHicWBXxAwDWdbbg8ETjhcNtzw6jJ+zFGata8ce9YzW9t1Bg2D8axakr+AO9bU0b9h6PIp2r38M3NJPENd96GB/82Q7kloF7cTmQyuYxMpfC6o5AyWsbOrnAOFSnez+XL+AXTw7gytN68N13novJeAa3PXu8LseuN0o41IDDQXA7Cb96ahCbP/0H3LJjsNlDahjHJhNY3R7QWwes6wxiaDpZ14nWil1Ds9i2th3nrW3Hs0MzyNYwAQ9OJ5HI5LFZEw5besPIFRj219Hi+fztz+OZgRncsWsEv9i+fAukTiaGZpJgDFjdXiocWgNudAQ9ODQer8u5nh2axVQ8g9edtQoXru/AylYf7to9Updj1xslHGrE43RgcJr7H3cPzzV5NI3j6FQCayRNan1nEAUGDEwlyrxrYcwmsxiYSmLryjDOWRNBKlvAvpHqzfnBaT42oQFu6Q0D4MHGehBNZXHP82P4y4vW4dSeEG5fohqfojaOafe0lXAAgPVdwboJh0cP8gSJCzd0gIjw8s3deOjAxJKMYyrhUCMiKA0Ax2frF6RaShQKDMemEljTUczcWNXG3UvHZ1MNO+9ebRLf0hvGes2cP1aDMBqZ42NbEfYB4NaOx+nA/rH6WA4P7Z9AJl/AVaevwCs2d+OJw1OIzSNorlhaDFYQDqvbg7risVB2HpvGKd0taA96AAAv2dCBRCaPvSNLT9FUwqFGjMKBT0bxdO6kqqKcSmSQyRX0eAMAdLV4AQDj0WKtw23PDuOlX7oPX71rX13iL8IiW9sR1IVRLZbK6BwfW48mHJwOQn+7H0cn66P17RyYgcflwIv6Izh/XRtyBaYLtOXC0EwSv9w+cFLVj4zOpeF0EDq0e9zMqogPI3OpmlycduwdieoWLQCcvboNAJZkLYUSDjUiC4fhGS4c/u7nT+NN33mkbpNQs5nQit06pYelM2QUDtFUFp+8dRcGppL47/sO4I5dC/ebCkust9WHVr8bYZ9LFxhW/PbpIVz61fsxPMP3GZ1LIeR1Iegttgxb2xHE0cn6aH3PDMxgS28YHpcDm3p4XGNfHbNYljrpXB6v+++H8PFbnsUPHjrU7OHUjfFoGh1Bj942w8zKiB8Fxu+vhRBNZTE4ndRjYgCwstWHrpAXTx9TwuGExyPlQU/E0oilc7hnD89VvvWpJbP0xIKYjGUAAJ0tHn1b0OOE3+3UhcOjBycRTeXws/e+GFt6w/jiH/YuOHtneDaF9qAHPjdvctbfHsCAjTmfLzD83c+fxqHxOH76GE95HZ1LoTts1P7WdnLhsFDLhjGG3cNzOGMV1/pWRfwIepw1xUROdB4/NIXJOL83bt5+8iRjjMfS6ApZWw1A0aUqlMH5ItybQrEAACLClt7wklQylHCokVa/2/C/nP/83NDJ0ctHWA6ymU1E6Ap5Ma699sjBSfjcDmxb24a/v3wjjk0lcPuuhQVoj88k0dvq0//vbfXjuM0DeXC8GEf4035eyzAWTesuJcHq9gCS2bw+7vkyHuWKwMZu/mATEU7pCRnGcbJz394x+NwOfPSKTTgwFtMtthOd8Wh54bBSc68OzSzMAhUu0rWmlNlN3S04MBZDfom56pRwqBExYQqt+sgEdyX53U5bLfdEQ1gHXSYfbFfIq7+2e3gWZ66KwOty4ootPdjY3YJv//HggnzRx2dT6G0txjnag27MJDOW+z6rFem9ZEMHjkzGwRjDXDJbIryFsBmdXZhwOKx9z2ul9gqrIr6GBuiXGs8Pz+H0la245FS+LO/J0ql3PJouuddlRIKDiGnNF+EiFZaIYNOKENK5Qk3JF4uBEg41IiYf4Y8/ovmzz13ThsHp5AlZGHfbs8P4yaNH9Il9Mp6B20kI+43LfbQHPbrL6dhUQk8ZdTgIH3zFBuwbjeLeGgvXZCbjGXSFiq6sSMCD6UTW8pruPT4Hn9uBV5zajWgqh+lEFtFUDiGfccwrNOGw0MyyI1o8aZ2UwbUi7MfIbKou33kuX8CPHzmypBMb9o9FsbGnBeu7RGHYiR9jKxQYJiq4lYJeF/xuJyai1sIhly/gy3fuxX/ds7/svTA4nURH0IOAx3iPbtCu52IUmdaCEg414tf84SIVTVgO56xpQyKTx3Qi27SxzYedx6bx4f/diU//djdueYr7kSeiaXQEvSVr54a8LsTSOaSyeYzOpQ2pf689cyX62vy4/o8H5j1ZziWzCEuafyTg1qrRS2MZI3PcyljfxSfrI5NxxNI5tHiNlkNR61uYhn9kMgG3k7AyIru9fEhk8ojWIZ31+j8ewGd+txtv+s4jGFqC7prJWBrTiSxO6Q6hxetCd8iLw1Lufyqbx83bB3QL60QhlskhV2D682xHZ8iju1vN3Lx9EN/640F8/Z4XyrpWB6cT6DNZDQDQ3y4y85bW966EQ4343PySubXA9JHJODwuB7au5IFKkQ9dKLCGtVcoFBj+uG+sLrnXP3v8GEJeFzpbvPi1FlCfS5W6ZwCuQcUzOd08loWDy+nA+y/ZgGcGZvRCn1pIZfNI5woI+4rnbQvwB3Y6UepaGtP8xP3aGAamElw4mCyHjhYvXA7SayBkhmaSiKaqE+bDM0msaPUZGrMJq2Rkga6lrGY1bF4RgstBeoB9KSGshA2aMF7XGTQIgi/+YS8+ccuz+PD/PnVCWc9zSf79my1OM10tXkzErF2ct+wYwMbuFvSEvfi/Z4ZtjzE8k9TjF+Zj+9yOhhaYzgclHGpEWA5CqT46mUB3yKtrHjOJLEbnUrjwunvx7v95oiFj+Ma9+/Ge/3kSf/6Dx+fVuVSQyRVw1+4RXLG1B687ayV2DkyjUGCIp/MIekuXRWzxuRBL5fTYilkLevO5fegOefGt+w9Udf4DY1F84fd7kMsX9IfUYDlof08nMmCM4Z7nR5HI8M87Hk2jO+TV9xG+/7DpIXc6CN0hb0ls4N49o3jpF+/D277/WFUtQUZmU+gJGYPdvbrLih97NpHFJ255Bk8crq177WOHJjGdyOIfr9iEs1dH8FgVbcYZYzg0Hlu0egNheQmB2N8e0C2cbL6A3+zkisXu4Tk8eaT5rrHr79uPP//B4xUTBqIpfj+FfKXKkExni9dQ41N8fxY7B2Zw9Zm9uPK0FXjwhQnbwPJkPGNIDxcQEfra7DPzkpk87nl+FDMWSlIjUcKhRkTVsNCaZ5NZtHhduuYRTeVw754xjM6l8fCBybovkJPJFXDjo0cAcFfHbc/aayqVePjABOZSOVx9Ri/WdwWRyhZwfC6FWDpnqBUQtHhdyBUYRrXJ0Fw05HM78dcvXY+HD0xaBivNguyTt+7C9x44hJ8/OYA5TYNvNbiVuMCdTWTxy+2DeO+N2/H9Bw8DAMbmUugO+XRhIjJnWizG3dPqK3
Erffv+gygw4LmhOdy7p3KcZCyaRk+rUTgIhWBaS+/8wu/34Obtg/jAT3fU1IPqycNTcBBw0Smd2La2Hc8NzSKZKf/+nzx2FJd+9QH8+23PV32ehSAmxm5NQHa08PgTF1JxTCey+PdrtoIITV857f59Y/jKXS/goQMT+PRvnjO8NhXP4M3feQT/8IunARSFQ7iScAh5Ld1Ku4ZmwRjwov4IzuxrRTKbt3StZfMFzCSy6Gixdl/1t/kt3UqFAsN7b3wS771xO9747UcWtSJfCYcaeeM5q/C9d56Lv7p4nb7N63Lomkc0lTWktz50oL4Pyt6ROcwksrj+7WdjfVcQv95ZubYiX2D4+C+fwVXfeBBPSmsy3L7rOEI+Fy7e2Kn77g+Px5HI5BD0lE6yQW2R9WFNOFi5nt56fj/cTirxvX7h93uw9TN34iEt7TSVzeOZAZ5xdPfzo5gVloOk+bcFheWQxa+0eMizgzOIpXOIZ/LoDnvhdTngdlJROFi4B3pbfQbXz8hsCjuOTuNjV25C2OfCfRWC6IwxjMym9PiFQHz+uRQPmv9p/wQcxDXEWoqadg7MYPOKMIJeF/JbofoAACAASURBVM7qiyCbZ9g/Zp/3zhjD9fdx6+wnjx3Vr91COKplfNkxFk3D7STdUusMepHJFxBN5/SxnrumDVtXhquyfBrJzx4/hq6QF3932UY8cnASY9Hid3/bs8PYfnQav945hGOTiardSp0tXkwlMiVWwS69tX2kbC8voUDYVWH3tQUs3cQPH5zAwwcm8YpTu3BoIo6bn1y8Zo9KONQIEeGVW1cYNGuPy6FPatFUDntH5nD6qjCIUHXDLsYYbnjwIL557/6y+4kUzrP6Injl1hXYfmS6omvplh0D+OWOQewdieIDP92B2US26FI6rQdel1PPmDg0EdPcShbCQdt2XJuIrR6okM+N89a24/69xUV2svkC/ufhIwCAX+7gN/fgdEJfUW9gKoG5JP8MBsvBr7nqkhndhbH96LSeNdLZwoPmYZ9bj4NYuQd6wkbhsP0oF5Av29SFi07prDiZzaVySGbzJcJBWC2zCV75OjSTxD9cvglEqHoFOsYYnhmYwVna8pRrO7lFWi6t8dBEHGPRNN5ybh/yBYb7980/QwzgLrZLvnw/vnLXPtt9xubS6GzxwqFVEQsNeDKWwYGxGIh41s1ZfRE8PzzXtLhDMpPH/fvG8LqzVuKyLd0AYIiB3bl7RF/u9969o4imqxMOEb8bjKEkRnVgLIYuza28sacFLgdZFkaKeEWnTeC7v92PuVSuRND/ascgIgE3vvvOc3H26ghuXsROwEo4zJOAp+iT97qcCHpcIOI3z+GJOLasCGNF2Gfb/uGh/RN6phMAPHlkGv95x1587e4XMFYms+a5oVm0Bdzoa/PjJRs6kCswPFFhhbZf7RjiXUQ/cjGm4hl88c69uHfPKOZSObz2zJUAeFDMQcVirxaLmIN4gIZnkwh4nHpQ3swF6zuwbzSqm8C7hmaRyRXgczt0y0FMfuevbcfgdFIPOssxBxH8T2byGJlNwekgzCazunkvxhP2u8u6lVaEfYhn8vqDvePoNPxuJ7b0hnFabxiD08my5vq4pnmaq6/dTgcCHifmUlndt33hhg6c2hPCM1X2ypmIZTCXymFTDxfO/W1cOJRr+SFW5PubSzYg6HFiZw1WynNDsyUB7+8+cBAA8P0/Hbb1l4/HeIxHIDTgyVgaB8fj6Gvzw+d2YlNPCHOpHMZs0j4bzfajU8jmGV66sRNbV7aixevCU1J68N7jUbzxnFVoC7hxYCxWdCtZWMEyQmkxT97HphJYo7mYvS4nesI+y2yzybim0NikzIrvXQ5KC2v00lO74XU58arTV2DvSHTRig8rCgciWkNEndrfFxDRx4joDY0f2tLG55KFgwMOB6HF68JcKoeZRBbtQQ/62vyWpuJELI0//+HjePlX7tfNTaHNAjDUCjw7OGMQIkcnE1jf1QIiwrY17XA6qOzyndPxDLYfncIrt/Zg68pWvOeidfjfx4/hwzftxKqIHy/bxAuaHA5C2O/GTCKLuE3MQWwbnkmV9dEK83qf1mlSmN5vPW81JuMZxNI5HNMmv4s3diKTL+jtA1oNwoFf44GpBHIFhjNWtfL/tWsqXF8hH7/u4m8zIogq4g57j0exuTcEt9OhLwy0v0z7gqk4nxCs0h3DPjdmk1k9KN0b8WNDd0vVNQBCqAjLLahljpXLXDk8kYDHydc73tgTqrqj52wyi9f890P4l988h0c0d2ehwNuCtHhdyOQKej2HGXMVcYd2LSZiGYxHiy43vedUk9qKPH5oCk4H4by1/NlY1xnEYe1em45nMBnPYH1nC1a1+TE4nZQC0uUth3LCQV4kaGXEZzl5i/qgDlvLgR9Dni9eGI1hMp7BBRs6AACvOJVbQkLBajRlhQMRfRrAfQAeI6L/APANAJ0APkJE31iE8S1ZHA7SM5dEM76wz42JWJqnZPrd6G8LWFoOv5f88TsHuFbz1NEZrOsMIuRz6Z0+ZxIZvO76h3HZ1x7QzfSRuZQ+2fk9TmzpDZXVHJ8ZnEGBARdu6AQA/MMVm3BKdwsKjOFfrt5iaDbW6ndjPJpGrsAqCIekZbxBsEVbWvH543yCGJxOwOty4Nw1vAPlwFQCx6aS8LudeJHmThGTiaz5iz5WYqI9s08TDlrgzq9Zb7KgEt+JjJi4xAR+cDymT8ZCOJRbBnJKE+AitVam1a8Jh5kkHAR0h7xY3xnEwFSiqh79wu0oYj4AdzHIbqW7do/gDumeGZhKYFWbH04HYfOKEPaNRKty48jus19pactHpxJIZPJ487l9AHgVNMCvx1/fuF1fjS+ayhqus8i6mYilMR3P6tfmFG2pzWa1Fdk1NIuN3S36vcpTbvlYxGpuG7qD6IvwbKu5ZBYelwNeV+l9I9MaKBUO+gpyUkp3b6vfsmpeWMZW9xBQtBzk+WLnMT43nLe2HQC/tq1+tz5nNJpKlsPbAGwBsA3ARwBcyhj7JwBXALi8wWNb8ojJSfgwQz6XblKG/W6sjPhxfDZZkm749MAsWrzcDbVrkD+MhydiOLUnhFURv36D/PZpnomULzA8dWwajDGeKy1lzZyzug3PDMzYugOe1wTNaVodRovXhT/83Uvx6D9dhled0WvYN+J3Y1irJA56LFJZtQeOCz97TWtVxI+Ax4mDWqOxoZkkVkX8+uJBx6YSGItyIRfRHjqhWcmuKoeD4HEW8783r+CfQfwv0m3lsbicpZ01e6T2B9FUFmPRtC4cVkX8cBAwVKb7q0ghbLOyHPwuzCVzOD6bQlfIC7em0RdYdWtRHNXqZFa2GtujC4H03NAs3veTHfjgz57S3WIDUjHVxp4QphNZvSFeOR47xPthvXhdO/aN8vtCWHevPasXTslf/uudQ7j7+VF87e4XAKDEmhTKQTSVw2Q8o8cgOls88LudlkqRKJQ70qBCOd4ccRanaxYmwNudDE0nkckVdEG8TrccEphL5UrSn62wshyOz6bAGA8mC3ojPL5lfuZjmoVil
TAB8Pso4HEamvsJi064rYgIZ/VHanIjLoRKwiHFGMswxmYAHGSMJQCAMZYDsLhJt0sQoaUKrSPkc+mTTNjnQlvQgwIrpssJjk3FcdrKMNZ1BvHcMHe5jEfT6Al70ddWzB8/IC1Ss28khplEFulcASukieTs1RHEM3nb7Jbnh+fQ1+Y3aPoup0O3PmTCfrc+frtUVn3fMm4lIkKvlD46NJ3Eqja/wa86l8oh7Hfr55mKZ+B0UEnbZK/LgRntgdQrSc1uJakq2knlhEOqRFN3OR3oDvkwVKbjpqh6by9nOUh9oUS687GpypPg8GwKva0+PdALcPeVEA6PSzUTj2iB1WNTCd0NIYSEXYNCmd1DvDfSGatasX+UN3oTWu66zhZ0tnj070y08RAN4cxJCkIhSmRymE5kdJcbz9m3dqde93teKPeh/32qIfUZ47E0JmIZvSAVANa0B1BgXEERQeGesBerIn6euj2b1JW8clgJBxH7kmMxK1v9yOQLJcI6ls5pmXXWU654ZuQ2L7uHZ3Fab9hwb5zV14oXRqNIZRu7XC9QWThEiOiNRPQmAK3a3/r/Cz05EX2UiJgU0yAi+iYRHSCiZ4nonIWeo5EIzdWjWw5uPRAX9rv1tD9z87gjkzyItbo9gJHZFFLZPOZSOXSHfdqDxXs0HZ6I44xVrfC4HDg6GS/6taWJ/ex+7qp56qi1NnFwPG5oEVyOVr9bv6mtArvyjV3OrcTHWDSvh2aS6GvzIxJww+UgTMYzvFWGz6WfZyaRMbRDF3jdTsxok3OfLlyMbiX54XZY9OT3e5wI+1wYm0uV+PgB7icu13tpOpGB1+WwnETCmnCQ+/OIyWIiWll/GplNlmRB8Z5SvIbgqaPT6A554XE68NTRaaSyecwksrr1KCyOalpuiNX95EZv41G+0E3E70ZnC8/lZ4zh2cEZhLwuJLNc8cjkCwa/vHCrjs6lkC8wg7tE3MMyyUwev9LWXN89PIddDehgLNp5yN+tCABPxdOYjKXhczsQ8Lj0+3cylrGdsGWEMmQQDlHRvbj42YW7bcpCOFSKa6yM+PU0ccYY9o1EdRetYFNPCAVWfRbkQqh0VR4A8BoAV0t/i58HFnJiIuoHcCWAY9LmVwHYqP28D8B3FnKORiM0KaFFeV3GyVO4TGakfkuJTA7j0TTWdga1RnbpYhfUkBd9bX7E0jyl7fBEHOu7gljdHsDhibjedlrWVNZ0BNAe9Oj+STOD0wn0W/RzsUKe8AMWwkGed30VtK0VWm1BLl/ARCyDnrAPRISQz4VoKos5zYctrmE8k4fbwiUkX9P2oAdel0OfCIXlIFsbVpYDoKWzapaD00FGP3HE2k8smI5nbHvvBDxOpLJ53vRP+yxCSMj59XYcn02VtFRoD7qRzTPE0jkcHI/hzL5W9EZ8GJ5N6ZOTKBDsjVTXWDCVzWM0yv3jQqCMzaUwEeML3TgcxZbss8ksUtkCLtVSQUXmldnV6Pc49e9CniD72gIlAfWdx6YRTedw3RvPAMBTkmX2jUT1Qsj5clQ7p7z2uQgAT8YymIpn0BHk341ssVopJWZ8bgc8TodROGgCQO7oahe4tisslVnZ6pcWrkojnsnrMRyBUPTK1cHUi0pX5TkAu7WfXaa/nyvzvmr4OoBPAJDty2sA3Mg4j4FbLr2W714CCK1XWA6y1hr2ScJBulGET7GvzY/OFi8m4xl9EukOeXUNbCKWxvBsEms6gljbEcDRyYTut5Rz+YkIZ/dHsNMidXI2yTuVyj7RcojxArCcqKuZhAW9rT6MRVP6ZxcPTcjnRjSVQzSV435Wt1NvReKxCAp63cVbNOBx6tecqJjqKo/LynIAuLAanUvj4HgMa9oDhhX9Vrby9EO7oO50IqNPxmY8TicyuQIvHNTG5nNzS8Wq3YJMocAwKiUYCPSeUvEsbxMS9mFF2IfjM0ld0RDfVUfQA4/LUbF1OP98wOoOvy7opuIZTMSKLR06W7yYiGZ0d4mYiOxcjX530UcuWw69ER/mUjm91QkA/f686vQVWBXxG9JLdw/P4pXfeBCXffUBw3tq5egkF/yysG2TPqscGxH30awWkK4EEc/mE/U4QNFykBWHokJoshxSOUtrXKY34sNELK3FR0otXIAH2O1qKepNpavSov2cC+ADAHoBrATwfgDzdvkQ0TUAhhhjz5heWgVArvIY1LYtScwxB5csHPwutGpFXLIWIWctdAQ9SOcKODzBNZ7ukE83PblriT/8XSEuRERA0hzUOnt1BAfGYiXayqBNDyQ7ZMvBavKXu7TazME6PWGfwfwVAi2k9WfibiU3HA7SLQCPhUDySdfW7Sz6bHldCd/fILTshIOWf35gLGbIDAL4dc/kCrbdVWeTWd1FaMbtImTyBcTTeQSk2hB5YSSACwKzJTEZzyCbZyVuJTHZjEVTmEpk0NXi1ZIbipaD+K6ICCtbK68rITT5/raAvhbJRDxjSFHlykpad42KiWnQpujR73HqNTmywiJabIxJ6x88PTCD9Z1BRAIebOk1LpL0w4d4S5TxaBoP7CsWT9bK0ckEVkX8BjeRbjnEM5iMp/X/hUs4ls5V5VYS75GF10QsjbaA29CMcaGWA9OWI9XdnybLweNyoL89YJtyXE/KXhXG2L8xxv4NQB+AcxhjH2OMfRRcWKwu914iuoeInrP4uQbAPwP414UMnIjeR0TbiWj7+Pj8b6iFIG4qod3KE6psOcxKWoTQ/NoCHn0SEIHn9qBHf8iERhb2uxD2uTGXyuqFWuaHVCxSbi68sltcxA6PKVPIjLzJ3M7bjNAkhYASGSEtXhcm4hk93Rcojd3IiGsrBLHbxc8r+//l625n0WzobsF4NI39YzE9fVWga3txa7dGzKZiHAC8TgfSuQIy+QJapJYj8sJIAPCp3+zC+Z+/Fzc8eFDfNqW3VDBaJULb3T8WA2O8+E4E+Ke0YipRPQ6IAHZ5K0UEY7tC3qI2HeNWQqe0gFU2z6SsniCcDrK1HAIeJ+JaDyjZ/SfcnrJwPDAWw2bNf74q4jfESHYem8Flm7vRFnDjbm3J3fnAYypGK9nndiLgcWIqnsFULIN2za0ka/FWVrIVfrcTCann1WSstJFeuIxwCFVhOQA8VfzgeBxBj9PgQhas0TwJjabaCukeGLOTMto2WxhjlzPGTjf/ADgEYB2AZ4joCLjgeYqIVgAYAtAvHaZP22Z1/BsYY9sYY9u6urqq/Bj1RdxUYlKVJ1Svy6FrEXLMQVgOkYBbv7FE9o3f49QnfrEkYavfjbCfr2sgHnBz36Mz+1pBBDxlijuIycm8dKYd8vitFPBqNHSBsG70bqmSW0n4VYXAEJOOlQYnJh0R4yhaDtZBaIfNHX2KZJ6LIj2B7max6XoZt6kYN49ZjtPIXTyPTSZw0xPcIP7mvQf0icMu913cN0Jp6GrhwiFXYDioTdyyC7A96MG0jWATyOm4bie/N6fiaS3TyG0Yh37ekBdhn0tXMszCwec2FoIK9JiLZjlk8wUMTCWwTltFr68tgKjWKmI6nsHhiTi2rW3HOavbFrTU
7tHJUuEgPtdUPIOpRNGtFDQIh+qmwYDHaWiIOJ3IlKQ3h7QU9TkL4WCXxioQ2W7HZ7nlsKG7xVIJW9MewLE6rIteiWqFw40AniCizxLRZwE8DuBH8zkhY2wXY6ybMbaWMbYW3HV0DmNsBMDvALxLy1q6AMAsY2xhCxM3EHFTCY1XuJUcxDVrt9OBFq/LsADQrOQzFpOSePj8bkk46Cmxbn0SHZ5JosXrKpmYQz43Tu0pLYYrV7xlhcPgNrKyHKp3K4nJVK/70Cwi2RcvrKSWssKBH0e0KxGCWF5Ny1lh3AAMgb3TTMIhUmbdCKA0x19GtnZkAdLqd+uW3p8OcMv2S28+E7F0Dn/az/+fsREOcmU4AHSHi91nxb3SKgkHkd1Ujql4Bi4H6dprR9CDiVgGqWwBfu1aCmtscDqhZzBFAh699sWs+cotZDwWloNwow1OJ5ErMKzr1GpLNEt2aDqJA5r7ZEtvCJtWhHBoPF5V8aCZ2UQWs8ks1rQHS17raOEL9aSyBV2xkhUsbxUxB4Dfc7JbKZYurZFwOAitfrchzgiUv4cEYiGp4dkkDo3HS+INgjUdQUTTuZKMqHpT1VVhjH0ewHsATGs/72GMfaEB47kD3LI4AOD7AD7YgHPUDZdpMhMarDx5m/2U0wn+kLZ4XbomMRFNw+UgeKTurmISCGuWA1AUDlacvTqCpwdmDPnjU/EMQj5XVQE3oEbhUMly8BpbaYtCNdklJv7WYw4W4/SZ3Uq6cJDcStLb7NxK/e0BvPqMFXjHi1djbYdxAjG33jbDe01ZX3eD5SBNOC0+l17f8vihKawI+/CGs1ch4HHi8UO8dkEoDaL7rMCnXQdhdbUHPPo1GtYqsWUXVnuwsnAQQXWhiXa0eHTBLa6t+D08k0LE79YnOqGgWrmVBPJ31xbwwOUgPXYhKpTXaU0FV0WK6bfiM66M+LGppwW5ApuXP/2oVlOy2sJyEG4loHg/yeuVVGs5+D1Gt5LdfSFqX2TkbDY7Ah4XIlrPp6GZJNZ3lgo6QG610dgeS5VLAzUYY08BeKreA9CsB/E3A/Chep+jUYgAalbrLuq0CJB6XU6kJU1oJplFJOAGEekay2wyqz+YIclKAITlwCePoZmkba702f1tuOmJARyeLGock/GMbS8XKwyTbIWYg52GLtDdSjPGgKUcuBRWgZh0LOsctH18unDg55WzmIxuJetxOR2Eb7/jXMvX2gLF1uBmcvkC0rlClZaDJPi8Lh6L0PoVbexpgdvJW4iIAjM7t5L4rKI+xu9x6sHu4ZkkwtrELYgE3EhlC0hm8rYFXVPxovsI4NdcxBb82rXUz5vI6H/L7iuzhm10KxndfF0hr+5WEpOYKIIUrp3peEb/jCtafUhry8EekmpzRmZT+NubnkImz3D9287WJ0YzVqsTyuPUrXPt+ricDnhdPF5UtXBwO5GUis+iKWtXkRAO2XwBB8dj2NjN60qqKbbb0NWid0Ywx8YEvdK66KKbbyNQXVkXgLAcdOGgPbAuyfHtdTkM1YwziYzuUxYTSyyd028ct9MBn9uhF8OE/S5d6z4+m7L1W569mt8kchO+qXi64tq4MrJ/02rud1QR+BWIiXJ4NgmiokvCWEgl9i0TkHZZWw7yZFRpLJUI+9xwkLXlEE/z785WOEgTi7yP+PzxdA4DUwk9nXhjdwiHJ/jaCTOJLHxuh2GSBYqfWcSqfG6HbjlMxjMImPZvr+AW469lDULI63Lobi1zMeFMMqtr2IYMNpPgtbMcAO5aEm6l47MpuBykx9jkjJ7jsykEPU6EvC49pVfO6vra3fvw5JFpPDMwg4/+0pzcWERUdlvF1/hn1a6ldN+Uc2daETBbDqnSNcsBTYhk8vjBnw7jqm/8SV8ZsJrzbF0Z1lvhvMhm4i9aXgtbnrYSSjgsAJduOfAv0ynFHAQ+t9FymE0W12eWJzhZqxA3nNvJq1DlVhV2yxme0t2Cla0+3COtajYpZWdUgyHrx8pyqBCwlhEPXjSVQ4vHpb83YJFlVAxIW6Syuo0Tl5iEZC22UnC8Eg4HIRLwWAakY5pL0DYg7SqeW/5sLb7i8qXTiayu0a7tDCCZzWMsmsZ0PGMZD3I5HXA5SHdL+dxO3Q0yk8iUTMQiZlLOB20+l9fl1DvZimssBEIik9fvTbmRoTk4Kr9mtvq6Qj49tnR8JomecLFFSNDjgoP4Ikkjs7zOg4jQEfTA6SB97Y1kJo/fPTOMt53fj8++9jQ8cXgKO45adyAWixG1BUqfD5/bqcd/ZItTt1irdLv6pYB0OpcvqRrXr4XLgUy+oHfLFWswVJMVJeJhbieh2yaRJBJww+d26OuqNAolHBaA8AmKPHXdcnAaLQd5ychkJq/7puUJTn7QRJAr7HPrxTcCu0mKiHD5aT340/5x3VKZqtGtJGf62LmNxOZKqaxOqWutzxAfKJ5ECFdzMaGMveVQP+EAcIvGatEksc3ecijVROW/ReND0RdKxDsOT8QxIykKZsSELeo7xD1TYKUaaJtFx1Azwp2pj9vi3pPvQSEoyikEfinuYZ74usPFbC1eBV6c6ER7+NkkX29daPsObb3vUc0d9dzwLFLZAi7b3INrz+tH0OPELVoLDjNjc2l0aYs/mZGtBdlKK7ozq7t/Ah7uVmKMFRvpWdwXXpcT6WxBd6uJleFcdql0Elec1oM3n9uH/36bfRkZEWmtNpRwWLJcu60fP/7L8/HGc3idnphQ5YnV53YilS1aDulcwbLdhmw5FDU5o08esPbLC166sQupbAFPa/UOc6msIaulErVkI1UzIQsXmFwc6DJMNkbhYJmtpE1SYvLRYw6yW6kOwiHgcekuJBmhvdsJB3lSlIvghEYpKl2FAiHSOY9O8uVY7QLdegqvxT1gvk5ByYVlh6yUyMcHJLeSRQyhXJypnFXRrRVuZvMFHJ9NGZpFAkW/fDSVMwjInrBPdyuJup2z+iMIeFy4dEsP7tw9atmBeCyaQpeNpi1bC/KYhaJVvVvJhXyBIZMv6JaItXDglsN+LSVY9CurxnLoaPHiK285C1edvqLsfr1VFD4uFCUcFgAR4ZJNXfqD4XIYfwOllkM6V9BvViLSJ3v5ptXdU9q34zTEAuxvsPPXtcNBvDVzvsAMqXvVUClbybhv5eOJOINdewuhSZUz7y/Z1I1IwI0z+rj/1Zw+XM1YqyHocVq2boiXmQTM47CyHMx1HqIGYCKWQTKTL4k3CMyuHtll5TZdJ/Ga7A83k8zm9WMBRuEq3u+VhYOwHMq4Gu2sWKBYJT0eTWNkNmVoMw9w4TCXLF1Yqifs1d1Ku4fn0Nvq06/Z5Vu6MRXPYPdwaS3EeDRtWTAGGK0F+W8hLM3X0w7xjCYzeV1psIoBel0OJDN5vQ3JpPbbnN24ELolt12jUMKhjlilsnrdDpPlkDc8mEKDkx9+8XbxYMpzX7l5sNXvxrrOIPYcn9MnumCZB9iM00Krt6NSKis/dwXLQQ9I22crnb+uHU//65X4q4vXGfapt1sp4HXp1b4y+nW0EbJiPEQmrdSUdSYsCVGxOx3
nNQZ2wsFryiByOx26IDK7QYrNC43C7ZO37sJnfvsc0rk88gVmtAwkQeGzcivploN9kkK5vH0xoe8b4R1dzf2jxAp65nTQDmkti8HphCH76EJtRbRHDpauzz1WRjjI94osIMvdd1bIQljvVmBxDTwuhyE5QBg6rjrcpwJRgd/IQjglHOqI02Iy97mcRsshWzDcrOZJAJCEjIWbqtKkvb6rBYfG47oWGajJcpA+i82NTPq+lW908TntLAexvZbAoLllCbDwbCVAsxws3DJCsNulIYoxy72egOKkISwHOZGgTQt+p0zavIzPlMIrxgiUukHEpCVX7yYzedz0xDH8+NGjeFhbElT+DFbxLrezuJ6GleVg/s7LCQcxUQsXZ6+NWymeyRsUmJBUHzI8Y+xY2x3yYX1XEE8eNgalM7kCpuIZ3VoxY2c5lGvbYoVfFg4VLAfx/MkCq1r3VTV0tXiRzhX0pIJGoIRDHbFahazUcjAJB1MFMFBqMRj9vuXHsL4riKOTCb1JXy2WQzktUcD0fSsfT1wPORBnZUUEa/D9ikOZ8+oXCq9+LbUcRLWunb9Yb+dhus6ilcbIbAoOMrb7aAvytbpT2bzlsqaArDSUFtmVCgcRcyiOf4+0rvT+0Zh2TOu6BDHpEUlJBFXEHMp1Ge0O80nx2UEhHEyWg5+vfZI3LUkb9rmRyfOajZE5YyAbAF7UF8Ezg7MGjVlfdCdc2XLwWwSka+mtBHDBK+odAhZKgyxseiXhZjU/zBfxWRvpWlLCoY5YadM8cyGPqXgGc6ksMrmC4SH1mLJxgNJiOkP9AcrfYBs6W5DJF7BvhE8ItVkO5VNZ7fa1QwgFO3eVOSBdjQYn5gSP5CFgIAAAIABJREFUwa1U8W0VCXqdJW4ZAMhoNSx2rgfZcpAREw5Pd3QbvkPR6yeVs3cr+SxSSe2uk9NB8LkdhpjJbqlHkejwaQw4W0+YQhjpzSTLZCtZTYyCzhYviIBnB/k4ek2TvN/tLAb7pWsn3G8Hx/kKdGaL48y+VkzE0oZgrKjEtnUr2bjTaq1zEPvlCgVJabAv3ARgiLVUk61ULWINiUYKh+pnDkVFxH0iz5tetwNzqRzO+dzd8LkdXDhID6aYdOR0z6LFINxKxeNVur96tJtRtCCwWgvafvy1BKSrEA6mDCPAaDmY3UrVPKQFTToYYw4Lf+gCHhcSFtlKosDRTnAVLQfjoyS7usy58G0BD45NJbSAtI1bycLdqNd6WFyngMdlEG5DMym4nYSwz6036zMEpC1iDvLf4vqSZMWakyHKWQ5upwPtAQ8m4xm4nYROU72NfE9YFQ+KxWzMloNYH3rvyJzuchJtw23dSi7rz1prnYO4X/MFpt8XVveswXKQhFu1Fko1dFl0vq03ynKoI3aWgyCVLaDAjBObmOzkFszmlFhj5XL5G0zkvItAqNWKbnZUG/gGqgsCW1kOVn+LCaGaBmgiuFfPCmmAC9FMvqA/9IJyGiIfR2lCAWD8nObCxbaAW7McyriVXE7Db8AYGzAT8DgNwk0Uva1q8+vptFapqvJnAErTqM3JETKVGsmJCWxDV0uJ6092sbQYYg5abzFtKdhWv7FOR7SGkZfJ1C0HG7eSzyLQLo+/WstBKDY5STiUq80BjO60emYr6c0iG9h8TwmHOmKVjWClGcoPpvBzy5Wd5lRWQH5Iy49BCBnRVK2c6W+mlpXeqoo5WLQTsRIOQrOuRjgIt5I8udTHrcTHYI47lNMQ5e1mLZqISj6foDXgQTSVA2NGl4eM3nDQU2o5WI0laIqZTGpLm3aHfHrPKDu3kqxwiO9MvG5lvernrNhllGvNGy3WMJfvCfk44lod1xcRMlldQb4OirxY0Fg0DSLYFnzKn9VgsZSxxKwQ32ehwPSuB5Ush85QcUzuOmYrRfR+YEo4nBBYadOyIBDIN4/wE8vLUJrdSvLfldw5kaDJcqhBONSSFVXJggGKE3glyyES8OBLbzoTrztrZcVjikBkLWOtBhFQNtc6ZLTWKPYBab7dykITn8/c1tln49Ix7iMCwxZZRRZCNGCKmYi+WoYMJRvhICNateiWg8P+vqvksrxkE19nxWrylZ8BS+Fgs/ocwDsTHJQsh/EoX+HNTjOXr7F839ZsOThly4HfF+WaRQLG57qeloPb6UDI5zKsFVNvlHCoI+JBkoPG1pZDcVu8nOVgkT1UaRoMaes96Kt3zbcIroKWU51bSbMcnDbCQTrftef12/aSkRE5KvLZ61UhDaCkSjqTK8DjdNgKQzHJWRWEic9vTgqQJxQ7t5KY0ORiYLHNOuZgbAo3nciiPeixDTwLQWH+WF2aa0ZfF13cdxYfv9Jk95Ztfbj6zF68/5L1pe+VvjNjN1vjSohWcY0NXS26qwyAYTU7K+wEYS2JEEAxtpWvwa3UbhAO9bMcAB67UpbDCYKVW8nKcpCDgcKnLXdPtTLlySL+YAURX6RFCJ1ATams1n9X2tcOMXnIk7dVQLoWRIzG4HKri3Cwthyy+ULZQGJxfQl7y8GsmXoMPn7rR/DVZ/QCMNXMmFqJGMdv7A01GePatF0BmNhuLuLq0YK6omZC3IvzqbUKeFz41tvPsXYrGdbBKG07IvoGWSk3G7qDmIhl9IWz5HWwrRDX27xPf1sALgeVBL3tkGMOmVwBDrK+h81rWwjcdcxW4sd2W7aZrxcqW6mOWAeky5udAtn8FMexSiOsxoXSGnBjMp6B00FV+1PN56tPKisZfgPWRXC1ICapWtqHV4OYrHKmvj2ZXKGsZulxOmzX+rX6/IBRWNhZDueva8fNf3Mh1ncFS/a1Go+81kA2z4uj2oIezCWLAsOqCC5savwngroiyFvpO3I7CZdu7i67j937BFYupmiKV05bCf712opyBydiOGd1GyZiab1nlRXimG8/37js/eqOAHZ99pVVrbMAyNlKBU1pKJ+kABhX7Ku35VDNCoALQQmHOlKsSyhus6ugNCN3zCya8lYxh8rj4NpKHF6XvTvECqvz2VFLKmsjLAerIOpC0LXCvFE4lJsEAC7sbvvIS/XGejLCDWGeFOTj2cUcAC4gZITFaZUv73KS3pBOrF8c8bsN7eKt+neZM6ku2dSF636/Fxeu560qil14rce4//Ovth1/OQxrnkidbWWhYbew1QZtydeDYzGc3R+paDls6gnhzr9/GTb1lC67Wa1g4GOWLIe8vdJg1fEWqG8qK8Ath0MTsco7zhMlHOqI1WTXbtGvX7Yc1nYEcGQyYZgwxHGs1kauZq4XZnqtGnVtvZUqH09MAPJEUEuhnRXCcpDfWQ+3klN/8E2prGUmAYGd1iomk3JuJa+NW8n6eHxfKw+Pk4rCIaUJBJ/baZmmChTbgrSblijd0hvGvv+4SurKOn+3UjnsLAeR5ZUvMNs6ir42P5wOwtHJBGLpHNK5gl4UZofdqmq1INc5iFiUFfLzbazxqa9bKRLwNDQgrYRDHbEUDhbpdfKEcOsHLzKsfAVIVoJ0L5HptXKISclZo6ZiFEbl961pHLLlIAen5+EO+shlG7FvNIqXbewqHq
cOwkE8xOZ20OUmgUo4LT4/YGycZ+dWssIluTVKXpMsh7TmXuLCwTpD6ZzVEbzzgjX44Cs2lBzL0JqkvsqujiwwzRq1LhxsLAe304GVER+OTSX0CmE5ZbRRiMk9l+cB6UqWQ8DjNFq4dbYcWrw8zsQYq8lDUC1KONQRvfGetM1SOMjZDFretoxV+mDRvK98E+jujBqfbPnQ9XErlY6j0mpzlTh1RQj3/OMlNY+lEuKamWMOldxK5dB7S5kmBY+NNl8Jh+TWMCMmVKBoFXhdDvt6BqcDn3v96ZXP2YBJh5/fOBbDaw5CBvarHgLAmvYgjk4lMBHjPveuluqCygvBKSkQmTJrT4skk3PXtBm219tyCHpdKDDer62W+6haVLZSHbFyb1jVGVgFpA3HsQg+FwVG5XGIe7bWB9uue2qlfe2olMpaL22nHpaDfcyBVZ3qaDcuc5ZKtTEHuzHm8xbCgUgXGqILsM/tlNYOqWHgErW4M2uh3EQprpt5rWyZ/vYABhbdcigK52ye2cYQzlkTwbsuXIOvXnuW8f11thxEbU6szCJPC0EJhzpipalbTYDlMisAuVV3cVstD6lrnpZDLcKkulTWUguoHhO5mXpkKzlNLptMroBP3roLB8dj8w4kWmWdAdXVOZQdo0UAwOlwlFoOboeuiMw3jVIMvZExBzN6rKaMUF7TEcBUPKP3EKsUc6gH8j3CY1H2rU/+/ZrTS3o91TuVNajX5jRGOCi3Uh3Ri+BsJqu3nNuHF6/vqKiJksWkUksqa7H9Rq2WQ/X7VlUhbXH+RgiHejxzbqn6FQAePjiBm544BgCWmUjzObb+fxV1DlaI61mwcCsZYg6a5eB1FQPS89VaG/F9AeUL6Kpxi67RFgHacXQaTgcZ6gkahbnOodq1p/X3N8hysFreth4o4VBH7DRYt5OQzTN8+S1nWb5uxiqVVUQyapmUa485VL9/Ndq61QTQEMuhLtlKxWAjYGxoNl+3kmj1Yb4Onnm6lZw2tRiAdczB5y7GHOZ7jRoR6ASqsxzK3b/9knDoCHrqkrFWCXNX1lpjUfUXDtYrANYLJRzqiN3N/NSnr4DF82yLVSprsd9S5fdbLVda1XlrcStVlcrKjye7JBohHOpa56B9UaI3FVB9Y7ZKx9aPZ1jsqfpjr9fckZssKo55zIELBYPlIC0zOh8aFZAuNx5xj5SzLlZ3cOEwm8zi9FXh+g7OBuGuTecKZbOV7Ki7W8mr3EonDHbaS7msCyuEtmZ1L1Va7AewTiGthtpiDtWPg0mZ+fXO2Kh2LJUQ1+rBF8bxxOFJg0Cb78QqDlEiHKTj1aKZX3RKJ3734YtwhramgYzTQSgw4LX//ZCeGu1zO/TGffMVynXsFWegnEAvriBov0/Y50Z7kC+atLW39Ho0AnENv3znPgDFxoLVUm/rJmjTD6xeKOFQR/QK6QUfh/+2mvSqy1YSwqG2J7ua3a3aV9hh6VZqgCZal2wlbUL63TPDAIzVyfN3K4ljm7KV5nk8ADizL2K5XUyku6QV4AyWw5JzK1VjOZQ/twhKn7ZysSwHU+yoUZKzSvSYQ4PcSipbqY7Uy2VSrj13NdrHfGMOtWjgtaSyGtxKdfa7AvWxHMwWzYS0/OJCJ4FylkO9sLov5JjDfNtF6xXS8x+aJeUmfrvKcjMfu/JUALVr8PPFfI1rcQk2gkZnKzXt0xHR3xLRXiLaTURfkrZ/kogOENE+Inpls8Y3H+qlFZfro1TNKfSaiJqzlSrvX6nXjmEcFjstVcvBfAx5+UWPa37HryYgXS+sF5pyWtaa1MJiVEibERZvpe/1olM6ceS6q7G2Qmp4o6h3r6RaOSljDkT0CgDXADiLMZYmom5t+2kA3gpgK4CVAO4hok2MscY41eqMfjMv8J4pts+wcis1Mlup+n2r0ta1XWStc6lmK5mvVTRVfODmO5mLz22eRObrpiqH+Ro4iH8msSjNfIOh9XKVmik3HnG567lyWiOYSTaur1E1eFwOeJwOvT1/vWlWzOEDAK5jjKUBgDE2pm2/BsDPte2HiegAgPMBPNqcYdaGXqi2wOOUizlUc+xqNa+S99XZrVTP95WjLm4lCy1wVcSPF69vx5vO7VvQsc2fuREap1m4+dy8r8+6ziB6wl586uot8zpuo2IO5SyZYvHg0vZ6H5tKVLXff77hDDx+eLIhY/juO8/Bmo7GWE7NEg6bALyUiD4PIAXgY4yxJwGsAvCYtN+gtq0EInofgPcBwOrVq612WXTqlcesPxxyKqvptbLjsEiFreW85SgGpGs6tHSO+b2vHPVJZS2diLatbcPXrn3RvI+pB6RNx67ncpECswAS/nC/x4nH//nyeR9Xr5Ce9xGsKfesWK0TvtT4s239eOeFa6ra9+0vXo23v7gxc9Slm3saclyggcKBiO4BsMLipU9p520HcAGA8wDcTETrazk+Y+wGADcAwLZt2+p9786Lek18xZhBcZu+PGZN2Uo1Coca9q9FW5cD0o3QROuRImh1iIUGokUK72L4ps1adr0asTUiRgSUdyst5nWbL+96yRpsXbk4KbTNomHCgTFmq64Q0QcA3Mp4xO4JIioA6AQwBKBf2rVP23ZCUK/+M+VaZdTSPqPW57qaOZaIf85qxrFYj3Y9XFVWQqtesYFGtaCQKVdotxCoTq5SM9VYBUvZrdTsTKXFoFmf8DcAXgEARLQJgAfABIDfAXgrEXmJaB2AjQCeaNIY581CteNi4z2LmEMNlkPN563ifbXUOejvqbtTwkijtNuFZhWJa7UY+fBm66kerjagcW6lctekeN2WruVQqbPyyUCzYg7/D8D/I6LnAGQAvFuzInYT0c0AngeQA/ChEyVTqZ7oFdLSszGfmEOt1FQhXcV8pwvJBjv9GqVgLlT7LsYcSq/rJZu6cPEpnQs6voz5HJ116lLaKKunGuHQiGr6etGIjLOlRlOEA2MsA+DPbV77PIDPL+6I6kO95sByk3QtFdK1urnq3T5j0dxKTWgOVwtWLpQf/+X5dTm2wDyJ10s4NKq3Ui1FlEsR5VZSzIt6pbLKFAPS1cccaqWWt9VyjkZnC9RbuxWX2OOsj+tgMTTgUsuhPi2s9aLHuhytlL+7bGPJNr0n1RJ2KynLQVETp3S14N0XrsG7XrJ2QccpJwCqW+yncTGHYkC68vEapHRanKe+Jwp6XIilc3VwK4kK6cZfCHPM4TypN9SCjtug9hkAcOS6qy2321WWLyUaUeW+1FDCoY44HIR/u6byuryVsJqka4k5zDfLo5pJVriqqtn3Vaf34p49o/jEVafOazy1snlFaSvr+eBzOxFL5+rnVlpky+H2j1xctzTLxci0smMpu5WWsuCqF0o4LEHKPRPVPC+L8VBV4+f3e5z49jvObfhYAOC2v70Y/W2Buhwr6HViIrZwv/JiukfkSbye+feLZf1ZsZSFw3JACYclSDnroBqNfTFWxWpUoHK+nG6xxsF8CWjdLhdcBCdSMhfBcmiUht/M77nZLbGXO+rqL0GsHkg9IF3F+xdD41rCWYYLJuDhgegFxxy0b
60RbcrNzLfwsdrjLiZCqDbTpaVQwmFJUu4Br6VCupEsNcuhnvjc/LGol+XQqFRbGRHXqPf30oz5WQjVpZyttBxQbqUTBD0gXcvazQ1MIl3IJPQvV29BV6g+efiNQAT0F245cBZDjjaqtXajurJWg3IrNRclHE4wlozlsIDn9r0vranH4qIjhOuJZB05GzTm/7+9uw+2o67vOP7+3OSGYEIIENQUCEFAESiGJCIPDkalTshQUjU62PIUH1JbIWqHUVs7FrTVGUY6FTtDyiDGUAFB1AkRBh8gBYGgEUkQkZoiHVFGHtqGplY6Id/+sb+TrHfPPffc3Hv24ZzPa+bM3ad79vs7e85+d3+/3d9W8Rm4WqkenBwaooyb4MajSTvO8Wp9fi/umtiZ15pzF3L13Y9P2t3KnUztVZtDFckh/S2jIX+81py78HceBNXPnBxqqNPPsZufahnJoYqdRlmmTlJyWHT4gfzTeZNzM9pYenXmUOmlrDVsc1h6/NyqQyhN/VKztW0pGN9NcD5zmIjW57dz166KI+ler65WqnI7u1qpWk4ODdNVtxUldHmnPv7mLDhsNpA9JrQpenXmUM2lrNnhkVNDtVyt1DBVXj2S14/VSn955jE8u+MF3vP6IzjlyIMa9aSvXrU5VHnw3odfsUZxcmiYuvxg+rFa6U/fcOTu4SYlBuhlm0N6vxIfxFuLZ/6aq5Wapi475RpeSDLQ9iSH3rxvqXZnh3p81weVf+INM57f6mQ907p9HP7h1kmnR8tOxO7vW4mb+yNLX8U+U4ca1ebTj1ytVEOdfod12Sn3Y5tDo7UeytMHVystPX4uj/3t4FwyWlc+c2iaEn6rnfYHl519HNOmDNWm7cMy43nOxnjsfjs3BAwcnzk0TK+P5L730Tey7/Doj8e84NT5XDDBJ93Z5Nt/32EAVrd57OZE+F6DweXk0DDd/FYPmJHtKI566cxxv/+hk/TAHCvX9OEpoz52cyJ2H4w4RwwcJ4eG6ebM4bjf25/r3/s6Fs0/oISIrJ+5WmlwOTk0TLcHcKceNaencdhg8IUHg8sN0g1TlzukbTDU5eo4K5+TQw11OoN3+6CVyclhcDk5NMyQs4OVqJ87WLTOvOlrqPNNcKWFYeY2hwHm5NA4/rFaeVytNLgqSQ6SFkjaJOkhSZslnZSmS9KVkrZJ2ippYRXxVe30Vx4MwPITDynM85mDlcm5YXBVdSnr5cBlEXG7pGVpfAlwJnB0er0OuCr9HSivOHjmqDc0+UjOyuSv2+CqqlopgFlpeH/gV2l4ObAuMpuA2ZLcA1eOk4OVaXhoiCHBX5/16qpDsZJVdebwIeAOSZ8lS1CnpumHAL/ILfdkmvbUyDeQtApYBTBv3ryeBlsnzg1WpqEh8fhnJr9bDqu/niUHSd8BXt5m1seBNwMfjohbJL0T+AJwxnjePyKuBq4GWLx4cd/f3L+7ixsnBzMrQc+SQ0SMurOXtA74YBq9GbgmDf8SOCy36KFp2sBrdcnsaiUzK0NVbQ6/At6Qht8E/CwNrwfOT1ctnQxsj4hCldIgc3IwszJU1ebwPuBzkqYCvyW1HQC3AcuAbcBvgJXVhFdfzg1mVoZKkkNEfA9Y1GZ6AB8oP6L6ayUF3+dgZmXwHdIN415ZzawMTg4N4QZpMyuTk0PDODWYWRmcHBpiT5uD04OZ9Z6TQ8M4N5hZGZwcGsYP+zGzMjg5NMSeBulq4zCzweDk0DByk7SZlcDJoSHc1mBmZXJyMDOzAieHhoi+75TczOrEycHMzAqcHBrCbQ5mViYnBzMzK3ByaIiXTMt6V/cZhJmVoaqH/dg4rV35WjZsfYqX7rdP1aGY2QBwcmiIww+awQfeeFTVYZjZgHC1kpmZFTg5mJlZgZODmZkVODmYmVmBk4OZmRU4OZiZWYGTg5mZFTg5mJlZgaIP+oKW9Azw73v573OAZycxnCo0vQxNjx9chjpoevxQfhkOj4iD283oi+QwEZI2R8TiquOYiKaXoenxg8tQB02PH+pVBlcrmZlZgZODmZkVODnA1VUHMAmaXoamxw8uQx00PX6oURkGvs3BzMyKfOZgZmYFTg5mZlYwMMlB0lJJj0naJuljbebvI+kraf4DkuaXH2VnXZThQknPSHoovd5bRZyjkXStpKcl/XiU+ZJ0ZSrfVkkLy46xky7iXyJpe+7z/0TZMXYi6TBJd0n6iaRHJH2wzTJ13wbdlKG220HSdEnfl7QlxX9Zm2XqsS+KiL5/AVOAfwNeAUwDtgDHjljmz4E1afgc4CtVx70XZbgQ+MeqY+1QhtOBhcCPR5m/DLgdEHAy8EDVMY8z/iXAhqrj7BD/XGBhGt4P+Nc236G6b4NuylDb7ZA+15lpeBh4ADh5xDK12BcNypnDScC2iHg8Iv4PuBFYPmKZ5cCX0vBXgTdLUokxjqWbMtRaRNwN/EeHRZYD6yKzCZgtaW450Y2ti/hrLSKeiogH0/B/A48Ch4xYrO7boJsy1Fb6XHek0eH0GnlVUC32RYOSHA4BfpEbf5LiF2r3MhGxE9gOHFRKdN3ppgwAb0/VAV+VdFg5oU2abstYZ6ekKoPbJR1XdTCjSVUVJ5IdueY1Zht0KAPUeDtImiLpIeBp4NsRMeo2qHJfNCjJYVDcCsyPiBOAb7Pn6MPK8SBZXzWvAT4PfKPieNqSNBO4BfhQRDxfdTx7Y4wy1Ho7RMSLEbEAOBQ4SdLxVcfUzqAkh18C+aPoQ9O0tstImgrsDzxXSnTdGbMMEfFcRLyQRq8BFpUU22TpZjvVVkQ836oyiIjbgGFJcyoO63dIGibbqX45Ir7WZpHab4OxytCE7QAQEf8F3AUsHTGrFvuiQUkOPwCOlnSEpGlkjTzrRyyzHrggDa8A7ozUIlQTY5ZhRN3w2WT1sU2yHjg/XTFzMrA9Ip6qOqhuSXp5q25Y0klkv6/aHGCk2L4APBoRfz/KYrXeBt2Uoc7bQdLBkman4X2BPwB+OmKxWuyLppa9wipExE5JFwF3kF31c21EPCLpk8DmiFhP9oW7TtI2skbHc6qLuKjLMqyWdDawk6wMF1YWcBuSbiC7kmSOpCeBvyFrkCMi1gC3kV0tsw34DbCymkjb6yL+FcCfSdoJ/C9wTs0OME4DzgMeTnXeAH8FzINmbAO6K0Odt8Nc4EuSppAlrZsiYkMd90XuPsPMzAoGpVrJzMzGwcnBzMwKnBzMzKzAycHMzAqcHMzMrMDJwWpBUki6Ijd+iaRLS45ho6TFafi21vXoE3i/JZI2jDI932vodyayHrNecHKwungBeNve3sma7iSdNBGxLN3B2iv3RMSC9DojP2Oyy2K2N5wcrC52kj0/98MjZ0iaL+nO1KHgdyXNS9PXSloj6QHg8jR+laRNkh5PR+jXSnpU0trc+10lafNo/emnZZ6QNEfS+3NH+D+XdFea/xZJ90t6UNLNqa+f1jM3firpQeBt3RZe2bM41ku6E/iupBkp9u9L+pGk5Wm5fSXdmMr0dWX9/bfOdnbk3m9Fq8zprtxbJP0gvU5L0y9N69iYPq/Vuf8/
P33eWyRdJ2m/VP7hNH9Wftz6UBX9hPvl18gXsAOYBTxB1pfMJcClad6twAVp+N3AN9LwWmADMCU3fiNZn/nLgeeB3yc7CPohsCAtd2D6OwXYCJyQxjcCi9PwE8CcXHzDwD3AHwJzgLuBGWneR4FPANPJetM8OsVwE22eK0B2l/V24KH0+jjZ3exP5mL7NHBuGp5N9tyCGcBfkN0dD3ACWVJtxbwjt44VwNo0fD3w+jQ8j6zrCYBLgfuAfVKZnkvlPC6tb86Iz+uLwB+l4VXAFVV/b/zq3cunr1YbEfG8pHXAarJuD1pOYc9R+HXA5bl5N0fEi7nxWyMiJD0M/DoiHgaQ9Agwn2xn/E5Jq8i6j5kLHAtsHSO8z5H1cXOrpLPS/9ybuvCZBtwPHAP8PCJ+ltb5z2Q70XbuiYizWiOSLiTrvrn1vIi3AGdLuiSNTyfbsZ8OXAkQEVsljRU3wBnAsdrzSIBZrTMd4JuRddb4gqSngZcBbyL7XJ9N62nFdA3wEbJeTlcC7+ti3dZQTg5WN/9A1uXyF7tc/n9GjLd6pd2VG26NT5V0BNlZyWsj4j9T1cv0TitIO+7DgYtak8h25O8asdyCLmMeTb4sAt4eEY+NWEen/8/3hZMv0xDZ08Z+2+a98p/Ri3TYJ0TEvamKbwnZ2Vrbx6Vaf3Cbg9VKOkq9CXhPbvJ97Ol87E/Iqnf21iyynfB2SS8Dzuy0sKRFZMnk3IjYlSZvAk6TdFRaZoakV5L1rjlf0pFpuXcV3rB7dwAXS7t7Fz0xTb8b+OM07XiyqqWWX0t6taQh4K256d8CLs6VaawkdifwDkkHpeUPzM1bR1ZN1W3ytoZycrA6uoKsDrzlYmBlqkI5Dyg8VL5bEbEF+BHZjvx64N4x/uUi4EDgrtQofU1EPEPWRnBDiul+4Jh0ZL4K+GZqkH56b+MEPkVW/781VYl9Kk2/Cpgp6VHgk2RtKS0fI2uDuQ/Id7O9GlicGph/Ary/04oj4hHg74B/kbQFyHeN/WXgAOCGvS2YNYN7ZTVrMEkbgUsiYnNJ61sBLI+I88pYn1XHbQ5m1hVJnyerhltWdSzWez5zMDOzArc5mJlZgZODmZkVODmYmVmBk4OZmRU4OZiZWcH/A09HDwZiAAAAA0lEQVQgq1zuqXC/AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "import scipy.optimize as opt\n", - "import scipy.signal as sig\n", - "\n", - "ntaps = 64\n", - "N = 4\n", - "\n", - "#optimize for 16 filter coefficients:\n", - "xmin = opt.minimize(optimfuncQMF, ntaps*np.ones(ntaps), method='SLSQP', tol=1e-8)\n", - "xmin = xmin[\"x\"]\n", - "\n", - "err = optimfuncQMF(xmin)\n", - "print(err)\n", - "\n", - "#Restore symmetric upper half of window:\n", - "h = np.concatenate((xmin, np.flipud(xmin)))\n", - "plt.plot(h)\n", - "plt.title('Resulting PQMF Window Function')\n", - "plt.xlabel('Sample')\n", - "plt.ylabel('Value')\n", - "plt.show()\n", - "\n", - "f, H = sig.freqz(h)\n", - "plt.plot(f, 20*np.log10(np.abs(H)))\n", - "plt.title('Resulting PQMF Magnitude Response')\n", - "plt.xlabel('Normalized Frequency')\n", - "plt.ylabel('dB')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAb0AAAEWCAYAAADy9UlpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3deZwcZZnA8d/Tc2Yyk5kkM5lJJjfJ5CCBQEIAQcADBeTwQA5RjOKyuqJ47Srigcequ+t6gAoiIiAIciorUTmU+0xCIPdMCAk5JpPJNWfmfvaP9+2k05nuubqnumee7+czn+muqq56qrq6nqr3festUVWMMcaY4SAUdADGGGPMYLGkZ4wxZtiwpGeMMWbYsKRnjDFm2LCkZ4wxZtiwpGeMMWbYSHrSE5GbROSbyV5OMoiIisgM/zrueojI10XklsGLLrWIyAgR+T8RqROR+4KOJ5FEZLOIvDvoOGJJ9G9MRG4Tke8nan4R8y0VkadFpEFE/jfR8+9HPGtE5IxeThtzHxCRM0RkWwLjukxEHk3U/BJFRJ4UkU/FGDfVHy8zByGOAS2rx6QXeeCPGHadiNzZmwWo6qdV9Xv+cwPeOUSkQkTuE5Hd/gD7uoh8SUQyBjLfnvS0Hqr6A1XtdodIJBGZKCIPRKz/ahFZkuzl9sKFQCkwVlU/HD1SRIpE5FYR2ekPepUi8rXBDzOxfIJQEbkgavhP/fAlyY4h0b+xJLoS2A2MUtUvR48UkX/3+3ODiLwpIv8ea0YRB76lUcPvFJHrehOMqh6tqk/2bRWST1XvUtX3JGPeInKqiDzvjx17ReQ5ETkhGctKVWlVvCkiRwEvAVuB+apaCHwYWAQUBBnbIPo9bv2nAGOBjwE1gUbkTAEqVbUjxvifAvnAHKAQOB/YOEixHZSkM9FK4PKoZVwEvJGEZaWzKcBajd0jhuC242jgLOAqEbmkh3meKCJvS2CMgUrmlZKIjAL+AtwAjAHKge8ArclaZkpS1bh/gAIzooZdB9zpX58BbAO+DOwCqoFPREx7G/B9YCRwAOgCGv3fBKAZd3UQnv54oBbI6iaWO4FHeoj3fGANsB94EpgTMW4z8BXgdaAO+COQGzH+3338O4BPRq57L9bj4DYZaBw9rF8jsCDGuDOAbVHDNgPvjvje7vPbsQFYBVQA1/jvbivwnjjLnuPXZb9ft/P98O8AbUC7j++Kbj67Gnh/nHmfCaz32+MXwFPAp6L3N/9+qv9uMv37TwDr/DptAv41epsAXwV24k4aQsDXcElpD3AvMCbiMx8Dtvhx10Zuw27ivg34Me7EY7Qfdi7wV+BZYIkfdhTwDz/P3cBdQFHUfv+qX4f7/D7x/QT9xm4Lz6u7/QQ4Dljhl/1H4J6o6c8FVvrv/XngmDjf49uAV/z3+ArwtogY2/1+0hhre0bN63rghhjjwvvAV4F/Rh0jrutN7Bz+2xgB3A7s8/vSf0Rto83E+M1GfD9f99/tZuCyiM8WAnfgjmtbgG8AIT9uCfAc7qRwj/8elwDPRh2DPw1U+fX4JSB+XAbwv365bwJXEfHbiNpmi4D9cbb3dcT/nT0J/BB4GagH/oz/3URMeyXu+FkNfCViXouBF3z81bjfeHYf1vHHfh03AZ+NimuJH97gt8FlsdZRVRN2pVeG+2LLgSuAX4rI6MgJVLUJOBvYoar5/m8HbkNeFDHpx4B7VLW9m+W8G7g/VhAiUgHcDXwBKAGWAv8nItkRk12EO4ucBhyD22CIyFm4nfpMYKZf1hHirEdC4uiFF3Hb9xIRmdzLz0Q6D3fgH407yP4dlwTKge8Cv+7uQyKSBfwf8CgwDvgccJeIzFLVbwM/AP7ot8dvY8T9nyLyCRGZGTXvYuBB3MGgGJeMTunDOu3CHdxG4RLgT0Xk+IjxZbgz2ym4H+XngPcDp+OSwj7cjwwRmQvciNsPJ+Cupif2sPwW3AEgfFVyOe4gd9hq4g4YE3AnD5NwBxn8fvEQLjGMwe07H4j6/EB+YzH5Zf8Jt0+MwSXcD0WMPw64FfhX3Lb4NfCwiOR0M68xwCO4ZDUW+AnwiIiMVdUluET/3z6ux3uIS4C3406u4vkVUNFdfVtfYge+jTtwT8cdAz7azTTxfrNluH23HPg4cLOIzPLjbsB9d9Nx+9zluP007ETcQbsU+M8Y63kucIJf7kXAe/3wf8F95wtwJ07vj/F5cCUSnSJyu4icHb3/9NLluAuC8UAH7ruO9A7c8fM9wFcjvpdO4Iu4bXQy8C7g36I+G28dz8WdnC3CVaUAICIjfQxnq2oB7qRrZbwVSFTSawe+q6rtqroUdyY3q4fPhN2O38F8vdyluB9gd8bizhJiuRh3JfiYT5o/xp3B/UlEduEOONer6g5V3Ys7iC/wn70I+J2
qrvYHj+viLOcJYJyIrBSRh/sQR2QxTKw4evJh4Bngm8CbPoa+lMk/o6p/V1cMeR8uKf/Ix3kPMFVEirr53Em44skfqWqbqv4DV1RyaS+X+zncQe8qYK2IbBSRs/24c4A1qnq/j+NnuKuyXlHVR1T1DXWewiXmt0dM0gV8W1VbVfUA7ozyWlXdpqqtuO/6Ql+0dCHwF1V92o/7pv98T+4ALvfb7nRcIomMcaPfH1pVtRaXEE73o08CMnH7RLuqPog7m440kN9YPCcBWcDP/Lzvx12hhV0J/FpVX1LVTlW9HVccdlI383ofUKWqv1fVDlW9G3f1fl4/4roOd3z6XQ/THcAliu4a3vQl9ouAH6jqPlXdxpEHc+j5N/tN//0+hUv+F/lj2iXANaraoKqbcVdmH4v43A5VvcFvswMx1vNHqrpfVd8C/snhx62f+315H/CjGJ9HVeuBU3FXSb8BakXkYREpjfWZbvw+4hj5zYh1DPuOqjap6ircd3epX/ZyVX3Rr+Nm3AnI6VHzjreOP1PVrX7b/zDqc13APBEZoarVqhr3RKk3Sa8T96OIlIX7EYbt0cPrcppxB8je+DMwV0Sm4c6w6lQ1+gd/cDm4M4xYJuCKDwBQ1S5ckd0y3BkaHH4wjYxzgp82bAuxtQG7VHWBqp7fhzjKI6aJFUdc/kf5NVU9GndmuBKX1KU3n+fw+r8DwG5V7Yx4T4xYJgBb/bqEbeHwdYoX9wF1jX0W4k5e7gXu81cHh217dWUWW7uf05H8WeuLvmJ+Py6JFkdMUquqLRHvpwAPich+P/063H5e2k0sTbj9rqf1exZ3AnEtLmkedvAS13LxHhHZLiL1uGK4cIwTgO1+vcOi138gv7F4ult25L4/BfhyeFv57TXJf667eUX/bnq9j4SJyFW4K4r3+ROPntwClIpIdHLta+yR27y7/S/eb3af31fCtvh5FuOOl1uixkVuk97s6709bsWdl6quU9UlqjoRmOc//7NeLL+7+W/BrVtxnPET4GADxL/4hmz1uJKhyM9BP47NfptfjDuRrRaRR0RkdrwV6E3Sewt32R9pGvGTQixHVGD7g9G9uKu9jxH7Kg/gcSKKXrqxA7ejAweLSCbh6lL2Rk7oG8V8FDhbRJ7BbeRJEZPEKzrs6dEUseLY3sPn+kRVd+OuIifgiqaagLyI5WbgDsSJsAOYJCKR+8xk+rFO/ozzB7g6qGm4q/eD2z5ie4Udtl64oqTwtDnAA7jtUKqqRbji5MiTgOjvayuuOKQo4i9XVbd3E0seLkn3xp24erfook1w66u4BlijcPteOMZqoDzqxGUS/dPdvhlz+8VYduS+vxX4z6htleev4qIdtt9HzKvX+4iIfBJX3/ouf8XVI1Vtw9Urf4/Dv/e+xF7N4cXYfd3+o31RW9hk3PbYjbtAmBI1LnKbDORRN/2OW1XX44rU5/lB8faT7uY/Gbduu+OMDxev34i76p/p9/+vc/h3Fc9hv0mijs2+5OpM3AXRetxVbEy9SXp/BL4hrql8yJfRnkecurU4aoCxIlIYNfwOXPn4+cRPet8G3iYi/yMiZQAiMsM3Uy7CJc/3ici7fB3Ul3HFGc93M6+bcQfHv+Lq8qYDS0Rkrj/QfTtOHNnABBF5RUS6K0PvSxxHENcU+4wY4/5LROaJSKaIFACfATaq6h5cmX2uiLzPL/cbQHf1F/3xEu7E4D9EJMvHdx6uSLRHIvJNETlBRLJFJBe4GldhvQFXFHS0iHzQFzF+nsN/cCuB00Rkst93rokYl41bx1qgwxeZ9tTc+yZc/eIUH1uJHLrl4H7gXHFNu7Nx9Zy9rQa4Hlda8XQ34wpwRZJ1IlKOazQV9gLuSvMq/71egKv474/ufmMrgXNEZIz/3XwhatkdwOf99/rBqGX/Bvi0iJwozki/f3XXWnoprn7tI349Lgbm4orBeyQil+FODs5U1U29XWHv90Auh0p0+hr7vcA1IjLafz9X9XH5AN/x+/fbcXVQ9/lSlHtx+1uB3+e+hDtBSoR7gatFpNwfA78aa0IRmS0iXxaRif79JFzx44t+kni/s7CPRhwjvwvcH1FSBPBNEckTkaNx9ZZ/9MMLcI1fGv2V2Gf6uI6f9zloNO6kKLxOpSJygT/haMX9xuJWR/Tmx/xd3MH6WVyF/3/jWses7kPQwMEzi7uBTb64YYIf/pwPdIWqxryCVNU3cJWgU4E1IlKHO8tfBjSo6gbcGfQNuLOP84Dz/JngQSKSj6tfuwhXFPZr3IHzZ7iWVHW4FlkAS0VkNe5gFjYFd4Y0A3hQRE6OirNXcXTH74jhlpXdycM1etiPq/yegjtZQFXrcJXDt+DOJJtwrcoGzMd+Hq7SfDeuAcHl/jvt1SxwZfy7cWd/Z+KKrxr9FeuHcfURe3AV4c9FLPsx3I/ndWA5EQdRVW3AJcl7cfvnR4Du6lkj/dxP86iINOB+9Cf6+a3BtQ77A+4Mcx+93IaquldVn4gqKgz7Dq6hQR0uyT8Y8bk24IO4Bir7cfvOX+hHU/IYv7HfA6/hWhU+yqEDUeSyl+BKQy6Oim0ZriHBL3DbYiMxGl35E69zcSd5e3AtIM/1329vfB93Vf2KiDT6v5t6ud6dwLdwJR59jh13nNuGa/33OO7kpy/bf6dfxg5c3fWnI34bn8P9FjfhjqN/wDWwSYTf4L7T13EN05biTmI6u5m2AbefvyQiTbj9fjXu+4r7O4vwe9yxbyfuJOPzUeOfwm3nJ4Afq2r4Jvuv4H6bDT7mP9J7v8E1uHsN18r4wYhxIdxJxA7c/ns6PSRU6f73OfhE5B/AH1Q1Kb2aiMhUXF3LPHH3q2xQ1Xj1g72d721+vv258u1ufh8FjlbV7s6yhg0ReRLXfHpY9nIjIi8BN6lqTw05TBKIyGeAS1Q1urFFSvMlHTepanQxs/FS4uZ0ca0Pj6dv2b/ffJ3SmyLyYb98EZFje/NZX/yR418X45rWr01gbHcO94Q3HInI6SJS5osFP45rtv23oOMaLkRkvIicIq4KZxbu6uehoOPqibju/87x+005rlom5eMOUuBJT0RuxxUnfMEXVSVjGXfj6i5micg2EbkCuAy4QkRew90LdEG8eUSYAyzzn/snrpltwpKeGbZm4Ypv9uMOuBeqarzbc0xiZeOqORpwDd/+jCvCT3WCKzrfhyveXIcr5jUxpEzxpjHGGJNsgV/pGWOMMYMl6Y+BCFpxcbFOnTo16DCMMSZtLF++fLeqJuoe35Qy5JPe1KlTWbZsWdBhGGNM2hCR/nQ+khaseNMYY8ywYUnPGGPMsJESSU9EJonIP0VkrYisEZGru5nmDHFP+13p/6xZrjHGmD5JlTq9DuDLqrrC94u3XEQe6+b+t2dU9dwA4jPGGDMEpMSVnn8G0gr/ugF3g2WfHkdijDHG9CQlkl4k30fmcbhe/aOdLCKvichffS/eseZxpYgsE5FltbW1SYrUGGNMukmppOeffvAArkuy+qjRK4Apqnos7ukFf4r+fJiq3qyqi1R1UUnJkLzVxBhjTD+kTNIT9/y3B4C7VP
XB6PGqWq+qjf71UiDLd/icFD99rJIVb+1L1uyNMSYpVJWnK2u55Zm+PpJweEiJpCciAvwWWKeqP4kxTZmfDhFZjIt9TzLi2dfUxl0vvcUHf/U8V96xjI27GpOxGGOMSajXt+3nI795ictvfZm7XnqL1o7uHqs3vKVEh9MicirwDO7BqeGn3n4d/1h4Vb1JRK7CPRywAzgAfElVe3wS+aJFi7Q/PbI0tXbwu+fe5NdPbQKBx790OqWjcvs8H2OMGQyVNQ287/pnGJWbxefeOYNLT5xMTmZGv+YlIstVdVGCQ0wJKZH0kqm/SS9sU20jZ/38Gc6cW8ovP3J8AiMzxpjE6OpSLr75Bap2NfL4l06nOD9nQPMbykkvJYo3U9n0knw+944ZPPJ6Nf9cvyvocIwx5gj3Ld/KK5v38fWz5ww44Q11lvR64crTpzNjXD7f+NNqmts6gg7HGGMO2t3Yyg+WrmfxtDF8eNHEoMNJeZb0eiEnM4PvXTCP7fsPcP/ybUGHY4wxB9367Js0tLTzgw/Mw7f1M3FY0uulk48ay9zxo7h32dagQzHGGAA6u5QHVmzjjFnjmDGuIOhw0oIlvT64aNFEVm+vZ+2O6PvmjTFm8D1dVUtNfSsXWbFmr1nS64MLFpSTnRHivuV2tWeMCd59y7YyZmQ275xdGnQoacOSXh+MHpnNmUeX8qdXt9tNn8aYQO1tauOxtTW8f0E52Zl2KO8t21J9dNGiSexrbueJdXb7gjEmOH9euZ32TuWiE6xosy8s6fXRqTOKGV+Yy4MrrBWnMSY4D67YzvzyQmaXjQo6lLRiSa+PMkLCmXNLeW7jHiviNMYEYldDC6u213HWvLKgQ0k7lvT64fSKEg60d7Jssz2FwRgz+J6p3A24Y5HpG0t6/XDyUWPJzgjx5Aar1zPGDL6nKmspzs9h7ngr2uwrS3r9kJedyeJpY3iq0p7KbowZXJ1dyjNVtZxWUUwoZD2w9JUlvX46vaKEyppGduw/EHQoxphh5PVt+9nX3G5Fm/1kSa+fzpjldji72jPGDKanKmsRgdNmWtLrD0t6/TRjXD4TCnN5aoMlPWPM4HmqspZjJxYxemR20KGkJUt6/SQinD6rhOc27qa9s6vnDxhjzADta2rjta37rWhzACzpDcCpM0poaO1gjXVAbYwZBC9v3kuXwqkzi4MOJW1Z0huARVNHA7Bs896AIzHGDAfLt+wjOyPE/PLCoENJW5b0BqB0VC4TR49g+Ra7Sd0Yk3zLNu9l/sRCcrMygg4lbVnSG6BFU0azbMs+VDXoUIwxQ1hLeyert9ezaMrooENJa5b0Bmjh1DHUNrSyda/dr2eMSZ5V2+to6+xioSW9AbGkN0Dhs65lW6xezxiTPOG+fi3pDYwlvQGqKC2gICeTZVavZ4xJouVb9jK9eCRj83OCDiWtWdIboIyQcNyU0Sy3Jy4YY5JEVVm+ZZ9d5SWAJb0EWDRlNJW7Gqg70B50KMaYIeiN2ib2NbcfvE3K9J8lvQRYNGU0qvDqW3a1Z4xJvOW+zcDCKWMCjiT9WdJLgAWTi8gICSusXs8YkwQrtuynKC+Lo0pGBh1K2rOklwB52ZnMKMln1fa6oEMxxgxBr2+vY355ISL2/LyBsqSXIPMnFrJqe53dpG6MSaiW9k4qaxo4ZqJ1PZYIlvQSZH55Ibsb29hZ3xJ0KMaYIWRddT2dXWr9bSZISiQ9EZkkIv8UkbUiskZEru5mGhGR60Vko4i8LiLHBxFrLPP8DrlqmxVxGmMSZ7WvNplnSS8hUiLpAR3Al1V1LnAS8FkRmRs1zdnATP93JXDj4IYY39zxowjJoR3UGGMSYdX2OkbnZVFeNCLoUIaElEh6qlqtqiv86wZgHVAeNdkFwB3qvAgUicj4QQ41phHZGcwcV2CNWYwxCbVqez3zrBFLwqRE0oskIlOB44CXokaVA1sj3m/jyMQYnseVIrJMRJbV1tYmI8xuzSsvZNX2emvMYoxJiJb2TqqsEUtCpVTSE5F84AHgC6ra78eRq+rNqrpIVReVlJQkLsAezC8fxe7GVmrqWwdtmcaYoWv9zgY6rBFLQqVM0hORLFzCu0tVH+xmku3ApIj3E/2wlDHfn429vm1/wJEYY4aCVf5YYo1YEiclkp64wurfAutU9ScxJnsYuNy34jwJqFPV6kELshfmji+0xizGmISxRiyJlxl0AN4pwMeAVSKy0g/7OjAZQFVvApYC5wAbgWbgEwHEGZc1ZjHGJJI1Ykm8lEh6qvosEPdbVdc65LODE1H/zSsv5OmqwWs8Y4wZmlo7XCOWd8yaHnQoQ0pKFG8OJXPGF1Db0MruRmvMYozpv6qaRjq6lLkTRgUdypBiSS/B5o53O+j66oaAIzHGpLP1O90xZM54S3qJZEkvwWb7HXRddb/vuDDGGNZV15ObFWLqWHucUCJZ0kuwMSOzKR2Vw7qdlvSMMf23fmc9s0oLyAhZI5ZEsqSXBLPLRrHOijeNMf2kqqyrbmB2mRVtJpolvSSYM34UG3c10NbRFXQoxpg0VNvQyt6mNuaMLwg6lCHHkl4SzBlfQHunsml3Y9ChGGPS0FrfJmC2NWJJOEt6STDHGrMYYwYgXD0yx4o3E86SXhJMLx5JdkbIblswxvTL+p31lBeNoDAvK+hQhhxLekmQmRFiZmn+wSIKY4zpi3XV9cwus/q8ZLCklyRzxo86eHOpMcb0VmtHJ2/UNtlN6UliSS9JZpdZd2TGmL6rqmmks0uZbS03k8KSXpLMtcYsxph+CJcQ2T16yWFJL0kqfHl8ZY3dtmCM6b3KmgayM0NMK7bux5LBkl6SFOfnMHZkNlU1Vq9njOm9ypoGZpTkW/djSWJJL4lmluazwZKeMaYPKnc2UFGaH3QYQ5YlvSSqKC2gqqYR9/xbY4yJr6GlnR11LcwstUYsyWJJL4kqSgtobO1gR11L0KEYY9JA1S7XBmCWJb2ksaSXRBWl4cYsVsRpjOlZpW+5WWFJL2ks6SVRuFzeGrMYY3qjsqaREVkZTBw9IuhQhixLeklUlJfNuIIcNuy02xaMMT2rrGlgZmk+IWu5mTSW9JKsorSAql12pWeM6VllTQMzx1nRZjJZ0kuycAvOri5rwWmMiW1/cxu7GlqZVWa3KySTJb0kqyjN50B7J9v2HQg6FGNMCgv33mS3KySXJb0km2ktOI0xvRA+RljLzeSypJdk4Rac1jOLMSaeqpoG8nMymVCYG3QoQ5olvSQryM1iQmGu3bZgjIlrg2+5KWItN5PJkt4gmFlawAZ72oIxJo6qmkYqrOVm0lnSGwSzygp4o9Y9GNIYY6LtbmxlT1PbwUeSmeSxpDcIZo7Lp62jiy17moIOxRiTgg41YrHbFZLNkt4gmFVmLTiNMbFV1VhH04MlZZKeiNwqIrtEZHWM8WeISJ2IrPR/3xrsGPtrxjh39mZPUTfGdGdDTQOFI7IoKcgJOpQhLylJT5yPhhOTiEwWkcU9fOw24KwepnlGVRf4v+8mItbBkJedyaQxI+y2BWNMt6pq3INjreVm8iXrSu9Xw
MnApf59A/DLeB9Q1aeBvUmKJ3CzSgvstgVjzBFUlcqaRrspfZAkK+mdqKqfBVoAVHUfkJ2A+Z4sIq+JyF9F5OhYE4nIlSKyTESW1dbWJmCxAzeztIBNtU20dXQFHYoxJoXsamil7kC7Jb1Bkqyk1y4iGYACiEgJMNCj/QpgiqoeC9wA/CnWhKp6s6ouUtVFJSUlA1xsYswqLaCjS9lsLTiNMRGs+7HBlaykdz3wEDBORP4TeBb4wUBmqKr1qtroXy8FskSkeMCRDpKZpeHGLFbEaYw5JNzAzW5XGByZyZipqt4lIsuBdwECvF9V1w1kniJSBtSoqvpGMSFgz8CjHRxHleQTEqjc2QDHBB2NMSZVVO5sYOzIbMbmW8vNwZDQpCciYyLe7gLujhynqjEbqojI3cAZQLGIbAO+DWQBqOpNwIXAZ0SkAzgAXKKqadPFSW5WBlPHjrTbFowxh6nc1WBFm4Mo0Vd6y3H1eAJMBvb510XAW8C0WB9U1UtjjfPjfwH8ImGRBmBmaT6V9hR1Y4ynqlTVNPKh48uDDmXYSGidnqpOU9XpwOPAeaparKpjgXOBRxO5rHRUUVrA5t1NtLR3Bh2KMSYF7KhrobG1wx4cO4iS1ZDlJN/YBABV/SvwtiQtK21UlBbQpbCp1lpwGmOs5WYQkpX0dojIN0Rkqv+7FtiRpGWljQp7iroxJkLlTncssD43B0+ykt6lQAnutoWHgHEc6p1l2JpWPJLMkFjSM8YA7naF0lE5FOZlBR3KsJGsWxb2AlcnY97pLDszxLTikZb0jDGAK/Wxos3BlZSkJyL/xPfGEklV35mM5aWTirICVm2rCzoMY0zAurqUql0NXHbilKBDGVaSkvSAr0S8zgU+BHQkaVlppWJcAUtXVdPc1kFedrI2vzEm1W3d10xLe5f1xDLIklW8uTxq0HMi8nIylpVuZpXlowobdzVyzMSioMMxxgTkUPdjVrw5mJL1PL0xEX/FIvJeoDAZy0o3Mw+24LSeWYwZzsJ1+3aP3uBKVvlaZM8sHcCbwBVJWlZamTImj+yMkDVmMWaYq6xpoLxoBPk5Vs0xmJK1teeoakvkABGx3lSBzIwQR43Lt6RnzDDnHhxr9XmDLVn36T3fzbAXkrSstFNRmn/wplRjzPDT0dnFG7vsaelBSPRTFsqAcmCEiByHK94EGAXkJXJZ6ayitIA/r9xBQ0s7Bbl2U6oxw82Wvc20dXZZ0gtAoos33wssASYCP4kY3gB8PcHLSlvhHb1qVyPHTx4dcDTGmMEWLumxpDf4Epr0VPV24HYR+ZCqPpDIeQ8l4X72Knc2WNIzZhiqrGlEBGaMszq9wZbo4s2PquqdwFQR+VL0eFX9STcfG3Ymjh7BiKwMu23BmGGqsqaByWPyGJGdEXQow06iizdH+v92+hJHKCTugbLWgtOYYcn63AxOoos3f+3/fyeR8x2KZo4r4Jmq2qDDMMYMsraOLt7c3cR7ji4NOpRhKVkdTpcA/wJMjVyGqn4yGctLR7PK8nlgxTb2N7dRlJcddDjGmEHy5u4mOrrUrvQCkqyb0/8MPAM8DnQmaRlpLbI7ssXTxjW/iS4AABzjSURBVAQcjTFmsGywp6UHKllJL09Vv5qkeQ8JkU9Rt6RnzPBRVdNARkiYXjKy54lNwiWrR5a/iMg5SZr3kDChMJf8nExrzGLMMLNhZwNTx+aRk2ktN4OQrKR3NS7xHRCRehFpEJH6JC0rLYlYC05jhqMq634sUElJeqpaoKohVR2hqqP8+1HJWFY6m1VaYPfqGTOMtLR3smVPkyW9ACWr9ebx3QyuA7aoqj1B3ZtZWsA9r2xld2Mrxfn2EApjhrqNuxrpUmvEEqRkNWT5FXA8sMq/nw+sBgpF5DOq+miSlptWIrsjK55hSc+Yoa5ql6vOmFVm/XcEJVl1ejuA41R1oaouBBYAm4Azgf9O0jLTTvhZWlavZ8zwsGFnI1kZwpSx1nIzKMlKehWquib8RlXXArNVdVOSlpeWSgpyKMrLonKX1esZMxxU1TRwVEk+WRnJOvSaniSreHONiNwI3OPfXwys9U9Pb0/SMtOOiFAxrsAeKGvMMLGhpoHj7MkqgUrW6cYSYCPwBf+3yQ9rB96RpGWmpYqyfDbUNKCqQYdijEmixtYOtu07wKxSq88LUlKu9FT1APC//i+aleVFmF02ijtb3mL7/gNMHG0PlzdmqNqw092qPLvM7t4KUlKu9ERkpojcLyJrRWRT+K+Hz9wqIrtEZHWM8SIi14vIRhF5PcZtEWlnznj3A1hfbUWcxgxl6/xvfM4ES3pBSlbx5u+AG4EOXHHmHcCdPXzmNuCsOOPPBmb6vyv9/NPerDJ328K6auuwxpihbP3OegpyM5lQmBt0KMNaspLeCFV9AhBV3aKq1wHvi/cBVX0a2BtnkguAO9R5ESgSkfEJizgg+TmZTB6Tx3przGLMkLauuoE5ZaMQkaBDGdaSlfRaRSQEVInIVSLyAQb+NPVyYGvE+21+WNqbM77ArvSMGcK6upQNOxuYM956YglaMjuczgM+DywEPgZ8PEnLOoKIXCkiy0RkWW1t6j+dfHbZKN7c08SBNnv0oDFD0fb9B2hs7WD2eKvPC1qyWm++4l82Ap9I0Gy3A5Mi3k/0w7pb/s3AzQCLFi1K+XsB5owfharrmeXYSUVBh2OMSbC1viRnjiW9wCU06YnIw/HGq+r5A5j9w8BVInIPcCJQp6rVA5hfyggXeayrrrekZ8wQtL66AZFDXQ+a4CT6Su9kXL3b3cBLQK9rbEXkbuAMoFhEtgHfBrIAVPUmYClwDu6m92YSdwUZuEmj8xiZnWGNWYwZotZV1zN17EjyspPVCZbprUR/A2W4TqUvBT4CPALcHdkPZyyqemkP4xX4bCKCTDWhkDCrrOBgEYgxZmhZv7OeuXZ/XkpIaEMWVe1U1b+p6seBk3BXZU+KyFWJXM5QNHv8KNZX11t3ZMYMMU2tHWzZ22w9saSIhLfeFJEcEfkg7mb0zwLXAw8lejlDzZyyAupbOthR1xJ0KMaYBHJ968LsMrtdIRUkuiHLHcA8XP3bd1S12y7FzJHCrbrW7ainvGhEwNEYYxJlnbXcTCmJvtL7KK6bsKuB50Wk3v81iIhVWMUxZ/woRGDNDttMxgwla3bUUzgii4mj7WQ2FST0Sk9V7cmI/TQyJ5NpxSNZvaMu6FCMMQm0ZnsdR0+w7sdShSWpFDJvQiFrtlvSM2aoaO/sYt3OBuaVFwYdivEs6aWQeeWj2FHXwp7G1qBDMcYkwMZdjbR1dHG03a6QMizppZB5E9zZoNXrGTM0rPYlN3allzos6aWQo33Ss3o9Y4aGNTvqGZmdwbSxI4MOxXiW9FJIYV4Wk8aMYM12u9IzZihYvb2OuRNGEQpZI5ZUYUkvxcybUGhXesYMAZ1dytrq+oMlOCY1WNJLMfPKC9myp5m6A+1Bh2KMGYA3dzfR3NZp9XkpxpJeigm38lprjVmMSWtrdoQbsVjLzVRiSS/FHH2wBacVcRqTzlZvryMnM8SMEnuGXiqx
pJdiSgpyKBuVyyq7Sd2YtLZ6ez2zywrIzLDDbCqxbyMFzZ9YyGtb9wcdhjGmnzq7lNe37eeYiUVBh2KiWNJLQQsmFbF5TzP7mtqCDsUY0w8bdzXS1NbJgkmW9FKNJb0UdJz/oby2za72jElHK7fuA2DBZEt6qcaSXgqaP7EQEVhpRZzGpKWVW/czKjfTemJJQZb0UlBBbhYzx+Vb0jMmTb361n6OnVRkPbGkIEt6Keq4SaN5bet+VDXoUIwxfdDU2kFlTcPBagqTWizppagFk4vY19zOlj3NQYdijOmDVdvr6FKrz0tVlvRSVLjVlxVxGpNewr/ZY+12hZRkSS9FVZQWkJedYUnPmDSz8q39TB6Tx9j8nKBDMd2wpJeiMkLC/PJCXrWkZ0xaWbl1v92fl8Is6aWwBZOLWLejnpb2zqBDMcb0ws66FnbWt1jSS2GW9FLYwsmjaevssn44jUkTr2zeC8DxU0YHHImJxZJeCjth6hgAXn5zb8CRGGN64+U395KXncG8CfY4oVRlSS+FjR6ZzazSAl6ypGdMWnj5zb0snDLanqyQwuybSXGLp41h+ea9dHR2BR2KMSaOfU1tbKhp4MRpY4IOxcRhSS/FLZ42hqa2TtZW25PUjUll4fq8xdPGBhyJiceSXopbPM3q9YxJBy+/uZfszBDHTCwMOhQTR8okPRE5S0Q2iMhGEflaN+OXiEitiKz0f58KIs7BVjoql6lj86xez5gU9/LmvSyYVERuVkbQoZg4UiLpiUgG8EvgbGAucKmIzO1m0j+q6gL/d8ugBhmgxdPG8MrmvXR1WefTxqSixtYOVm+vs/q8NJASSQ9YDGxU1U2q2gbcA1wQcEwpY/G0sexvbqdqV2PQoRhjurF8yz669FB1hEldqZL0yoGtEe+3+WHRPiQir4vI/SIyKdbMRORKEVkmIstqa2sTHeugC589vvTmnoAjMcZ056VNe8gICcdPtpvSU12qJL3e+D9gqqoeAzwG3B5rQlW9WVUXqeqikpKSQQswWSaOHsGkMSN4pmp30KEYY7rx7MbdHDepiJE5mUGHYnqQKklvOxB55TbRDztIVfeoaqt/ewuwcJBiC5yIcNrMEp7fuJu2Drtfz5hUsqexlVXb6zi9Iv1PsIeDVEl6rwAzRWSaiGQDlwAPR04gIuMj3p4PrBvE+AJ3ekUJTW2dLN+yL+hQjDERnt24G1U4zZJeWkiJpKeqHcBVwN9xyexeVV0jIt8VkfP9ZJ8XkTUi8hrweWBJMNEG4+SjxpIZEp6uSv86SmOGkqc21DJmZDbzy+3+vHSQMgXQqroUWBo17FsRr68BrhnsuFJFQW4WC6eM5qkNtXz1rNlBh2OMAbq6lKerdnPqjGJCIQk6HNMLKXGlZ3rntIoS1lbXs6uhJehQjDHA2up6dje2Wn1eGrGkl0bCP6xnKq0VpzGpIFzd8PaK4oAjMb1lSS+NzB0/iuL8bKvXMyZFPLWhlrnjRzGuIDfoUEwvWdJLI6GQcFpFCU9uqKXdHjVkTKD2N7exfMs+Tp9lRZvpxJJemjnr6DLqDrTzwhvWO4sxQXpsbQ0dXcrZ88qCDsX0gSW9NHNaRQkjszP46+rqoEMxZlhbuqqaiaNH2K0KacaSXprJzcrgXXNK+fuaGnuaujEBqTvQzrMbd3PO/PGI2K0K6cSSXho6Z34Ze5va7Bl7xgTk8bU1tHda0WY6sqSXhs6YNY687AweWWVFnMYEYemqaiYU5rJgUlHQoZg+sqSXhnKzMnjH7HH8ffVOOu3BssYMqvqWdp6p2s3ZVrSZlizppan3zR/PnqY2nn/DblQ3ZjA9uqaGts4uzplvRZvpyJJemnrn7HEUjsjinle29jyxMSZh7nn5LaYVj7QHxqYpS3ppKjcrgw8dP5FH1+xkd2Nrzx8wxgxYZU0Dy7bs49LFk6xoM01Z0ktjHzlxEu2dygPLtwUdijHDwt0vv0V2RogPHT8x6FBMP1nSS2MzxhWweOoY7n75LbqsQYsxSdXS3skDy7fx3nlljM3PCToc00+W9NLcR06czOY9zby4ybolMyaZlq6qpr6lg0sXTwo6FDMAlvTS3FnzyijKy+L2FzYHHYoxQ5aqcvsLW5hWPJKTp48NOhwzAJb00lxuVgaXnzyVv6+pYV11fdDhGDMkPV21m9e27udTb59mDVjSnCW9IeCKU6ZRkJPJDf+oCjoUY4YcVeXnj1cyoTCXDy+0os10Z0lvCCjMy2LJKVNZumon63fa1Z4xifRM1W5WvLWfz7xjBtmZdshMd/YNDhFXnDqN/JxMbnhiY9ChGDNkqCo/f6KK8YW5XLTIblMYCizpDRFFedl84pSpPLKqmuVb7OkLxiTC31bvZPmWffzbGUeRk5kRdDgmASzpDSGfPv0oyotG8LUHVtHWYc/aM2Yg6g608+2H1zB3/CguXTw56HBMgljSG0JG5mTy/ffPo2pXIzc99UbQ4RiT1v7rb+vZ3djKf33oGDIz7FA5VNg3OcS8Y/Y4zjt2Ar/4x0Y27moIOhxj0tJLm/bwh5fe4pOnTGP+xMKgwzEJZElvCPrWuXPJz83kyjuWU9fcHnQ4xqSVHfsP8Nk/vMrkMXl86T0VQYdjEsyS3hBUUpDDTR9dyNZ9zfzbH5bT3mn1e8b0RlNrB1fcvozW9k5u+fgi8rIzgw7JJJglvSFq8bQx/PCDx/Dcxj1c+9Aqe8K6MT1oae/k83e/yoad9dzwkeOoKC0IOiSTBHYaM4RduHAib+1t5vonqtjX3M71lxzHiGxrdm1MtP3NbfzLHct4ZfM+vvf+eZwxa1zQIZkksSu9Ie5LZ1Zw3XlzeXxdDZf85kW27GkKOiRjUsq66no+eOPzvLa1jusvPY6PnTQl6JBMElnSGwaWnDKNX390IRtrGnjPT5/m+ieqaGnvDDosYwLV2NrB9/+ylnNveJb9ze3c+akTOf/YCUGHZZJMVFOjrkdEzgJ+DmQAt6jqj6LG5wB3AAuBPcDFqrq5p/kuWrRIly1blviA09DOuha+98haHnm9muL8bC4+YRKXnDCZSWPygg5tWFBVWju6ONDmTjgyMoQMETJC7i8zJNaD/yDYuKuBu156iweWb6OhtYNLTpjMV8+aRVFedtChpQwRWa6qi4KOIxlSIumJSAZQCZwJbANeAS5V1bUR0/wbcIyqflpELgE+oKoX9zRvS3pHev6N3dz67Gb+sb6GLoXpxSN524yxzB1fyPSSkZQXjWDUiCzyczLJCA3eQVhVUYUuVbr8/0Pv3TCNGhf5PiRCKASZoRAZ/nU4oYSTS7ykoqq0dyrtnV20dXTR3tlFS3sXze0dNLV2cqCtk6a2jsP/t3bS3N5Bc2snzW2dNLd1RP33r1s7aW7v7LFBUUggJzODkTmZ5Oe4/+515qFh2YeG5eVkuHHZh17nZR8aNyIrw2+L3n2Pqkpnl9Lp/3d0KZ2dh95H/nV0ue8l/D68/TPEbeeMkBAS/HD3HYTk0HcSivheQuKSfm/j7I3mtg72NLbx1t5mNtU2smp7Hc+/sYdt+w6QlSGcNW88nzp
1GsdOKkrYMocKS3rJDkLkZOA6VX2vf38NgKr+MGKav/tpXhCRTGAnUKI9rIAlvdi27z/AX1dV89zG3bz85l6a2o4s8szPyaQgNzNu7/LaiwTV1dV9QlMOf59sIriDa8QVliq0dXTR1s9bO7IzQ4zMziAvO5O87Az/51/nZJKXlUFezqHhI7IyCAkuoYQTTKcefN/S7pJqY2snTa0dNLZ20OT/wsMO9LF4Orze7ooy5JJRSA5LYOHXQQufpIQTaMhfBWdEfG/h/+7qGDq73AlLW2f4ZKWTlvbDv8/CEVmcNH0Mp8wo5ux54ykpyAloDVPfUE56qdJ6sxzYGvF+G3BirGlUtUNE6oCxwO7omYnIlcCVAJMnW595sZQXjeBTb5/Op94+na4uZUfdAd6obaKmroX6lnYaWjpoaOmgvqWdjh4SQsgfiMJn9hLxOiQg4g5OPU0TOjjdofeHpo/4vL9qEw6NCyfXw69C3PAOf7XS1XXkVUtIhKxMIScjRFZGiOxM95eVESInM8TInMzDE1m2uwIbkZ1BXlZGIF1UdXYpzW3uCrTJX0k2tnbQ3Nbh/7vk2NLeSWcXdHZ1HUxs4f9dqgcTR0YodPBK61ByPLzoNTwuJEJmRnTikSNOeroirgTD46KvDsPfUWdXl/sf8R11RcUa/n/4d+mWk5khZIW/vwwhJyuD0XnZjBmZxcTReRxVkk/pqBwrPjYpk/QSSlVvBm4Gd6UXcDhpIRQSJo7OY+Joq99LBxkhoSA3i4LcrKBDMSatpErrze1A5COJJ/ph3U7jizcLcQ1ajDHGmF5JlaT3CjBTRKaJSDZwCfBw1DQPAx/3ry8E/tFTfZ4xxhgTKSWKN30d3VXA33G3LNyqqmtE5LvAMlV9GPgt8HsR2QjsxSVGY4wxptdSIukBqOpSYGnUsG9FvG4BPjzYcRljjBk6UqV40xhjjEk6S3rGGGOGDUt6xhhjhg1LesYYY4aNlOiGLJlEpBbY0s+PF9NNjy9pxOIPVjrHn86xg8U/UFNUtSTA5SfNkE96AyEiy9K5/zmLP1jpHH86xw4Wv4nNijeNMcYMG5b0jDHGDBuW9OK7OegABsjiD1Y6x5/OsYPFb2KwOj1jjDHDhl3pGWOMGTYs6RljjBk2LOl1Q0TOEpENIrJRRL4WdDw9EZFJIvJPEVkrImtE5Go/fIyIPCYiVf7/6KBjjUdEMkTkVRH5i38/TURe8t/DH/1jp1KSiBSJyP0isl5E1onIyem0/UXki37fWS0id4tIbipvfxG5VUR2icjqiGHdbm9xrvfr8bqIHB9c5Adj7S7+//H7z+si8pCIFEWMu8bHv0FE3htM1EODJb0oIpIB/BI4G5gLXCoic4ONqkcdwJdVdS5wEvBZH/PXgCdUdSbwhH+fyq4G1kW8/y/gp6o6A9gHXBFIVL3zc+BvqjobOBa3Hmmx/UWkHPg8sEhV5+Ee73UJqb39bwPOihoWa3ufDcz0f1cCNw5SjPHcxpHxPwbMU9VjgErgGgD/W74EONp/5lf+OGX6wZLekRYDG1V1k6q2AfcAFwQcU1yqWq2qK/zrBtwBtxwX9+1+stuB9wcTYc9EZCLwPuAW/16AdwL3+0lSNn4RKQROwz3zEVVtU9X9pNH2xz1mbISIZAJ5QDUpvP1V9WncczUjxdreFwB3qPMiUCQi4wcn0u51F7+qPqqqHf7ti8BE//oC4B5VbVXVN4GNuOOU6QdLekcqB7ZGvN/mh6UFEZkKHAe8BJSqarUftRMoDSis3vgZ8B9Al38/FtgfcRBI5e9hGlAL/M4Xz94iIiNJk+2vqtuBHwNv4ZJdHbCc9Nn+YbG2dzr+pj8J/NW/Tsf4U5YlvSFERPKBB4AvqGp95Dh196ak5P0pInIusEtVlwcdSz9lAscDN6rqcUATUUWZKb79R+OuJqYBE4CRHFn0llZSeXv3RESuxVVZ3BV0LEORJb0jbQcmRbyf6IelNBHJwiW8u1T1QT+4JlyM4//vCiq+HpwCnC8im3HFye/E1ZEV+eI2SO3vYRuwTVVf8u/vxyXBdNn+7wbeVNVaVW0HHsR9J+my/cNibe+0+U2LyBLgXOAyPXQTddrEnw4s6R3pFWCmb7mWjatAfjjgmOLy9V+/Bdap6k8iRj0MfNy//jjw58GOrTdU9RpVnaiqU3Hb+x+qehnwT+BCP1kqx78T2Cois/ygdwFrSZPtjyvWPElE8vy+FI4/LbZ/hFjb+2Hgct+K8ySgLqIYNGWIyFm4Iv7zVbU5YtTDwCUikiMi03ANcl4OIsYhQVXtL+oPOAfXeuoN4Nqg4+lFvKfiinJeB1b6v3Nw9WJPAFXA48CYoGPtxbqcAfzFv56O+3FvBO4DcoKOL07cC4Bl/jv4EzA6nbY/8B1gPbAa+D2Qk8rbH7gbV//YjrvSviLW9gYE1yL7DWAVrpVqKsa/EVd3F/4N3xQx/bU+/g3A2UHHn85/1g2ZMcaYYcOKN40xxgwblvSMMcYMG5b0jDHGDBuW9IwxxgwblvSMMcYMG5b0zKATkU4RWRnxNzXomBJFRI4Tkd/612eISF3Een4rYrojetn3w2P2tB8xzQQRuT96eJyYNovIAxHvLxSR2/q1gvGXc0u8ztlFZImITIh4f4+IzEx0HMbEY0nPBOGAqi6I+NscHuFvIE7n/fLrwPUR75+JWM/vRgy/je67+uq2p/1IqrpDVS884pPxLUz200JU9VOqujbOJEtw3ZyF3Yi7GduYQZPOBxczRIjIVP+csDtwN0dPEpF/F5FX/BXPdyKmvVZEKkXkWXHPffuKH/6kiCzyr4t9l2bhZ/T9T8S8/tUPP8N/JvwMvLt8bySIyAki8ryIvCYiL4tIgYg8LSILIuJ4VkSOjVqPAuAYVX2tp3XW7p8SgMbuaT96e632r4/2Ma706xfryul/cTc4R84nJO7ZcyUR7zeKSInvkegFEVklIt8XkcaI7faXiHn8wneddfA78Nv8NnHP5lsl7ll9FwKLgLt8rCOAZ4B3y6GuzoxJOkt6JggjIor8HvLDZgK/UtWjgVn+/WJcTycLReQ0EVmI66ZsAa7HmRN6sawrcN1OneCn/xfflRO4p1F8AffcxOnAKeK6nvsjcLWqHovrl/IArpu3JQAiUgHkdpPcFuGSdqSTffL8q4gc3Yt4I0X2tB/Lp4Gfq+oCv/xtMaa7FzheRGaEB6hqF3AncJkf9G7gNVWtxfV9eqOqzsf1HNIXC4ByVZ3nP/87Vb0f12PNZf6q94Bf/kbc8weNGRSW9EwQIos3P+CHbVH3rDOA9/i/V4EVwGxcEnw78JCqNqt7ikRv+kR9D67fxZW4xy2N9fMCeFlVt/mD70pgKi7hVqvqKwCqWu+vvO4DzhXXsfcnccWT0cbjHjEUtgKY4pPnDbjuyXpFet/T/gvA10Xkq35ZB2JM1wn8D0cWl94KXO5ffxL4nX99Cq6rLHDdkvXFJmC6iNwgrj/J+jjT7uLwIk9jksqSnkkVTRGvBfhhRGKcoaq/7eHzHRzan3Oj5vW5iHlNU9VH/bjWiOk6cY8I6pa6Do
Afwz2C5yK6T0YHIpftE2ajf70UyBKR4h7WI1ZP+7Hi+gNwvl/2UhF5Z5zJf4972O3BHvtVdSvu6QTvxF1ZR15ZdrfsyO0Mh2/r8Dz34a7ensRdid4SJ6ZcH7sxg8KSnklFfwc+Ke75gIhIuYiMA54G3i8iI3z92XkRn9kMLPSvL4ya12f8FRoiUiHuAa+xbADGi8gJfvqCiDqnW3CNVF7xB/Zo64CDxYciUhZRT7gY93vbE2/FJXZP+7Gmnw5sUtXrcU8VOCbWtOoeG/RT4ItRo27BFXPep6qdfthzuKJkOFT8CbAFmCuux/8i3BMZomMqBkKq+gDwDdxjlgAagIKoySs4skjYmKSxpGdSjr8S+wPwgoiswj2frkBVV+Dq217DXZG8EvGxH+OS26tA5NXULbjH5KzwjT9+TfwrujbgYuAGEXkNd3WX68ctxxXV/S7GZ9cDhT4hg0u+q/18rgcuCV+5icjduKLJWSKyTUSu8J/5BS4xPObrPG+Kv7W4yC9jJTAPuKOH6X/Lkev/MJAftV5XA5/12//gU7r9leG9uER1L64IOlo58KSP6U4OFaneBtwUbsgiIqW4ou6dPcRsTMLYUxZM2hKR64BGVf3xIC1vAq7IbravB+xumi8CDaoar0gvpfhWrz9V1bfHmaZRVfMTvNwvAvW9KLo2JmHsSs+YXhCRy3ENYa6NlfC8Gzm8rjClicjXgAfo5n7AQbAfuD2A5ZphzK70jDHGDBt2pWeMMWbYsKRnjDFm2LCkZ4wxZtiwpGeMMWbYsKRnjDFm2Ph/2hPcK/6KS8EAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "N = 4\n", - "f, H_im = sig.freqz(h)\n", - "posfreq = np.square(H[0:512//N])\n", - "negfreq = np.flipud(np.square(H[0:512//N]))\n", - "plt.plot((np.abs(posfreq) + np.abs(negfreq)))\n", - "plt.xlabel('Frequency (512 is Nyquist)')\n", - "plt.ylabel('Magnitude')\n", - "plt.title('Unity Condition, Sum of Squared Magnitude of 2 Neighboring Subbands')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "b = sig.firwin(80, 0.5, window=('kaiser', 8))" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAb0AAAEWCAYAAADy9UlpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3deZwcZZnA8d/Tc2Yyk5kkM5lJJjfJ5CCBQEIAQcADBeTwQA5RjOKyuqJ47Srigcequ+t6gAoiIiAIciorUTmU+0xCIPdMCAk5JpPJNWfmfvaP9+2k05nuubqnumee7+czn+muqq56qrq6nqr3festUVWMMcaY4SAUdADGGGPMYLGkZ4wxZtiwpGeMMWbYsKRnjDFm2LCkZ4wxZtiwpGeMMWbYSHrSE5GbROSbyV5OMoiIisgM/zrueojI10XklsGLLrWIyAgR+T8RqROR+4KOJ5FEZLOIvDvoOGJJ9G9MRG4Tke8nan4R8y0VkadFpEFE/jfR8+9HPGtE5IxeThtzHxCRM0RkWwLjukxEHk3U/BJFRJ4UkU/FGDfVHy8zByGOAS2rx6QXeeCPGHadiNzZmwWo6qdV9Xv+cwPeOUSkQkTuE5Hd/gD7uoh8SUQyBjLfnvS0Hqr6A1XtdodIJBGZKCIPRKz/ahFZkuzl9sKFQCkwVlU/HD1SRIpE5FYR2ekPepUi8rXBDzOxfIJQEbkgavhP/fAlyY4h0b+xJLoS2A2MUtUvR48UkX/3+3ODiLwpIv8ea0YRB76lUcPvFJHrehOMqh6tqk/2bRWST1XvUtX3JGPeInKqiDzvjx17ReQ5ETkhGctKVWlVvCkiRwEvAVuB+apaCHwYWAQUBBnbIPo9bv2nAGOBjwE1gUbkTAEqVbUjxvifAvnAHKAQOB/YOEixHZSkM9FK4PKoZVwEvJGEZaWzKcBajd0jhuC242jgLOAqEbmkh3meKCJvS2CMgUrmlZKIjAL+AtwAjAHKge8ArclaZkpS1bh/gAIzooZdB9zpX58BbAO+DOwCqoFPREx7G/B9YCRwAOgCGv3fBKAZd3UQnv54oBbI6iaWO4FHeoj3fGANsB94EpgTMW4z8BXgdaAO+COQGzH+3338O4BPRq57L9bj4DYZaBw9rF8jsCDGuDOAbVHDNgPvjvje7vPbsQFYBVQA1/jvbivwnjjLnuPXZb9ft/P98O8AbUC7j++Kbj67Gnh/nHmfCaz32+MXwFPAp6L3N/9+qv9uMv37TwDr/DptAv41epsAXwV24k4aQsDXcElpD3AvMCbiMx8Dtvhx10Zuw27ivg34Me7EY7Qfdi7wV+BZYIkfdhTwDz/P3cBdQFHUfv+qX4f7/D7x/QT9xm4Lz6u7/QQ4Dljhl/1H4J6o6c8FVvrv/XngmDjf49uAV/z3+ArwtogY2/1+0hhre0bN63rghhjjwvvAV4F/Rh0jrutN7Bz+2xgB3A7s8/vSf0Rto83E+M1GfD9f99/tZuCyiM8WAnfgjmtbgG8AIT9uCfAc7qRwj/8elwDPRh2DPw1U+fX4JSB+XAbwv365bwJXEfHbiNpmi4D9cbb3dcT/nT0J/BB4GagH/oz/3URMeyXu+FkNfCViXouBF3z81bjfeHYf1vHHfh03AZ+NimuJH97gt8FlsdZRVRN2pVeG+2LLgSuAX4rI6MgJVLUJOBvYoar5/m8HbkNeFDHpx4B7VLW9m+W8G7g/VhAiUgHcDXwBKAGWAv8nItkRk12EO4ucBhyD22CIyFm4nfpMYKZf1hHirEdC4uiFF3Hb9xIRmdzLz0Q6D3fgH407yP4dlwTKge8Cv+7uQyKSBfwf8CgwDvgccJeIzFLVbwM/AP7ot8dvY8T9nyLyCRGZGTXvYuBB3MGgGJeMTunDOu3CHdxG4RLgT0Xk+IjxZbgz2ym4H+XngPcDp+OSwj7cjwwRmQvciNsPJ+Cupif2sPwW3AEgfFVyOe4gd9hq4g4YE3AnD5NwBxn8fvEQLjGMwe07H4j6/EB+YzH5Zf8Jt0+MwSXcD0WMPw64FfhX3Lb4NfCwiOR0M68xwCO4ZDUW+AnwiIiMVdUluET/3z6ux3uIS4C3406u4vkVUNFdfVtfYge+jTtwT8cdAz7azTTxfrNluH23HPg4cLOIzPLjbsB9d9Nx+9zluP007ETcQbsU+M8Y63kucIJf7kXAe/3wf8F95wtwJ07vj/F5cCUSnSJyu4icHb3/9NLluAuC8UAH7ruO9A7c8fM9wFcjvpdO4Iu4bXQy8C7g36I+G28dz8WdnC3CVaUAICIjfQxnq2oB7qRrZbwVSFTSawe+q6rtqroUdyY3q4fPhN2O38F8vdyluB9gd8bizhJiuRh3JfiYT5o/xp3B/UlEduEOONer6g5V3Ys7iC/wn70I+J2qrvYHj+viLOcJYJyIrBSRh/sQR2QxTKw4evJh4Bngm8CbPoa+lMk/o6p/V1cMeR8uKf/Ix3kPMFVEirr53Em44skfqWqbqv4DV1RyaS+X+zncQe8qYK2IbBSRs/24c4A1qnq/j+NnuKuyXlHVR1T1DXWewiXmt0dM0gV8W1VbVfUA7ozyWlXdpqqtuO/6Ql+0dCHwF1V92o/7pv98T+4ALvfb7nRcIomMcaPfH1pVtRaXEE73o08CMnH7RLuqPog7m440kN9YPCcBWcDP/Lzvx12hhV0J/FpVX1LVTlW9HVccdlI383o
fUKWqv1fVDlW9G3f1fl4/4roOd3z6XQ/THcAliu4a3vQl9ouAH6jqPlXdxpEHc+j5N/tN//0+hUv+F/lj2iXANaraoKqbcVdmH4v43A5VvcFvswMx1vNHqrpfVd8C/snhx62f+315H/CjGJ9HVeuBU3FXSb8BakXkYREpjfWZbvw+4hj5zYh1DPuOqjap6ircd3epX/ZyVX3Rr+Nm3AnI6VHzjreOP1PVrX7b/zDqc13APBEZoarVqhr3RKk3Sa8T96OIlIX7EYbt0cPrcppxB8je+DMwV0Sm4c6w6lQ1+gd/cDm4M4xYJuCKDwBQ1S5ckd0y3BkaHH4wjYxzgp82bAuxtQG7VHWBqp7fhzjKI6aJFUdc/kf5NVU9GndmuBKX1KU3n+fw+r8DwG5V7Yx4T4xYJgBb/bqEbeHwdYoX9wF1jX0W4k5e7gXu81cHh217dWUWW7uf05H8WeuLvmJ+Py6JFkdMUquqLRHvpwAPich+P/063H5e2k0sTbj9rqf1exZ3AnEtLmkedvAS13LxHhHZLiL1uGK4cIwTgO1+vcOi138gv7F4ult25L4/BfhyeFv57TXJf667eUX/bnq9j4SJyFW4K4r3+ROPntwClIpIdHLta+yR27y7/S/eb3af31fCtvh5FuOOl1uixkVuk97s6709bsWdl6quU9UlqjoRmOc//7NeLL+7+W/BrVtxnPET4GADxL/4hmz1uJKhyM9BP47NfptfjDuRrRaRR0RkdrwV6E3Sewt32R9pGvGTQixHVGD7g9G9uKu9jxH7Kg/gcSKKXrqxA7ejAweLSCbh6lL2Rk7oG8V8FDhbRJ7BbeRJEZPEKzrs6dEUseLY3sPn+kRVd+OuIifgiqaagLyI5WbgDsSJsAOYJCKR+8xk+rFO/ozzB7g6qGm4q/eD2z5ie4Udtl64oqTwtDnAA7jtUKqqRbji5MiTgOjvayuuOKQo4i9XVbd3E0seLkn3xp24erfook1w66u4BlijcPteOMZqoDzqxGUS/dPdvhlz+8VYduS+vxX4z6htleev4qIdtt9HzKvX+4iIfBJX3/ouf8XVI1Vtw9Urf4/Dv/e+xF7N4cXYfd3+o31RW9hk3PbYjbtAmBI1LnKbDORRN/2OW1XX44rU5/lB8faT7uY/Gbduu+OMDxev34i76p/p9/+vc/h3Fc9hv0mijs2+5OpM3AXRetxVbEy9SXp/BL4hrql8yJfRnkecurU4aoCxIlIYNfwOXPn4+cRPet8G3iYi/yMiZQAiMsM3Uy7CJc/3ici7fB3Ul3HFGc93M6+bcQfHv+Lq8qYDS0Rkrj/QfTtOHNnABBF5RUS6K0PvSxxHENcU+4wY4/5LROaJSKaIFACfATaq6h5cmX2uiLzPL/cbQHf1F/3xEu7E4D9EJMvHdx6uSLRHIvJNETlBRLJFJBe4GldhvQFXFHS0iHzQFzF+nsN/cCuB00Rkst93rokYl41bx1qgwxeZ9tTc+yZc/eIUH1uJHLrl4H7gXHFNu7Nx9Zy9rQa4Hlda8XQ34wpwRZJ1IlKOazQV9gLuSvMq/71egKv474/ufmMrgXNEZIz/3XwhatkdwOf99/rBqGX/Bvi0iJwozki/f3XXWnoprn7tI349Lgbm4orBeyQil+FODs5U1U29XWHv90Auh0p0+hr7vcA1IjLafz9X9XH5AN/x+/fbcXVQ9/lSlHtx+1uB3+e+hDtBSoR7gatFpNwfA78aa0IRmS0iXxaRif79JFzx44t+kni/s7CPRhwjvwvcH1FSBPBNEckTkaNx9ZZ/9MMLcI1fGv2V2Gf6uI6f9zloNO6kKLxOpSJygT/haMX9xuJWR/Tmx/xd3MH6WVyF/3/jWses7kPQwMEzi7uBTb64YYIf/pwPdIWqxryCVNU3cJWgU4E1IlKHO8tfBjSo6gbcGfQNuLOP84Dz/JngQSKSj6tfuwhXFPZr3IHzZ7iWVHW4FlkAS0VkNe5gFjYFd4Y0A3hQRE6OirNXcXTH74jhlpXdycM1etiPq/yegjtZQFXrcJXDt+DOJJtwrcoGzMd+Hq7SfDeuAcHl/jvt1SxwZfy7cWd/Z+KKrxr9FeuHcfURe3AV4c9FLPsx3I/ndWA5EQdRVW3AJcl7cfvnR4Du6lkj/dxP86iINOB+9Cf6+a3BtQ77A+4Mcx+93IaquldVn4gqKgz7Dq6hQR0uyT8Y8bk24IO4Bir7cfvOX+hHU/IYv7HfA6/hWhU+yqEDUeSyl+BKQy6Oim0ZriHBL3DbYiMxGl35E69zcSd5e3AtIM/1329vfB93Vf2KiDT6v5t6ud6dwLdwJR59jh13nNuGa/33OO7kpy/bf6dfxg5c3fWnI34bn8P9FjfhjqN/wDWwSYTf4L7T13EN05biTmI6u5m2AbefvyQiTbj9fjXu+4r7O4vwe9yxbyfuJOPzUeOfwm3nJ4Afq2r4Jvuv4H6bDT7mP9J7v8E1uHsN18r4wYhxIdxJxA7c/ns6PSRU6f73OfhE5B/AH1Q1Kb2aiMhUXF3LPHH3q2xQ1Xj1g72d721+vv258u1ufh8FjlbV7s6yhg0ReRLXfHpY9nIjIi8BN6lqTw05TBKIyGeAS1Q1urFFSvMlHTepanQxs/FS4uZ0ca0Pj6dv2b/ffJ3SmyLyYb98EZFje/NZX/yR418X45rWr01gbHcO94Q3HInI6SJS5osFP45rtv23oOMaLkRkvIicIq4KZxbu6uehoOPqibju/87x+005rlom5eMOUuBJT0RuxxUnfMEXVSVjGXfj6i5micg2EbkCuAy4QkRew90LdEG8eUSYAyzzn/snrpltwpKeGbZm4Ypv9uMOuBeqarzbc0xiZeOqORpwDd/+jCvCT3WCKzrfhyveXIcr5jUxpEzxpjHGGJNsgV/pGWOMMYMl6Y+BCFpxcbFOnTo16DCMMSZtLF++fLeqJuoe35Qy5JPe1KlTWbZsWdBhGGNM2hCR/nQ+khaseNMYY8ywYUnPGGPMsJESSU9EJonIP0VkrYisEZGru5nmDHFP+13p/6xZrjHGmD5JlTq9DuDLqrrC94u3XEQe6+b+t2dU9dwA4jPGGDMEpMSVnn8G0gr/ugF3g2WfHkdijDHG9CQlkl4k30fmcbhe/aOdLCKvichffS/eseZxpYgsE5FltbW1SYrUGGNMukmppOeffvAArkuy+qjRK4Apqnos7ukFf4r+fJiq3qyqi1R1UUnJkLzVxBhjTD+kTNIT9/y3B4C7VPXB6PGqWq+qjf71UiDLd/icFD99rJIVb+1L1uyNMSYpVJWnK2u55Zm+PpJweEiJpCciAvwWWKeqP4kxTZmfDhFZjIt9TzLi2dfUxl0vvcUHf/U8V96xjI27GpOxGGOMSajXt+3nI795ictvfZm7XnqL1o7uHqs3vKVEh9MicirwDO7BqeGn3n4d/1h4Vb1JRK7CPRywAzgAfElVe3wS+aJFi7Q/PbI0tXbwu+fe5NdPbQKBx790OqWjcvs8H2OMGQyVNQ287/pnGJWbxefeOYNLT5xMTmZGv+YlIstVdVGCQ0wJKZH0kq
m/SS9sU20jZ/38Gc6cW8ovP3J8AiMzxpjE6OpSLr75Bap2NfL4l06nOD9nQPMbykkvJYo3U9n0knw+944ZPPJ6Nf9cvyvocIwx5gj3Ld/KK5v38fWz5ww44Q11lvR64crTpzNjXD7f+NNqmts6gg7HGGMO2t3Yyg+WrmfxtDF8eNHEoMNJeZb0eiEnM4PvXTCP7fsPcP/ybUGHY4wxB9367Js0tLTzgw/Mw7f1M3FY0uulk48ay9zxo7h32dagQzHGGAA6u5QHVmzjjFnjmDGuIOhw0oIlvT64aNFEVm+vZ+2O6PvmjTFm8D1dVUtNfSsXWbFmr1nS64MLFpSTnRHivuV2tWeMCd59y7YyZmQ275xdGnQoacOSXh+MHpnNmUeX8qdXt9tNn8aYQO1tauOxtTW8f0E52Zl2KO8t21J9dNGiSexrbueJdXb7gjEmOH9euZ32TuWiE6xosy8s6fXRqTOKGV+Yy4MrrBWnMSY4D67YzvzyQmaXjQo6lLRiSa+PMkLCmXNLeW7jHiviNMYEYldDC6u213HWvLKgQ0k7lvT64fSKEg60d7Jssz2FwRgz+J6p3A24Y5HpG0t6/XDyUWPJzgjx5Aar1zPGDL6nKmspzs9h7ngr2uwrS3r9kJedyeJpY3iq0p7KbowZXJ1dyjNVtZxWUUwoZD2w9JUlvX46vaKEyppGduw/EHQoxphh5PVt+9nX3G5Fm/1kSa+fzpjldji72jPGDKanKmsRgdNmWtLrD0t6/TRjXD4TCnN5aoMlPWPM4HmqspZjJxYxemR20KGkJUt6/SQinD6rhOc27qa9s6vnDxhjzADta2rjta37rWhzACzpDcCpM0poaO1gjXVAbYwZBC9v3kuXwqkzi4MOJW1Z0huARVNHA7Bs896AIzHGDAfLt+wjOyPE/PLCoENJW5b0BqB0VC4TR49g+Ra7Sd0Yk3zLNu9l/sRCcrMygg4lbVnSG6BFU0azbMs+VDXoUIwxQ1hLeyert9ezaMrooENJa5b0Bmjh1DHUNrSyda/dr2eMSZ5V2+to6+xioSW9AbGkN0Dhs65lW6xezxiTPOG+fi3pDYwlvQGqKC2gICeTZVavZ4xJouVb9jK9eCRj83OCDiWtWdIboIyQcNyU0Sy3Jy4YY5JEVVm+ZZ9d5SWAJb0EWDRlNJW7Gqg70B50KMaYIeiN2ib2NbcfvE3K9J8lvQRYNGU0qvDqW3a1Z4xJvOW+zcDCKWMCjiT9WdJLgAWTi8gICSusXs8YkwQrtuynKC+Lo0pGBh1K2rOklwB52ZnMKMln1fa6oEMxxgxBr2+vY355ISL2/LyBsqSXIPMnFrJqe53dpG6MSaiW9k4qaxo4ZqJ1PZYIlvQSZH55Ibsb29hZ3xJ0KMaYIWRddT2dXWr9bSZISiQ9EZkkIv8UkbUiskZEru5mGhGR60Vko4i8LiLHBxFrLPP8DrlqmxVxGmMSZ7WvNplnSS8hUiLpAR3Al1V1LnAS8FkRmRs1zdnATP93JXDj4IYY39zxowjJoR3UGGMSYdX2OkbnZVFeNCLoUIaElEh6qlqtqiv86wZgHVAeNdkFwB3qvAgUicj4QQ41phHZGcwcV2CNWYwxCbVqez3zrBFLwqRE0oskIlOB44CXokaVA1sj3m/jyMQYnseVIrJMRJbV1tYmI8xuzSsvZNX2emvMYoxJiJb2TqqsEUtCpVTSE5F84AHgC6ra78eRq+rNqrpIVReVlJQkLsAezC8fxe7GVmrqWwdtmcaYoWv9zgY6rBFLQqVM0hORLFzCu0tVH+xmku3ApIj3E/2wlDHfn429vm1/wJEYY4aCVf5YYo1YEiclkp64wurfAutU9ScxJnsYuNy34jwJqFPV6kELshfmji+0xizGmISxRiyJlxl0AN4pwMeAVSKy0g/7OjAZQFVvApYC5wAbgWbgEwHEGZc1ZjHGJJI1Ykm8lEh6qvosEPdbVdc65LODE1H/zSsv5OmqwWs8Y4wZmlo7XCOWd8yaHnQoQ0pKFG8OJXPGF1Db0MruRmvMYozpv6qaRjq6lLkTRgUdypBiSS/B5o53O+j66oaAIzHGpLP1O90xZM54S3qJZEkvwWb7HXRddb/vuDDGGNZV15ObFWLqWHucUCJZ0kuwMSOzKR2Vw7qdlvSMMf23fmc9s0oLyAhZI5ZEsqSXBLPLRrHOijeNMf2kqqyrbmB2mRVtJpolvSSYM34UG3c10NbRFXQoxpg0VNvQyt6mNuaMLwg6lCHHkl4SzBlfQHunsml3Y9ChGGPS0FrfJmC2NWJJOEt6STDHGrMYYwYgXD0yx4o3E86SXhJMLx5JdkbIblswxvTL+p31lBeNoDAvK+hQhhxLekmQmRFiZmn+wSIKY4zpi3XV9cwus/q8ZLCklyRzxo86eHOpMcb0VmtHJ2/UNtlN6UliSS9JZpdZd2TGmL6rqmmks0uZbS03k8KSXpLMtcYsxph+CJcQ2T16yWFJL0kqfHl8ZY3dtmCM6b3KmgayM0NMK7bux5LBkl6SFOfnMHZkNlU1Vq9njOm9ypoGZpTkW/djSWJJL4lmluazwZKeMaYPKnc2UFGaH3QYQ5YlvSSqKC2gqqYR9/xbY4yJr6GlnR11LcwstUYsyWJJL4kqSgtobO1gR11L0KEYY9JA1S7XBmCWJb2ksaSXRBWl4cYsVsRpjOlZpW+5WWFJL2ks6SVRuFzeGrMYY3qjsqaREVkZTBw9IuhQhixLeklUlJfNuIIcNuy02xaMMT2rrGlgZmk+IWu5mTSW9JKsorSAql12pWeM6VllTQMzx1nRZjJZ0kuycAvOri5rwWmMiW1/cxu7GlqZVWa3KySTJb0kqyjN50B7J9v2HQg6FGNMCgv33mS3KySXJb0km2ktOI0xvRA+RljLzeSypJdk4Rac1jOLMSaeqpoG8nMymVCYG3QoQ5olvSQryM1iQmGu3bZgjIlrg2+5KWItN5PJkt4gmFlawAZ72oIxJo6qmkYqrOVm0lnSGwSzygp4o9Y9GNIYY6LtbmxlT1PbwUeSmeSxpDcIZo7Lp62jiy17moIOxRiTgg41YrHbFZLNkt4gmFVmLTiNMbFV1VhH04MlZZKeiNwqIrtEZHWM8WeISJ2IrPR/3xrsGPtrxjh39mZPUTfGdGdDTQOFI7IoKcgJOpQhLylJT5yPhhOTiEwWkcU9fOw24KwepnlGVRf4v+8mItbBkJedyaQxI+y2BWNMt6pq3INjreVm8iXrSu9XwMnApf59A/DLeB9Q1aeBvUmKJ3CzSgvstgVjzBFUlcqaRrspfZAkK+mdqKqfBVoAVHUfkJ2A+Z4sIq+JyF9F5OhYE4nIlSKyTESW1dbWJmCxAzeztIBNtU20dXQFHYoxJoXsamil7kC7Jb1Bkqyk1y4iGYACiEgJMNCj/QpgiqoeC9wA/CnWhKp6s6ouUtVFJSUlA1xsYswqLaCjS9lsLTiNMRGs+7HBlaykdz3wEDBORP4TeBb4wUBmqKr1qtroXy8FskSkeMCRDpKZpeHGLFbEaYw5JNzAzW5XGByZyZipqt4lIsuBd
wECvF9V1w1kniJSBtSoqvpGMSFgz8CjHRxHleQTEqjc2QDHBB2NMSZVVO5sYOzIbMbmW8vNwZDQpCciYyLe7gLujhynqjEbqojI3cAZQLGIbAO+DWQBqOpNwIXAZ0SkAzgAXKKqadPFSW5WBlPHjrTbFowxh6nc1WBFm4Mo0Vd6y3H1eAJMBvb510XAW8C0WB9U1UtjjfPjfwH8ImGRBmBmaT6V9hR1Y4ynqlTVNPKh48uDDmXYSGidnqpOU9XpwOPAeaparKpjgXOBRxO5rHRUUVrA5t1NtLR3Bh2KMSYF7KhrobG1wx4cO4iS1ZDlJN/YBABV/SvwtiQtK21UlBbQpbCp1lpwGmOs5WYQkpX0dojIN0Rkqv+7FtiRpGWljQp7iroxJkLlTncssD43B0+ykt6lQAnutoWHgHEc6p1l2JpWPJLMkFjSM8YA7naF0lE5FOZlBR3KsJGsWxb2AlcnY97pLDszxLTikZb0jDGAK/Wxos3BlZSkJyL/xPfGEklV35mM5aWTirICVm2rCzoMY0zAurqUql0NXHbilKBDGVaSkvSAr0S8zgU+BHQkaVlppWJcAUtXVdPc1kFedrI2vzEm1W3d10xLe5f1xDLIklW8uTxq0HMi8nIylpVuZpXlowobdzVyzMSioMMxxgTkUPdjVrw5mJL1PL0xEX/FIvJeoDAZy0o3Mw+24LSeWYwZzsJ1+3aP3uBKVvlaZM8sHcCbwBVJWlZamTImj+yMkDVmMWaYq6xpoLxoBPk5Vs0xmJK1teeoakvkABGx3lSBzIwQR43Lt6RnzDDnHhxr9XmDLVn36T3fzbAXkrSstFNRmn/wplRjzPDT0dnFG7vsaelBSPRTFsqAcmCEiByHK94EGAXkJXJZ6ayitIA/r9xBQ0s7Bbl2U6oxw82Wvc20dXZZ0gtAoos33wssASYCP4kY3gB8PcHLSlvhHb1qVyPHTx4dcDTGmMEWLumxpDf4Epr0VPV24HYR+ZCqPpDIeQ8l4X72Knc2WNIzZhiqrGlEBGaMszq9wZbo4s2PquqdwFQR+VL0eFX9STcfG3Ymjh7BiKwMu23BmGGqsqaByWPyGJGdEXQow06iizdH+v92+hJHKCTugbLWgtOYYcn63AxOoos3f+3/fyeR8x2KZo4r4Jmq2qDDMMYMsraOLt7c3cR7ji4NOpRhKVkdTpcA/wJMjVyGqn4yGctLR7PK8nlgxTb2N7dRlJcddDjGmEHy5u4mOrrUrvQCkqyb0/8MPAM8DnQmaRlpLbI7ssXTxjW/iS4AABzjSURBVAQcjTFmsGywp6UHKllJL09Vv5qkeQ8JkU9Rt6RnzPBRVdNARkiYXjKy54lNwiWrR5a/iMg5SZr3kDChMJf8nExrzGLMMLNhZwNTx+aRk2ktN4OQrKR3NS7xHRCRehFpEJH6JC0rLYlYC05jhqMq634sUElJeqpaoKohVR2hqqP8+1HJWFY6m1VaYPfqGTOMtLR3smVPkyW9ACWr9ebx3QyuA7aoqj1B3ZtZWsA9r2xld2Mrxfn2EApjhrqNuxrpUmvEEqRkNWT5FXA8sMq/nw+sBgpF5DOq+miSlptWIrsjK55hSc+Yoa5ql6vOmFVm/XcEJVl1ejuA41R1oaouBBYAm4Azgf9O0jLTTvhZWlavZ8zwsGFnI1kZwpSx1nIzKMlKehWquib8RlXXArNVdVOSlpeWSgpyKMrLonKX1esZMxxU1TRwVEk+WRnJOvSaniSreHONiNwI3OPfXwys9U9Pb0/SMtOOiFAxrsAeKGvMMLGhpoHj7MkqgUrW6cYSYCPwBf+3yQ9rB96RpGWmpYqyfDbUNKCqQYdijEmixtYOtu07wKxSq88LUlKu9FT1APC//i+aleVFmF02ijtb3mL7/gNMHG0PlzdmqNqw092qPLvM7t4KUlKu9ERkpojcLyJrRWRT+K+Hz9wqIrtEZHWM8SIi14vIRhF5PcZtEWlnznj3A1hfbUWcxgxl6/xvfM4ES3pBSlbx5u+AG4EOXHHmHcCdPXzmNuCsOOPPBmb6vyv9/NPerDJ328K6auuwxpihbP3OegpyM5lQmBt0KMNaspLeCFV9AhBV3aKq1wHvi/cBVX0a2BtnkguAO9R5ESgSkfEJizgg+TmZTB6Tx3przGLMkLauuoE5ZaMQkaBDGdaSlfRaRSQEVInIVSLyAQb+NPVyYGvE+21+WNqbM77ArvSMGcK6upQNOxuYM956YglaMjuczgM+DywEPgZ8PEnLOoKIXCkiy0RkWW1t6j+dfHbZKN7c08SBNnv0oDFD0fb9B2hs7WD2eKvPC1qyWm++4l82Ap9I0Gy3A5Mi3k/0w7pb/s3AzQCLFi1K+XsB5owfharrmeXYSUVBh2OMSbC1viRnjiW9wCU06YnIw/HGq+r5A5j9w8BVInIPcCJQp6rVA5hfyggXeayrrrekZ8wQtL66AZFDXQ+a4CT6Su9kXL3b3cBLQK9rbEXkbuAMoFhEtgHfBrIAVPUmYClwDu6m92YSdwUZuEmj8xiZnWGNWYwZotZV1zN17EjyspPVCZbprUR/A2W4TqUvBT4CPALcHdkPZyyqemkP4xX4bCKCTDWhkDCrrOBgEYgxZmhZv7OeuXZ/XkpIaEMWVe1U1b+p6seBk3BXZU+KyFWJXM5QNHv8KNZX11t3ZMYMMU2tHWzZ22w9saSIhLfeFJEcEfkg7mb0zwLXAw8lejlDzZyyAupbOthR1xJ0KMaYBHJ968LsMrtdIRUkuiHLHcA8XP3bd1S12y7FzJHCrbrW7ainvGhEwNEYYxJlnbXcTCmJvtL7KK6bsKuB50Wk3v81iIhVWMUxZ/woRGDNDttMxgwla3bUUzgii4mj7WQ2FST0Sk9V7cmI/TQyJ5NpxSNZvaMu6FCMMQm0ZnsdR0+w7sdShSWpFDJvQiFrtlvSM2aoaO/sYt3OBuaVFwYdivEs6aWQeeWj2FHXwp7G1qBDMcYkwMZdjbR1dHG03a6QMizppZB5E9zZoNXrGTM0rPYlN3allzos6aWQo33Ss3o9Y4aGNTvqGZmdwbSxI4MOxXiW9FJIYV4Wk8aMYM12u9IzZihYvb2OuRNGEQpZI5ZUYUkvxcybUGhXesYMAZ1dytrq+oMlOCY1WNJLMfPKC9myp5m6A+1Bh2KMGYA3dzfR3NZp9XkpxpJeigm38lprjVmMSWtrdoQbsVjLzVRiSS/FHH2wBacVcRqTzlZvryMnM8SMEnuGXiqxpJdiSgpyKBuVyyq7Sd2YtLZ6ez2zywrIzLDDbCqxbyMFzZ9YyGtb9wcdhjGmnzq7lNe37eeYiUVBh2KiWNJLQQsmFbF5TzP7mtqCDsUY0w8bdzXS1NbJgkmW9FKNJb0UdJz/oby2za72jElHK7fuA2DBZEt6qcaSXgqaP7EQEVhpRZzGpKWVW/czKjfTemJJQZb0UlBBbhYzx+Vb0jMmTb361n6OnVRkPbGkIEt6Keq4SaN5bet+VDXoUIwxfdDU2kFlTcPBagqTWizppagFk4vY19zOlj3NQYdijOmDVdvr6FKrz0tV
lvRSVLjVlxVxGpNewr/ZY+12hZRkSS9FVZQWkJedYUnPmDSz8q39TB6Tx9j8nKBDMd2wpJeiMkLC/PJCXrWkZ0xaWbl1v92fl8Is6aWwBZOLWLejnpb2zqBDMcb0ws66FnbWt1jSS2GW9FLYwsmjaevssn44jUkTr2zeC8DxU0YHHImJxZJeCjth6hgAXn5zb8CRGGN64+U395KXncG8CfY4oVRlSS+FjR6ZzazSAl6ypGdMWnj5zb0snDLanqyQwuybSXGLp41h+ea9dHR2BR2KMSaOfU1tbKhp4MRpY4IOxcRhSS/FLZ42hqa2TtZW25PUjUll4fq8xdPGBhyJiceSXopbPM3q9YxJBy+/uZfszBDHTCwMOhQTR8okPRE5S0Q2iMhGEflaN+OXiEitiKz0f58KIs7BVjoql6lj86xez5gU9/LmvSyYVERuVkbQoZg4UiLpiUgG8EvgbGAucKmIzO1m0j+q6gL/d8ugBhmgxdPG8MrmvXR1WefTxqSixtYOVm+vs/q8NJASSQ9YDGxU1U2q2gbcA1wQcEwpY/G0sexvbqdqV2PQoRhjurF8yz669FB1hEldqZL0yoGtEe+3+WHRPiQir4vI/SIyKdbMRORKEVkmIstqa2sTHeugC589vvTmnoAjMcZ056VNe8gICcdPtpvSU12qJL3e+D9gqqoeAzwG3B5rQlW9WVUXqeqikpKSQQswWSaOHsGkMSN4pmp30KEYY7rx7MbdHDepiJE5mUGHYnqQKklvOxB55TbRDztIVfeoaqt/ewuwcJBiC5yIcNrMEp7fuJu2Drtfz5hUsqexlVXb6zi9Iv1PsIeDVEl6rwAzRWSaiGQDlwAPR04gIuMj3p4PrBvE+AJ3ekUJTW2dLN+yL+hQjDERnt24G1U4zZJeWkiJpKeqHcBVwN9xyexeVV0jIt8VkfP9ZJ8XkTUi8hrweWBJMNEG4+SjxpIZEp6uSv86SmOGkqc21DJmZDbzy+3+vHSQMgXQqroUWBo17FsRr68BrhnsuFJFQW4WC6eM5qkNtXz1rNlBh2OMAbq6lKerdnPqjGJCIQk6HNMLKXGlZ3rntIoS1lbXs6uhJehQjDHA2up6dje2Wn1eGrGkl0bCP6xnKq0VpzGpIFzd8PaK4oAjMb1lSS+NzB0/iuL8bKvXMyZFPLWhlrnjRzGuIDfoUEwvWdJLI6GQcFpFCU9uqKXdHjVkTKD2N7exfMs+Tp9lRZvpxJJemjnr6DLqDrTzwhvWO4sxQXpsbQ0dXcrZ88qCDsX0gSW9NHNaRQkjszP46+rqoEMxZlhbuqqaiaNH2K0KacaSXprJzcrgXXNK+fuaGnuaujEBqTvQzrMbd3PO/PGI2K0K6cSSXho6Z34Ze5va7Bl7xgTk8bU1tHda0WY6sqSXhs6YNY687AweWWVFnMYEYemqaiYU5rJgUlHQoZg+sqSXhnKzMnjH7HH8ffVOOu3BssYMqvqWdp6p2s3ZVrSZlizppan3zR/PnqY2nn/DblQ3ZjA9uqaGts4uzplvRZvpyJJemnrn7HEUjsjinle29jyxMSZh7nn5LaYVj7QHxqYpS3ppKjcrgw8dP5FH1+xkd2Nrzx8wxgxYZU0Dy7bs49LFk6xoM01Z0ktjHzlxEu2dygPLtwUdijHDwt0vv0V2RogPHT8x6FBMP1nSS2MzxhWweOoY7n75LbqsQYsxSdXS3skDy7fx3nlljM3PCToc00+W9NLcR06czOY9zby4ybolMyaZlq6qpr6lg0sXTwo6FDMAlvTS3FnzyijKy+L2FzYHHYoxQ5aqcvsLW5hWPJKTp48NOhwzAJb00lxuVgaXnzyVv6+pYV11fdDhGDMkPV21m9e27udTb59mDVjSnCW9IeCKU6ZRkJPJDf+oCjoUY4YcVeXnj1cyoTCXDy+0os10Z0lvCCjMy2LJKVNZumon63fa1Z4xifRM1W5WvLWfz7xjBtmZdshMd/YNDhFXnDqN/JxMbnhiY9ChGDNkqCo/f6KK8YW5XLTIblMYCizpDRFFedl84pSpPLKqmuVb7OkLxiTC31bvZPmWffzbGUeRk5kRdDgmASzpDSGfPv0oyotG8LUHVtHWYc/aM2Yg6g608+2H1zB3/CguXTw56HBMgljSG0JG5mTy/ffPo2pXIzc99UbQ4RiT1v7rb+vZ3djKf33oGDIz7FA5VNg3OcS8Y/Y4zjt2Ar/4x0Y27moIOhxj0tJLm/bwh5fe4pOnTGP+xMKgwzEJZElvCPrWuXPJz83kyjuWU9fcHnQ4xqSVHfsP8Nk/vMrkMXl86T0VQYdjEsyS3hBUUpDDTR9dyNZ9zfzbH5bT3mn1e8b0RlNrB1fcvozW9k5u+fgi8rIzgw7JJJglvSFq8bQx/PCDx/Dcxj1c+9Aqe8K6MT1oae/k83e/yoad9dzwkeOoKC0IOiSTBHYaM4RduHAib+1t5vonqtjX3M71lxzHiGxrdm1MtP3NbfzLHct4ZfM+vvf+eZwxa1zQIZkksSu9Ie5LZ1Zw3XlzeXxdDZf85kW27GkKOiRjUsq66no+eOPzvLa1jusvPY6PnTQl6JBMElnSGwaWnDKNX390IRtrGnjPT5/m+ieqaGnvDDosYwLV2NrB9/+ylnNveJb9ze3c+akTOf/YCUGHZZJMVFOjrkdEzgJ+DmQAt6jqj6LG5wB3AAuBPcDFqrq5p/kuWrRIly1blviA09DOuha+98haHnm9muL8bC4+YRKXnDCZSWPygg5tWFBVWju6ONDmTjgyMoQMETJC7i8zJNaD/yDYuKuBu156iweWb6OhtYNLTpjMV8+aRVFedtChpQwRWa6qi4KOIxlSIumJSAZQCZwJbANeAS5V1bUR0/wbcIyqflpELgE+oKoX9zRvS3pHev6N3dz67Gb+sb6GLoXpxSN524yxzB1fyPSSkZQXjWDUiCzyczLJCA3eQVhVUYUuVbr8/0Pv3TCNGhf5PiRCKASZoRAZ/nU4oYSTS7ykoqq0dyrtnV20dXTR3tlFS3sXze0dNLV2cqCtk6a2jsP/t3bS3N5Bc2snzW2dNLd1RP33r1s7aW7v7LFBUUggJzODkTmZ5Oe4/+515qFh2YeG5eVkuHHZh17nZR8aNyIrw2+L3n2Pqkpnl9Lp/3d0KZ2dh95H/nV0ue8l/D68/TPEbeeMkBAS/HD3HYTk0HcSivheQuKSfm/j7I3mtg72NLbx1t5mNtU2smp7Hc+/sYdt+w6QlSGcNW88nzp1GsdOKkrYMocKS3rJDkLkZOA6VX2vf38NgKr+MGKav/tpXhCRTGAnUKI9rIAlvdi27z/AX1dV89zG3bz85l6a2o4s8szPyaQgNzNu7/LaiwTV1dV9QlMOf59sIriDa8QVliq0dXTR1s9bO7IzQ4zMziAvO5O87Az/51/nZJKXlUFezqHhI7IyCAkuoYQTTKcefN/S7pJqY2snTa0dNLZ20OT/wsMO9LF4Orze7ooy5JJRSA5LYOHXQQufpIQTaMhfBWdEfG/h/+7qGDq73AlLW2f4ZKWTlvbDv8/CEVmcNH0Mp8wo5ux
54ykpyAloDVPfUE56qdJ6sxzYGvF+G3BirGlUtUNE6oCxwO7omYnIlcCVAJMnW595sZQXjeBTb5/Op94+na4uZUfdAd6obaKmroX6lnYaWjpoaOmgvqWdjh4SQsgfiMJn9hLxOiQg4g5OPU0TOjjdofeHpo/4vL9qEw6NCyfXw69C3PAOf7XS1XXkVUtIhKxMIScjRFZGiOxM95eVESInM8TInMzDE1m2uwIbkZ1BXlZGIF1UdXYpzW3uCrTJX0k2tnbQ3Nbh/7vk2NLeSWcXdHZ1HUxs4f9dqgcTR0YodPBK61ByPLzoNTwuJEJmRnTikSNOeroirgTD46KvDsPfUWdXl/sf8R11RcUa/n/4d+mWk5khZIW/vwwhJyuD0XnZjBmZxcTReRxVkk/pqBwrPjYpk/QSSlVvBm4Gd6UXcDhpIRQSJo7OY+Joq99LBxkhoSA3i4LcrKBDMSatpErrze1A5COJJ/ph3U7jizcLcQ1ajDHGmF5JlaT3CjBTRKaJSDZwCfBw1DQPAx/3ry8E/tFTfZ4xxhgTKSWKN30d3VXA33G3LNyqqmtE5LvAMlV9GPgt8HsR2QjsxSVGY4wxptdSIukBqOpSYGnUsG9FvG4BPjzYcRljjBk6UqV40xhjjEk6S3rGGGOGDUt6xhhjhg1LesYYY4aNlOiGLJlEpBbY0s+PF9NNjy9pxOIPVjrHn86xg8U/UFNUtSTA5SfNkE96AyEiy9K5/zmLP1jpHH86xw4Wv4nNijeNMcYMG5b0jDHGDBuW9OK7OegABsjiD1Y6x5/OsYPFb2KwOj1jjDHDhl3pGWOMGTYs6RljjBk2LOl1Q0TOEpENIrJRRL4WdDw9EZFJIvJPEVkrImtE5Go/fIyIPCYiVf7/6KBjjUdEMkTkVRH5i38/TURe8t/DH/1jp1KSiBSJyP0isl5E1onIyem0/UXki37fWS0id4tIbipvfxG5VUR2icjqiGHdbm9xrvfr8bqIHB9c5Adj7S7+//H7z+si8pCIFEWMu8bHv0FE3htM1EODJb0oIpIB/BI4G5gLXCoic4ONqkcdwJdVdS5wEvBZH/PXgCdUdSbwhH+fyq4G1kW8/y/gp6o6A9gHXBFIVL3zc+BvqjobOBa3Hmmx/UWkHPg8sEhV5+Ee73UJqb39bwPOihoWa3ufDcz0f1cCNw5SjPHcxpHxPwbMU9VjgErgGgD/W74EONp/5lf+OGX6wZLekRYDG1V1k6q2AfcAFwQcU1yqWq2qK/zrBtwBtxwX9+1+stuB9wcTYc9EZCLwPuAW/16AdwL3+0lSNn4RKQROwz3zEVVtU9X9pNH2xz1mbISIZAJ5QDUpvP1V9WncczUjxdreFwB3qPMiUCQi4wcn0u51F7+qPqqqHf7ti8BE//oC4B5VbVXVN4GNuOOU6QdLekcqB7ZGvN/mh6UFEZkKHAe8BJSqarUftRMoDSis3vgZ8B9Al38/FtgfcRBI5e9hGlAL/M4Xz94iIiNJk+2vqtuBHwNv4ZJdHbCc9Nn+YbG2dzr+pj8J/NW/Tsf4U5YlvSFERPKBB4AvqGp95Dh196ak5P0pInIusEtVlwcdSz9lAscDN6rqcUATUUWZKb79R+OuJqYBE4CRHFn0llZSeXv3RESuxVVZ3BV0LEORJb0jbQcmRbyf6IelNBHJwiW8u1T1QT+4JlyM4//vCiq+HpwCnC8im3HFye/E1ZEV+eI2SO3vYRuwTVVf8u/vxyXBdNn+7wbeVNVaVW0HHsR9J+my/cNibe+0+U2LyBLgXOAyPXQTddrEnw4s6R3pFWCmb7mWjatAfjjgmOLy9V+/Bdap6k8iRj0MfNy//jjw58GOrTdU9RpVnaiqU3Hb+x+qehnwT+BCP1kqx78T2Cois/ygdwFrSZPtjyvWPElE8vy+FI4/LbZ/hFjb+2Hgct+K8ySgLqIYNGWIyFm4Iv7zVbU5YtTDwCUikiMi03ANcl4OIsYhQVXtL+oPOAfXeuoN4Nqg4+lFvKfiinJeB1b6v3Nw9WJPAFXA48CYoGPtxbqcAfzFv56O+3FvBO4DcoKOL07cC4Bl/jv4EzA6nbY/8B1gPbAa+D2Qk8rbH7gbV//YjrvSviLW9gYE1yL7DWAVrpVqKsa/EVd3F/4N3xQx/bU+/g3A2UHHn85/1g2ZMcaYYcOKN40xxgwblvSMMcYMG5b0jDHGDBuW9IwxxgwblvSMMcYMG5b0zKATkU4RWRnxNzXomBJFRI4Tkd/612eISF3Een4rYrojetn3w2P2tB8xzQQRuT96eJyYNovIAxHvLxSR2/q1gvGXc0u8ztlFZImITIh4f4+IzEx0HMbEY0nPBOGAqi6I+NscHuFvIE7n/fLrwPUR75+JWM/vRgy/je67+uq2p/1IqrpDVS884pPxLUz200JU9VOqujbOJEtw3ZyF3Yi7GduYQZPOBxczRIjIVP+csDtwN0dPEpF/F5FX/BXPdyKmvVZEKkXkWXHPffuKH/6kiCzyr4t9l2bhZ/T9T8S8/tUPP8N/JvwMvLt8bySIyAki8ryIvCYiL4tIgYg8LSILIuJ4VkSOjVqPAuAYVX2tp3XW7p8SgMbuaT96e632r4/2Ma706xfryul/cTc4R84nJO7ZcyUR7zeKSInvkegFEVklIt8XkcaI7faXiHn8wneddfA78Nv8NnHP5lsl7ll9FwKLgLt8rCOAZ4B3y6GuzoxJOkt6JggjIor8HvLDZgK/UtWjgVn+/WJcTycLReQ0EVmI66ZsAa7HmRN6sawrcN1OneCn/xfflRO4p1F8AffcxOnAKeK6nvsjcLWqHovrl/IArpu3JQAiUgHkdpPcFuGSdqSTffL8q4gc3Yt4I0X2tB/Lp4Gfq+oCv/xtMaa7FzheRGaEB6hqF3AncJkf9G7gNVWtxfV9eqOqzsf1HNIXC4ByVZ3nP/87Vb0f12PNZf6q94Bf/kbc8weNGRSW9EwQIos3P+CHbVH3rDOA9/i/V4EVwGxcEnw78JCqNqt7ikRv+kR9D67fxZW4xy2N9fMCeFlVt/mD70pgKi7hVqvqKwCqWu+vvO4DzhXXsfcnccWT0cbjHjEUtgKY4pPnDbjuyXpFet/T/gvA10Xkq35ZB2JM1wn8D0cWl94KXO5ffxL4nX99Cq6rLHDdkvXFJmC6iNwgrj/J+jjT7uLwIk9jksqSnkkVTRGvBfhhRGKcoaq/7eHzHRzan3Oj5vW5iHlNU9VH/bjWiOk6cY8I6pa6DoAfwz2C5yK6T0YHIpftE2ajf70UyBKR4h7WI1ZP+7Hi+gNwvl/2UhF5Z5zJf4972O3BHvtVdSvu6QTvxF1ZR15ZdrfsyO0Mh2/r8Dz34a7ensRdid4SJ6ZcH7sxg8KSnklFfwc+Ke75gIhIuYiMA54G3i8iI3z92XkRn9kMLPSvL4ya12f8FRoiUiHuAa+xbADGi8gJfvqCiDqnW3CNVF7xB/Zo64CDxYciUhZRT7gY93vbE2/FJXZP+7Gmnw5sUtXrcU8VOCbWtOoeG/RT4ItRo27BFXPep6qdfthzuKJkOFT8CbAFmC
uux/8i3BMZomMqBkKq+gDwDdxjlgAagIKoySs4skjYmKSxpGdSjr8S+wPwgoiswj2frkBVV+Dq217DXZG8EvGxH+OS26tA5NXULbjH5KzwjT9+TfwrujbgYuAGEXkNd3WX68ctxxXV/S7GZ9cDhT4hg0u+q/18rgcuCV+5icjduKLJWSKyTUSu8J/5BS4xPObrPG+Kv7W4yC9jJTAPuKOH6X/Lkev/MJAftV5XA5/12//gU7r9leG9uER1L64IOlo58KSP6U4OFaneBtwUbsgiIqW4ou6dPcRsTMLYUxZM2hKR64BGVf3xIC1vAq7IbravB+xumi8CDaoar0gvpfhWrz9V1bfHmaZRVfMTvNwvAvW9KLo2JmHsSs+YXhCRy3ENYa6NlfC8Gzm8rjClicjXgAfo5n7AQbAfuD2A5ZphzK70jDHGDBt2pWeMMWbYsKRnjDFm2LCkZ4wxZtiwpGeMMWbYsKRnjDFm2Ph/2hPcK/6KS8EAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "f, H_im = sig.freqz(h)\n", - "posfreq = np.square(H[0:512//N])\n", - "negfreq = np.flipud(np.square(H[0:512//N]))\n", - "plt.plot((np.abs(posfreq) + np.abs(negfreq)))\n", - "plt.xlabel('Frequency (512 is Nyquist)')\n", - "plt.ylabel('Magnitude')\n", - "plt.title('Unity Condition, Sum of Squared Magnitude of 2 Neighboring Subbands')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 79, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "(63,)" - ] - }, - "execution_count": 79, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "b.shape" - ] - }, - { - "cell_type": "code", - "execution_count": 102, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZcAAAEWCAYAAACqitpwAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3deXycZbn4/8+VTPakSZomXdOk0JRSCrSQtqAIiKBFloIilIMCfjlf9Kscj4eDCi6oHDgH1N9BPeKCiIKsPSxSZKkgsimUpjultE3bpEmXrE2bpVkmc/3+eJ4p0yF7ZuaZSa/36zWvzNzPdt/JZK65l+e+RVUxxhhjIinJ6wwYY4wZeyy4GGOMiTgLLsYYYyLOgosxxpiIs+BijDEm4iy4GGOMiTgLLiZuiMirIvLPA2z/tYh8L5Z5ShQiMl1E2kQkeYTHV4nIuZHOVzwRkU0icrbX+ThaWHAxfXI/bA65H1j7ROQPIpIdw+tfKyJvhqap6pdV9T+icK1SEVG3rG1u2W8O2S4i8g0R2eb+TnaJyH+KSGrIPn9wz7Ek7Nx3u+nXhpSrN+RabSLyiz7ydKWIbA5Le6mftJtVdZeqZqtqb4R+LRElIj8QkZ6wcn8zitf7g4jcHpqmqieo6qvRuqY5kgUXM5CLVDUbmAfMB27xOD/RlueW90rgVhFZ7Kb/HLgeuBrIAc4HzgUeCzt+q7sPACLiAy4Htoft95YbCIKPG/rIy+vAbBEpDDnXyUBGWNrp7r6J4PGwcv/I6wyZ6LHgYgalqvuAFThBBgAROU1E/iEiLSKyPrS5wf12vkNEWkVkp4hc5ab/QEQeCtkvWGPwhV5PRI4Hfg2c7n7DbXHTD38bFZGzRaRWRP5dROpFZK+IfDHkHAUi8qyIHBSRVSJye3hNaIDyvgVsAuaKSBnwFeAqVX1LVf2qugn4LHCBiJwVcuizwBkiku++XgxsAPYN5bphedgN7ADOdJNOcfP0WlhaErAq/HfpNjH+h4j83f07/EVEJoT8fr4gItUi0iQi3wm9toikichPRWSP+/ipiKS5214Tkc+6zz/qXvMC9/UnRGTdcMo52HtiCOU4I+R9WOO+964HrgK+6b5/nnX3Pdz0N0gZB3xvmaGx4GIGJSLTcL6tV7qvpwLPAbcD44GbgCdFpFBEsnC+6Z+vqjnAR4BhfeCo6mbgy3zwDT+vn10nAbnAVOA64J6QD/Z7gHZ3n2vcx1DKKiLyUeAEYC3wCaBWVd8Jy2MN8DbwyZDkTuAZYKn7+mrgwaFctx+v80EgORN4A3gzLO1tVe3p5/h/Ar4IFAGpOH8nRGQO8CvgC8AUoACYFnLcd4DTcL5MnAwsBL7rbnsNONt9fhZHBsCz3O2R1l85SoAXgP8BCt38rlPVe4GHgR+575+L+jjnQGWEgd9bZggsuJiB/ElEWoEaoB74vpv+eeB5VX1eVQOq+hJQAXza3R7A+dafoap73W/60dAD3KaqPar6PNAGHCdOp/Znge+raoeqvgc8MITzNQLNwH3Azar6V2ACsLef/ffifKiFehC4WkTycD5s/9THcae537SDj9P6OX9oLeVjOMHljbC0gT7Mf6+qW1X1ELCMD2qelwF/VtXXVbUL+B7O3yzoKpzfa72qNgA/xAlEwTwFa2tnAv8V8nqw4HJ5WLmnDLDvUMrxT8DLqvqo+x5oUtWhfpEZqIzQz3triOc2WHAxA7vErX2cDczG+aAFKAE+F/pBAZwBTFbVduAKnJrHXhF5TkRmRyl/TarqD3ndAWTjfOD7cIJiUOjz/kxQ1XxVPV5Vf+6mNQKT+9l/srv9MFV9073+d3A+wA/1cdzbqpoX8ni7n/O/DpzkfmM+Dacm9z4w2U07g4H7W0Kb44K/G3BqK4d/H+7frClk3ylAdcjrajcN4C1glohMxPmQfxAodpuqFg6Sn2Vh5d4zwL5DKUcxH+7PGqqBygj9v7fMEFlwMYNS1deAPwA/cZNqgD+GfVBkqeqd7v4rVPU8nA/f94Hfuse1A5khp5400GVHkeUGwM+RTT3FIzzXKzgfngtDE0WkGOcD/9U+jnkI+HdG1ySGqu4A9uAMJtilqm3uprfctGycprnh2kvI70NEMnGaxoL24HyBCJrupqGqHcBq4F+Bd1W1G/gHcCOwXVWPCLZDMJz3RLga4Nh+tg32/um3jCYyLLiYofopcJ6InIzz4XmRiHxKRJJFJN3tBJ0mIhNFZInb99KF05wQbHJZB5wpzj0ZuQw8+qwOmCYhw32Hyh2O+xTwAxHJdGtOVw9yWH/n2oozuOBhcQYxJIvICcCTOB+qL/dx2M+B84jMKK43cD643whJe9NNq+inZjSYJ4AL3c7wVOA2jvwseBT4rtuHNgG4FedvHvQacAMfNIG9GvZ6OIbzngj3MHCuiFwuIj5xBnEEm8zqgGMGOHawMppRsuBihsRtl34QuNXtzF4CfBunllADfAPn/ZSE88G3B6f/4izg/7nneAl4HGcE1WrgzwNc8hWc0VH7RGS434bB+bDLxWlS+SPOh0nXCM4TPNd9OB8+HcC7OM0ol6hqIHxnVW1W1b9qZBZLeg2nIzt0pNsbbtqIgpfbB/ZV4BGcWsx+oDZkl9tx
+tA2ABuBNW5aaJ5yQq4f/no4eRnOeyL82F04/Xz/jvNeW4fTOQ/wO2CO22zbV7/XYGU0oyS2WJg5GojIXcAkVR3SqLFBzvVD4FLgTFVtGXXmjBmDrOZixiQRmS0iJ7lDixfiDCd9OhLnVtXvA/fi9LkYY/pgNRczJonIApymsCk47e/3AndGqKnKGDMICy7GGGMizprFjDHGRJxv8F3GvgkTJmhpaanX2TDGmISyevXqRlUNn6UCsOACQGlpKRUVFV5nwxhjEoqIVPe3zZrFjDHGRJwFF2OMMRFnwcUYY0zEWXAxxhgTcRZcjDHGRJynwUVEFovIFhGpFJGb+9h+poisERG/iFwWtq1XRNa5j+Uh6TNEZKV7zsdHMquuMcaY0fEsuLirBd6Ds3zuHOBKd/nVULuAa3Fmbw13SFXnuY+LQ9LvAu5W1Zk4s71eF/HMG2OMGZCXNZeFQKWq7nAXHHoMZxr3w1S1SlU3cOQSrP0SEQHOwVmvApylbS+JXJaNiZ2Obj/LKmro7On1OivGDJuXwWUqRy49W+umDVW6iFSIyNsiEgwgBUBLyPKk/Z5TRK53j69oaGgYbt6Nibpbn9nEN5/YwJ0vvO91VowZtkTu0C9R1XLgn4Cfikh/y532SVXvVdVyVS0vLOxz9gJjPPPiu3t5YnUt08dn8od/VPHGNvsCZBKLl8FlN0euaz7NTRsSVd3t/tyBs8zqfKAJyBOR4LQ2wzqnMfGgvrWTW57ayIlTc3nua2cwsyibm/53PS0d3V5nzZgh8zK4rALK3NFdqcBSYPkgxwAgIvkikuY+nwB8FHjPXavjb0BwZNk1wDMRz7kxUaKqfPOJDXR093L3FSeTk57CT6+YR1NbN9/907vYEhkmUXgWXNx+kRuAFcBmYJmqbhKR20TkYnAWfBKRWuBzwG9EZJN7+PFAhYisxwkmd6rqe+62bwE3ikglTh/M72JXKmNG5+GVu3h1SwPf/vTxzCzKAWDu1Fz+7bxZ/HnDXpav3+NxDo0ZGlssDCgvL1ebFdl4bUdDGxf8/E3KS/N54IsLSUqSw9v8vQGuuPdttta18uLXz2RqXoaHOTXGISKr3b7vD0nkDn1jxgx/b4B/W7aeVF8SP77s5CMCC4AvOYn/vvxkAgHlpmXrCQTsS6GJbxZcjIkDK3c2s76mhe9dOIdJuel97lNSkMU3F8/mrR1NbNx9IMY5NGZ4LLgYEwdW7mgiSeBTJ0wccL/z505y9t/ZFItsGTNiFlyMiQNv72zmhCm55KSnDLhf0bh0ZkzIYuWO5hjlzJiRseBijMc6e3pZV9PCohnjh7T/ohnjeaeqmV7rdzFxzIKLMR5bV9NCtz/AomMKhrT/omPG09rpZ/Peg1HOmTEjZ8HFGI+t3NGMCCwsHWrNxQlCK3da05iJXxZcjPHYyp1NzJ40jtzMgftbgqbkZVA8PoOVO6xT38QvCy7GeKjbH2DNrv1D7m8JWjSjgHeqmu1+FxO3LLgY46ENtS109gQ47ZjhBpfxtHT0sLW+NUo5M2Z0LLgY46Fgv8nCGUPrzA86ze38f8f6XUycsuBijIdW7mxm1sRsxmelDuu4afkZTMlNt/tdTNyy4GKMR/y9AVZXNR8e/TUcIsKiYwpYubPJpuE3ccmCizEeeXfPQdq7e1k0zP6WoEUzxtPY1s32hvYI58yY0bPgYoxHgkOJFw5zpFhQ8KZLm2fMxCMLLsZ4ZOXOZo4pzKIop+9ZkAdTWpBJUU6a9buYuGTBxRgP9AaUVTubh31/SygRYeGM8dbvYuKSBRdjPLB570Fau/wj6swPteiYAuoOdlHd1BGhnBkTGRZcjPFA8P6WkXbmB53m1nzsfhcTbyy4GOOBlTuamD4+k8m5GaM6z8yibAqyUnnbOvVNnPE0uIjIYhHZIiKVInJzH9vPFJE1IuIXkctC0ueJyFsisklENojIFSHb/iAiO0VknfuYF6vyGDMUgYDyTtXo+luCDve7WKe+iTOeBRcRSQbuAc4H5gBXisicsN12AdcCj4SldwBXq+oJwGLgpyKSF7L9G6o6z32si0oBjBmh6uYOWjp6KC/Nj8j5ykvHs7vlEA2tXRE5nzGR4GXNZSFQqao7VLUbeAxYErqDqlap6gYgEJa+VVW3uc/3APVAYWyybczobKtzJps8btK4iJzvuIk5znltEksTR7wMLlOBmpDXtW7asIjIQiAV2B6SfIfbXHa3iKT1c9z1IlIhIhUNDQ3DvawxI7atvg1w+ksiYdZE5zzb6toicj5jIiGhO/RFZDLwR+CLqhqs3dwCzAYWAOOBb/V1rKreq6rlqlpeWGiVHhM72+pamZqXQXaaLyLnK8xJY1y6z2ouJq54GVx2A8Uhr6e5aUMiIuOA54DvqOrbwXRV3auOLuD3OM1vxsSNbfVtEau1gNOpXzYxx2ouJq54GVxWAWUiMkNEUoGlwPKhHOju/zTwoKo+EbZtsvtTgEuAdyOaa2NGoTegVNa3HW7KipRZE7MPN7cZEw88Cy6q6gduAFYAm4FlqrpJRG4TkYsBRGSBiNQCnwN+IyKb3MMvB84Eru1jyPHDIrIR2AhMAG6PYbGMGVDt/g66/AHKinIiet6ZRTk0t3fT1GYjxkx8iEyj7wip6vPA82Fpt4Y8X4XTXBZ+3EPAQ/2c85wIZ9OYiAk2Xc2McM2lzG1m21rXxunZfY5hMSamErpD35hEE1zzviyCfS4As9zhyJXWqW/ihAUXY2Kosq6Nybnp5KSnRPS8E8elkZPms34XEzcsuBgTQ5EeKRYkIsycmM3WOqu5mPhgwcWYGAkcHikW2c78oFlFOVRazcXECQsuxsTI7pZDHOrpjXh/S1DZxGwa27ppbu+OyvmNGQ4LLsbESPAO+rIIjxQLCja3bbOmMRMHLLgYEyNbg8OQI3yPS9CswxNYWtOY8Z4FF2NiZFtdGxPHpZGbEdmRYkGTc9PJTvNZzcXEBQsuxsTItvrWqHXmgztirMimgTHxwYKLMTEQHCkWjWHIocosuJg4YcHFmBjYc+AQHd29EZ9TLFzZxGwaWrto6bARY8ZbFlyMiYHgnGKRng05XJl16ps4YcHFmBgIDkOORbMYYHfqG89ZcDEmBrbVtVGYk0ZeZmpUrzMlN4PM1GRbOMx4zoKLMTGwNQoLhPUlKUkoK8q2aWCM5yy4GBNlqkplXWvUO/ODZhblWLOY8ZwFF2OibO+BTtq7e6Pe3xJUNjGb+tYuDnT0xOR6xvTFgosxURasRUTzBspQwea3bbZwmPGQBRdjoizY/xGt2ZDDBZvfbDiy8ZIFF2OibFtdGxOyU8nPiu5IsaCpeRlkpNiIMeMtT4OLiCwWkS0iUikiN/ex/UwRWSMifhG5LGzbNSKyzX1cE5J+qohsdM/5cxGRWJTFmP5srY9dZz44I8acOcasWcx4x7PgIiLJwD3A+cAc4EoRmRO22y7gWuCRsGPHA98HFgELge+LSL67+VfA/wXK3MfiKBXBmEE5I8XaoraGS3/KimzJY+MtL2suC4FKVd2hqt3AY8CS0B1UtUpVNwCBsGM
/Bbykqs2quh94CVgsIpOBcar6tqoq8CBwSdRLYkw/Gtu6ae3yc8yErJhe99iibOoOdtHR7Y/pdY0J8jK4TAVqQl7XummjOXaq+3zQc4rI9SJSISIVDQ0NQ860McNR3dQOQGmMg0tJQaZ7/Y6YXteYoKO2Q19V71XVclUtLyws9Do7Zoza2egGl4LYBpfg9arc6xsTa14Gl91AccjraW7aaI7d7T4fyTmNibjqpg6Sk4Sp+Rkxve50t+ZSZTUX4xEvg8sqoExEZohIKrAUWD7EY1cAnxSRfLcj/5PAClXdCxwUkdPcUWJXA89EI/PGDEVVUzvT8jNISY7tv9q49BQKslIPN8sZE2ueBRdV9QM34ASKzcAyVd0kIreJyMUAIrJARGqBzwG/EZFN7rHNwH/gBKhVwG1uGsBXgPuASmA78EIMi2XMEaqbOmLeJBZUOiGLKgsuxiM+Ly+uqs8Dz4el3RryfBVHNnOF7nc/cH8f6RXA3Mjm1JjhU1Wqmto5ZXqeJ9cvKcjkre1NnlzbmKO2Q9+YaNvf0UNrp58Sr2ouBVnsPdBJZ0+vJ9c3RzcLLsZEyeGRYhMyPbm+DUc2XrLgYkyUBDvTvay5ANbvYjxhwcWYKKlq6iBJoDjfm5pLMLjYiDHjBQsuxkRJdVM7U/MzSPV582+Wm5lCfmaK3etiPGHBxZgoqfJwGHJQSUGW1VyMJyy4GBMl1U3thzvVvVJakElVo9VcTOxZcDEmClo6umnp6ImLmsueA4dsOLKJOQsuxkRBsJ/Dq5FiQaUTMlGF2v1WezGxZcHFmCg4PNW+x81iJYdnR7bgYmLLgosxUVDV2IEIFI/3NrjMsHtdjEcsuBgTBdVN7UzJzSA9JdnTfORlpjAu3Wd36ZuYs+BiTBTsjIORYgAiYrMjG09YcDEmCqqbOjzvzA8qKbDgYmLPgosxEXbgUA/N7d2ed+YHlRZksnv/Ibr9Aa+zYo4iFlyMibBdcTIMOaikIIuADUc2MWbBxZgICzZBzZgQH8FlxgSbet/EngUXYyIseI/LdI+HIQeV2HBk4wELLsZE2M7GDiaNSycj1dthyEEFWalkp/moarTgYmLH0+AiIotFZIuIVIrIzX1sTxORx93tK0Wk1E2/SkTWhTwCIjLP3faqe87gtqLYlsoc7eJhwspQIkJJQaZNvW9iyrPgIiLJwD3A+cAc4EoRmRO223XAflWdCdwN3AWgqg+r6jxVnQd8AdipqutCjrsquF1V66NeGGNCxMNU++FKbep9E2Ne1lwWApWqukNVu4HHgCVh+ywBHnCfPwF8QkQkbJ8r3WON8Vxbl5/Gti5KJsRPzQWgpCCT2v2H6Om14cgmNrwMLlOBmpDXtW5an/uoqh84ABSE7XMF8GhY2u/dJrHv9RGMjImaYO1gRrzVXCZk4Q8oe1oOeZ0Vc5RI6A59EVkEdKjquyHJV6nqicDH3McX+jn2ehGpEJGKhoaGGOTWHA2q4+wel6DSwyPGrN/FxMaQg4uIRLqevxsoDnk9zU3rcx8R8QG5QFPI9qWE1VpUdbf7sxV4BKf57UNU9V5VLVfV8sLCwlEUw5gP7HRHZMVThz58MPW/jRgzsTJocBGRj4jIe8D77uuTReSXEbj2KqBMRGaISCpOoFgets9y4Br3+WXAK6qqbj6SgMsJ6W8REZ+ITHCfpwAXAu9iTIxUN7VTmJNGVprP66wcoTAnjYyUZLvXxcTMUP4D7gY+hfvBr6rrReTM0V5YVf0icgOwAkgG7lfVTSJyG1ChqsuB3wF/FJFKoBknAAWdCdSo6o6QtDRghRtYkoGXgd+ONq/GDJUzUiy+ai3wwXBku0vfxMqQvl6pak1Yv3hEFuRW1eeB58PSbg153gl8rp9jXwVOC0trB06NRN6MGYnqpnY+VhafzaylBVlsrWv1OhvmKDGUPpcaEfkIoCKSIiI3AZujnC9jEk5Ht5+6g11xM6dYuNIJWdTs76A3oF5nxRwFhhJcvgx8FWdY8G5gnvvaGBPig5Fi8dcsBk6nfk+vDUc2sTFos5iqNgJXxSAvxiS04D0u8XZ3flDoBJbFcTKpphm7Bg0uIvJ74EP1aFX9P1HJkTEJKngPyfR4rbm4swZUNXXwsTKPM2PGvKF06P855Hk6cCmwJzrZMSZxVTe1U5CVyrj0FK+z0qeJOemk+ZKotntdTAwMpVnsydDXIvIo8GbUcmRMgqpq7Ijb/haApCSbHdnEzkimfykDbBp7Y8JUN7VTGqcjxYJsdmQTK0Ppc2nF6XMR9+c+4FtRzpcxCaWzp5c9BzrjtjM/qHRCFq9ubSAQUJKSbE5XEz1DaRbLiUVGjElku5rjexhyUElBJt3+AHsPdjI1L8Pr7JgxrN/gIiKnDHSgqq6JfHaMSUzBCSHjvubi5q+6sd2Ci4mqgWou/98A2xQ4J8J5MSZhBW+gjPfgEqxZVTV18JGZHmfGjGn9BhdV/XgsM2JMIqtqaicvM4XczPgchhw0OTeD1OQk69Q3UTekiStFZC7OOvfpwTRVfTBamTIm0VQ3dcR9rQUgOUmYXpBpU++bqBvKaLHvA2fjBJfngfNx7nOx4GKMa2djOwtK873OxpCU2tT7JgaGcp/LZcAngH2q+kXgZJwVIY0xQJe/lz0HDsXd0sb9KSnIoqqpHXfdPWOiYijBpVNVA4BfRMYB9Ry5PLExR7Wa5kOofjB3V7wrLciksydA3cEur7NixrB+g4uI3CMiZwDviEgezoqOq4E1wFsxyp8xcS/YOZ5INRfA+l1MVA3U57IV+DEwBWgHHgXOA8ap6oYY5M2YhFCVIMOQgw7f69LUzmnHFHicGzNW9VtzUdWfqerpOGvVNwH3Ay8Cl4qITdhtjKuqsZ1x6T7y43wYctCUvHRSksUmsDRRNWifi6pWq+pdqjofuBK4BHg/6jkzJkFUuRNWiiTGXF2+5CSK8zPtXhcTVYMGFxHxichFIvIw8AKwBfhMJC4uIotFZIuIVIrIzX1sTxORx93tK0Wk1E0vFZFDIrLOffw65JhTRWSje8zPJVH+403Cqm7qSJj+lqCSgkyqGq3mYqJnoA7980TkfqAW+L/Ac8CxqrpUVZ8Z7YVFJBm4B+e+mTnAlSIyJ2y364D9qjoTuBu4K2TbdlWd5z6+HJL+Kze/Ze5j8Wjzakx/uv0Bavd3UBrnE1aGK3Gn3rfhyCZaBqq53AL8AzheVS9W1UdUNZL16IVAparuUNVu4DFgSdg+S4AH3OdPAJ8YqCYiIpNxBhy8rc5/zYM4zXjGRMXulkMENHFGigWVFmTS3t1LQ5sNRzbRMVCH/jmqep+q7o/StacCNSGva920PvdRVT9wAAgOb5khImtF5DUR+VjI/rWDnBMAEbleRCpEpKKhoWF0JTFHreBw3oSruUwIjhizpjETHSNZiTIe7AWmu4MMbgQecW/wHDJVvVdVy1W1vLCwMCqZNGPf4an243wFynAzgve6NFqnvokOL4PLbo6803+am9bnPiLiw5l2pklVu1S1CUBVVwPbgVnu/tMGOacxEVPd1EF2mo+CrFSvszIsU/
-    "image/png": "<base64 PNG data removed: plot of the resulting PQMF prototype window function>",
-    "text/plain": [
-     "&lt;Figure size 432x288 with 1 Axes&gt;"
-    ]
-   },
-   "metadata": {
-    "needs_background": "light"
-   },
-   "output_type": "display_data"
-  }
- ],
- "source": [
-  "cutoff = 0.15\n",
-  "beta = 9\n",
-  "ntaps = 63\n",
-  "N = 4\n",
-  "\n",
-  "b = sig.firwin(ntaps, cutoff, window=('kaiser', beta))\n",
-  "w, h = sig.freqz(b)\n",
-  "\n",
-  "plt.plot(b)\n",
-  "plt.title('Resulting PQMF Window Function')\n",
-  "plt.xlabel('Sample')\n",
-  "plt.ylabel('Value')\n",
-  "plt.show()"
- ]
-},
-{
- "cell_type": "code",
- "execution_count": 103,
- "metadata": {
-  "Collapsed": "false"
- },
- "outputs": [
-  {
-   "data": {
npvFYrFYqh3Vfp1bTEyM1qhRI9puWCwWS5XhxIkTqqpVuvNT7cWtRo0aHD9+PNpuWCwWS5VBRE6Gt6rcVGlltlgsFoslEFbcLBaLxVLtsOJmsVgslmqHFTeLxWKxVDusuFksFoul2lHlxE1EhojIGhFZLyL+mdUtFovFUgFU9mdxlRI3EYkFngUuB7ph6kV1i65XFovF8vOiKjyLq9o6t3OA9U5Je0RkGqYUyMqyvMnJ0ycZ8Y/naJTXm3ZyCSKU21azJtSrV7i1aAF165blp7FYLJYyp0KexWdCVRO3FhQtT78dONffSERuAW4BaNGiBXPmzCnRTfI1n+95iLMSupBWo/SdW1WzheLkSdi7t+ixuDhITIQaNYz41axp9i0Wi6WCiBORhT77E1V1os9+RM/iaFLVxC0inH+EiQApKSk6YMCAErfxP6dv4skfnmT8rd1olNzYK1RlvR07BgcPwoED5nXbNli1CtauhcWL4ZBTw7hLFxg5EiZMgM6dy/DLslgsluLkqWrfaDtxJlQ1cdsBtPLZb+kcK3PS09J5dN6jvL7sde7sfyci5XEXaNIk+LmCAlizBmbNgnffhX/+Ex56CAYPhrvvhkGDyscni8ViCUOFPYtLS5UqeSMiccBa4FLMF7kAuEFVVwS7JiUlRUubW7LfS/04nX+aJbcuKdX1Zc3u3TBxIjz/PGRnG5F79FHo0SPanlksluqEiJxQ1ZQQ50v8LK5oqlS0pFPx+Q7gM2AVML08v8z01HSydmexZFflELcmTeDee2HTJnj8cfjhB0hNhb/+FXJzo+2dxWL5uVDRz+LSUKV6bqXhTHpu+0/sp9ljzbi93+08MeSJMvbszNm/3wxPTpoEffrAjBnQunW0vbJYLFWdcD23qkCV6rlVNA2SG3BV56t4bdlrnM4/HW13itGgAbzyipmP27AB+vaFuXOj7ZXFYrFEHytuYUhPTWfvib18sv6TaLsSlKuvhh9/hIYN4bLL4IMPou2RxWKxRBcrbmEY0mEIjVMaM2nJpGi7EpJOneC77yAtzSwZePPNaHtksVgs0cOKWxjiY+MZ13McH639iH0n9kXbnZDUrw9ffgnnnw/jxsGnn0bbI4vFYokOVtwiID0tndMFp3lj2RvRdiUstWrBhx9C9+4wahQsWBBtjywWi6XisdGSEdLnxT6ICD/d8lMZeFX+7NoF/ftDXh789FPoxeIWi8Xii42W/BnhSfOwKHsRy3Yvi7YrEdG0Kbz3nknrde21cLryBXtaLBZLuWHFLUKu73E9cTFxTM6aHG1XIiYtDV5+Gb75Bh58MNreWCwWS8VhxS1CGqU0YlinYby69FXyCvKi7U7E3HADpKcbcZs3L9reWCwWS8Vgxa0EeFI97D6+m8/WfxZtV0rE00+bzCXjxsGJE9H2xmKxWMofK24l4PKOl9MwuSGTsiZF25USUbu2yWSycSM88EC0vbFYLJbyx4pbCUiITWBsz7F8sOYDDpw8EG13SsTFF4PHY6oILF8ebW8sFoulfLHiVkI8aR5y83OZtnxatF0pMf/6F9SpA7ffHr5CuMVisVRlrLiVkLSmafRq0qvSp+MKRMOGZlhy7lyz0NtisViqK1bcSoEn1cOCnQtYuXdltF0pMTffDJ07w5/+ZBZ4WywWS3XEilspGNtrrFnztqTqrHlziY+Hhx+G1avhP/+JtjcWi8VSPlQ6cRORf4nIahFZKiLvikhdn3P3iMh6EVkjIoOj5WPjlMZc3uFypi6dWqXWvLkMH25Scz30kM1cYrFYqieVTtyAL4AeqtoLWAvcAyAi3YAxQHdgCPCciMRGy0lPmofsY9l8seGLaLlQakTg3nth61aYOjXa3lgsFkvZU+nETVU/V1W3OzQfaOm8Hw5MU9UcVd0ErAfOiYaPAEM7DqV+jfpVKh2XL5dfDn36wD/+YefeLBZL9aPSiZsfNwFuCewWwDafc9udY8UQkVtEZKGILMwrpyd3YlwiN/S4gfdWv8fBkwfL5R7liQj89a+wfj28/Xa0vbFYLJayJSriJiJfisjyANtwH5u/AHnAayVtX1UnqmpfVe0bFxdXlq4XwZPmISc/hzdXVM2y18OHQ4cO8NRT0fbEYrFYypaoiJuqDlLVHgG29wFExAMMA8ZqYcG5HUArn2ZaOseiRp9mfejeqHuVHZqMiYHf/hbmz4cff4y2NxaLxVJ2VLphSREZAvwRuEpVfdP8fgCMEZFEEWkLdASi+kgWETxpHuZvn8/qfauj6Uqp8XhM9e6nn462JxaLxVJ2VDpxA54BagFfiMgSEXkBQFVXANOBlcCnwO2qmh89Nw1je44lVmKr5Jo3MEmVb7oJpk+H3buj7Y3FYrGUDaLVPMlgSkqKHj9+vFzvMfT1oWTtymLLnVuIjYna6oRSs3o1dO1qck/efXe0vbFYLNFGRE6oakq0/TgTKmPPrcrhSfWw4+gOvtr0VbRdKRVdusB558F//2sTKlssluqBFbcy4MrOV1IvqV6VDSwBMzS5ahX88EO0PbFYLJYzx4pbGZAUl8SYHmOYsWoGh08djrY7pWL0aEhONkVNLRaLpapjxa2M8KR5OJV3iukrpkfblVJRuzZcey1MmwY5OdH2xmKxWM4MK25lRL/m/ejasGuVHpocMwaOHIHPPou2JxaLxXJmWHErI0SE9NR0vtv2Hev2r4u2O6Xi0kuhfn2zLMBisViqMlbcypBxvcYRIzFVtvcWHw8jR8L778PJk9H2xmKxWEqPFbcypEXtFvyy/S+ZkjWFAi2ItjulYvRoOHbMDk1aLJaqjRW3MiY9NZ1tR7Yxe9PsaLtSKgYOhIYN7dCkxWIpHSJyn4jscDJMLRGRK6LhhxW3MmZ45+HUSazDpKxJ0XalVMTFwbBh8Mknts6bxWIpNU+oapqzfRwNB6y4lTE14mswpscY3ln5DkdyjkTbnVIxdCgcOgTffx9tTywWi6V0lF+xs0pC/fr1mTNnToXes2d+T07mneTBGQ9yRbOo9MjPiPr14bHHYNs2qOCvzmKxVA7iRGShz/5EVZ1YguvvEJEJwELgLlWt8IrONnFyOaCqdHm2C01SmjD3xrkVeu+y4tJLTZWA5cuj7YnFYqlowiVOFpEvgaYBTv0FmA/sAxR4AGimqjeVi6MhsMOS5YCI4En18M3Wb9hwYEO03SkVw4bBihWweXO0PbFYLJWNUAWnVXW3quaragHwEnBONHy04lZOjE8djyBMyZoSbVdKxdCh5nXmzOj6YbFYqhYi0sxndwQQlfEfK27lRMvaLRnUbhCTsyZXyTVvnTpB69Ywa1a0PbFYLFWMR0RkmYgsBQYCf4iGE5VW3ETkLhFREWno7IuIPC0i60VkqYj0ibaP4UhPTWfL4S3M3VI1590GDjQBJQVVT5stFkuUUNXxqtpTVXup6lWqmh0NPyqluIlIK+CXwFafw5cDHZ3tFuD5KLhWIkZ0HUGthFpMWjIp2q6UioED4cABWLYs2p5YLBZLyaiU4gY8AfwRE23jMhyYoob5QF2/sd1KR3J8Mtd1v463V77Nsdxj0Xa
nxAwcaF7t0KTFYqlqVDpxE5HhwA5VzfI71QLY5rO/3TlWqUlPS+f46eO8s/KdaLtSYlq1gvbtYXbVzCRmsVh+xkRF3ETkSxFZHmAbDvwZ+NsZtn+LiCwUkYV5Uc4hdUGrC2hfr32VTcc1cCDMnQv5+dH2xGKxWCInKuIWbI0EsBFoC2SJyGagJbBIRJoCO4BWPs20dI4Fan+iqvZV1b5xcdFNwiIieNI8zNk8h82HNkfVl9IwcCAcPgyLF0fbE4vFYomcSjUsqarLVLWxqrZR1TaYocc+qroL+ACY4ERN9gcORysKp6SM7zUeoEquebvoIvM6b150/bBYLJaSUKnELQwfY3p26zGr3m+LrjuR07puay5pewmTsyZT1dKdtWoFLVpYcbNYLFWLSi1uTg9un/NeVfV2VW3vrKFYGO76yoQn1cPGgxv5duu30XalxPTvD/PnR9sLi8ViiZxKLW7ViZFdR1IzoWaVXPN23nmwaZNJpGyxWCxVAStuFURKQgrXdruW6Sunczy3YqsUnCnnnWde7dCkxWKpKlhxq0A8aR6O5R7j3dXvRtuVEtGnj6nQ/eOP0fbEYrFYIsOKWwVy4VkX0rZu2yo3NJmUBN262eUAFoul6mDFrQKJkRjSU9OZtWkWWw9vDX9BJaJPH1i0CKpYsKfFYvmZYsWtgpmQOgFFmZo1NdqulIg+fWDPHsiuEisLLRbLzx0rbhVM23ptubj1xUzKmlSl1rz1cQoMLVoUXT8sFoslEqKbm+pniifNw43v38j3277ngrMuiLY7EZGaCiJG3IYNi7Y3FoulOiOZch4wDrgIaAacxFT0ngm8qhl6OGwbwXoPInwQgQ8HVPFE6nA0SElJ0ePHK1fo/bHcYzR9tCk39LyBiVdOjLY7EdO5M3TvDjNmRNsTi8VSnojICVVNicq9M+UTYCfwPrAQ2AMkAZ0wlb2vBB7XDA2pUaF6bl2Bm0P5ADxbAp8tDjUTajKq2yjeXPEmTw55kuT45Gi7FBE9esDy5dH2wmKxVHPGa4bJTOXDMWCRsz0mmdIwXCOhxO0vqnwd6mIRMsO6aQmIJ9XDlKwpvLf6PW7oeUO03YmIbt3gvfcgJwcSE6PtjcViqY64wiaZkgKc1AwtkEzpBHQBPtEMPR1A/IoRNKBElelhnYjAxhKYi9tcTOs6rZmcNTnarkRM9+5QUABr1kTbE4vF8jNgLpAkmdIC+BwYD0yK9OKg4iZCQxEyRPidCDVFeF6E5SK8L0KHM3b7Z06MxDAhdQJfbPiC7Ue2R9udiOje3byuWBFdPywWy88C0Qw9AYwEntMMvRboHunFoZYCvA4kAh2BHzHlZq4BPgJeLrW7Fi/pqelVas1bp04QGwsrV0bbE4vF8jNAnKjJsZgoSYDYSC8OJW5NVPkz8Dugpir/UmW1Ki8BdUvtrsVL+/rtufCsC6tMnbfEROjY0fbcLBZLhfB74B7gXc3QFZIp7YDZkV4cKqAkH0AVFcF/8q6gxG5aAuJJ9XDzhzfzw44f6N+yf7TdCUu3blbcLBZL+aMZOhcz7+bub8R0tiIilLi1c9a6ic97nP22pfA1YkTkt8DtGIGdqap/dI7fA/zKOf47Vf2sPP2oCK7tfi2//eS3TFoyqUqIW4cO8NFHkJ9vhigtFoulPHAiJO8G2uCjVZqhl0RyfShxG+7z/lG/c/77ZYaIDHTunaqqOSLS2DneDRiDmVBsDnwpIp1UNb+8fKkIaifWZmTXkUxbPo0nhzxJUlxStF0KSfv2kJsLO3bAWWdF2xuLxVKNeQt4ARPjUeLnfFBxC7fGrRz5DfCwquYYP3SPc3w4MM05vklE1gPnAFW+hKYnzcNry17j/dXvc12P66LtTkjatzevGzZYcbNYLOVKnmbo86W9OKi4ibAMCBrloEqv0t40DJ2Ai0Tk78Ap4G5VXQC0AOb72G13jlV5BrYZSKvarZicNbnSi1sHZxHI+vUwcGB0fbFYLNWaDyVTbgPeBXLcg5qhByK5ONSwpJse93bn1Y1XH0cI0YsEEfkSaBrg1F8cn+oD/YF+wHQRaVfC9m8BbgFISEg4E1crhNiYWMb3Gs/D3z3MzqM7aV6rebRdCkrLlhAfb3puFovFUo6kO6//53NMgYj0IGjiZK+BsFiV3n7HFqnSpyReRoqIfAr8U1VnO/sbMEJ3M4Cq/sM5/hlwn6qGHJasjImTA7F2/1o6P9OZfw76J3+84I/RdicknTtDr17w1lvR9sRisZQH0UycXFZEUvJGRLhAle/MDudTvnXg3sNkfp4tIp2ABGAf8AHwuog8jgkocReXVws6NejE+a3OZ3LWZP7v/P9DRKLtUlDatzfDkhaLxVKeSKb0ALphqgIAoBk6JZJrIxGpXwHPibBZhM3Ac8BNpfAzUv4LtBOR5cA0IF0NK4DpwErgU+D2qh4p6U96ajor965k4c6F0XYlJB06mGHJKrDu3GKxVFEkUzKAfzvbQOAR4KpIrw8rbqr8pEoqmE2VNFXKrR6zquaq6jhV7aGqfVR1ls+5v6tqe1XtrKqflJcP0WJ099EkxSUxacmkaLsSkrZt4ehROHgw2p5YLJZqzDXApcAuzdAbMRpUJ9KLQyVOLlJvWZXDqhwOZWM5M+om1WVElxG8sfwNcvJywl8QJVq1Mq/btkXXD4vFUvkQkWtFZIWIFIhIX79z94jIehFZIyKDwzR1UjO0AMiTTKmNKVraKlI/QvXc/iVCbxH6BNuAhyK9kSUy0lPTOXjqIB+u/TDargTFipvFYgnBckwm/7m+B/0ScQwBnhORUHmOFkqm1AVeAn7CFCqNeF1zqICS3cDjYa5fF+mNLJExqN0gmtdqzqQlk7im2zXRdicgrrht3RpdPywWS+VDVVcBgYLiSpSIQzP0NuftC5IpnwK1NUOXRupHqAwlAyJtpDJTv3595syZE203SsSAugOYtm4aMz6fQf2E+tF2JyCPPQZ16kAV+2otFktkxImIb2TbRFWdeIZtRpSIQzKli2boasmUYsvNJFP6aIZGFPMRyVKAKs2BAwcYMGBAtN0oEU17NOX1Z19nU81NjDx/ZLTdCYjHAxddBFOrRik6i8VSMvJUtW+wk6EScajq+2d477uAXwOPBTinwBknTrZEiS4Nu3Bui3OZlDWJ/z3vfyvlmrdmzWDXrmh7YbFYooGqDirFZTsoGhDS0jlWtO0M/bXzekYJ/qy4VVI8aR5+M/M3LN61mD7NyiUZzBnRtKldyG2xWEpERIk4JFNCDldphs6I5GZh17mJkCzCvSK85Ox3tEsAyp/rul9HYmxipV3z1rQpZGdH2wuLxVLZEJERIrIdOA+Y6aRKpASJOK50tl8B/wHGOtvLlCCBSCQZSl7BZGQ+z9nfATwY6Q0spaNejXoM7zKc15e9Tm5+brTdKUazZrB/v6ntZrFYLC6q+q6qtlTVRFVtoqqDfc6FTcShGXqjs2g7HuimGTpKM3QUZglBfKR+RCJu7VV5BDhtnOMEphq3pZzxpHrYf3I/M9fOjLYrxWjqTCXv3h1dPywWS7WllWao7/jQbi
DiKpKRzLnlilADp8yNCO3xqa1jKT8ua38ZTWs2ZVLWJEZ0HRFtd4rgituuXYXr3iwWi6UM+Uoy5TPgDWf/OuDLSC+OpOeWgRkfbSXCa8BXQOWuyVJNiIuJY3yv8Xy87mP2HN8T/oIKpGFD87p/f3T9sFgs1RPN0DuAF8Gb23iiZuhvI70+bD03ABEaYGqqCTBflX2lc7fiqSr13IKxYs8KejzfgycGP8Gd/e+Mtjte1qyBLl3g1Vdh7Nhoe2OxWL7b+h39W/YnNiZURqvIqA713EIlTvbNIdkayAZ2Amc5xywVQPfG3enbvG+li5ps0MC82p6bxVK+LNy5kFmbZoW02XBgAxe+ciEv/vRiBXlV/kim9JdMWSCZckwyJVcyJV8y5Uik14calnzM2Z4FfgAmYhJY/uAcs1QQnlQPWbuzWLJrSbRd8VKvnnm14maxlI5jucf4zUe/Ye/xvSHt7v/6fq6Zfk3ISiGHc0zBlleWvFKmPkaZZ4DrMTmMawA3UwLtCSpuqgxUZSCmx9ZHlb6qnA30JsCqckv5MabHGOJj4pm8ZHK0XfESGwt161pxs1gC8eDcB/nb7L+FtFmUvYgXfnqB5xc+H9LuZN7JsJVCXOFbuHMhK/asKLnDlRTN0PVArGZovmboK5hqAhERSUBJZ1WWeW+mLAe6ltxNS2lpkNyAqzpfxWvLXuN0/ulou+OlQQMrbhZLIGaum8kj3z3CoVOHgtq461cnZ00mVOyDaxdqasJ3LezkrMrzI/gMOSGZkgAskUx5RDLlD0SmWRCh4VIRXhZhgLO9BERcdqCkiEiaiMwXkSUislBEznGOi4g87RS6WypSPGN0dcaT5mHvib18sr7yFCC34mb5ObJy70p+2P5DSJvc/Fxy8nN4c/mbIW0ANh7cyLdbvw1r9+n6T9l1LHBCV9emSUoTpi6dSl5BXkj/qgjjMRp1B3Ack5dyVKQXRyJuNwIrgN8720rnWHnxCJCpqmnA35x9gMsxucg6ArcAofvy1YzB7QfTOKVxpQosadAADhyItn0ECy4AACAASURBVBcWS9lwKu8U93x5DwdPHgxp99dZf2Xo60NDZg7y7ZWFs4HwvbJODTqRr/m8tvS1kG3d1Psmdh3bxecbPg/1ESo9kimxwEOaoac0Q49ohmZqhv6vM0wZEWHFTZVTqjyhyghne0KVU2fkeZhbArWd93UwEZpgCt1NUcN8oK6INCtHPyoV8bHxjOs5jo/WfsS+E5VjJUatWnDsWLS9sFjC89T8p3jku0dC2izcuZCHv3uYFxa+ENLu+Onj7D+5n4/XfRzUJjc/F0GYt30ea/atCWoDcG6Lc5m+cjrHcwMvWcrNz6VH4x70b9mfSVmTAg5hum2N6DKCBjUaVPmhSc3QfKC1MyxZKiJJnLxJhI3+W2lvGAF3Av8SkW3Ao8A9zvEWwDYfu4CF7ozPcoszpLkwL69adM8BSE9L53TBad5Y9kZ44wogJcWKmyW6rNm3htZPtg4qIC5vrXyL++bcx5Gc4JHk3rmtIAJSzC5Mb2tQu0HESExQoXHbueXsWziWe4x3V78b0C4nL4eE2AQ8qR6W71nO4l2Lg7ZVM6EmN/S8gfdWvxe2B1oF2Ah8J5lyr2TK/7pbpBdHMizZF+jnbBcBTwOvlspVBxH5UkSWB9iGA78B/qCqrYA/YLJClwhVnaiqfVW1b1xc9anq06tJL3o37c2krEnRdgWAmjWhCq+Pt1QD1u5fy9bDW3lp0Ush7XLzczmZd5K3VrwV0sZt84cdwefUXLuZ62YGDePPzc+ldZ3WDOkwhKlLp5JfUDz5vRvheEnbS2hXr11QsczNzyUhNoHregSvFOL6lBCbgCfNQ25+Lm+uCD7fV0XYAHyE0alaPltERDIsud9n26HKk8DQ0npr2tRBqtojwPY+kA649XreAs5x3kdU6K6640nzsCh7Ect2LwtvXM7YnpulPDmdf5p/fvtPjuYcDWrjPtRfXfpqyCCKnHwjJGUxB5aTl0Pbum3JK8jj9WWvB23L7W1tP7I94CJs935JcUlM6DWBWZtmsfXw1sBtxSRQN6kuV3e5OmClEPfzJcQm0Ltpb3o27lmp5udLgzPPVmyL9PpIhiX7+Gx9RbiV8i1yuhO42Hl/CWYBH5hCdxOcqMn+wGFV/dlVFLuh5w1mzVslGFOvWRPy8mzZG0vJ+e/i//LcgudC2izcuZD/99X/CzkH5j7Udx/fzWfrPwtq54rBN1u/Yf2BwDEJbk+qd9PeTFs+jZOnTwZtK61pGmc3OzvoKIo7lHhl5yupm1Q3oJ3rU2JsIhNSJ6AoU7OmBrRLjEsEzI/bQJVCfHtuIsKtfW+lY4OOlbJcVjgkU16STOkZ5FyKZMpNkilhk/5FMiz5mM/2D6APMLokzpaQXwOPiUgW8BAmMhLgY8wY7HpMppTbytGHSkvD5IYM7TSUV5e+GvU1bylO5jk7NGlx2XRwE6kvpLLxYOhp+VeWvMIfv/gjx3KDd/1d4Qo1B+Y+vONj4kMO1+fm5zKgzQAEYUrWlJBt3XL2LRzOOcz7a94PaucO/y3ZtYSsXVlBbZLikri+x/W8u+pdDp86HPB+CbEJtK3XlgFtBgRc8+a2BXBZu8toVrNZsc/qFUpHBG/rdxtTR0z1XlfFeBa4VzJllWTKW5Ipz0mm/Fcy5Rvge8zQ5NvhGolE3H7lZitR5TJVbgHK7eeAqn6rqmeraqqqnquqPznHVVVvdwrd9VTVheXlQ2XHk+oxv1Q3BP+lWhG44maHJi0uK/euZOnupbz0U/g5sOOnjzNj1YyQNm6bP2X/FNLm6i5X88GaDzhwMvDalNz8XNrVbcdl7S9jStYUCrQgaFuD2w+mVe1WIQNBEmITuL7H9QFHUVS1WG/rZN5J3lr5VrF2AK8Apaems+7AOuZtnxfwfgCxMbGM7zWemWtnsvvY7qBtVWU0Q5doho7GxHk8C3yDGbm7WTM0VTP0Kc3QsGXXIhG3QAoZVjUt5ccVHa+gUXKjqA9N1qxpXm3P7edBfkE+Ly58MehwHRQ+ZKcsnRIwiMLfLtKsG6GCLcD0tnLzc5m2fFpQu4TYBNJT09lyeAtfb/46aFtJcUmkp6bz+YbP2XGk+LS+21aD5AZc2fnKYqMo+ZqPol6h6de8H10adin2/9XtmcbFmFmea7pdQ0p8SpHPqqrk5OcUEa30tHTyNb/IfF91EjcXzdBjmqFzNEPf0Ax9TzM0dEisH6GqAnQRYRRQR4SRPpsHSDpDvy1nQHxsPDf0vIEP1nzA/hPRSxFie27Vh+krpgcdrnNZlL2IW2feGjLzvPuQ3Xl0J19t+iqs3ezNs9lyaEtIm26NuvH6stcDJg52bc5pcQ6pTVKDiqA7B3Z1l6upnVg75BxYQmwCE1InUKAFvLq0eGC4b0/Kk2oyB326/tOA7QCICJ5UD99u/bbIfJ/bjogAJoz/mm7X8OaKN70/INwgGV/R6taoG/2a9ysilu66ulg583I31YVQPbfOwDCgLnClz9YHMy9miSJuuG+wX6oVge25VR+eW/Acv/3kt
yF7ZSfzzLlIeluxEhvW7vxW5wMEFVVXzG7pcwsHTx3ko7UfBb2f2ytbsHMBK/euDGiXEJtAcnwy13W/jndWvlNsvs+3rY4NOnJBqwvCzoEN6TCERsmNiohloF7UuF7jzJq3JUUFKTE2sUjb6anpHMk5wnur3yvSlr+dJ61opRB/obSErgrwvio3AsNUudFn+50q31egj5YApDVNI7VJalSHJm1ASdXg4MmDZB8NHVick59T5KEa0MYRm1Dll9yhtqGdhvLu6neDJg52U0oNbDMwaOJg98F+RccraF6redje1theY4mLiQtYPcNXkNJT0zl++jhvr3w7aFtgBGTVvlUs2Lmg2Gd0beJj4xnXaxwfrvnQO4oSSNxa1G7BZe0uY8rSwvk+X59cLm5zMa3rtPZ+1mDDjWN6jCEhNsH7A8LtmVZHJFOSS3NdqGHJPzpvbxDhaf+tVF5ayhT3l2q0Slz4Dkvu2RMVF37WqCpvLn8zbLj3nZ/dyYWvXBgwiMLFNztHOBsgaPkl7xxYn1s4lXeK6SumB7TLycshIcZEHG44uIHvtn0XtK3k+GTG9xrPJ+s+KRJE4bYTFxNHjMTQOKUxl3e4vFji4PyCfPI13xvgcX6r8+lQv0PQObD42HgAru12LUlxScV6oP6i5EnzmMxBy98o4re/2HjSPGw9vJU5m+cEbAcgRmKYkDqBLzZ8wfYj24O2Vb9GfW+lkNz83IBtVXUkU86XTFkJrHb2UyVTQq8f8SHUsOQq53Uh8FOAzRJlvL9Uo9R7c4cln3kGmjSBxX5ZgQqCP0stZcCSXUsY884YXlwYuvry/hP72Xhwo/ehGgj3Ifrlxi8DBlH42nSs3zFo+SXX5vxW59O9UfeQEYeJcYmM7DqyWBCFf1vukGO+5vPasteK2fgLTfaxbL7c+KX32OmC0952oHAObM7mOWw6uKlIW/Ex8cSIeSzWSarDyK4jmbZ8GqfyTDrd/IJ8CrSgyDBhrya9SGuaVqQX5Xs/l+Gdh1MnsY7XLpggpaemoyivLn21yOJsfzypHvad2Mcn6z4pEp1ZjXgCGAzsB9AMzQJ+EenFoYYlP3ReJwfaztBpSxnQOKUxV3S8Imx2hvLC7bl98415/dCppXj0KPTvD23awK7AFTosERAqvyGYBL4Qvn5XpJGJ/Zr3o0ALmLq0+EJi33Z+3efXQcsv+a63Sk9N5/tt37N2/9qAdgmxCdRMqMm13a9l+orpnDh9ImBbCbEJdG3UlXNbnMsrS14p8r34C8SwTsNoUKNBkc8aqPczPnV8sTVvgcTGk+oxhULXfBi0Ldfup+yfWL5nedB5shrxNRjTYwzvrHqHozlHi0VBurSv356LzrqISUsmBRVKgMEdBtMkpQmTsiaRW1D9em4AmqHb/A4FD8H1I9Sw5IcifBBsK7W3ljIlPTWd7GPZfLHhiwq/t9tzc/nKCY57/XX44QfYtg3uv7/C3aoWHDp1iKaPNY0o64b7UA1n5z5Ug9l0bdTV+1ANNQd2ZecrzUM1TG8rUBCFr53vHNjR3KO8u6po4mD/Xkt6anqxxMH+guSuP/NNHBxIIM6qcxaXtL2EyVmTQ86BXdL2ElrUahF2DuyGnjd45/tCheWnp6Zz4vQJ3l75dsihxPTUdNbsX8M3W80vx0C9sriYOMb1MpVCdhzZUR3FbZtkyvmASqbES6bcTeGIYlhCDUs+StHsJP6bpRLg/aUahWTKSUngG5y1ebN5fest6NYNrr0WZsyA/Ih/a1lcso9ms+f4Hv7947+D2viGxgebAwPzQG5Qo4H3oRqsLXcObM3+Nfy448eA7QCkxKcwtufYgOWXcvJyvCHpzWo1Y0iHIcXWvKkqpwtOex/Gv2j9C9rUbRM064Zr5wZR+EccBprbysnP8c73hZoD23Rok7dQaKC2YmNimZA6gc/Wf0b20eygbTVKacTQjkOZunSqtwcaSGz6t+xPpwadmJw1OeRQ4rXdr6VGXA1vQuhQIphXkMesTbOqo7jdCtyOqf6yA0hz9iMi1LDk1+4GzAMOAgeAec4xSyXA/aX6/ur3K7zEhQjExha+37nTCNnSpXDeeXDVVbB7NyzzyfGsCh99BBvLs2hSJUdVefjbh9l8aHNQG/chGkn15dZ1WoesvpyTn8O5Lc+lY/2OQX8EuQ/2a7pdQ424GuHnwIKUX/IPSU9PTWf7ke3M3jzba+M/BxYjMaSnpvPVxq/YdnhbkbZiJZbYGPNHVq9GPa7ucrU3iAIgt6B4OH2fZn3o0bhHsd6Wv92ILiOomVAzojkwd74vVK/Mk2YyB3249sOgNu5839dbvmbNvjVBBal2Ym1GdRvF/O3zg7YF0LNJT/o061Nk0Xh1QTN0n2boWM3QJpqhjTVDx2mGRrywN5LEyUMxpQeeBp4B1otweeldtpQ17i/VaJS4cEevunY1SZRXrIC9e6F7d+jXz5xb4hM1/te/wpVXmvPr1hVv7+fAvhP7uOere3jg6weC2rgP0UiqL9/c52Z2H98dtPqyu57Kk+Zh7pa5AfM+ur0I96E6bUVhEIWL7zBhrya96NOsT8Delm9v5KrOV5nEwWHmwLyJg33m+wKJTXpqepHEwYFsRIT01HTmb5/Pmn1rggpSSkIKo7uN5q2Vb3E893hQcevcsLMpFLpkUsgAjys6XkHD5Ia8suSVoDZQON+37sC6kIKUnprufR/KzpPqCWtTlZBM+bdkytPBtkjbiTRx8kBVBqhyMTAQE8ViqSS4v1SjETXpilvnzub1C2fqr1s36NABkpMLxe3gQXjySejd2/T4MjIq3N0KYc/xPSFTT7kPyHDVl8E8sCKpvtwwuWHIFFWJcYmM71U8iMLXxndu69CpQ3yw5oNiNlA4/5Oeml6s/JK/QCTFJTGm+xhmrJrhLRQaaA6sXb12/KL1L4qseQskNr9s/0ua1mzq/VsPJkhje44lVmK9w3/+93PxpHk4lnuMGatmBA3wACMgK/auYN62eUHbSohN4IYeN3h72sHaalm7JYPaDQppAzCwzUBa1W4V1u76nibHZXURN4JH6JcoUj8ScTuqim+NiI1A8AJLlgrH95fq6n2ro+JDly7m9VszfUG7dkbAevYsHJb88ks4ccIsHZgwAd5/3+xXJwq0gM7PdOaBueF7Ze5DNZTNqK6jWL5nOYuyFwW1qZlQkxt63MD7a94PmDjYFYBWdVpxabtLiwRRQGGiX/fh6D5UA63vgsIHbaDyS8HmwHwLhQbNupHqYe3+td6huEALk+Ni4kzi4HUz2XN8T9DFy81qNWNwh8FMyZoScg7swrMupF29dmHnwNxCoRMXTTS+B7HzpHm870P2thy7UDbufF84u4bJDbnrvLsY0n5IUJuqhGbo5FBbpO1EIm4LRfhYBI8I6cCHwAI312SpP4GlTPH+Ug0RWFAeuB2K9u3N6xontWnDhua1TRsTNQlmyUByshmuHDnSCNvs2UWaY9MmeOop2BI43WClJycvh0OnDvHyopeD9t6KLIYO0tt2e3fje40nMTYxoJ2v2HirLy8vPjSdm2+KXYIRkM2H
NvPNlm+85/0T/XqDKDZ8xs6jO4u045u/sGFyQ4Z1GlYkcXCg3s85Lc6hS8MuYSMOr+l2DcnxyRHNgbmFQkNFHHpSPew4usO7ZCHYHFh6ajqzNs1i/YH1Qduqm1SXEV1HMHfL3KBtgckc1KtJLyC4AIKpYlAroRY14moEtQG445w7+J+z/4fUJqkh7f4x6B/cc9E9IW2qGpIpH0qmfOC3TZVM+b1kStj8xpGIWxKwG1NAdACwF6iByTM57Ax8t5QhbmRasJL25YUrbo0bm9cdOyAmBurUMfstW8L27cbuhx/gnHMgPt4EnMTEmGMuhw6Z43feaV4PVmx8TETc9dldxQpF+uI+uHcc3RE0cbBrk9oklVmbZgVMHOzaNE5p7A2i8E8c7Dv/4z5Ug6Woch/GI7qOoFZCrSJ2gYYJ3cTBvvN9gfIXpqemFym/FGoOzE0cHEzcaiXWYlTXUd7EwcHWbnVv3J2+zft6e1vBhObKzldSL6ke/1n8HyC42LjzfSv3rjzjOTAR4ca0GwETFBKM5Phk3hn9Dn++6M9BbQCa1mzKC8NeoEZ8aBGspmwEjmHqd74EHMGMGnZy9kMSVtz88kr6bzedofOWMiQ9NT3kQ7U8ccXtyBGoV88IFxhxO3kSDhyADRsK5+ZSUkxQyUKfqnzPPWeiK//9b7P4+8knK/YzRMILP73AX2f/Nej5iHpljpjc3OfmYkEU/u24vbIDJw8wc13o6svpqen8uONHVu1dVczOfRgnxyczuvto3lrxljdxcCCx6dSgE+e3Or/IfF8gIfEvvxRMbMb3Gk+MxDAla0rYOTC3UGioYUJPqikUumzPsqA2SXFJjOkxhu1Htge9H0Cbum0Y0GZASBswhUKb12oe1u6Oc+7g2xu/5aw6ZwW1Abis/WX0adYnpM3PnPM1Q2/QDP3Q2cYB/TRDb8ck8A9JJNGSbUV4XIQZZbWIW0SuFZEVIlIgIn39zt0jIutFZI2IDPY5PsQ5tl5E/t+Z3L+64v5SDZWJorxwhyEB6tcvfN+ypXldtQr27zfDlC5nn100kvKdd0xmkzvugEGDYPLkwp5hRbB8z/KgYfcuufm5LNm1JGjiYPfBXTuxNjNWzShWfdnXpnODziGrL4PpbbgPVX+x9A/w8A2i8MV/XsqT5ilSKDTUHNjKvStZuHOh187/oR4fG8/YnmO95ZfcyEx/3MTBk7Mme6MwAwnEgDYDOKvOWUxaMilkr2xMjzHEx8Rz6NShiOa2gt3P97OGs3ELhYazi4uJ44KzLgh6vroT7PkuIm1E5KSILHG2F8I0VVMyxfsLwXnvpo4IWzA7kmHJ94DNwL8pu0Xcy4GRwFzfgyLSDRgDdAeGAM+JSKyIxGIqsl4OdAOud2wtPnhL2q8uXtK+vKlVy8ynQWBxc1N0tW1beK5DB8jONnNv+/bBokVmmQDAddeZebfVfvExkydDo0YwfHjZB6OMmj4Kz3ueoOcLtMC7lixc4uBxPccFTRzsnzNx/YH1fL/t+6A2sTGxjOs5Lmj15fgYk+i3Sc0mXNHxiiJD0/7BIgAXtLqA9vXaF5nbcu/ly+juo0mKS/KKZbDgjfS0dHLzTfmlcFk3th7e6l2yEMguRmKY0GsCX2z8gk0HNwVtq0FyA67qfFXQdlz6Ne9H14Zdw9qN6jaKlPiUgMLsyx3n3MF13a+jR+MeIe1+5gR8vjtsUNU0Z7s1TDt3Ad9KpsyWTJkDfAPcLZmSAuFTQEYibqdUeVqV2X4Lu0uNqq5SDVhVdTgwTVVzVHUTsB44x9nWq+pGVc0Fpjm2Fj/S09JDZmMvL2rWLBS3Bg0Kj7s9ukVOsJ+vuLnvN2+GrCzz3l0bN2CAef3a5y9tyxb49a/NfT74AB58sCw/ARw+dZgvNn4RNnEwEDZx8AVnXUDXhl3DBoIEqr7sbwPBqy/Hx8QXmwPbeXSnN3GwGyziO3TnDmHO3jybzYc2BxW3Okl1GNFlhLdQaLA5MN/yS6HEzS0U+vLilwPez/sZ0tIp0AKydmedccShiHjtaibUDGpXM6EmU0ZM4e7z7w5qAyaMf9o100LOp/3cCfF8L1k7Gfox0BG4E/g90FkzdKZm6HHN0LCTFnER3OMpETKAzwHvjLYqxWOTz5wWwHyf/e3OMYBtfsfPDdaIiNwC3ALQokUL5syZU7ZeVmJUldbJrXlq7lN0PNqx3O83daqZJ/vxR/jb3yA31/Tc3K88Lw8efRRq1DBDjocPF56rU8ecW7cOTp0y70UKzz/1lEnx5e7v3An/+IdZXrB9u5nfmz27aAqwYDyx9glqxdfi5rY3B7U5kXOCAi3g/vfu5/qzri92/nieWZOWVieNJYeX8Mi7j3BBw6LDTxuPmQXS61av46JaFzFx00Re/fhVWia39Nr8tM8s1Vm6ZCk5tXK4sP6FvLH0DUaljCIp1gSBLd9hckUumL+AOvEmOqdLrS48+92z9M7pDcD6zeuJJbbI33etglrUjqvNI58/QuL2RE7lmyHAbZu3Maeg0K7jKfO3cf/79/OLhibR+ro165hzsNAGoLf05o1Tb/Dwuw+zfe92CnILAv5/urDmhTy74VnqxNehY82OQf/PXVT/ImZmm7nDlctWErs1cOXoHrV7sPzIck4cORG0raSCJBolNiLnQE7I/+NpBWk81OMhtmZtZStbg9rVpz555DFnY/C2fkbEiYjPjDgTVXViGbTbVkQWY4JD/qqq34SxPxtog9GqVMkUNENDl4x3UdWQG+g/QLeDfg0629lmhb+OLzHdU/9tuI/NHKCvz/4zwDif/f8A1zjbyz7HxwPPhPNBVUlOTtafG//89p/KfejafWvL/V4FBWZTVe3cWRVUf/e7wvPHjpljjRqZ1yNHCs9lZ5tjTz+teuONqk2aFG37sstU+/Yt3O/ZU/XSS837zz83177zTtFrDh5UHTxYtV071ZkzC493f7a7pvw9RY/mHA36WVL+nqLch3Z5posWuB/Kh73H9yr3oY9//7g2/ldjHfnmyGI2C3YsUO5DP1j9gW4/vF1jMmP0L1/9pYjN2yveVu5Dl+5aqqqqszbOUu5DX1v6mtfm0e8eVe5DD5867D327I/PKvehi7MXq6rqHTPv0HoP1yvmwx0z79DEBxL14MmDevDkQeU+9Il5TxSzGzhpoLZ7qp0u2rlIuQ+dsXJGMZu8/Dxt8VgLHfb6MB355kjt8VyPgN/d7mO7Ne7+OOU+dOhrQwPaqKp+t/U75T6U+9Aftv8Q1O6ln15S7kMHTx0c1Ma977GcYyFtLCUHOK5l/3xPBBo478/GdFhqB73HfUzlPr7nPp7jPv7tbE+H8st3i2RY8lqgnSoXqzLQ2S6JQDQHqWqPANv7IS7bAbTy2W/pHAt23BIAbzb2CshYIlLYcwo0LJmcbM7v3Wv23TI5YGrAxcaayMjt26F166Jtd+1q5txUTbTlsmWFw5UDBpi2Zs0qes3dd5ssKSdPwpgxplcJZggvUPVlX3Lzc2lWsxmr960OnTg4IYVxPU31Zf/Ewb5DfC1qt+CX7X/JlKwpRRZN+w8Deqs
vB0hR5TsH5F99OVg0YXpaujdxcLjIxI0HNzJrk/kSA7XlBlF8su4Tth7eGnQI0C2/FKwdl/NankfH+h2D+uTiFgoNl3WjcUpjUhJSQtpYyp7SPN/VTDftd97/hEnr2CnEbfoCF2iG3qYZ+ltn+12kPkYibsuBupE2eIZ8AIwRkUQRaYsZb/0RWAB0FJG2IpKACTqxZXeC0LxW84AP1fImUECJSGFpnFq1CpcIuOfq1TPClZ0NzZoVba9LF1Ple8eOwvVwF15oXuPj4fzzCwNVwLQzdSr85jdG9I4eheefN+fch7yJTDRDnUUTOptM9dd1v44acTXCzpWFShwMRVNUbTuyjdmbCler++cndBMHf7nxS2/YujdYxKkKDQGqLweZAzu72dl0b9TdG3Hoey9fRnUdRc2EmuEzzzvzfQt3LjzjiEN3vg8IuYC5TlIdXhz2Irf3izgJvKWSIyKNnOBARKQd5vkeKoX6cqBpae8XibjVBVaL8FkZLgUYISLbgfOAmSLyGYCqrgCmAyuBT4HbVTVfVfOAO4DPMPV8pju2liB4Uj3FHqrlTSBxAyNqvq++1KtnFmsHEreOzpThhg2FmU96+ASpnX++Eajdh47Q6/lePDZ9Hrm5cNNNRhgHDYJp04xtTn4OsRLLnM1zuOVPmxg4ENLS4E0noYcrAo1SGjGy60jeWP5G8cTBeUUTB/du2jtoeL77gPdWX/ZZNO0vgOCTODhrqtcmLibOWxXaxb/6cqisG/O2z/PmfQxkl5KQwjXdrmHN/jVBbQC6NOzCuS3ODWkDMLTTUBqnNKZeUr2gNgB/OO8PvDriVTo1CPWj3XwngzsMDmljqXwEe75jqmgvFZElwNvArapaPF9cIQ2BlZIpn/lmKYnUj0jELQMYATxEGS0FUNV3VbWlqiaqahNVHexz7u+q2l5VO6vqJz7HP1bVTs65v5/J/X8ODO9S/KFa3gQaloRCUasdIMCsfn0jbPv3Fxc3d3/XLli/3gSg+LbdrZsZsvx22TaW7VnG6xufpEEDk5gZYNgwI4qbNhmxGNppKILwnwVTGD0a+vQxa+qOHy8enn/o1CFeW/Ahp30CIv2Fy5Nmqi/7Jw72tfFWX175jjdxcKDelLf6srNoOlgS3yLVl0NEJrpD0+F6ZW5vK5SN+1nD2STEJvDjzT/y90tC//dMjk9mbK+xRaI8LdWHYM93VX1HVburWQbQR1U/DNPUfcDVlFJ7IslQ8rUWXQKQD4yO9AaW6OBmZ/B9qJY3NZxRJv+em++wpD/165sF3lBc3Jo6AxKuuHXoUDQysqtZvsTayU2N9QAAIABJREFUDUYstiS9T9+LDnptLrvMvM6dawSlQ70OnJV/KZo6maeeLuDRR836unffLTrHdUnbS6hV0JKbn5lEt26w1Qmw8xel63tcb6ov+yUO9rUBMzR5Mu+kd74vVPHMtfvX8sOOH4IKV6TVl910bG5m/2Drty5qfRFt67YN6I8v13U3iYPDzYG1rtuaejVC99wslkjQDP3ad6OE2hNJzw0ReovwLxE2Aw9QglLflujhZmMPFURRlpSm51avXmGwib+41asHcXEmKGTDhsLkzC4dO5o5vPWbjFhobA5xaYWJgzt3Nj4tXlyYXeP0gnSot4m1p77lF78wGVOmTy8qON99G8vRbydAx0/ZeTSb3//etOcf5FEvsREXNBzG1KxXvYu7AwmXW3053KJp30KhwTJ9QGHi4Hnb54XNhZiv+QHv5RIjMd7M80lxwXPR1qtRj8cHP86vev8qqI3FUtZIpvSWTPmXZMpmSqg9QcVNhE4iZIiwGpOdZCsgTrTkM2fqtKX8ObfFuUUequVNsDm3cD03F//zMTEmonLB4Y/YdPHFNGleNONOUhI0bw7Ze5zllwWxrE6a5D0fGwu9esHiJSZLx+lTCez8agSJ1GLSkkmImN7d3LlwMrdQcJ59FuptmwBSwAW/eY333jOVw31FKT8fhgyBr5/ysOfEbiZ/W5g42LVxcasvf7P1GzYc2FAss4iLt1Do8mkczjkcVJDc6ssQvEcGhYVC/f3x5w/9/8Azlz/jzeQRjNv63cbIrrYQiKV8kUzpJJmSIZlSVHsydKBmaMTaE6rnthq4BBimyoWq/BvTLbRUEfwfquVN7dqQkFC8hxZuzs0lMcBzumlT2HDqR/JbzWVv3eLZ+Js2hb0HHNFbO4wNp34oUtOuVy9YvtJMnO3bkwCnU7ikSWH15QEDzMLyZSud3l9+Ah9+CGMGdea8luexpe4kQJk2rahw/ec/8NVXcMeQy+FEQ/42YxJQPMP+ggXg8UDS2sJCobn5ucRKLLExxRcwe1JN4uCP1318xpGJbjq2cHZ1kupw+zm32zkwS2WhUHsy9ELN0FJpTyhxGwlkA7NFeEmESwH711/FcEvaB6q+XNbcfjt89FHRcH8o7LnFBkhGUc9neiaQuDVpAvsOGlFZEV88PL9pUzhw2BG3xTcVq2nXpk3h+b3Z5gF/2wXp3kKhZ59t7JavNqK0fUsCJ0/C5Zc7c2CHVtB5wCK+/LJoCP+LL5qAlKefSODcGmPZWesDlq47UGTocutWGDjQ5MP835tb0r3GIG/iYF+xyc01hVy3b4eBbU2h0CM5R0IKUqTVl2/rdxudG3SmQ/0OIe0slkpEofZkykuSKaXSnqDipsp7qowBugCzMfm9GovwvAi/LKXTlgrGLWnvX325PGjevDCIw5fhThbQdu2Kn3OHMsEMM/pTqxYcPWEEY+VpU33ZF19xq5F7FkM6DGHK0inexMGtWwOxjrjtSqB5cxjaw1RfnpQ1ifbtTW/TDUrZuNaIRf/+JnFwYmwiif0nMX8+nMgxNruzE1i0CMaPNwEu917pgbhcMt6aVqR39+CDkJ9v5gvPOQd2fuxhy+EtfLHxC+8ygLw8GDrUfG+dO8Oin2KCVl/OzjaRnWAKhT4w8AHG9BhT/EvzoUfjHqy+YzUtarcIaWexVBY0Q9/TDC2uPZnyvGRKxNoTSbTkcVVeV+VKTGaQxcCfSum3JQp40sxD9evNZ5TvutQMHWqSI98dICdtjRrAFXfARX8P2HNLSsKIU0EM+eQVSRwMRtwOHzOCUq+2qX228+hOb027s87CK26HDyTSurVP4uBNs9lxbAudO8OGzY64rUukbVtTecCtvrwx5XVO5uZ4A1eWLTaO/tL5bzb07DQSD/Vi9sHCRdOal8D06TB6tBH1P/4RDnx/NTViarF091KvcL36qum1/fnPZoj217+GCb3MImdfcXvwQfPjoUULM9QJ8KcL/+QVQpc9ewoDdCyWqoyTIPl1zdBSaU9E0ZLemykHVZmoyqUl9NMSRdyS9hWRjisYrVsH7pklJQHtvoALHqEg9mSx8zVqYMTpeBO61u5b7DM0bQoaYwSlbq0EruxUtKadb8/t8IEEbwke76LppVPp0gW2bDc22dsT6NKlsH1Pqodj+Qeg00y27jA2WYsSqFuXInZ9YjwcrrmAxdmmztsP8xI4fNiIG5hSPjUTk2lz/DqgULiefRZSU414ZWSY+na7V3Xkkr
aX0LSmWQuxaBHce68ZKq1Tx/QY8wPMQLz+uikx1Ly5eR+MdeuMoOblBbexWCoTmqEHNUMnaoZGrD0lEjdL1SQ5Ppnrul/H2yvf9lZfrix4xSvpCLN2vlfsvOm55UB+Atd29BQrFPr/2zvz+CiqrO//TnYIJOxhJ7IJRCAgKoqoqOAyiKigKEqizouOMvq8Ko7K+IyzqI/jjDOPOjPvMDoDCrjgKOBHEGXAFZFFEpaEJYQl7AYIOwlJzvvHqdtV3V1VqZBOdyd9v59PPp2uul11b3VSvzrnnntOmzbGfoi4JSck+9W069ABPnE7UpqEToZ3LrNFJkZkjsDM/Jno2Il983r7dif5uU+v7X4tOjbrCGTPwJ790qZwQxKGDPGfW7zrgolAVQI+3PQh4igO338XDyIzXVhSkvx+anmuvI9Pwq5dUon8rrvEvXnXXTLejz4C5k+Yj3duk9Ref/6zuGffeQf44x9lYfrHActfDxwAHnxQFrBfeCHwwAPixgxk4UJZHzhyJDB+PFDt4KlesgR49FEz7ZlG09DQ4hYjqOrL/y74d6S74odP3ADM3RJsWfrckpXJmJgt1ZetASPWz7dKNzOHqJp2CQlAarqIX8Vp03ID4CsUWtFuOc6clTYnjyX51ZyLj4vHPQPvAXotRHGpVF3aXpSE3gGZo4b2bwcU3YCzRr7H774DsrLE0lJceSWw8+vL0D29J5Lik3xJn3/yE3lt2lTaLFok9cVSk1Jx9qzUrhs/Xo41dqysI5w71//8M2ZILs2ZM4G33pKcnP/8p3+bigoRwH79gKlTgXnzgA8/DP5OvvhCXK6vvgpccYV/Dk4rf/2rPFxcc43k9bSDGZg/X6qsOwmpRlMfaHGLES7rchl6tuoZ1nRcXrDOqS3dGVwo1CdeVUno3r61L3GwKhTqJ25pMhc2pOMQ9GvbzzfWZulGNGWVabkBZvXlguQZvmOgKiko8CVnYA4QV4WiZiKqx8uS0DMg+LBPHwB5uQDEKtuwwUwDphg4EAAIj3Sfjt+O+C1WrRKLrK9ledmIEVIJ4cgReb9ihSxVuFES7iMhQVycCxf6i8WcOcCwYdKP3r2lasKsWf7nnzcPKCkBXnpJ6uL16gX86U8I4vHHpZBscbEs35g6NbjNd99JdGzXrsA334ho2jF1qgjyuHGS0NqO48eBMWNEtP/nf+zbAOJOfeYZsWzd2LlTgng0sY0WtxhBBVF8seMLbD+yPdLd8eETp603opqr8fa6t/32+8SvKgkJCWKV/XjqRywqWuT/eQCtWojlpsa6vGQ5th7aitQ0Q7gqk32VwQGxjsb1G4dVp94DUsp8bbpYiysB6Nu2L9qUX4yKpjt9bQLFrVkzoNPJ0Uiuao3k+GTs3o2gNllZ8pq8d4Scd5W4EK3uTSWIeYbndYVRuleV+gGA4cOBsjIzmXRpKbBunSmAgOTV3LRJKioo5s2TQJlRo2RZxqRJwPLl/u7LtWtlju+xx0TgHn4Y+OwzEQwrL74oVtvXXwNPPSWWZEGBf5sNG4BXXpFE1j//OTB9ur2b84kngE8+kev19NPAp58Gt9m3T9y6L74I3HqrWKd2/Otf0m8n4QakMO7EiUC7djLPKeXFglm7VsomPfss/HKMWmEGZs8GXn7Z2XoFJNBnzhx5uHBj7drg6xjI8eNSud5u3lVRUWGWe4pVtLjFEJMGTgKBggQkkvjE68d+uLzr5UZJGvNu4xOvahGu63pI4mAVWOL7PIA2LczoQmtNu6bNTassNaD0V252Lk5VHQey3ve1CcywAgCDKdd8Y2PdAUD3bklov2sKujfrDyA4XViXLmKpbdwoN8XCQllkbkWsO1Pc8vIkSMSa0mzoUHlVQvHNN/J65ZVmmxEj5HWZURSCGVi8WFygar3hmDHyahWTBQtk/m+CscLgnnvks9Y5vrIycZ3ee6/U1Hv4YRHo98zMZwCAf/xD5hpffhl4/nkZ++sB+SX27hX36UMPiVD26AH85jcI4oUXRDzWrpVqEE8+KTX7rOzZI4mwhw8HrrtOrMYtW4KP9atfidBkZsq5PrDJTrdvnzwEzJsnwT52kb6AfP7uu6U/o0YB5eXBbfbulTWREyfKA86mTcFtAKlkP3iwtPnrX+3bbN0qwp2dLRlyKiqC2zDLA8XQoeKejlW0uMUQXdO7YsR5IzAjb0ZY67wBwLNLn8U3u74J2p6SwkCCBIzkDMwJKhSqxIsMcUuMT8TE/hN9hUKbNIF8HkDrlgm+z1lr2jVpbpSuqUryW1cHAFd0uwJdm2cCPRf72rS0yft7ResJQGUSCHEAx6Ndu+A27dsDKSuew7TOsgwh0HIjkpv3zp1yoz5xAn7ze4AsWm/b1nx6z883BU/Rp4/Mz+Xny/u1a0Vc1IJ0QD6TkiL7AKmMcPgwcOmlZpsLLhC340pLXdZly8R6VGLao4eIgLUo7OLFEmk5dqy8b9dOBGW+pUwls1hzo0fLEofmzWXe8OOP/aM033tP3j/6qAjh5Mni8ty61WxTXi5ziXfdJTf13/xGrJL5AWUx33hDBO+f/5Q5yPh4mTe0UlYGvPaaiPby5fJw8etfB1tvf/yjuIbXrhV36l/+Im5aKyUlIroTJsg41qyR8wbyzDNiXc+eLWOcMiW4TV6eiOjtt0uwz+OPmwm7rUyeLFbk1KkS9BP4sACIWM+eDfz0p2YChVhEi1uMkTswF9vLttsKTX3ywjcvYNrSaUHbE5OrAGKgKhnj+433JQ5WKMtNiRsAv0KhZsBJElq29E9ioGraHWtnmCY2llscxeHu/pOkDwBQneQXBKI4r31LYPPNiK+SA9gJYEaG3HSVCy8zM7hNhw7yJL/d8AwHihsgyxdKSmRObcsW053p63OciI6aV9q0SY5jXWoRHy9zeRuNqodqbdyQIf7HGTLE3FdVJdag1QIExCX69demACxfLhbbJZeYbUaMENfo8ePyfutWsX6uv95sc/31Mn9oFdNPP5V+qgcBtXTCak0uWSLHVdbkVVfJtQ4MhpkzR/b16CH7x46VG711bvL990UAH31U5i8ffliu0Zo1ZpvKSnFv3nab9O2ZZ+QYs2f7n2/mTLGcXnxRhPuii0QErRw6JFGukyeLOE+dKmnb1PeieO01eWCZPl0s3vJyebWyZo0E+/zyl8Dvfw9cfbW4XgMDdV5+WR6AnnoKMY0Wtxjj1r63ollSM7+Iw/qmqroK1VyNr3Z+heIj/o+/8UmmyzA9JR239r0V725811coNCUFQEK5n7gNyBiAwR0GY2b+TEvASXJQ4mVV025ni5m+cwSKGwDcO9hcCJ3WNCkofRggN0ss+l90Wf4R0tPlxmjXpqxMxAsIro4AyBq0vXtlUTtgL4Bdu8pT+8GD8pTetWtwmx49pAwQIOJmDUpRZGWZN9H160XwrMVeAbH21q0TYSsulvkoO1dpaan0B5DlC4MG+adTGzpUxE8J5Vdfyevw4WYb5SpdvlxeKyulnVoMr65Ht27m5wGxJpOT5WYOyHlvvFFEQgluSYk8CChXKyAu2AMHTBcvIFZnly7i/gNEw
Ihku+Lbb8XKVULbubPM9/07INB47lzZnpkpx7jzTrnO1mCWefNEAO+9V97n5prbFZWVcmwVEdutm1yTwLWKH3wgf3fqWD/9qaRs+8bynLp9u3wH999vn+4uloiIuBHReCLaSETVRDTEsn0kEa0hovXG69WWfRca24uI6FXSWV7PidSkVIzvNx7vF7yPkxUnw3JOlbUDQFCOy7hEU9wA+AqFqjpkyjKLY/9UVDkDc7Bm3xpsP7neF3ASKG6qpt2ZuEPGOZKD3JIA0LN1D8SVyF24RZp9rsaMDAAnOmD3V9fYzskBZv25TZvgKIAdO5olfAB3cVPBB9blC74+9xQxqqyUm/r55we36ddPbn4nT8pNr0sXcYtZ6dVLBHT3btMV2q+ffxsliBs3igiuXevvAgXEagEkGAUQQUlLg9+SiTZtZCxKbDZvFjG1WpOACOK335rvV64UMbVmsBk6VARIuQq//lper7jCbKNSwal91dXiXh050qwL2Lq1iLmamwSkTVycv+iOHCkPAWVG3NGhQ/L+hhvMNjfdJK+ffWZuW7ZM/nays+V9RoZcu0WLzDarVvlHxALye3Gx+RAESNDN8OFACyn0gNGjpZ+ff262WbhQXm+5BTFPpCy3DZDkmF8FbC8FcBMz9weQA8Aa+fA3AP8HQC/j53pozonc7Fxf4uBwYBW3wByXlOAvblefdzU6p3X2uSaVZRZX7Z+b667+dyExLhFzt870iZtd1QFVQVqdw85yA4BmP/wS2DQGrZo3sd2vjn32bHBJH0VGhrwWFDi36dBBbrKFhfYVFAARt5MnzfVlduLWvbu4rjZuFIHo1i24jfqcshTtXKAqMGb7dlPcAq1A5RbdsEGOdfp0sAC2aiXiZbUm+/TxLy4LiBWo5gqVyAXOKQ4YIC7NsjIR0zVr/F2ggOTqBMygmlWr5G/FeqwOHeSBQ51n5045ZuCxLr9cjqOswNWrZXzWh6XLLpP9KnpVvQ4bZrbp0UOugbJeAeDLL8XNa70OV18tbVQE5pdfmtsV1xh5OJToHjsmfw/K+gWkf4MGmeKt+tWhg30e11gjIuLGzIXMvNlm+1pmNpw62AigCRElE1EHAGnMvIIllO4tSPlxzTlweVdJHByudFxK3C7tfCl2lO3A1zvN/8az1f7iFh8Xj0kDJmHxtsXYd3yfo+XWpmkbjO49Gu8WzAIST9paboDUtMuIP993DjtrCgBaHBoFvDsfrVra/0tYrQYn4VJBJps3O7fp2FFeCwrEurPzP6i1eGpuyk7c1LmUINkFuKhzKXGzsxKV4CkroV274Lp67dvLHNuOHe7uVKurdPNm//Rkiv79RfiqquRmnZgY3E6937RJrNdTp4LdqVlZYrWoKu6bNon1GuiKy842g2rWrZPXQLdr//4S3LNrlwjY6tXB1qQSUzU3t3KlOWepIBILVolbaalYxOqzikGDxFWpoibz8+V6Wt3YffsGBw0Bwf0aNkz6oubdVqwQq1b7taJ7zu02AD8wczmATgB2W/btNrbZQkSTiWg1Ea2u1An0goijOEwaMAlLty/FzrKdNX+gjqhSMXdk3YHmSc39FpL7rLoq/4CRaq7GrHWzTMuNg92FOQNzcODkAYl0dBA3IsLINvcDVQlAuU0DA+WutAsUAfzFzW4uzXoMwFnc1PF37jTdS4Go7YWF4tps2za4jeqDF3Hbvl0Ezs5y69JFBKG4WKylwGrogNwovcwV9uwp4nbypNzU7Vyl3bqJK3X/fjlWt24icFaU5VhYaLpvA5dVJCaKhav2Fxbazzv27y/XqLpaxI3IXigBsYIPHZK5xUABbN5croGK4tyyRa5BoCdg0CDpy9mzpvAGWrnKRaksynXrgs8XFyf92rBB3it3b6A7+IILxJLetUseAoqKzPnEWKfexI2IlhDRBpufmz18NgvASwAeOJdzM/N0Zh7CzEMSnB7VYxxr4uD6RglYi5QWuD3rdszdONeX41IV97SKW+/WvXFp50sxI38GkpPZUdxu7HUj2jZtCzQ95ChuAHB/v8eAv60DzjgoFwz3J5zFzRqJ6KWNk7gpATx40FnclKuypETa2AW4KHFTN1DlErWihErdRO2EKzFRhPHAAREcNW8YSKC4OQW57Nrl3kYtkC8pEQG0s0rPO08Et6jIWdzUtm3b5Ka+c6e9pZiZKUJz4IAIeMeOweHxStwKCsy+27n1zj/fXDen1pvZ9amqSsbnJG69e8t1LygQC27zZhHhQKzitnWr/E0FPsSoB4jNm835x8AlKLFKvYkbM1/LzBfY/Mx3+xwRdQbwEYBJzKzijvZASh4oOhvbNOfIeS3Pw1WZVwUtmq4PrDXOVI5LNd/ns9wq/efUcrNzUfBjAQqPrQHiKxCPYHFTa97U5+1K5gBAh4x4oNTmsd5CTZabNRDDSUS9uC6t1l1N4rZ3b7BloPBiuaWni2irNk5ja9lSgjO8iNuuXcZ6PpsKD+3bi1tPnc9OcK3iVlKCoGwwgAibEtziYhGCTjZ+GiVuuw2fjlNwDiD93r3b/nwtW8o1LylxX6LRu7eIG7OzuClRLC4WwWnSJPic8fEi6iUlsvC8qspevPv0kWtw/LizW9nqwlUuYS1uQlS5JYmoBYBPADzFzL54KWbeB+AYEQ01oiQnAXAVSU3NqMTBy0uW1+t5fNWpE5IxrMsw9GjZwxcwovalpviL1+1ZtyMlIQXzts8A4iuC9itysqX2GaqSHOcZ7G6ygdQkbnFx5nxOE/uYE0/WnVXc7NbTAaa4lZc7L8K1Wm7x8fZiSiRjV3M7Tn1q1cq7uB0+7OyWDbQm7QRX3eh37pQbu53YANKP/ftFADp3tg9rz8wUN6ISJLu+B4qbnaUIyPj27XN3u3bvLufbuVMCPOxExBqgs2ePnM/O8lYRsUqY7cRbbVP9sutT27bysLVtm7uVG4tEainALUS0G8ClAD4hIrXKZAqAngD+m4jyjB/1L/IQgDcAFAHYBmBR4HE1tWNcv3FITUz1WzRdVwp+LEDZmTK/bVbLzVcodMcy7Cjb4ds3a6a/eLVIaYGxfcZiYckcUGI5xt1iL27Z7bOBvYOB0w6mEpxFxEpNbkm7toFYxc3JuquN5QY4i1tysmnVtWljfwNVn1fr7twst6Iicd/ZuS4B2X76tPtcoRdrskULuQarV4vF4iQ2akH84cPwywdqRW13c81axU0JpR1qcf2OHaYlF4gaj3IV2l2rTp3E0iwudn9Y6NrVdM0C9v1Sc6Z79jiLG5Gc4+BBEdT0dG9/w7FApKIlP2LmzsyczMwZzHydsf13zJzKzNmWn4PGvtWGW7MHM0/h+valxQAqcfD7Be/j1NlTITnmyLdH4uGFD/tts4obAF/16Lfz3/bta2mzvix3YC6OnDkCRjVapzv4HAFg9kJgwZuOu71EjtVkuQFmRJqTuFndknbr6QK3O4mEVdDc0icpMXHqjzqfSrDrFuSibrJOQqI+60XclNjYBcIQyefV/JBdG0CEav9+ETenfgeKqZ24pafLQ0BBgQS6uInbvn0iEk6CpPqqzmd3reLj
ZXtpqbu4deki11yl2HITt/Xr5cHCbrkHIKJ78KCc0+6BIlaJKrekJvzkDMzBsfJjmLcpuFDouVB2pgwfFn7oZ72poBElbt1adPMVClWRlGqflWu7X4uOzTs67vdxMgM44XAXMWjd2j+zfiBexE09TjmJiXVezkncrJ91Eom4OFPU3MRN3fQDF2Y7nc/NLamoSZRLS50tYavYtG5tv4gdEKtIWZNOFm779mK5HTrkLG5qe2GhiKad2CgxVa5ZJ8tUuV2PHHH+XpS4KfF2ehBo2VKO4yZunTpJ1GhenlwDO0vRKm5u51NW7qFDzi7jWESLW4xzZeaV6JbeLWSuyfLKcl+hUEWg5QZIwMi2I9uwbPuyoH2K+Lh43DPgHsf9irlzJWegG6Wl/lkoAgmFW9JqITq1sc4dOd1EAfNm5yZuyg3qJm5KlOLjnYXEOmanoBwvFqe6sVZWOltkgPSjJnHLyBA36bZt3iy3tm3dxVSV/rETEUBE78wZd8s00HJzGmOrVjK+o0edxVQ9IBQVOc8JN29uWp2Ac7+U5abFzR8tbjFOHMUhZ2AOlhQvwe5ju2v+gAtV1VWoYvGBWReI+wJK4s075619b0VqYir+lfcvAM7ilZudiziKQ8smzqozbpzk5asLXiw3hZsbMPB4brjdiNRN2ClaEjBFLXCdmF0/WrRwds9ax2wXBWk9jjqWUxv1ebfxN29uWsFO4m0VvZost8OH3cU0Lc2sWed0PiU2avmFHYEL552+v5Yt3ecBVZ8AEUEngVfWqHLhOlnMGRkibAcOaHGzosVNY655y6/bmrez1ZJPqF1qOywvWY4th2RRkJ3l1iypGcZnjcePp34EIJGUdvRp0wfrf7Yed15wZ536VhOhFjcvbZzcTIA3y02JmxfLzW1ctRU3twAddXN165P1Zl6XZRXWG7mbmKalmXXPnK6n+r7Ky53FLTVV2p04Icd0GmOrVmYOSqfr7mW5ByDj2r9ffnfqV0aGPCzs3avFzYoWNw16tOqB4V2HY0b+jDqteVMiNmnAJCkUalQesBM3QAJGFG5ux35t+zmKX6i47DJJjhtOy81N3NQNPRziZhWYulhugHlt6ipuXhbEp6SY/XJypwLeok+9zIVa++L23Xl5WLCO2+07tgqf00OFtb9a3Ey0uGkASGDJlkNb8P2e78/5GErEurXohut6XIe31r2Fquoqx6CR4d2GI7NFpu2+cDNqlGRzdwqptxIqy83tRqTWS7ndtGvjlnQTN6tw1VXcVH/dxM3rUgeFF6uzruLmdXxqXG7Wq1WMna6nlz557Zf1WmtxM9HipgEAjM8KLhRaW6xRkbnZudh9bDeW7VjmaLnFURzuzb4X8RSPZkkNp2RwOObc1EJcFXhhR6gsN6t1UFdx8yK4ympJSHAWJev2mtx21vPaEUrLTQWtuI3Pi+VWW3GLj3e+Dtax6zVuJlrcNACAtOQ03NbvNry7wSwUWlusIjbm/DFokdICM/Jm+GUoCeTpy5/G6smrkZbsEMYWhYTKcnMTQFWWxSmUHPAmbl6iQGtrubmJspc+KXFLSnIOcrH2w+1aqnZeLTcngfAq3kq+mCtLAAAUeElEQVTc3FLWeok+ra24OVWQAPyvtZe/u1hBi5vGR87AHBwtP4r5m84ts5lV3FISUjAhawI+LPwQpadKfdsDSYxPlCwjDYhQWW5uDBsmdbqefNK5TajckrW13Nzmm7y4JZ3m2eyOA4RW3JyulVfLTX3eTdy8XM/kZPMYXsXNCeu1drsO4YKIXiaiTUS0jog+MtIqqn1PGwWnNxPRdfXZDy1uGh8jMkegS1oXv5I0tSEw5D8nOwenK09j9vrZACI/rxYq6mq5vfgikJNT8zEuv9xdJLwISajm3LzO69TGcquudm5j7Yfbg0Jt5/icqK3l5uWBAnC+nkTmPi/i5sUVHPh7BPkcwAXMPADAFgBPAwAR9QMwAUAWpNj0X4nIJmtoaNDipvERHxePSQMn4bNtn2HvcZfJHgcC59Yu6XQJzm99PooOS7ryxDiXO0IDoq6W21NPATNm1L0foZpzs/bV6cnf6hLzsvbOrU+qntoZF+93KC232kbA1tUt6cVyA8zgJS/i5mYtR5vlxsyfMbMqpLkCZkWXmwG8y8zlzLwdkif4YrtjhAJq7Ckau3Tpwm+/Xf81yxoLJadKMGnVJEw+bzLu7Fq7tWUbj27ElLwpeKn/S7i4lfzNztk1B//Y/g/EIQ7/ufI/9dHlsKGqMAcWjKxtm1CxZ4+sgWrTxjnvYFmZZPno0cP5ps3sXAzTipexbdsm52zb1r6em6KkRATCKYNHRYWZdmrgQGcxKSqSTCDt2jlXGCgvN5MdO/W9utqsdu12vs2bZZ1berpzaZnTp82F3tnZ9hUNAKmyXVkp352TeO3dKwvQW7WyL8MTeD5VwbuujBgxogLAesum6cw8vbbHIaKPAbzHzLOI6HUAK5h5lrHvTQCLmPmDuvfYBmZu1D9NmzZlTe247M3LuO/rfbm6urpWn1u2fRnjOfDS4qW+bSVHS5ieI27yuyah7mbYSUpiBtzbiFSEpz/Tpsm5HnrIuc2iRdJm6VLnNtXV3vrtpc0dd0ibKVPc29XEnj3m+U6ccG43bpy0eewx5zZVVTX3vaLCbHPmjHO7K66QNjff7Nxm82bzWCdPOre76ippM3u2c5vnn5c2Dzzg3KagwDzfunXO7WoDgJPscl8FsATABpufmy1tpkFqcyoj6nUAd1v2vwlgnNt56vKj3ZKaIHIH5qKwtBCr966u1efsQv47p3XGyB4j0SSx4Ydx5ecDM2fW3C5cKHeUk2UASLLoF1+U+TsnvFRNqG2f6jr349Ut6WXOLS5OinoOG+bcJjFRrmOTJu6uvdrOubkda8gQeS0tde9X4DEDiYRbkmsoRk1EuQBGA5hoiCUgBaat9nW9Fp128RxrYpXbs27HI58+ghl5M3BRp4s8f84p5P/1G17HptJNIe1jJOjTx6x87MTWrWYZk/pG3dTcZhZSUmSOLxTcfbf70gRrn0Ipbm4L69V5arqpFxTULOJNm7rPfwHeoiWtQuT24PHss+LivOce5zanT8urm8BH25wbEV0P4EkAVzKztZbWAgBziOgVAB0B9AKwsr76ocVNE0R6Sjpu6XML3tnwDl657hXX1FfbDm/Dlzu/xH2D7nNcrN2rdS/0at2rXvscLfTs6TwXE2rCHRnnZeo6VOLmFohhRYlHTTd1L9ZpkybuwSRA7QNK3EhLA/72N/c2pwxp8BIxCkRNtOTrAJIBfE5y4Vcw84PMvJGI3gdQAKASwMPMRqb1eiBSlbjHE9FGIqomoiE2+7sS0QkiesKy7XpjbUQREYXoWVTjRG62FAr9eMvHru1m5s/E/Qvux8aDGx3FTVM/uN1gI4USGze3nRe8jk2JVihu6k2bhkbcQikwXsQt2iw3Zu7JzF3YLDj9oGXf8ywFp89n5kX12Y9IzbltAHArgK8c9r8CwDdwYy3EXwDcAKAfgDuNNROaeuKa865Bp+adakzHpbKZzMyfGVSUVFO/qBt
7NAY8h8uCUNcgFDf12oibm3iHcg7ziSdkvtTNdRlt4hYtRETcmLmQmTfb7SOisQC2A9ho2XwxgCJmLmbmCgDvQtZMaOoJVSj006JPsf/Efsd2ylqbtW4WTp2Vx0wtbuEhlDfRUFuB4XaPhaL/zz0nYuLlPOGymrt2lUw1DWmdW7QQVdGSRNQMwC8A/DpgVycAJZb3u41tTseZTESriWh1ZWWlUzNNDeRk56CKqzB73WzHNkrc9p3Yh0+2fgLAvyippmFQVgYcP1734yjB9VJdIRSo84TCeh0/Hrj6avc2XgJKwo01aMUtgCXWqLc/QSJaQkQbbH7cLK7nAPyJmU/U5dzMPJ2ZhzDzkIRo+itsYPRp0weXdLrEtc5bRVUF2jZti9ZNWmNRkXiSteUWXkJxY09NrTlS0AuhtCZrcz63VF6hJNyWmxes1zzc1z+aqbeviJmvPYePXQJgHBH9HkALANVEdAbAGoRxfYTGJDc7Fz/75Gf4Yd8PuLBjcHqHiqoKNEtqhtG9R+O1la8B0OIWLqL5RhauecBwzzt6mXPTRAdR5ZZk5uHMnMnMmQD+DOAFZn4dwCoAvYjoPCJKgiTfXBDBrsYMd2TdgeT4ZMzMt1+9XFFV4avfptDiFl6iMaAkVLRr561duMWtJsvtD38A/vzn+u+PxplILQW4hYh2A7gUwCdEtNitPUsSzikAFgMoBPA+M290+4wmNLRs0hI397kZs9fP9kVDWimvKkdyQjIGtR+E/u36I47iEB+nHf/hIBqjJUNpTR49ChQXu7cJ5ZybF7yK2+OPA48+Wv/90TgTqWjJj5i5MzMnM3MGMwfV9WHm55j5D5b3C5m5t7FG4vnw9ji2yR2Yi8OnD/sCRqwoy42IMG34NNzU+6YI9DA2iUa3ZCgFNy2t5gXR4RZ4db5omnPT2BNVbklNdDKyx0h0aNbB1jWpxA0A7rjgDsybMC/c3YtZso0ar1ddFdFuRJTcXHn9yU/Ce1495xb96OcPTY0kxCXg7gF3408r/oSDJw+iXao5EWIVN014ufRS4OBBKS8TLYTbkho8ODJuWW25RT/actN4ImdgDiqrKzFn/Ry/7VrcIks0CVssoIRUryeLfrS4aTyR1S4LQzoOCUrHpcVNY+UXvwDGjAHuvTfSPalfwrVIXXPu6K9I45ncgbnIP5CPvP15vm0VVRU6I4nGR0YGMH9+zTkaGyrK7RptwTz33Qc8+GDN7WIJLW4az0y4YAKS4pMwM88MLCmvLNeWm0YTYd58s+byObGGFjeNZ1o3bY2bet+EWetn+XJKarekJpaIpjWFGne0uGlqRW52LkpPlWLRVskjqcVNo9FEI1rcNLXiuh7XISM1w7fmTYubRqOJRrS4aWpFYnwiJvafiI+3fIwfT/6oA0o0Gk1UosVNU2tysmXN2zsb3tGWm0ajiUq0uGlqzYCMARjcYTBm5M3Q4qaJSaJtKYAmGC1umnMiZ2AO1u5fCwZrcdNoNFGHFjfNOXFX/7uQGCfZY7W4aWINvSQg+tHipjkn2jRtg9G9RwMAkhN0QIlGo4kutLhpzpmcgTkA4LPgNJrGTrLxHJeknRVRT6QqcY8noo1EVE1EQwL2DSCi74z964koxdh+ofG+iIheJdJTupHmxl434pnLn8GNvW6MdFc0mrDw3/8NTJ3a+BNDNwaII+A8JqK+AKoB/B3AE8y82tieAOAHAPcwcz4RtQZQxsxVRLQSwCMAvgewEMCrzLyopnOlpqbyyZMn62soGo1G0+ggolPMXEMd9OgmIpYbMxcy82abXaMArGPmfKPdIUPYOgBIY+YVLGr8FoCxYeyyRqPRaBoQ0Tbn1hsAE9FiIvqBiJ40tncCsNvSbrexzRYimkxEq4lodWVlZT12V6PRaDTRSL0VSyeiJQDa2+yaxszzXfpzOYCLAJwC8B8iWgPgaG3OzczTAUwHxC1Zm89qNBqNpuFTb+LGzNeew8d2A/iKmUsBgIgWAhgMYBaAzpZ2nQHsqXMnNRqNRtMoiTa35GIA/YmoqRFcciWAAmbeB+AYEQ01oiQnAXCy/jQajUYT40RqKcAtRLQbwKUAPiGixQDAzEcAvAJgFYA8AD8w8yfGxx4C8AaAIgDbANQYKanRaDSa2CQiSwHCiV4KoNFoNLVDLwXQaDQajSYKafSWGxFVAzh9jh9PABBrawn0mGMDPebY4FzH3ISZG7Tx0+jFrS4Q0WpmHlJzy8aDHnNsoMccG8TimBUNWpk1Go1Go7FDi5tGo9FoGh1a3NyZHukORAA95thAjzk2iMUxA9BzbhqNRqNphGjLTaPRaDSNDi1uGo1Go2l0aHEDQETXE9Fmo8r3Uzb7k4noPWP/90SUGf5ehg4P473CKDlUSUTjItHHUONhzI8RUQERrSOi/xBRt0j0M9R4GPeDRoX7PCL6hoj6RaKfoaSmMVva3UZETEQNOlTew3ecS0Q/Gt9xHhH9NBL9DDvMHNM/AOIhuSq7A0gCkA+gX0CbhwD8P+P3CQDei3S/63m8mQAGQIrCjot0n8M05hEAmhq//6whf8e1HHea5fcxAD6NdL/re8xGu+YAvgKwAsCQSPe7nr/jXACvR7qv4f7RlhtwMYAiZi5m5goA7wK4OaDNzQBmGr9/AOAaozpBQ6TG8TLzDmZeB6A6Eh2sB7yMeRkznzLeroB/iaWGipdxH7O8TQXQ0CPMvPw/A8BvAbwE4Ew4O1cPeB1vzKHFTSp6l1je21X59rVh5kpI8dTWYeld6PEy3sZGbcd8PxpH1QlP4yaih4loG4DfA3gkTH2rL2ocMxENBtCFzYojDRmvf9u3GS73D4ioS3i6Flm0uGk0FojobgBDALwc6b6EC2b+CzP3APALAL+MdH/qEyKKg5TVejzSfQkjHwPIZOYBAD6H6YVq1Ghxk4re1icZuyrfvjZGEdV0AIfC0rvQ42W8jQ1PYyaiawFMAzCGmcvD1Lf6pLbf9bsAxtZrj+qfmsbcHMAFAL4goh0AhgJY0ICDSmr8jpn5kOXv+Q0AF4apbxFFi5sURu1FROcRURIkYGRBQJsFAHKM38cBWMrGTG0DxMt4Gxs1jpmIBgH4O0TYDkagj/WBl3H3srz9CYCtYexffeA6ZmY+ysxtmDmTmTMh86tjmHl1ZLpbZ7x8xx0sb8cAKAxj/yJGQqQ7EGmYuZKIpgBYDIk8+iczbySi3wBYzcwLALwJ4G0iKgJwGPIH1CDxMl4iugjARwBaAriJiH7NzFkR7Had8PgdvwygGYC5RqzQLmYeE7FOhwCP455iWKxnARyB+RDXIPE45kaDx/E+QkRjIKVvDkOiJxs9Ov2WRqPRaBod2i2p0Wg0mkaHFjeNRqPRNDq0uGk0Go2m0aHFTaPRaDSNDi1uGo1Go2l0aHHTRB1EVGXJYJ7X0KswKCzZ2d+o43GeI6InLO+HEtE/6t7DWvfjRA37mxjfXwURtQlXvzQaQK9z00Qnp5k5226HkbCamLmhJnV+j5mnBG4kogQjb+m5cAOAT+vWrdDDzKcBZBuZQDSasKItN03UQ0SZRr2qtwBsAN
CFiKYS0SojGeyvLW2nEdEWozbZO8rCIaIvVIolImqjbrhEFE9EL1uO9YCx/SrjMx8Q0SYimq0qQRDRRUS0nIjyiWglETUnoq+IKNvSj2+IaGAN48ologVEtBTAf4ioGUktuR9IaqzdbGnrGxeA8wMOdQ2AJUSUZfQnzxhLL+Oz84hoDRFtJKLJlmOeMMa+kYiWENHFxpiLjUW/qo/zje1biehXDmOx/T40mkihLTdNNNKEiPKM37cD+L8AegHIYeYVRDTKeH8xAILkBrwCwElI9phsyN/2DwDW1HCu+wEcZeaLiCgZwLdE9JmxbxCALAB7AXwLYBgRrQTwHoA7mHkVEaUBOA3JYpML4L+IqDeAFGbO9zDWwQAGMPNhkryltzDzMcONt4KIFhhtbMdltDvLzEeJ6HcA/peZZxupmOKNc9xnHL8JgFVE9G9mPgQpcbOUmacS0UcAfgdgJIB+kOS6KpvHxZB8jKeMz39iTVfl9H0w81cexq/R1Ata3DTRiJ9b0phz28nMK4xNo4yftcb7ZpCba3MAH6m6bIYw1MQoAAPIrDiebhyrAsBKZt5tHCsPUsT1KIB9zLwKMOuhEdFcAM8S0VQA9wGY4XGsnzPzYTVUAC8YQl0NKV2SAWC4y7hGAVBi/B2AaUTUGcCHzKzyRD5CRLcYv3cxxnfIGKNyZ64HUM7MZ4lovTFWax8PGef+EMDlAKy5GJ2+Dy1umoihxU3TUDhp+Z0AvMjMf7c2IKL/cvl8JUw3fErAsX7OzIsDjnUVAGtlgCq4/L8w8yki+hxSKPJ2eM+8bh3XRABtAVxoiMyOgL7acQOkhAuYeQ4RfQ9JgLzQcLFWA7gWwKVGH7+wHPOsJQF4NYzxMnO1YUX6hhc43ID3tt+HRhNJ9JybpiGyGMB9RNQMAIioExG1g1gKY40oveYAbrJ8ZgdMwRkXcKyfEVGicazeRJTqcu7NADqQJJeGMd+mhOANAK8CWMXMR85hXOkADhrCNgJAN2O77biMOcABAPKM990BFDPzqwDmG/vSARwxhK0PpMRLbRlJRK0Mt+ZYiIvWitP3odFEDG25aRoczPwZEfUF8J0R43ECwN3M/AMRvQcgH8BBSDkQxR8AvG8EVFgrML8BccH9YIjFj3CpacbMFUR0B4DXjJv9aYhldIKZ1xDRMQD/OsehzQbwseEWXA1gk3FOp3FdCGCtxfq6HcA9RHQWwH4AL0AswweJqBAizMq1WxtWAvg3pFbYrMDyME7fh9FXjSYi6KoAmkYLET0HEZ0/hOl8HQF8AaCP3VIFIsoFMMRuKcA5nu+XAIqY+d1QHM/hHLmoY58N9+oQZi4NVb80mprQbkmNJgQQ0SQA3wOY5rIG7zSAG6iOi7gVzPy7+hS2umK4UfMAJELm9DSasKEtN41Go9E0OrTlptFoNJpGhxY3jUaj0TQ6tLhpNBqNptGhxU2j0Wg0jQ4tbhqNRqNpdPx/G4lXtvvjmj4AAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "fig, ax1 = plt.subplots()\n", - "ax1.set_title('Digital filter frequency response')\n", - "\n", - "ax1.plot(w / (2 * np.pi), 20 * np.log10(abs(h)), 'b')\n", - "ax1.set_ylabel('Amplitude [dB]', color='b')\n", - "ax1.set_xlabel('Frequency [rad/sample]')\n", - "\n", - "ax2 = ax1.twinx()\n", - "angles = np.unwrap(np.angle(h))\n", - "ax2.plot(w / (2 * np.pi), angles, 'g')\n", - "ax2.set_ylabel('Angle (radians)', color='g')\n", - "ax2.grid()\n", - "ax2.axis('tight')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 105, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "(63,)" - ] - }, - "execution_count": 105, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "b.shape" - ] - }, - { - "cell_type": "code", - "execution_count": 129, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "def optimfuncQMF(x):\n", - " \"\"\"Optimization function for a PQMF Filterbank\n", - " x: coefficients to optimize (first half of prototype h because of symmetry)\n", - " err: resulting total error\n", - " \"\"\"\n", - " N = 2 #4 subbands\n", - " cutoff = 1.5 #1.5\n", - " h = np.append(x, np.flipud(x))\n", - " f, H_im = sig.freqz(h)\n", - " H = np.abs(H_im) #only keeping the real part\n", - " \n", - " posfreq = np.square(H[0:512//N])\n", - " \n", - " #Negative frequencies are symmetric around 0:\n", - " negfreq = np.flipud(np.square(H[0:512//N]))\n", - " \n", - " #Sum of magnitude squared frequency responses should be closed to unity (or N)\n", - " unitycond = np.sum(np.abs(posfreq + negfreq - 2*(N*N)*np.ones(512//N)))//512\n", - " \n", - " #plt.plot(posfreq+negfreq)\n", - " \n", - " #High attenuation after the next subband:\n", - " att = np.sum(np.abs(H[int(cutoff*512//N):]))//512\n", - " \n", - " #Total (weighted) error:\n", - " err = unitycond + 100*att\n", - " return err" - ] - }, - { - "cell_type": "code", - "execution_count": 131, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "3.0\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZAAAAEWCAYAAABIVsEJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nO3dd3xUVfr48c+TRgglhSQQEkLvHQMIoqCAYEVX17Wsoqvr6vbid7/6dVdd19113aJbv/74KnaxF0SFRQTpJbQQauikkUYSEtLz/P6YC44xIT035Xm/XvPK3Dvnzjz3zmSeOefce46oKsYYY0x9+bgdgDHGmLbJEogxxpgGsQRijDGmQSyBGGOMaRBLIMYYYxrEEogxxpgGsQRiWpyIrBKRe87z+LMi8uuWjKmtEJFYESkQEd8Gbn9URGY1dVytiYjsFpEZbsfREVgC6eCcL5Qi50spXUReFJGuLfj6d4rIWu91qnqfqv62GV6rn4ios68Fzr4/6PW4iMh/iUiSc0yOi8jvRSTAq8yLznPMq/LcTzvr7/Tarwqv1yoQkX9WE9MtIrK3yrrlNax7UFWPq2pXVa1oosPSpETkMREpq7Lfv2zG13tRRJ7wXqeqI1V1VXO9pvmSJRADcI2qdgXGAeOBh1yOp7mFOPt7C/CIiMx11v8duBe4A+gGXAHMAt6osv0BpwwAIuIH3AQcqlJug/Nlf/b2w2piWQ0ME5EIr+caC3Susm6KU7YteLPKfj/ldkCmeVgCMeeoajqwDE8iAUBELhSR9SKSKyI7vZsGnF/Zh0XktIgcEZHbnPWPicirXuXO/vL38349ERkOPAtMcX6p5jrrz/2qFJEZIpIsIr8QkQwRSRORu7yeo4eIfCQi+SKyRUSeqFqjOc/+bgB2A6NEZDDwfeA2Vd2gquWquhu4AbhKRKZ7bfoRME1EQp3luUACkF6X160SQwpwGLjEWTXBiemLKut8gC1Vj6XTHPhbEVnnvA//EZFwr+Nzu4gcE5FsEXnY+7VFpJOIPCMiqc7tGRHp5Dz2hYjc4Ny/yHnNq5zlmSKyoz77Wdtnog77Mc3rc3jC+ezdC9wG/NL5/HzklD3XTFfLPp73s2VqZwnEnCMiMXh+dR90lqOBj4EngDDgAeBdEYkQkS54frFfoardgKlAvb5UVHUvcB9f/lIPqaFoLyAYiAbuBv7l9eX9L6DQKTPfudVlX0VELgJGAtuBmUCyqm6uEuMJYCNwudfqYuBD4GZn+Q7g5bq8bg1W82WyuARYA6ytsm6jqpbVsP2twF1AJBCA531CREYA/wvcDvQGegAxXts9DFyI5wfDWGAS8CvnsS+AGc796Xw1yU13Hm9qNe1HX+BT4B9AhBPvDlVdALwGPOV8fq6p5jnPt49w/s+WqYUlEAPwgYicBk4AGcCjzvpvA5+o6ieqWqmqy4F44Ern8Uo8v947q2qa84u9OZQBj6tqmap+AhQAQ8XTkXwD8KiqnlHVPcBLdXi+LCAHeA54UFVXAOFAWg3l0/B8cXl7GbhDRELwfKF+UM12Fzq/mM/eLqzh+b1rGxfjSSBrqqw73xf2C6p6QFWLgLf4sgZ5I7BEVVeragnwazzv2Vm34TmuGaqaCfwGT7I5G9PZWtclwB+8lmtLIDdV2e/e5ylbl/24FfhMVRc5n4FsVa3rj5Xz7SPU8Nmq43N3eJZADMB1Ti1iBjAMz5cpQF/gm95fBsA0IEpVC4Fv4alBpInIxyIyrJniy1bVcq/lM0BXPF/qfngS31ne92sSrqqhqjpcVf/urMsComooH+U8fo6qrnVe/2E8X9JF1Wy3UVVDvG4ba3j+1cAY55fvhXhqZPuAKGfdNM7f/+HddHb22ICn1nHueDjvWbZX2d7AMa/lY846gA3AEBHpieeL/GWgj9OsNKmWeN6qst+p5ylbl/3ow9f7l+rqfPsINX+2TB1YAjHnqOoXwIvAn51VJ4BXqnwZdFHVJ53yy1R1Np4v2H3A/znbFQJBXk/d63wv24iQM4Fyvtos06eBz/U5ni/ISd4rRaQPni/1VdVs8yrwCxrXfIWqHgZS8XTgH1fVAuehDc66rnia0eorDa/jISJBeJqxzkrF8yPhrFhnHap6BtgK/ARIVNVSYD3wc+CQqn4lodZBfT4TVZ0ABtbwWG2fnxr30TSeJRBT1TPAbBEZi+cL8hoRmSMiviIS6HQ8xohITxGZ5/SFlOCp+p9tHtkBXCKeaxaCOf9ZXSeBGPE6VbaunFNZ3wMeE5EgpwZ0Ry2b1fRcB/B06L8mnhMHfEVkJPAuni/Oz6rZ7O/AbJrm7Kg1eL6c13itW+usi6+hhlObd4CrnQ7oAOBxvvo/vwj4ldOnFQ48guc9P+sL4Id82Vy1qspyfdTnM1HVa8AsEblJRPzEc+LE2eatk8CA82xb2z6aRrAEYr7CaSd+GXjE6UCeB/wPnl/7J4D/wvO58cHz5ZaKpz9hOnC/8xzLgTfxnJm0FVhynpf8HM9ZR+kiUt9fteD5QgvG0/zxCp4vjJIGPM/Z53oOzxfMGSART5PHdapaWbWwquao6gptmkl1vsDTeex9BtkaZ12DEpTTJ/UD4HU8tZFTQLJXkSfw9GklALuAbc4675i6eb1+1eX6xFKfz0TVbY/j6Xf7BZ7P2g48HeIAzwMjnCbW6vqhattH0whiE0qZ9kRE/gj0UtU6nY1Vy3P9BrgeuERVcxsdnDHtjNVATJsmIsNEZIxzWu4kPKdivt8Uz62qjwIL8PSBGGOqsBqIadNEZCKeZqveeNrDFwBPNlGzkjHmPCyBGGOMaRBrwjLGGNMgfrUXaT/Cw8O1X79+bodhjDFtytatW7NUtepoDB0rgfTr14/4+Hi3wzDGmDZFRI5Vt96asIwxxjSIJRBjjDENYgnEGGNMg1gCMcYY0yCWQIwxxjSIJRBjjDENYgnEGGNMg1gCaSGf7krjUGZB7QWNMcZRWaks2nycnMJSt0OpliWQesg8XcI/ViRRWFJee2Ev6w5mcf9r23hscXNNGW6MaY9WJ2Xy0Hu7eODtndR33MLP9pxk2e702gs2giWQevj1B4n8ZfkB/rXyYJ23KSgp55fvJOAjsCYpi+PZZ5oxQmNMe7Jo83F8BD7fl8G721LqvF1KbhE/XLSNH72+vVlbPiyB1NHn+06ydHc6Ed068dyaI3VOBH/4ZC+peUX845YJ+Ai8seV4M0dqjGkPMvKL+WxvBndP68/EfqH85qPdpOcV12nbJz/dhyp08vfh1x8k1rv2UleWQOqgqLSCRz7czaDIrrx3/1T8fIXffbLnvNsUl1Xwt8+SeG3Tce6+qD9XjYnismE9eSs+mbKKr82OiqqycO0RLvvLKo5mFTbXrhhjWomH39/Fbc9tJON09Unh7a3JVFQqt0yK5akbx1JWUcm9r8SzP/30eZ9385EcPtqZyvemD+SXc4ex/lA2i3emNscuWAKpi398nkTyqSKeuG4UfcKC+MGlg1i2+ySrD2R+rWxeURkfbE9h7jOrefqzA1w1JooH5g
wF4NbJfcgqKGHF3pNf2aasopL/eT+Rx5fs4XBmIb/+8Ku/GMqrSTjGmLbF+/941f4MXtt0nHUHs7nun+vYk5r/lbJnO8+nDOjBgIiu9A/vwtM3jeN4zhmu/PsaHv9oD/vS879WsyirqOTxJbuJCg7k/ukDuXVSLGNjgvntkj3kFZU1+T65OhqviMwF/gb4As+p6pNVHu8EvAxcAGQD31LVo85jD+GZvrQC+LGqLmuuOCsqlZviYrhwQA8A7p7Wn3e2JvP917ax4I4LmDownMSUPJ5atp/1B7Mor1QGhHfh5e9M4pIhX46APH1IJFHBgfzj84McyiykslLZd/I0246dIi2vmO/PGEh41048vmQPSxLSmD40gh8v2s7BjAL+87NLCAqo/e3al57PvrTTXDc+urkOhzEd3omcM6xJyuLWybF1Kv/vVQd5dtUh/nbLeKYM6MEjH+5mQEQX/vzNsXz/1W1c9+91jOsTwtiYYEK7BJCRX0LyqSJ+OXfYuee4YnQUkwf04E/L9vHC+iMsXHeEmNDO3DGlL3dd1J/yCuX+17aSmJLPv26dQOcAXwB+d/1o/uudBLIKSgju7N+kx8G1GQlFxBc4AMwGkoEtwC2quserzPeBMap6n4jcDFyvqt8SkRF4pjGdhGcq08+AIapacb7XjIuL04YO566qiMi55fS8Ym5/fhPHss8wZ1QvPk5IJaxLAN+M68Os4ZGM6xOKr4987XmeW3OYJz7ee245OqQzY/sEc+3Y3swdFUVFpXLdv9aRnl9McGd/jmQVUlGp/M+Vw7j3koG1xnn785tYfyib7Y/Mpntg035YjDEeD723i0Wbj7P0pxczrFf385bNO1PGtD9+TlFZBZWqTOofxsbDObx+z2SmDgonI7+Y//3iENuP57InLZ/Sck9NJTYsiOU/v4ROfr5fe86M/GJW7MtgSUIq6w5mM6RnV4IC/EhIzuV314/mlklfTWxVv7/qS0S2qmpc1fVu1kAmAQdV9TCAiLwBzAO8OxfmAY85998B/imeozAPeENVS4AjInLQeb4NzRVs1YPfKziQt++bwl0vbmFJQiq3X9iXX1w+tNYMf8/FA7hzaj8qVFGFQP+vfjh8fYTfXT+Kef9aR2l5Ja/ePZl/rzrIgtWHuf3Cfud+VVQn43Qx6w5mUamw4VA2c0b2avgOG2NqtPagp/n6wx2pDJt7/gTy/LojnC4p5937p/LsF4dYvuck14+PZuqgcAAiuwfy6DUjAU8zV3ml4usj+PlIjV/6kd0DuWVSLLdMimX5npM8tng3R7PO8K9bJ3DF6KivlW9M8jgfNxNINHDCazkZmFxTGVUtF5E8oIezfmOVbVu8zSYkKIA3751CZkEJ0SGd67ydn6/PeQ/8mJgQXr/nQmJCO9MnLAh/X+HGZzfw2qZj3HPxgHPlSsorOJhRwMjewQB8nJBGpYK/r7DuYJYlEGOawfHsM5zIKcLfV1i8I5X/unwoPj5CVkEJRaUV9AkLOlc2r6iMF9YdYc7InlzQN5T/9+0L+M+ek0wbHF7tc/v5+lBNheO8Zo/oycWDwzl1ppSo4Lp/DzWFdt+JLiL3iki8iMRnZn6907uxAvx86pU86mrKwB7nPohx/cK4aFAPnv3i8LmLGHPPlHL785u56u9rWZLgOcPiwx2pjIjqzrRB4axNymrymIwxsMapfXzvkoGk5Bax7fgpikor+OazG5j99Bf8x+vivefXHuF0cTk/njkYAB8fYe6oXnTt1LS/3QP9fVs8eYC7NZAUoI/XcoyzrroyySLiBwTj6Uyvy7YAqOoCYAF4+kCaJHIX/GTmEG76fxuY/qeV3DIplk92pXEip4i+PYL49QeJ9OoeyI4TuTx0xTD8fH1YuX8PKblFzZLcjOnI1iZlERUcyH0zBvLc2sN8sCOFj3elcSSrkEGRXfneq1v53iUD2Xkilw2Hs5kzsue5VoL2xs0ayBZgsIj0F5EA4GZgcZUyi4H5zv0bgc/V0+u/GLhZRDqJSH9gMLC5heJ2xaT+YSz67oWMiQnhH58fJKuglFfunsTz8+MoLK3gzhe2IALXjuvNxU71eG1S09e4jOnIKiqV9YeymTYonK6d/Jg1vCfvbUvhhXVHmT+lLx/9cBqzhvfk2S8OcTS7kAevGMZfbhrndtjNxrUaiNOn8UNgGZ7TeBeq6m4ReRyIV9XFwPPAK04neQ6eJINT7i08He7lwA9qOwOrPZgysAdTBvbgWHYhAX4+56qsD1w+hN9/so/J/cOICu5Mr+5KZLdOrD2Yzbcm1u00Q2NM7RJT8sgrKjvXhzFvXDRLEtLo1yOI/75iGJ0DfHn22xeQkJzLqOhg/H3bdy+Bq9eBqOonwCdV1j3idb8Y+GYN2/4O+F2zBthK9e3R5SvLd08bQGpuMZeP6Al4zriYNiicVQcyqaxUfKo5ndgYU39rD3r6Fi9yzqCaPiSCm+Ji+PaFfc9dp+XrI4yPDXUtxpbUvtNjB+HrIzx27chzpwUCTBscTk5hKTuTc12MzJj2Q1VZuS+D4VHdCe/aCfCcRPPUjWMZExPicnTusATSTs0YGklIkD8/eWNHjWPtGGPq7unlB4g/dopv2CgP51gCaafCugTwwp0TyTxdwp0Lt5Bf3PTj4BjTUby0/ih///wgN8XFcM/F/d0Op9WwBNKOjY8N5dnbL+DAydP86v1Et8Mxpk3acSKXxz7azewRPfn99aOb7arutsgSSDs3fUgEt0/py9LEdKuFGNMA725NppOfD09/axx+7fysqvqyo9EBXDu2N6UVlfxn98naCxtjzimvqOSTXWnMHN6zya8ebw8sgXQA4/qEEBPamY+8JpU5klVIcVm7v3TGmHo7mFFwbtK3DYezyS4s5ZoxvV2OqnWyBNIBiAjXjO3NuoNZ5BSWsu34KWb/9Qu++3I8lZVtdnQXY5rcqv0ZzPrrF/zirZ2oKkt2ptG1kx8zhkbUvnEHZAmkg7h6TBTllcpb8Sf40evb6eTnw5qkLBauO+J2aMa0CpmnS3jg7Z10CfBl8c5UXt98nE8T07h8RM+vTbtgPCyBdBAjorozIKILT366j5P5xbxyz2Rmj+jJH5fuIzElz+3wjHFVZaXywNs7OV1czjv3T2XKgB48/H4i+cXlXDPWmq9qYgmkgxCRc+24D8wZyoTYUP54wxjCugTwwNs7vza3sjEdyXvbU/jiQCa/umo4w6O688zN4wjrEkBIkP+5YUvM19lpBR3I3Rf3p194EPPGeq6kDesSwM9mDeHB93axKyWvww7HYMxbW04wKLIr376wLwA9uwfy6t2TKSgpJ8DPfmfXxI5MB9I90J/rx8d8ZXDFK0ZF4e8rfLgj9TxbGtN+peQWsfloDvPG9v7KRYIjendnUv8wFyNr/SyBdHDBQf7MGBrJkoRUKuyMLNMBLXFOb792nPV11JclEMO1Y3tzMr+EzUdy3A7FmBa3eGcqY/uEfG2aBFM7SyCGWcN7EuScupieV8z8hZt5Ysket8MyplnsSc3n8qe/4P3tyRzMKGB3aj7X2plWDWKd6IbOAb7MH
tGTJQmpLN+TTlZBKauTMvnGhBhG9O7udnjGNKnff7KXAycL+NmbOxkQ3gURz3VSpv6sBmIAmDeuN6eLywkJCuDd+6fQPdCfp5btczssY5rUmqRM1h7M4n+uHMb3LhnA4axCpgzoQc/ugW6H1iZZDcQAcOnQSF68ayIT+4XRpZMf358xkD98uo8Nh7KZMrCH2+EZ02iVlcofl+4jOqQz86f2o5OfL3NG9aKXJY8Gc6UGIiJhIrJcRJKcv9VOICwi850ySSIy31kXJCIfi8g+EdktIk+2bPTtk4gwY2gkXZwRR+dP7UdUcCBPLt1n42WZduHjXWkkpuTz89lD6OTnGZpkQmwovUM6uxxZ2+VWE9aDwApVHQyscJa/QkTCgEeBycAk4FGvRPNnVR0GjAcuEpErWibsjiPQ35efzx7CzhO5vLj+qNvhGNMoWQUl/OajPQyP6s51NiVtk3ErgcwDXnLuvwRcV02ZOcByVc1R1VPAcmCuqp5R1ZUAqloKbANiWiDmDufGC2KYNTySJz/dx960fLfDMaZBVJVfvpNAfnEZf71pLL4+NqNgU3ErgfRU1TTnfjrQs5oy0cAJr+VkZ905IhICXIOnFlMtEblXROJFJD4zM7NxUXcwIsIfbxhDcJA/P1603eYPMW3SyxuO8fm+DB66YhjDo+yswqbUbAlERD4TkcRqbvO8y6lnFL96N7KLiB+wCPi7qh6uqZyqLlDVOFWNi4iwMf3rq0fXTvz1prEkZRSwYHWNh9mYVimnsJTff7KXGUMjuHNqP7fDaXea7SwsVZ1V02MiclJEolQ1TUSigIxqiqUAM7yWY4BVXssLgCRVfaYJwjXncfHgCKYNCuedrcn86LJBXxkvyJjWbElCKiXllfz33GH2uW0GbjVhLQbmO/fnAx9WU2YZcLmIhDqd55c76xCRJ4Bg4KctEKsBrh8fzfGcM2w7fsrtUIyps/e2pTCsVzdrumombiWQJ4HZIpIEzHKWEZE4EXkOQFVzgN8CW5zb46qaIyIxwMPACGCbiOwQkXvc2ImOZM6oXgT6+/DethS3QzGmTg5nFrDjRC7fmGBnXTUXVy4kVNVsYGY16+OBe7yWFwILq5RJBqwu2sK6dvJjzsheLElI45FrRpw7j96Y1uqD7Sn4CMwbZwmkudhQJqbOrh8fTV5RGSv32dlspnVTVd7fkcJFg8JtmJJmZAnE1Nm0QeGEd+3E+9uT3Q7FmPOKP3aKEzlFXG8XDTYrSyCmzvx8fbhmbBQr92dSUFLudjjG1GjJzlQC/X2YM7KX26G0a5ZATL1cOTqK0vJKVu6r7sxrY9xXWaks232S6UMizo3tZpqHJRBTLxNiQwnv2omlu9PdDsWYau1MziU9v5i5o6z20dwsgZh68fURLh/Zk5X7MmxoE9MqLU1Mx99XuGxYdSMkmaZkCcTU2xWjenGmtII1SVluh2LMV6gqS3enM3VgOMGd/d0Op92zBGLq7cIBPege6MfSRGvGMq3LvvTTHMs+Y81XLcQSiKk3f18fZo3oyWd7T1JWUel2OMac82liOiIwe4Q1X7UESyCmQa4YFUVeURnffm4TH+1MpbTcEolxz8GM0/zmo90sXHuEif3CCO/aye2QOgRLIKZBZg6L5OErh5OSW8SPFm3ngbd3uh2S6aC2Hz/FnGfW8OrGY8wYGsGT3xjtdkgdhiUQ0yA+PsJ3LxnA6v+6lDum9OXjXWlk5Be7HZbpgF7ZcIwgf1/WPXgZ/7x1AgMiurodUodhCcQ0io+PcNdF/amoVN7eakOcmJaVd6aMj3elMW98byK72ZhXLc0SiGm0/uFdmNw/jLfiT1BZWe/JJY1psA93plBSXsnNE2PdDqVDsgRimsTNk/pwLPsMGw9nux2K6SBUlUWbTzCyd3dGRQe7HU6HZAnENIkrRkXRPdCPRVtOuB2K6SASkvPYm5bPzZOs9uEWG2nMNIlAf1+uHx/Nq5uOU1BcxsWDI7hlUiydA2ziKdO0Nh/JYfHOFFbszSDQ34d543q7HVKHZQnENJmfzR6CiLD6QCYr9+/hWHYhv5k3yu2wTDtyNKuQmxdsINDfl6kDe3Db5L50D7QhS9xiCcQ0mZCgAB67diQAP3ljO+9tS+HBK4ZbLcQ0mTe2nEBE+PwXM+gVbGdduc21PhARCROR5SKS5PwNraHcfKdMkojMr+bxxSKS2PwRm/q4dVIsp0vK+Sgh1e1QTDtRWl7JO1tPcNmwSEserYSbnegPAitUdTCwwln+ChEJAx4FJgOTgEe9E42IfAMoaJlwTX1M6h/GoMiuLNp83O1QTDuxfM9JsgpKuXWydZq3Fm4mkHnAS879l4DrqikzB1iuqjmqegpYDswFEJGuwM+BJ1ogVlNPIsItk2LZfjyXvWn5bodj2oHXNx8jOqQzlwyOcDsU43AzgfRU1TTnfjpQ3fCZ0YD3eaHJzjqA3wJ/Ac6c70VE5F4RiReR+MzMzEaGbOrjhgnRBPj58Pomq4WYxjmaVci6g9ncPLEPvj7idjjG0awJREQ+E5HEam7zvMupqgJ1voRZRMYBA1X1/drKquoCVY1T1biICPvl0pJCggK4Zkxv3txygs1HctwOx7RRpeWVPPTeLgJ8fbhpYh+3wzFemjWBqOosVR1Vze1D4KSIRAE4fzOqeYoUwPsTE+OsmwLEichRYC0wRERWNee+mIb59dXDiQntzL2vxHMkq9DtcEwbo6o8/P4uNhzO5o83jqZnd+s8b03cbMJaDJw9q2o+8GE1ZZYBl4tIqNN5fjmwTFX/V1V7q2o/YBpwQFVntEDMpp5CggJ44a6JCPCdF7eQX1zmdkimDVmw+jBvb03mxzMHc/34GLfDMVW4mUCeBGaLSBIwy1lGROJE5DkAVc3B09exxbk97qwzbUjfHl34920XcCSrkPe3pbgdjmkjissq+MfnB5k1PJKfzRrsdjimGq5dSKiq2cDMatbHA/d4LS8EFp7neY4CdrlzKzdlYA+G9OzKxwlpzJ/az+1wTBuw+kAmBSXl3DGlHyLWcd4a2WCKpsVcPaY3W47lkJ5nE0+Z2i1JSCM0yJ+pA3u4HYqpgSUQ02KuGhOFKnyyK632wqZDKyqt4LO9J5k7Kgo/X/uaaq3snTEtZmBEV4ZHdWeJDW9iarFqfwZnSiu4ZkyU26GY87AEYlrU1WOi2HY8l5TcIrdDMa3YkoQ0wrsGMKl/mNuhmPOwBGJa1NXOL8pPrRnL1OBMaTkr9p3kCmu+avXs3TEtqm+PLozs3Z2lieluh2JaqdUHsiguq+TK0dZ81dpZAjEtbuawSLYdP0XumVK3QzGt0Mp9GXQL9COuX7UzPJhWxBKIaXEzhkVSqbA6KcvtUEwro6qs3J/BJYMj8Lfmq1bP3iHT4sbGhBDWJYBV+6ob/sx0ZHvS8sk4XcKlwyLdDsXUgSUQ0+J8fYTpQyJYdSCTiso6D8JsOoCVzo+K6UNs5Oy2wBKIccWMoRHkFJaSkJzrdiimFVm5P5MxMcFEdOvkdiimDiyBGFdMHxKBj3i+MIwBOFVYyvbjp5gx1Jqv2gpL
IMYVIUEBTIgNPddkYczqpEwqFS6z/o82wxKIcc2lwyLZlZLH1mM2Qn9HV1GpvLrxGOFdAxgTHex2OKaOLIEY19wxpS99wjrz0zd3cNommurQnv3iEFuOnuKhK4bjY3OetxmWQIxrugX688y3xpFyqohHF+92Oxzjkp0ncnl6+QGuHhPFNyZEux2OqQdLIMZVF/QN40eXDea9bSl8tuek2+GYFlZZqfzsrR1EduvE764bbRNHtTGWQIzrfnTZILoH+vH5futQ72iOZhdyOLOQH80cTHCQv9vhmHqqcwIRkaCmelERCROR5SKS5PytdtAbEZnvlEkSkfle6wNEZIGIHBCRfSJyQ1PFZlqen68Po6KD2Z2S53YopoUlpuYDntEJTNtTawIRkakisgfY5yyPFZF/N/J1HwRWqOpgYIWzXPV1w4BHgcnAJOBRr0TzMJChqkOAEcAXjYBN6I0AAB+3SURBVIzHuGxUdDB7009TVlHpdiimBe1OySPA14fBPbu6HYppgLrUQJ4G5gDZAKq6E7ikka87D3jJuf8ScF01ZeYAy1U1R1VPAcuBuc5j3wH+4MRTqao2Kl8bN7J3d0rLKzmYUeB2KKYFJabmMSyqmw2c2EbV6V1T1RNVVlU08nV7qurZGYXSgZ7VlIkGvF83GYgWkbN13d+KyDYReVtEqtseABG5V0TiRSQ+M9Ouem6tRjnn/u+yZqwOQ1VJTMlnZG+77qOtqksCOSEiUwEVEX8ReQDYW9tGIvKZiCRWc5vnXU5VFajPiHp+QAywXlUnABuAP9dUWFUXqGqcqsZFRNgAba1V/x5d6BLga/0gHUjyqSLyisoYFd3d7VBMA/nVocx9wN/w1AhSgP8AP6htI1WdVdNjInJSRKJUNU1EooDqTr9JAWZ4LccAq/A0pZ0B3nPWvw3cXetemFbNx0cY0bv7uU5V0/7tTvX8WBhlNZA2q9YaiKpmqeptqtpTVSNV9duqmt3I110MnD2raj7wYTVllgGXi0io03l+ObDMqbF8xJfJZSawp5HxmFZgZO9g9qTm2xDvHURiSj6+PsLQXt3cDsU0UK01EBF5gWqamFT1O4143SeBt0TkbuAYcJPzWnHAfap6j6rmiMhvgS3ONo+r6tlBk/4beEVEngEygbsaEYtpJUZFB/Pi+qMcySpgUKR9qbR3ial5DI7sSqC/r9uhmAaqSxPWEq/7gcD1QGpjXtSpwcysZn08cI/X8kJgYTXljtH4M8FMK3O2LTwxJZ/YsC5sOZrD1IE97OrkduRQZgF+PkJsWBCJKXk2dHsbV2sCUdV3vZdFZBGwttkiMh3WoIiudPLzYfmek7yw7gg7k/N47o44Zo2o8SQ708bc/eIWUvOK+dGlg8gqKGVUb+tAb8sacvL1YMB+Npgm5+frw7Co7ny8K43DWYUE+Pmw/lBju9tMa5GaW8TR7DN0D/TnL8sPAF+evm3aprr0gZzG0wcizt90PH0QxjS5GyZEE9LZnyeuG8Uv30lg0xFLIO3F2ffyxbsm8sWBTNYkZVoCaePq0oRlvZmmxdwxpR93TOkHwKT+Yfz98yTyisoI7mwD7bV1mw7n0C3Qj+FR3RkVHcwPLh3kdkimkWpMICIy4Xwbquq2pg/HmC9NHhCGroD4oznMHG79IG3dpiM5TOoXhq9NGNVunK8G8pfzPKbAZU0cizFfMSE2lABfHzYfsQTS1mXkF3Mkq5BbJvVxOxTThGpMIKp6aUsGYkxVgf6+jO0TzMYjNmd6W7fJeQ8n9+/hciSmKdXlOhBEZBSeYdMDz65T1ZebKyhjzprUP4xnvzhMQUk5XTvV6eNqWqFNR7LpEuDLSDttt12py3wgjwL/cG6XAk8B1zZzXMYAnl+sFZXK1mOn3A7FNMKmwzlc0C8MPxu2vV2py7t5I56rxtNV9S5gLGDn3pkWcUHfUHx9hI2H7XTetiqroISkjAIm9w9zOxTTxOqSQIpVtRIoF5HueEbOtZ4w0yK6dPJj2qBwXtt4jMzTJW6HYxrgHyuSEIGZw+364/amxgQiIv8SkWnAZmcSp/8DtgLb8MzBYUyLeOSaERSXVfL4Eht0ua3ZdvwUL288xvwp/RjWy/o/2pvz9UoeAP4E9AYKgUXAbKC7qia0QGzGADAwois/vGwQf11+gG+Mj+bSYfZLti0oq6jkoXd30at7IA/MGep2OKYZ1FgDUdW/qeoUPKPeZuMZFXcpcL2IDG6h+IwB4L7pAxkc2ZVffZBIVoE1ZbUFf/ssif0nT/PbeaPsDLp2qi4TSh1T1T+q6njgFuA6YF+zR2aMlwA/H/78zbFkF5Ywf+Fm8ovL3A7JnMfLG47yz5UHuSkuxkZTbsfqchqvn4hcIyKvAZ8C+4FvNHtkxlQxtk8Iz377Ag6cPM09L8ZTVFrhdkimGh9sT+GRD3cza3hPfn/9aLfDMc3ofJ3os0VkIZAMfBf4GBioqjeranVT0BrT7GYMjeTpb41jy7Ec/rkyye1wTBUn84v55TsJXDggjH/eOt6u+2jnzvfuPgSsB4ar6rWq+rqqFrZQXMbU6OoxvZk7shevbjxOYUm52+EYLy+uP0p5ZSVP3TDWpqrtAM7XiX6Zqj6nqnYJsGl17rl4AHlFZbwdf8LtUIyjoKSc1zYeY+6oXsT2CHI7HNMCXKtfikiYiCwXkSTnb2gN5eY7ZZJEZL7X+ltEZJeIJIjIUhEJb7nojdsu6BvKBX1DeX7dESoq1e1wDPDWlhPkF5fz3YsHuB2KaSFuNlA+CKxQ1cHACmf5K0QkDHgUmAxMAh4VkVAR8QP+BlyqqmOABOCHLRa5aRW+e3F/TuQUsWx3utuhdHjlFZUsXHeEuL6hjI+t9regaYfcTCDzgJec+y/hOT24qjnAclXNcZrSlgNz8UyvK0AXERGgO5Da/CGb1mT2iF707RHEgtWHUbVaiJuW7k4n+VQR373Eah8diZsJpKeqpjn304HqThaPBrwbuZOBaFUtA+4HduFJHCOA56t7ERG5V0TiRSQ+MzOzyYI37vP1Ee6e1p8dJ3JttF4XqSr/t/ow/XoEMcsm/upQmjWBiMhnIpJYzW2edzn1/Hys809IEfHHk0DG4xlqJQHPWWNfo6oLVDVOVeMiIiIavjOmVbrxghhCgvxZsPqw26F0WFuOnmJnch53XzzApqvtYJp1fAFVnVXTYyJyUkSiVDVNRKLwjPJbVQoww2s5BlgFjHOe/5DzXG9RTR+Kaf+CAvy4/cK+/HPlQY5kFdI/vIvbIXU4/7fmMKFB/tw4IcbtUEwLc7MJazFw9qyq+UB1FycuAy53Os5DgcuddSnACBE5W6WYDext5nhNK3X7lL74+/jw/FqrhbS0Q5kFfLb3JLdf2JfOAXbdR0fjZgJ5EpgtIknALGcZEYkTkecAVDUH+C2wxbk97nSopwK/AVaLSAKeGsnvXdgH0wpEdgvk+vHRvB2fTE5hqdvhdCgL1x7B39eH26f0czsU4wLXhshU1Ww8Mx1WXR8P3OO1vBDPSMBVyz0LPNucMZq24+ZJfXgz/gQbDmVz1Zgot8PpMD7
fl8HsET2J6NbJ7VCMC2ygGtMujOjdnQBfHxJSct0OpcPIOF1MWl4x4/uEuB2KcYklENMudPLzZVhUN3Yl57kdSoeRmOI51mNiLIF0VJZATLsxKjqYXSl5dlFhC9mVnI8IjOxtU9V2VJZATLsxJjqY08XlHMs+43YoHcKulFwGRnSli8022GFZAjHtxqjoYAASUqwZqyUkJOcx2jnmpmOyBGLajSE9uxHg53Oubd40n5P5xWScLrEE0sFZAjHtRoCfD8OjupOQbGdiNbezJyuMibEE0pFZAjHtyujo7iSm5FNpc4Q0q4SUPHzEc/q06bgsgZh2ZUx0CAUl5RzNttmXm1NiSh6DIrsSFGAd6B2ZJRDTrox2mlR2WT9Is1FVpwPdrv/o6CyBmHZlcGRXOvn5sP249YM0l9S8YrIKShgdbc1XHZ0lENOu+Pn6cPHgCD7YkUJBSbnb4bRLr248hghcMsTm1+noLIGYdueHlw0i90wZr2485nYo7U7umVJeXn+Uq0ZHMSCiq9vhGJdZAjHtzrg+IVw8OJzn1hymqLTC7XDalYXrjlJYWsEPLxvkdiimFbAEYtqlH88cTFZBKa9vPu52KO1GfnEZL6w7wpyRPRnWy/o/jCUQ005N7BfGhQPC+H9fHKK8otLtcNqF1zYe53RxOT+6bLDboZhWwhKIabfmT+lHxukSttkZWU3i08Q0JsSGnBtzzBhLIKbdmjY4HD8fYeX+DLdDafMyT5eQkJzHzOE93Q7FtCKuJBARCROR5SKS5PwNraHcUhHJFZElVdb3F5FNInJQRN4UkYCWidy0Jd0C/ZnYL4yV+yyBNNYqJwnPGGqn7povuVUDeRBYoaqDgRXOcnX+BNxezfo/Ak+r6iDgFHB3s0Rp2rxLh0WwL/00qblFbofSpq3an0lkt06MiLLOc/MltxLIPOAl5/5LwHXVFVLVFcBp73UiIsBlwDu1bW/MpUMjAc8XIMCTn+7jwXcT3AypTYg/msO1/1xLSm4RZRWVrE7K5NKhkXj+/YzxcCuB9FTVNOd+OlCfhtUeQK6qnr3MOBmIrqmwiNwrIvEiEp+ZmdmwaE2bNSiyKzGhnVm5P4NPdqXx7BeHeDP+BFkFJW6H1qq9vvk4Ccl5/PSN7Ww5ksPp4nIuHRbpdlimlWm2BCIin4lIYjW3ed7l1DOBdbONva2qC1Q1TlXjIiKs/bajEREuHRrJ2qQsHnw3gT5hnVGFz/acdDu0VqusopIVezPoE9aZLUdP8Yu3d+LvK1w0qIfboZlWptkSiKrOUtVR1dw+BE6KSBSA87c+vZzZQIiInB1HOgZIadroTXty6bAIisoqqFR49e7J9AnrzNLd6W6H1WptOpxDXlEZv7pqBNePjyYtr5iJ/cLoFujvdmimlXGrCWsxMN+5Px/4sK4bOjWWlcCNDdnedDxTBoQzITaEp24cQ98eXZgzohfrD2aTX1zmdmit0rLd6QT6+3DJ4AgenzeSCweEccukWLfDMq2QWwnkSWC2iCQBs5xlRCRORJ47W0hE1gBvAzNFJFlE5jgP/TfwcxE5iKdP5PkWjd60KZ0DfHnv+xdx5egoAOaO6kVpRaWd3luNykpl2e50ZgyJpHOAL90C/Xnj3ilcM7a326GZVsiV6cRUNRuYWc36eOAer+WLa9j+MDCp2QI07dqE2FDCu3biP7tPMm9cjedfdEg7knPJOF3CnFF2waCpnV2JbjocHx/h8pE9Wbk/g+IyG63X27LEdPx8hMuGWQIxtbMEYjqkOSN7caa0grVJWW6H0mqoepqvpgzsQXBn6zA3tbMEYjqkKQN60C3Qj2V2NtY5B04WcDT7DHNH9XI7FNNGWAIxHVKAnw8zh0Xy2d6TNty7Y2liOiIwe4Q1X5m6sQRiOqy5o3px6kwZm4/muB1Kq7BsdzoXxIYS2S3Q7VBMG2EJxHRYlwyJoJOfD8sSrRnrRM4Z9qTlM2ekNV+ZurMEYjqsoAA/pg+JYNnuk1RWNttoOm3C2b4gSyCmPiyBmA5tzshepOcXk5CS53YorlqamM7wqO7E9ghyOxTThlgCMR3azOGR+PkInyam1V64ncrIL2br8VPMtdqHqSdLIKZDCwkKYMbQSN7dmkxJece8qHDR5hOowrXjbLgSUz+WQEyHN39qX7IKSvlkV8erhZRVVPLapmNMHxJB//Aubodj2hhLIKbDmzYonAERXXhx/TG3Q2lxSxPTyThdwp1T+7kdimmDLIGYDk9EmD+lHztP5LLjRK7b4bSolzccJTYsiOlDbLI1U3+WQIwBvjEhmi4Bvry8/qjbobSY3al5bDl6ijum9MXHx+Y6N/VnCcQYoFugPzdeEMNHCansSc13O5xmp6r8edl+Ovv78s0L+rgdjmmjLIEY4/jxzMGEBgXwo0XbOFNa7nY4zerF9UdZuT+TX84dSnCQjbxrGsYSiDGOHl078fS3xnE4q5DHP9rjdjjNJjEljz98so9ZwyOt89w0iiszEhrTWl00KJz7pw/k36sOkXyqiLh+ocwbF93mT3Etr6jk/e0pbDqSw6r9GYR28eepG8ciYn0fpuEsgRhTxc9mD6FClS/2Z/K3FUm8vuk4a//7MgL82m6FfcGawzy1dD9hXQKYEBvKT2YOJqxLgNthmTbOlf8IEQkTkeUikuT8Da2h3FIRyRWRJVXWvyYi+0UkUUQWiog14pom4+/rw0NXDGfpTy9h4fyJZJwuadMXGZZVVPLy+mNMGxTO1l/N4rn5cYyOCXY7LNMOuPWT6kFghaoOBlY4y9X5E3B7NetfA4YBo4HOwD3NEaQx04dEMDCiCwvXHUG1bY7Y+2liOun5xdw9rb81WZkm5VYCmQe85Nx/CbiuukKqugI4Xc36T9QBbAZimitQ07H5+Ah3XtSfhOQ8th475XY4DbJw7REGhHexiwVNk3MrgfRU1bNtAulAg+bQdJqubgeWnqfMvSISLyLxmZmZDXkZ08HdMCGa7oF+LFx3xO1Q6m3b8VPsOJHLnRf1s4sFTZNrtgQiIp85fRRVb/O8yzm1iIa2DfwbWK2qa2oqoKoLVDVOVeMiIuwXmKm/oAA/bpkcy9LEdJJPnXE7nHpZuPYI3QL9uGGCVdJN02u2BKKqs1R1VDW3D4GTIhIF4PzNqO/zi8ijQATw86aN3Jivu2NKP0SEVza0nQEXU3OL+DQxnVsmxdKlk51waZqeW01Yi4H5zv35wIf12VhE7gHmALeoamUTx2bM10SHdGbuqF4s2nycwpK2cZX6yxuOoarcMaWv26GYdsqtBPIkMFtEkoBZzjIiEiciz50tJCJrgLeBmSKSLCJznIeexdNvskFEdojIIy0bvumIvnNRf/KLy3lvW7LbodTqTGk5izYfZ+6oXsSE2jS1pnm4Uq9V1WxgZjXr4/E6JVdVL65he6uPmxY3ITaEsX1CeGHdUW6b3LpHsH1/ewp5RWV856L+bodi2rG2e2mtMS1MRPjORf04nFXI5/vq3W3XYsoqKnlh3VFGRwdzQd9qr9E1pklYAjGmHq4YFUVMaGfuf20rf/hkL6eLy9wO6S
tW7s9g7jOrOZhRwPemD7ALB02zsgRiTD0E+Pnw3vencv34aBasOcwVf1tDUWmF22EB8OwXh7jrhS1UKjw/P46rx/R2OyTTzlkCMaaeIrsF8tSNY1l450SSTxXxVvwJt0OisKSc/111iOlDIlj200uYObxB1+YaUy+WQIxpoEuHRnJB31D+b81hyisqUVUeW7ybxxbvbvbXTkzJ45p/rOVIViEAizYfJ6+ojJ/MGtymRw02bYt90oxphPumDyT5VBEf70rjlY3HeHH9Ud7YcpyS8uZt1vpoZyq7UvK475Wt5BWV8fzaI0zqH8aEWOs0Ny3HEogxjTBzWCSDI7vy1NL9/HbJHqJDOlNcVsmO47nnyvxr5UHWH8xq8GuUlFfwyIeJnMj5chiV9YeyiQoO5EDGaa771zrS8oq5f/rARu2LMfVlCcSYRvDxEe69ZAApuUX0DunMG/deiI/AukPZAKTkFvGnZft5bm3DB2LcevQUL284xgvrjgKQd6aMxNQ8bp4YywOXD+VIViHDenVjxlAb6820LLsgz5hGmjcumhM5Z7h2XG/6hAUxOjqYDYeyYPYQPknwDDq9/fgpVLVBp9VuO+4ZRv6TXWn86qrhbDicjSpMHdSDC5wmq4sGhdspu6bFWQ3EmEYK8PPh55cPZVBkNwCmDgpn+/FcCkvKWZKQCsCpM2UczW7YSL7bneaw9Pxi4o+dYsOhLIICfBkbE4KPj/CDSwcxrk9I0+yMMfVgCcSYJjZ1YA/KK5X3tiWzMzmPb0yIBjy1kPpSVbafyOXK0b3o5OfDkoRU1h/KZmK/MDvbyrjOPoHGNLG4vmH4+wp//s8BAH46cwhdO/mda4oqLCnnp29sZ+uxnK9tW1peyUPv7WLF3pMAHMs+Q05hKdMGRXDZsEg+2J5CUkYBUwf2aLkdMqYGlkCMaWKdA3wZHxtKXlEZ4/qEENsjiHF9Qs41RX2wI4UPdqRy78tbSc0t+sq2T3y8h0Wbj/O7T/Y6tQ9P0pnQN4Srx/Qmv9gzlPzUgeEtu1PGVMMSiDHN4GwN4eoxUQCMjw1hX/ppzpSW8/qm48SGBVFSXsl9r26luMxzzchb8Sd4ecMxRvbuzuHMQjYezmHbsVy6BPgyOLIblw2LJCjAl+6Bfozo3d21fTPmLDsLy5hmcO3Y3mw4lM28cZ7+j/GxIVRUKq9uPMbu1Hx+O28kvYI7892X45nzzGqCO/uzL+00Fw3qwf+7PY6pf1jB65uPcySrgLF9QvD1EToH+HLf9IFUquLbioeSNx2HJRBjmsGAiK68+b0p55bH9/Gcbvv08iQ6+/syb3w03QP9+eMNo1mamA7A1WOj+NVVI+jayY9vTIjhtU3HqFS+coHgj2cObtkdMeY8LIEY0wJCuwTQP7wLR7IK+VZcH7oH+gPwrYmxfGti7NfK3zY5lhfXHwU8tRdjWiPrAzGmhZxNBLdM/nrCqGpwz25M7BfqbGfjW5nWyZUaiIiEAW8C/YCjwE2q+rWT5EVkKXAhsFZVr67m8b8D31HVrs0asDFN4K6p/enfowtjY4LrVP5/rvRcdR7WJaCZIzOmYdyqgTwIrFDVwcAKZ7k6fwJur+4BEYkD7KeZaTNGxwTzo5mD6zzkyPjYUL4/Y1AzR2VMw7mVQOYBLzn3XwKuq66Qqq4ATlddLyK+eJLLL5srQGOMMefnVgLpqappzv10oL7Tp/0QWOz1HMYYY1pYs/WBiMhnQK9qHnrYe0FVVUS0Hs/bG/gmMKOO5e8F7gWIja2989IYY0zdNFsCUdVZNT0mIidFJEpV00QkCsiox1OPBwYBB5225CAROaiq1TYWq+oCYAFAXFxcnROVMcaY83OrCWsxMN+5Px/4sK4bqurHqtpLVfupaj/gTE3JwxhjTPNxK4E8CcwWkSRglrOMiMSJyHNnC4nIGuBtYKaIJIvIHFeiNcYY8zWuXAeiqtnAzGrWxwP3eC1fXIfnsmtAjDHGBXYlujHGmAYR1Y7TrywimcCxBm4eDmQ1YTgtzeJ3l8XvLou/cfqqakTVlR0qgTSGiMSrapzbcTSUxe8ui99dFn/zsCYsY4wxDWIJxBhjTINYAqm7BW4H0EgWv7ssfndZ/M3A+kCMMcY0iNVAjDHGNIglEGOMMQ1iCaQORGSuiOwXkYMiUtPkV62CiPQRkZUiskdEdovIT5z1YSKyXESSnL+tejIuEfEVke0issRZ7i8im5z34E0RabXT9IlIiIi8IyL7RGSviExpS8dfRH7mfHYSRWSRiAS25uMvIgtFJENEEr3WVXu8xePvzn4kiMgE9yI/F2t18f/J+fwkiMj7IhLi9dhDTvz73R7eyRJILZzJq/4FXAGMAG4RkRHuRnVe5cAvVHUEnumAf+DEW9dZIFuLnwB7vZb/CDztDJx5Crjblajq5m/AUlUdBozFsx9t4viLSDTwYyBOVUcBvsDNtO7j/yIwt8q6mo73FcBg53Yv8L8tFOP5vMjX418OjFLVMcAB4CEA53/5ZmCks82/ne8oV1gCqd0k4KCqHlbVUuANPDMqtkqqmqaq25z7p/F8eUVTx1kgWwMRiQGuAp5zlgW4DHjHKdJq4xeRYOAS4HkAVS1V1Vza0PHHM0ZeZxHxA4KANFrx8VfV1UBOldU1He95wMvqsREIcaaUcE118avqf1S13FncCMQ49+cBb6hqiaoeAQ7i+Y5yhSWQ2kUDJ7yWk511rZ6I9MMzf8omGj8LZEt6Bs90xZXOcg8g1+sfqjW/B/2BTOAFpwnuORHpQhs5/qqaAvwZOI4nceQBW2k7x/+smo53W/x//g7wqXO/VcVvCaSdEpGuwLvAT1U13/sx9Zy73SrP3xaRq4EMVd3qdiwN5AdMAP5XVccDhVRprmrlxz8Uz6/c/kBvoAtfb15pU1rz8a6NiDyMp1n6NbdjqY4lkNqlAH28lmOcda2WiPjjSR6vqep7zuqTZ6vqDZgFsiVdBFwrIkfxNBdehqdPIcRpUoHW/R4kA8mquslZfgdPQmkrx38WcERVM1W1DHgPz3vSVo7/WTUd7zbz/ywidwJXA7fplxfstar4LYHUbgsw2DkLJQBPB9Zil2OqkdNf8DywV1X/6vVQg2eBbEmq+pCqxjizTd4MfK6qtwErgRudYq05/nTghIgMdVbNBPbQRo4/nqarC0UkyPksnY2/TRx/LzUd78XAHc7ZWBcCeV5NXa2GiMzF04x7raqe8XpoMXCziHQSkf54TgbY7EaMAKiq3Wq5AVfiORPiEPCw2/HUEus0PNX1BGCHc7sSTz/CCiAJ+AwIczvWOuzLDGCJc38Ann+Ug3hmqezkdnzniXscEO+8Bx8AoW3p+AO/AfYBicArQKfWfPyBRXj6a8rw1ADvrul4A4LnrMpDwC48Z5u1xvgP4unrOPs//KxX+Yed+PcDV7gZuw1lYowxpkGsCcsYY0yDWAIxxhjTIJZAjDHGNIglEGOMMQ1iCcQYY0yDWAIxppFE5GFn9NoEEdkhIpOb8bVWiUhccz2/MfXhV3sRY0xNRGQKnquFJ6hqiYiEA61mqHNjmpPVQ
IxpnCggS1VLAFQ1S1VTReQREdnizKmxwLmq+2wN4mkRiXfmCpkoIu8581Y84ZTp58wF8ZpT5h0RCar6wiJyuYhsEJFtIvK2M/6ZMS3GEogxjfMfoI+IHBCRf4vIdGf9P1V1onrm1OiMp5ZyVqmqxgHP4hli4wfAKOBOEenhlBkK/FtVhwP5wPe9X9Sp6fwKmKWqE/Bc+f7z5tlFY6pnCcSYRlDVAuACPJMTZQJvOoPgXerM4LcLz4CQI702OzuW2i5gt3rmcCkBDvPlQHknVHWdc/9VPEPUeLsQzwRn60RkB57xnvo26c4ZUwvrAzGmkVS1AlgFrHISxveAMXjGWTohIo8BgV6blDh/K73un10++z9ZdYyhqssCLFfVWxq9A8Y0kNVAjGkEERkqIoO9Vo3DM8gdQJbTL3Hj17esVazTQQ9wK7C2yuMbgYtEZJATRxcRGdKA1zGmwawGYkzjdAX+ISIheCb+OYinOSsXz2i26XimBKiv/Xjms1+IZzj1r8zdraqZTlPZIhHp5Kz+FZ5Ro41pETYarzGtjDMV8RKnA96YVsuasIwxxjSI1UCMMcY0iNVAjDHGNIglEGOMMQ1iCcQYY0yDWAIxxhjTIJZAjDHGNMj/B22bEVUzrx2PAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEWCAYAAACNJFuYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADt0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjByYzMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy9h23ruAAAgAElEQVR4nOy9d5wcd33//3pvL7d7e10n3alasmS5YFs2NjYY3DAYMNWhBAgJIbSQQvmFEAIJIZhOiGkGvgEDMRhjSmyDK7Zxt2TZlmVJVtcVXW/b6+f3x2c+s5+Zndlyt3t70n2ej8c97m52duazszOfd39/iDEGhUKhUChkHM0egEKhUCiWHko4KBQKhaIEJRwUCoVCUYISDgqFQqEoQQkHhUKhUJSghINCoVAoSlDCQWELEd1PRO8t8/p3iejTizmm5Q4R/Z6I3l3H45X9jhXLFyUcThCI6AgRJYkoRkQjRPQjImpZxPP/BRE9JG9jjL2fMfa5BpxrLREx7bPGtM/+T9LrREQfJ6L92jU5RkT/SUQeaZ8face4xnTsr2vb/0L6XHnpXDEiut5mXPdr7z3LtP3X2vaX1/M6WMEYexVj7MfS2B+q9J5GUOk7Upz4KOFwYvFaxlgLgBcBOBvAJ5s8nkYT0T7v2wD8KxFdpW3/JoD3AXgXgBCAVwG4HMDPTe9/QdsHAEBELgDXAjho2u9RxliL9PPhMmMyH7MDwIUAxmv9cCcJ4jt6M4BPE9EVzR6Qoj4o4XACwhgbAXAnuJAAABDRBUT0CBHNENEzsharaZiHiChKRIeJ6B3a9s8S0U+l/YQ26JLPR0RbAHwXwIWaljijbf8REf2H9vfLiWiQiD5KRGNEdJyI3iMdo4OI/o+I5ojoSSL6j2q1XsbYowB2AzidiDYC+CCAdzDGHmWM5RhjuwG8CcDVRHSJ9Nb/A3AxEbVp/18F4FkAI9Wc14afAfgzInJq/78NwK8BZKTPej4RPap9F8eJ6HqTVXMlEe0jolki+jYRPSBcO8IaIKKvENG09n29Snrv/UT03jLficFNZLYuiOgKItqrnft6ACR/OCL6SyLao537TiJaU81FYYxtB/+O5HvS8lia5fd17T6ZI6JdRHS69tqPiLsr79bu1wfkMRDRS7T7Z1b7/RLTtfkcET2svfcuIurUXvMR0U+JaFL7Xp4koh7ttVYi+qH2XQ1p96b4fpctSjicgBBRH7i2fED7fxWA2wH8B4B2AB8D8Csi6iKiILim/SrGWAjASwA8Xcv5GGN7ALwfRQ07YrPrCgCtAFYB+CsA35Im5m8BiGv7vFv7qeazEhFdBGArgJ0ALgMwyBh7wjTGAQCPAbhS2pwC8FsAb9X+fxeAG6s5bxmGATwvncfqmHkA/wCgE9yquAxcoEGbrG4Bt/o6AOwD/05kXqxt7wTwJQA/JCLDJF7Dd6KjnftWAP+iHfsggIuk168B8M8A3gigC8CfANxU6bjaey8AcDqK92S5Y10J4GUANoHfL9cCmJQO9w4An9PG+DS4QAYRtYPf598Ev3ZfA3A7cetN8HYA7wHQDcAD/iwA/H5rBdCvvff9AJLaaz8CkANwCrhFfiWAZR+HUcLhxOI3RBQFMABgDMBntO1/DuAOxtgdjLECY+xuANsBvFp7vQCudfsZY8c1TbsRZAH8O2Msyxi7A0AMwKmaFvYmAJ9hjCUYY88D+HEVx5sAMAXgBwD+iTF2L/iEcdxm/+PgE5HMjQDeRUQRAJcA+I3F+y7QtEnxc0GFcYljbgZ3qzwqv8gY28EYe0yzao4A+J52boB/J7sZY7cyxnLgE53ZkjnKGPs+YywPfp16AfRUGFM1iHPfwhjLAviG6dzvB/AFxtgebWz/CeBFFayHCSJKAngUwLdRvL7ljpUFdwduBkDaPvJ3ejtj7EHGWBrAp8Cto34AVwPYzxj7iXZtbwKwF8Brpff+D2PsBcZYEsDNKFoyWXChcApjLK99R3Oa9fBqAH/PGIszxsYAfB1FhWLZooTDicXrNe3/5eAPVqe2fQ2At8gTHICLAfQyxuIA/gz8YT1ORLdrk1ojmNQmAkECQAv4hO0CF2oC+W87OhljbYyxLYyxb2rbJsAnSyt6tdd1GGMPaef/FIDbtEnDzGOMsYj081iFcd0K4FIAHwbwE/OLRLSJiG4jnjgwBz4xiu9qJaTPznjny0HTIUak1xPan/VIPrA6t/w9rAHwX9I9NAXudlpV5pid2tg+Cn5fuisdizF2H4Drwa3JMSK6gYjC0jHlMca0967Ufo6azn/UND5Z2In7D+Df050Afk5Ew0T0JSJya+N0gz8bYqzfA7c8ljVKOJyAMMYeADeFv6JtGgDwE9MEF2SMXaftfydj7ArwyXMvgO9r74sDCEiHXlHutAsY8ji42d4nbeuf57HuA9BPROfLGzXN8gIA91u856fgk9dCXUoA9An79wA+AAvhAOA74Nd5I2MsDO5eEW6h45Cug+Yu6is5QpVDsdhW7js9Dum6a+eWv4cBAH9juo/8jLFHyg6Ca+JfA3fjfbCaYzHGvskYOxfAaeDupY9Lh5TH2ALuKh3WfsxWzGoAQ+XGp50vyxj7N8bYaeBuvNeAuwQHAKTBFRExzjBjbGulY57sKOFw4vINAFcQT6v8KYDXEtEricipBd9eTkR9RNRDRNdosYc0uKunoB3jaQAvI6LVRNSK8tlPowD6SAqsVovmHrkVwGeJKKBZLu+q8Da7Y70AHoj9GfEgvJOItgL4FYBHANxj8bZvArgCwIPzOacN/wzgEs1tZCYEYA5ATPusH5Beux3AGUT0euKB/w+hvFAuh9V38jSAN2rX+RTw2I987q1E9Ebt3B8xnfu7AD6pXU8RqH1LDeO5DsAniMhX7lhEdB4RvVjT3OPgQqUgHefVRHSx9rk+B27ZDQC4A8AmIno7EbmI6M/AhcttlQZGRK8gojM0F+ccuJupoLmz7gLwVSIKE5GDiDaQMbFhWaKEwwkKY2wcXBP+V+3BEQHAcXBt6OPg368DwD+Ca11T4L7vD2jHuBvAL8AzeHag/EN2H3g2yggRTZTZz44PgwcER8C17ZvAhdV8+DB4HOKn4K6D58DdC69njBXMOzPGphhj92pulLrAGBvWXFZWfAw8MBoFt9J+Ib1vAsBbwAPNk+CT23bM71pYfSdfB8+cGgWPV/zM4tzXaefeCOBh6fVfA/giuOtlDvy66plSVXA7gGkAf13hWGHw6zIN/r1NAviydJz/BY+nTQE4FzymBsbYJLjG/1HtPZ8A8Brtc1ViBXgiwByAPQAeQNHqexd48Pp5bUy3wN51uWygOj4vCkXVENEXAaxgjC242peI/g3AGwC8jDE2s+DBLSJE5ACPObyDMfbHZo+n2RDRj8Cz0f6l2WNZ7ijL
QbEoENFmIjpTS009H9zd8et6HJsx9hkAN4DHHJY8mvsvQkReFOMRlYLgCsWi4qq8i0JRF0LgrqSV4C6Pr4LXINQFxphly4slyoXgrhPhyni9TRaVQtE0lFtJoVAoFCUot5JCoVAoSmiaW0nLS78RvPKTAbiBMfZfWon8LwCsBXAEwLWMselyx+rs7GRr165t6HgVCoXiZGPHjh0TjDFzVwEATXQrEVEveAXvU0QUAk+lfD2AvwAwxRi7jngL4DbG2P9X7ljbtm1j27dvb/iYFQqF4mSCiHYwxrZZvdY0t5LW4+cp7e8oeO7xKvB8fdF358fgAkOhUCgUi8iSiDkQ0VrwboiPA+iRmnCNwKbhGBG9j4i2E9H28fHl2kpfoVAoGkPThYPWO+VX4F0R5+TXtIpWS78XY+wGxtg2xti2ri5Ll5lCoVAo5klThYPWW+VXAH7GGLtV2zyqxSNEXGKsWeNTKBSK5UrThIPWEfKHAPZoHR0Fv0NxIZh3o46FUgqFQqGojmZWSF8E4J0AdhGRWJnsn8Gbgt1MRH8F3pTr2iaNT6FQKJYtTRMOWkdLsnn5ssUci0KhUCiMND0g3UwOjMXwtbtfwEP7J6DaiCgUCkWRZd14b8/xOVx/334UGHD1mb34+rUvgse1rOWlQqFQAFjmwuG1Z63EFaf14IcPHcaX79yHVr8b//mGM5o9LIVCoWg6y1o4AIDP7cSHXnEK5lJZfO+BQ7j6jF5cdEpn5TcqFArFSYzyoWj8w+Wb0Nfmx5f+sFfFHxQKxbJHCQcNn9uJv3nZejwzOIudAyfUSpMKhUJRd5RwkHjjOX0I+Vz40cNHmj0UhUKhaCpKOEgEvS68+dw+/P6544imss0ejkKhUDQNJRxMvPqMXmTzDA+8oDq9KhSK5YsSDibOWd2GjqAHd+0ebfZQFAqFomko4WDC6SBctqUbf9w7hkyu0OzhKBQKRVNQwsGCSzf3IJrOYdeQylpSKBTLEyUcLNi2tg0A8OSR6SaPRKFQKJqDEg4WdLZ4sb4ziO1Hppo9FIVCoWgKSjjYsG1tG3YcnUahoKqlFQrF8kMJBxu2rWnHdCKLQxOxZg9FoVAoFh0lHGw4Z00EAPD0wGyTR6JQKBSLjxIONqztCMLjcmDfyFyzh6JQKBSLjhIONricDmzsbsHekWizh6JQKBSLjhIOZTh1RQj7lHBQKBTLECUcyrB5RQhj0TSm45lmD0WhUCgWFSUcynDqijAAKNeSQqFYdijhUIbNK0IAoILSCoVi2aGEQxm6Q16EvC4cnog3eygKhUKxqCjhUAYiQn97AMemEs0eikKhUCwqSjhUYLUSDgqFYhmihEMF+tv9GJxOqh5LCoViWaGEQwVWtweQzhUwHks3eygKhUKxaCxZ4UBEVxHRPiI6QET/1Kxx9LcHAAADyrWkUCiWEUtSOBCRE8C3ALwKwGkA3kZEpzVjLEI4qLiDQqFYTixJ4QDgfAAHGGOHGGMZAD8HcE0zBrIq4geREg4KhWJ5sVSFwyoAA9L/g9o2HSJ6HxFtJ6Lt4+PjDRuIz+3EirBPCQeFQrGsWKrCoSKMsRsYY9sYY9u6uroaeq5VET+Oz6Qaeg6FQqFYSixV4TAEoF/6v0/b1hR6Wn0YnVPCQaFQLB+WqnB4EsBGIlpHRB4AbwXwu2YNpiekhINCoVheLEnhwBjLAfgwgDsB7AFwM2Nsd7PGs6LVi3gmj2gq26whKBQKxaLiavYA7GCM3QHgjmaPAwB6wj4AwOhcCiGfu8mjUSgUisazJC2HpYYQDiOzqkpaoVAsD5RwqIKukBcAMBlXwkGhUCwPlHCogs4gFw4TMbVcqEKhWB4o4VAFYb8LLgdhUjXfUygUywQlHKqAiNDR4sGkshwUCsUyQQmHKukIelXMQaFQLBuUcKiSjhaPijkoFIplgxIOVdLZ4sWEijkoFIplghIOVRIJuDGbUBXSCoVieaCEQ5W0+t2IpnPI5QvNHopCoVA0HCUcqiTi520z5lK5Jo9EoVAoGo8SDlXSGuDCYSahgtIKheLkRwmHKon4PQCA2aSKOygUipMfJRyqJKy5lWaUcFAoFMsAJRyqJKK5lVTGkkKhWA4o4VAlrZrloNxKCoViOaCEQ5Uo4aBQKJYTSjhUidvpgMflQDytUlkVCsXJjxIONdDidSGmhINCoVgGKOFQA0GvU1kOCoViWaCEQw0EPS7E0vlmD0OhUCgajhIONdDidSGRUZaDQqE4+VHCoQaCXpdyKykUimWBEg41EPQ6VUBaoVAsC5RwqIGgx4W4ijkoFIplgBIONaDcSgqFYrmghEMNtHhdiGdyYIw1eygKhULRUJRwqIGg14UCA1JZtRqcQqE4uWmKcCCiLxPRXiJ6loh+TUQR6bVPEtEBItpHRK9sxvjsCHqdAIC4SmdVKBQnOc2yHO4GcDpj7EwALwD4JAAQ0WkA3gpgK4CrAHybiJxNGmMJHie/XJmcshwUCsXJTVOEA2PsLsaYUL8fA9Cn/X0NgJ8zxtKMscMADgA4vxljtMLjUsJBoVAsD5ZCzOEvAfxe+3sVgAHptUFtWwlE9D4i2k5E28fHxxs8RI4uHPJKOCgUipObhgkHIrqHiJ6z+LlG2udTAHIAflbr8RljNzDGtjHGtnV1ddVz6LYIt1JaBaQVipOWe54fxY2PHkEqu7xrmlyNOjBj7PJyrxPRXwB4DYDLWDE3dAhAv7Rbn7ZtSVC0HJb3TaNQnKzc9MQxfPLWXQCAB18Yx/fftQ1E1ORRNYdmZStdBeATAF7HGEtIL/0OwFuJyEtE6wBsBPBEM8ZohRAOaRVzUChOOpKZPL70h7148bp2fOzKTbhnzxgeOjDR7GE1jWbFHK4HEAJwNxE9TUTfBQDG2G4ANwN4HsAfAHyIMbZk1HSvCkgrFCct9+wZxXQii49cthHve9kGdAQ9+PmTA5XfeJLSMLdSORhjp5R57fMAPr+Iw6kaj5Nn1S4n4ZAvMDDG4HIuhdwFhaJx/OG5EXSHvLhgfQecDsLVZ/biF08OIJnJw+9ZMhn1i4Z64mtguWUrpbJ5vP37j+GCL9yLZwdnmj0chaJhMMbw6KFJXLyxE04HjzFcurkb6VwBO45ON3l0zUEJhxpYbnUOv316CI8fnsJELIOv3vVCs4dTNQ8fmMBf/ehJ7B6ebfZQFCcIB8ZimIpncMH6Dn3beWvb4XIQHjm4POMOSjjUwHKLOfxm5zDWdwbxkcs24oEXxjEZSzd7SBVJZvJ4/0934N69Y/jbm3aiUFBNEhWV2TXEFYmz+/VOPgh6Xdi6MoynB5an1ayEQw0sJ7dSRjOnLz+tBy/b2AkA2H4CmNcPvDCGaCqHa7f14dB4HI8emmz2kBQnAHuOz8HjcmBdZ9CwfeuqVjw3NFv3TsyMsSXf3VkJhxpYTm6lvSNzyOQLeFF/BGf0tcLjcmD7kamGnzeTK+Cvb9yOq77x4LzWznjghXGEfS58+jWnwe0kPPjC4lTPK05s9hyP4tSeUEnixekrWzGXymFgKlm3c82lsnjDtx/B6Z+5E/ftHa3bceu
NEg41oFdILwPh8MwgN7PP7GuF1+XEpp4W7BuNNfy89+wZxd3Pj2LvSBQ3Pnq05vc/fzyKrStbEfK5cfbqNjy8TP3Fito4OB7Dxu6Wku2be0MAgBdGo3U713fuP4hnBmfgdBA+/stnl2wlthIONSB3ZU1l80veLFwIh8ZjCHicWBXxAwDWdbbg8ETjhcNtzw6jJ+zFGata8ce9YzW9t1Bg2D8axakr+AO9bU0b9h6PIp2r38M3NJPENd96GB/82Q7kloF7cTmQyuYxMpfC6o5AyWsbOrnAOFSnez+XL+AXTw7gytN68N13novJeAa3PXu8LseuN0o41IDDQXA7Cb96ahCbP/0H3LJjsNlDahjHJhNY3R7QWwes6wxiaDpZ14nWil1Ds9i2th3nrW3Hs0MzyNYwAQ9OJ5HI5LFZEw5besPIFRj219Hi+fztz+OZgRncsWsEv9i+fAukTiaGZpJgDFjdXiocWgNudAQ9ODQer8u5nh2axVQ8g9edtQoXru/AylYf7to9Updj1xslHGrE43RgcJr7H3cPzzV5NI3j6FQCayRNan1nEAUGDEwlyrxrYcwmsxiYSmLryjDOWRNBKlvAvpHqzfnBaT42oQFu6Q0D4MHGehBNZXHP82P4y4vW4dSeEG5fohqfojaOafe0lXAAgPVdwboJh0cP8gSJCzd0gIjw8s3deOjAxJKMYyrhUCMiKA0Ax2frF6RaShQKDMemEljTUczcWNXG3UvHZ1MNO+9ebRLf0hvGes2cP1aDMBqZ42NbEfYB4NaOx+nA/rH6WA4P7Z9AJl/AVaevwCs2d+OJw1OIzSNorlhaDFYQDqvbg7risVB2HpvGKd0taA96AAAv2dCBRCaPvSNLT9FUwqFGjMKBT0bxdO6kqqKcSmSQyRX0eAMAdLV4AQDj0WKtw23PDuOlX7oPX71rX13iL8IiW9sR1IVRLZbK6BwfW48mHJwOQn+7H0cn66P17RyYgcflwIv6Izh/XRtyBaYLtOXC0EwSv9w+cFLVj4zOpeF0EDq0e9zMqogPI3OpmlycduwdieoWLQCcvboNAJZkLYUSDjUiC4fhGS4c/u7nT+NN33mkbpNQs5nQit06pYelM2QUDtFUFp+8dRcGppL47/sO4I5dC/ebCkust9WHVr8bYZ9LFxhW/PbpIVz61fsxPMP3GZ1LIeR1Iegttgxb2xHE0cn6aH3PDMxgS28YHpcDm3p4XGNfHbNYljrpXB6v+++H8PFbnsUPHjrU7OHUjfFoGh1Bj942w8zKiB8Fxu+vhRBNZTE4ndRjYgCwstWHrpAXTx9TwuGExyPlQU/E0oilc7hnD89VvvWpJbP0xIKYjGUAAJ0tHn1b0OOE3+3UhcOjBycRTeXws/e+GFt6w/jiH/YuOHtneDaF9qAHPjdvctbfHsCAjTmfLzD83c+fxqHxOH76GE95HZ1LoTts1P7WdnLhsFDLhjGG3cNzOGMV1/pWRfwIepw1xUROdB4/NIXJOL83bt5+8iRjjMfS6ApZWw1A0aUqlMH5ItybQrEAACLClt7wklQylHCokVa/2/C/nP/83NDJ0ctHWA6ymU1E6Ap5Ma699sjBSfjcDmxb24a/v3wjjk0lcPuuhQVoj88k0dvq0//vbfXjuM0DeXC8GEf4035eyzAWTesuJcHq9gCS2bw+7vkyHuWKwMZu/mATEU7pCRnGcbJz394x+NwOfPSKTTgwFtMtthOd8Wh54bBSc68OzSzMAhUu0rWmlNlN3S04MBZDfom56pRwqBExYQqt+sgEdyX53U5bLfdEQ1gHXSYfbFfIq7+2e3gWZ66KwOty4ootPdjY3YJv//HggnzRx2dT6G0txjnag27MJDOW+z6rFem9ZEMHjkzGwRjDXDJbIryFsBmdXZhwOKx9z2ul9gqrIr6GBuiXGs8Pz+H0la245FS+LO/J0ql3PJouuddlRIKDiGnNF+EiFZaIYNOKENK5Qk3JF4uBEg41IiYf4Y8/ovmzz13ThsHp5AlZGHfbs8P4yaNH9Il9Mp6B20kI+43LfbQHPbrL6dhUQk8ZdTgIH3zFBuwbjeLeGgvXZCbjGXSFiq6sSMCD6UTW8pruPT4Hn9uBV5zajWgqh+lEFtFUDiGfccwrNOGw0MyyI1o8aZ2UwbUi7MfIbKou33kuX8CPHzmypBMb9o9FsbGnBeu7RGHYiR9jKxQYJiq4lYJeF/xuJyai1sIhly/gy3fuxX/ds7/svTA4nURH0IOAx3iPbtCu52IUmdaCEg414tf84SIVTVgO56xpQyKTx3Qi27SxzYedx6bx4f/diU//djdueYr7kSeiaXQEvSVr54a8LsTSOaSyeYzOpQ2pf689cyX62vy4/o8H5j1ZziWzCEuafyTg1qrRS2MZI3PcyljfxSfrI5NxxNI5tHiNlkNR61uYhn9kMgG3k7AyIru9fEhk8ojWIZ31+j8ewGd+txtv+s4jGFqC7prJWBrTiSxO6Q6hxetCd8iLw1Lufyqbx83bB3QL60QhlskhV2D682xHZ8iju1vN3Lx9EN/640F8/Z4XyrpWB6cT6DNZDQDQ3y4y85bW966EQ4343PySubXA9JHJODwuB7au5IFKkQ9dKLCGtVcoFBj+uG+sLrnXP3v8GEJeFzpbvPi1FlCfS5W6ZwCuQcUzOd08loWDy+nA+y/ZgGcGZvRCn1pIZfNI5woI+4rnbQvwB3Y6UepaGtP8xP3aGAamElw4mCyHjhYvXA7SayBkhmaSiKaqE+bDM0msaPUZGrMJq2Rkga6lrGY1bF4RgstBeoB9KSGshA2aMF7XGTQIgi/+YS8+ccuz+PD/PnVCWc9zSf79my1OM10tXkzErF2ct+wYwMbuFvSEvfi/Z4ZtjzE8k9TjF+Zj+9yOhhaYzgclHGpEWA5CqT46mUB3yKtrHjOJLEbnUrjwunvx7v95oiFj+Ma9+/Ge/3kSf/6Dx+fVuVSQyRVw1+4RXLG1B687ayV2DkyjUGCIp/MIekuXRWzxuRBL5fTYilkLevO5fegOefGt+w9Udf4DY1F84fd7kMsX9IfUYDlof08nMmCM4Z7nR5HI8M87Hk2jO+TV9xG+/7DpIXc6CN0hb0ls4N49o3jpF+/D277/WFUtQUZmU+gJGYPdvbrLih97NpHFJ255Bk8crq177WOHJjGdyOIfr9iEs1dH8FgVbcYZYzg0Hlu0egNheQmB2N8e0C2cbL6A3+zkisXu4Tk8eaT5rrHr79uPP//B4xUTBqIpfj+FfKXKkExni9dQ41N8fxY7B2Zw9Zm9uPK0FXjwhQnbwPJkPGNIDxcQEfra7DPzkpk87nl+FDMWSlIjUcKhRkTVsNCaZ5NZtHhduuYRTeVw754xjM6l8fCBybovkJPJFXDjo0cAcFfHbc/aayqVePjABOZSOVx9Ri/WdwWRyhZwfC6FWDpnqBUQtHhdyBUYRrXJ0Fw05HM78dcvXY+HD0xaBivNguyTt+7C9x44hJ8/OYA5TYNvNbiVuMCdTWTxy+2DeO+N2/H9Bw8DAMbmUugO+XRhIjJnWizG3dPqK3
Erffv+gygw4LmhOdy7p3KcZCyaRk+rUTgIhWBaS+/8wu/34Obtg/jAT3fU1IPqycNTcBBw0Smd2La2Hc8NzSKZKf/+nzx2FJd+9QH8+23PV32ehSAmxm5NQHa08PgTF1JxTCey+PdrtoIITV857f59Y/jKXS/goQMT+PRvnjO8NhXP4M3feQT/8IunARSFQ7iScAh5Ld1Ku4ZmwRjwov4IzuxrRTKbt3StZfMFzCSy6Gixdl/1t/kt3UqFAsN7b3wS771xO9747UcWtSJfCYcaeeM5q/C9d56Lv7p4nb7N63Lomkc0lTWktz50oL4Pyt6ROcwksrj+7WdjfVcQv95ZubYiX2D4+C+fwVXfeBBPSmsy3L7rOEI+Fy7e2Kn77g+Px5HI5BD0lE6yQW2R9WFNOFi5nt56fj/cTirxvX7h93uw9TN34iEt7TSVzeOZAZ5xdPfzo5gVloOk+bcFheWQxa+0eMizgzOIpXOIZ/LoDnvhdTngdlJROFi4B3pbfQbXz8hsCjuOTuNjV25C2OfCfRWC6IwxjMym9PiFQHz+uRQPmv9p/wQcxDXEWoqadg7MYPOKMIJeF/JbofoAACAASURBVM7qiyCbZ9g/Zp/3zhjD9fdx6+wnjx3Vr91COKplfNkxFk3D7STdUusMepHJFxBN5/SxnrumDVtXhquyfBrJzx4/hq6QF3932UY8cnASY9Hid3/bs8PYfnQav945hGOTiardSp0tXkwlMiVWwS69tX2kbC8voUDYVWH3tQUs3cQPH5zAwwcm8YpTu3BoIo6bn1y8Zo9KONQIEeGVW1cYNGuPy6FPatFUDntH5nD6qjCIUHXDLsYYbnjwIL557/6y+4kUzrP6Injl1hXYfmS6omvplh0D+OWOQewdieIDP92B2US26FI6rQdel1PPmDg0EdPcShbCQdt2XJuIrR6okM+N89a24/69xUV2svkC/ufhIwCAX+7gN/fgdEJfUW9gKoG5JP8MBsvBr7nqkhndhbH96LSeNdLZwoPmYZ9bj4NYuQd6wkbhsP0oF5Av29SFi07prDiZzaVySGbzJcJBWC2zCV75OjSTxD9cvglEqHoFOsYYnhmYwVna8pRrO7lFWi6t8dBEHGPRNN5ybh/yBYb7980/QwzgLrZLvnw/vnLXPtt9xubS6GzxwqFVEQsNeDKWwYGxGIh41s1ZfRE8PzzXtLhDMpPH/fvG8LqzVuKyLd0AYIiB3bl7RF/u9969o4imqxMOEb8bjKEkRnVgLIYuza28sacFLgdZFkaKeEWnTeC7v92PuVSuRND/ascgIgE3vvvOc3H26ghuXsROwEo4zJOAp+iT97qcCHpcIOI3z+GJOLasCGNF2Gfb/uGh/RN6phMAPHlkGv95x1587e4XMFYms+a5oVm0Bdzoa/PjJRs6kCswPFFhhbZf7RjiXUQ/cjGm4hl88c69uHfPKOZSObz2zJUAeFDMQcVirxaLmIN4gIZnkwh4nHpQ3swF6zuwbzSqm8C7hmaRyRXgczt0y0FMfuevbcfgdFIPOssxBxH8T2byGJlNwekgzCazunkvxhP2u8u6lVaEfYhn8vqDvePoNPxuJ7b0hnFabxiD08my5vq4pnmaq6/dTgcCHifmUlndt33hhg6c2hPCM1X2ypmIZTCXymFTDxfO/W1cOJRr+SFW5PubSzYg6HFiZw1WynNDsyUB7+8+cBAA8P0/Hbb1l4/HeIxHIDTgyVgaB8fj6Gvzw+d2YlNPCHOpHMZs0j4bzfajU8jmGV66sRNbV7aixevCU1J68N7jUbzxnFVoC7hxYCxWdCtZWMEyQmkxT97HphJYo7mYvS4nesI+y2yzybim0NikzIrvXQ5KC2v00lO74XU58arTV2DvSHTRig8rCgciWkNEndrfFxDRx4joDY0f2tLG55KFgwMOB6HF68JcKoeZRBbtQQ/62vyWpuJELI0//+HjePlX7tfNTaHNAjDUCjw7OGMQIkcnE1jf1QIiwrY17XA6qOzyndPxDLYfncIrt/Zg68pWvOeidfjfx4/hwzftxKqIHy/bxAuaHA5C2O/GTCKLuE3MQWwbnkmV9dEK83qf1mlSmN5vPW81JuMZxNI5HNMmv4s3diKTL+jtA1oNwoFf44GpBHIFhjNWtfL/tWsqXF8hH7/u4m8zIogq4g57j0exuTcEt9OhLwy0v0z7gqk4nxCs0h3DPjdmk1k9KN0b8WNDd0vVNQBCqAjLLahljpXLXDk8kYDHydc73tgTqrqj52wyi9f890P4l988h0c0d2ehwNuCtHhdyOQKej2HGXMVcYd2LSZiGYxHiy43vedUk9qKPH5oCk4H4by1/NlY1xnEYe1em45nMBnPYH1nC1a1+TE4nZQC0uUth3LCQV4kaGXEZzl5i/qgDlvLgR9Dni9eGI1hMp7BBRs6AACvOJVbQkLBajRlhQMRfRrAfQAeI6L/APANAJ0APkJE31iE8S1ZHA7SM5dEM76wz42JWJqnZPrd6G8LWFoOv5f88TsHuFbz1NEZrOsMIuRz6Z0+ZxIZvO76h3HZ1x7QzfSRuZQ+2fk9TmzpDZXVHJ8ZnEGBARdu6AQA/MMVm3BKdwsKjOFfrt5iaDbW6ndjPJpGrsAqCIekZbxBsEVbWvH543yCGJxOwOty4Nw1vAPlwFQCx6aS8LudeJHmThGTiaz5iz5WYqI9s08TDlrgzq9Zb7KgEt+JjJi4xAR+cDymT8ZCOJRbBnJKE+AitVam1a8Jh5kkHAR0h7xY3xnEwFSiqh79wu0oYj4AdzHIbqW7do/gDumeGZhKYFWbH04HYfOKEPaNRKty48jus19pactHpxJIZPJ487l9AHgVNMCvx1/fuF1fjS+ayhqus8i6mYilMR3P6tfmFG2pzWa1Fdk1NIuN3S36vcpTbvlYxGpuG7qD6IvwbKu5ZBYelwNeV+l9I9MaKBUO+gpyUkp3b6vfsmpeWMZW9xBQtBzk+WLnMT43nLe2HQC/tq1+tz5nNJpKlsPbAGwBsA3ARwBcyhj7JwBXALi8wWNb8ojJSfgwQz6XblKG/W6sjPhxfDZZkm749MAsWrzcDbVrkD+MhydiOLUnhFURv36D/PZpnomULzA8dWwajDGeKy1lzZyzug3PDMzYugOe1wTNaVodRovXhT/83Uvx6D9dhled0WvYN+J3Y1irJA56LFJZtQeOCz97TWtVxI+Ax4mDWqOxoZkkVkX8+uJBx6YSGItyIRfRHjqhWcmuKoeD4HEW8783r+CfQfwv0m3lsbicpZ01e6T2B9FUFmPRtC4cVkX8cBAwVKb7q0ghbLOyHPwuzCVzOD6bQlfIC7em0RdYdWtRHNXqZFa2GtujC4H03NAs3veTHfjgz57S3WIDUjHVxp4QphNZvSFeOR47xPthvXhdO/aN8vtCWHevPasXTslf/uudQ7j7+VF87e4XAKDEmhTKQTSVw2Q8o8cgOls88LudlkqRKJQ70qBCOd4ccRanaxYmwNudDE0nkckVdEG8TrccEphL5UrSn62wshyOz6bAGA8mC3ojPL5lfuZjmoVil
TAB8Pso4HEamvsJi064rYgIZ/VHanIjLoRKwiHFGMswxmYAHGSMJQCAMZYDsLhJt0sQoaUKrSPkc+mTTNjnQlvQgwIrpssJjk3FcdrKMNZ1BvHcMHe5jEfT6Al70ddWzB8/IC1Ss28khplEFulcASukieTs1RHEM3nb7Jbnh+fQ1+Y3aPoup0O3PmTCfrc+frtUVn3fMm4lIkKvlD46NJ3Eqja/wa86l8oh7Hfr55mKZ+B0UEnbZK/LgRntgdQrSc1uJakq2knlhEOqRFN3OR3oDvkwVKbjpqh6by9nOUh9oUS687GpypPg8GwKva0+PdALcPeVEA6PSzUTj2iB1WNTCd0NIYSEXYNCmd1DvDfSGatasX+UN3oTWu66zhZ0tnj070y08RAN4cxJCkIhSmRymE5kdJcbz9m3dqde93teKPeh/32qIfUZ47E0JmIZvSAVANa0B1BgXEERQeGesBerIn6euj2b1JW8clgJBxH7kmMxK1v9yOQLJcI6ls5pmXXWU654ZuQ2L7uHZ3Fab9hwb5zV14oXRqNIZRu7XC9QWThEiOiNRPQmAK3a3/r/Cz05EX2UiJgU0yAi+iYRHSCiZ4nonIWeo5EIzdWjWw5uPRAX9rv1tD9z87gjkzyItbo9gJHZFFLZPOZSOXSHfdqDxXs0HZ6I44xVrfC4HDg6GS/6taWJ/ex+7qp56qi1NnFwPG5oEVyOVr9bv6mtArvyjV3OrcTHWDSvh2aS6GvzIxJww+UgTMYzvFWGz6WfZyaRMbRDF3jdTsxok3OfLlyMbiX54XZY9OT3e5wI+1wYm0uV+PgB7icu13tpOpGB1+WwnETCmnCQ+/OIyWIiWll/GplNlmRB8Z5SvIbgqaPT6A554XE68NTRaaSyecwksrr1KCyOalpuiNX95EZv41G+0E3E70ZnC8/lZ4zh2cEZhLwuJLNc8cjkCwa/vHCrjs6lkC8wg7tE3MMyyUwev9LWXN89PIddDehgLNp5yN+tCABPxdOYjKXhczsQ8Lj0+3cylrGdsGWEMmQQDlHRvbj42YW7bcpCOFSKa6yM+PU0ccYY9o1EdRetYFNPCAVWfRbkQqh0VR4A8BoAV0t/i58HFnJiIuoHcCWAY9LmVwHYqP28D8B3FnKORiM0KaFFeV3GyVO4TGakfkuJTA7j0TTWdga1RnbpYhfUkBd9bX7E0jyl7fBEHOu7gljdHsDhibjedlrWVNZ0BNAe9Oj+STOD0wn0W/RzsUKe8AMWwkGed30VtK0VWm1BLl/ARCyDnrAPRISQz4VoKos5zYctrmE8k4fbwiUkX9P2oAdel0OfCIXlIFsbVpYDoKWzapaD00FGP3HE2k8smI5nbHvvBDxOpLJ53vRP+yxCSMj59XYcn02VtFRoD7qRzTPE0jkcHI/hzL5W9EZ8GJ5N6ZOTKBDsjVTXWDCVzWM0yv3jQqCMzaUwEeML3TgcxZbss8ksUtkCLtVSQUXmldnV6Pc49e9CniD72gIlAfWdx6YRTedw3RvPAMBTkmX2jUT1Qsj5clQ7p7z2uQgAT8YymIpn0BHk341ssVopJWZ8bgc8TodROGgCQO7oahe4tisslVnZ6pcWrkojnsnrMRyBUPTK1cHUi0pX5TkAu7WfXaa/nyvzvmr4OoBPAJDty2sA3Mg4j4FbLr2W714CCK1XWA6y1hr2ScJBulGET7GvzY/OFi8m4xl9EukOeXUNbCKWxvBsEms6gljbEcDRyYTut5Rz+YkIZ/dHsNMidXI2yTuVyj7RcojxArCcqKuZhAW9rT6MRVP6ZxcPTcjnRjSVQzSV435Wt1NvReKxCAp63cVbNOBx6tecqJjqKo/LynIAuLAanUvj4HgMa9oDhhX9Vrby9EO7oO50IqNPxmY8TicyuQIvHNTG5nNzS8Wq3YJMocAwKiUYCPSeUvEsbxMS9mFF2IfjM0ld0RDfVUfQA4/LUbF1OP98wOoOvy7opuIZTMSKLR06W7yYiGZ0d4mYiOxcjX530UcuWw69ER/mUjm91QkA/f686vQVWBXxG9JLdw/P4pXfeBCXffUBw3tq5egkF/yysG2TPqscGxH30awWkK4EEc/mE/U4QNFykBWHokJoshxSOUtrXKY34sNELK3FR0otXIAH2O1qKepNpavSov2cC+ADAHoBrATwfgDzdvkQ0TUAhhhjz5heWgVArvIY1LYtScwxB5csHPwutGpFXLIWIWctdAQ9SOcKODzBNZ7ukE83PblriT/8XSEuRERA0hzUOnt1BAfGYiXayqBNDyQ7ZMvBavKXu7TazME6PWGfwfwVAi2k9WfibiU3HA7SLQCPhUDySdfW7Sz6bHldCd/fILTshIOWf35gLGbIDAL4dc/kCrbdVWeTWd1FaMbtImTyBcTTeQSk2hB5YSSACwKzJTEZzyCbZyVuJTHZjEVTmEpk0NXi1ZIbipaD+K6ICCtbK68rITT5/raAvhbJRDxjSFHlykpad42KiWnQpujR73HqNTmywiJabIxJ6x88PTCD9Z1BRAIebOk1LpL0w4d4S5TxaBoP7CsWT9bK0ckEVkX8BjeRbjnEM5iMp/X/hUs4ls5V5VYS75GF10QsjbaA29CMcaGWA9OWI9XdnybLweNyoL89YJtyXE/KXhXG2L8xxv4NQB+AcxhjH2OMfRRcWKwu914iuoeInrP4uQbAPwP414UMnIjeR0TbiWj7+Pj8b6iFIG4qod3KE6psOcxKWoTQ/NoCHn0SEIHn9qBHf8iERhb2uxD2uTGXyuqFWuaHVCxSbi68sltcxA6PKVPIjLzJ3M7bjNAkhYASGSEtXhcm4hk93Rcojd3IiGsrBLHbxc8r+//l625n0WzobsF4NI39YzE9fVWga3txa7dGzKZiHAC8TgfSuQIy+QJapJYj8sJIAPCp3+zC+Z+/Fzc8eFDfNqW3VDBaJULb3T8WA2O8+E4E+Ke0YipRPQ6IAHZ5K0UEY7tC3qI2HeNWQqe0gFU2z6SsniCcDrK1HAIeJ+JaDyjZ/SfcnrJwPDAWw2bNf74q4jfESHYem8Flm7vRFnDjbm3J3fnAYypGK9nndiLgcWIqnsFULIN2za0ka/FWVrIVfrcTCann1WSstJFeuIxwCFVhOQA8VfzgeBxBj9PgQhas0TwJjabaCukeGLOTMto2WxhjlzPGTjf/ADgEYB2AZ4joCLjgeYqIVgAYAtAvHaZP22Z1/BsYY9sYY9u6urqq/Bj1RdxUYlKVJ1Svy6FrEXLMQVgOkYBbv7FE9o3f49QnfrEkYavfjbCfr2sgHnBz36Mz+1pBBDxlijuIycm8dKYd8vitFPBqNHSBsG70bqmSW0n4VYXAEJOOlQYnJh0R4yhaDtZBaIfNHX2KZJ6LIj2B7max6XoZt6kYN49ZjtPIXTyPTSZw0xPcIP7mvQf0icMu913cN0Jp6GrhwiFXYDioTdyyC7A96MG0jWATyOm4bie/N6fiaS3TyG0Yh37ekBdhn0tXMszCwec2FoIK9JiLZjlk8wUMTCWwTltFr68tgKjWKmI6nsHhiTi2rW3HOavbFrTU
7tHJUuEgPtdUPIOpRNGtFDQIh+qmwYDHaWiIOJ3IlKQ3h7QU9TkL4WCXxioQ2W7HZ7nlsKG7xVIJW9MewLE6rIteiWqFw40AniCizxLRZwE8DuBH8zkhY2wXY6ybMbaWMbYW3HV0DmNsBMDvALxLy1q6AMAsY2xhCxM3EHFTCY1XuJUcxDVrt9OBFq/LsADQrOQzFpOSePj8bkk46Cmxbn0SHZ5JosXrKpmYQz43Tu0pLYYrV7xlhcPgNrKyHKp3K4nJVK/70Cwi2RcvrKSWssKBH0e0KxGCWF5Ny1lh3AAMgb3TTMIhUmbdCKA0x19GtnZkAdLqd+uW3p8OcMv2S28+E7F0Dn/az/+fsREOcmU4AHSHi91nxb3SKgkHkd1Ujql4Bi4H6dprR9CDiVgGqWwBfu1aCmtscDqhZzBFAh699sWs+cotZDwWloNwow1OJ5ErMKzr1GpLNEt2aDqJA5r7ZEtvCJtWhHBoPF5V8aCZ2UQWs8ks1rQHS17raOEL9aSyBV2xkhUsbxUxB4Dfc7JbKZYurZFwOAitfrchzgiUv4cEYiGp4dkkDo3HS+INgjUdQUTTuZKMqHpT1VVhjH0ewHsATGs/72GMfaEB47kD3LI4AOD7AD7YgHPUDZdpMhMarDx5m/2U0wn+kLZ4XbomMRFNw+UgeKTurmISCGuWA1AUDlacvTqCpwdmDPnjU/EMQj5XVQE3oEbhUMly8BpbaYtCNdklJv7WYw4W4/SZ3Uq6cJDcStLb7NxK/e0BvPqMFXjHi1djbYdxAjG33jbDe01ZX3eD5SBNOC0+l17f8vihKawI+/CGs1ch4HHi8UO8dkEoDaL7rMCnXQdhdbUHPPo1GtYqsWUXVnuwsnAQQXWhiXa0eHTBLa6t+D08k0LE79YnOqGgWrmVBPJ31xbwwOUgPXYhKpTXaU0FV0WK6bfiM66M+LGppwW5ApuXP/2oVlOy2sJyEG4loHg/yeuVVGs5+D1Gt5LdfSFqX2TkbDY7Ah4XIlrPp6GZJNZ3lgo6QG610dgeS5VLAzUYY08BeKreA9CsB/E3A/Chep+jUYgAalbrLuq0CJB6XU6kJU1oJplFJOAGEekay2wyqz+YIclKAITlwCePoZmkba702f1tuOmJARyeLGock/GMbS8XKwyTbIWYg52GLtDdSjPGgKUcuBRWgZh0LOsctH18unDg55WzmIxuJetxOR2Eb7/jXMvX2gLF1uBmcvkC0rlClZaDJPi8Lh6L0PoVbexpgdvJW4iIAjM7t5L4rKI+xu9x6sHu4ZkkwtrELYgE3EhlC0hm8rYFXVPxovsI4NdcxBb82rXUz5vI6H/L7iuzhm10KxndfF0hr+5WEpOYKIIUrp3peEb/jCtafUhry8EekmpzRmZT+NubnkImz3D9287WJ0YzVqsTyuPUrXPt+ricDnhdPF5UtXBwO5GUis+iKWtXkRAO2XwBB8dj2NjN60qqKbbb0NWid0Ywx8YEvdK66KKbbyNQXVkXgLAcdOGgPbAuyfHtdTkM1YwziYzuUxYTSyyd028ct9MBn9uhF8OE/S5d6z4+m7L1W569mt8kchO+qXi64tq4MrJ/02rud1QR+BWIiXJ4NgmiokvCWEgl9i0TkHZZWw7yZFRpLJUI+9xwkLXlEE/z785WOEgTi7yP+PzxdA4DUwk9nXhjdwiHJ/jaCTOJLHxuh2GSBYqfWcSqfG6HbjlMxjMImPZvr+AW469lDULI63Lobi1zMeFMMqtr2IYMNpPgtbMcAO5aEm6l47MpuBykx9jkjJ7jsykEPU6EvC49pVfO6vra3fvw5JFpPDMwg4/+0pzcWERUdlvF1/hn1a6ldN+Uc2daETBbDqnSNcsBTYhk8vjBnw7jqm/8SV8ZsJrzbF0Z1lvhvMhm4i9aXgtbnrYSSjgsAJduOfAv0ynFHAQ+t9FymE0W12eWJzhZqxA3nNvJq1DlVhV2yxme0t2Cla0+3COtajYpZWdUgyHrx8pyqBCwlhEPXjSVQ4vHpb83YJFlVAxIW6Syuo0Tl5iEZC22UnC8Eg4HIRLwWAakY5pL0DYg7SqeW/5sLb7i8qXTiayu0a7tDCCZzWMsmsZ0PGMZD3I5HXA5SHdL+dxO3Q0yk8iUTMQiZlLOB20+l9fl1DvZimssBEIik9fvTbmRoTk4Kr9mtvq6Qj49tnR8JomecLFFSNDjgoP4Ikkjs7zOg4jQEfTA6SB97Y1kJo/fPTOMt53fj8++9jQ8cXgKO45adyAWixG1BUqfD5/bqcd/ZItTt1irdLv6pYB0OpcvqRrXr4XLgUy+oHfLFWswVJMVJeJhbieh2yaRJBJww+d26OuqNAolHBaA8AmKPHXdcnAaLQd5ychkJq/7puUJTn7QRJAr7HPrxTcCu0mKiHD5aT340/5x3VKZqtGtJGf62LmNxOZKqaxOqWutzxAfKJ5ECFdzMaGMveVQP+EAcIvGatEksc3ecijVROW/ReND0RdKxDsOT8QxIykKZsSELeo7xD1TYKUaaJtFx1Azwp2pj9vi3pPvQSEoyikEfinuYZ74usPFbC1eBV6c6ER7+NkkX29daPsObb3vUc0d9dzwLFLZAi7b3INrz+tH0OPELVoLDjNjc2l0aYs/mZGtBdlKK7ozq7t/Ah7uVmKMFRvpWdwXXpcT6WxBd6uJleFcdql0Elec1oM3n9uH/36bfRkZEWmtNpRwWLJcu60fP/7L8/HGc3idnphQ5YnV53YilS1aDulcwbLdhmw5FDU5o08esPbLC166sQupbAFPa/UOc6msIaulErVkI1UzIQsXmFwc6DJMNkbhYJmtpE1SYvLRYw6yW6kOwiHgcekuJBmhvdsJB3lSlIvghEYpKl2FAiHSOY9O8uVY7QLdegqvxT1gvk5ByYVlh6yUyMcHJLeSRQyhXJypnFXRrRVuZvMFHJ9NGZpFAkW/fDSVMwjInrBPdyuJup2z+iMIeFy4dEsP7tw9atmBeCyaQpeNpi1bC/KYhaJVvVvJhXyBIZMv6JaItXDglsN+LSVY9CurxnLoaPHiK285C1edvqLsfr1VFD4uFCUcFgAR4ZJNXfqD4XIYfwOllkM6V9BvViLSJ3v5ptXdU9q34zTEAuxvsPPXtcNBvDVzvsAMqXvVUClbybhv5eOJOINdewuhSZUz7y/Z1I1IwI0z+rj/1Zw+XM1YqyHocVq2boiXmQTM47CyHMx1HqIGYCKWQTKTL4k3CMyuHtll5TZdJ/Ga7A83k8zm9WMBRuEq3u+VhYOwHMq4Gu2sWKBYJT0eTWNkNmVoMw9w4TCXLF1Yqifs1d1Ku4fn0Nvq06/Z5Vu6MRXPYPdwaS3EeDRtWTAGGK0F+W8hLM3X0w7xjCYzeV1psIoBel0OJDN5vQ3JpPbbnN24ELolt12jUMKhjlilsnrdDpPlkDc8mEKDkx9+8XbxYMpzX7l5sNXvxrrOIPYcn9MnumCZB9iM00Krt6NSKis/dwXLQQ9I22crnb+uHU//65X4q4vXGfapt1sp4HXp1b4y+nW0EbJiPEQmrdSUdSYsCVGxOx3
nNQZ2wsFryiByOx26IDK7QYrNC43C7ZO37sJnfvsc0rk88gVmtAwkQeGzcivploN9kkK5vH0xoe8b4R1dzf2jxAp65nTQDmkti8HphCH76EJtRbRHDpauzz1WRjjI94osIMvdd1bIQljvVmBxDTwuhyE5QBg6rjrcpwJRgd/IQjglHOqI02Iy97mcRsshWzDcrOZJAJCEjIWbqtKkvb6rBYfG47oWGajJcpA+i82NTPq+lW908TntLAexvZbAoLllCbDwbCVAsxws3DJCsNulIYoxy72egOKkISwHOZGgTQt+p0zavIzPlMIrxgiUukHEpCVX7yYzedz0xDH8+NGjeFhbElT+DFbxLrezuJ6GleVg/s7LCQcxUQsXZ6+NWymeyRsUmJBUHzI8Y+xY2x3yYX1XEE8eNgalM7kCpuIZ3VoxY2c5lGvbYoVfFg4VLAfx/MkCq1r3VTV0tXiRzhX0pIJGoIRDHbFahazUcjAJB1MFMFBqMRj9vuXHsL4riKOTCb1JXy2WQzktUcD0fSsfT1wPORBnZUUEa/D9ikOZ8+oXCq9+LbUcRLWunb9Yb+dhus6ilcbIbAoOMrb7aAvytbpT2bzlsqaArDSUFtmVCgcRcyiOf4+0rvT+0Zh2TOu6BDHpEUlJBFXEHMp1Ge0O80nx2UEhHEyWg5+vfZI3LUkb9rmRyfOajZE5YyAbAF7UF8Ezg7MGjVlfdCdc2XLwWwSka+mtBHDBK+odAhZKgyxseiXhZjU/zBfxWRvpWlLCoY5YadM8cyGPqXgGc6ksMrmC4SH1mLJxgNJiOkP9AcrfYBs6W5DJF7BvhE8ItVkO5VNZ7fa1QwgFO3eVOSBdjQYn5gSP5CFgIAAAIABJREFUwa1U8W0VCXqdJW4ZAMhoNSx2rgfZcpAREw5Pd3QbvkPR6yeVs3cr+SxSSe2uk9NB8LkdhpjJbqlHkejwaQw4W0+YQhjpzSTLZCtZTYyCzhYviIBnB/k4ek2TvN/tLAb7pWsn3G8Hx/kKdGaL48y+VkzE0oZgrKjEtnUr2bjTaq1zEPvlCgVJabAv3ARgiLVUk61ULWINiUYKh+pnDkVFxH0iz5tetwNzqRzO+dzd8LkdXDhID6aYdOR0z6LFINxKxeNVur96tJtRtCCwWgvafvy1BKSrEA6mDCPAaDmY3UrVPKQFTToYYw4Lf+gCHhcSFtlKosDRTnAVLQfjoyS7usy58G0BD45NJbSAtI1bycLdqNd6WFyngMdlEG5DMym4nYSwz6036zMEpC1iDvLf4vqSZMWakyHKWQ5upwPtAQ8m4xm4nYROU72NfE9YFQ+KxWzMloNYH3rvyJzuchJtw23dSi7rz1prnYO4X/MFpt8XVveswXKQhFu1Fko1dFl0vq03ynKoI3aWgyCVLaDAjBObmOzkFszmlFhj5XL5G0zkvItAqNWKbnZUG/gGqgsCW1kOVn+LCaGaBmgiuFfPCmmAC9FMvqA/9IJyGiIfR2lCAWD8nObCxbaAW7McyriVXE7Db8AYGzAT8DgNwk0Uva1q8+vptFapqvJnAErTqM3JETKVGsmJCWxDV0uJ6092sbQYYg5abzFtKdhWv7FOR7SGkZfJ1C0HG7eSzyLQLo+/WstBKDY5STiUq80BjO60emYr6c0iG9h8TwmHOmKVjWClGcoPpvBzy5Wd5lRWQH5Iy49BCBnRVK2c6W+mlpXeqoo5WLQTsRIOQrOuRjgIt5I8udTHrcTHYI47lNMQ5e1mLZqISj6foDXgQTSVA2NGl4eM3nDQU2o5WI0laIqZTGpLm3aHfHrPKDu3kqxwiO9MvG5lvernrNhllGvNGy3WMJfvCfk44lod1xcRMlldQb4OirxY0Fg0DSLYFnzKn9VgsZSxxKwQ32ehwPSuB5Ush85QcUzuOmYrRfR+YEo4nBBYadOyIBDIN4/wE8vLUJrdSvLfldw5kaDJcqhBONSSFVXJggGKE3glyyES8OBLbzoTrztrZcVjikBkLWOtBhFQNtc6ZLTWKPYBab7dykITn8/c1tln49Ix7iMCwxZZRRZCNGCKmYi+WoYMJRvhICNateiWg8P+vqvksrxkE19nxWrylZ8BS+Fgs/ocwDsTHJQsh/EoX+HNTjOXr7F839ZsOThly4HfF+WaRQLG57qeloPb6UDI5zKsFVNvlHCoI+JBkoPG1pZDcVu8nOVgkT1UaRoMaes96Kt3zbcIroKWU51bSbMcnDbCQTrftef12/aSkRE5KvLZ61UhDaCkSjqTK8DjdNgKQzHJWRWEic9vTgqQJxQ7t5KY0ORiYLHNOuZgbAo3nciiPeixDTwLQWH+WF2aa0ZfF13cdxYfv9Jk95Ztfbj6zF68/5L1pe+VvjNjN1vjSohWcY0NXS26qwyAYTU7K+wEYS2JEEAxtpWvwa3UbhAO9bMcAB67UpbDCYKVW8nKcpCDgcKnLXdPtTLlySL+YAURX6RFCJ1ATams1n9X2tcOMXnIk7dVQLoWRIzG4HKri3Cwthyy+ULZQGJxfQl7y8GsmXoMPn7rR/DVZ/QCMNXMmFqJGMdv7A01GePatF0BmNhuLuLq0YK6omZC3IvzqbUKeFz41tvPsXYrGdbBKG07IvoGWSk3G7qDmIhl9IWz5HWwrRDX27xPf1sALgeVBL3tkGMOmVwBDrK+h81rWwjcdcxW4sd2W7aZrxcqW6mOWAeky5udAtn8FMexSiOsxoXSGnBjMp6B00FV+1PN56tPKisZfgPWRXC1ICapWtqHV4OYrHKmvj2ZXKGsZulxOmzX+rX6/IBRWNhZDueva8fNf3Mh1ncFS/a1Go+81kA2z4uj2oIezCWLAsOqCC5savwngroiyFvpO3I7CZdu7i67j937BFYupmiKV05bCf712opyBydiOGd1GyZiab1nlRXimG8/37js/eqOAHZ99pVVrbMAyNlKBU1pKJ+kABhX7Ku35VDNCoALQQmHOlKsSyhus6ugNCN3zCya8lYxh8rj4NpKHF6XvTvECqvz2VFLKmsjLAerIOpC0LXCvFE4lJsEAC7sbvvIS/XGejLCDWGeFOTj2cUcAC4gZITFaZUv73KS3pBOrF8c8bsN7eKt+neZM6ku2dSF636/Fxeu560qil14rce4//Ovth1/OQxrnkidbWWhYbew1QZtydeDYzGc3R+paDls6gnhzr9/GTb1lC67Wa1g4GOWLIe8vdJg1fEWqG8qK8Ath0MTsco7zhMlHOqI1WTXbtGvX7Yc1nYEcGQyYZgwxHGs1kauZq4XZnqtGnVtvZUqH09MAPJEUEuhnRXCcpDfWQ+3klN/8E2prGUmAYGd1iomk3JuJa+NW8n6eHxfKw+Pk4rCIaUJBJ/baZmmChTbgrSblijd0hvGvv+4SurKOn+3UjnsLAeR5ZUvMNs6ir42P5wOwtHJBGLpHNK5gl4UZofdqmq1INc5iFiUFfLzbazxqa9bKRLwNDQgrYRDHbEUDhbpdfKEcOsHLzKsfAVIVoJ0L5HptXKISclZo6ZiFEbl961pHLLlIAen5+EO+shlG7FvNIqXbewqHq
cOwkE8xOZ20OUmgUo4LT4/YGycZ+dWssIluTVKXpMsh7TmXuLCwTpD6ZzVEbzzgjX44Cs2lBzL0JqkvsqujiwwzRq1LhxsLAe304GVER+OTSX0CmE5ZbRRiMk9l+cB6UqWQ8DjNFq4dbYcWrw8zsQYq8lDUC1KONQRvfGetM1SOMjZDFretoxV+mDRvK98E+jujBqfbPnQ9XErlY6j0mpzlTh1RQj3/OMlNY+lEuKamWMOldxK5dB7S5kmBY+NNl8Jh+TWMCMmVKBoFXhdDvt6BqcDn3v96ZXP2YBJh5/fOBbDaw5CBvarHgLAmvYgjk4lMBHjPveuluqCygvBKSkQmTJrT4skk3PXtBm219tyCHpdKDDer62W+6haVLZSHbFyb1jVGVgFpA3HsQg+FwVG5XGIe7bWB9uue2qlfe2olMpaL22nHpaDfcyBVZ3qaDcuc5ZKtTEHuzHm8xbCgUgXGqILsM/tlNYOqWHgErW4M2uh3EQprpt5rWyZ/vYABhbdcigK52ye2cYQzlkTwbsuXIOvXnuW8f11thxEbU6szCJPC0EJhzpipalbTYDlMisAuVV3cVstD6lrnpZDLcKkulTWUguoHhO5mXpkKzlNLptMroBP3roLB8dj8w4kWmWdAdXVOZQdo0UAwOlwlFoOboeuiMw3jVIMvZExBzN6rKaMUF7TEcBUPKP3EKsUc6gH8j3CY1H2rU/+/ZrTS3o91TuVNajX5jRGOCi3Uh3Ri+BsJqu3nNuHF6/vqKiJksWkUksqa7H9Rq2WQ/X7VlUhbXH+RgiHejxzbqn6FQAePjiBm544BgCWmUjzObb+fxV1DlaI61mwcCsZYg6a5eB1FQPS89VaG/F9AeUL6Kpxi67RFgHacXQaTgcZ6gkahbnOodq1p/X3N8hysFreth4o4VBH7DRYt5OQzTN8+S1nWb5uxiqVVUQyapmUa485VL9/Ndq61QTQEMuhLtlKxWAjYGxoNl+3kmj1Yb4Onnm6lZw2tRiAdczB5y7GHOZ7jRoR6ASqsxzK3b/9knDoCHrqkrFWCXNX1lpjUfUXDtYrANYLJRzqiN3N/NSnr4DF82yLVSprsd9S5fdbLVda1XlrcStVlcrKjye7JBohHOpa56B9UaI3FVB9Y7ZKx9aPZ1jsqfpjr9fckZssKo55zIELBYPlIC0zOh8aFZAuNx5xj5SzLlZ3cOEwm8zi9FXh+g7OBuGuTecKZbOV7Ki7W8mr3EonDHbaS7msCyuEtmZ1L1Va7AewTiGthtpiDtWPg0mZ+fXO2Kh2LJUQ1+rBF8bxxOFJg0Cb78QqDlEiHKTj1aKZX3RKJ3734YtwhramgYzTQSgw4LX//ZCeGu1zO/TGffMVynXsFWegnEAvriBov0/Y50Z7kC+atLW39Ho0AnENv3znPgDFxoLVUm/rJmjTD6xeKOFQR/QK6QUfh/+2mvSqy1YSwqG2J7ua3a3aV9hh6VZqgCZal2wlbUL63TPDAIzVyfN3K4ljm7KV5nk8ADizL2K5XUyku6QV4AyWw5JzK1VjOZQ/twhKn7ZysSwHU+yoUZKzSvSYQ4PcSipbqY7Uy2VSrj13NdrHfGMOtWjgtaSyGtxKdfa7AvWxHMwWzYS0/OJCJ4FylkO9sLov5JjDfNtF6xXS8x+aJeUmfrvKcjMfu/JUALVr8PPFfI1rcQk2gkZnKzXt0xHR3xLRXiLaTURfkrZ/kogOENE+Inpls8Y3H+qlFZfro1TNKfSaiJqzlSrvX6nXjmEcFjstVcvBfAx5+UWPa37HryYgXS+sF5pyWtaa1MJiVEibERZvpe/1olM6ceS6q7G2Qmp4o6h3r6RaOSljDkT0CgDXADiLMZYmom5t+2kA3gpgK4CVAO4hok2MscY41eqMfjMv8J4pts+wcis1Mlup+n2r0ta1XWStc6lmK5mvVTRVfODmO5mLz22eRObrpiqH+Ro4iH8msSjNfIOh9XKVmik3HnG567lyWiOYSTaur1E1eFwOeJwOvT1/vWlWzOEDAK5jjKUBgDE2pm2/BsDPte2HiegAgPMBPNqcYdaGXqi2wOOUizlUc+xqNa+S99XZrVTP95WjLm4lCy1wVcSPF69vx5vO7VvQsc2fuREap1m4+dy8r8+6ziB6wl586uot8zpuo2IO5SyZYvHg0vZ6H5tKVLXff77hDDx+eLIhY/juO8/Bmo7GWE7NEg6bALyUiD4PIAXgY4yxJwGsAvCYtN+gtq0EInofgPcBwOrVq612WXTqlcesPxxyKqvptbLjsEiFreW85SgGpGs6tHSO+b2vHPVJZS2diLatbcPXrn3RvI+pB6RNx67ncpECswAS/nC/x4nH//nyeR9Xr5Ce9xGsKfesWK0TvtT4s239eOeFa6ra9+0vXo23v7gxc9Slm3saclyggcKBiO4BsMLipU9p520HcAGA8wDcTETrazk+Y+wGADcAwLZt2+p9786Lek18xZhBcZu+PGZN2Uo1Coca9q9FW5cD0o3QROuRImh1iIUGokUK72L4ps1adr0asTUiRgSUdyst5nWbL+96yRpsXbk4KbTNomHCgTFmq64Q0QcA3Mp4xO4JIioA6AQwBKBf2rVP23ZCUK/+M+VaZdTSPqPW57qaOZaIf85qxrFYj3Y9XFVWQqtesYFGtaCQKVdotxCoTq5SM9VYBUvZrdTsTKXFoFmf8DcAXgEARLQJgAfABIDfAXgrEXmJaB2AjQCeaNIY581CteNi4z2LmEMNlkPN563ifbXUOejvqbtTwkijtNuFZhWJa7UY+fBm66kerjagcW6lctekeN2WruVQqbPyyUCzYg7/D8D/I6LnAGQAvFuzInYT0c0AngeQA/ChEyVTqZ7oFdLSszGfmEOt1FQhXcV8pwvJBjv9GqVgLlT7LsYcSq/rJZu6cPEpnQs6voz5HJ116lLaKKunGuHQiGr6etGIjLOlRlOEA2MsA+DPbV77PIDPL+6I6kO95sByk3QtFdK1urnq3T5j0dxKTWgOVwtWLpQf/+X5dTm2wDyJ10s4NKq3Ui1FlEsR5VZSzIt6pbLKFAPS1cccaqWWt9VyjkZnC9RbuxWX2OOsj+tgMTTgUsuhPi2s9aLHuhytlL+7bGPJNr0n1RJ2KynLQVETp3S14N0XrsG7XrJ2QccpJwCqW+yncTGHYkC68vEapHRanKe+Jwp6XIilc3VwK4kK6cZfCHPM4TypN9SCjtug9hkAcOS6qy2321WWLyUaUeW+1FDCoY44HIR/u6byuryVsJqka4k5zDfLo5pJVriqqtn3Vaf34p49o/jEVafOazy1snlFaSvr+eBzOxFL5+rnVlpky+H2j1xctzTLxci0smMpu5WWsuCqF0o4LEHKPRPVPC+L8VBV4+f3e5z49jvObfhYAOC2v70Y/W2Buhwr6HViIrZwv/JiukfkSbye+feLZf1ZsZSFw3JACYclSDnroBqNfTFWxWpUoHK+nG6xxsF8CWjdLhdcBCdSMhfBcmiUht/M77nZLbGXO+rqL0GsHkg9IF3F+xdD41rCWYYLJuDhgegFxxy0b
60RbcrNzLfwsdrjLiZCqDbTpaVQwmFJUu4Br6VCupEsNcuhnvjc/LGol+XQqFRbGRHXqPf30oz5WQjVpZyttBxQbqUTBD0gXcvazQ1MIl3IJPQvV29BV6g+efiNQAT0F245cBZDjjaqtXajurJWg3IrNRclHE4wlozlsIDn9r0vranH4qIjhOuJZB05GzTm/7+9uw+2o67vOP7+3OSGYEIIENQUCEFAESiGJCIPDkalTshQUjU62PIUH1JbIWqHUVs7FrTVGUY6FTtDyiDGUAFB1AkRBh8gBYGgEUkQkZoiHVFGHtqGplY6Id/+sb+TrHfPPffc3Hv24ZzPa+bM3ad79vs7e85+d3+/3d9W8Rm4WqkenBwaooyb4MajSTvO8Wp9fi/umtiZ15pzF3L13Y9P2t3KnUztVZtDFckh/S2jIX+81py78HceBNXPnBxqqNPPsZufahnJoYqdRlmmTlJyWHT4gfzTeZNzM9pYenXmUOmlrDVsc1h6/NyqQyhN/VKztW0pGN9NcD5zmIjW57dz166KI+ler65WqnI7u1qpWk4ODdNVtxUldHmnPv7mLDhsNpA9JrQpenXmUM2lrNnhkVNDtVyt1DBVXj2S14/VSn955jE8u+MF3vP6IzjlyIMa9aSvXrU5VHnw3odfsUZxcmiYuvxg+rFa6U/fcOTu4SYlBuhlm0N6vxIfxFuLZ/6aq5Wapi475RpeSDLQ9iSH3rxvqXZnh3p81weVf+INM57f6mQ907p9HP7h1kmnR8tOxO7vW4mb+yNLX8U+U4ca1ebTj1ytVEOdfod12Sn3Y5tDo7UeytMHVystPX4uj/3t4FwyWlc+c2iaEn6rnfYHl519HNOmDNWm7cMy43nOxnjsfjs3BAwcnzk0TK+P5L730Tey7/Doj8e84NT5XDDBJ93Z5Nt/32EAVrd57OZE+F6DweXk0DDd/FYPmJHtKI566cxxv/+hk/TAHCvX9OEpoz52cyJ2H4w4RwwcJ4eG6ebM4bjf25/r3/s6Fs0/oISIrJ+5WmlwOTk0TLcHcKceNaencdhg8IUHg8sN0g1TlzukbTDU5eo4K5+TQw11OoN3+6CVyclhcDk5NMyQs4OVqJ87WLTOvOlrqPNNcKWFYeY2hwHm5NA4/rFaeVytNLgqSQ6SFkjaJOkhSZslnZSmS9KVkrZJ2ippYRXxVe30Vx4MwPITDynM85mDlcm5YXBVdSnr5cBlEXG7pGVpfAlwJnB0er0OuCr9HSivOHjmqDc0+UjOyuSv2+CqqlopgFlpeH/gV2l4ObAuMpuA2ZLcA1eOk4OVaXhoiCHBX5/16qpDsZJVdebwIeAOSZ8lS1CnpumHAL/ILfdkmvbUyDeQtApYBTBv3ryeBlsnzg1WpqEh8fhnJr9bDqu/niUHSd8BXt5m1seBNwMfjohbJL0T+AJwxnjePyKuBq4GWLx4cd/f3L+7ixsnBzMrQc+SQ0SMurOXtA74YBq9GbgmDf8SOCy36KFp2sBrdcnsaiUzK0NVbQ6/At6Qht8E/CwNrwfOT1ctnQxsj4hCldIgc3IwszJU1ebwPuBzkqYCvyW1HQC3AcuAbcBvgJXVhFdfzg1mVoZKkkNEfA9Y1GZ6AB8oP6L6ayUF3+dgZmXwHdIN415ZzawMTg4N4QZpMyuTk0PDODWYWRmcHBpiT5uD04OZ9Z6TQ8M4N5hZGZwcGsYP+zGzMjg5NMSeBulq4zCzweDk0DByk7SZlcDJoSHc1mBmZXJyMDOzAieHhoi+75TczOrEycHMzAqcHBrCbQ5mViYnBzMzK3ByaIiXTMt6V/cZhJmVoaqH/dg4rV35WjZsfYqX7rdP1aGY2QBwcmiIww+awQfeeFTVYZjZgHC1kpmZFTg5mJlZgZODmZkVODmYmVmBk4OZmRU4OZiZWYGTg5mZFTg5mJlZgaIP+oKW9Azw73v573OAZycxnCo0vQxNjx9chjpoevxQfhkOj4iD283oi+QwEZI2R8TiquOYiKaXoenxg8tQB02PH+pVBlcrmZlZgZODmZkVODnA1VUHMAmaXoamxw8uQx00PX6oURkGvs3BzMyKfOZgZmYFTg5mZlYwMMlB0lJJj0naJuljbebvI+kraf4DkuaXH2VnXZThQknPSHoovd5bRZyjkXStpKcl/XiU+ZJ0ZSrfVkkLy46xky7iXyJpe+7z/0TZMXYi6TBJd0n6iaRHJH2wzTJ13wbdlKG220HSdEnfl7QlxX9Zm2XqsS+KiL5/AVOAfwNeAUwDtgDHjljmz4E1afgc4CtVx70XZbgQ+MeqY+1QhtOBhcCPR5m/DLgdEHAy8EDVMY8z/iXAhqrj7BD/XGBhGt4P+Nc236G6b4NuylDb7ZA+15lpeBh4ADh5xDK12BcNypnDScC2iHg8Iv4PuBFYPmKZ5cCX0vBXgTdLUokxjqWbMtRaRNwN/EeHRZYD6yKzCZgtaW450Y2ti/hrLSKeiogH0/B/A48Ch4xYrO7boJsy1Fb6XHek0eH0GnlVUC32RYOSHA4BfpEbf5LiF2r3MhGxE9gOHFRKdN3ppgwAb0/VAV+VdFg5oU2abstYZ6ekKoPbJR1XdTCjSVUVJ5IdueY1Zht0KAPUeDtImiLpIeBp4NsRMeo2qHJfNCjJYVDcCsyPiBOAb7Pn6MPK8SBZXzWvAT4PfKPieNqSNBO4BfhQRDxfdTx7Y4wy1Ho7RMSLEbEAOBQ4SdLxVcfUzqAkh18C+aPoQ9O0tstImgrsDzxXSnTdGbMMEfFcRLyQRq8BFpUU22TpZjvVVkQ836oyiIjbgGFJcyoO63dIGibbqX45Ir7WZpHab4OxytCE7QAQEf8F3AUsHTGrFvuiQUkOPwCOlnSEpGlkjTzrRyyzHrggDa8A7ozUIlQTY5ZhRN3w2WT1sU2yHjg/XTFzMrA9Ip6qOqhuSXp5q25Y0klkv6/aHGCk2L4APBoRfz/KYrXeBt2Uoc7bQdLBkman4X2BPwB+OmKxWuyLppa9wipExE5JFwF3kF31c21EPCLpk8DmiFhP9oW7TtI2skbHc6qLuKjLMqyWdDawk6wMF1YWcBuSbiC7kmSOpCeBvyFrkCMi1gC3kV0tsw34DbCymkjb6yL+FcCfSdoJ/C9wTs0OME4DzgMeTnXeAH8FzINmbAO6K0Odt8Nc4EuSppAlrZsiYkMd90XuPsPMzAoGpVrJzMzGwcnBzMwKnBzMzKzAycHMzAqcHMzMrMDJwWpBUki6Ijd+iaRLS45ho6TFafi21vXoE3i/JZI2jDI932vodyayHrNecHKwungBeNve3sma7iSdNBGxLN3B2iv3RMSC9DojP2Oyy2K2N5wcrC52kj0/98MjZ0iaL+nO1KHgdyXNS9PXSloj6QHg8jR+laRNkh5PR+jXSnpU0trc+10lafNo/emnZZ6QNEfS+3NH+D+XdFea/xZJ90t6UNLNqa+f1jM3firpQeBt3RZe2bM41ku6E/iupBkp9u9L+pGk5Wm5fSXdmMr0dWX9/bfOdnbk3m9Fq8zprtxbJP0gvU5L0y9N69iYPq/Vuf8/
P33eWyRdJ2m/VP7hNH9Wftz6UBX9hPvl18gXsAOYBTxB1pfMJcClad6twAVp+N3AN9LwWmADMCU3fiNZn/nLgeeB3yc7CPohsCAtd2D6OwXYCJyQxjcCi9PwE8CcXHzDwD3AHwJzgLuBGWneR4FPANPJetM8OsVwE22eK0B2l/V24KH0+jjZ3exP5mL7NHBuGp5N9tyCGcBfkN0dD3ACWVJtxbwjt44VwNo0fD3w+jQ8j6zrCYBLgfuAfVKZnkvlPC6tb86Iz+uLwB+l4VXAFVV/b/zq3cunr1YbEfG8pHXAarJuD1pOYc9R+HXA5bl5N0fEi7nxWyMiJD0M/DoiHgaQ9Agwn2xn/E5Jq8i6j5kLHAtsHSO8z5H1cXOrpLPS/9ybuvCZBtwPHAP8PCJ+ltb5z2Q70XbuiYizWiOSLiTrvrn1vIi3AGdLuiSNTyfbsZ8OXAkQEVsljRU3wBnAsdrzSIBZrTMd4JuRddb4gqSngZcBbyL7XJ9N62nFdA3wEbJeTlcC7+ti3dZQTg5WN/9A1uXyF7tc/n9GjLd6pd2VG26NT5V0BNlZyWsj4j9T1cv0TitIO+7DgYtak8h25O8asdyCLmMeTb4sAt4eEY+NWEen/8/3hZMv0xDZ08Z+2+a98p/Ri3TYJ0TEvamKbwnZ2Vrbx6Vaf3Cbg9VKOkq9CXhPbvJ97Ol87E/Iqnf21iyynfB2SS8Dzuy0sKRFZMnk3IjYlSZvAk6TdFRaZoakV5L1rjlf0pFpuXcV3rB7dwAXS7t7Fz0xTb8b+OM07XiyqqWWX0t6taQh4K256d8CLs6VaawkdifwDkkHpeUPzM1bR1ZN1W3ytoZycrA6uoKsDrzlYmBlqkI5Dyg8VL5bEbEF+BHZjvx64N4x/uUi4EDgrtQofU1EPEPWRnBDiul+4Jh0ZL4K+GZqkH56b+MEPkVW/781VYl9Kk2/Cpgp6VHgk2RtKS0fI2uDuQ/Id7O9GlicGph/Ary/04oj4hHg74B/kbQFyHeN/WXgAOCGvS2YNYN7ZTVrMEkbgUsiYnNJ61sBLI+I88pYn1XHbQ5m1hVJnyerhltWdSzWez5zMDOzArc5mJlZgZODmZkVODmYmVmBk4OZmRU4OZiZWcH/A09HDwZiAAAAA0lEQVQgq1zuqXC/AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "err = optimfuncQMF(b)\n", - "print(err)\n", - "\n", - "#Restore symmetric upper half of window:\n", - "h = np.concatenate((xmin, np.flipud(xmin)))\n", - "plt.plot(h)\n", - "plt.title('Resulting PQMF Window Function')\n", - "plt.xlabel('Sample')\n", - "plt.ylabel('Value')\n", - "plt.show()\n", - "\n", - "f, H = sig.freqz(h)\n", - "plt.plot(f, 20*np.log10(np.abs(H)))\n", - "plt.title('Resulting PQMF Magnitude Response')\n", - "plt.xlabel('Normalized Frequency')\n", - "plt.ylabel('dB')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3.7.4 64-bit ('base': conda)", - "language": "python", - "name": "python37464bitbaseconda58faf23c4b5f4fef93406f29a1005f35" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/vocoder/notebooks/Untitled1.ipynb b/vocoder/notebooks/Untitled1.ipynb deleted file mode 100644 index 7fec5150..00000000 --- a/vocoder/notebooks/Untitled1.ipynb +++ /dev/null @@ -1,6 +0,0 @@ -{ - "cells": [], - "metadata": {}, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/vocoder/pqmf_output.wav b/vocoder/pqmf_output.wav deleted file mode 100644 index 8a77747b..00000000 Binary files a/vocoder/pqmf_output.wav and /dev/null differ diff --git a/vocoder/tests/__init__.py b/vocoder/tests/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/vocoder/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/vocoder/tf/convert_melgan_tflite.py b/vocoder/tf/convert_melgan_tflite.py deleted file mode 100644 index 9a652b57..00000000 --- a/vocoder/tf/convert_melgan_tflite.py +++ /dev/null @@ -1,33 +0,0 @@ -# Convert Tensorflow Tacotron2 model to TF-Lite binary - -import argparse - -from TTS.utils.io import load_config -from TTS.vocoder.tf.utils.generic_utils import setup_generator -from TTS.vocoder.tf.utils.io import load_checkpoint -from TTS.vocoder.tf.utils.tflite import convert_melgan_to_tflite - - -parser = argparse.ArgumentParser() -parser.add_argument('--tf_model', - type=str, - help='Path to target torch model to be converted to TF.') -parser.add_argument('--config_path', - type=str, - help='Path to config file of torch model.') -parser.add_argument('--output_path', - type=str, - help='path to tflite output binary.') -args = parser.parse_args() - -# Set constants -CONFIG = load_config(args.config_path) - -# load the model -model = setup_generator(CONFIG) -model.build_inference() -model = load_checkpoint(model, args.tf_model) - -# create tflite model -tflite_model = convert_melgan_to_tflite(model, output_path=args.output_path) - diff --git a/vocoder/tf/convert_melgan_torch_to_tf.py b/vocoder/tf/convert_melgan_torch_to_tf.py deleted file mode 100644 index 4c8515d9..00000000 --- a/vocoder/tf/convert_melgan_torch_to_tf.py +++ /dev/null @@ -1,117 +0,0 @@ -import argparse -import os - -import numpy as np -import tensorflow as tf -import torch -from fuzzywuzzy import fuzz - -from TTS.utils.io import load_config -from TTS.vocoder.tf.utils.convert_torch_to_tf_utils import ( - compare_torch_tf, convert_tf_name, 
transfer_weights_torch_to_tf) -from TTS.vocoder.tf.utils.generic_utils import \ - setup_generator as setup_tf_generator -from TTS.vocoder.tf.utils.io import save_checkpoint -from TTS.vocoder.utils.generic_utils import setup_generator - -# prevent GPU use -os.environ['CUDA_VISIBLE_DEVICES'] = '' - -# define args -parser = argparse.ArgumentParser() -parser.add_argument('--torch_model_path', - type=str, - help='Path to target torch model to be converted to TF.') -parser.add_argument('--config_path', - type=str, - help='Path to config file of torch model.') -parser.add_argument( - '--output_path', - type=str, - help='path to output file including file name to save TF model.') -args = parser.parse_args() - -# load model config -config_path = args.config_path -c = load_config(config_path) -num_speakers = 0 - -# init torch model -model = setup_generator(c) -checkpoint = torch.load(args.torch_model_path, - map_location=torch.device('cpu')) -state_dict = checkpoint['model'] -model.load_state_dict(state_dict) -model.remove_weight_norm() -state_dict = model.state_dict() - -# init tf model -model_tf = setup_tf_generator(c) - -common_sufix = '/.ATTRIBUTES/VARIABLE_VALUE' -# get tf_model graph by passing an input -# B x D x T -dummy_input = tf.random.uniform((7, 80, 64), dtype=tf.float32) -mel_pred = model_tf(dummy_input, training=False) - -# get tf variables -tf_vars = model_tf.weights - -# match variable names with fuzzy logic -torch_var_names = list(state_dict.keys()) -tf_var_names = [we.name for we in model_tf.weights] -var_map = [] -for tf_name in tf_var_names: - # skip re-mapped layer names - if tf_name in [name[0] for name in var_map]: - continue - tf_name_edited = convert_tf_name(tf_name) - ratios = [ - fuzz.ratio(torch_name, tf_name_edited) - for torch_name in torch_var_names - ] - max_idx = np.argmax(ratios) - matching_name = torch_var_names[max_idx] - del torch_var_names[max_idx] - var_map.append((tf_name, matching_name)) - -# pass weights -tf_vars = transfer_weights_torch_to_tf(tf_vars, dict(var_map), state_dict) - -# Compare TF and TORCH models -# check embedding outputs -model.eval() -dummy_input_torch = torch.ones((1, 80, 10)) -dummy_input_tf = tf.convert_to_tensor(dummy_input_torch.numpy()) -dummy_input_tf = tf.transpose(dummy_input_tf, perm=[0, 2, 1]) -dummy_input_tf = tf.expand_dims(dummy_input_tf, 2) - -out_torch = model.layers[0](dummy_input_torch) -out_tf = model_tf.model_layers[0](dummy_input_tf) -out_tf_ = tf.transpose(out_tf, perm=[0, 3, 2, 1])[:, :, 0, :] - -assert compare_torch_tf(out_torch, out_tf_) < 1e-5 - -for i in range(1, len(model.layers)): - print(f"{i} -> {model.layers[i]} vs {model_tf.model_layers[i]}") - out_torch = model.layers[i](out_torch) - out_tf = model_tf.model_layers[i](out_tf) - out_tf_ = tf.transpose(out_tf, perm=[0, 3, 2, 1])[:, :, 0, :] - diff = compare_torch_tf(out_torch, out_tf_) - assert diff < 1e-5, diff - -torch.manual_seed(0) -dummy_input_torch = torch.rand((1, 80, 100)) -dummy_input_tf = tf.convert_to_tensor(dummy_input_torch.numpy()) -model.inference_padding = 0 -model_tf.inference_padding = 0 -output_torch = model.inference(dummy_input_torch) -output_tf = model_tf(dummy_input_tf, training=False) -assert compare_torch_tf(output_torch, output_tf) < 1e-5, compare_torch_tf( - output_torch, output_tf) - -# save tf model -save_checkpoint(model_tf, checkpoint['step'], checkpoint['epoch'], - args.output_path) -print(' > Model conversion is successfully completed :).') - diff --git a/vocoder/tf/layers/melgan.py b/vocoder/tf/layers/melgan.py deleted file 
mode 100644 index 3fad4c2a..00000000 --- a/vocoder/tf/layers/melgan.py +++ /dev/null @@ -1,57 +0,0 @@ -import tensorflow as tf - - -class ReflectionPad1d(tf.keras.layers.Layer): - def __init__(self, padding): - super(ReflectionPad1d, self).__init__() - self.padding = padding - - def call(self, x): - return tf.pad(x, [[0, 0], [self.padding, self.padding], [0, 0], [0, 0]], "REFLECT") - - -class ResidualStack(tf.keras.layers.Layer): - def __init__(self, channels, num_res_blocks, kernel_size, name): - super(ResidualStack, self).__init__(name=name) - - assert (kernel_size - 1) % 2 == 0, " [!] kernel_size has to be odd." - base_padding = (kernel_size - 1) // 2 - - self.blocks = [] - num_layers = 2 - for idx in range(num_res_blocks): - layer_kernel_size = kernel_size - layer_dilation = layer_kernel_size**idx - layer_padding = base_padding * layer_dilation - block = [ - tf.keras.layers.LeakyReLU(0.2), - ReflectionPad1d(layer_padding), - tf.keras.layers.Conv2D(filters=channels, - kernel_size=(kernel_size, 1), - dilation_rate=(layer_dilation, 1), - use_bias=True, - padding='valid', - name=f'blocks.{idx}.{num_layers}'), - tf.keras.layers.LeakyReLU(0.2), - tf.keras.layers.Conv2D(filters=channels, - kernel_size=(1, 1), - use_bias=True, - name=f'blocks.{idx}.{num_layers + 2}') - ] - self.blocks.append(block) - self.shortcuts = [ - tf.keras.layers.Conv2D(channels, - kernel_size=1, - use_bias=True, - name=f'shortcuts.{i}') - for i in range(num_res_blocks) - ] - - def call(self, x): - # breakpoint() - for block, shortcut in zip(self.blocks, self.shortcuts): - res = shortcut(x) - for layer in block: - x = layer(x) - x += res - return x \ No newline at end of file diff --git a/vocoder/tf/layers/pqmf.py b/vocoder/tf/layers/pqmf.py deleted file mode 100644 index c018971f..00000000 --- a/vocoder/tf/layers/pqmf.py +++ /dev/null @@ -1,66 +0,0 @@ -import numpy as np -import tensorflow as tf - -from scipy import signal as sig - - -class PQMF(tf.keras.layers.Layer): - def __init__(self, N=4, taps=62, cutoff=0.15, beta=9.0): - super(PQMF, self).__init__() - # define filter coefficient - self.N = N - self.taps = taps - self.cutoff = cutoff - self.beta = beta - - QMF = sig.firwin(taps + 1, cutoff, window=('kaiser', beta)) - H = np.zeros((N, len(QMF))) - G = np.zeros((N, len(QMF))) - for k in range(N): - constant_factor = (2 * k + 1) * (np.pi / - (2 * N)) * (np.arange(taps + 1) - - ((taps - 1) / 2)) - phase = (-1)**k * np.pi / 4 - H[k] = 2 * QMF * np.cos(constant_factor + phase) - - G[k] = 2 * QMF * np.cos(constant_factor - phase) - - # [N, 1, taps + 1] == [filter_width, in_channels, out_channels] - self.H = np.transpose(H[:, None, :], (2, 1, 0)).astype('float32') - self.G = np.transpose(G[None, :, :], (2, 1, 0)).astype('float32') - - # filter for downsampling & upsampling - updown_filter = np.zeros((N, N, N), dtype=np.float32) - for k in range(N): - updown_filter[0, k, k] = 1.0 - self.updown_filter = updown_filter.astype(np.float32) - - def analysis(self, x): - """ - x : B x 1 x T - """ - x = tf.transpose(x, perm=[0, 2, 1]) - x = tf.pad(x, [[0, 0], [self.taps // 2, self.taps // 2], [0, 0]], constant_values=0.0) - x = tf.nn.conv1d(x, self.H, stride=1, padding='VALID') - x = tf.nn.conv1d(x, - self.updown_filter, - stride=self.N, - padding='VALID') - x = tf.transpose(x, perm=[0, 2, 1]) - return x - - def synthesis(self, x): - """ - x : B x D x T - """ - x = tf.transpose(x, perm=[0, 2, 1]) - x = tf.nn.conv1d_transpose( - x, - self.updown_filter * self.N, - strides=self.N, - output_shape=(tf.shape(x)[0], tf.shape(x)[1] * 
self.N, - self.N)) - x = tf.pad(x, [[0, 0], [self.taps // 2, self.taps // 2], [0, 0]], constant_values=0.0) - x = tf.nn.conv1d(x, self.G, stride=1, padding="VALID") - x = tf.transpose(x, perm=[0, 2, 1]) - return x diff --git a/vocoder/tf/models/melgan_generator.py b/vocoder/tf/models/melgan_generator.py deleted file mode 100644 index 168fd29e..00000000 --- a/vocoder/tf/models/melgan_generator.py +++ /dev/null @@ -1,128 +0,0 @@ -import logging -import os - -os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # FATAL -logging.getLogger('tensorflow').setLevel(logging.FATAL) - -import tensorflow as tf -from TTS.vocoder.tf.layers.melgan import ResidualStack, ReflectionPad1d - - -#pylint: disable=too-many-ancestors -#pylint: disable=abstract-method -class MelganGenerator(tf.keras.models.Model): - """ Melgan Generator TF implementation dedicated for inference with no - weight norm """ - def __init__(self, - in_channels=80, - out_channels=1, - proj_kernel=7, - base_channels=512, - upsample_factors=(8, 8, 2, 2), - res_kernel=3, - num_res_blocks=3): - super(MelganGenerator, self).__init__() - - self.in_channels = in_channels - - # assert model parameters - assert (proj_kernel - - 1) % 2 == 0, " [!] proj_kernel should be an odd number." - - # setup additional model parameters - base_padding = (proj_kernel - 1) // 2 - act_slope = 0.2 - self.inference_padding = 2 - - # initial layer - self.initial_layer = [ - ReflectionPad1d(base_padding), - tf.keras.layers.Conv2D(filters=base_channels, - kernel_size=(proj_kernel, 1), - strides=1, - padding='valid', - use_bias=True, - name="1") - ] - num_layers = 3 # count number of layers for layer naming - - # upsampling layers and residual stacks - self.upsample_layers = [] - for idx, upsample_factor in enumerate(upsample_factors): - layer_out_channels = base_channels // (2**(idx + 1)) - layer_filter_size = upsample_factor * 2 - layer_stride = upsample_factor - # layer_output_padding = upsample_factor % 2 - self.upsample_layers += [ - tf.keras.layers.LeakyReLU(act_slope), - tf.keras.layers.Conv2DTranspose( - filters=layer_out_channels, - kernel_size=(layer_filter_size, 1), - strides=(layer_stride, 1), - padding='same', - # output_padding=layer_output_padding, - use_bias=True, - name=f'{num_layers}'), - ResidualStack(channels=layer_out_channels, - num_res_blocks=num_res_blocks, - kernel_size=res_kernel, - name=f'layers.{num_layers + 1}') - ] - num_layers += num_res_blocks - 1 - - self.upsample_layers += [tf.keras.layers.LeakyReLU(act_slope)] - - # final layer - self.final_layers = [ - ReflectionPad1d(base_padding), - tf.keras.layers.Conv2D(filters=out_channels, - kernel_size=(proj_kernel, 1), - use_bias=True, - name=f'layers.{num_layers + 1}'), - tf.keras.layers.Activation("tanh") - ] - - # self.model_layers = tf.keras.models.Sequential(self.initial_layer + self.upsample_layers + self.final_layers, name="layers") - self.model_layers = self.initial_layer + self.upsample_layers + self.final_layers - - @tf.function(experimental_relax_shapes=True) - def call(self, c, training=False): - """ - c : B x C x T - """ - if training: - raise NotImplementedError() - return self.inference(c) - - def inference(self, c): - c = tf.transpose(c, perm=[0, 2, 1]) - c = tf.expand_dims(c, 2) - # FIXME: TF had no replicate padding as in Torch - # c = tf.pad(c, [[0, 0], [self.inference_padding, self.inference_padding], [0, 0], [0, 0]], "REFLECT") - o = c - for layer in self.model_layers: - o = layer(o) - # o = self.model_layers(c) - o = tf.transpose(o, perm=[0, 3, 2, 1]) - return o[:, :, 0, :] - - def 
build_inference(self): - x = tf.random.uniform((1, self.in_channels, 4), dtype=tf.float32) - self(x, training=False) - - @tf.function( - experimental_relax_shapes=True, - input_signature=[ - tf.TensorSpec([1, None, None], dtype=tf.float32), - ],) - def inference_tflite(self, c): - c = tf.transpose(c, perm=[0, 2, 1]) - c = tf.expand_dims(c, 2) - # FIXME: TF had no replicate padding as in Torch - # c = tf.pad(c, [[0, 0], [self.inference_padding, self.inference_padding], [0, 0], [0, 0]], "REFLECT") - o = c - for layer in self.model_layers: - o = layer(o) - # o = self.model_layers(c) - o = tf.transpose(o, perm=[0, 3, 2, 1]) - return o[:, :, 0, :] \ No newline at end of file diff --git a/vocoder/tf/models/multiband_melgan_generator.py b/vocoder/tf/models/multiband_melgan_generator.py deleted file mode 100644 index bdd333ed..00000000 --- a/vocoder/tf/models/multiband_melgan_generator.py +++ /dev/null @@ -1,60 +0,0 @@ -import tensorflow as tf - -from TTS.vocoder.tf.models.melgan_generator import MelganGenerator -from TTS.vocoder.tf.layers.pqmf import PQMF - -#pylint: disable=too-many-ancestors -#pylint: disable=abstract-method -class MultibandMelganGenerator(MelganGenerator): - def __init__(self, - in_channels=80, - out_channels=4, - proj_kernel=7, - base_channels=384, - upsample_factors=(2, 8, 2, 2), - res_kernel=3, - num_res_blocks=3): - super(MultibandMelganGenerator, - self).__init__(in_channels=in_channels, - out_channels=out_channels, - proj_kernel=proj_kernel, - base_channels=base_channels, - upsample_factors=upsample_factors, - res_kernel=res_kernel, - num_res_blocks=num_res_blocks) - self.pqmf_layer = PQMF(N=4, taps=62, cutoff=0.15, beta=9.0) - - def pqmf_analysis(self, x): - return self.pqmf_layer.analysis(x) - - def pqmf_synthesis(self, x): - return self.pqmf_layer.synthesis(x) - - def inference(self, c): - c = tf.transpose(c, perm=[0, 2, 1]) - c = tf.expand_dims(c, 2) - # FIXME: TF had no replicate padding as in Torch - # c = tf.pad(c, [[0, 0], [self.inference_padding, self.inference_padding], [0, 0], [0, 0]], "REFLECT") - o = c - for layer in self.model_layers: - o = layer(o) - o = tf.transpose(o, perm=[0, 3, 2, 1]) - o = self.pqmf_layer.synthesis(o[:, :, 0, :]) - return o - - @tf.function( - experimental_relax_shapes=True, - input_signature=[ - tf.TensorSpec([1, 80, None], dtype=tf.float32), - ],) - def inference_tflite(self, c): - c = tf.transpose(c, perm=[0, 2, 1]) - c = tf.expand_dims(c, 2) - # FIXME: TF had no replicate padding as in Torch - # c = tf.pad(c, [[0, 0], [self.inference_padding, self.inference_padding], [0, 0], [0, 0]], "REFLECT") - o = c - for layer in self.model_layers: - o = layer(o) - o = tf.transpose(o, perm=[0, 3, 2, 1]) - o = self.pqmf_layer.synthesis(o[:, :, 0, :]) - return o diff --git a/vocoder/tf/utils/__init__.py b/vocoder/tf/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/vocoder/tf/utils/convert_torch_to_tf_utils.py b/vocoder/tf/utils/convert_torch_to_tf_utils.py deleted file mode 100644 index 25139cc3..00000000 --- a/vocoder/tf/utils/convert_torch_to_tf_utils.py +++ /dev/null @@ -1,45 +0,0 @@ -import numpy as np -import tensorflow as tf - - -def compare_torch_tf(torch_tensor, tf_tensor): - """ Compute the average absolute difference b/w torch and tf tensors """ - return abs(torch_tensor.detach().numpy() - tf_tensor.numpy()).mean() - - -def convert_tf_name(tf_name): - """ Convert certain patterns in TF layer names to Torch patterns """ - tf_name_tmp = tf_name - tf_name_tmp = tf_name_tmp.replace(':0', '') - tf_name_tmp = 
tf_name_tmp.replace('/forward_lstm/lstm_cell_1/recurrent_kernel', '/weight_hh_l0') - tf_name_tmp = tf_name_tmp.replace('/forward_lstm/lstm_cell_2/kernel', '/weight_ih_l1') - tf_name_tmp = tf_name_tmp.replace('/recurrent_kernel', '/weight_hh') - tf_name_tmp = tf_name_tmp.replace('/kernel', '/weight') - tf_name_tmp = tf_name_tmp.replace('/gamma', '/weight') - tf_name_tmp = tf_name_tmp.replace('/beta', '/bias') - tf_name_tmp = tf_name_tmp.replace('/', '.') - return tf_name_tmp - - -def transfer_weights_torch_to_tf(tf_vars, var_map_dict, state_dict): - """ Transfer weigths from torch state_dict to TF variables """ - print(" > Passing weights from Torch to TF ...") - for tf_var in tf_vars: - torch_var_name = var_map_dict[tf_var.name] - print(f' | > {tf_var.name} <-- {torch_var_name}') - # if tuple, it is a bias variable - if 'kernel' in tf_var.name: - torch_weight = state_dict[torch_var_name] - numpy_weight = torch_weight.permute([2, 1, 0]).numpy()[:, None, :, :] - if 'bias' in tf_var.name: - torch_weight = state_dict[torch_var_name] - numpy_weight = torch_weight - assert np.all(tf_var.shape == numpy_weight.shape), f" [!] weight shapes does not match: {tf_var.name} vs {torch_var_name} --> {tf_var.shape} vs {numpy_weight.shape}" - tf.keras.backend.set_value(tf_var, numpy_weight) - return tf_vars - - -def load_tf_vars(model_tf, tf_vars): - for tf_var in tf_vars: - model_tf.get_layer(tf_var.name).set_weights(tf_var) - return model_tf diff --git a/vocoder/tf/utils/generic_utils.py b/vocoder/tf/utils/generic_utils.py deleted file mode 100644 index 580a3738..00000000 --- a/vocoder/tf/utils/generic_utils.py +++ /dev/null @@ -1,35 +0,0 @@ -import re -import importlib - - -def to_camel(text): - text = text.capitalize() - return re.sub(r'(?!^)_([a-zA-Z])', lambda m: m.group(1).upper(), text) - - -def setup_generator(c): - print(" > Generator Model: {}".format(c.generator_model)) - MyModel = importlib.import_module('TTS.vocoder.tf.models.' 
+ - c.generator_model.lower()) - MyModel = getattr(MyModel, to_camel(c.generator_model)) - if c.generator_model in 'melgan_generator': - model = MyModel( - in_channels=c.audio['num_mels'], - out_channels=1, - proj_kernel=7, - base_channels=512, - upsample_factors=c.generator_model_params['upsample_factors'], - res_kernel=3, - num_res_blocks=c.generator_model_params['num_res_blocks']) - if c.generator_model in 'melgan_fb_generator': - pass - if c.generator_model in 'multiband_melgan_generator': - model = MyModel( - in_channels=c.audio['num_mels'], - out_channels=4, - proj_kernel=7, - base_channels=384, - upsample_factors=c.generator_model_params['upsample_factors'], - res_kernel=3, - num_res_blocks=c.generator_model_params['num_res_blocks']) - return model \ No newline at end of file diff --git a/vocoder/tf/utils/io.py b/vocoder/tf/utils/io.py deleted file mode 100644 index d95d972c..00000000 --- a/vocoder/tf/utils/io.py +++ /dev/null @@ -1,27 +0,0 @@ -import datetime -import pickle -import tensorflow as tf - - -def save_checkpoint(model, current_step, epoch, output_path, **kwargs): - """ Save TF Vocoder model """ - state = { - 'model': model.weights, - 'step': current_step, - 'epoch': epoch, - 'date': datetime.date.today().strftime("%B %d, %Y"), - } - state.update(kwargs) - pickle.dump(state, open(output_path, 'wb')) - - -def load_checkpoint(model, checkpoint_path): - """ Load TF Vocoder model """ - checkpoint = pickle.load(open(checkpoint_path, 'rb')) - chkp_var_dict = {var.name: var.numpy() for var in checkpoint['model']} - tf_vars = model.weights - for tf_var in tf_vars: - layer_name = tf_var.name - chkp_var_value = chkp_var_dict[layer_name] - tf.keras.backend.set_value(tf_var, chkp_var_value) - return model \ No newline at end of file diff --git a/vocoder/tf/utils/tflite.py b/vocoder/tf/utils/tflite.py deleted file mode 100644 index d0637596..00000000 --- a/vocoder/tf/utils/tflite.py +++ /dev/null @@ -1,31 +0,0 @@ -import tensorflow as tf - - -def convert_melgan_to_tflite(model, - output_path=None, - experimental_converter=True): - """Convert Tensorflow MelGAN model to TFLite. 
Save a binary file if output_path is - provided, else return TFLite model.""" - - concrete_function = model.inference_tflite.get_concrete_function() - converter = tf.lite.TFLiteConverter.from_concrete_functions( - [concrete_function]) - converter.experimental_new_converter = experimental_converter - converter.optimizations = [] - converter.target_spec.supported_ops = [ - tf.lite.OpsSet.TFLITE_BUILTINS, tf.lite.OpsSet.SELECT_TF_OPS - ] - tflite_model = converter.convert() - print(f'Tflite Model size is {len(tflite_model) / (1024.0 * 1024.0)} MBs.') - if output_path is not None: - # same model binary if outputpath is provided - with open(output_path, 'wb') as f: - f.write(tflite_model) - return None - return tflite_model - - -def load_tflite_model(tflite_path): - tflite_model = tf.lite.Interpreter(model_path=tflite_path) - tflite_model.allocate_tensors() - return tflite_model \ No newline at end of file diff --git a/vocoder/train.py b/vocoder/train.py deleted file mode 100644 index dc081a5e..00000000 --- a/vocoder/train.py +++ /dev/null @@ -1,657 +0,0 @@ -import argparse -import glob -import os -import sys -import time -import traceback - -import torch -from torch.utils.data import DataLoader - -from inspect import signature - -from TTS.utils.audio import AudioProcessor -from TTS.utils.generic_utils import (KeepAverage, count_parameters, - create_experiment_folder, get_git_branch, - remove_experiment_folder, set_init_dict) -from TTS.utils.io import copy_config_file, load_config -from TTS.utils.radam import RAdam -from TTS.utils.tensorboard_logger import TensorboardLogger -from TTS.utils.training import setup_torch_training_env -from TTS.vocoder.datasets.gan_dataset import GANDataset -from TTS.vocoder.datasets.preprocess import load_wav_data, load_wav_feat_data -# from distribute import (DistributedSampler, apply_gradient_allreduce, -# init_distributed, reduce_tensor) -from TTS.vocoder.layers.losses import DiscriminatorLoss, GeneratorLoss -from TTS.vocoder.utils.io import save_checkpoint, save_best_model -from TTS.vocoder.utils.console_logger import ConsoleLogger -from TTS.vocoder.utils.generic_utils import (check_config, plot_results, - setup_discriminator, - setup_generator) - - -use_cuda, num_gpus = setup_torch_training_env(True, True) - - -def setup_loader(ap, is_val=False, verbose=False): - if is_val and not c.run_eval: - loader = None - else: - dataset = GANDataset(ap=ap, - items=eval_data if is_val else train_data, - seq_len=c.seq_len, - hop_len=ap.hop_length, - pad_short=c.pad_short, - conv_pad=c.conv_pad, - is_training=not is_val, - return_segments=not is_val, - use_noise_augment=c.use_noise_augment, - use_cache=c.use_cache, - verbose=verbose) - dataset.shuffle_mapping() - # sampler = DistributedSampler(dataset) if num_gpus > 1 else None - loader = DataLoader(dataset, - batch_size=1 if is_val else c.batch_size, - shuffle=True, - drop_last=False, - sampler=None, - num_workers=c.num_val_loader_workers - if is_val else c.num_loader_workers, - pin_memory=False) - return loader - - -def format_data(data): - if isinstance(data[0], list): - # setup input data - c_G, x_G = data[0] - c_D, x_D = data[1] - - # dispatch data to GPU - if use_cuda: - c_G = c_G.cuda(non_blocking=True) - x_G = x_G.cuda(non_blocking=True) - c_D = c_D.cuda(non_blocking=True) - x_D = x_D.cuda(non_blocking=True) - - return c_G, x_G, c_D, x_D - - # return a whole audio segment - co, x = data - if use_cuda: - co = co.cuda(non_blocking=True) - x = x.cuda(non_blocking=True) - return co, x, None, None - - -def train(model_G, 
criterion_G, optimizer_G, model_D, criterion_D, optimizer_D, - scheduler_G, scheduler_D, ap, global_step, epoch): - data_loader = setup_loader(ap, is_val=False, verbose=(epoch == 0)) - model_G.train() - model_D.train() - epoch_time = 0 - keep_avg = KeepAverage() - if use_cuda: - batch_n_iter = int( - len(data_loader.dataset) / (c.batch_size * num_gpus)) - else: - batch_n_iter = int(len(data_loader.dataset) / c.batch_size) - end_time = time.time() - c_logger.print_train_start() - for num_iter, data in enumerate(data_loader): - start_time = time.time() - - # format data - c_G, y_G, c_D, y_D = format_data(data) - loader_time = time.time() - end_time - - global_step += 1 - - ############################## - # GENERATOR - ############################## - - # generator pass - y_hat = model_G(c_G) - y_hat_sub = None - y_G_sub = None - y_hat_vis = y_hat # for visualization - - # PQMF formatting - if y_hat.shape[1] > 1: - y_hat_sub = y_hat - y_hat = model_G.pqmf_synthesis(y_hat) - y_hat_vis = y_hat - y_G_sub = model_G.pqmf_analysis(y_G) - - scores_fake, feats_fake, feats_real = None, None, None - if global_step > c.steps_to_start_discriminator: - - # run D with or without cond. features - if len(signature(model_D.forward).parameters) == 2: - D_out_fake = model_D(y_hat, c_G) - else: - D_out_fake = model_D(y_hat) - D_out_real = None - - if c.use_feat_match_loss: - with torch.no_grad(): - D_out_real = model_D(y_G) - - # format D outputs - if isinstance(D_out_fake, tuple): - scores_fake, feats_fake = D_out_fake - if D_out_real is None: - feats_real = None - else: - _, feats_real = D_out_real - else: - scores_fake = D_out_fake - - # compute losses - loss_G_dict = criterion_G(y_hat, y_G, scores_fake, feats_fake, - feats_real, y_hat_sub, y_G_sub) - loss_G = loss_G_dict['G_loss'] - - # optimizer generator - optimizer_G.zero_grad() - loss_G.backward() - if c.gen_clip_grad > 0: - torch.nn.utils.clip_grad_norm_(model_G.parameters(), - c.gen_clip_grad) - optimizer_G.step() - if scheduler_G is not None: - scheduler_G.step() - - loss_dict = dict() - for key, value in loss_G_dict.items(): - if isinstance(value, int): - loss_dict[key] = value - else: - loss_dict[key] = value.item() - - ############################## - # DISCRIMINATOR - ############################## - if global_step >= c.steps_to_start_discriminator: - # discriminator pass - with torch.no_grad(): - y_hat = model_G(c_D) - - # PQMF formatting - if y_hat.shape[1] > 1: - y_hat = model_G.pqmf_synthesis(y_hat) - - # run D with or without cond. 
features - if len(signature(model_D.forward).parameters) == 2: - D_out_fake = model_D(y_hat.detach(), c_D) - D_out_real = model_D(y_D, c_D) - else: - D_out_fake = model_D(y_hat.detach()) - D_out_real = model_D(y_D) - - # format D outputs - if isinstance(D_out_fake, tuple): - scores_fake, feats_fake = D_out_fake - if D_out_real is None: - scores_real, feats_real = None, None - else: - scores_real, feats_real = D_out_real - else: - scores_fake = D_out_fake - scores_real = D_out_real - - # compute losses - loss_D_dict = criterion_D(scores_fake, scores_real) - loss_D = loss_D_dict['D_loss'] - - # optimizer discriminator - optimizer_D.zero_grad() - loss_D.backward() - if c.disc_clip_grad > 0: - torch.nn.utils.clip_grad_norm_(model_D.parameters(), - c.disc_clip_grad) - optimizer_D.step() - if scheduler_D is not None: - scheduler_D.step() - - for key, value in loss_D_dict.items(): - if isinstance(value, (int, float)): - loss_dict[key] = value - else: - loss_dict[key] = value.item() - - step_time = time.time() - start_time - epoch_time += step_time - - # get current learning rates - current_lr_G = list(optimizer_G.param_groups)[0]['lr'] - current_lr_D = list(optimizer_D.param_groups)[0]['lr'] - - # update avg stats - update_train_values = dict() - for key, value in loss_dict.items(): - update_train_values['avg_' + key] = value - update_train_values['avg_loader_time'] = loader_time - update_train_values['avg_step_time'] = step_time - keep_avg.update_values(update_train_values) - - # print training stats - if global_step % c.print_step == 0: - c_logger.print_train_step(batch_n_iter, num_iter, global_step, - step_time, loader_time, current_lr_G, - current_lr_D, loss_dict, - keep_avg.avg_values) - - # plot step stats - if global_step % 10 == 0: - iter_stats = { - "lr_G": current_lr_G, - "lr_D": current_lr_D, - "step_time": step_time - } - iter_stats.update(loss_dict) - tb_logger.tb_train_iter_stats(global_step, iter_stats) - - # save checkpoint - if global_step % c.save_step == 0: - if c.checkpoint: - # save model - save_checkpoint(model_G, - optimizer_G, - scheduler_G, - model_D, - optimizer_D, - scheduler_D, - global_step, - epoch, - OUT_PATH, - model_losses=loss_dict) - - # compute spectrograms - figures = plot_results(y_hat_vis, y_G, ap, global_step, - 'train') - tb_logger.tb_train_figures(global_step, figures) - - # Sample audio - sample_voice = y_hat_vis[0].squeeze(0).detach().cpu().numpy() - tb_logger.tb_train_audios(global_step, - {'train/audio': sample_voice}, - c.audio["sample_rate"]) - end_time = time.time() - - # print epoch stats - c_logger.print_train_epoch_end(global_step, epoch, epoch_time, keep_avg) - - # Plot Training Epoch Stats - epoch_stats = {"epoch_time": epoch_time} - epoch_stats.update(keep_avg.avg_values) - tb_logger.tb_train_epoch_stats(global_step, epoch_stats) - # TODO: plot model stats - # if c.tb_model_param_stats: - # tb_logger.tb_model_weights(model, global_step) - return keep_avg.avg_values, global_step - - -@torch.no_grad() -def evaluate(model_G, criterion_G, model_D, criterion_D, ap, global_step, epoch): - data_loader = setup_loader(ap, is_val=True, verbose=(epoch == 0)) - model_G.eval() - model_D.eval() - epoch_time = 0 - keep_avg = KeepAverage() - end_time = time.time() - c_logger.print_eval_start() - for num_iter, data in enumerate(data_loader): - start_time = time.time() - - # format data - c_G, y_G, _, _ = format_data(data) - loader_time = time.time() - end_time - - global_step += 1 - - ############################## - # GENERATOR - 
############################## - - # generator pass - y_hat = model_G(c_G) - y_hat_sub = None - y_G_sub = None - - # PQMF formatting - if y_hat.shape[1] > 1: - y_hat_sub = y_hat - y_hat = model_G.pqmf_synthesis(y_hat) - y_G_sub = model_G.pqmf_analysis(y_G) - - - scores_fake, feats_fake, feats_real = None, None, None - if global_step > c.steps_to_start_discriminator: - - if len(signature(model_D.forward).parameters) == 2: - D_out_fake = model_D(y_hat, c_G) - else: - D_out_fake = model_D(y_hat) - D_out_real = None - - if c.use_feat_match_loss: - with torch.no_grad(): - D_out_real = model_D(y_G) - - # format D outputs - if isinstance(D_out_fake, tuple): - scores_fake, feats_fake = D_out_fake - if D_out_real is None: - feats_real = None - else: - _, feats_real = D_out_real - else: - scores_fake = D_out_fake - feats_fake, feats_real = None, None - - # compute losses - loss_G_dict = criterion_G(y_hat, y_G, scores_fake, feats_fake, - feats_real, y_hat_sub, y_G_sub) - - loss_dict = dict() - for key, value in loss_G_dict.items(): - if isinstance(value, (int, float)): - loss_dict[key] = value - else: - loss_dict[key] = value.item() - - ############################## - # DISCRIMINATOR - ############################## - - if global_step >= c.steps_to_start_discriminator: - # discriminator pass - with torch.no_grad(): - y_hat = model_G(c_G) - - # PQMF formatting - if y_hat.shape[1] > 1: - y_hat = model_G.pqmf_synthesis(y_hat) - - # run D with or without cond. features - if len(signature(model_D.forward).parameters) == 2: - D_out_fake = model_D(y_hat.detach(), c_G) - D_out_real = model_D(y_G, c_G) - else: - D_out_fake = model_D(y_hat.detach()) - D_out_real = model_D(y_G) - - # format D outputs - if isinstance(D_out_fake, tuple): - scores_fake, feats_fake = D_out_fake - if D_out_real is None: - scores_real, feats_real = None, None - else: - scores_real, feats_real = D_out_real - else: - scores_fake = D_out_fake - scores_real = D_out_real - - # compute losses - loss_D_dict = criterion_D(scores_fake, scores_real) - - for key, value in loss_D_dict.items(): - if isinstance(value, (int, float)): - loss_dict[key] = value - else: - loss_dict[key] = value.item() - - - step_time = time.time() - start_time - epoch_time += step_time - - # update avg stats - update_eval_values = dict() - for key, value in loss_dict.items(): - update_eval_values['avg_' + key] = value - update_eval_values['avg_loader_time'] = loader_time - update_eval_values['avg_step_time'] = step_time - keep_avg.update_values(update_eval_values) - - # print eval stats - if c.print_eval: - c_logger.print_eval_step(num_iter, loss_dict, keep_avg.avg_values) - - # compute spectrograms - figures = plot_results(y_hat, y_G, ap, global_step, 'eval') - tb_logger.tb_eval_figures(global_step, figures) - - # Sample audio - sample_voice = y_hat[0].squeeze(0).detach().cpu().numpy() - tb_logger.tb_eval_audios(global_step, {'eval/audio': sample_voice}, - c.audio["sample_rate"]) - - # synthesize a full voice - data_loader.return_segments = False - - tb_logger.tb_eval_stats(global_step, keep_avg.avg_values) - - return keep_avg.avg_values - - -# FIXME: move args definition/parsing inside of main? 
-def main(args): # pylint: disable=redefined-outer-name - # pylint: disable=global-variable-undefined - global train_data, eval_data - print(f" > Loading wavs from: {c.data_path}") - if c.feature_path is not None: - print(f" > Loading features from: {c.feature_path}") - eval_data, train_data = load_wav_feat_data(c.data_path, c.feature_path, c.eval_split_size) - else: - eval_data, train_data = load_wav_data(c.data_path, c.eval_split_size) - - # setup audio processor - ap = AudioProcessor(**c.audio) - - # DISTRUBUTED - # if num_gpus > 1: - # init_distributed(args.rank, num_gpus, args.group_id, - # c.distributed["backend"], c.distributed["url"]) - - # setup models - model_gen = setup_generator(c) - model_disc = setup_discriminator(c) - - # setup optimizers - optimizer_gen = RAdam(model_gen.parameters(), lr=c.lr_gen, weight_decay=0) - optimizer_disc = RAdam(model_disc.parameters(), - lr=c.lr_disc, - weight_decay=0) - - # schedulers - scheduler_gen = None - scheduler_disc = None - if 'lr_scheduler_gen' in c: - scheduler_gen = getattr(torch.optim.lr_scheduler, c.lr_scheduler_gen) - scheduler_gen = scheduler_gen(optimizer_gen, **c.lr_scheduler_gen_params) - if 'lr_scheduler_disc' in c: - scheduler_disc = getattr(torch.optim.lr_scheduler, c.lr_scheduler_disc) - scheduler_disc = scheduler_disc(optimizer_disc, **c.lr_scheduler_disc_params) - - # setup criterion - criterion_gen = GeneratorLoss(c) - criterion_disc = DiscriminatorLoss(c) - - if args.restore_path: - checkpoint = torch.load(args.restore_path, map_location='cpu') - try: - print(" > Restoring Generator Model...") - model_gen.load_state_dict(checkpoint['model']) - print(" > Restoring Generator Optimizer...") - optimizer_gen.load_state_dict(checkpoint['optimizer']) - print(" > Restoring Discriminator Model...") - model_disc.load_state_dict(checkpoint['model_disc']) - print(" > Restoring Discriminator Optimizer...") - optimizer_disc.load_state_dict(checkpoint['optimizer_disc']) - if 'scheduler' in checkpoint: - print(" > Restoring Generator LR Scheduler...") - scheduler_gen.load_state_dict(checkpoint['scheduler']) - # NOTE: Not sure if necessary - scheduler_gen.optimizer = optimizer_gen - if 'scheduler_disc' in checkpoint: - print(" > Restoring Discriminator LR Scheduler...") - scheduler_disc.load_state_dict(checkpoint['scheduler_disc']) - scheduler_disc.optimizer = optimizer_disc - except RuntimeError: - # retore only matching layers. - print(" > Partial model initialization...") - model_dict = model_gen.state_dict() - model_dict = set_init_dict(model_dict, checkpoint['model'], c) - model_gen.load_state_dict(model_dict) - - model_dict = model_disc.state_dict() - model_dict = set_init_dict(model_dict, checkpoint['model_disc'], c) - model_disc.load_state_dict(model_dict) - del model_dict - - # reset lr if not countinuining training. 
- for group in optimizer_gen.param_groups: - group['lr'] = c.lr_gen - - for group in optimizer_disc.param_groups: - group['lr'] = c.lr_disc - - print(" > Model restored from step %d" % checkpoint['step'], - flush=True) - args.restore_step = checkpoint['step'] - else: - args.restore_step = 0 - - if use_cuda: - model_gen.cuda() - criterion_gen.cuda() - model_disc.cuda() - criterion_disc.cuda() - - # DISTRUBUTED - # if num_gpus > 1: - # model = apply_gradient_allreduce(model) - - num_params = count_parameters(model_gen) - print(" > Generator has {} parameters".format(num_params), flush=True) - num_params = count_parameters(model_disc) - print(" > Discriminator has {} parameters".format(num_params), flush=True) - - if 'best_loss' not in locals(): - best_loss = float('inf') - - global_step = args.restore_step - for epoch in range(0, c.epochs): - c_logger.print_epoch_start(epoch, c.epochs) - _, global_step = train(model_gen, criterion_gen, optimizer_gen, - model_disc, criterion_disc, optimizer_disc, - scheduler_gen, scheduler_disc, ap, global_step, - epoch) - eval_avg_loss_dict = evaluate(model_gen, criterion_gen, model_disc, criterion_disc, ap, - global_step, epoch) - c_logger.print_epoch_end(epoch, eval_avg_loss_dict) - target_loss = eval_avg_loss_dict[c.target_loss] - best_loss = save_best_model(target_loss, - best_loss, - model_gen, - optimizer_gen, - scheduler_gen, - model_disc, - optimizer_disc, - scheduler_disc, - global_step, - epoch, - OUT_PATH, - model_losses=eval_avg_loss_dict) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument( - '--continue_path', - type=str, - help= - 'Training output folder to continue training. Use to continue a training. If it is used, "config_path" is ignored.', - default='', - required='--config_path' not in sys.argv) - parser.add_argument( - '--restore_path', - type=str, - help='Model file to be restored. 
Use to finetune a model.', - default='') - parser.add_argument('--config_path', - type=str, - help='Path to config file for training.', - required='--continue_path' not in sys.argv) - parser.add_argument('--debug', - type=bool, - default=False, - help='Do not verify commit integrity to run training.') - - # DISTRUBUTED - parser.add_argument( - '--rank', - type=int, - default=0, - help='DISTRIBUTED: process rank for distributed training.') - parser.add_argument('--group_id', - type=str, - default="", - help='DISTRIBUTED: process group id.') - args = parser.parse_args() - - if args.continue_path != '': - args.output_path = args.continue_path - args.config_path = os.path.join(args.continue_path, 'config.json') - list_of_files = glob.glob( - args.continue_path + - "/*.pth.tar") # * means all if need specific format then *.csv - latest_model_file = max(list_of_files, key=os.path.getctime) - args.restore_path = latest_model_file - print(f" > Training continues for {args.restore_path}") - - # setup output paths and read configs - c = load_config(args.config_path) - check_config(c) - _ = os.path.dirname(os.path.realpath(__file__)) - - OUT_PATH = args.continue_path - if args.continue_path == '': - OUT_PATH = create_experiment_folder(c.output_path, c.run_name, - args.debug) - - AUDIO_PATH = os.path.join(OUT_PATH, 'test_audios') - - c_logger = ConsoleLogger() - - if args.rank == 0: - os.makedirs(AUDIO_PATH, exist_ok=True) - new_fields = {} - if args.restore_path: - new_fields["restore_path"] = args.restore_path - new_fields["github_branch"] = get_git_branch() - copy_config_file(args.config_path, - os.path.join(OUT_PATH, 'config.json'), new_fields) - os.chmod(AUDIO_PATH, 0o775) - os.chmod(OUT_PATH, 0o775) - - LOG_DIR = OUT_PATH - tb_logger = TensorboardLogger(LOG_DIR, model_name='VOCODER') - - # write model desc to tensorboard - tb_logger.tb_add_text('model-description', c['run_description'], 0) - - try: - main(args) - except KeyboardInterrupt: - remove_experiment_folder(OUT_PATH) - try: - sys.exit(0) - except SystemExit: - os._exit(0) # pylint: disable=protected-access - except Exception: # pylint: disable=broad-except - remove_experiment_folder(OUT_PATH) - traceback.print_exc() - sys.exit(1) diff --git a/vocoder/utils/__init__.py b/vocoder/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/vocoder/utils/console_logger.py b/vocoder/utils/console_logger.py deleted file mode 100644 index 6af0b823..00000000 --- a/vocoder/utils/console_logger.py +++ /dev/null @@ -1,97 +0,0 @@ -import datetime -from TTS.utils.io import AttrDict - - -tcolors = AttrDict({ - 'OKBLUE': '\033[94m', - 'HEADER': '\033[95m', - 'OKGREEN': '\033[92m', - 'WARNING': '\033[93m', - 'FAIL': '\033[91m', - 'ENDC': '\033[0m', - 'BOLD': '\033[1m', - 'UNDERLINE': '\033[4m' -}) - - -class ConsoleLogger(): - # TODO: merge this with TTS ConsoleLogger - def __init__(self): - # use these to compare values between iterations - self.old_train_loss_dict = None - self.old_epoch_loss_dict = None - self.old_eval_loss_dict = None - - # pylint: disable=no-self-use - def get_time(self): - now = datetime.datetime.now() - return now.strftime("%Y-%m-%d %H:%M:%S") - - def print_epoch_start(self, epoch, max_epoch): - print("\n{}{} > EPOCH: {}/{}{}".format(tcolors.UNDERLINE, tcolors.BOLD, - epoch, max_epoch, tcolors.ENDC), - flush=True) - - def print_train_start(self): - print(f"\n{tcolors.BOLD} > TRAINING ({self.get_time()}) {tcolors.ENDC}") - - def print_train_step(self, batch_steps, step, global_step, - step_time, loader_time, 
lrG, lrD, - loss_dict, avg_loss_dict): - indent = " | > " - print() - log_text = "{} --> STEP: {}/{} -- GLOBAL_STEP: {}{}\n".format( - tcolors.BOLD, step, batch_steps, global_step, tcolors.ENDC) - for key, value in loss_dict.items(): - # print the avg value if given - if f'avg_{key}' in avg_loss_dict.keys(): - log_text += "{}{}: {:.5f} ({:.5f})\n".format(indent, key, value, avg_loss_dict[f'avg_{key}']) - else: - log_text += "{}{}: {:.5f} \n".format(indent, key, value) - log_text += f"{indent}step_time: {step_time:.2f}\n{indent}loader_time: {loader_time:.2f}\n{indent}lrG: {lrG}\n{indent}lrD: {lrD}" - print(log_text, flush=True) - - # pylint: disable=unused-argument - def print_train_epoch_end(self, global_step, epoch, epoch_time, - print_dict): - indent = " | > " - log_text = f"\n{tcolors.BOLD} --> TRAIN PERFORMACE -- EPOCH TIME: {epoch_time:.2f} sec -- GLOBAL_STEP: {global_step}{tcolors.ENDC}\n" - for key, value in print_dict.items(): - log_text += "{}{}: {:.5f}\n".format(indent, key, value) - print(log_text, flush=True) - - def print_eval_start(self): - print(f"{tcolors.BOLD} > EVALUATION {tcolors.ENDC}\n") - - def print_eval_step(self, step, loss_dict, avg_loss_dict): - indent = " | > " - print() - log_text = f"{tcolors.BOLD} --> STEP: {step}{tcolors.ENDC}\n" - for key, value in loss_dict.items(): - # print the avg value if given - if f'avg_{key}' in avg_loss_dict.keys(): - log_text += "{}{}: {:.5f} ({:.5f})\n".format(indent, key, value, avg_loss_dict[f'avg_{key}']) - else: - log_text += "{}{}: {:.5f} \n".format(indent, key, value) - print(log_text, flush=True) - - def print_epoch_end(self, epoch, avg_loss_dict): - indent = " | > " - log_text = " {}--> EVAL PERFORMANCE{}\n".format( - tcolors.BOLD, tcolors.ENDC) - for key, value in avg_loss_dict.items(): - # print the avg value if given - color = '' - sign = '+' - diff = 0 - if self.old_eval_loss_dict is not None and key in self.old_eval_loss_dict: - diff = value - self.old_eval_loss_dict[key] - if diff < 0: - color = tcolors.OKGREEN - sign = '' - elif diff > 0: - color = tcolors.FAIL - sign = '+' - log_text += "{}{}:{} {:.5f} {}({}{:.5f})\n".format(indent, key, color, value, tcolors.ENDC, sign, diff) - self.old_eval_loss_dict = avg_loss_dict - print(log_text, flush=True) diff --git a/vocoder/utils/generic_utils.py b/vocoder/utils/generic_utils.py deleted file mode 100644 index 031d299d..00000000 --- a/vocoder/utils/generic_utils.py +++ /dev/null @@ -1,149 +0,0 @@ -import re -import importlib -import numpy as np -from matplotlib import pyplot as plt - -from TTS.utils.visual import plot_spectrogram - - -def plot_results(y_hat, y, ap, global_step, name_prefix): - """ Plot vocoder model results """ - - # select an instance from batch - y_hat = y_hat[0].squeeze(0).detach().cpu().numpy() - y = y[0].squeeze(0).detach().cpu().numpy() - - spec_fake = ap.melspectrogram(y_hat).T - spec_real = ap.melspectrogram(y).T - spec_diff = np.abs(spec_fake - spec_real) - - # plot figure and save it - fig_wave = plt.figure() - plt.subplot(2, 1, 1) - plt.plot(y) - plt.title("groundtruth speech") - plt.subplot(2, 1, 2) - plt.plot(y_hat) - plt.title(f"generated speech @ {global_step} steps") - plt.tight_layout() - plt.close() - - figures = { - name_prefix + "spectrogram/fake": plot_spectrogram(spec_fake), - name_prefix + "spectrogram/real": plot_spectrogram(spec_real), - name_prefix + "spectrogram/diff": plot_spectrogram(spec_diff), - name_prefix + "speech_comparison": fig_wave, - } - return figures - - -def to_camel(text): - text = text.capitalize() - return 
re.sub(r'(?!^)_([a-zA-Z])', lambda m: m.group(1).upper(), text) - - -def setup_generator(c): - print(" > Generator Model: {}".format(c.generator_model)) - MyModel = importlib.import_module('TTS.vocoder.models.' + - c.generator_model.lower()) - MyModel = getattr(MyModel, to_camel(c.generator_model)) - if c.generator_model in 'melgan_generator': - model = MyModel( - in_channels=c.audio['num_mels'], - out_channels=1, - proj_kernel=7, - base_channels=512, - upsample_factors=c.generator_model_params['upsample_factors'], - res_kernel=3, - num_res_blocks=c.generator_model_params['num_res_blocks']) - if c.generator_model in 'melgan_fb_generator': - pass - if c.generator_model in 'multiband_melgan_generator': - model = MyModel( - in_channels=c.audio['num_mels'], - out_channels=4, - proj_kernel=7, - base_channels=384, - upsample_factors=c.generator_model_params['upsample_factors'], - res_kernel=3, - num_res_blocks=c.generator_model_params['num_res_blocks']) - if c.generator_model in 'parallel_wavegan_generator': - model = MyModel( - in_channels=1, - out_channels=1, - kernel_size=3, - num_res_blocks=c.generator_model_params['num_res_blocks'], - stacks=c.generator_model_params['stacks'], - res_channels=64, - gate_channels=128, - skip_channels=64, - aux_channels=c.audio['num_mels'], - aux_context_window=c['conv_pad'], - dropout=0.0, - bias=True, - use_weight_norm=True, - upsample_conditional_features=True, - upsample_factors=c.generator_model_params['upsample_factors']) - return model - - -def setup_discriminator(c): - print(" > Discriminator Model: {}".format(c.discriminator_model)) - if 'parallel_wavegan' in c.discriminator_model: - MyModel = importlib.import_module('TTS.vocoder.models.parallel_wavegan_discriminator') - else: - MyModel = importlib.import_module('TTS.vocoder.models.' + - c.discriminator_model.lower()) - MyModel = getattr(MyModel, to_camel(c.discriminator_model.lower())) - if c.discriminator_model in 'random_window_discriminator': - model = MyModel( - cond_channels=c.audio['num_mels'], - hop_length=c.audio['hop_length'], - uncond_disc_donwsample_factors=c. - discriminator_model_params['uncond_disc_donwsample_factors'], - cond_disc_downsample_factors=c. - discriminator_model_params['cond_disc_downsample_factors'], - cond_disc_out_channels=c. - discriminator_model_params['cond_disc_out_channels'], - window_sizes=c.discriminator_model_params['window_sizes']) - if c.discriminator_model in 'melgan_multiscale_discriminator': - model = MyModel( - in_channels=1, - out_channels=1, - kernel_sizes=(5, 3), - base_channels=c.discriminator_model_params['base_channels'], - max_channels=c.discriminator_model_params['max_channels'], - downsample_factors=c. 
- discriminator_model_params['downsample_factors']) - if c.discriminator_model == 'residual_parallel_wavegan_discriminator': - model = MyModel( - in_channels=1, - out_channels=1, - kernel_size=3, - num_layers=c.discriminator_model_params['num_layers'], - stacks=c.discriminator_model_params['stacks'], - res_channels=64, - gate_channels=128, - skip_channels=64, - dropout=0.0, - bias=True, - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - ) - if c.discriminator_model == 'parallel_wavegan_discriminator': - model = MyModel( - in_channels=1, - out_channels=1, - kernel_size=3, - num_layers=c.discriminator_model_params['num_layers'], - conv_channels=64, - dilation_factor=1, - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - bias=True - ) - return model - - -def check_config(c): - pass \ No newline at end of file diff --git a/vocoder/utils/io.py b/vocoder/utils/io.py deleted file mode 100644 index 9d350238..00000000 --- a/vocoder/utils/io.py +++ /dev/null @@ -1,63 +0,0 @@ -import os -import torch -import datetime - - -def save_model(model, optimizer, scheduler, model_disc, optimizer_disc, - scheduler_disc, current_step, epoch, output_path, **kwargs): - model_state = model.state_dict() - model_disc_state = model_disc.state_dict()\ - if model_disc is not None else None - optimizer_state = optimizer.state_dict()\ - if optimizer is not None else None - optimizer_disc_state = optimizer_disc.state_dict()\ - if optimizer_disc is not None else None - scheduler_state = scheduler.state_dict()\ - if scheduler is not None else None - scheduler_disc_state = scheduler_disc.state_dict()\ - if scheduler_disc is not None else None - state = { - 'model': model_state, - 'optimizer': optimizer_state, - 'scheduler': scheduler_state, - 'model_disc': model_disc_state, - 'optimizer_disc': optimizer_disc_state, - 'scheduler_disc': scheduler_disc_state, - 'step': current_step, - 'epoch': epoch, - 'date': datetime.date.today().strftime("%B %d, %Y"), - } - state.update(kwargs) - torch.save(state, output_path) - - -def save_checkpoint(model, optimizer, scheduler, model_disc, optimizer_disc, - scheduler_disc, current_step, epoch, output_folder, - **kwargs): - file_name = 'checkpoint_{}.pth.tar'.format(current_step) - checkpoint_path = os.path.join(output_folder, file_name) - print(" > CHECKPOINT : {}".format(checkpoint_path)) - save_model(model, optimizer, scheduler, model_disc, optimizer_disc, - scheduler_disc, current_step, epoch, checkpoint_path, **kwargs) - - -def save_best_model(target_loss, best_loss, model, optimizer, scheduler, - model_disc, optimizer_disc, scheduler_disc, current_step, - epoch, output_folder, **kwargs): - if target_loss < best_loss: - file_name = 'best_model.pth.tar' - checkpoint_path = os.path.join(output_folder, file_name) - print(" > BEST MODEL : {}".format(checkpoint_path)) - save_model(model, - optimizer, - scheduler, - model_disc, - optimizer_disc, - scheduler_disc, - current_step, - epoch, - checkpoint_path, - model_loss=target_loss, - **kwargs) - best_loss = target_loss - return best_loss \ No newline at end of file
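
For context on the checkpoint format threaded through the removed vocoder code, here is a minimal, self-contained sketch of the save/restore round trip implied by the deleted vocoder/utils/io.py and the restore block in the deleted vocoder/train.py. This is not the project's API: the helper names, the nn.Linear stand-in networks, and the Adam optimizers below are illustrative assumptions; only the state-dict keys ('model', 'optimizer', 'model_disc', 'optimizer_disc', 'step', 'epoch', 'date') come from the removed code.

# Minimal sketch, not the project's API: mirrors the state-dict layout of the
# removed save_model()/save_checkpoint() and the restore logic in the removed
# train.py. Helper names and the tiny stand-in networks are assumptions.
import datetime
import torch
import torch.nn as nn


def save_gan_checkpoint(path, model_G, optimizer_G, model_D, optimizer_D, step, epoch):
    # Same top-level keys as the removed save_model(); schedulers omitted for brevity.
    state = {
        'model': model_G.state_dict(),
        'optimizer': optimizer_G.state_dict(),
        'model_disc': model_D.state_dict(),
        'optimizer_disc': optimizer_D.state_dict(),
        'step': step,
        'epoch': epoch,
        'date': datetime.date.today().strftime("%B %d, %Y"),
    }
    torch.save(state, path)


def restore_gan_checkpoint(path, model_G, optimizer_G, model_D, optimizer_D):
    # Mirrors the happy path of the restore block in the removed train.py:
    # both networks and both optimizers come back from one file.
    checkpoint = torch.load(path, map_location='cpu')
    model_G.load_state_dict(checkpoint['model'])
    optimizer_G.load_state_dict(checkpoint['optimizer'])
    model_D.load_state_dict(checkpoint['model_disc'])
    optimizer_D.load_state_dict(checkpoint['optimizer_disc'])
    return checkpoint['step']


if __name__ == '__main__':
    # Tiny stand-in networks so the round trip runs end to end.
    model_G, model_D = nn.Linear(4, 4), nn.Linear(4, 1)
    opt_G = torch.optim.Adam(model_G.parameters())
    opt_D = torch.optim.Adam(model_D.parameters())
    save_gan_checkpoint('checkpoint_10.pth.tar', model_G, opt_G, model_D, opt_D, step=10, epoch=1)
    print(restore_gan_checkpoint('checkpoint_10.pth.tar', model_G, opt_G, model_D, opt_D))  # -> 10

Keeping generator and discriminator state in a single file is what allowed the deleted train.py to resume both networks (and, when present, their schedulers) from one --restore_path or from the latest checkpoint found under --continue_path.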