From fb0acd40a2485d9485120349a01485e06e04f52e Mon Sep 17 00:00:00 2001 From: TianYuan Date: Mon, 24 Jan 2022 10:08:16 +0000 Subject: [PATCH 01/22] add wavernn, test=tts --- examples/csmsc/voc6/conf/default.yaml | 68 ++ examples/csmsc/voc6/local/preprocess.sh | 15 + examples/csmsc/voc6/local/synthesize.sh | 14 + examples/csmsc/voc6/local/train.sh | 9 + examples/csmsc/voc6/path.sh | 13 + examples/csmsc/voc6/run.sh | 33 + paddlespeech/t2s/datasets/__init__.py | 1 + paddlespeech/t2s/datasets/csmsc.py | 56 ++ paddlespeech/t2s/datasets/vocoder_batch_fn.py | 125 ++++ paddlespeech/t2s/exps/wavernn/__init__.py | 13 + paddlespeech/t2s/exps/wavernn/preprocess.py | 157 +++++ paddlespeech/t2s/exps/wavernn/synthesize.py | 89 +++ paddlespeech/t2s/exps/wavernn/train.py | 192 ++++++ paddlespeech/t2s/models/__init__.py | 1 + paddlespeech/t2s/models/wavernn/__init__.py | 15 + paddlespeech/t2s/models/wavernn/wavernn.py | 592 ++++++++++++++++++ .../t2s/models/wavernn/wavernn_updater.py | 203 ++++++ paddlespeech/t2s/modules/losses.py | 140 +++++ 18 files changed, 1736 insertions(+) create mode 100644 examples/csmsc/voc6/conf/default.yaml create mode 100755 examples/csmsc/voc6/local/preprocess.sh create mode 100755 examples/csmsc/voc6/local/synthesize.sh create mode 100755 examples/csmsc/voc6/local/train.sh create mode 100755 examples/csmsc/voc6/path.sh create mode 100755 examples/csmsc/voc6/run.sh create mode 100644 paddlespeech/t2s/datasets/csmsc.py create mode 100644 paddlespeech/t2s/exps/wavernn/__init__.py create mode 100644 paddlespeech/t2s/exps/wavernn/preprocess.py create mode 100644 paddlespeech/t2s/exps/wavernn/synthesize.py create mode 100644 paddlespeech/t2s/exps/wavernn/train.py create mode 100644 paddlespeech/t2s/models/wavernn/__init__.py create mode 100644 paddlespeech/t2s/models/wavernn/wavernn.py create mode 100644 paddlespeech/t2s/models/wavernn/wavernn_updater.py diff --git a/examples/csmsc/voc6/conf/default.yaml b/examples/csmsc/voc6/conf/default.yaml new file mode 100644 index 00000000..2c838fb9 --- /dev/null +++ b/examples/csmsc/voc6/conf/default.yaml @@ -0,0 +1,68 @@ + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +fs: 24000 # Sampling rate. +n_fft: 2048 # FFT size (samples). +n_shift: 300 # Hop size (samples). 12.5ms +win_length: 1200 # Window length (samples). 50ms + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +n_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. (Hz) +fmax: 7600 # Maximum frequency in mel basis calculation. (Hz) +mu_law: True # Recommended to suppress noise if using raw bitsexit() +peak_norm: True + + +########################################################### +# MODEL SETTING # +########################################################### +model: + rnn_dims: 512 # Hidden dims of RNN Layers. + fc_dims: 512 + bits: 9 # Bit depth of signal + aux_context_window: 2 + aux_channels: 80 # Number of channels for auxiliary feature conv. + # Must be the same as num_mels. + upsample_scales: [4, 5, 3, 5] # Upsampling scales. 
Product of these must be the same as hop size, same with pwgan here + compute_dims: 128 + res_out_dims: 128 + res_blocks: 10 + mode: RAW # either 'RAW' (softmax on raw bits) or 'MOL' (sample from a mixture of logistics) +inference: + gen_batched: True # whether to generate samples in batched mode + target: 12000 # target number of samples to be generated in each batch entry + overlap: 600 # number of samples for crossfading between batches + + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 64 # Batch size. +batch_max_steps: 4500 # Length of each audio in batch. Make sure it is divisible by hop_size. +num_workers: 2 # Number of workers in DataLoader. +valid_size: 50 + +########################################################### +# OPTIMIZER SETTING # +########################################################### +grad_clip: 4.0 +learning_rate: 1.0e-4 + + +########################################################### +# INTERVAL SETTING # +########################################################### + +train_max_steps: 400000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 1000 # Interval steps to evaluate the network. +gen_eval_samples_interval_steps: 5000 # Interval steps to generate validation samples. +generate_num: 5 # number of samples to generate at each checkpoint + +########################################################### +# OTHER SETTING # +########################################################### +num_snapshots: 10 # max number of snapshots to keep while training +seed: 42 # random seed for paddle, random, and np.random diff --git a/examples/csmsc/voc6/local/preprocess.sh b/examples/csmsc/voc6/local/preprocess.sh new file mode 100755 index 00000000..064aea55 --- /dev/null +++ b/examples/csmsc/voc6/local/preprocess.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +stage=0 +stop_stage=100 + +config_path=$1 + +if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then + python3 ${BIN_DIR}/preprocess.py \ + --input=~/datasets/BZNSYP/ \ + --output=dump \ + --dataset=csmsc \ + --config=${config_path} \ + --num-cpu=20 +fi diff --git a/examples/csmsc/voc6/local/synthesize.sh b/examples/csmsc/voc6/local/synthesize.sh new file mode 100755 index 00000000..876c8444 --- /dev/null +++ b/examples/csmsc/voc6/local/synthesize.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +config_path=$1 +train_output_path=$2 +ckpt_name=$3 +test_input=$4 + +FLAGS_allocator_strategy=naive_best_fit \ +FLAGS_fraction_of_gpu_memory_to_use=0.01 \ +python3 ${BIN_DIR}/synthesize.py \ + --config=${config_path} \ + --checkpoint=${train_output_path}/checkpoints/${ckpt_name} \ + --input=${test_input} \ + --output-dir=${train_output_path}/test diff --git a/examples/csmsc/voc6/local/train.sh b/examples/csmsc/voc6/local/train.sh new file mode 100755 index 00000000..900450cd --- /dev/null +++ b/examples/csmsc/voc6/local/train.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +config_path=$1 +train_output_path=$2 +python ${BIN_DIR}/train.py \ + --config=${config_path} \ + --data=dump/ \ + --output-dir=${train_output_path} \ + --ngpu=1 diff --git a/examples/csmsc/voc6/path.sh b/examples/csmsc/voc6/path.sh new file mode 100755 index 00000000..b0c98584 --- /dev/null +++ b/examples/csmsc/voc6/path.sh @@ -0,0 +1,13 @@ +#!/bin/bash +export MAIN_ROOT=`realpath ${PWD}/../../../` + +export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH} +export LC_ALL=C + +export PYTHONDONTWRITEBYTECODE=1 +# Use UTF-8 in Python to
avoid UnicodeDecodeError when LC_ALL=C +export PYTHONIOENCODING=UTF-8 +export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH} + +MODEL=wavernn +export BIN_DIR=${MAIN_ROOT}/paddlespeech/t2s/exps/${MODEL} \ No newline at end of file diff --git a/examples/csmsc/voc6/run.sh b/examples/csmsc/voc6/run.sh new file mode 100755 index 00000000..bd32e3d2 --- /dev/null +++ b/examples/csmsc/voc6/run.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +set -e +source path.sh + +gpus=0,1 +stage=0 +stop_stage=100 + +conf_path=conf/default.yaml +train_output_path=exp/default +test_input=dump/mel_test +ckpt_name=snapshot_iter_100000.pdz + +source ${MAIN_ROOT}/utils/parse_options.sh || exit 1 + +if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then + # prepare data + ./local/preprocess.sh ${conf_path} || exit -1 +fi + +if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then + # prepare data + CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} || exit -1 +fi + +if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then + # copy some test mels from dump + mkdir -p ${test_input} + cp -r dump/mel/00995*.npy ${test_input} + # synthesize + CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} ${test_input}|| exit -1 +fi diff --git a/paddlespeech/t2s/datasets/__init__.py b/paddlespeech/t2s/datasets/__init__.py index fc64a82f..acaf808a 100644 --- a/paddlespeech/t2s/datasets/__init__.py +++ b/paddlespeech/t2s/datasets/__init__.py @@ -12,4 +12,5 @@ # See the License for the specific language governing permissions and # limitations under the License. from .common import * +from .csmsc import * from .ljspeech import * diff --git a/paddlespeech/t2s/datasets/csmsc.py b/paddlespeech/t2s/datasets/csmsc.py new file mode 100644 index 00000000..9928a73a --- /dev/null +++ b/paddlespeech/t2s/datasets/csmsc.py @@ -0,0 +1,56 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
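A quick sanity check for the upsample_scales comment in conf/default.yaml above: the product of the scales must equal n_shift (the hop size). A minimal sketch using the default values from that config:

import numpy as np

upsample_scales = [4, 5, 3, 5]   # from conf/default.yaml
n_shift = 300                    # hop size from the same config
assert int(np.prod(upsample_scales)) == n_shift   # 4 * 5 * 3 * 5 == 300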
+import os +from pathlib import Path + +from paddle.io import Dataset + +__all__ = ["CSMSCMetaData"] + + +class CSMSCMetaData(Dataset): + def __init__(self, root): + """ + :param root: the path of baker dataset + """ + self.root = os.path.abspath(root) + records = [] + index = 1 + self.meta_info = ["file_path", "text", "pinyin"] + + metadata_path = os.path.join(root, "ProsodyLabeling/000001-010000.txt") + wav_dirs = os.path.join(self.root, "Wave") + with open(metadata_path, 'r', encoding='utf-8') as f: + while True: + line1 = f.readline().strip() + if not line1: + break + line2 = f.readline().strip() + strs = line1.split() + wav_fname = line1.split()[0].strip() + '.wav' + wav_filepath = os.path.join(wav_dirs, wav_fname) + text = strs[1].strip() + pinyin = line2 + records.append([wav_filepath, text, pinyin]) + + self.records = records + + def __getitem__(self, i): + return self.records[i] + + def __len__(self): + return len(self.records) + + def get_meta_info(self): + return self.meta_info diff --git a/paddlespeech/t2s/datasets/vocoder_batch_fn.py b/paddlespeech/t2s/datasets/vocoder_batch_fn.py index 2e4f740f..496bf902 100644 --- a/paddlespeech/t2s/datasets/vocoder_batch_fn.py +++ b/paddlespeech/t2s/datasets/vocoder_batch_fn.py @@ -11,8 +11,133 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import math +from pathlib import Path + import numpy as np import paddle +from paddle.io import Dataset + + +def label_2_float(x, bits): + return 2 * x / (2**bits - 1.) - 1. + + +def float_2_label(x, bits): + assert abs(x).max() <= 1.0 + x = (x + 1.) * (2**bits - 1) / 2 + return x.clip(0, 2**bits - 1) + + +def encode_mu_law(x, mu): + mu = mu - 1 + fx = np.sign(x) * np.log(1 + mu * np.abs(x)) / np.log(1 + mu) + return np.floor((fx + 1) / 2 * mu + 0.5) + + +def decode_mu_law(y, mu, from_labels=True): + # TODO: get rid of log2 - makes no sense + if from_labels: + y = label_2_float(y, math.log2(mu)) + mu = mu - 1 + x = paddle.sign(y) / mu * ((1 + mu)**paddle.abs(y) - 1) + return x + + +class WaveRNNDataset(Dataset): + """A simple dataset adaptor for the processed ljspeech dataset.""" + + def __init__(self, root): + self.root = Path(root).expanduser() + + records = [] + + with open(self.root / "metadata.csv", 'r') as rf: + + for line in rf: + name = line.split("\t")[0] + mel_path = str(self.root / "mel" / (str(name) + ".npy")) + wav_path = str(self.root / "wav" / (str(name) + ".npy")) + records.append((mel_path, wav_path)) + + self.records = records + + def __getitem__(self, i): + mel_name, wav_name = self.records[i] + mel = np.load(mel_name) + wav = np.load(wav_name) + return mel, wav + + def __len__(self): + return len(self.records) + + +class WaveRNNClip(object): + def __init__(self, + mode: str='RAW', + batch_max_steps: int=4500, + hop_size: int=300, + aux_context_window: int=2, + bits: int=9): + self.mode = mode + self.mel_win = batch_max_steps // hop_size + 2 * aux_context_window + self.batch_max_steps = batch_max_steps + self.hop_size = hop_size + self.aux_context_window = aux_context_window + if self.mode == 'MOL': + self.bits = 16 + else: + self.bits = bits + + def __call__(self, batch): + # batch: [mel, quant] + # voc_pad = 2 this will pad the input so that the resnet can 'see' wider than input length + # max_offsets = n_frames - 2 - (mel_win + 2 * hp.voc_pad) = n_frames - 15 + max_offsets = [ + x[0].shape[-1] - 2 - (self.mel_win + 2 * self.aux_context_window) + for 
x in batch + ] + # the slice point of mel selecting randomly + mel_offsets = [np.random.randint(0, offset) for offset in max_offsets] + # the slice point of wav selecting randomly, which is behind 2(=pad) frames + sig_offsets = [(offset + self.aux_context_window) * self.hop_size + for offset in mel_offsets] + # mels.sape[1] = voc_seq_len // hop_length + 2 * voc_pad + mels = [ + x[0][:, mel_offsets[i]:mel_offsets[i] + self.mel_win] + for i, x in enumerate(batch) + ] + # label.shape[1] = voc_seq_len + 1 + labels = [ + x[1][sig_offsets[i]:sig_offsets[i] + self.batch_max_steps + 1] + for i, x in enumerate(batch) + ] + + mels = np.stack(mels).astype(np.float32) + labels = np.stack(labels).astype(np.int64) + + mels = paddle.to_tensor(mels) + labels = paddle.to_tensor(labels, dtype='int64') + + # x is input, y is label + x = labels[:, :self.batch_max_steps] + y = labels[:, 1:] + ''' + mode = RAW: + mu_law = True: + quant: bits = 9 0, 1, 2, ..., 509, 510, 511 int + mu_law = False + quant bits = 9 [0, 511] float + mode = MOL: + quant: bits = 16 [0. 65536] float + ''' + # x should be normalizes in.[0, 1] in RAW mode + x = label_2_float(paddle.cast(x, dtype='float32'), self.bits) + # y should be normalizes in.[0, 1] in MOL mode + if self.mode == 'MOL': + y = label_2_float(paddle.cast(y, dtype='float32'), self.bits) + + return x, y, mels class Clip(object): diff --git a/paddlespeech/t2s/exps/wavernn/__init__.py b/paddlespeech/t2s/exps/wavernn/__init__.py new file mode 100644 index 00000000..abf198b9 --- /dev/null +++ b/paddlespeech/t2s/exps/wavernn/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/paddlespeech/t2s/exps/wavernn/preprocess.py b/paddlespeech/t2s/exps/wavernn/preprocess.py new file mode 100644 index 00000000..a26c6702 --- /dev/null +++ b/paddlespeech/t2s/exps/wavernn/preprocess.py @@ -0,0 +1,157 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
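Before the preprocessing entry point, a short round-trip sketch of the quantization helpers added to vocoder_batch_fn.py above (encode_mu_law / decode_mu_law). It assumes the default RAW mode with bits=9 and uses a made-up sine waveform; it only illustrates how the integer labels map back to audio.

import numpy as np
import paddle
from paddlespeech.t2s.datasets.vocoder_batch_fn import decode_mu_law
from paddlespeech.t2s.datasets.vocoder_batch_fn import encode_mu_law

bits = 9
mu = 2**bits                                                        # 512 classes in RAW mode
wav = np.sin(np.linspace(0, 8 * np.pi, 2400)).astype(np.float32)    # toy waveform in [-1, 1]

labels = encode_mu_law(wav, mu=mu)                                  # integer labels in [0, mu - 1]
recon = decode_mu_law(paddle.to_tensor(labels, dtype='float32'), mu=mu, from_labels=True)
print(float(np.abs(recon.numpy() - wav).max()))                     # small error from the 9-bit mu-law grid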
+import argparse +import os +from multiprocessing import cpu_count +from multiprocessing import Pool +from pathlib import Path + +import librosa +import numpy as np +import pandas as pd +import tqdm +import yaml +from yacs.config import CfgNode + +from paddlespeech.t2s.data.get_feats import LogMelFBank +from paddlespeech.t2s.datasets import CSMSCMetaData +from paddlespeech.t2s.datasets import LJSpeechMetaData +from paddlespeech.t2s.datasets.vocoder_batch_fn import encode_mu_law +from paddlespeech.t2s.datasets.vocoder_batch_fn import float_2_label + + +class Transform(object): + def __init__(self, output_dir: Path, config): + self.fs = config.fs + self.peak_norm = config.peak_norm + self.bits = config.model.bits + self.mode = config.model.mode + self.mu_law = config.mu_law + + self.wav_dir = output_dir / "wav" + self.mel_dir = output_dir / "mel" + self.wav_dir.mkdir(exist_ok=True) + self.mel_dir.mkdir(exist_ok=True) + + self.mel_extractor = LogMelFBank( + sr=config.fs, + n_fft=config.n_fft, + hop_length=config.n_shift, + win_length=config.win_length, + window=config.window, + n_mels=config.n_mels, + fmin=config.fmin, + fmax=config.fmax) + + if self.mode != 'RAW' and self.mode != 'MOL': + raise RuntimeError('Unknown mode value - ', self.mode) + + def __call__(self, example): + wav_path, _, _ = example + + base_name = os.path.splitext(os.path.basename(wav_path))[0] + # print("self.sample_rate:",self.sample_rate) + wav, _ = librosa.load(wav_path, sr=self.fs) + peak = np.abs(wav).max() + if self.peak_norm or peak > 1.0: + wav /= peak + + mel = self.mel_extractor.get_log_mel_fbank(wav).T + if self.mode == 'RAW': + if self.mu_law: + quant = encode_mu_law(wav, mu=2**self.bits) + else: + quant = float_2_label(wav, bits=self.bits) + elif self.mode == 'MOL': + quant = float_2_label(wav, bits=16) + + mel = mel.astype(np.float32) + audio = quant.astype(np.int64) + + np.save(str(self.wav_dir / base_name), audio) + np.save(str(self.mel_dir / base_name), mel) + + return base_name, mel.shape[-1], audio.shape[-1] + + +def create_dataset(config, + input_dir, + output_dir, + nprocs: int=1, + dataset_type: str="ljspeech"): + input_dir = Path(input_dir).expanduser() + ''' + LJSpeechMetaData.records: [filename, normalized text, speaker name(ljspeech)] + CSMSCMetaData.records: [filename, normalized text, pinyin] + ''' + if dataset_type == 'ljspeech': + dataset = LJSpeechMetaData(input_dir) + else: + dataset = CSMSCMetaData(input_dir) + output_dir = Path(output_dir).expanduser() + output_dir.mkdir(exist_ok=True) + + transform = Transform(output_dir, config) + + file_names = [] + + pool = Pool(processes=nprocs) + + for info in tqdm.tqdm(pool.imap(transform, dataset), total=len(dataset)): + base_name, mel_len, audio_len = info + file_names.append((base_name, mel_len, audio_len)) + + meta_data = pd.DataFrame.from_records(file_names) + meta_data.to_csv( + str(output_dir / "metadata.csv"), sep="\t", index=None, header=None) + print("saved meta data in to {}".format( + os.path.join(output_dir, "metadata.csv"))) + + print("Done!") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="create dataset") + parser.add_argument( + "--config", type=str, help="config file to overwrite default config.") + + parser.add_argument( + "--input", type=str, help="path of the ljspeech dataset") + parser.add_argument( + "--output", type=str, help="path to save output dataset") + parser.add_argument( + "--num-cpu", + type=int, + default=cpu_count() // 2, + help="number of process.") + parser.add_argument( + 
"--dataset", + type=str, + default="ljspeech", + help="The dataset to preprocess, ljspeech or csmsc") + + args = parser.parse_args() + + with open(args.config, 'rt') as f: + config = CfgNode(yaml.safe_load(f)) + + if args.dataset != "ljspeech" and args.dataset != "csmsc": + raise RuntimeError('Unknown dataset - ', args.dataset) + + create_dataset( + config, + input_dir=args.input, + output_dir=args.output, + nprocs=args.num_cpu, + dataset_type=args.dataset) diff --git a/paddlespeech/t2s/exps/wavernn/synthesize.py b/paddlespeech/t2s/exps/wavernn/synthesize.py new file mode 100644 index 00000000..e08c52b6 --- /dev/null +++ b/paddlespeech/t2s/exps/wavernn/synthesize.py @@ -0,0 +1,89 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import argparse +import os +from pathlib import Path + +import numpy as np +import paddle +import soundfile as sf +import yaml +from paddle import distributed as dist +from yacs.config import CfgNode + +from paddlespeech.t2s.models.wavernn import WaveRNN + + +def main(): + parser = argparse.ArgumentParser(description="Synthesize with WaveRNN.") + + parser.add_argument("--config", type=str, help="GANVocoder config file.") + parser.add_argument("--checkpoint", type=str, help="snapshot to load.") + parser.add_argument( + "--input", + type=str, + help="path of directory containing mel spectrogram (in .npy format)") + parser.add_argument("--output-dir", type=str, help="output dir.") + parser.add_argument( + "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") + + args = parser.parse_args() + + with open(args.config) as f: + config = CfgNode(yaml.safe_load(f)) + + print("========Args========") + print(yaml.safe_dump(vars(args))) + print("========Config========") + print(config) + print( + f"master see the word size: {dist.get_world_size()}, from pid: {os.getpid()}" + ) + + if args.ngpu == 0: + paddle.set_device("cpu") + elif args.ngpu > 0: + paddle.set_device("gpu") + else: + print("ngpu should >= 0 !") + + model = WaveRNN( + hop_length=config.n_shift, sample_rate=config.fs, **config["model"]) + state_dict = paddle.load(args.checkpoint) + model.set_state_dict(state_dict["main_params"]) + + model.eval() + + mel_dir = Path(args.input).expanduser() + output_dir = Path(args.output_dir).expanduser() + output_dir.mkdir(parents=True, exist_ok=True) + for file_path in sorted(mel_dir.iterdir()): + mel = np.load(str(file_path)) + mel = paddle.to_tensor(mel) + mel = mel.transpose([1, 0]) + # input shape is (T', C_aux) + audio = model.generate( + c=mel, + batched=config.inference.gen_batched, + target=config.inference.target, + overlap=config.inference.overlap, + mu_law=config.mu_law, + gen_display=True) + audio_path = output_dir / (os.path.splitext(file_path.name)[0] + ".wav") + sf.write(audio_path, audio.numpy(), samplerate=config.fs) + print("[synthesize] {} -> {}".format(file_path, audio_path)) + + +if __name__ == "__main__": + main() diff --git a/paddlespeech/t2s/exps/wavernn/train.py 
b/paddlespeech/t2s/exps/wavernn/train.py new file mode 100644 index 00000000..d7bfc49b --- /dev/null +++ b/paddlespeech/t2s/exps/wavernn/train.py @@ -0,0 +1,192 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import argparse +import os +import shutil +from pathlib import Path + +import paddle +import yaml +from paddle import DataParallel +from paddle import distributed as dist +from paddle.io import DataLoader +from paddle.io import DistributedBatchSampler +from paddle.optimizer import Adam +from yacs.config import CfgNode + +from paddlespeech.t2s.data import dataset +from paddlespeech.t2s.datasets.vocoder_batch_fn import WaveRNNClip +from paddlespeech.t2s.datasets.vocoder_batch_fn import WaveRNNDataset +from paddlespeech.t2s.models.wavernn import WaveRNN +from paddlespeech.t2s.models.wavernn import WaveRNNEvaluator +from paddlespeech.t2s.models.wavernn import WaveRNNUpdater +from paddlespeech.t2s.modules.losses import discretized_mix_logistic_loss +from paddlespeech.t2s.training.extensions.snapshot import Snapshot +from paddlespeech.t2s.training.extensions.visualizer import VisualDL +from paddlespeech.t2s.training.seeding import seed_everything +from paddlespeech.t2s.training.trainer import Trainer + + +def train_sp(args, config): + # decides device type and whether to run in parallel + # setup running environment correctly + world_size = paddle.distributed.get_world_size() + if (not paddle.is_compiled_with_cuda()) or args.ngpu == 0: + paddle.set_device("cpu") + else: + paddle.set_device("gpu") + if world_size > 1: + paddle.distributed.init_parallel_env() + + # set the random seed, it is a must for multiprocess training + seed_everything(config.seed) + + print( + f"rank: {dist.get_rank()}, pid: {os.getpid()}, parent_pid: {os.getppid()}", + ) + + wavernn_dataset = WaveRNNDataset(args.data) + + train_dataset, dev_dataset = dataset.split( + wavernn_dataset, len(wavernn_dataset) - config.valid_size) + + batch_fn = WaveRNNClip( + mode=config.model.mode, + aux_context_window=config.model.aux_context_window, + hop_size=config.n_shift, + batch_max_steps=config.batch_max_steps, + bits=config.model.bits) + + # collate function and dataloader + train_sampler = DistributedBatchSampler( + train_dataset, + batch_size=config.batch_size, + shuffle=True, + drop_last=True) + dev_sampler = DistributedBatchSampler( + dev_dataset, + batch_size=config.batch_size, + shuffle=False, + drop_last=False) + print("samplers done!") + + train_dataloader = DataLoader( + train_dataset, + batch_sampler=train_sampler, + collate_fn=batch_fn, + num_workers=config.num_workers) + + dev_dataloader = DataLoader( + dev_dataset, + collate_fn=batch_fn, + batch_sampler=dev_sampler, + num_workers=config.num_workers) + valid_generate_loader = DataLoader(dev_dataset, batch_size=1) + print("dataloaders done!") + + model = WaveRNN( + hop_length=config.n_shift, sample_rate=config.fs, **config["model"]) + if world_size > 1: + model = DataParallel(model) + print("model done!") 
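    # A quick, illustrative shape check (not part of the patch), assuming the default
    # config: WaveRNNClip builds windows with
    #   mel_win = batch_max_steps // n_shift + 2 * aux_context_window = 4500 // 300 + 4 = 19
    # so each batch is
    #   mels: [batch_size, n_mels, mel_win] = [64, 80, 19]
    #   x, y: [batch_size, batch_max_steps] = [64, 4500]
    # which matches the requirement stated in WaveRNN.forward:
    #   T = (T' - 2 * aux_context_window) * hop_length = (19 - 4) * 300 = 4500.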
+ + if config.model.mode == 'RAW': + criterion = paddle.nn.CrossEntropyLoss(axis=1) + elif config.model.mode == 'MOL': + criterion = discretized_mix_logistic_loss + else: + criterion = None + RuntimeError('Unknown model mode value - ', config.model.mode) + print("criterions done!") + clip = paddle.nn.ClipGradByGlobalNorm(config.grad_clip) + optimizer = Adam( + parameters=model.parameters(), + learning_rate=config.learning_rate, + grad_clip=clip) + + print("optimizer done!") + + output_dir = Path(args.output_dir) + output_dir.mkdir(parents=True, exist_ok=True) + if dist.get_rank() == 0: + config_name = args.config.split("/")[-1] + # copy conf to output_dir + shutil.copyfile(args.config, output_dir / config_name) + + updater = WaveRNNUpdater( + model=model, + optimizer=optimizer, + criterion=criterion, + dataloader=train_dataloader, + output_dir=output_dir, + mode=config.model.mode) + + evaluator = WaveRNNEvaluator( + model=model, + dataloader=dev_dataloader, + criterion=criterion, + output_dir=output_dir, + valid_generate_loader=valid_generate_loader, + config=config) + + trainer = Trainer( + updater, + stop_trigger=(config.train_max_steps, "iteration"), + out=output_dir) + + if dist.get_rank() == 0: + trainer.extend( + evaluator, trigger=(config.eval_interval_steps, 'iteration')) + trainer.extend(VisualDL(output_dir), trigger=(1, 'iteration')) + trainer.extend( + Snapshot(max_size=config.num_snapshots), + trigger=(config.save_interval_steps, 'iteration')) + + print("Trainer Done!") + trainer.run() + + +def main(): + # parse args and config and redirect to train_sp + + parser = argparse.ArgumentParser(description="Train a WaveRNN model.") + parser.add_argument( + "--config", type=str, help="config file to overwrite default config.") + parser.add_argument("--data", type=str, help="input") + parser.add_argument("--output-dir", type=str, help="output dir.") + parser.add_argument( + "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") + + args = parser.parse_args() + + with open(args.config, 'rt') as f: + config = CfgNode(yaml.safe_load(f)) + + print("========Args========") + print(yaml.safe_dump(vars(args))) + print("========Config========") + print(config) + print( + f"master see the word size: {dist.get_world_size()}, from pid: {os.getpid()}" + ) + + # dispatch + if args.ngpu > 1: + dist.spawn(train_sp, (args, config), nprocs=args.ngpu) + else: + train_sp(args, config) + + +if __name__ == "__main__": + main() diff --git a/paddlespeech/t2s/models/__init__.py b/paddlespeech/t2s/models/__init__.py index 65227374..97f8695c 100644 --- a/paddlespeech/t2s/models/__init__.py +++ b/paddlespeech/t2s/models/__init__.py @@ -20,3 +20,4 @@ from .speedyspeech import * from .tacotron2 import * from .transformer_tts import * from .waveflow import * +from .wavernn import * diff --git a/paddlespeech/t2s/models/wavernn/__init__.py b/paddlespeech/t2s/models/wavernn/__init__.py new file mode 100644 index 00000000..80ffd068 --- /dev/null +++ b/paddlespeech/t2s/models/wavernn/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from .wavernn import * +from .wavernn_updater import * diff --git a/paddlespeech/t2s/models/wavernn/wavernn.py b/paddlespeech/t2s/models/wavernn/wavernn.py new file mode 100644 index 00000000..5d1cbd39 --- /dev/null +++ b/paddlespeech/t2s/models/wavernn/wavernn.py @@ -0,0 +1,592 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sys +import time +from typing import List + +import numpy as np +import paddle +from paddle import nn +from paddle.nn import functional as F + +from paddlespeech.t2s.datasets.vocoder_batch_fn import decode_mu_law +from paddlespeech.t2s.modules.losses import sample_from_discretized_mix_logistic +from paddlespeech.t2s.modules.nets_utils import initialize +from paddlespeech.t2s.modules.upsample import Stretch2D + + +class ResBlock(nn.Layer): + def __init__(self, dims): + super(ResBlock, self).__init__() + self.conv1 = nn.Conv1D(dims, dims, kernel_size=1, bias_attr=False) + self.conv2 = nn.Conv1D(dims, dims, kernel_size=1, bias_attr=False) + self.batch_norm1 = nn.BatchNorm1D(dims) + self.batch_norm2 = nn.BatchNorm1D(dims) + + def forward(self, x): + ''' + conv -> bn -> relu -> conv -> bn + residual connection + ''' + residual = x + x = self.conv1(x) + x = self.batch_norm1(x) + x = F.relu(x) + x = self.conv2(x) + x = self.batch_norm2(x) + return x + residual + + +class MelResNet(nn.Layer): + def __init__(self, + res_blocks: int=10, + compute_dims: int=128, + res_out_dims: int=128, + aux_channels: int=80, + aux_context_window: int=0): + super().__init__() + k_size = aux_context_window * 2 + 1 + # pay attention here, the dim reduces aux_context_window * 2 + self.conv_in = nn.Conv1D( + aux_channels, compute_dims, kernel_size=k_size, bias_attr=False) + self.batch_norm = nn.BatchNorm1D(compute_dims) + self.layers = nn.LayerList() + for _ in range(res_blocks): + self.layers.append(ResBlock(compute_dims)) + self.conv_out = nn.Conv1D(compute_dims, res_out_dims, kernel_size=1) + + def forward(self, x): + ''' + Parameters + ---------- + x : Tensor + Input tensor (B, in_dims, T). + Returns + ---------- + Tensor + Output tensor (B, res_out_dims, T). 
+ ''' + x = self.conv_in(x) + x = self.batch_norm(x) + x = F.relu(x) + for f in self.layers: + x = f(x) + x = self.conv_out(x) + return x + + +class UpsampleNetwork(nn.Layer): + def __init__(self, + aux_channels: int=80, + upsample_scales: List[int]=[4, 5, 3, 5], + compute_dims: int=128, + res_blocks: int=10, + res_out_dims: int=128, + aux_context_window: int=2): + super().__init__() + # total_scale is the total Up sampling multiple + total_scale = np.prod(upsample_scales) + # TODO pad*total_scale is numpy.int64 + self.indent = int(aux_context_window * total_scale) + self.resnet = MelResNet( + res_blocks=res_blocks, + aux_channels=aux_channels, + compute_dims=compute_dims, + res_out_dims=res_out_dims, + aux_context_window=aux_context_window) + self.resnet_stretch = Stretch2D(total_scale, 1) + self.up_layers = nn.LayerList() + for scale in upsample_scales: + k_size = (1, scale * 2 + 1) + padding = (0, scale) + stretch = Stretch2D(scale, 1) + + conv = nn.Conv2D( + 1, 1, kernel_size=k_size, padding=padding, bias_attr=False) + weight_ = paddle.full_like(conv.weight, 1. / k_size[1]) + conv.weight.set_value(weight_) + self.up_layers.append(stretch) + self.up_layers.append(conv) + + def forward(self, m): + ''' + Parameters + ---------- + c : Tensor + Input tensor (B, C_aux, T). + Returns + ---------- + Tensor + Output tensor (B, (T - 2 * pad) * prob(upsample_scales), C_aux). + Tensor + Output tensor (B, (T - 2 * pad) * prob(upsample_scales), res_out_dims). + ''' + # aux: [B, C_aux, T] + # -> [B, res_out_dims, T - 2 * aux_context_window] + # -> [B, 1, res_out_dims, T - 2 * aux_context_window] + aux = self.resnet(m).unsqueeze(1) + # aux: [B, 1, res_out_dims, T - 2 * aux_context_window] + # -> [B, 1, res_out_dims, (T - 2 * pad) * prob(upsample_scales)] + aux = self.resnet_stretch(aux) + # aux: [B, 1, res_out_dims, T * prob(upsample_scales)] + # -> [B, res_out_dims, T * prob(upsample_scales)] + aux = aux.squeeze(1) + # m: [B, C_aux, T] -> [B, 1, C_aux, T] + m = m.unsqueeze(1) + for f in self.up_layers: + m = f(m) + # m: [B, 1, C_aux, T*prob(upsample_scales)] + # -> [B, C_aux, T * prob(upsample_scales)] + # -> [B, C_aux, (T - 2 * pad) * prob(upsample_scales)] + m = m.squeeze(1)[:, :, self.indent:-self.indent] + # m: [B, (T - 2 * pad) * prob(upsample_scales), C_aux] + # aux: [B, (T - 2 * pad) * prob(upsample_scales), res_out_dims] + return m.transpose([0, 2, 1]), aux.transpose([0, 2, 1]) + + +class WaveRNN(nn.Layer): + def __init__( + self, + rnn_dims: int=512, + fc_dims: int=512, + bits: int=9, + aux_context_window: int=2, + upsample_scales: List[int]=[4, 5, 3, 5], + aux_channels: int=80, + compute_dims: int=128, + res_out_dims: int=128, + res_blocks: int=10, + hop_length: int=300, + sample_rate: int=24000, + mode='RAW', + init_type: str="xavier_uniform", ): + ''' + Parameters + ---------- + rnn_dims : int, optional + Hidden dims of RNN Layers. + fc_dims : int, optional + Dims of FC Layers. + bits : int, optional + bit depth of signal. + aux_context_window : int, optional + The context window size of the first convolution applied to the + auxiliary input, by default 2 + upsample_scales : List[int], optional + Upsample scales of the upsample network. + aux_channels : int, optional + Auxiliary channel of the residual blocks. + compute_dims : int, optional + Dims of Conv1D in MelResNet. + res_out_dims : int, optional + Dims of output in MelResNet. + res_blocks : int, optional + Number of residual blocks. + mode : str, optional + Output mode of the WaveRNN vocoder. 
`MOL` for Mixture of Logistic Distribution, + and `RAW` for quantized bits as the model's output. + init_type : str + How to initialize parameters. + ''' + super().__init__() + self.mode = mode + self.aux_context_window = aux_context_window + if self.mode == 'RAW': + self.n_classes = 2**bits + elif self.mode == 'MOL': + self.n_classes = 30 + else: + RuntimeError('Unknown model mode value - ', self.mode) + + # List of rnns to call 'flatten_parameters()' on + self._to_flatten = [] + + self.rnn_dims = rnn_dims + self.aux_dims = res_out_dims // 4 + self.hop_length = hop_length + self.sample_rate = sample_rate + + # initialize parameters + initialize(self, init_type) + + self.upsample = UpsampleNetwork( + aux_channels=aux_channels, + upsample_scales=upsample_scales, + compute_dims=compute_dims, + res_blocks=res_blocks, + res_out_dims=res_out_dims, + aux_context_window=aux_context_window) + self.I = nn.Linear(aux_channels + self.aux_dims + 1, rnn_dims) + + self.rnn1 = nn.GRU(rnn_dims, rnn_dims) + self.rnn2 = nn.GRU(rnn_dims + self.aux_dims, rnn_dims) + self._to_flatten += [self.rnn1, self.rnn2] + + self.fc1 = nn.Linear(rnn_dims + self.aux_dims, fc_dims) + self.fc2 = nn.Linear(fc_dims + self.aux_dims, fc_dims) + self.fc3 = nn.Linear(fc_dims, self.n_classes) + + # Avoid fragmentation of RNN parameters and associated warning + self._flatten_parameters() + + nn.initializer.set_global_initializer(None) + + def forward(self, x, c): + ''' + Parameters + ---------- + x : Tensor + wav sequence, [B, T] + c : Tensor + mel spectrogram [B, C_aux, T'] + + T = (T' - 2 * aux_context_window ) * hop_length + Returns + ---------- + Tensor + [B, T, n_classes] + ''' + # Although we `_flatten_parameters()` on init, when using DataParallel + # the model gets replicated, making it no longer guaranteed that the + # weights are contiguous in GPU memory. Hence, we must call it again + self._flatten_parameters() + + bsize = paddle.shape(x)[0] + h1 = paddle.zeros([1, bsize, self.rnn_dims]) + h2 = paddle.zeros([1, bsize, self.rnn_dims]) + # c: [B, T, C_aux] + # aux: [B, T, res_out_dims] + c, aux = self.upsample(c) + + aux_idx = [self.aux_dims * i for i in range(5)] + a1 = aux[:, :, aux_idx[0]:aux_idx[1]] + a2 = aux[:, :, aux_idx[1]:aux_idx[2]] + a3 = aux[:, :, aux_idx[2]:aux_idx[3]] + a4 = aux[:, :, aux_idx[3]:aux_idx[4]] + + x = paddle.concat([x.unsqueeze(-1), c, a1], axis=2) + x = self.I(x) + res = x + x, _ = self.rnn1(x, h1) + + x = x + res + res = x + x = paddle.concat([x, a2], axis=2) + x, _ = self.rnn2(x, h2) + + x = x + res + x = paddle.concat([x, a3], axis=2) + x = F.relu(self.fc1(x)) + + x = paddle.concat([x, a4], axis=2) + x = F.relu(self.fc2(x)) + + return self.fc3(x) + + @paddle.no_grad() + def generate(self, + c, + batched: bool=True, + target: int=12000, + overlap: int=600, + mu_law: bool=True, + gen_display: bool=False): + """ + Parameters + ---------- + c : Tensor + input mels, (T', C_aux) + batched : bool + generate in batch or not + target : int + target number of samples to be generated in each batch entry + overlap : int + number of samples for crossfading between batches + mu_law : bool + use mu law or not + Returns + ---------- + wav sequence + Output (T' * prod(upsample_scales), out_channels, C_out). 
+ """ + + self.eval() + + mu_law = mu_law if self.mode == 'RAW' else False + + output = [] + start = time.time() + rnn1 = self.get_gru_cell(self.rnn1) + rnn2 = self.get_gru_cell(self.rnn2) + # pseudo batch + # (T, C_aux) -> (1, C_aux, T) + c = paddle.transpose(c, [1, 0]).unsqueeze(0) + + wave_len = (paddle.shape(c)[-1] - 1) * self.hop_length + # TODO remove two transpose op by modifying function pad_tensor + c = self.pad_tensor( + c.transpose([0, 2, 1]), pad=self.aux_context_window, + side='both').transpose([0, 2, 1]) + c, aux = self.upsample(c) + + if batched: + # (num_folds, target + 2 * overlap, features) + c = self.fold_with_overlap(c, target, overlap) + aux = self.fold_with_overlap(aux, target, overlap) + + b_size, seq_len, _ = paddle.shape(c) + h1 = paddle.zeros([b_size, self.rnn_dims]) + h2 = paddle.zeros([b_size, self.rnn_dims]) + x = paddle.zeros([b_size, 1]) + + d = self.aux_dims + aux_split = [aux[:, :, d * i:d * (i + 1)] for i in range(4)] + + for i in range(seq_len): + m_t = c[:, i, :] + + a1_t, a2_t, a3_t, a4_t = (a[:, i, :] for a in aux_split) + x = paddle.concat([x, m_t, a1_t], axis=1) + x = self.I(x) + h1, _ = rnn1(x, h1) + x = x + h1 + inp = paddle.concat([x, a2_t], axis=1) + h2, _ = rnn2(inp, h2) + + x = x + h2 + x = paddle.concat([x, a3_t], axis=1) + x = F.relu(self.fc1(x)) + + x = paddle.concat([x, a4_t], axis=1) + x = F.relu(self.fc2(x)) + + logits = self.fc3(x) + + if self.mode == 'MOL': + sample = sample_from_discretized_mix_logistic( + logits.unsqueeze(0).transpose([0, 2, 1])) + output.append(sample.reshape([-1])) + x = sample.transpose([1, 0, 2]) + + elif self.mode == 'RAW': + posterior = F.softmax(logits, axis=1) + distrib = paddle.distribution.Categorical(posterior) + # corresponding operate [np.floor((fx + 1) / 2 * mu + 0.5)] in enocde_mu_law + sample = 2 * distrib.sample([1])[0].cast('float32') / ( + self.n_classes - 1.) - 1. + output.append(sample) + x = sample.unsqueeze(-1) + else: + raise RuntimeError('Unknown model mode value - ', self.mode) + + if gen_display: + if i % 1000 == 0: + self.gen_display(i, int(seq_len), int(b_size), start) + + output = paddle.stack(output).transpose([1, 0]) + + if mu_law: + output = decode_mu_law(output, self.n_classes, False) + + if batched: + output = self.xfade_and_unfold(output, target, overlap) + else: + output = output[0] + + # Fade-out at the end to avoid signal cutting out suddenly + fade_out = paddle.linspace(1, 0, 20 * self.hop_length) + output = output[:wave_len] + output[-20 * self.hop_length:] *= fade_out + + self.train() + + # 增加 C_out 维度 + return output.unsqueeze(-1) + + def get_gru_cell(self, gru): + gru_cell = nn.GRUCell(gru.input_size, gru.hidden_size) + gru_cell.weight_hh = gru.weight_hh_l0 + gru_cell.weight_ih = gru.weight_ih_l0 + gru_cell.bias_hh = gru.bias_hh_l0 + gru_cell.bias_ih = gru.bias_ih_l0 + + return gru_cell + + def _flatten_parameters(self): + [m.flatten_parameters() for m in self._to_flatten] + + def pad_tensor(self, x, pad, side='both'): + ''' + Parameters + ---------- + x : Tensor + mel, [1, n_frames, 80] + pad : int + side : str + 'both', 'before' or 'after' + Returns + ---------- + Tensor + ''' + b, t, c = paddle.shape(x) + total = t + 2 * pad if side == 'both' else t + pad + padded = paddle.zeros([b, total, c]) + if side == 'before' or side == 'both': + padded[:, pad:pad + t, :] = x + elif side == 'after': + padded[:, :t, :] = x + return padded + + def fold_with_overlap(self, x, target, overlap): + ''' + Fold the tensor with overlap for quick batched inference. 
+ Overlap will be used for crossfading in xfade_and_unfold() + + Parameters + ---------- + x : Tensor + Upsampled conditioning features. mels or aux + shape=(1, T, features) + mels: [1, T, 80] + aux: [1, T, 128] + target : int + Target timesteps for each index of batch + overlap : int + Timesteps for both xfade and rnn warmup + overlap = hop_length * 2 + + Returns + ---------- + Tensor + shape=(num_folds, target + 2 * overlap, features) + num_flods = (time_seq - overlap) // (target + overlap) + mel: [num_folds, target + 2 * overlap, 80] + aux: [num_folds, target + 2 * overlap, 128] + + Details + ---------- + x = [[h1, h2, ... hn]] + + Where each h is a vector of conditioning features + + Eg: target=2, overlap=1 with x.size(1)=10 + + folded = [[h1, h2, h3, h4], + [h4, h5, h6, h7], + [h7, h8, h9, h10]] + ''' + + _, total_len, features = paddle.shape(x) + + # Calculate variables needed + num_folds = (total_len - overlap) // (target + overlap) + extended_len = num_folds * (overlap + target) + overlap + remaining = total_len - extended_len + + # Pad if some time steps poking out + if remaining != 0: + num_folds += 1 + padding = target + 2 * overlap - remaining + x = self.pad_tensor(x, padding, side='after') + + folded = paddle.zeros([num_folds, target + 2 * overlap, features]) + + # Get the values for the folded tensor + for i in range(num_folds): + start = i * (target + overlap) + end = start + target + 2 * overlap + folded[i] = x[0][start:end, :] + return folded + + def xfade_and_unfold(self, y, target: int=12000, overlap: int=600): + ''' Applies a crossfade and unfolds into a 1d array. + + Parameters + ---------- + y : Tensor + Batched sequences of audio samples + shape=(num_folds, target + 2 * overlap) + dtype=paddle.float64 + overlap : int + Timesteps for both xfade and rnn warmup + + Returns + ---------- + Tensor + audio samples in a 1d array + shape=(total_len) + dtype=paddle.float64 + + Details + ---------- + y = [[seq1], + [seq2], + [seq3]] + + Apply a gain envelope at both ends of the sequences + + y = [[seq1_in, seq1_target, seq1_out], + [seq2_in, seq2_target, seq2_out], + [seq3_in, seq3_target, seq3_out]] + + Stagger and add up the groups of samples: + + [seq1_in, seq1_target, (seq1_out + seq2_in), seq2_target, ...] 
+ + ''' + # num_folds = (total_len - overlap) // (target + overlap) + num_folds, length = y.shape + target = length - 2 * overlap + total_len = num_folds * (target + overlap) + overlap + + # Need some silence for the run warmup + slience_len = overlap // 2 + fade_len = overlap - slience_len + slience = paddle.zeros([slience_len], dtype=paddle.float64) + linear = paddle.ones([fade_len], dtype=paddle.float64) + + # Equal power crossfade + # fade_in increase from 0 to 1, fade_out reduces from 1 to 0 + t = paddle.linspace(-1, 1, fade_len, dtype=paddle.float64) + fade_in = paddle.sqrt(0.5 * (1 + t)) + fade_out = paddle.sqrt(0.5 * (1 - t)) + # Concat the silence to the fades + fade_out = paddle.concat([linear, fade_out]) + fade_in = paddle.concat([slience, fade_in]) + + # Apply the gain to the overlap samples + y[:, :overlap] *= fade_in + y[:, -overlap:] *= fade_out + + unfolded = paddle.zeros([total_len], dtype=paddle.float64) + + # Loop to add up all the samples + for i in range(num_folds): + start = i * (target + overlap) + end = start + target + 2 * overlap + unfolded[start:end] += y[i] + + return unfolded + + def gen_display(self, i, seq_len, b_size, start): + gen_rate = (i + 1) / (time.time() - start) * b_size / 1000 + pbar = self.progbar(i, seq_len) + msg = f'| {pbar} {i*b_size}/{seq_len*b_size} | Batch Size: {b_size} | Gen Rate: {gen_rate:.1f}kHz | ' + sys.stdout.write(f"\r{msg}") + + def progbar(self, i, n, size=16): + done = int(i * size) // n + bar = '' + for i in range(size): + bar += '█' if i <= done else '░' + return bar diff --git a/paddlespeech/t2s/models/wavernn/wavernn_updater.py b/paddlespeech/t2s/models/wavernn/wavernn_updater.py new file mode 100644 index 00000000..e6064e4c --- /dev/null +++ b/paddlespeech/t2s/models/wavernn/wavernn_updater.py @@ -0,0 +1,203 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
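Before the updater module, a numeric sketch of the folding bookkeeping used by fold_with_overlap and xfade_and_unfold above, with the default inference settings (target=12000, overlap=600). The total_len value is a made-up example; only the arithmetic mirrors the code.

target, overlap = 12000, 600      # inference defaults from conf/default.yaml
total_len = 100_000               # hypothetical length of the upsampled conditioning features

num_folds = (total_len - overlap) // (target + overlap)    # 7
extended_len = num_folds * (overlap + target) + overlap    # 88800
remaining = total_len - extended_len                       # 11200, so the tail gets padded
if remaining != 0:
    num_folds += 1                                         # 8 folds, each target + 2 * overlap = 13200 long
print(num_folds)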
+import logging +from pathlib import Path + +import paddle +import soundfile as sf +from paddle import distributed as dist +from paddle.io import DataLoader +from paddle.nn import Layer +from paddle.optimizer import Optimizer + +from paddlespeech.t2s.datasets.vocoder_batch_fn import decode_mu_law +from paddlespeech.t2s.datasets.vocoder_batch_fn import label_2_float +from paddlespeech.t2s.training.extensions.evaluator import StandardEvaluator +from paddlespeech.t2s.training.reporter import report +from paddlespeech.t2s.training.updaters.standard_updater import StandardUpdater +logging.basicConfig( + format='%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s', + datefmt='[%Y-%m-%d %H:%M:%S]') +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +def calculate_grad_norm(parameters, norm_type: str=2): + ''' + calculate grad norm of mdoel's parameters + parameters: + model's parameters + norm_type: str + Returns + ------------ + Tensor + grad_norm + ''' + + grad_list = [ + paddle.to_tensor(p.grad) for p in parameters if p.grad is not None + ] + norm_list = paddle.stack( + [paddle.norm(grad, norm_type) for grad in grad_list]) + total_norm = paddle.norm(norm_list) + return total_norm + + +# for save name in gen_valid_samples() +ITERATION = 0 + + +class WaveRNNUpdater(StandardUpdater): + def __init__(self, + model: Layer, + optimizer: Optimizer, + criterion: Layer, + dataloader: DataLoader, + init_state=None, + output_dir: Path=None, + mode='RAW'): + super().__init__(model, optimizer, dataloader, init_state=None) + + self.criterion = criterion + # self.scheduler = scheduler + + log_file = output_dir / 'worker_{}.log'.format(dist.get_rank()) + self.filehandler = logging.FileHandler(str(log_file)) + logger.addHandler(self.filehandler) + self.logger = logger + self.msg = "" + self.mode = mode + + def update_core(self, batch): + + self.msg = "Rank: {}, ".format(dist.get_rank()) + losses_dict = {} + # parse batch + self.model.train() + self.optimizer.clear_grad() + + wav, y, mel = batch + + y_hat = self.model(wav, mel) + if self.mode == 'RAW': + y_hat = y_hat.transpose([0, 2, 1]).unsqueeze(-1) + elif self.mode == 'MOL': + y_hat = paddle.cast(y, dtype='float32') + + y = y.unsqueeze(-1) + loss = self.criterion(y_hat, y) + loss.backward() + grad_norm = float( + calculate_grad_norm(self.model.parameters(), norm_type=2)) + + self.optimizer.step() + + report("train/loss", float(loss)) + report("train/grad_norm", float(grad_norm)) + + losses_dict["loss"] = float(loss) + losses_dict["grad_norm"] = float(grad_norm) + self.msg += ', '.join('{}: {:>.6f}'.format(k, v) + for k, v in losses_dict.items()) + global ITERATION + ITERATION = self.state.iteration + 1 + + +class WaveRNNEvaluator(StandardEvaluator): + def __init__(self, + model: Layer, + criterion: Layer, + dataloader: Optimizer, + output_dir: Path=None, + valid_generate_loader=None, + config=None): + super().__init__(model, dataloader) + + log_file = output_dir / 'worker_{}.log'.format(dist.get_rank()) + self.filehandler = logging.FileHandler(str(log_file)) + logger.addHandler(self.filehandler) + self.logger = logger + self.msg = "" + + self.criterion = criterion + self.valid_generate_loader = valid_generate_loader + self.config = config + self.mode = config.model.mode + + self.valid_samples_dir = output_dir / "valid_samples" + self.valid_samples_dir.mkdir(parents=True, exist_ok=True) + + def evaluate_core(self, batch): + self.msg = "Evaluate: " + losses_dict = {} + # parse batch + wav, y, mel = batch + y_hat = 
self.model(wav, mel) + + if self.mode == 'RAW': + y_hat = y_hat.transpose([0, 2, 1]).unsqueeze(-1) + elif self.mode == 'MOL': + y_hat = paddle.cast(y, dtype='float32') + + y = y.unsqueeze(-1) + loss = self.criterion(y_hat, y) + report("eval/loss", float(loss)) + + losses_dict["loss"] = float(loss) + + self.iteration = ITERATION + if self.iteration % self.config.gen_eval_samples_interval_steps == 0: + self.gen_valid_samples() + + self.msg += ', '.join('{}: {:>.6f}'.format(k, v) + for k, v in losses_dict.items()) + self.logger.info(self.msg) + + def gen_valid_samples(self): + + for i, (mel, wav) in enumerate(self.valid_generate_loader): + if i >= self.config.generate_num: + print("before break") + break + print( + '\n| Generating: {}/{}'.format(i + 1, self.config.generate_num)) + wav = wav[0] + if self.mode == 'MOL': + bits = 16 + else: + bits = self.config.model.bits + if self.config.mu_law and self.mode != 'MOL': + wav = decode_mu_law(wav, 2**bits, from_labels=True) + else: + wav = label_2_float(wav, bits) + origin_save_path = self.valid_samples_dir / '{}_steps_{}_target.wav'.format( + self.iteration, i) + sf.write(origin_save_path, wav.numpy(), samplerate=self.config.fs) + + if self.config.inference.gen_batched: + batch_str = 'gen_batched_target{}_overlap{}'.format( + self.config.inference.target, self.config.inference.overlap) + else: + batch_str = 'gen_not_batched' + gen_save_path = str(self.valid_samples_dir / + '{}_steps_{}_{}.wav'.format(self.iteration, i, + batch_str)) + # (1, C_aux, T) -> (T, C_aux) + mel = mel.squeeze(0).transpose([1, 0]) + gen_sample = self.model.generate( + mel, self.config.inference.gen_batched, + self.config.inference.target, self.config.inference.overlap, + self.config.mu_law) + sf.write( + gen_save_path, gen_sample.numpy(), samplerate=self.config.fs) diff --git a/paddlespeech/t2s/modules/losses.py b/paddlespeech/t2s/modules/losses.py index 3cc7a93c..618f444a 100644 --- a/paddlespeech/t2s/modules/losses.py +++ b/paddlespeech/t2s/modules/losses.py @@ -14,6 +14,7 @@ import math import librosa +import numpy as np import paddle from paddle import nn from paddle.fluid.layers import sequence_mask @@ -23,6 +24,145 @@ from scipy import signal from paddlespeech.t2s.modules.nets_utils import make_non_pad_mask +# Losses for WaveRNN +def log_sum_exp(x): + """ numerically stable log_sum_exp implementation that prevents overflow """ + # TF ordering + axis = len(x.shape) - 1 + m = paddle.max(x, axis=axis) + m2 = paddle.max(x, axis=axis, keepdim=True) + return m + paddle.log(paddle.sum(paddle.exp(x - m2), axis=axis)) + + +# It is adapted from https://github.com/r9y9/wavenet_vocoder/blob/master/wavenet_vocoder/mixture.py +def discretized_mix_logistic_loss(y_hat, + y, + num_classes=65536, + log_scale_min=None, + reduce=True): + if log_scale_min is None: + log_scale_min = float(np.log(1e-14)) + y_hat = y_hat.transpose([0, 2, 1]) + assert y_hat.dim() == 3 + assert y_hat.shape[1] % 3 == 0 + nr_mix = y_hat.shape[1] // 3 + + # (B x T x C) + y_hat = y_hat.transpose([0, 2, 1]) + + # unpack parameters. (B, T, num_mixtures) x 3 + logit_probs = y_hat[:, :, :nr_mix] + means = y_hat[:, :, nr_mix:2 * nr_mix] + log_scales = paddle.clip( + y_hat[:, :, 2 * nr_mix:3 * nr_mix], min=log_scale_min) + + # B x T x 1 -> B x T x num_mixtures + y = y.expand_as(means) + centered_y = paddle.cast(y, dtype=paddle.get_default_dtype()) - means + inv_stdv = paddle.exp(-log_scales) + plus_in = inv_stdv * (centered_y + 1. 
/ (num_classes - 1)) + cdf_plus = F.sigmoid(plus_in) + min_in = inv_stdv * (centered_y - 1. / (num_classes - 1)) + cdf_min = F.sigmoid(min_in) + + # log probability for edge case of 0 (before scaling) + # equivalent: torch.log(F.sigmoid(plus_in)) + # softplus: log(1+ e^{-x}) + log_cdf_plus = plus_in - F.softplus(plus_in) + + # log probability for edge case of 255 (before scaling) + # equivalent: (1 - F.sigmoid(min_in)).log() + log_one_minus_cdf_min = -F.softplus(min_in) + + # probability for all other cases + cdf_delta = cdf_plus - cdf_min + + mid_in = inv_stdv * centered_y + # log probability in the center of the bin, to be used in extreme cases + # (not actually used in our code) + log_pdf_mid = mid_in - log_scales - 2. * F.softplus(mid_in) + + # TODO: cdf_delta <= 1e-5 actually can happen. How can we choose the value + # for num_classes=65536 case? 1e-7? not sure.. + inner_inner_cond = cdf_delta > 1e-5 + + inner_inner_cond = paddle.cast( + inner_inner_cond, dtype=paddle.get_default_dtype()) + + # inner_inner_out = inner_inner_cond * \ + # paddle.log(paddle.clip(cdf_delta, min=1e-12)) + \ + # (1. - inner_inner_cond) * (log_pdf_mid - np.log((num_classes - 1) / 2)) + + inner_inner_out = inner_inner_cond * paddle.log( + paddle.clip(cdf_delta, min=1e-12)) + (1. - inner_inner_cond) * ( + log_pdf_mid - np.log((num_classes - 1) / 2)) + + inner_cond = y > 0.999 + + inner_cond = paddle.cast(inner_cond, dtype=paddle.get_default_dtype()) + + inner_out = inner_cond * log_one_minus_cdf_min + (1. - inner_cond + ) * inner_inner_out + cond = y < -0.999 + cond = paddle.cast(cond, dtype=paddle.get_default_dtype()) + + log_probs = cond * log_cdf_plus + (1. - cond) * inner_out + log_probs = log_probs + F.log_softmax(logit_probs, -1) + + if reduce: + return -paddle.mean(log_sum_exp(log_probs)) + else: + return -log_sum_exp(log_probs).unsqueeze(-1) + + +def sample_from_discretized_mix_logistic(y, log_scale_min=None): + """ + Sample from discretized mixture of logistic distributions + Parameters + ---------- + y : Tensor + (B, C, T) + log_scale_min : float + Log scale minimum value + Returns + ---------- + Tensor + sample in range of [-1, 1]. + """ + if log_scale_min is None: + log_scale_min = float(np.log(1e-14)) + + assert y.shape[1] % 3 == 0 + nr_mix = y.shape[1] // 3 + + # (B, T, C) + y = y.transpose([0, 2, 1]) + logit_probs = y[:, :, :nr_mix] + + # sample mixture indicator from softmax + temp = paddle.uniform( + logit_probs.shape, dtype=logit_probs.dtype, min=1e-5, max=1.0 - 1e-5) + temp = logit_probs - paddle.log(-paddle.log(temp)) + argmax = paddle.argmax(temp, axis=-1) + + # (B, T) -> (B, T, nr_mix) + one_hot = F.one_hot(argmax, nr_mix) + one_hot = paddle.cast(one_hot, dtype=paddle.get_default_dtype()) + + # select logistic parameters + means = paddle.sum(y[:, :, nr_mix:2 * nr_mix] * one_hot, axis=-1) + log_scales = paddle.clip( + paddle.sum(y[:, :, 2 * nr_mix:3 * nr_mix] * one_hot, axis=-1), + min=log_scale_min) + # sample from logistic & clip to interval + # we don't actually round to the nearest 8bit value when sampling + u = paddle.uniform(means.shape, min=1e-5, max=1.0 - 1e-5) + x = means + paddle.exp(log_scales) * (paddle.log(u) - paddle.log(1. - u)) + x = paddle.clip(x, min=-1., max=-1.) + + return x + + # Loss for new Tacotron2 class GuidedAttentionLoss(nn.Layer): """Guided attention loss function module. 
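Between the two patches, a small sketch of the inverse-CDF step that sample_from_discretized_mix_logistic above performs once a mixture component has been chosen: a logistic sample is drawn from its quantile function and clipped to the waveform range. The parameter values here are arbitrary illustrations.

import paddle

mean = paddle.to_tensor([0.1])          # chosen component's mean (arbitrary)
log_scale = paddle.to_tensor([-3.0])    # chosen component's log scale (arbitrary)

# inverse CDF of the logistic distribution: mean + s * (log(u) - log(1 - u)), u ~ U(0, 1)
u = paddle.uniform([1], min=1e-5, max=1.0 - 1e-5)
x = mean + paddle.exp(log_scale) * (paddle.log(u) - paddle.log(1. - u))
x = paddle.clip(x, min=-1., max=1.)     # keep the sample inside the [-1, 1] waveform range
print(float(x))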
From 4c3e57a23ccbe085f014cf31163b18dd70cac2a3 Mon Sep 17 00:00:00 2001 From: TianYuan Date: Tue, 25 Jan 2022 06:33:24 +0000 Subject: [PATCH 02/22] align preprocess of wavernn, test=tts --- examples/csmsc/voc6/local/preprocess.sh | 48 +++- examples/csmsc/voc6/local/synthesize.sh | 3 +- examples/csmsc/voc6/local/train.sh | 6 +- examples/csmsc/voc6/run.sh | 7 +- paddlespeech/t2s/datasets/vocoder_batch_fn.py | 216 +++++++++--------- paddlespeech/t2s/exps/wavernn/preprocess.py | 157 ------------- paddlespeech/t2s/exps/wavernn/synthesize.py | 61 +++-- paddlespeech/t2s/exps/wavernn/train.py | 36 ++- .../t2s/models/wavernn/wavernn_updater.py | 36 ++- 9 files changed, 250 insertions(+), 320 deletions(-) delete mode 100644 paddlespeech/t2s/exps/wavernn/preprocess.py diff --git a/examples/csmsc/voc6/local/preprocess.sh b/examples/csmsc/voc6/local/preprocess.sh index 064aea55..2dcc39ac 100755 --- a/examples/csmsc/voc6/local/preprocess.sh +++ b/examples/csmsc/voc6/local/preprocess.sh @@ -6,10 +6,50 @@ stop_stage=100 config_path=$1 if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then - python3 ${BIN_DIR}/preprocess.py \ - --input=~/datasets/BZNSYP/ \ - --output=dump \ - --dataset=csmsc \ + # get durations from MFA's result + echo "Generate durations.txt from MFA results ..." + python3 ${MAIN_ROOT}/utils/gen_duration_from_textgrid.py \ + --inputdir=./baker_alignment_tone \ + --output=durations.txt \ + --config=${config_path} +fi + +if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then + # extract features + echo "Extract features ..." + python3 ${BIN_DIR}/../gan_vocoder/preprocess.py \ + --rootdir=~/datasets/BZNSYP/ \ + --dataset=baker \ + --dumpdir=dump \ + --dur-file=durations.txt \ --config=${config_path} \ + --cut-sil=True \ --num-cpu=20 fi + +if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then + # get features' stats(mean and std) + echo "Get features' stats ..." + python3 ${MAIN_ROOT}/utils/compute_statistics.py \ + --metadata=dump/train/raw/metadata.jsonl \ + --field-name="feats" +fi + +if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then + # normalize, dev and test should use train's stats + echo "Normalize ..." 
+ + python3 ${BIN_DIR}/../gan_vocoder/normalize.py \ + --metadata=dump/train/raw/metadata.jsonl \ + --dumpdir=dump/train/norm \ + --stats=dump/train/feats_stats.npy + python3 ${BIN_DIR}/../gan_vocoder/normalize.py \ + --metadata=dump/dev/raw/metadata.jsonl \ + --dumpdir=dump/dev/norm \ + --stats=dump/train/feats_stats.npy + + python3 ${BIN_DIR}/../gan_vocoder/normalize.py \ + --metadata=dump/test/raw/metadata.jsonl \ + --dumpdir=dump/test/norm \ + --stats=dump/train/feats_stats.npy +fi diff --git a/examples/csmsc/voc6/local/synthesize.sh b/examples/csmsc/voc6/local/synthesize.sh index 876c8444..7f0cbe48 100755 --- a/examples/csmsc/voc6/local/synthesize.sh +++ b/examples/csmsc/voc6/local/synthesize.sh @@ -3,12 +3,11 @@ config_path=$1 train_output_path=$2 ckpt_name=$3 -test_input=$4 FLAGS_allocator_strategy=naive_best_fit \ FLAGS_fraction_of_gpu_memory_to_use=0.01 \ python3 ${BIN_DIR}/synthesize.py \ --config=${config_path} \ --checkpoint=${train_output_path}/checkpoints/${ckpt_name} \ - --input=${test_input} \ + --test-metadata=dump/test/norm/metadata.jsonl \ --output-dir=${train_output_path}/test diff --git a/examples/csmsc/voc6/local/train.sh b/examples/csmsc/voc6/local/train.sh index 900450cd..9695631e 100755 --- a/examples/csmsc/voc6/local/train.sh +++ b/examples/csmsc/voc6/local/train.sh @@ -2,8 +2,12 @@ config_path=$1 train_output_path=$2 + +FLAGS_cudnn_exhaustive_search=true \ +FLAGS_conv_workspace_size_limit=4000 \ python ${BIN_DIR}/train.py \ + --train-metadata=dump/train/norm/metadata.jsonl \ + --dev-metadata=dump/dev/norm/metadata.jsonl \ --config=${config_path} \ - --data=dump/ \ --output-dir=${train_output_path} \ --ngpu=1 diff --git a/examples/csmsc/voc6/run.sh b/examples/csmsc/voc6/run.sh index bd32e3d2..5f754fff 100755 --- a/examples/csmsc/voc6/run.sh +++ b/examples/csmsc/voc6/run.sh @@ -9,7 +9,7 @@ stop_stage=100 conf_path=conf/default.yaml train_output_path=exp/default -test_input=dump/mel_test +test_input=dump/dump_gta_test ckpt_name=snapshot_iter_100000.pdz source ${MAIN_ROOT}/utils/parse_options.sh || exit 1 @@ -25,9 +25,6 @@ if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then fi if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then - # copy some test mels from dump - mkdir -p ${test_input} - cp -r dump/mel/00995*.npy ${test_input} # synthesize - CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} ${test_input}|| exit -1 + CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1 fi diff --git a/paddlespeech/t2s/datasets/vocoder_batch_fn.py b/paddlespeech/t2s/datasets/vocoder_batch_fn.py index 496bf902..b1d22db9 100644 --- a/paddlespeech/t2s/datasets/vocoder_batch_fn.py +++ b/paddlespeech/t2s/datasets/vocoder_batch_fn.py @@ -12,11 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import math -from pathlib import Path import numpy as np import paddle -from paddle.io import Dataset def label_2_float(x, bits): @@ -44,102 +42,6 @@ def decode_mu_law(y, mu, from_labels=True): return x -class WaveRNNDataset(Dataset): - """A simple dataset adaptor for the processed ljspeech dataset.""" - - def __init__(self, root): - self.root = Path(root).expanduser() - - records = [] - - with open(self.root / "metadata.csv", 'r') as rf: - - for line in rf: - name = line.split("\t")[0] - mel_path = str(self.root / "mel" / (str(name) + ".npy")) - wav_path = str(self.root / "wav" / (str(name) + ".npy")) - records.append((mel_path, wav_path)) - - self.records = records - - def __getitem__(self, i): - mel_name, wav_name = self.records[i] - mel = np.load(mel_name) - wav = np.load(wav_name) - return mel, wav - - def __len__(self): - return len(self.records) - - -class WaveRNNClip(object): - def __init__(self, - mode: str='RAW', - batch_max_steps: int=4500, - hop_size: int=300, - aux_context_window: int=2, - bits: int=9): - self.mode = mode - self.mel_win = batch_max_steps // hop_size + 2 * aux_context_window - self.batch_max_steps = batch_max_steps - self.hop_size = hop_size - self.aux_context_window = aux_context_window - if self.mode == 'MOL': - self.bits = 16 - else: - self.bits = bits - - def __call__(self, batch): - # batch: [mel, quant] - # voc_pad = 2 this will pad the input so that the resnet can 'see' wider than input length - # max_offsets = n_frames - 2 - (mel_win + 2 * hp.voc_pad) = n_frames - 15 - max_offsets = [ - x[0].shape[-1] - 2 - (self.mel_win + 2 * self.aux_context_window) - for x in batch - ] - # the slice point of mel selecting randomly - mel_offsets = [np.random.randint(0, offset) for offset in max_offsets] - # the slice point of wav selecting randomly, which is behind 2(=pad) frames - sig_offsets = [(offset + self.aux_context_window) * self.hop_size - for offset in mel_offsets] - # mels.sape[1] = voc_seq_len // hop_length + 2 * voc_pad - mels = [ - x[0][:, mel_offsets[i]:mel_offsets[i] + self.mel_win] - for i, x in enumerate(batch) - ] - # label.shape[1] = voc_seq_len + 1 - labels = [ - x[1][sig_offsets[i]:sig_offsets[i] + self.batch_max_steps + 1] - for i, x in enumerate(batch) - ] - - mels = np.stack(mels).astype(np.float32) - labels = np.stack(labels).astype(np.int64) - - mels = paddle.to_tensor(mels) - labels = paddle.to_tensor(labels, dtype='int64') - - # x is input, y is label - x = labels[:, :self.batch_max_steps] - y = labels[:, 1:] - ''' - mode = RAW: - mu_law = True: - quant: bits = 9 0, 1, 2, ..., 509, 510, 511 int - mu_law = False - quant bits = 9 [0, 511] float - mode = MOL: - quant: bits = 16 [0. 65536] float - ''' - # x should be normalizes in.[0, 1] in RAW mode - x = label_2_float(paddle.cast(x, dtype='float32'), self.bits) - # y should be normalizes in.[0, 1] in MOL mode - if self.mode == 'MOL': - y = label_2_float(paddle.cast(y, dtype='float32'), self.bits) - - return x, y, mels - - class Clip(object): """Collate functor for training vocoders. """ @@ -174,7 +76,7 @@ class Clip(object): self.end_offset = -(self.batch_max_frames + aux_context_window) self.mel_threshold = self.batch_max_frames + 2 * aux_context_window - def __call__(self, examples): + def __call__(self, batch): """Convert into batch tensors. 
Parameters @@ -192,11 +94,11 @@ class Clip(object): """ # check length - examples = [ - self._adjust_length(b['wave'], b['feats']) for b in examples + batch = [ + self._adjust_length(b['wave'], b['feats']) for b in batch if b['feats'].shape[0] > self.mel_threshold ] - xs, cs = [b[0] for b in examples], [b[1] for b in examples] + xs, cs = [b[0] for b in batch], [b[1] for b in batch] # make batch with random cut c_lengths = [c.shape[0] for c in cs] @@ -214,7 +116,7 @@ class Clip(object): c_batch = np.stack( [c[start:end] for c, start, end in zip(cs, c_starts, c_ends)]) - # convert each batch to tensor, asuume that each item in batch has the same length + # convert each batch to tensor, assume that each item in batch has the same length y_batch = paddle.to_tensor( y_batch, dtype=paddle.float32).unsqueeze(1) # (B, 1, T) c_batch = paddle.to_tensor( @@ -245,3 +147,111 @@ class Clip(object): 0] * self.hop_size, f"wave length: ({len(x)}), mel length: ({c.shape[0]})" return x, c + + +class WaveRNNClip(Clip): + def __init__(self, + mode: str='RAW', + batch_max_steps: int=4500, + hop_size: int=300, + aux_context_window: int=2, + bits: int=9, + mu_law: bool=True): + self.mode = mode + self.mel_win = batch_max_steps // hop_size + 2 * aux_context_window + self.batch_max_steps = batch_max_steps + self.hop_size = hop_size + self.aux_context_window = aux_context_window + self.mu_law = mu_law + self.batch_max_frames = batch_max_steps // hop_size + self.mel_threshold = self.batch_max_frames + 2 * aux_context_window + if self.mode == 'MOL': + self.bits = 16 + else: + self.bits = bits + + def to_quant(self, wav): + if self.mode == 'RAW': + if self.mu_law: + quant = encode_mu_law(wav, mu=2**self.bits) + else: + quant = float_2_label(wav, bits=self.bits) + elif self.mode == 'MOL': + quant = float_2_label(wav, bits=16) + quant = quant.astype(np.int64) + return quant + + def __call__(self, batch): + # voc_pad = 2 this will pad the input so that the resnet can 'see' wider than input length + # max_offsets = n_frames - 2 - (mel_win + 2 * hp.voc_pad) = n_frames - 15 + """Convert into batch tensors. + + Parameters + ---------- + batch : list + list of tuple of the pair of audio and features. + Audio shape (T, ), features shape(T', C). + + Returns + ---------- + Tensor + Auxiliary feature batch (B, C, T'), where + T = (T' - 2 * aux_context_window) * hop_size. + Tensor + Target signal batch (B, 1, T). 
+ + """ + # check length + batch = [ + self._adjust_length(b['wave'], b['feats']) for b in batch + if b['feats'].shape[0] > self.mel_threshold + ] + wav, mel = [b[0] for b in batch], [b[1] for b in batch] + # mel 此处需要转置 + mel = [x.T for x in mel] + max_offsets = [ + x.shape[-1] - 2 - (self.mel_win + 2 * self.aux_context_window) + for x in mel + ] + # the slice point of mel selecting randomly + mel_offsets = [np.random.randint(0, offset) for offset in max_offsets] + # the slice point of wav selecting randomly, which is behind 2(=pad) frames + sig_offsets = [(offset + self.aux_context_window) * self.hop_size + for offset in mel_offsets] + # mels.shape[1] = voc_seq_len // hop_length + 2 * voc_pad + mels = [ + x[:, mel_offsets[i]:mel_offsets[i] + self.mel_win] + for i, x in enumerate(mel) + ] + # label.shape[1] = voc_seq_len + 1 + wav = [self.to_quant(x) for x in wav] + + labels = [ + x[sig_offsets[i]:sig_offsets[i] + self.batch_max_steps + 1] + for i, x in enumerate(wav) + ] + + mels = np.stack(mels).astype(np.float32) + labels = np.stack(labels).astype(np.int64) + + mels = paddle.to_tensor(mels) + labels = paddle.to_tensor(labels, dtype='int64') + # x is input, y is label + x = labels[:, :self.batch_max_steps] + y = labels[:, 1:] + ''' + mode = RAW: + mu_law = True: + quant: bits = 9 0, 1, 2, ..., 509, 510, 511 int + mu_law = False + quant bits = 9 [0, 511] float + mode = MOL: + quant: bits = 16 [0. 65536] float + ''' + # x should be normalizes in.[0, 1] in RAW mode + x = label_2_float(paddle.cast(x, dtype='float32'), self.bits) + # y should be normalizes in.[0, 1] in MOL mode + if self.mode == 'MOL': + y = label_2_float(paddle.cast(y, dtype='float32'), self.bits) + + return x, y, mels diff --git a/paddlespeech/t2s/exps/wavernn/preprocess.py b/paddlespeech/t2s/exps/wavernn/preprocess.py deleted file mode 100644 index a26c6702..00000000 --- a/paddlespeech/t2s/exps/wavernn/preprocess.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import argparse -import os -from multiprocessing import cpu_count -from multiprocessing import Pool -from pathlib import Path - -import librosa -import numpy as np -import pandas as pd -import tqdm -import yaml -from yacs.config import CfgNode - -from paddlespeech.t2s.data.get_feats import LogMelFBank -from paddlespeech.t2s.datasets import CSMSCMetaData -from paddlespeech.t2s.datasets import LJSpeechMetaData -from paddlespeech.t2s.datasets.vocoder_batch_fn import encode_mu_law -from paddlespeech.t2s.datasets.vocoder_batch_fn import float_2_label - - -class Transform(object): - def __init__(self, output_dir: Path, config): - self.fs = config.fs - self.peak_norm = config.peak_norm - self.bits = config.model.bits - self.mode = config.model.mode - self.mu_law = config.mu_law - - self.wav_dir = output_dir / "wav" - self.mel_dir = output_dir / "mel" - self.wav_dir.mkdir(exist_ok=True) - self.mel_dir.mkdir(exist_ok=True) - - self.mel_extractor = LogMelFBank( - sr=config.fs, - n_fft=config.n_fft, - hop_length=config.n_shift, - win_length=config.win_length, - window=config.window, - n_mels=config.n_mels, - fmin=config.fmin, - fmax=config.fmax) - - if self.mode != 'RAW' and self.mode != 'MOL': - raise RuntimeError('Unknown mode value - ', self.mode) - - def __call__(self, example): - wav_path, _, _ = example - - base_name = os.path.splitext(os.path.basename(wav_path))[0] - # print("self.sample_rate:",self.sample_rate) - wav, _ = librosa.load(wav_path, sr=self.fs) - peak = np.abs(wav).max() - if self.peak_norm or peak > 1.0: - wav /= peak - - mel = self.mel_extractor.get_log_mel_fbank(wav).T - if self.mode == 'RAW': - if self.mu_law: - quant = encode_mu_law(wav, mu=2**self.bits) - else: - quant = float_2_label(wav, bits=self.bits) - elif self.mode == 'MOL': - quant = float_2_label(wav, bits=16) - - mel = mel.astype(np.float32) - audio = quant.astype(np.int64) - - np.save(str(self.wav_dir / base_name), audio) - np.save(str(self.mel_dir / base_name), mel) - - return base_name, mel.shape[-1], audio.shape[-1] - - -def create_dataset(config, - input_dir, - output_dir, - nprocs: int=1, - dataset_type: str="ljspeech"): - input_dir = Path(input_dir).expanduser() - ''' - LJSpeechMetaData.records: [filename, normalized text, speaker name(ljspeech)] - CSMSCMetaData.records: [filename, normalized text, pinyin] - ''' - if dataset_type == 'ljspeech': - dataset = LJSpeechMetaData(input_dir) - else: - dataset = CSMSCMetaData(input_dir) - output_dir = Path(output_dir).expanduser() - output_dir.mkdir(exist_ok=True) - - transform = Transform(output_dir, config) - - file_names = [] - - pool = Pool(processes=nprocs) - - for info in tqdm.tqdm(pool.imap(transform, dataset), total=len(dataset)): - base_name, mel_len, audio_len = info - file_names.append((base_name, mel_len, audio_len)) - - meta_data = pd.DataFrame.from_records(file_names) - meta_data.to_csv( - str(output_dir / "metadata.csv"), sep="\t", index=None, header=None) - print("saved meta data in to {}".format( - os.path.join(output_dir, "metadata.csv"))) - - print("Done!") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="create dataset") - parser.add_argument( - "--config", type=str, help="config file to overwrite default config.") - - parser.add_argument( - "--input", type=str, help="path of the ljspeech dataset") - parser.add_argument( - "--output", type=str, help="path to save output dataset") - parser.add_argument( - "--num-cpu", - type=int, - default=cpu_count() // 2, - help="number of process.") - parser.add_argument( - 
"--dataset", - type=str, - default="ljspeech", - help="The dataset to preprocess, ljspeech or csmsc") - - args = parser.parse_args() - - with open(args.config, 'rt') as f: - config = CfgNode(yaml.safe_load(f)) - - if args.dataset != "ljspeech" and args.dataset != "csmsc": - raise RuntimeError('Unknown dataset - ', args.dataset) - - create_dataset( - config, - input_dir=args.input, - output_dir=args.output, - nprocs=args.num_cpu, - dataset_type=args.dataset) diff --git a/paddlespeech/t2s/exps/wavernn/synthesize.py b/paddlespeech/t2s/exps/wavernn/synthesize.py index e08c52b6..61723e03 100644 --- a/paddlespeech/t2s/exps/wavernn/synthesize.py +++ b/paddlespeech/t2s/exps/wavernn/synthesize.py @@ -15,13 +15,16 @@ import argparse import os from pathlib import Path +import jsonlines import numpy as np import paddle import soundfile as sf import yaml from paddle import distributed as dist +from timer import timer from yacs.config import CfgNode +from paddlespeech.t2s.datasets.data_table import DataTable from paddlespeech.t2s.models.wavernn import WaveRNN @@ -30,10 +33,7 @@ def main(): parser.add_argument("--config", type=str, help="GANVocoder config file.") parser.add_argument("--checkpoint", type=str, help="snapshot to load.") - parser.add_argument( - "--input", - type=str, - help="path of directory containing mel spectrogram (in .npy format)") + parser.add_argument("--test-metadata", type=str, help="dev data.") parser.add_argument("--output-dir", type=str, help="output dir.") parser.add_argument( "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") @@ -65,24 +65,43 @@ def main(): model.eval() - mel_dir = Path(args.input).expanduser() - output_dir = Path(args.output_dir).expanduser() + with jsonlines.open(args.test_metadata, 'r') as reader: + metadata = list(reader) + test_dataset = DataTable( + metadata, + fields=['utt_id', 'feats'], + converters={ + 'utt_id': None, + 'feats': np.load, + }) + output_dir = Path(args.output_dir) output_dir.mkdir(parents=True, exist_ok=True) - for file_path in sorted(mel_dir.iterdir()): - mel = np.load(str(file_path)) - mel = paddle.to_tensor(mel) - mel = mel.transpose([1, 0]) - # input shape is (T', C_aux) - audio = model.generate( - c=mel, - batched=config.inference.gen_batched, - target=config.inference.target, - overlap=config.inference.overlap, - mu_law=config.mu_law, - gen_display=True) - audio_path = output_dir / (os.path.splitext(file_path.name)[0] + ".wav") - sf.write(audio_path, audio.numpy(), samplerate=config.fs) - print("[synthesize] {} -> {}".format(file_path, audio_path)) + + N = 0 + T = 0 + for example in test_dataset: + utt_id = example['utt_id'] + mel = example['feats'] + mel = paddle.to_tensor(mel) # (T, C) + with timer() as t: + with paddle.no_grad(): + wav = model.generate( + c=mel, + batched=config.inference.gen_batched, + target=config.inference.target, + overlap=config.inference.overlap, + mu_law=config.mu_law, + gen_display=True) + wav = wav.numpy() + N += wav.size + T += t.elapse + speed = wav.size / t.elapse + rtf = config.fs / speed + print( + f"{utt_id}, mel: {mel.shape}, wave: {wav.shape}, time: {t.elapse}s, Hz: {speed}, RTF: {rtf}." 
+ ) + sf.write(str(output_dir / (utt_id + ".wav")), wav, samplerate=config.fs) + print(f"generation speed: {N / T}Hz, RTF: {config.fs / (N / T) }") if __name__ == "__main__": diff --git a/paddlespeech/t2s/exps/wavernn/train.py b/paddlespeech/t2s/exps/wavernn/train.py index d7bfc49b..aec745f7 100644 --- a/paddlespeech/t2s/exps/wavernn/train.py +++ b/paddlespeech/t2s/exps/wavernn/train.py @@ -16,6 +16,8 @@ import os import shutil from pathlib import Path +import jsonlines +import numpy as np import paddle import yaml from paddle import DataParallel @@ -25,9 +27,8 @@ from paddle.io import DistributedBatchSampler from paddle.optimizer import Adam from yacs.config import CfgNode -from paddlespeech.t2s.data import dataset +from paddlespeech.t2s.datasets.data_table import DataTable from paddlespeech.t2s.datasets.vocoder_batch_fn import WaveRNNClip -from paddlespeech.t2s.datasets.vocoder_batch_fn import WaveRNNDataset from paddlespeech.t2s.models.wavernn import WaveRNN from paddlespeech.t2s.models.wavernn import WaveRNNEvaluator from paddlespeech.t2s.models.wavernn import WaveRNNUpdater @@ -56,10 +57,26 @@ def train_sp(args, config): f"rank: {dist.get_rank()}, pid: {os.getpid()}, parent_pid: {os.getppid()}", ) - wavernn_dataset = WaveRNNDataset(args.data) - - train_dataset, dev_dataset = dataset.split( - wavernn_dataset, len(wavernn_dataset) - config.valid_size) + # construct dataset for training and validation + with jsonlines.open(args.train_metadata, 'r') as reader: + train_metadata = list(reader) + train_dataset = DataTable( + data=train_metadata, + fields=["wave", "feats"], + converters={ + "wave": np.load, + "feats": np.load, + }, ) + + with jsonlines.open(args.dev_metadata, 'r') as reader: + dev_metadata = list(reader) + dev_dataset = DataTable( + data=dev_metadata, + fields=["wave", "feats"], + converters={ + "wave": np.load, + "feats": np.load, + }, ) batch_fn = WaveRNNClip( mode=config.model.mode, @@ -92,7 +109,9 @@ def train_sp(args, config): collate_fn=batch_fn, batch_sampler=dev_sampler, num_workers=config.num_workers) + valid_generate_loader = DataLoader(dev_dataset, batch_size=1) + print("dataloaders done!") model = WaveRNN( @@ -160,10 +179,11 @@ def train_sp(args, config): def main(): # parse args and config and redirect to train_sp - parser = argparse.ArgumentParser(description="Train a WaveRNN model.") + parser = argparse.ArgumentParser(description="Train a HiFiGAN model.") parser.add_argument( "--config", type=str, help="config file to overwrite default config.") - parser.add_argument("--data", type=str, help="input") + parser.add_argument("--train-metadata", type=str, help="training data.") + parser.add_argument("--dev-metadata", type=str, help="dev data.") parser.add_argument("--output-dir", type=str, help="output dir.") parser.add_argument( "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") diff --git a/paddlespeech/t2s/models/wavernn/wavernn_updater.py b/paddlespeech/t2s/models/wavernn/wavernn_updater.py index e6064e4c..b2756d00 100644 --- a/paddlespeech/t2s/models/wavernn/wavernn_updater.py +++ b/paddlespeech/t2s/models/wavernn/wavernn_updater.py @@ -21,8 +21,6 @@ from paddle.io import DataLoader from paddle.nn import Layer from paddle.optimizer import Optimizer -from paddlespeech.t2s.datasets.vocoder_batch_fn import decode_mu_law -from paddlespeech.t2s.datasets.vocoder_batch_fn import label_2_float from paddlespeech.t2s.training.extensions.evaluator import StandardEvaluator from paddlespeech.t2s.training.reporter import report from 
paddlespeech.t2s.training.updaters.standard_updater import StandardUpdater @@ -156,31 +154,22 @@ class WaveRNNEvaluator(StandardEvaluator): losses_dict["loss"] = float(loss) - self.iteration = ITERATION - if self.iteration % self.config.gen_eval_samples_interval_steps == 0: - self.gen_valid_samples() - self.msg += ', '.join('{}: {:>.6f}'.format(k, v) for k, v in losses_dict.items()) self.logger.info(self.msg) def gen_valid_samples(self): - for i, (mel, wav) in enumerate(self.valid_generate_loader): + for i, item in enumerate(self.valid_generate_loader): if i >= self.config.generate_num: - print("before break") break print( '\n| Generating: {}/{}'.format(i + 1, self.config.generate_num)) - wav = wav[0] - if self.mode == 'MOL': - bits = 16 - else: - bits = self.config.model.bits - if self.config.mu_law and self.mode != 'MOL': - wav = decode_mu_law(wav, 2**bits, from_labels=True) - else: - wav = label_2_float(wav, bits) + + mel = item['feats'] + wav = item['wave'] + wav = wav.squeeze(0) + origin_save_path = self.valid_samples_dir / '{}_steps_{}_target.wav'.format( self.iteration, i) sf.write(origin_save_path, wav.numpy(), samplerate=self.config.fs) @@ -193,11 +182,20 @@ class WaveRNNEvaluator(StandardEvaluator): gen_save_path = str(self.valid_samples_dir / '{}_steps_{}_{}.wav'.format(self.iteration, i, batch_str)) - # (1, C_aux, T) -> (T, C_aux) - mel = mel.squeeze(0).transpose([1, 0]) + # (1, T, C_aux) -> (T, C_aux) + mel = mel.squeeze(0) gen_sample = self.model.generate( mel, self.config.inference.gen_batched, self.config.inference.target, self.config.inference.overlap, self.config.mu_law) sf.write( gen_save_path, gen_sample.numpy(), samplerate=self.config.fs) + + def __call__(self, trainer=None): + summary = self.evaluate() + for k, v in summary.items(): + report(k, v) + # gen samples at then end of evaluate + self.iteration = ITERATION + if self.iteration % self.config.gen_eval_samples_interval_steps == 0: + self.gen_valid_samples() From 1cc7905d51de62ba566c6315fbd2702c6892881f Mon Sep 17 00:00:00 2001 From: TianYuan Date: Tue, 25 Jan 2022 06:38:06 +0000 Subject: [PATCH 03/22] rm csmsc.py, test=tts --- paddlespeech/t2s/datasets/__init__.py | 1 - paddlespeech/t2s/datasets/csmsc.py | 56 --------------------------- 2 files changed, 57 deletions(-) delete mode 100644 paddlespeech/t2s/datasets/csmsc.py diff --git a/paddlespeech/t2s/datasets/__init__.py b/paddlespeech/t2s/datasets/__init__.py index acaf808a..fc64a82f 100644 --- a/paddlespeech/t2s/datasets/__init__.py +++ b/paddlespeech/t2s/datasets/__init__.py @@ -12,5 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. from .common import * -from .csmsc import * from .ljspeech import * diff --git a/paddlespeech/t2s/datasets/csmsc.py b/paddlespeech/t2s/datasets/csmsc.py deleted file mode 100644 index 9928a73a..00000000 --- a/paddlespeech/t2s/datasets/csmsc.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -import os -from pathlib import Path - -from paddle.io import Dataset - -__all__ = ["CSMSCMetaData"] - - -class CSMSCMetaData(Dataset): - def __init__(self, root): - """ - :param root: the path of baker dataset - """ - self.root = os.path.abspath(root) - records = [] - index = 1 - self.meta_info = ["file_path", "text", "pinyin"] - - metadata_path = os.path.join(root, "ProsodyLabeling/000001-010000.txt") - wav_dirs = os.path.join(self.root, "Wave") - with open(metadata_path, 'r', encoding='utf-8') as f: - while True: - line1 = f.readline().strip() - if not line1: - break - line2 = f.readline().strip() - strs = line1.split() - wav_fname = line1.split()[0].strip() + '.wav' - wav_filepath = os.path.join(wav_dirs, wav_fname) - text = strs[1].strip() - pinyin = line2 - records.append([wav_filepath, text, pinyin]) - - self.records = records - - def __getitem__(self, i): - return self.records[i] - - def __len__(self): - return len(self.records) - - def get_meta_info(self): - return self.meta_info From 2071774d813a5f12628b2e9eea3b242567208171 Mon Sep 17 00:00:00 2001 From: TianYuan Date: Tue, 25 Jan 2022 09:25:54 +0000 Subject: [PATCH 04/22] add wavernn in synthesize_e2e, test=tts --- examples/csmsc/tts3/local/synthesize_e2e.sh | 21 ++++++++++++++++++ paddlespeech/t2s/exps/synthesize_e2e.py | 21 +++++++++++++----- paddlespeech/t2s/models/wavernn/wavernn.py | 24 +++++++++++++++++++++ 3 files changed, 61 insertions(+), 5 deletions(-) diff --git a/examples/csmsc/tts3/local/synthesize_e2e.sh b/examples/csmsc/tts3/local/synthesize_e2e.sh index d4744486..49101ea0 100755 --- a/examples/csmsc/tts3/local/synthesize_e2e.sh +++ b/examples/csmsc/tts3/local/synthesize_e2e.sh @@ -89,3 +89,24 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then --inference_dir=${train_output_path}/inference \ --phones_dict=dump/phone_id_map.txt fi + + +# wavernn +if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then + echo "in wavernn syn_e2e" + FLAGS_allocator_strategy=naive_best_fit \ + FLAGS_fraction_of_gpu_memory_to_use=0.01 \ + python3 ${BIN_DIR}/../synthesize_e2e.py \ + --am=fastspeech2_csmsc \ + --am_config=${config_path} \ + --am_ckpt=${train_output_path}/checkpoints/${ckpt_name} \ + --am_stat=dump/train/speech_stats.npy \ + --voc=wavernn_csmsc \ + --voc_config=wavernn_test/default.yaml \ + --voc_ckpt=wavernn_test/snapshot_iter_5000.pdz \ + --voc_stat=wavernn_test/feats_stats.npy \ + --lang=zh \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=${train_output_path}/test_e2e \ + --phones_dict=dump/phone_id_map.txt +fi diff --git a/paddlespeech/t2s/exps/synthesize_e2e.py b/paddlespeech/t2s/exps/synthesize_e2e.py index 9f58579f..1f3f6773 100644 --- a/paddlespeech/t2s/exps/synthesize_e2e.py +++ b/paddlespeech/t2s/exps/synthesize_e2e.py @@ -59,6 +59,10 @@ model_alias = { "paddlespeech.t2s.models.hifigan:HiFiGANGenerator", "hifigan_inference": "paddlespeech.t2s.models.hifigan:HiFiGANInference", + "wavernn": + "paddlespeech.t2s.models.wavernn:WaveRNN", + "wavernn_inference": + "paddlespeech.t2s.models.wavernn:WaveRNNInference", } @@ -148,10 +152,16 @@ def evaluate(args): voc_name = args.voc[:args.voc.rindex('_')] voc_class = dynamic_import(voc_name, model_alias) voc_inference_class = dynamic_import(voc_name + '_inference', model_alias) - voc = voc_class(**voc_config["generator_params"]) - voc.set_state_dict(paddle.load(args.voc_ckpt)["generator_params"]) - voc.remove_weight_norm() - voc.eval() + if voc_name != 
'wavernn': + voc = voc_class(**voc_config["generator_params"]) + voc.set_state_dict(paddle.load(args.voc_ckpt)["generator_params"]) + voc.remove_weight_norm() + voc.eval() + else: + voc = voc_class(**voc_config["model"]) + voc.set_state_dict(paddle.load(args.voc_ckpt)["main_params"]) + voc.eval() + voc_mu, voc_std = np.load(args.voc_stat) voc_mu = paddle.to_tensor(voc_mu) voc_std = paddle.to_tensor(voc_std) @@ -307,7 +317,8 @@ def main(): default='pwgan_csmsc', choices=[ 'pwgan_csmsc', 'pwgan_ljspeech', 'pwgan_aishell3', 'pwgan_vctk', - 'mb_melgan_csmsc', 'style_melgan_csmsc', 'hifigan_csmsc' + 'mb_melgan_csmsc', 'style_melgan_csmsc', 'hifigan_csmsc', + 'wavernn_csmsc' ], help='Choose vocoder type of tts task.') diff --git a/paddlespeech/t2s/models/wavernn/wavernn.py b/paddlespeech/t2s/models/wavernn/wavernn.py index 5d1cbd39..2c6941b0 100644 --- a/paddlespeech/t2s/models/wavernn/wavernn.py +++ b/paddlespeech/t2s/models/wavernn/wavernn.py @@ -590,3 +590,27 @@ class WaveRNN(nn.Layer): for i in range(size): bar += '█' if i <= done else '░' return bar + + +class WaveRNNInference(nn.Layer): + def __init__(self, normalizer, wavernn): + super().__init__() + self.normalizer = normalizer + self.wavernn = wavernn + + def forward(self, + logmel, + batched: bool=True, + target: int=12000, + overlap: int=600, + mu_law: bool=True, + gen_display: bool=False): + normalized_mel = self.normalizer(logmel) + wav = self.wavernn.generate( + normalized_mel, + batched=batched, + target=target, + overlap=overlap, + mu_law=mu_law, + gen_display=gen_display) + return wav From 001afee6440311336bdca5c391614c5421f59b9a Mon Sep 17 00:00:00 2001 From: TianYuan Date: Wed, 26 Jan 2022 07:27:42 +0000 Subject: [PATCH 05/22] fix wavernn dygraph to static , test=tts --- examples/csmsc/tts3/local/inference.sh | 11 ++++ examples/csmsc/tts3/local/synthesize_e2e.sh | 3 +- paddlespeech/t2s/exps/inference.py | 2 +- paddlespeech/t2s/models/wavernn/wavernn.py | 73 ++++++++++++--------- 4 files changed, 55 insertions(+), 34 deletions(-) diff --git a/examples/csmsc/tts3/local/inference.sh b/examples/csmsc/tts3/local/inference.sh index 7c58980c..9322cfd6 100755 --- a/examples/csmsc/tts3/local/inference.sh +++ b/examples/csmsc/tts3/local/inference.sh @@ -48,4 +48,15 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then --text=${BIN_DIR}/../sentences.txt \ --output_dir=${train_output_path}/pd_infer_out \ --phones_dict=dump/phone_id_map.txt +fi + +# wavernn +if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then + python3 ${BIN_DIR}/../inference.py \ + --inference_dir=${train_output_path}/inference \ + --am=fastspeech2_csmsc \ + --voc=wavernn_csmsc \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=${train_output_path}/pd_infer_out \ + --phones_dict=dump/phone_id_map.txt fi \ No newline at end of file diff --git a/examples/csmsc/tts3/local/synthesize_e2e.sh b/examples/csmsc/tts3/local/synthesize_e2e.sh index 49101ea0..d1fadf77 100755 --- a/examples/csmsc/tts3/local/synthesize_e2e.sh +++ b/examples/csmsc/tts3/local/synthesize_e2e.sh @@ -108,5 +108,6 @@ if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then --lang=zh \ --text=${BIN_DIR}/../sentences.txt \ --output_dir=${train_output_path}/test_e2e \ - --phones_dict=dump/phone_id_map.txt + --phones_dict=dump/phone_id_map.txt \ + --inference_dir=${train_output_path}/inference fi diff --git a/paddlespeech/t2s/exps/inference.py b/paddlespeech/t2s/exps/inference.py index 37afd0ab..8044c445 100644 --- a/paddlespeech/t2s/exps/inference.py +++ b/paddlespeech/t2s/exps/inference.py @@ -54,7 
+54,7 @@ def main(): default='pwgan_csmsc', choices=[ 'pwgan_csmsc', 'mb_melgan_csmsc', 'hifigan_csmsc', 'pwgan_aishell3', - 'pwgan_vctk' + 'pwgan_vctk', 'wavernn_csmsc' ], help='Choose vocoder type of tts task.') # other diff --git a/paddlespeech/t2s/models/wavernn/wavernn.py b/paddlespeech/t2s/models/wavernn/wavernn.py index 2c6941b0..f30879ed 100644 --- a/paddlespeech/t2s/models/wavernn/wavernn.py +++ b/paddlespeech/t2s/models/wavernn/wavernn.py @@ -76,6 +76,7 @@ class MelResNet(nn.Layer): Tensor Output tensor (B, res_out_dims, T). ''' + x = self.conv_in(x) x = self.batch_norm(x) x = F.relu(x) @@ -230,6 +231,7 @@ class WaveRNN(nn.Layer): self.rnn1 = nn.GRU(rnn_dims, rnn_dims) self.rnn2 = nn.GRU(rnn_dims + self.aux_dims, rnn_dims) + self._to_flatten += [self.rnn1, self.rnn2] self.fc1 = nn.Linear(rnn_dims + self.aux_dims, fc_dims) @@ -326,17 +328,17 @@ class WaveRNN(nn.Layer): output = [] start = time.time() - rnn1 = self.get_gru_cell(self.rnn1) - rnn2 = self.get_gru_cell(self.rnn2) + # pseudo batch # (T, C_aux) -> (1, C_aux, T) c = paddle.transpose(c, [1, 0]).unsqueeze(0) - - wave_len = (paddle.shape(c)[-1] - 1) * self.hop_length + T = paddle.shape(c)[-1] + wave_len = (T - 1) * self.hop_length # TODO remove two transpose op by modifying function pad_tensor c = self.pad_tensor( c.transpose([0, 2, 1]), pad=self.aux_context_window, side='both').transpose([0, 2, 1]) + c, aux = self.upsample(c) if batched: @@ -344,7 +346,13 @@ class WaveRNN(nn.Layer): c = self.fold_with_overlap(c, target, overlap) aux = self.fold_with_overlap(aux, target, overlap) - b_size, seq_len, _ = paddle.shape(c) + # for dygraph to static graph, if use seq_len of `b_size, seq_len, _ = paddle.shape(c)` in for + # will not get TensorArray + # see https://www.paddlepaddle.org.cn/documentation/docs/zh/guides/04_dygraph_to_static/case_analysis_cn.html#list-lodtensorarray + # b_size, seq_len, _ = paddle.shape(c) + b_size = paddle.shape(c)[0] + seq_len = paddle.shape(c)[1] + h1 = paddle.zeros([b_size, self.rnn_dims]) h2 = paddle.zeros([b_size, self.rnn_dims]) x = paddle.zeros([b_size, 1]) @@ -354,14 +362,20 @@ class WaveRNN(nn.Layer): for i in range(seq_len): m_t = c[:, i, :] - - a1_t, a2_t, a3_t, a4_t = (a[:, i, :] for a in aux_split) + # for dygraph to static graph + # a1_t, a2_t, a3_t, a4_t = (a[:, i, :] for a in aux_split) + a1_t = aux_split[0][:, i, :] + a2_t = aux_split[1][:, i, :] + a3_t = aux_split[2][:, i, :] + a4_t = aux_split[3][:, i, :] x = paddle.concat([x, m_t, a1_t], axis=1) x = self.I(x) - h1, _ = rnn1(x, h1) + # use GRUCell here + h1, _ = self.rnn1[0].cell(x, h1) x = x + h1 inp = paddle.concat([x, a2_t], axis=1) - h2, _ = rnn2(inp, h2) + # use GRUCell here + h2, _ = self.rnn2[0].cell(inp, h2) x = x + h2 x = paddle.concat([x, a3_t], axis=1) @@ -413,15 +427,6 @@ class WaveRNN(nn.Layer): # 增加 C_out 维度 return output.unsqueeze(-1) - def get_gru_cell(self, gru): - gru_cell = nn.GRUCell(gru.input_size, gru.hidden_size) - gru_cell.weight_hh = gru.weight_hh_l0 - gru_cell.weight_ih = gru.weight_ih_l0 - gru_cell.bias_hh = gru.bias_hh_l0 - gru_cell.bias_ih = gru.bias_ih_l0 - - return gru_cell - def _flatten_parameters(self): [m.flatten_parameters() for m in self._to_flatten] @@ -438,7 +443,9 @@ class WaveRNN(nn.Layer): ---------- Tensor ''' - b, t, c = paddle.shape(x) + b, t, _ = paddle.shape(x) + # for dygraph to static graph + c = x.shape[-1] total = t + 2 * pad if side == 'both' else t + pad padded = paddle.zeros([b, total, c]) if side == 'before' or side == 'both': @@ -516,7 +523,7 @@ class WaveRNN(nn.Layer): y : 
Tensor Batched sequences of audio samples shape=(num_folds, target + 2 * overlap) - dtype=paddle.float64 + dtype=paddle.float32 overlap : int Timesteps for both xfade and rnn warmup @@ -525,7 +532,7 @@ class WaveRNN(nn.Layer): Tensor audio samples in a 1d array shape=(total_len) - dtype=paddle.float64 + dtype=paddle.float32 Details ---------- @@ -545,19 +552,19 @@ class WaveRNN(nn.Layer): ''' # num_folds = (total_len - overlap) // (target + overlap) - num_folds, length = y.shape + num_folds, length = paddle.shape(y) target = length - 2 * overlap total_len = num_folds * (target + overlap) + overlap # Need some silence for the run warmup slience_len = overlap // 2 fade_len = overlap - slience_len - slience = paddle.zeros([slience_len], dtype=paddle.float64) - linear = paddle.ones([fade_len], dtype=paddle.float64) + slience = paddle.zeros([slience_len], dtype=paddle.float32) + linear = paddle.ones([fade_len], dtype=paddle.float32) # Equal power crossfade # fade_in increase from 0 to 1, fade_out reduces from 1 to 0 - t = paddle.linspace(-1, 1, fade_len, dtype=paddle.float64) + t = paddle.linspace(-1, 1, fade_len, dtype=paddle.float32) fade_in = paddle.sqrt(0.5 * (1 + t)) fade_out = paddle.sqrt(0.5 * (1 - t)) # Concat the silence to the fades @@ -568,7 +575,7 @@ class WaveRNN(nn.Layer): y[:, :overlap] *= fade_in y[:, -overlap:] *= fade_out - unfolded = paddle.zeros([total_len], dtype=paddle.float64) + unfolded = paddle.zeros([total_len], dtype=paddle.float32) # Loop to add up all the samples for i in range(num_folds): @@ -606,11 +613,13 @@ class WaveRNNInference(nn.Layer): mu_law: bool=True, gen_display: bool=False): normalized_mel = self.normalizer(logmel) + wav = self.wavernn.generate( - normalized_mel, - batched=batched, - target=target, - overlap=overlap, - mu_law=mu_law, - gen_display=gen_display) + normalized_mel, ) + # batched=batched, + # target=target, + # overlap=overlap, + # mu_law=mu_law, + # gen_display=gen_display) + return wav From 2a42421a63fa22a1bb7547fffc19ecadbe3633f6 Mon Sep 17 00:00:00 2001 From: huangyuxin Date: Thu, 27 Jan 2022 06:06:08 +0000 Subject: [PATCH 06/22] cli add ds2-librispeech offline, fix versionm, test=asr --- paddlespeech/cli/asr/infer.py | 35 ++++++++++++------- paddlespeech/cli/utils.py | 2 +- paddlespeech/s2t/io/sampler.py | 2 +- .../t2s/modules/transformer/repeat.py | 2 +- 4 files changed, 26 insertions(+), 15 deletions(-) diff --git a/paddlespeech/cli/asr/infer.py b/paddlespeech/cli/asr/infer.py index 447b0a1a..64b32520 100644 --- a/paddlespeech/cli/asr/infer.py +++ b/paddlespeech/cli/asr/infer.py @@ -91,6 +91,20 @@ pretrained_models = { 'lm_md5': '29e02312deb2e59b3c8686c7966d4fe3' }, + "deepspeech2offline_librispeech-en-16k": { + 'url': + 'https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr0/asr0_deepspeech2_librispeech_ckpt_0.1.1.model.tar.gz', + 'md5': + 'f5666c81ad015c8de03aac2bc92e5762', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2/checkpoints/avg_1', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/en_lm/common_crawl_00.prune01111.trie.klm', + 'lm_md5': + '099a601759d467cd0a8523ff939819c5' + }, } model_alias = { @@ -328,18 +342,15 @@ class ASRExecutor(BaseExecutor): audio = self._inputs["audio"] audio_len = self._inputs["audio_len"] if "deepspeech2online" in model_type or "deepspeech2offline" in model_type: - result_transcripts = self.model.decode( - audio, - audio_len, - self.text_feature.vocab_list, - decoding_method=cfg.decoding_method, - lang_model_path=cfg.lang_model_path, - beam_alpha=cfg.alpha, - beam_beta=cfg.beta, - 
beam_size=cfg.beam_size, - cutoff_prob=cfg.cutoff_prob, - cutoff_top_n=cfg.cutoff_top_n, - num_processes=cfg.num_proc_bsearch) + decode_batch_size = audio.shape[0] + self.model.decoder.init_decoder( + decode_batch_size, self.text_feature.vocab_list, + cfg.decoding_method, cfg.lang_model_path, cfg.alpha, cfg.beta, + cfg.beam_size, cfg.cutoff_prob, cfg.cutoff_top_n, + cfg.num_proc_bsearch) + + result_transcripts = self.model.decode(audio, audio_len) + self.model.decoder.del_decoder() self._outputs["result"] = result_transcripts[0] elif "conformer" in model_type or "transformer" in model_type: diff --git a/paddlespeech/cli/utils.py b/paddlespeech/cli/utils.py index 4f2c8906..d7dcc90c 100644 --- a/paddlespeech/cli/utils.py +++ b/paddlespeech/cli/utils.py @@ -34,7 +34,7 @@ from .entry import commands try: from .. import __version__ except ImportError: - __version__ = 0.0.0 # for develop branch + __version__ = "0.0.0" # for develop branch requests.adapters.DEFAULT_RETRIES = 3 diff --git a/paddlespeech/s2t/io/sampler.py b/paddlespeech/s2t/io/sampler.py index ac55af12..89752bb9 100644 --- a/paddlespeech/s2t/io/sampler.py +++ b/paddlespeech/s2t/io/sampler.py @@ -51,7 +51,7 @@ def _batch_shuffle(indices, batch_size, epoch, clipped=False): """ rng = np.random.RandomState(epoch) shift_len = rng.randint(0, batch_size - 1) - batch_indices = list(zip(* [iter(indices[shift_len:])] * batch_size)) + batch_indices = list(zip(*[iter(indices[shift_len:])] * batch_size)) rng.shuffle(batch_indices) batch_indices = [item for batch in batch_indices for item in batch] assert clipped is False diff --git a/paddlespeech/t2s/modules/transformer/repeat.py b/paddlespeech/t2s/modules/transformer/repeat.py index 0325a638..f738b556 100644 --- a/paddlespeech/t2s/modules/transformer/repeat.py +++ b/paddlespeech/t2s/modules/transformer/repeat.py @@ -41,4 +41,4 @@ def repeat(N, fn): MultiSequential Repeated model instance. 
""" - return MultiSequential(* [fn(n) for n in range(N)]) + return MultiSequential(*[fn(n) for n in range(N)]) From 26f6074168188c88e974d27964f369c8ce68d85a Mon Sep 17 00:00:00 2001 From: Jackwaterveg <87408988+Jackwaterveg@users.noreply.github.com> Date: Thu, 27 Jan 2022 14:58:52 +0800 Subject: [PATCH 07/22] Update released_model.md (#1401) --- docs/source/released_model.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/released_model.md b/docs/source/released_model.md index 3310bfb2..5d29968e 100644 --- a/docs/source/released_model.md +++ b/docs/source/released_model.md @@ -9,6 +9,7 @@ Acoustic Model | Training Data | Token-based | Size | Descriptions | CER | WER | [Ds2 Offline Aishell ASR0 Model](https://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_aishell_ckpt_0.1.1.model.tar.gz)| Aishell Dataset | Char-based | 306 MB | 2 Conv + 3 bidirectional GRU layers| 0.064 |-| 151 h | [Ds2 Offline Aishell ASR0](../../examples/aishell/asr0) [Conformer Offline Aishell ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/aishell/asr1/asr1_conformer_aishell_ckpt_0.1.1.model.tar.gz) | Aishell Dataset | Char-based | 284 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention rescoring | 0.056 |-| 151 h | [Conformer Offline Aishell ASR1](../../examples/aishell/asr1) [Transformer Aishell ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/aishell/asr1/asr1_transformer_aishell_ckpt_0.1.1.model.tar.gz) | Aishell Dataset | Char-based | 128 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: Attention rescoring | 0.0523 || 151 h | [Transformer Aishell ASR1](../../examples/aishell/asr1) +[Ds2 Offline Librispeech ASR0 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr0/asr0_deepspeech2_librispeech_ckpt_0.1.1.model.tar.gz)| Librispeech Dataset | Char-based | 518 MB | 2 Conv + 3 bidirectional LSTM layers| - |0.0725| 960 h | [Ds2 Offline Librispeech ASR0](../../examples/librispeech/asr0) [Conformer Librispeech ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr1/asr1_conformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 191 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention rescoring |-| 0.0337 | 960 h | [Conformer Librispeech ASR1](../../example/librispeech/asr1) [Transformer Librispeech ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr1/asr1_transformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 131 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: Attention rescoring |-| 0.0381 | 960 h | [Transformer Librispeech ASR1](../../example/librispeech/asr1) [Transformer Librispeech ASR2 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr2/asr2_transformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 131 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: JoinCTC w/ LM |-| 0.0240 | 960 h | [Transformer Librispeech ASR2](../../example/librispeech/asr2) From 8b086684a588e467a147d65e22b0dea5d16965e4 Mon Sep 17 00:00:00 2001 From: WilliamZhang06 <97937340+WilliamZhang06@users.noreply.github.com> Date: Thu, 27 Jan 2022 15:03:45 +0800 Subject: [PATCH 08/22] updated PaddleSpeech Server diagram (#1407) --- ...addleSpeech_Server_architecture_diagram.png | Bin 0 -> 322373 bytes .../arch/PaddleSpeech_Server_class_diagram.png | Bin 0 -> 160139 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/images/arch/PaddleSpeech_Server_architecture_diagram.png 
 create mode 100644 docs/images/arch/PaddleSpeech_Server_class_diagram.png

diff --git a/docs/images/arch/PaddleSpeech_Server_architecture_diagram.png b/docs/images/arch/PaddleSpeech_Server_architecture_diagram.png
new file mode 100644
index 0000000000000000000000000000000000000000..16f8ddccb48f5219f89560a021746bcd5c07efb3
GIT binary patch
literal 322373
[base85-encoded PNG image data omitted]
zZ6Adt4*56WQ?t}FnOuw1##xc4EgPNNv>Y{%Epj!H*5Bc2xl@zRXKjky^WJ6N`Q*wM zAatzi7}CaM0S;+VvZf()8dKk;1Jjj>}(Ft%4@03MU)Ogov6p%BS}= z(>~7XIP~($6e=L|T!Z{oYvNMVA~SjHnrBiKY?O-w@m1a?WmhW8)ha_eHKvc=B%DtY z^oTcElU~MsbkGfAuA%eUnj1ZGvq&{y?JMH`f)$TH>z_1I73l1z*z>HM6BY=kh97?) ztuKgu!wj2M<}&(`oUYFSj%%FD5dziAy7)uYg4ycvgmHL9m?dIx;j0_5mUa+fHtk!- zsHsU?BI@xtGG~6foXLj+ztUR1_~!(YP5}nly5#PE5Ws(|Z?AxkmJDDVSaRgu`QH>E zeDP);L9ajYx^$hkVTgS=0RE>!9V59|HHn=(X2Q2^$&BqU9NBP8o%z}^tmHtur}RG`7a3g$-v@l6eW5F+D$!k7P|kVK%+_RJ1Ya; z3tIbHqPNFI80#rP^AXvmY8=`5{w)iuy+05&+tsDvgoI~?-@M&I8|YVjIhsD>7+akA zCzQABd-!;5(eoY`Cc~`To-~D8Z%5tezlUFq;OmdKe&afQ%!vzpvOn04xZe{u9*&G5 zc!cXOLXMjJVvXMn+Qd}qMR5r$dV8G+*R(w#dXzQCM)tVn;gk0G>1A9hSX>b~lbG5q zl7SjrhLS!;i(joy)H~vMb8YOVg=LVdK0``eIze#kh9t zP>s==d}ids%;@FyrTp)q;F^o!_A}Y(tv;)k>QfBhO=kbGKIG$6f72m*BBGAX7dJu^ z)Lud`tiG?YB@;PBkgMSzfaKbI!g#uf3THO`>v?S#8V`m=SYw)+pUe=>&+99?OfXP( zwypiP6e%w?w)*MJ>>sPo0l*3izJ9CQyKwH0f`vl%f#9dn4F{gQ9I=N!b4|5YYI$ca z9`F$$KSVZ-OKQIyN`|`12)mm2Re}|ieeWJ+fh+GPGR_z4mHwN*xp4W*H%j5?U?Sj( zs5O1XUF>Mr3l{-vS8m>sunQ<~ojaKJgO9<_BY%lKi~>NI)1;p|`W7vE&5nfD$Wl`# zuA@J3W}iPRr9Sm(r&`nY%yhx)r*Rp6NRmunfoC zAy<)c@c=`w=GVT^XpBpt_)_!kH29cQauZnjwNy^vSceqrBFmu(G|I1%tQB4JhzCv5 z1o!prq>f}^9fv(9UHWc^d+I=Cd;cZ+$K4Vjna?ZN0f`6OOO$(NG5-YD-_N==4l@P6I&Z9ez+Cz4wEm{LlYYc>m0Y7>_*r6wv zD$CP>2Xo)S6lE$PKBIp^MOCHzck>o~{eVR%UFW>EWW8IUwD(D^(AU9A^Mz*_G&f8- z)ww~r9#Pgxd~wOg2Po~@rz^?B`wX|`*M2T3TJ(od8?ga`eCYnQ`A1}-W?OXNW{xXR zAygowYWDRrZb^buH-tSj|NEnTl~H;tuF8AwkNR=?`_dODoX(Xag&I|CxRi~&@j)@5 z)<#MKzBxs-P~EP$K-6`@rOf*Fc8+UL8Z5S1Vh-RFSKA%ao@9~D z%D-3JkeC|3colW@$UNDDZze-KkGQRNh2t-e2qfp{W!EwLCKAE`C+w4VOZ&HD5Bp$w zc%5lS={u&y0r-_?;FySM$HML`kgON}5LBecb;>2U+kSN;AlJO*MoV|%-_ zH}pV{v|d^)e}I`YV||m4nOnbx??Hg^H*VEzCE^x~}4$TlOHw!Y_9-w`T8_kL}bk zj8$}{{zX#gA3NydhfA2iuFsgh8>&eKtcG@jO7Y$n0s_Dm0^fdS{ZXSm#X{!_kmk=* znb~m7HCD$W+nn=QtrPBqAr;69DsHHzG^U1O#KTd^}zbgC|FjWf8tUD1`j9^Y$B z#A#y*)Zx})ov5323n#k5<>HdEV`1P{;`Fbk)&em>-hU#be(tiy-RHsGq8q|04^$A{ zaq^Z2LyRBr^Vg_uh#;VywIkh%O8c@^UB55<>)4$p(RdcN1tx{lf8QW=+UlxD;rNMgo>t3q8plY<{nv@o$e1K z(!CY>9WSPJOh{aJuOD+@z<9LMn%tf*9EH7_0=UmvaZH#*6kKELDB2Z#SLrZPT^y;x z?}~?TIckI;QCpn~L_4F8VU^R~)Aqm5_^=!fDmQs1+wbsGu7ML}?&rT(Tk^UFZjY3q zm({87vulI-v6>R?Ars;)=0P2Dd>}1}#e&UQ&t62*1$@B-v`}5QRLaDC^SirUOJt-M zZ@M1G-lLfG?p(d)Swr&Qw8057A4i4kLo+b2fR$Vei>+`wS=BN93CZ6Z6vTtOe@KN| zh<%tg-P^OwWrsYa=m-3%E8C08aHVpOIML|Cpsr|KGZ*QPe7K9$G` zW+l%y%c`)0i0I{0`Y8Jld3K=AEsT7OA$;kW=IiKn1+@5hstY5yulXQaQtOKQlw?{n zC_>(JAwjBx{kIM1_S?}dhNyBRN4St%xa4*WSA6!RKU?kUnS22mg194Ua*Xer1r)4wj-*E%x&HHv<7d}?3L0?|nmF7RkMDqkX&Uc(4$>c*|7eDH;8QR6r*?nM@#36{5wDQDGY=%3M_`^quG z2*i6ziMv@B98>`p+&-=qb1NT@))!lS@ctC43lLrsjn?RdQ(BY|1Im4qEqMM(hHnBn zk%d_iyoK7Q_6ytVfx1Z^$9w9yZ8v@LxRjFvD=mwMFVv>K?BR8kFgxSUARIjkts#6m zxqCo%JF(i2xi@**e?a=4{+YEh&>v-+xIo1dR^wQpB)4ua@bKyn{~GJmS< zN{$gQXrN1P{-g)?BEYb8$52KNI1%0O_DMt2`Rz_m7CBz>xV`)k-r)dxr#1z?roPOB z0X?zA(mJvc>RT1lSP|gsHEgX)UqCU?TUdoH4PlK3jM+#EBe!J4*9WXIM-Dston;Q; z_N&m%zh~mT*xXt2TP+wk#c5`xwHVZCN408t7uFKC<-fn=79!=oMGXk1Q90>HC+6WM zK0Nd45AcQ62Q`zi6gm?kusdQ@`z|W5^yPB-BwM05dsf<*B2%!(y!pOBqgU1ztd~6U zf4my`lzij){{Q&Qe_;WY^UBhm;QlPommu?8>lRk`K+Fv;X8@;g>E!g`-)dD*0ueW7 z0OVid{P0gPQCkOK2R^-P2k!~>&!YPb7=~-R&k%kbnxKp+Y_2QrNfAv9Gw{Osvb2N* zvV|_x)-GSt9?G)asn8pAb0zV^47NjBm4n;C7-Qe>#}n0EkDO(RY9+L>Ay1{civG0K z?B3RALELGqLkrSEvTjkmTr!pKO}`CWx>`;PMa!FG0&v-!b5%4jifWWDcWGNrK;j zV5$R-8V(ekKmI4jV#eW_M<6LqpFnNkRHCZg$>YYe(%cVfH_TQ2aK@ijrLYY_QtsXV zFzMb^o+<%UY~S|&VZZxa0DJ(_3g~kuv@1`s5Q5Wtj`r# zBbjUvL|9lAvjsRHZZI}0galJx@iF(#!7uG9lFv1@3DTbj^#$ zO2v;SPP3;~G(~wR7p3v;A&=6$ou?iM$WHnjP8JX#ePsqQ0*V&p?j`wK>fQ13M~ZiA z0eXRtxN`)X>TkXFjsOUXgqe2FXzwC3@^>EsD;`dCb 
zY)TtYKJZN$(7+svt@R@Mgzs@MB;%D3qgD!k1GnqpqdlL^2j_5fOK8-|XifLe;+p&9 zQ~8we3DLG=%t@xL{*}LyrjPvbw$8}MSq?T_Gaj-9wN?$mNc%F2=Re$BX|XWGYx>H0 zBEFsvJ(FwhIg@W1v9M6P9!2h23YFt%RlS9{)1-2?ww2pc%Fni_Pm>^aSV8(>Vvr#E z{M?(pN+$qh0RZJjHPSwL7qjg7$56=ow~X88GqZp?J)^7DDk;BT;b;L8b%T< z;VnV{xeur1zTWjm(4E*%$XnA(c~6di76ny|&@;srwX#Cm9#9UBY^ zHhoI~6W-y04FUmAB%(A&ty>QXUyvu2^Mk;IqjeEPpk&dpelhmkhG6%Wms4p><3ldo ztKyOU7>z@H_}SYAxtzb>s34rvlM8LsZ|*&-cv~U-WcHaHH4Z{ZE`u}9!+k6ovB9FR z0{y9lp4r675zU2ygn_0Gz^!MeZ#|qvJ%|_Ak4ZGIVILCt%{JdMD8GD+rgW+ zpZrDg(hko%u;}?#>e8$qtjS~kp#u7(`l(;t=9T-hmr2Y-)H@*B@ftH1dgj)!9=j^_ zV>|hE(6N0AMUHyKOoTe!zMU?$GF{;V0Q;N z0UJLcvq>plvtn-iw4X6vVyYNjOg7eL*&cF)z_!fhm)qhjW&-AoiG9=K=*L5JX5x=z zI&*z)Xm~2D8#`Ye%6Uh-MZUt%B3{<$lZ%-5$*~w#LGKl(t}nZ&)f)k5#H?%bPp!Y) zB_pw&bc9&E3T9WkY(WjaH2@G?x^%7Y_dP({?r3RCRsN@c{tJbx?xsy z_-u+DKS<$Sf+K06BK2s#RJD=w@Dq1GyJk<3>V*Hh`(f$!p)`?A!iY+R%JB-AG~y%v zqbw{HiaIM;A9#{N+V&8_`Eq8|s;;&oR4l#$1py~fbz8ftIkFpF&^!{Y|j zxz?`igT)1|zO$qQh1MfOYwyWE_7dp8QIu=bmry+Ihb-?y!EY3hqX7oWn=r?B>kZ$Y zF#104gbNL^SCD&NVDAoEk@O+1&+aj@u%pYl(|T|I7D4Gl#$XUy=kyi#m0#tdC*4CC z*&5@TbsQvR=cVu?XFRpX8q-ag{#GE{KRbh2XG>>E3Jy+D6v z<4mFPF&a;$RGv-hOp-IU?My_)-9dbIkx8ry?ktQAZ$@QisK<%Kq2`=SBMK>aB-ZCt zD~_5&A>e7g!8WEb?NP%fKN#uf@8U_`HdxkKD^1N*XXEWd)rCzzf7vIcHV;sU2@h`V zvPANacA^K&{{Qsy?D-Vj7|du}mIToFPJ&(Y2Ak491m><^i6tLkM?WvaJ(y`bsZk6#LkxXmW_l4OtEm74*k9hFE>m8{g0GVdg3?aGquH><01ZL`>+$&FD? zPn7){qsJWHg-JRw_`$%Tn?-uwx{5$J*e;ZHBk7?+FOy<{>6J6PVb5vsCm7Hj$0v8| z#9zqmX$$mZW?@B?=04L^+wogiiDJ|NNQ>jXF~B$+pqjK9LSRi1JITPIt+(T922lKj zpwjTf&XEV#y7kK~85ALWJ-3(7?c1bHD}OFe-H&g5upIJ^Dl{0^lwn0g*_5Z2Bt5=xFH;GNj==J6nd%Hu7F5yVG{#@N@b}3trKgH* zEEs}oy=|dj4fV6Ni6K$qA!{8e-d&UYv}8cyHl#udnP>Eu_&LVM@wl;@G9V(!NL<(={;sUc1M zr)%vRlC1k$R<`_0a&Yz6sf zeRK8RnTSz3Q%ec_4d5`Ygy)Bp-9_}oRod!ed5&s|!(X!z^{1l4Ud{k@fBVGxHqNhB zklf29_XIiq50Zfwo5LsQa5x}*Yn-r%=TM-2%^F_j77d6PO zV|dYlPoabBxwlGw9r{?5_s=N&a7T|HjyhoW1;eLZAWmBA`#5^I^AweSC$(5!$qwZb^ZF#yEZl{?iz!ytttip zYLA9kI?G{bRr_VO+EsCoFX@@_w}k2IueG|qGZR>y%bH598{867>po>pd(b~_ciAfk zh|nsP(sR-WTC6iN8>K5kN=Llsa@-wMC8}h5n_m=Cgdy9-|}=a zJ-fpBY&U6!`_b}9e-l>CWU(f?Nv87_idV`TmEXCSlx+i?#oe=i%epx}2pgpztl8>~ z%ffeh4Y>@%>w?KC<%6Esp_?H%Nu+V%K$ zE3N40w425gi?2afGX)2efmD46u*Z3kO4CEDWeuQwhNw|-yYyX%hda9ickXKD|HWsa zt2tengz3*SpT!~ftv_sd&-;|Hku`$b{3VKT9@G00vSENL&kwG1dmem1YY4|i42sP` z2hudg;EIee_*XXM1wWErs zwkQZj0pKzZ03-|cjheOjj0m&)s`>jfq(75$Fx>w;`9ykX}+Q4 zl~w%O?XM!-prS(hd$NL5Wr?MQP(ZpGG}$=6&#kzok{s*$V|Lp!fbMPLq8tZr7*~j^ z98r!56^w5cKnKZhGJ8k4N!-JyuBZD7XYi`HNzHl~9i1#I{~G+1pQVofEEw$VsY)Ju z?8w~L9PB33Aha?o!c^t=5p)c*4E^qVB@s0r0Tkanvu%iKt~j8QyNfS(0B5(BqVA<9{|V`S zyLU$Lr!66%bz0EVMY&)O#59B?0gdNi37e}Ud?;=e{Uh^a(I(3aQmo+ z9h>dEM?Jc`iT3I7dm{h+rmFdVAG0W+EvuY-dX@9?wM41p)7kAYMicdtm`W!dY zHvfJ7rm**#cZt)@YLm*Y#?6hxgQnYu2fuGx5M5S+Vf7{#<5V2eqQ^f5EF%W5sDF$Z z`bQ0Oep&tAko?7rvd8MI(dmssiNvcFTR}aNvfII*)-tN;2M2$yAyiXaqeqtRWrDJ# zH84ZKPvygKtm2b6+&pE0Ky4oUlucL53|&}Hq>oT1FX0LtA@z=piQXC)v@*MG&0>Jz z!3gGqDC$d*JM_;Tb}ng?&ox;}D`dL|d5`$sDVBCITwM88uZq?S+PqFgfit5Fo#8&q zP*qOyRrB&W-w&KH(O1mp+YFNPPt4@^2qzgkn2rU_f3T)FTf3hXM!$0RSec&reaHnT zcA`C#L4{Md{s)eu`}j#B9jqcXCPC1h!K3%(m<8| z4`!1~#B2F_3vNP0Y1Wq_2G|}-jw@n`X`FX4NBY-YDENh>=$2lY%bo8~h3MHhQb;Tc zZfj_S<1M#!L^SSyxBtfbckv%qRUu}INJNzYV|~b0-PLls9IV1;`>hy#TNeLS&G(xj zTnVTc2*wxY`>|<-kY8k9Q{&2Zoxo(9qPW~pP?_hZm7!N=?*L87q3*PqJNJejN_NS- zFs*)!&X;|W?=3SdyWlB?M5-V46^S+(BmwCk-kQ^%!W*7^%%ASd#Bw?Avh`UDCM zum4241<0(cTUB&t2OX;trZtPgUiJDJtVmzHzPY77K9QdpX6QTrhWvpN%#IyeM^>%) zY(wa)SCwFQyX`?>y6$GPI0$r=2XQuj;M$c%2gc2;d79|5!}izy*zukTdCs=wq5ZwU zz2@dGsMu0>zq7H4SmhG-TIMo~C=4NC$Sww34E2G6&;pRN7lYVB{q)=muD7*6`+s>I zm=Qrb 
zzHMC($^R1Sww^1iG>3ABXf0%AV*}CvN_U(fYy&R0GZs98R$rYz>3F*TX>^$Wj0JP4 ztRdk%82xhe#W~L5kDWkUfKUP_bauS$fh+-U6lZNi2n>KmWoGh64k^&TsG@lW*X6X= zt3x=+r&nHP_rt<~Ry7nm(9F2LG4ip%j6#B7oL7p2{X+1%P6z#nJc}jFKP4WS>vSu` z2095u;&|9kSst-~M?RICe5;>LDt~a+i_ux6ueEE*$tdf??Tkp*6TQLHl<<-5CG`S5 zN^}U+eTB^zXp4yxGZ!&Am!%HYc7jCq!XEHDri;Bzo^v6}Q>!NVySv9z2P>a!2ail8 z`l_woAX<#fA}i%0@BB#2jl~1aNJ7=kazMBBA>!bgI*3R!KvQT2q%XYi8Di@FuxdjI zXF^OiR?PExYH$ir3KUXgwf05c(^7Ej-ncEwFMXnTI7aMn=?p~uI0488wwiB+0jbS{ z^)s@r4_-ZgTN?$&Jh~#PEZmA3x{r2MHu-paK-VV8KFk>Q-ZbIti@vam;PC2c z&t1cozGK)>Luw`e+s=;O1Pt59MERnxZqdtJ3tJEUU#qjI1C5Ul;}{|(D%}o8S}O@m z7dAQW=%!%kbq2x*t_{k|(`@+nVB|=b+TQN!blOh- zS)tOxKJR~Z+WQoV9??2f4vlB;shwGp-I7k6eXqdBST9vL{;SI@yfELr$xuLv(=ufC zu_ft#5h9p+5MIOpY4IF!%T_=~aK6WRpA*8dYADQVO8Tm0-I8ZZAX-;g&)s@`rU!E% zEB!rDz6YV`!wt?4ka-$?CLfsidmlEh17gIM^E$2KHCNXnCuZYxvXT#}_ zztk)7e*cisDS;#-x08H_A?rtvBL6`U?#VtZar4zOe9te&*5K|1KTqU&aNW{hu7r&T zn0fUDxs+V&@XFhvvNh1PPgo0R3VYrLyI3A^mV9urSqf`v$)japDQ_X57HbC#dpKsQ zX+RTjFPVP$^=3Zau<}8bC3w9T;=b~BU(3W=s!$+r6z~<)N!Qh<%VzD@ghK3ILu{Ii zYNUZyL{|+PGuKw5pncVBUwVNNJ1dy%wdxRNGSd8+d%q(HT*<$x4SDl0o3;KP`@<`O zZW^5iVj{F8&q|GR6GIBqUlPFUvI2&xEZH3yUJ(q<;@pJHzy%Y;wS+U*yOR)ixWy zF27x(OK^t;RH(AE|M-^!KS`qBYYw9zj^H&MmncI$@;xo~0Q*A-FofQB9%+WV<1STx-UJQBHd@aZtKD@J!kzC5n-JQKG~ha8w&Kj|A5?mp?4l zH&f8Zi>Q6WmKybn5wd;p?CnlVeg&%3?B^3o{eewi2e>O`UujTZ0=b7G#Di!VrA1IS z!hf0@dlrTmZ=#Pj26{oX^5mzCW`o})FYw!Bb7fyvOkkY-)Ep(EU84|>xF6_raQWu@ zIGhLbmrriW7O?XuD8j{%VHnpkcXjuW0CzAL_!|)>ED*ZqElR%Dh0I zlvP>;TwujmOgKMmRl$Z-0ZeiLCd`Div|kN?YAJAQ=6N5&ezG<Z52eff>NuI{a*cjFd3Lv$Z3 zjY8vwErn(efH9J*o+?hTVU1PIRBTeN@#|EEErC2+er#F0Y&S!2tWMH8O;n2m`zBK9 zBZYUnwnqV71rOMs_ZSK_RQ3Gk_FQ?N_NX2FjcROu@Y+$9S_*=|T>YB=B~|U1-TuC= zS9&**+?x9~lS_wcpULOqEm5}3j{ukIGs{{ISuydxOt$&P@TT5KK(J^LLqv;W*( zp=qGr8E}~=Sj;sKhXIV`D6R4b-j5kk)k4~KW_>-$TaH@+YJ6|^`&Xeg-Z4b26y#d& zW*)ISQG<}5iI?GCu&}oD;_2tO}#8zjZ*v|Jgx8izQvtG#~jI?X0s0ld!p`x_e#kAHHi?VsK zw$%8C#dY;H-%hsCUzE_VJ0Q8YE!C1zIiLCUgwcJtLl*=`wS|H@JP;2`+yDgv5t>`d z_%|%-)otf;O${WT8K*ly313BoJzVQ)8B^kzjE!Qku9Q$iINZ(kb+E;}JTUV>$>QL8 zlpNX4?ED~*Sp*st311b7@L`F6Jn4$7YLB)~vLfx5g;GP?uL^mOq;VtIqix}}L3eqy zhUZVQP;B$m8N*X~s6RkkeqfoLQQs?FtA&{og*_XIf&sJsomYmITR*0yiHBdTQHYF; zX!@r=j>z_k&i3?<7gL5CPcB*Vx-4oVHwV3CGik_zWd1CN_N2*rk}iDFt9>jJ^`O8k zb=uflZQI`k4eG9Xxx5aUp>LTboQzmWltR};WHy7eRuYqiXe+viZ*273bqI#OgRJr^$P)>~8~t z>H_Eq@44!ZC;ywibUY5)HW&ohh|F!p$uTQ^AjBLvptLgz#x1iFjNj?t`aDkBbe7h9 zj%N=O2k3=9=u?`E7#pB;rNLf%g$`jmnFWx`JDCgr8-Yj(lFa95nA+l3TO75m*Kk{` z`Ai7)kSU;RuwEG!smUi)4Uzx`p(vWWRk?B_lq}^^jV5w@J{`}1pFwNwlUdqoOnzou zzSYSb&BgcmsUw?ihU~TeZa&8w?wtxpU=Su}Q^o(JZHZM+c!#z%-Divn)v8IQ9Tji;_3HW!MH z2d2OQ-*koW=Q8)GazewkCgby!R)5x8Y^`bnqhc(1gfqjk+sf}nzD!k4@CUv@5ayYw zNU9DtX|1L|8yjEwX1%I%EI$9$yq{(LlRHlPv?{y@7wW%jqY)rYohLZxvEw~7U4p|V581#LjlXXmThX5q&)SzRD=#A#=|t0;#7LLaQgzGr&=EDFNFN?{*=OyUld zwcR&~NeF4Gas^T$e67Jmpcs!y%Vp(WcC9y4y#(F47q7?JuVH#c{_=icbR1QL$dz&- z6bY|Pp3GOt*_8QORdDxJdZY(84RSFQn4WZ0(DY+^gI&<}zA-}`TEv*dihO0H7Sc!a zY1pu3dF7TIFm?xC(F=jJ%v0?2@Lx|I`y8bL)6SE_W^AP7D1>G7?t9Rjud)g?jW!hC z0_JORK?ssuSkz(%5VF;mXWOEAw6auYB|G!*-5z%{hpBgWYXRyE9Ze(MTC)ylySWNP z&21i2TU%ipnc@b9SE1ZNiHMW~uX0pCVncx~z;GWBFqLa|B8Uo1%Ch_!Z*!~sULawf zpFbrFpK(e%=%6x@$K5Mn$l#b`LNsSkt5dqej2$op<9rRUgUq(XuA=Wds=k_tGU%iU)gW1U(7wK-u zy>l#zQq2lm6H*`T9)d--Z<6e^XSv7GTO(k2kBex!{ys6aAi=x*LI&=2EvAt0m5H=j z3otl1j=c7!fN-p@YjPJ_p?08^ca?*E(f>lL(o?tv7Qm@}KQZ!E)7>9frJt$yoOe;{ zeF=E0l}h0?N6t8)_vp5eYBodEr70JJ9iU*>{`8{a-tF-A*PmYdU(0hE+*yBG#WGHr zeap03@C45H6?y$fPR0&C|Mml3S2MOBdeT5ieS59TqUs<)PJ|H*62tLh)3F~+AN_+j zXCW>WS*s*I@?bUIShbn<(E9Z~0V$QreeY;ofr9$YAfAjQ#lyz>AXslf$goDI?hV(q 
z0=4&)!&)pwq2#>piGT-_)6P@p%#IdEh7ll6EkAEe+P=>(ivD+26hsfO7+n^RpPKv3 z_}x9IMUS7x1_+n4X1ln4j~9W38@_8*@^?TY%KMQBaQ4qp_AK!g7)paB>^GHJ%5zRA|{gAYA zS$5qHlpKV&Mx!s+Z<6fmGIm{d{b4b}o?SYmx30K1rFz=aA{=7K36;W8m~8p;Bln5si=5g5I! z7ktFc#9)_2W!QI@OM=IYBz9BdLbd?6gqlm5y7wmsyIUMl`Y~*oYjD_Lu|OjjNXVsA z1XzT9K?&B(2v5O@zSL>J1R*KY7eT9lIZIy)n)GmU2;*qF1&sAW&1Uv6BCUX}eeADo z?oz4x|5iK(M|%bRu|vC-59m#$2euBOZ5sR%@=F=X_gjMpfXp^jtHOP-ur?S{pQ(nN z+$qU{vP3j-z@Lv(Ps2qOOjGtL-kLP|>|JPTWH~$y40IsKe3x%;1X(qrGX?7�KGi zEWJpu&h)`=54lH7->GgIjpTo-Y9=547-?6;4^!esCBH0a?IK$609HtcLI46Tejae` zuZg#2;qmza{_iXiizSBIL&L{m72nd!Ur5n!jx7+hKv+|U}7Sx5#t>@DU47^r<$ zEZ)^pLaDS+n(F54E86AnucB#*YML5fW(E6ZU)x%0b{E>@Ht6jarV49f>y@xdX& zv0<4&wn1#j_lui3jUP735~sY%1cT)Y*wGrt znbV6loc$iI?f3WqNQ0VNud}_x2&trG$X3yPHvH{K_qcRO4jMvHD)aur$A)vdZaqWNUJe2MGK0agL_cgn0S+k2U zBU^+aBtj*bh#~vfcaiAHl6`DZ2pPLUAw?MbzK5}lCHwxndp_@aKkx7B`RDi7%mIJ-I*;=>kMp{FNt>f+1WE@dqAS#@UVE2SNJt>=vak!%j?J5 zTe3wbJXxW}CAQDjgC0?7&!B*lY6$Wys>b$79L8vKEVwif3mFUN@h?A%4EA38Sw-&i zIH9b&R>4ALms22>OaXVaS!x~bwhsh6Y0$y}cf%~MI5V_QU1=3K9QEpp>0?iwiet_f z8tosi-qZgmbct%OV!R65*XY_XEr9^}sDmR6-}iNgs6>eyJiSr`spmeGCv(`4d!dSj&5G&y{BRB!!1>YDYzkj{@?hEg`n4tsJ8QY;U zOP4&dXo-nKhwNV8`eLUu^WtK|p`GhPgTq&bf1E%ioO`2ns+(z)X`*j~aJR0_MmboF zcbF9@(Z1hLq!+(?pyR26E`0CW-7}_M|Ha&rAURm@KDh-!a*GMM1#x5n!^d6;uzm|e zIg$APzJ%-~1lx2BbI_ZI%v}#Zwtt+X(jt2GUXEgKDM4$JKjVbx)r=XA!#h8{pHH|j zvo(GHYAM z9cOkAlWG_fzi-e{Z?Irx?KWdy``gH(Rqt0-2z}=GxhN}`px`=4Lx7GblvErywa#%3 zZn7?U@pgb17nxwg)W=Tor~~_k>&E+1TRG$ebV};R^&lb?y5^?9maA?l^uSZMrUr**LzXIAEBJ zcwo){;WShD+1bL!^J(O%1@qb~t*>-Ee&C!+N9d}gUulkJ7t1DBj zh1~2KU--z~k=vVcRLdc0$jZZ|(AtFZ@IkA_#?iUx)R}^`I#$Xz^Xg;^A!)C(q*SZz z)QbcwtxLY`g;>g@Yc7wH@^absrWmpshR}5ztM{ogWaz?;s4eL)D}x8#coJ3(&i!-o z{Pk<`2wwh3|NpF-|E!^qW_2(our#F^SSvHxD=BGV8=&GASKk?>(-_6Ts(D~NSR6mT zun(~FeYF{7cE48U;Dq=@(uz2;%;V6yBrw$M{-KH6_}4#+mH8ELNiDxurE~tjt|B4S zK?QJ&#$9zq?YTT^n}<8ue5C_HeI{a(oN6?%4r|qiRTI>DXcEh3R*=lwQEk$oeEda> zAP>_X5z2t3RfTYo00#G0gnF?xPOqyLFUBl?!XpH$ThCpXt(AonRk; z>A?-Wis4Vz2ueGjGLI}O275JAy0L*uzg?J@>??#_lEv^ZQic4rQLd}f0HHQSv`oDu z>5C(&;ke?Rg5}tHy2ph+rLo-an0i9yK;iaGSmV=WLUc!PxFUh$*ZtVH>ol;u0kKPy z+(gK}*?mb1m%}s94mw-Nh|Wm-08!}PQvL0`ywJt`DK*$m%%2^6YcL$X<+;l!X|abu+QvF)OCEY{;4bd zbv7{38iRcIOq4d_HGHc&_%EAmtPT6>v)8=Nnb{sX&?gQMQ+E}W3v>fx{?Rk5XpD;Z z`W&>Gc3p!1YTO$uGdv$Jj-LP01;JUSnct;anZOy3W=S*04L1bmuF*N@vY$n7pT0T-P??N|CbudO>=>Te8F-rLJ52ITO- z-?jGMMNBnaL&>o2C8A!hT&_0+p-!?7E8(3_3FRBC=O^k4pQ&QTea9X}kM>;ryKb|YH8TAq<))-RYj;EuT->s?H&s(@|9kAu=J#tW z&VCKS+t00v%n4U{PqL<>ytYg>_x792?iUH2`<6Mn53ZeCu2K_7^p+OER7eAOBqfj> zjpYwCzNl>F}zlQe!L2GFfO50W7=48|twb9?*|7|C;F1 zDRRdi>S!J0Gzs39c7A4qjYR$jvTaWhPZWm_J6v1}K+Sla-MxMgNbgZjR&l`wA?)R5 z8Y!J2(Yv`xuJXz;n~vOp4J|q$#y!2OfQ#SM@b|fd@>Zi<4`f8WB#qM2cabfcp1895 zGY^Gy&=knHN#;$*U5Az`=LmO|XRSHg z;C9|le7i82_H&w|dcjYeo=sJ+b%e!jeTb&6s`ik^4&~HW6@st^v_>%`2(h9*Jj2M3 zn$Rr2Jb@)q(8^Jkhg189($SNl&)nso>E(D-c?M04#PaYIiWZ7xmcu3jl_0VWX^L*b4i)k zg4!b-wDm#g`Osv5KOv?BDtlQPo;&^@R;4Y7NdSs8ifD`A0;p4{Uc@Kz;4G31_hb|g zyzU1X@(X~=p68t;f2K+A~W+tTi_+PgWTX8Uk`-9?_suz}$j$N8+L23C15$=BX zvB@6FnCkG1vP1|n?+i_g4ap1PKul5T0r4nvh1fcMlj*V)H}R8R%JFCAq5LAas1W>B zPjUy_@)3NoMDYqvc!-a6G3i5OvbXb2AW}C!{=!fPH zugPF3sGhIOiQo-me1JXU;Mq-1F~a;Hd!B3(9Hy55!41Ya#N+7^n9VSBA+uUs=K1m6 zyNzyasGc0!HtIFSP_>LOC1Wt|ES|7zL!JD%E$WaAbapb8$3HDi@g9-QyuHpO!sY0y z4MbPNilrqr?CZR2va_r#af01*hma8(aK1!RJQy{>?Q0`2pkG91iO9TTAVhMAUv6oN)-2VZU_du z94#iow@Dxdsglw=Dc2GVcUn)hAy2$xYxi=lVBH}8<|Mi^%|R4&9l;0jW!jHTuBufFviXH`K*d_ZcOGkWekZ;{6*??KTSOR*AygK5sYyy zQOvC>nFi68=>p`Cvcb+n2Y$Cwj_aOZHe{0({x18@LH35Rgy)O{C|ODn zW_JI+AOJCxDlpJYI7C6*g*gM_A$MPa_fMynC#=V%z{FVciPPT`hwNkn_o8%>I?nlf 
z2>j2iNV1q{x4&8~&T`^))?;182XCQbnN#!c0d&>5=;db^V#^c_DDlr{E3yWXSivbzQGFm z!N2@Wp0WpU<*QyjJ>rFoR3n&1eq<3CU@Ym5i+w$~QR&>57`uC9Q5-H)lKV~M3Hw(o zotEqps)G&ftAlN(X>*#R&ZL+iBc+@4#h7l?rpHsdUd#zQE}Aw+Rj{Nvh-z@O`gTdD zSD4qP$`!c_jzDE9J!LDcjsQYlpesfIbSYr2w)SlRA)a8l4Y;F_tb*@(M53*%$Vsu37+!d<3EA6{{YF?Q?fVrdls3U~m8#Wb#<7|DlZoew zc$iV{4vi{oExd~Khdr`vnKW&o0iai(x>a-QN=eb36ITK~ba-%Qd30$t`w1ahKd&$w z0$(on<7Swnj=FcHJTI4f#m~tr&e6ZKr7cebAbt0@J}7Zc`91cx4)7l;+QNDZf&iF@ z62_Fv1~R~j zX7|Z>TFXbDd?MxsLYky&i-O;8{LB&v{Q4zGk%Ip3p?~TPA^J*SOc1xHeTf-OXrcZc zm4_nr30 zF<5TFqTZMLT_CMBt`|8&AjdAaY+iMc^75*1pG@*%gyfwOISU@=Pj@FO+pZOt7MIy8 zrB^PSXU5XR&-a7p91}%B20iCylgMQu*!j!SdCP;FF#VIquRK(s-F5ifD--0FnP*|O z%kS<$FY>l+=zI5qFO3*QCymi#+v`KT3Z!bL(+S#t`f2)4Richh6XD+vLIbCcN|aU! zcQr4d6{0hwF)FS6E9(RS9TOtx&;233!RGdg&(X3Jucn@RcnN3W{LJ9Aq>NHMA?6ny zcJq!7ltottu2@cXqtFdn0>gV@=GC;Z79C7Q)9Fm)4g`av!-5JMNYZ@OOQ-Koh+$pz zGqGlh2(bJkZsr;HP0|35OY;j95XH&e#M)tBCb^SUcDIj2u#qh*jgf{rJ0d2;gyeXT zG_|sMsL#<91w3vcCL;pLRAt8@cp@zz$Z&I;m$2)c#af6_}n%|(dv zcoWOO#yis{M$Kr_xn~*tOhZPI`c;EH($0vY?E<`Weu=RT<27#N!Y*Vc2*jON^)-H z&q@z>75>zbFYr;8AHP9T`jm^&4Z+}1a6xmg1P>gTh6xaop zT>Pj8k$%~=2Yt(sAY8!BTTQh*yQ?Q1?b`$pW_3EQgHr+sSr1~D?48a64qHZuE&)#H zKHas$*FocFhCYrf5_UX4UkUFEUJS^I9T^}sxZ$U8~VCBMQ0W>BQJob0{thDgmhg4@<7*X zo2pjc|B3^tY=}qB9a}(Hk>Z)Zld7>ZtbYu!zK2t{8W>WXnAv!ffJ_evZM6)5-E?08 zh=;Kggkf0$zy)Q`HXaZBVT-K7zb0~fR#Wg_x$hqt=e-i5UA=0r%-~8yzS9Ne_(MZP zjgG{U2gJg$mog}@hUmDb-mq`pGwjl2i#(SVR0^|`igl27+I#+xm{+aZlH0npt8ec` zXC;CN2d}cPx*)?mDk~r-!)DNQJZVI9ON8HLxHBgne z(@d9TPash*p`4_UnhT_+zsiXS{MgJEZbagJ=0!0$@A=%rg=}zCO|r>-O)_Esdc|fd ze^G_!Yn+d(l&)Xy*g=mQtaZEx639Y*bT{8!mOp6>T(5jm>$9R7Y-mQXk$D64rba)1 z1P|(7SK&KML^@&|Oa+G3h%JMHQn1`yHi0~-fD5Jq#h={2 z&ec#|!ljaS_OY34q|!9pC=gqme6Ff?;YIH;WiX+6;{!*ct3vEVU^DCPO>+L#=X@E) zz8g#;or+YsrdPt`Qs9-I!O4B3`i{{ zQ+t%}-$oToyNvYqem8??@n~2n**3e#ij37;%OO>uGj+ZVJ^*(bpWnR2&TDPRlqFeY zL85yH(g4Tywvz9bzBCUY@Z5S|Ph|ao@pK}3haL>ZzOVw_uJSBR+ce96E zVm%;-AiOs@GKhrVvAER0!1Ht3eUI|wxen!_3I@o8!^`JMrB;yV>0dO@j-gnJGFt%* zJ51lJkZtme4=2ETu5)S*`@F_1?M;{X)rhRY?XVqjoHPLnpEk7mf6$8l2sY zo_UI-$(P8>p7f*}{IMVNMuq6rREUKa>4)wMsJ|-(iqJizc*+pQufqS=I`gN@mZ2kV zw}U)CDDe0dB^xj<@8K*N=(y`si?_QfDstHP6+E4IsLCK3>A;z|`iAA(cXO;$6d#)S z;wBF-K4i9%A98#)lyzzM%mIW;wl#<~8xw}Kp7468R(kJ+T#}j4lu;pj@5I4SJnjb> z&ueWubAU+nJa*e@l?paY;ypj?)Ilw+d(n3phOEjg@Z)JhhLt$lvqnJ5cgcTja1#$L zpJ+XsqQQO_6K=cmc$P4=K}Yk(+HI6o;IyFH4*1g1ZDaJKnqY?M1YPf2!D*Rl6>hsk zx;yoo6~l^u|7JfUUQ2T=Z6;R$|#0#awICj3|31FW9Iz48O%V{;dc z!78KB`E}2b$p;TqTkd7d@RYVBJky3;^zWjS%t>(YD_fg2cu)ZbTp^=EJh$Gi=ODET zM8;ud8b`N|w&Yj0Q6}uU{v;^eDU|l2svt{AvqL8!M@JX$1kvZ>HZ00>V&}H?tgebJ zW_M2QQT2Jrnsq;%nZGwfnYe%Nx;YViWG6eu44;X56j&IoVg;pjKB@%xOa4IX@-;o; zK=xeIUFdPyJ;{*Vpi6BLqItw)l)nSn?9X5H@?O(P&+s1<{Lhjv7C^DS*z0sZv>_d_ zaV=vv_!~9N>OQ|Bl}mo+j2<{;KmlNs(^NGSROTU{xF1xWHd~p8@D=I3TfS?%NHxEi z_C@sSDewUDw%-~^OS|r`Szin~r~V%LN1U@DV3h)6LO;keel{sXtaTv6ta*e=nK^dZ z0d+nDEbpKYt6j!}et&rjGl7I6{3&uIo&+$_i+oBUHH&hmvD#ub%qDp<(A4Qq&g40S z5@Qo8LKGK#sM~LWRp#``UP|_6#aSj9f6}O*?sJr=uUCFs4H{y}sVWHk;3qEPSzQh)vScr#$px84qCn9IWq#q1eo3U*kmoU^MYwv)m-u@jG>_$B8)Z z{rB)_-`8Z~7Y1mtnx7i-YmqP6VUwA3(g;P7zF}c*p4KQjJL7zCr6V2T_3v ziKgA!0?Ir7hCe~DxeAA7=M-`Orcq-lPh#gf9d(ZvHhzPfK7QSw6@4Ms_R2R{$mY7B zxrz#ey6#Gjs_x+O7s$8MBDiaH@Z;$W#^ZoZpZlzE&&RK8l?Q116rzhKNcs+2yB~U1 zQ}QbY57mJMCXkx$E-G*wbu=NUXm2_*w|$LRUI~ztIEYJooEeW*4pV3;P)m~vuP%mm zc+UdP2Ar}((=sIZvp7fv#|t&ZD;d#Wd@UPy!4REJ@WHe(Oh#Sg(oYqLpRLMe^fEI( zDBMtjLCfL;HBdqJE(YF~eGu;b`OO@cCqYm?E{;>N6vV9tT z*n^kl1MO!!zp}Tn$Q`~7zdK0aD9f9NZ(nF&#h=lpv9_y zyFKM#-R@I=ry8U$hREstTJgz)fv;955=5--4=;^JikDway_nf=l0fS)XF9)I4@*8T z6f4)Q`#KB9q#R$r7qSZf#6H@;W^eb0TSiP5AxKx%oP+=L6;>? 
zzWxqjwMD<8TI{7}iGPLo{V_%?<%m2pTW<36W8d0`%4yZ#IsCWDili%{a*HI8Cxu3n zTMOLS2^~IkyBiqF@#SL>1=DqKrV*4DNms|Wi4+`x1QbcT18(3dIWMVbGl8{E9 zqPlfIB-ZSHbehKMs^je3$@r(^iv0z;Dk^IC!4IMy*PV~vNeJ3r5TUY9UG(ItfPXzk z?bX+V;Q6CW{>8Y^v`qSK=R%I%o(Its*ImV#KoUFMpXay?26kCp*8Q>)L7*vS<$tIy8@FBU;!?s zZ4({Py)ke_eID!J+4_SV)f?B<4#Dua2D1`J4ifoOSIE5-9#ARZedph4QG?|v@?L7h zqs~WoX~3xA6R#LeCqm>#H;z4Ud9%{?M~sfAhx8#1~ho3sr!q@~ZQqlFNl zL9Ciczl|bp$##<--~0yLK91CsQ6!I1kp&n%@th&5C$mKlac^+B5K@t_2WVUE?@HIjY&McdZQEBZb}#(aN?;k zSoh>gOb^d4h@HzJ>@s@48P#!TpJC5eRZDAGf4r~n+79K*b>uw&n9Hmyaz7^9&0|0j z9Hyd#`A4egMSGZCU~%-dDb&!Kcl3vqR-okV_o$zuqP}I-&j3PbRr12?mPZT|UL~`A zIjO#-e3yQ=t-#*%vHaz6t3Q#B;nuI16ROf~{_Qt1{wLBA2%%U5QfggFoKSfy)W1~7 zZ~k?FZ1M{`mhS)*e$A8kJC7^Z(G?H+B~!XS-)({UjXz{&b7ZVI_>JZk!Pmd!(MMhp zrGHeM|5a>mwj2at@0=IPGklJrN|J#XuT^ikJKqiJBrS4U3f67NM42`j3CK=BdID}d zyl?dpWpuJg9G^Syl9>9?DIetbp4+9Jo11ZyP1{BTqp{{?Ak!(9w~-CtmAui zPaXVzM_Vi4G239q8e`3=7w7I3UYaCu{mLg7Hv=z1oXW=S`A;1kkn!8W`{k~9gDF(x z`oO~1Po0a{ z`t=2-R{1~)7n!aQ9RSn_gvIEDL9mkm6|LA2LPy65@%_eUm-1B=?gdS-iDXV(Y0N$d z^SGdD{t;%G(ak?4O{V8b73;HRjvcVmX#-g8OC1u;*OgozU5Ep6>*m%%(D8Ozq~`6l zO=xkZa62Y`lbGv(58$GfQ`5qK9X)}4PiOY*p%Pg#BoBB5{mjv(p?vQkHmPgJDiNsQ zuT-hDUktdC>c-zjHn}o@;u?K!$vIR9_q$~4F4aSyPkQyC1L+dIkZqS7#EKk_8FOab z_xCiB*M}BLs=t?^h+?Mih!&ZOE@!Z6sY>B|6O+|C7CEdnVA?yB%l;LHH$3VDXGm90 zo>dU^<3*V}d8cCX?1MLaA8N>XKP7GdsLDT7O^iOFCQ~))J4*7UspQqBMw?s-$t6Cq zzt3lV!@93PZz{)_eVt8prr#umC+@M-pOOKf6#{r6-{iEC{|(6h(GuN%)$6KK2W`e^ z)XTU^Hl2iRs}DX=m`deGDJlUaI*->QECa0Vfut_9lk)GlIEv_wI4M19d|FCy z1C})RFf$VO45%UqRXi0Atmgea(X5y-a81)Q4~3k6RZt5uT5_4Wc8^P|IjIyiNS!Ey zoQ<2E=Rrm70owT!Cy;mdYTy^`BDmQnWX^Sk)Hppsp8TD7%gHB<7~*XIy6c`VEr*{* z(+uMWO`?M8A~K^YT8W+X4VsCf`DZJ60L7CiU@&|dj%A5UE8hQ(O3!PpB3`=!LN9mw zZih`&hnX+dJb!zSRwN0}Fyk@2nURlYmC9?0oH}JE7O5k2Vyeo>6MSP{fmK@ z1B6AgrUX|f`CyF!CjQgi5*%K02q8z}NKG%I!8h4V+mlK~Id&G4t|x}rbAs^wqOE?u z;C*hg)2%FenB6{mMZhK}?lOdv1R}*r_B2fiRDVIssIgouv40RihAk!~=m3fDVwYH< zHxUidRq>%sNqE-r0iB&=_0Uz9Nx`&CLo7~AE2nFSo5eOSf{p@lZHC01KXl;N?ZZy*4B#ZIb<4AOy+&ey7mGF;Q;DTL?Ff*H@#qi{*_o{!wFSdR zC_~m=x+(vKWWtpKL6=9i_ZhC zoFAbxYu>LjHV-doW?z*k!Ym~Bv%6TXj z2iU3pyh1{jLk5%{92{TV&mE||!A?}_;-`Ima!;t}B;s0yax?K@!kCa{1{k&wm))*l48 zD9l9db5sQq^|cb3;)ic5=2s80YhNbe$XC7nguPqp(u~c2d8ffk>ow&kNJ`2HU6;B8deyqr=u0B%RrANi6Tlf0m z;`jsD#{lg?l`z1jt0$n=~t7P6>)=8613a%4YHZN1oeLEZziBp58d`$n77(s6M4nM`tDQ?t~F7~ zzfw^X$@!RaUBt$p2X!GyG_698{i@IG*892)@OW8k_?he0H9(r#mH6Jy z5aPc0k!Uq~a)=BoFn!(77H{B?zz;m!w+>e549&jq ze%LB?OVZ7oDY0{dVdmBaQNC(cB8+6k?E?%1Kd6jq&pdD@&J=;-wwLg&ARzO66Fr3m zDnvZY-FQ{h8e)xMpv(+{%yJ(mEec1)sa@saJ{qNMV>hKBrq9#~7c1&`fWM0hl_Bf* zBaHuR?p1}C=2VNl+6A`VobGz3#vT2&+vc616&f<5J23GS!nx!pMpRCS83Mel}Q{ zG`0WVD4t_j`;|5RBCb9IgMZii%pU^r)jVYNp=m$xhAit&&cVpVOZSE{nRk-IqF3Pz zwt`yeUw-4$Vwwo39sXQ$*0213?Zv-}BDHXG_=FX!oVWPG_(!RaC-#hmVR)N!q3+VUdrPS3wjK@+%2&yq_*fr_)!!k zjhIX#B{7}odO@hzMa>1X9w_r2(aR+8xYBM2{D~h0T4?67dCew4Rjo?ffy*i(_4>6d z=*4%{F9wtIEm;DB_4v4Kwqj95?N8pE02RlJ#N&6-4TLBQ7mLAVZRjvzYs8be#vcYZ zy&0sv8ORZqgZ9FZcbndUl{PD|AUVlWq(T|L&rrJR$s7svGRKi%I^+ z2?%v++(S<hM4qGV-IL%@H7I6%q-KYb5fE9a^G_;24z~ z{42X5{Tm8>vrmaj?<(@oI|EfbFLQy(C$PK`iL{9ZsOK1z-dh`DM3X^()|&!N^{akv zws8X`UpMm{(a=mX<+Fom*C7t3IRSU84^7j?_%(!oSpH?{6z}rn)cDTMT@Q$=0h_tS)8MH zo<-6Og=5bq)ARMKB1^Yol)VW|Hw|KHRmkUV@on4!{Ll2B({e@A3$>@|l`)k1=i2vL zD9qguREa%t(Fi~$>*r>3j2BMw4Myt}Ue@tX;9lDEf6zpfz}=gLQw3S#_o6kcDHER& z-tn`7#(OpkzB~dwX{Kz~qbI`onw0080GxDjbsN^3z1v6Cw99XtY(qHe9_-PHx?DL( zeK-+yLb>&%{C@XcF&W%yVi)L12dvMvIrO6~^f^XC0_V1Zi3d5JeUt2~t)?7IoZ=gv zzDA}jN1Ah#B?`)arRggp>Q#-RuNWa-D!j29ONS3}y_LzUcZ-zBefTWLUalGL@Au$O zk)r-rK-2f08BM$U+bI9fkeV?<=z(K1&F8$alaP8p7knD{)BqzB=+WUdEFiUH$MP^+sONBZDF 
zq(d|OyI0sH&gqQFh{#(h7MW9I$Ii?8^A_|d6G2oL^xcaVc9aK6fu1ogP$b1a;heHb zb#mnaM`yX(i=NHwhu$FoXRT9-K{{x(?Z~3uD6)U#h>kT^s0ArqwA*(2ci)|la@BH^evD#Uu%!X6%^gH|e+4)A3_`t&I3>-0Gh zPMFps=79El$dV`>VbD(~bxFxAUL#Xh$m3#dUtrQ2W$_p$kZGK-Ph>ozKosL3tl60! z;#&LRc0VPd-$VFUJo8OJ0?p>4cS8`FZc@d*Q6YKXW%&G&3rXV7NUZ=bQh3CG*y^(z zD5Q6dMQSGT&|-&1Ymr?UXI%wWY1MtfVoc2|SsB5UulhoKc_u@m_?V+CzWg16i{NbY z%Sk&s=*Jzs+zCl1Ul`(z+VtzON z#B$YZ<;0|q^obF5S>r-jV0mDrF5notgejeWOX?k>mKBEGc{4y9?6_o$AmAK|Iv^C{ekY9OjlVFz z*gcW_uYOY3EQYOsYPAwe$LjHZ11q*bO9EU@VK(C(l^~Fy%Q{o>cmtHy>>%2i7+!M^R9qp4ExuAFZdNM#%O*Z}o0BXoV?~=qLf zX6wP!)o?0aUvmfWhabpovSVS|A#n4iNPL>jB$mks6IbV3ZIp(IEAlho8Bxp|iP4_d zDeF7anYLfvg?&$D8vjduSbr&#^b!sJ9T_xFQhcB9WvOqchWpubA@vC&G zhUK==<<#_K8>&OD*yV$0i$In{OZ6+~Gd#|urMo0r;_5VdO0RR@1xUf|0o!D4=``Ew z6!g^w8=>G%$nm;}NROxIV{Af@jlj59*k6YCBLFx!@vyG!{R)$AufF0*1S`YlPipo< zX;^q)9)h@T%--ffF_L9m0i!odTlQ+Gg8R7`b9*uY?@t3Eo-w-0h&PLe*2vIZq#%t%e(z4)EimJzowN$Br)g8ivl}{lfQg6|2b@UOYLlpF(^Z=y$yA zw7hP$DGNcjt=_hGo(hO;sPecT0Na*UfQuYfEgC5ubG&an7-8=50`fzroL}7p@B1kg z@?sK8j9Ywk?fj+Gh<)3X8?2LqdFIv>wesx{NGj>O81BGa8D$zRowpQ+woy*+qGVNj zR>D5gWRc(s<2P7}GbK<<-_6~u11WJ0b;50#=TBi(aw-e-&r7Koax%i0%< z$S~J;d)2>4D^wIPUPq!B01HivNi+NM&aa}!G`#t0A+zoM4|A!J09Xm`j*0xaiGshI zj^uy`nnnEUO%Urr#FXLs9XEDBQ%9q*xzd%)A(C>B>s_mAv`!rtx=?n^WX^p+e+bUM z3VLtl1#SO9{Wa!tPh0v$*r11rPBaNl!oSop2k0ykF2XF7@vLpZ(WtomeKu5qRH@>% zKADSwJ+Tu|#G0xtk*-*>zbOg4^{dK!nibGHGiKPAxJF#1G|}$uo({R(%f;xf28l>` zYax;XxpW8nvMTgh3-iLe-{%6np}NNx(f%VZ2%@uhiPQ~CijQ_nK>=Yl1(>~vt)Nu$3Q2axzkNb~dyAp8Vs_^*8lgyq_m745u* z*7Kyhq!E>VrHQGZRD#>P=(VYM-SDZ5_N(mI58X{n;L!$7HXJlyU7i_VnyYDIeE@}~LJ~HkMiNtF^c-+p zqBLFNi+X!PM^%D%>1JO%jF647=m@u%dl9_*ij&-%RwRphu*x8$pUY3vI*~s)k=$v~L*Q5xDIqVRRV}y(gC)s<_e3 zI6EC|J)|sTQpcX)Vk<67p|>ooh3T?k(1vC6FpBLgpb+*tS^!Vhi(uFw6UpLXsn~x2 zv&OyOH^z2OhU-3dfUK;SM60|;6`b3w@9dnMa2{wwt{@5z3)>`kRb4a-!E+&kNVLfz zv`&UrLn$Jj0r#jv1U;Y4-P_qNqw5#0j3-2UuA*wa&RAe}6Xtz?t%SfQUM5%LYuB33 z1$O;cc&XFV)N zFd7%mXSvNABBRHvT9!cwx#1W8+^p0ntly$szgh;< z*HbcV%JQv#^+~wWTL%68hL`!)|M}m@n$`c7Z2>e(`S!r`{zGQ(ZU34>>|E?V_>Wrk z-#J`!7_llNmVp(EE2Ea$w^xFTsI1N-3>N)~E~*44?h&JU^+F~BD@hYwCPdRFGf`$e z3fm_i6rtnuu1!=4J$L-BVh;*(X=5x`pHQA3`7$jp=cw`|O4Ig7y7%pCN})@0Kh!19 zxlY+|l~xb-wjLc}3r$&P^WKktDSI3WSM{h(c97Dou~G-RRUa2}aK0>}R@h@77~lE4 z?$iNwKG=)N-(eC-+NAkW|Ekk7uYE#1ux`Z+o5M)gP64T}Mahe>zIWCRGx39oB*yYu zU*IBKTx!AN?vcsBVf)z{hFZ-69x9gt53h>!jbE7`a{>HZ0`t=%UoDlgXIOe{<$9TW zu8O?MrhDSRFvt{BLmi5a@E>G^CSxRS@u%{z&eT`1CfWco`Fi!SgZ{-$pe!CXdO|Q{ z-=S3!z=Y;kWOMB(|IBqmT%8?vA3>T|)JseJndNwUl1O*Cxjz+D(D;PD_f~K&%S8{{ zD(60$uBeMT44{h-3uI|xmK}hC522nc15jXcy+vWo?;N1gU9ab(^$)OS zb$rGnYGBJ(T&!_x*~GsrrB_xY>?m$J+{y=V`lics$=kuSZM+b&eUCSlowPvKj(_Dv z=x(IX_1fmLFBx;sI?RIj0IZ3<;5@s>`)^ zt=+DLlAN(_7qOp?>=eS=bJBsvayNyu4d7!r=-yx_S zXxpNmTj*~aQ?D&7z9{4ghM z8Ln#h%k%0^coEa^;O@Zq_p!Z@F&zM=}{v*Zf)_K_aWwW zzFVlcPG6NYXoQZ@G~lft>${Y#+T@b*(%fqJJASf8IiyEdpX-A?+TU5scOMCVtua#; zjMlQNT8^A$pE&-G|4W_pJ&K89+=N9G^yHGIbVw!DV~)lmQ&2w(il+zcF=E&IoTZQv zFy6&@c##%KH<vC`IT0Xn}t$9Gx|!LR;lM&rkglZaUK2JO;64M#NwDlR?2s~ zR~*%V|J=c?pEfKih2uxiPmwCEAEbLAUaSv zW9LU>hZZ`Z&*u}hS!R&)T`t?@!h-2q8pK!aHJ06bs0sF@zBsE)N%1^(|QIDarD$r*PqGT4?a}0*sQFH zm}uW5I0)HTzK?lWC!Mel2R5=`kGgv9A(=xpt(}3R$i=#uY+m&#NeQRIWp4qxrz6+a z{+6Ej=5A3~XX8#PzHWAp;N7P-R?joFudhpJ`cad2vbRmL501=U)%)>S3nNS3e>XzpFy7AU*8!aXA+>iDt~c!a?F4hPkK+_s3w-l-<7Es`^y z=3OPp6Quc{7lD5POV<|Mb%M;gx%(hOyhFaP;by0#1n+-4SB)jdbPN znI5~w@s)?RT1d4VtC9llPho@~vV?I*s6b0AbH8UrE_L7eki}9S*Ty?SnnpO~?^tX2 z%#M6>>e{+hxXubyPFlLV1x}+>ZG0ewTdnTUs%+pI#MT>Ase2Q6j7LTH$oS8`@K|Ry z1wZf6ZRJ`?@sHm6URuxmAc?*?IK~#F$46~iBdpw9!TK8{Ed}YnxVgtcTxsgR(BhOQ z1jq+8| 
zm2buk18x7coCu`0;+MThFW73_ToIars?Uxs{}*Qm5S+3wXD7lTwhfTko31UF46W->3HMcX+&^J$Pe$6OMSx22| zTsF9RV4$k3+CQDpe}4^SN5tCXiE-*$O@S^my~x=_-}A3u?Ie^fH=L> zl%5ap~|U=XHKwR6~j$f%6q{peP+(N1J=8kRWrUgn+d)&C22e}Od4}7kADsbl6B-cn|Unf5A-kEBS&63 zY_?|6qoat#!m{|qNxSAK&S?ZDrHMkh%}2ZRumgIdt=fYNG5hNuQ6o$%&H`*r{IUf7 zSy%gy$YK3lEZgfKk;TZ54GF*r3|B1A@hn(}LVxK^!u*b#n9L8$DovYVEW7gPuTH9= zS~|+U^wdMFmbc~n%eqv1?3~!&_)f820FONpsjj{)hg3xk9ZBRae1c-n?dU8qWiS$5 z);r=!kfA-=9_(r7^&%5V3hOnv@;*aFc~pTb&9Sd1xw5_UJG)hu$gcm7uD1YdvTxtV z86aKKjfj9sO;NfO5rjEZKpF-JNO#xhE)kegg2D_WqeM*zwh(@ zf5&#rImGR_ci!jcyw2;qJ~yRL(7;Z*%fkDlUn$=iv7BUm3SIt}?ihzfq%iI~X3O3l zJFjtiqG%EQ5({jnV2ukN-q8cRCrPZA?ROFQ&9mBw^4kUtxBFw6d;?Uk2krVkt`?ZB zaw=w)SgaL!`@ZfUcSyMEqQSvip=KNU_p)-)*!c7F$}zeDfd=pBs=R!h#Msy}4Crr) zH-K1wG70_edsYTH80dvkeHk#B1f5U;+O^1%8z@QT`mTQTe#yXmwlI?C_wpYF{qy3j zlwv0t?f-u1KPneBbPH5hVx-}8PrnEY;}BwBe;_FYRltA!7I=4^XdteRi_|ZrWS{1p(86aSuJ>YYCSSzt0-08&$77Vl zY#R5RI5dVd7r$r-?->qk0UKWo9Owz(%5uDWN=EY$mC8xxiYuj9&@d5D5mvS2O7Z_r ztVPCj`2O^x^Z_^fW` z^9mP!tA0h4js??oELGHNaYEI_u+aLUe!4z9l}UY;ISWZ0s(p^44b+cG(3$(QhKru% zqr!7R(n78PwcFxw1^HK!P++Hu392WUmCMEDLhsIfnv();0>aLH*7`RGv%d|@e&7_} zUCI$!21a6+x0K^myhA<en`{pWsF@Iz~rTv7cw^u7f_#lRQAiN#lLr&_FUeX5r zr*iuzC8PWWD@*u~R|QDcI`755(lgGfzPg&pjlsM$-#g7-O}&RvyszZV1=E|p^NaGR zHZ~fn8Dw3Qj6pLV-aUWMFu)YuE02N0WV@2Ro~$|V=0kmQer#`&!tMQEs^s_C;or5W zhy=zWiPug9LNY6stgW~>c9L(a>=>^4%xD9Zo70Yfc>?aA?wc_30z~tr6}EeI4ZV8oXHCfh$E z%V6o^$3^7S$6;#s-JNy5%j!^q!E{ATswO+5dSF{WXHx^SrGv$V^qfvkD*)ru^uMe9QNi3@$)drVGmS2fm#6Z zI!gS9$5a}nJn49tgV>YJGd4>3#{1_#J*DOP#oyW!;}o}_Z^d_}-8&yUG}~%ma{#qA zoWl=)rTD#T=8c$Df?LO9ko~Xkc_Sn%%3gY0g1nYQ`W7U%@B&r;S0=Jg-l^eB{bsX0li+6*UC=|WtKCeYi z@A`s1m}Lm#ebCk-@4?>85KBx_C#HM`5zIC_F<_R;e zdIriUsrw!_`k$eMAHRnZs!!Sm|Jn59ALjwj$-BVB9+;*;7#(6SjS_sCVBAZ(cuPco{8fH*&Ci z4_;t6bYt0JjIert0dx^$e0d;%4A~!khhA9=$Pn2z8StK!6YSpMO(^hiZ3wuR92}ao zG^pJ`DfM7Y7FIJ?4bw%=RKADBt(9{d1`aLN<(=%=ir$Dbo zFF$>$P>q>>XiZJb?+f?#;G!LJlcCf@*FXMn8yMBGtvHHH|?|>V)K@7DBvF=Q4s3_6R zQx^)@IAWg>6UtKa^@n<>A`U%G2npqoN2fQ0y5d7`!J2;FHZfg!QJ?i>h8@C?mvNhZ zusfN(;uyl)MxpVuKfY1TIgy}VW=g9UdGch(5EiJaZ+Y$3-W5h;T z^O9Kve9ng>-Mo;Elcm?_fdvBa*U4`~pZ2ifFmF3n=<4JgW0~p0^H|*596U&IK*pv? znW^92J;DbH7xxrAkR(_zFzv(DC^$*P|GlGrTtjhd=ov>#pt{DLVTdg$(zNH(n&DjE z7K;2^7>G|LQ@M%5Bq}$8q)vt;9A~aLzYivorINUOIq#0dbOPpZ-0UHDAUpGNCo{CG zBPtRD$DmB@2|mr@SLiN`urqn4V>P;w73|D&D45};eZ@HxjnA{=EBIG`-(C2w5Qj;= ze|G;e4d}n`fuWbDdK@IAZBx81(W&a28>`>b)*{s~VuYrn78St!B>t!ONzpzVZ(UjK zIpElZns|xWJL}lb0(XLltsh^I917A>cQwy7WyaYDSXMDZpvO~&}O zKzdQl&20hI5&M~_U(BY#z8yozxrv|K-fnV_mWUazrR&U}CDYDf#O3*>hOSEJyf(i? 
zr~RC-bH<-DbuUq|q4>o%tOhS2dk=?S6v1{>c+#C|mnMQ>lB&5Ncr@T<3CXgyc~6zeLBOkgsOG ztZB3*scU}y3x(E*QxqpqqI;a%AAs5G?{r9Flk-LWtZ^$3{00&$pkG{tR0sWgIGt5u=5UMV}~ zOnK!Si7>5Q{tFd>t;z~i6ww=*A0PaMoBuo(^^jas%rnuTqDc2nB?tz97^y4?{wa8@ z{_tk(@JGszls!nwj=;tmQ{ce@mtxB8wBTSN(grL!Yu#>@$ohwe@93hYiMGV?zI%gCq^pfd^uPlYNW=Ts8?x((v?yRaWHY$8$K+OH#zXXa~zuP#l|6vxC z@dto;~Gq{>V zOoEyqKY=0*o7n>k9Y3uzCHK1Pw{c}f*_(;e%`4Vi9l29B;) zgdksfX8(vGQShbNZe%M*pEdIIpRwc}__@}g+qN>mlyU1$dPC_USTkdP6WS0xz&}b1?mj^+1;^zY#XU<+YDpauDU$@#q#oedQoDx}hh-o@Eq-cC=~G z-k}Lg3*W4d=X5`wb^s-P4HP|sca{qWU?K&kkNmAasQuDO_!;Irw8P-vm9`w33jR{Ha+TW-VjWa|^oI9Sdhpf}tz850@x02kG`LT2QqFvBn{r*!_3vzU)kRY$7-nGTb(1jYp^HmYr z9P0WSaB|C`t~=ZJV`qOO|~Z3k|BSV_emSX&iknKsYMzPNyMH~66449bY4 zZN9YZQbsP6vcbErJi?o~-%ZRp$y;J1US;TU#;?AiHsc9dOE8`QK}@oZ>#igmY1*Oa z2UVPN`ODg(_4Ow2|mLilOt)h81lsMDvxOW8}DR4s?V24{* zhf3?SGXLy`D2=$w7{le;MHX{3WmjiZQV^YS zS)dKA!RA1+!6u|eua-mo*Egb_q;P!VTUS0C9NZ7WJb8DN@+Dl?gUGkwN822vtsC7& z=&wwAZj95+uVLL^GM5iZNhCC~Bgi9;Kbx?cT_OO--Z_w>jD%^EQo*7{JAvkL=2B{P|j= zUtKI&Jfj?Zg7wV&s(bHAY37dOm@m@Nhdx@ZmuW zfXbs&osWDMN5%0j$!3*zon89<^_{6Kha+ztH%k<$Ee4U^`NOn@@CXK6OTrD#=l5%=HC@29;FT3dm#~dhqCN+w|(O&fMj*5Z%0 z6&pf2{q6HwBdAr*YiC^0ri7kHAIY)k{ObY;=6eFc8=%_nL=buJ#+Bx*^dKuwS0s9B z!4Pa`yXI=`zmErz4`6{>I-^=lzj>MygSQmSpH}2Q$u>f-Q9&S2p0hy4w|G_w7a&*W zm<2tI3n)jntV;_9o^K8mngA6;i8`)b1N@b999!z{3!Hq#8Bk^aO-sbmMAxw>q3+W; zuoC#uUgYi?XI zfZOT9pr~sXNLEY0%c>}S#84?+c1zbA*uGLGP-Ey5CI7y3!{ix4M1EHdvASrX7b8I+ zOiZNwI$d}uY!n?W?P&xsBc;ysKWusxsBa$7|5I1|VO;>zMKt(cMyS_kvofMgW0txg z4JoP~(0GM2>JE_Xf%3!>!&o_`R~fG2mCCu`3uq59m$c8>6(HSh@2<1`nuQ##UU<6-7|4c|mmAr-1h9>hAgE zFCaDqbJv&h2fqr<;=|^4c(*186SmRm+|pa8#`E-#6tlRw&3BzIh8V#uxwvU=aV>1P zW%#GC^_#D@wZVx+h=|_>*q#-^S?A=2cCCMXN%+(T9T!vSj>t)FA)CJ9Tc=f~o&&|V zPpf<1ZXU5`EeX5d$EEWn+MD(WrQBS-E`)&hc7~H^)T-9tLx63z_CkHKf~;^U#OY!& zYr$3@?qcuM&Kn`}>d&JzSh0~u?o#l`p=g{8V}i8m3LB;*gX5CkLJOT~Y_xj5)|own-U#L1{Jg*Y-Xbq z88(!)`&%lRUIR#_)l*xO@4pK~&}E(bfQm|>3JFhZzdR=aUVG2b9_U_wKL|S`yGZE0 zT#2%S=I@x3`!6d}QJC0Y-Q*j5(?`B988^984>pleIGYnBCc66SYg)WcL?{OJ%$6Y4 z{o0hP>^PHW{88n*`Ln7~HDmEt5YZT1(2>82sA`2Sog~tL@4pJ-^(QK#CbS<1=okUS%N>j(;<>R;WuQDAWEZy z+9-Q8{yatUKt-}H0c#{{f~Aobl|--SxWgrSj>1ZaoMtQ?J+Fj$BSY9&-n;o^Zl~g$UVLfYi%0HSft03R`Ca;&wAJQFIUVGTXekr#_7tuB@~{T)w^Gl zzJJibV9FzAO<+x+jil^ZP?-w&e&lN|Z&F`gQ}HBA_QcR!c?KwnqxMn)G8@y6)AK-x z;ovEkcynsr!kd~}p|wep$r-eHnTxkY=s$nIp(+t7P$XY`UaSqPE-P!cYCfcg@R$*& zhn0^}qxtK%5NNJG0=|bqrVYrV`b^1CJ+3c=7$zxbu|csSW+So}AH6@_-@H@CU6cQ^ zxn=(s9rM){yu2rySTZoKYPJkpLf!1TGj2rCTL-Q?;7NiucyWL;y6a5OR>37>exH%! ze+Z=rK>{q8x5g=xmHtP@T@I8mVkd$~+ILNxQ6+T;2ELFmf!=m{SuaqEhr~(Z2@{72 z%j}&#ITU`g6U5GJEof`^%`^v4ypFSFAbIY7(i#7her@Pv|4>&Tw**<(Vi7yD3<+S{ zyq9;@WcUsKz%NjMKeaiEh5uEtT*2>7Qm7dZyMaqxWPmu`^FeT1Su#M}zv~wFDJvalgxy87Aj-(>! 
zk^kzUH|am>=708!Frt`lsfflvMhEKc8)El7SVXK6PF0H+UA>w5W2Ijbc6=Z!@1hql z>W;9tms`E_4SNN$VZH8Fy=F!S_S>kk;(ur_p^wj(QjlZ`Zq1ET0`+L>V6~GkL3ER) zi^uY*guc9ys(s*pxl~h-rx_D~2 zbqi;V<2^pJb;yJ;9^FZBgEuMouKZ2{`nZh{LeLv*WOf1ums4-lS;nJh`z^u6T#^}} zJZ7grCJQHW)N#n3Jnfq=U;ACNETn}VW!k5hgEUr^L(d*z9#4C<9M_10(8O}}Xse>T zr0C7z*XWvd57cgZ#Z=3V>3J|q*B|cRNCw$33lzj z^}u#vhgRG|g5^Qa8Ej3#ajU8hkA$J*5}lm>kk) zt_oWdM8BupJaSC7^-e1sGiOW@?SDl;ut6Bv^J}{nX8HTW11RIovHSB0mM)~55g&%< zf5;0=jIZ<*rJ0hfA;U|tx^38^M1`oNUF{_U`&cMoH7;Yr$|DaQbOiC1FhTEJ&g{Zs6#{#MZ=GA?y_r~>Jh1xa}Vs* zogW%qxm@s=V$`WgR+GCY%f-XuJ587O{8b*_+UKCW=Kk8%-Lv>LxRs(=1-h+R5&ZRR zKp3g7=L5rdt*S!-+==qp zSRw|23Yfx~cr7!toYBSeBljlWBj8LF(v@#8nY(hJ!XXhLNWX1l$61 zM~G&q&y&x<`oWdEd!Axh?Y?pQ3Jbw$j4^Mf>=Hd-&J$t#tCnHoou=h7Ju6yTq#{{L zB*XMPG8rJ)@p1iPMi{Hl)E&9d?5!IVacl0ZR{m^x4(lh`=n*pTEm}o;Kj_L1uYYvi zYn8BaVJAb$(e~LoNds?nXHIe5tzpr9JTD2o6;X!dXigIl8*}jte}JHg_fSSnMpzmk zU@)ca*tq^II9AS0F5z3*ijVOqD% z2#LdUhB>~p2Jf*4uBn}Ue+sALb_RCx)n93Uc2YB%;u#Ts;qU%>Nzm!C(q-58d|1sm z($WP#t^a%GLxuKBdVptg@GM~}SMq@Kao=nnJG1vwtN1_F!Ljzy5$S-{Qg>H=2%8>phP~ZtC>Cw(H-?R+06Vqn)}i_v zYTgiR3Yw=dfwzLRv{0IblH@nM?pvFcdBPQOV^pM-BmHf@r$;dcsE*59j>p{iFCJs3 zrYgK7+iW~b2oV0bsG;$5XZ$>UJ>W1tS5a0$Bp%M2FX^s`ph>->O&q z+;`yeTNc-E^wd+A3InUys`Mib!^rsBzM6WiJb`5Mpy-<1B;qGO0o27E)8cqsMGp3x zGNXgCvHDz~VASdbaJXjAeS5GC#)Oc?>0bcJaoFqj8g)mNzVeVu_|1|)_e?pHdq1}!5()jr0r}WjivV8Mo8%fSN6OiJY?n> z&^r9vRftZwC_kqkKTZ6vN)Hf2Q^&MZ3ik4&hEjkg&mxr{X5TuzY%K`}1bY%qpS#`e zNt{(cde!B^IpT+QiogwaW_LvnQMeo~5MU{BdBW~ws6lJ2(WsW}vi=pS#xg&*Lps*-1_|7Oz4K0@z@DJKxy-U@`AF;$H-b9uvBUqjM7 z*y&1*20mZGu2dQ*FW_kK5iEdE3 zahA`{6W|Ad^F0En*9#gWCm+|Y%5XoXQA51~EB8lto{UJDWh#&Q5p58x#k_JizAnUH z5_#~3)HCB}*lhY{b}XJ}y{+Fy%YDI%u0g>=wxuIbe8XJoLEw@1Xq}e4A!B`(Y*zH> zguw16O7esp$`db%%w7&i|Jo+I8KK}l`sLBCg_W6n3@##vFpAdPNJ2~N5}imh@;Z-0 zalrNr3u?E@;S^;xo00+k$~L}OS}zGg`>gw-umjXaVOaL5mQzGPKJ-U)L4yyZ^X>O< zVKRNMJQh37l*Z3@9{h+Jq$)W3WvZps)Iz2C2E<2%n$a5Ww`kb>e(p=2M_IAqs3%^{ zl}BmjlOgA^Aqu{ATdCnk`^n@^ptgmdl@G0T_#gH#xP=c8lj-&3#DCL?fgL`(LOO8q zS9ZAuiK(7MbbYyW0KiL^HzPu2a5Mq3aG~Ee+2|CiAz$aBLGY2%AWaPAui^W|IGegB zwI3&OFjhhQ%dgEAJ0e%Q!`{C@mA6>V&Ky)@DbIvQtqS-by#M$g zf{2S#A_mI;KTWnW?kCjxF1>7%x7nZD2Bw^NJv90~@f@5Q4E4_C80lNujhA`@%Zb^35)olu) zxtA@70sks9rKIg9`ydA8FZwX8YJh2rax{3Ga`L9&+q@%7*XOks23OwGfEbcS6~fY< zuwtsuH^U>7#C;iv(T3OY&~Lg{TH;5+b?0W`=|o}|rJh^Q7bg;)SjEcRs%EUTMt|e3 z9u@yX1B4w8cHd>NmtM5O2RaAB=+f;z9(Ku_Y~20yIH})V=0nz9-M8;1_icwZJTmu( zCf$l@$S_KIoEz|hrqj;u4{+~;eM!-YzfxYBt<^)-ya;AwASW@6v>0m4PqfBU;&5de<}CShL2|6{LSbQ%I})ZyzS2H#sfmYrv6fs)bDgFo)W zU~rK=IECli);zs449rCt7f(0pt1kiYhq~Mj6h-Ilyj&8D)kx#ZTJuzqxA}JGUc=f* z!Q_U_T71d%tr`Ko--Sa_g4h5~x;BA*!5Mm>#W?+3FLETRW7qG;mgBkq0~PKIobOsG zVub#PIvk(`vbk8+{@ry5RJso*Xh~f`p?sIEk7Sj!P!E(Nk3A1t=$e1z=Ug|Lu`lSI z>8c!3MY%3E$jrH}aD>BHC%#Q$2fq`;ju%?gGbTO*4IFW>d_m+F9+8j)5_S?xkT+TS z>on|34Pqkb^p1q`j7B#J1&kC&R+?W6{A1k~bd#!>MYk!zgj0^|Q})}XJ57-*BNJU9 zbfM8+^`ebB3XqjG=(mr3OfW@jAJK0|J(^tm8_rQ!rb0Zvf{t!g)Pu~uIc<4IjL3gP ztM*%k)yrPg=og~yf>rVlY^V5qj8`X5xBKCrwmuO++1R9EtgPencVy&90jVxKzuV(5 z3f?+p-V$ihgq?rA?u>ZiUVFbG7AGFvG_Qr1+!@Y!JqjpWQUhyWzLAeFne09DTYJV@ zxf9ZI`#$TAxojUk*w1b;K10m*4cy9X7LM+)I|rfb+l`#1hyhwBLvx_$1yDh49W1r4 zHj1|$<0%uFP35$!S#HSxRnoBq0iaX0!}Gy2D~ zGdFdBLA(U5yAHswE$6*g|BuxLU~u*2r8L_%Qit_a0>w2n_`SnT;xMnfQh!jXlLJVw zq6ZG?QGX-i&o4Xj&>?O`u~-gSu1E7U7$#Jv3e-j24*n!gULdh!Gmdb1j>or<1gwXN z1V$M|5D)*e%gE_b_a-Z}5rqRLIyTl2IvRh zE6Ia#26j)A!M~2eX)pBM5;d6euDkTt zNpFh0=|K}DrCAa{N$;gII8U%EU?<-3BX;nu^Ge4!@!Oo0*QwTOy9k#VQil#S#_tbb zdC~g7PQh-9$clg>@{TrJ*2mQD#hdalMz8%Fb;l zp1Wcsj)f2p#nh$jUP2p7i_4m^HQ`y|k(bH)Ea5l0>t@z0ikXkO3*oi*lig~&sBY4o z3XCDr#g6O3$XXT{y%TP6EWKMjxwIvA`=0e1V~Gu*`vh2eOmI;=xq~$gXvLid0(9E~ 
zC(y~gxpgzedbXE@oKWKjY^is`H92q{V{L30#{bbpBUFwm>YlowSw^c1gvg? z@5NAWMyfCp@BOMM77l84>_2IE?|DSwh7+zn>Ua!|2D}P~sm(ZQpe1fUN&idvcb$L& z$l~7G|5*8qY@oQGdF9A-D|~x?(xmaRxk|qnRDzbm^X!`#C`lrrM1D`;Y)_8x>#fJA z!)6;tYkyNfV=Es>*|E!#pu>cu)A^tcWEuurL{l=$zQV3}blvUL=*3a6Q(6kv+fMe8 zP1Z70yb@}!UxN4pY_EpSSSt+clBa8ip#77F{99 zMw-cTChhS)UNz1<-qJb=e$a#F-kf+E>h#M#GlNX_M9=Ex+VGo42D;EVc zz_GR?(|1a(#32vKf=)RBQnE>EigYulDd9FvQ@EQ`c5 z_LSvCCE`4&uB4OUuMHz&i2m6bH&UMsXirR*_G1mdjssol872M#!r7yk2Q)@E42X$H z=KDr@AzKQ9NMh8olSkp6NcU8fnBMaMvF=mP44t?l9-BpQD{i_TEZc91)7jv{k%Ik} z)6TK3JHz$}F#tJX6{{guinDUae&3yIkOof{D4GE_#g7$10M&wz^=V#rt#pG=Oj0h=!Ru`yJ+*So_f@; zqFVeJXYo$gwNSgCMC2D-S5h7!ChFEA&|z)YARDZcSl87N7XT)Wq3qzTYQSDura~yI zhYxo#%zx{=(gdzbXMGy*@(?jekev~-&^3Cp7K`F_Y~gYr^G zje7ez!mK=ZIFXdG1Yle*hfjKD00+4EY*l{y+v<>WO?X2$23zBQ?~8}xoSJp9X!g{0Ll@qQomyn8O@9ay#VZkGVYt>z=>hf zIq1S_%FTFT`F2-P8tj@l&aST3l{v!_2%2NjXIdJ6Eq6zkfxn-6-YEMY-7bq?$5W&2 zel=Kh+u({4ahMJdbn=8l>1k7o=~k)n84K8N^F6iH_?QGC`dBFZ!Vw6;cAy=k0E=bj zIG_DYz6}WI0@d0_7w+&+4a9*W{MxR?dY{azSiAyLHpueKzgoSa?zgNm^+H_Hc_=lt zPR=_;h)=G@(eu}%O4@B{e1EBn$>Y5K(+$I0Q>y; z2dNhuH0INoFrLOolJ5Rd8N&szQ$X$7v7;U7CQ(P*hOg~w)2nAK)~p)oNe;GmcQ%JcrhF_M`so##4e+)cYM zClSyS;9bX?e9>~Z6tfk(&b%o<_ub3-1BZ89fKjx|^B(sfvGJDy?f8}S8bP9y z!mjGtVh4}xWeF(zN+-8!1JH5t|9E$Tcv~%>o!OM&M|c%Y<+Dl4d5!+ih|u3f7m&*8 z+1JllNdBNs-R~ydxezwGI)9CrXd@qB47T;|cVyAOvobxZfa6_q^%?%Z2?PkrFq6Uw zs&09#>j0PUT=Tmuf^uzDPm1cqgMOz-Nq3E|yVT|d#dns$*?47brxxEs#z^&fl zI9+5&-Z?wp`E{@bsHeAd(9R)#iNXiTrP31&AwJl*I99=fV4IyZV~b}h&JZgn(E zQi;qzwGz`stwhe7`|m(XpnwXzdAd`=xF7EY`<7YL$f_;>rf-}A5fE%Sr?RLPah%Dj zy!zakoB*Je${^z?wwnUwf?^+6Ud%q|igE16RMhg>xyif9|042bix4J013}xBpfm@V zYUF~vo5y#?<1f9&=WNS~;>fO76qZd%kpcBl0K76SP`p44^{y~4?eFUC|EZIU=t&PR zx4XWpMe?z?XM9dQwipQ@7S;1W6(VK_Y(%&AKR(grJr+YSH_sGD-;@0{0}p)G$7<1=@K2yk$@c=8ifvk$ahxq#lB*_t!;cJXi$^+qZ?bN+aH0~@e>qMwy2{+7n z*q?#F^ehzB&cjU|m4L^;?bSe6fM6~V9+=vlkUnVM{V5_1HTdo(ObiiJ%Ga_oxpZFw zBuwlg=0)i!!_9$;vl+*Gu_g4==@O@iikdwT3#oGXCiOuA9ons#LCe=dvBt`Bvoyo` zHMls_0S`QWFu;{QG4d8}HNH>>Wzz!`S3M4dr1muxfdIyD9h!{;)uQKQ&TeC<3BO@V zj?l5y%Ucxt!g1^2V7C1hTuv}VK}!p~Wn3*g>i@7!h2XVITDw)1@m~Rrscn*hA;$T{BDG3O|I&9QV0w|-+YaDC_s1^tIo3oj8IqubmN8;|M+zvmFZ&ks?m|Y=d z%u3-W)ISh8ja%xV;dqxs*C8~{QSQYum^ClA-;-s68Sovj!VTX;09Kdd@ytF~vuinML0o%!4^L;5l}Dz-Pwh1w_fK@ z{Pd^1XgkIalV>CBbe`$^+j%Rle^qyQKpZbn9U!d}NWM#sG)AXZf|rw>5m4>uNN#VE z4^zjEDFlkPI)AiT5#dGuhIH&2!_)un-@vM9-VqQeGWqCJe{^ZN^)odMXUkr+3Vo7t z38IuJooHm_4l{ia2n8}wR4?m)fgqFT^Ik}gNhc9Vu#mo=#arj&(RgU%?xaW~79URo z`8nOxs&Z4v;9VC;L%sOqd8&y^XD9UwCM6<`9{x`GtOD5Q?WRvg`b>jux2}RxRh>Hx zhJjt+A^Gh`pM$W1Q@bsN{Y|^m{SXOCdw-fdw_ZgkPuf9qsXsaxdS1QA1g`Vsgf8v? 
z3QevMPvf~~v8OpH;_x%skEsU~KsK+t`-DRPMPc?LGV}Eoy&ob&;pwnP_qx%6{Q6?e zC#xEbU%e?FCSu(uMw+~klH8%KBT+`tTN-SYw=fD0LP^F;l42pdPOMxd+#Q;)1h!>7 zR1YQ{M{gLr!(MvHz)b?$8`hhTgXFFb#tdsU76;0H_B7r2TK@6*J7#h_)=PO#^)7vp z>z74psSt9<$^7YIY$q^Q7U`KgV^Vl%q1gjT*{x_X$RZxUy8sIoewq9Su_r}3<8trX zj~qnrl1PdC)13%ef+9W(-I|vpPev37GHVGTjJ%Dm_1hSyIS-(I*?x!aoeej+=rSOE zkp7o6#y2YJ8#bEt`E z4Zs5I7cpuuzQ2}b!=@osmdut*xjSKXcAHpQnI@fSA)aIE`K#-C}a{0o^|w3|F72>X$0v{i3P9dK;iTd?CEYvI^-k;ceH!wxb8D%WiK{uW1r z!7C*;@|g{KaVyEV$KBIu>57*M*SoGvsJsRdh z3~3&vlAw$M^2=kM1K0u)&Tya~21mP6A($TpHg8I*A~5Fb^egRTulNPJ)?O;&JWZ{A z>Pa92E(+3&3UDeiSA(Tgr4(~;tE;U?Ek`MCKYTXXFPz-vA@-=tCJXVWX%H)hnw*Dy z$q%{_Rxq>WxW;@G$^CliJM5{CFl`lb(#7m(ZLyb>7X#|$QL@rjC0HeUMukm3_+9AO@1>`8jqLtqHQ>GUJIBaakOUyYMQ z%4ZQnR+VqlHoGP^YaZC(_@y|J{Rg)TfdJ|A_i|Z#nVmt}qTQ@sMYeG_xE@Bp2%`f0 zrdSYSV`9^HkHKOwaxv47sioHZwnc!p3QPYA{Uyk$_G<=6Pu7X~p9~XB_`*D3QdRw* z7=U&NoXh()fg*Dv6=VgZEB~mmWB9y4~>e3sq-r7Z`GEHXZ69HiQjYxlD7o$ z{hG7qdq00Baia^kaNw2pRu+n!o~LiX9CO+qTUAIdC5+~D6g^zK4-gjB?09MR%CEM_p@gSZ!oK>l82q=(u2{Yn1Zjq$@mpD zC9A3O2PK-T#N|8ed6_so!7h|WgRQjRQYH@YmiAdOt~7c2tVz&OIS=|g%K!~vh6!f+ z0bkFkBILYq{d%}N&0)&|x{i%OC!lSA8#h)>(D(K*rbD#+_BM5=X9koMadxyK@2cl< zat*T5PAy3(z7dQ)R6k#BIo5Tg)#$3Iq2F5m$6-jl$nmMoe82bK-z+fk@QSbrFDEVE zVpyB4epR%z{hmS`VVkR!_rgj$#XPW+n*}g{=1o6RmMt)nk__mrR|~C^0wWS$bOF_^ z4Q#HzGi1$s)aoVp1+ND|ErXYz{8USe`NWu@i(Bx zMBU{>xtW&kee{1uLzU33TLi{qe?BKVCzy2W8(w&%B-p!v5+PwLV~C#@j5Km#tzMIp zxXX9zC!(c#P!74U7}vaeB0k-6Pn$>!cJ$?Q(2c=hwOWf+XDwJ`r95y^qMrE9mtN`2 zQB*KhoFwx{IK7vgWYL*mgSm-mODI}imK<%DB{fxAP(_@V%ms|IS;-Ir&QJ(CmFx5d zB;air9ghZt34Y^j_L8C!*&H}Gk?By*Qd2Tr=N>*R)U@psMUZc0IvYa9b7xa#B$Vdk zsL#QO(oLDp@6-Cj6Agw-5EXRpXr~uq#~r6N@1gn}IC8*A>BdGP-{B_z z-l(SU?&^fQhtT0+u22+h-Z5AFhkg^eL&g5!%gI;gq{7)|>5hu{A9dpb@1sU`B=o0- zO6t?NXXVCuQi42UJ3T!{8Wzq(SzyOa9u147#ZFuhp7%9C(Y=q_?Q3&K`tchFUWkN; zAcRWb&>GOC>$>UkIcsGc56ITKSD2RX43{Io>ziD|Utw?&Z_PP7l&kR0@i0?-^Br(? zv(;lWV#r4Ii4oE=z!)&p%9O-K6K$S92k+f@`0S?L->nlM4M0>z_&*41Ba|poVdFnT zJGUgt`I9IHUbs?<+WONpx55YM1b|?kUTO`1vh~Q<5>^hC8x9ZmLP2A$RMz-gPMI|F$6FCcqgBdF24P^7@kjMhqwxpOIMjP!DPM(aeUV$ zicn~ebDRfgurJjJjR!FR9gG5FWL))bT~>axqd*~H*MN33lZULN5vRAwpI?KhpR+=J z7e#pVicaMUg1;*6%zfUx87Rd@yT*otFf7`RXkUGwE_0 zmT%%xa91$92VPQzuw{Cq)%AhxgRK5=w*FUyq(5H05a+hvE8~8r*)V(8*k}m(;zI%R z(#;nM=9eIl-UyJJ-1Mo8a`Iw_n#8T`!N7Lo5TGQOm@lobV#{+&<4?D@TMxvF8V8JF zTX*1*mr;Ew@W{F*S{BHbaaPp+dVx=lyWkl)=^Hr_GQ?rL!Bx1UzSJd{H#U7JT&|2d zFKr{;RnPCl!{oRoe$D;(S2sLzGR>U|(bW|_l=P>p46FhMDw++h--!M1+73=)a1Tfx zd|2yzOZ;-SM>hORl5wyhd5H8lxV+}lP)sn`#ZiU zivij_3tO*iSRak3XV2mbod}HM=zs&^4BJe+>75hO5jbsKYBc{>1mDpQ&|TBVjze01 zmhHcrH1;rjEZp`%BIlI*4C)CGyE{#tXAI4r`(oq79jsA92w~^*pBjjVv5u6tTkRc! 
z5Mjz29@Q&ZR86j8!F1YZ`i=w|f)5W-5g?^xle}dT0{<&zX#-FjeIX+pTdxt|r03C22xN18GQ{FG!mXCdPcqKd z3uM7-@<-p$`jRlydZkN<<$!!d1sc5wKx{aCHk(!JY2yRod=8gsG8B0R6-36!INqmP zr~*Ol<(SsK;=5}lNBE35Y}cUPGzX1EwFDSMcIG zc|!}TK6+TknK7odT1796o;=7LhO^eLu2rJhwC2C7l2;n=bNyuk~q41b3l&f zq{jB3!JiUbw^5J*jM-A5O6$w|u>=RJ!N3UQbW4QB99#64fBcO9cg6V#KKrrsmczP= z9sAJ^Ps#LPq ztlqTi%?8yXm2e}|+}|FpY<~Xf%?`Z5)S!`J60@Q`FmFzz@st5t%$l!lZH7O1txP8H zL{|~Y;hENW-tO&kb*3|`%)}apzKjf90K)SXP%mIH9!9kE_8LNstIw6eb%%`U65RKc z;W`Ef5AF0>Qoa5^y50gTsz&V^raOjEIz~W11cvS!6cCYakdW?{?xB>HZbUkzySoMH zF6nND__pW#?|c2t6TTL5UcNpB1n+HitfWLEe38*kw2#!FU?D1;nb7xbGGXcE{=0>?0QwK9KaA0;!DMu-#DL|sOj6cw z%;J4W31l83H{>Oc;W%(HrdHGC7sqUX{q3|e>JXvR z+U(@@%cI_fVBiH!Whtl*mvhAH^*J14nfHbuvu=xKUc!~EMA%~a^#+yOf@1mO;QWLf z?n+o2`5yt-^}|3{hDvYXZ%J4F78^H;H0ZL$T^o5TGM8H7*bK`DeVS{NRhMN^Cxka&21^Ht@gMt zrBvRx~tQGWWPGd0BFt*6)gNdq)h*yl#K&?bd8Eh!iS zV<3su#^b7H*;K%CD1(&_;-(WqX&{>rw?N;Yfy+uWej1~jQ{1Fe7G?SH+pPjvey#do z(^&;uZUWH?4^IB%KD+<;%MM{usPBruw80x+j&H@Gz3}pu4$_j` zRmk^=j72}z zJ0)zGIc6Q|OUHiypv;LpA1wzk2X7?q^b=$6CdN=JX@FVG{_qt61Dh&iFKncJ0B(CP z`jG|rp(xgA=tq7PV4oEgfY6WipxT~sXzN^A|8)U^m!*29m2`kD_?DYH9o6G~lLC>y zHbO<*-yKxu6_NOl|Do42Q0@&Bt{cA#eit7OH-e^({n+xEukQ@OY}DZFd1h>A;v+X| zEX*npy-k(mmRwR~us(Q%hn2*?n3~8SH+)r!XK=TNtRR3JwTz$t6B8%ugTcdex_su% zA0kN0L@<$tW)F6sqpROwn}X9lA3v3>+fgK%UEhqqa*o=u@{U@5oN&rz4Ic}22G z^>4~b9Fyv0!(#}2;4cmCwhtAAfJjJJp*w=)2(sqN;$gMzWSwE-MkdW?k25o>B+`a8 zAki@&L{g5u9QF!14Qh;aCYx}=frM;RqGxTtMB}!ivXE|hReIrNJLdC8O}-;w*mPSn z+-L4LYtKppH&s=6bjMJu)AXpMY(_h~x}|%wC9;}OnT9b+bY^Q>7x#&n*tYYeXB*b! z{D-ErY3n)cNpYjLNrQ6)ImyyOHix#7ZKnoGv=sE!-zlBqR#;Z4AhS%<_k4_ex5HKz{RYPC{&)u#iV2KNbxgpHr9%IVAsLS4|)55@-ulXg1!LcKa$PV{!evHZaQ_OrG9YzDr&v_C?zxcW79*Bd< zdpEKYjjY-RJkx_IJPpaMDV@pf5Aqgy4jW4{&7MGL(oq7q;iACT-K&((vjJZv9^5$2 z<~VO2;F?Dp+FE#9GIoP99ZoUiEq{6+?Njf7AV1PV${Tc+?=|a#rVTe(f8cx0#WM@+ z=qOr6O&VAs<<0t#OLMI5-C#`fQg?`NBx#uIzt-O*nrA*HDt|T^v_g?!cDr!$U_;%# z+T4BKd0e$b$N1D@45zY`>*VHd?6a7%0p=aPD^FDUPsetm_>)e)qQ}H=9WTWxD%@-H zPrhFsrU1Icp6`3EGdX}me6)2uGjgTU<7lJa}J?tv*U2vXX{ z4$#w;#K;R}p*Gzz=w7w>L+!TPu225?{@c_%qbpzl!WoJ9-ULY%5696UbQJp!+2w_#@u_#Qgb? 
zid43p)fWp`@1rc*LCE8me>U^{7A3tIndpJ4UOtVb$sE`k_b{5@_w~Mhmo&Zf$eN!$ zEt>TDE?~E6#eHHI2VwFSp!Neo0&)cb&`s3VmSmQn_0X@|4sNW=p`>Dw&(^h!;4jWk?shffV$yzmai9G)-8^hm))&j# zEM2s9MrMwbU!zfQQ&6(DwQ2dksJb?VvvcA<{RAvikEi@}b?*})Na-9fz?j-mN>b;J z6e_g_F4Cguu=kc_yQV|iQev%@E`)md6_sjMJ+sVi>D^y`e$y2A{9R|QS#6{bKqAUw z@+c#h@uFTMd&`W@BGYuc=HQ{pGjIAGr4mZd-`i7OIK>nSXdGjcnoIlqf56%&veKdw+{|WZ&Z)7T-&F8j-=(EBVQ>scxeI$raJb1@oy2kqhwg6 z^l_*Zr!pF9PgNdw=ViMt`+3P7| zTdbpAr~SG7{^9H};4-_$bCdYEO@Ze4{XRhnFHe+bkVj?O&z1?@JBi^skftlid3tOU|y ztMBFqwE8gKWITr#hkNyyX&iv!1y+rV=RU-1$n+It1J>EobztL5l6cg0-~HCybJb(hCh>EAt9I!T#6jS}l+-tS!-Xo~@5Dnrwf5L~;~m0jP%d$63Hy8XJxt5%)FBN8sGRhC#2Kjv1f zE@?#r0e5P;=YaOzps55yC6a-7fgp}91UVYs?|Oe0->L<;JE$TL`;>XpW>NeA8O8;c z%irb4uzrJ-E;wgSCQ{#k<=E2$Cv{N&MuyxgsNMf&x)Y0r4Za*xnxewF8K*-A%p0Dh zYBr-k?bnT5)TG~jEWYkVdzM(OCoFxmB@~Y>*Hs+R`PS%Ug}Zhvej z;bQ5_kE6c9VaK&Jlwx>98V93O9DXq*=Gz__6S+hcyC`}3?`Smc`ee`ydqn6U^pSWD zJwkNgi;|ry0;Eqr{3`-hzLO-`&VQZ_i@}}drU|kz7_!K((V-5RA<}Hx!Ym>4UJQ%D zndX-7P?dgl!qP!DXBhPMBWWEIek-+=6l-I^z?laUi%xzZG%ggx?9Vbmrq%u(HHlPf zEw8lCu!xTL#_?x{^Svg=XL5P)8FEx#{83Ee_SjvCMH-A*iqo8`XD{LHM>qZ*4D|@A zuoyB;--N{Lm@U(XjceM)^5uT=O^G24s_O8(jtxU0sow^$U?R;qPL3rb&ACb@;(Q=f zIAw%ylxN8Xu0*F(2p0)ZW77_4!xy>4BtRf7G`yTu;_nHi5{-R3GWm2BO*91DUP($7 zBj$PRXH98+1Ht*Dl^8)J$xu)y9CVQKa|2RJ<@-Bq0U&~q2eIRal*yS|F_T=Q zLwxryq=3E&VaLH@Ub$%!uGtUBUiYkgYx+;+^H2u+`FpRhq*yN;}@7#pL(ADg)vII46V@?WbYc!m5QL?&0# z&-aUic_i<__L>i``ftV^D)Dvdr+k}OVSrWuoPvwI3->xkpWO#;S|z8i<1^NLNF~$g z94QPaucNSrEwLEDj}w6C9!$|X^JnF5AeB;*CiK}MQxX0pEy@o~!bpXINKP9*+DB%8 zQ*4d~2B}G@rJk+RGlx(5b_(_I;iHWuokZC}qffcK6<+h=H&bh@hv#(Nz;?NJWzzxB zp;vDW8~h(j?*HR01_MMgPtf;?C$K~3)&A*j_nsan-8b9T3blx;EwKaTVP9F2J7o)+ zhYbh6STL5kefnd9n4~ARe_2>AiVOtS0)gf2>kjUE_x#B^{71ArK5TxDPH}Z-gZ1`3 zK#o9uE}kM|lhSe6GsLk&#5p^NX%lE}))HyW$)Qwv9Zo|b~;OEEkSuCC~ zmiAZ%s}d%g0*w8xrRw*O@XB_qiwz03DyUDVL2!M3A?}DG*6sP8|nJD4N zkA#MvVyM^L7rC~brN@w3Pk83Zx0kNF79ZO5h%h!1QP8nd_Ix?bG3X9@wt&}F?YQ41 zU#!z-pgrmK+vCSqX9|u_tzQy zB@YyNBk78H;ujp<4JnUPXleTJPrB-#XS0CnXr#rF&UHSsk7wuGdmd zZYX5fl+}fNjvezXg|TALScx7O^#=X%W8Jle8Vfc`ms0L^4y8@I({+d`(Hm% z$W1;+{ql9a)Q;pXUMs$XI8y^yN%*1$IEXYv$2v|kLLQ{`8o8UPJ| z&0`0N6*M~8$SU_%4BRNSA-{XidY|ul3Nv`&C-=oT{~%LZV_A7%PnzaoY^kO0Z7dDp zXx^2`?@~w)+(laLesm3^%xe}t?(Vniai(|)L6W>Nt;i_h+wWkps+}9l|uZCuEo4a}ooHWfx_3mHbyF6AEJ*Q2X%N1<(nn9vqIa zC}QytG?IwU7s}i;4xd{2A~InXJ+l^!eI*W90HO#^-NM6G;z#-niq0+n4W!RS3pVcPU(GDLhZe10dzwqed>nU`>ID$g)rBC z`?IU5uJPvrJ|IINM?xnE!@zuA6R1RY;1rudz@-@#&pPG}sFfsJ%lmPQw;Lgl26LR} zG2q2~?se#rPyL?Q8$8UdRqx14tDkAr=l9D$gcI(0sfh39=5C^m#D?I6uYIp6q$`RX zP@b;QU4>e_yS5EJJ;BDvM160B*ID`oUj?d6JcKT>n}S?#&|kovs-3$J-A66^0x-t? 
z{qeA?OqJurmA?1CV!R?LeQzsBauFdS=5Jwq-tzD0^I_lktPc(!wX?_S4vnElF^uTR z($IAW8q=X#$FhR{$Hl^^$`BU##ZZDUWDA5zZ(}RBJPK$T7O(rEd|)!w+-LFe97B$R zoJc-DQwW`CvTBjzjFQW=0m(owbJt?76;5-_v1Zz$pdl>b8a8`eGBl*%GAx+kz+mH@ zDM{bJlH73C*vO~g)pKoG=g;!k`CCJ`?xWfi{^)%Hr&<5}1!LzBNnT)PePF zwXdd;@^g%aUCjTYZB_<2Lyc45hiOJU%wiw}Gh#L%zL)t_c3{ByN-p7`$F6c_-*4T9KgGD zJ3qGn->TzZOV=J3^CYU@x$_%*qaTlo#A_Hy3TG9|yUh4KqhP}lJKQ;!h=kaI_Uo~a ziEfCzUPpI7rtb~5LE}ahV|uWdFd|S?9FoRcd(~MD|NT`yyxBe*N$zI~kjtx#B*7+B zuC4-*Mg$r$gk)k-QxXNM(e+L^gE@2fDumu^vbX-cl|xPPd90kd5u<_umAJO~E~SAd zf&nh2bhG1Wj(5W|nf|-3cI{y-4M|VVdL6|3V=>e9FIo8uh$joaSY++Ffvb_1`PEa& z7!n3zmkQ8dKIB`?u9IBndPqz~IJ5P+iT$y>MctZ7UC0(W7wtLdl$C^T9*g?}i{XTl z=r&2SM~~M$G=UZmRDcSyN%ymG*{@a+K@?nglPosck6rzKK5SJ`{xhetU+Kg(7Rz;N zR3j5kajK{?f<^@wwEey`inhY>p0qjAVC#l}WTfMTAOxv$gzrT;)Qdu9SIeBj{;TDJqa9;VYZ`%x0V<&E?t5 zQ7+X60Y$hB=rvxs_L43Sald7~owF_;=*QA_r0k zn=~1EzO$yZKXlk$ZU0d>^SzKg%!>Ygea5Z~?hZU}1(e#$`2Y_S#OR-RZp^@#09?VH z0v!L!2oK%DeQNez0wqAdn(8fs;*gOIOf2z2QEn#I9 zyd(JK{ zF2Ks=^CFO2J*}qSD!LQaS84pbnMefaf=!NWV+}gg$uO!i`-B$u*k#9Zv9sYi?^uHc zeq}?uEcYpmAaC`h&p2*k&+fZWJJVLV(L1SU-`|r(5xqu$)@Q^JM5-dJJru3^x{Bgu z|Gb=$&8xwNs`!+53OJXy( zw`H5mwxXI`FAz)yDFUN$@{>Y^SC=GZn9yVF@n7tYGR1)3kZNUhkOdjy;Erk%-)2dC zr)8=#?;m-QE|&G<3>{3Ljx5`m8m#!$8G^czYd3vfkLijAZIL!rIv#nkMjktggCa#M zBebE|Z1{ucP6D{3RcPxcqj%{@nf(cvNCm#Pn!gaP(0whi@!lL_7!YVIWz%tAft|%_TwJxFB8W3g=+(8e?KZa#*A%vFO4OzB# z2_O9ILzv;H?~BFE7;3;JNma3|XjSSCfqX(6ihmt0QNS+C9kvFU4}N0+i4k_A{@#xl zE~>m3W$YkE_ygh(TTMQvc0b_iM83uz)0hxI39JTNBpSX!ld@#O0T)8n(H6diWl40! zPc0}wID3)@5TOY6m~MfJnA%V7ZOsz_RW7ZWI@1vx<}M#-ym0D!6b^E~I&$z+Wl#Xt`ef^R6A zq@sDW@l4!pyMC?Bb}x85X`!&1*c0q!qLY6%uOzV`l6TFK;h2S+*xIaJI{Jg}-HQy3G#09rN>gUR`;mYwhCTuoE!$>laImCOVmp z@1x-Ns=+yONLu{-fxJ5MKM#^0Xg80+cCAbeq8?RdqQ55X{U^vFNgA+eKcEzxH#I(& z$AI5NOd{y?E@#ujs@))h;^x;?nygiOmWre%G8!peutO+8An-v|_@^XM9bL)fY>*}W zd>X5Ed!P;;wdwEK@C~Q^)+Tukx!P;caIOMxdQK%4Gy1Uig|(57 z)Jx_n`?p`-N4_7+-CjY0S=mu3SKf+ZMD+ONHr#+<*(*<40*K-uGd6$fcWP}o{%m?N z+X{+rX_xtYK!it~0kF%}7b4#+8F(3@iYS;r?c&>RKhhwB(5S_}-}m7zfVsDQb4u7R zpym!Z8jT-8)MP#y0?!eUM3zUiuoN->#Pz$fKr${RYH+|FbBcvXvIR`)kTiO}f;-R+ ziAZK|3if`D?u5O6$~svtTGIKe_@QUdJpxWp-!suAzPZCwRxXeG&62CdK6CWREq(WX zQV_kytxi_^h~&@M+0TzB5MeK41A^9e?8$f}@f-L>sqA|=3at9NKzW-oSX|+b>wz}O zzVBxe*V0(tS6?@78hVv7w*l-N*&q4iT_3-*l9KwbA<(}<7CD<7ML~(Er6nNg#K1|u zC&f|e-^}OU3wWbv-cO}I(a;6@ZKWr&{h>PmW+uErhkp+te8YOyPOQ9VV)x=DWF6Y# zi2!B&1PdDnO);MJ(@0g)j?;;yL0?=fnsxid388_NAiqg^!4%Qh=h!PFE0Dlb~q)0!OpvMjf>yAEoX=s2F z{EmJql)=ZlBwsQD1j%vDJ?$zFP6}m4>tK zWOaSNbDS1 z)+UFR6^#{*nyfao2ja6)CGG1Vj=5@d06SjJjP}I!l0!;jV|v|7^LPwaRh>-<5cmH0A zhT~OPJwc8?%X>0c<6lAZ$+bE*ys;{t(B3=lr1`M|4QE@eh1aPV@gIjjI{KRe6+TZ{vzob< z>=*Qm3!lL@1lnuk!V)9Jv!vBApJDvcUtD{s8<{7J>wTLfTA)rlz7*KgDS2=lF1xMmbHGCO{_{>hE74wm1QiIeL;eR(d_fY>rDic zGZfd_kCc?Drs00$w4^UbgUX)PI36f^_(7szh)g+161120jzKLg*#51p;yYJ4Xu(hv z^KK_Mtnt1ec8L==dznmqN1%;Y3>B;;Y|J!Fs|B)iGm?q6%!DZ(M=-s!!+=gEI55Q1 zz@k4($9tGjLOWB{_e8?KZWqrz^b-bKqDyckgxp$i( zhDmQ;h=uiDNypdBM2Po4&x><5B!+(UQOq{P6&Xv`AEXK}L?Zl<4Hhdj*6A}$60cQz zp+%{xJPNQjt44mzY&Nm)GSz4iv#a4*dMCX&J(#~q7^8Pdm3dcqQPz;f=Ugg~{Ha^X zTydbBB(XGaIK@J(;RT)jST09QFlPt3ca2>jH|p414ve{VaBfFxP;@^o^IW6@#_nh2 z^Bid(Zy!peeXUFy*m%9mo5G<>3G+qHJo+UipihU&@CQh$)xeYM7YSO-YjRShz!*|_ zn?PcNk325sQ2xdrAY#|UM3wcuo5Mt&UZ zUB|;1X^=O0_#=;TR0@yVUcLaIH2CLCDx-odyC}|4RHaan-c!6az@RWy{OCuRkh#Gf zSv-4mlg6ci3fo@hw03rdnqQ$2OU0fXWdxrXl3@*z1sPyuR%tHF(kjimsfBX9rb~%K zjA(z(CQbH+{4w131g2bO?`Es_E*Ka5g|zQZiVBU#)Pqz$sQ=XQgUVGRfEw+Vzr=qp zbl=6dmqWu|C~_it&c@SEDoIK6=Zlpfb_uBEj|^M3|KnH<7)QvgVk$)p%;2?Z7e~gJ z{4yuZbc@9INJ`^-ssvweXJ7V;>EFU$etJ<;^q$szs_hO2lt0sZJA*-^#GrX 
z3vz?*nTt`qByZKahm%)K6~eR5>>}Hso5G^|jg6nh)vz(oy|AKjWaM4YBJYr6M1I-6 z9<%KqJ?70nhstX9LA)cF@0n>D(LlY7_e*Bssxl+K{;BjfIcSKX;Z(@8d(bm}8ODi~ zUvphV!Ri}g6w7xzFVNb#<YRcIDIW8?%!I(F#T;!UHIF%FYG0XFmBwg(xh>lNSI5 z+=6K;1lC-BoRNdz-i1R;qG;3C8_2&5BfYQ_(Ti>zVCaU_CDlmm3qQKkL-BNN_unpE zUW6d8lD4DuP!JJ+XUeWkYSiAPC*hYs!OLFM$h+1BD>ad3T=tLq;s>qC(JD>$aTE)0 z7Wbv9Va}UbJ2LXvlLY;9uVDKpeZ}4)A1i}LC=NtJhkDr?aHf3?P*cqonPZ#y_|dhs z6)R>?O&B}s;FY%(U5fv#kgqJ4fRV^WDk7`P*cngY8aO-j@9L%U{n#)XD{hRc8f|t& z#=SbBs^rsGt&!}grV3=NF3q~Y=S#n>ttb)j*qmxQ zh1Vkdt$TN!2&4c0h$RB+t;_K+ zyq1vFkw(grD^n9z`O@gRZk30+XY?ftAo4wSw4v&6>PRy0=cyeE4NtPQ$3Ps+a@E;9 zw@@gVuF8gKHJQj>kO`__dC=BY==HIZ$gPvBbT_T!#l+IvOF8ZRb@J_Skp#@2n*;*z z6oeOi^3#9*h>EwI8^en)E}v(l)ur1dZI+V#ub!8bfcMqmKK?9buI{KmxUlR-dU=wv zw>{nxwas{us?}o2kL7ARlm`We3S^IhP<==oUU9RV7^dLO`68pS=@#}e=(dMsyl6{F zt~P-O-^=zNCe157mR0#mc*E3G$uAS#+|O0LT^L^QL>jlY&WAk@zWC*cmKPkhd^NCc zs`!0X-rjZm2;Cq!Ftn7QLVnTtM{}@Z<}z_P%NqJjmYamYR<)jk)bp^F&1-WCsx!5QO4JLb?+IFh%}JzX2ZQ`_0l53PAaj14xWEcoco zE2x)V=(aLBWh`HPF6A*?zkPQosn7uUu(DI?lwuXHav@oXNfumB#*>3>FMAwCg$nR7 z<-}zF$T*8u7dL9NC@=XXfrKxn6&>{I1Va*aL%nYZ%7rY3^|kN9@;Xp0yFy_w8DAGL z9oPUXB>g6J%gYZiEqiO>YXjahhy|Cz54p(%(=&%%XSbLN4An z*1WvwUTlezsvthQU;c;JtlG068ulFKr+=>PBymWmNLc5^-#B$OipvoXFsoPGM%h=E zcMhrgG9qORe~q5L zdC5MED-~Q*bi3VuU}Ibd2TJ-cnK6Xh%i=J-_fAjPiQoL&=>NRurjWZ&U_`>l_9|6c z2uleEk8(jvZ++S_bO7Hi0)^Rp`Y5y(jQGoZ*dMLA0PbKxb=fsD#G;Jn$-lg|AJ!JR z(7XGL#p}aF&uk(>oBPQJ3GQAn9{;w!-VfEqv_~1{J5wj_+Cu*`)oto}>TfRVPbB_z z>i-v0i%xl8#AUs?;c}1Cvt6>s_XSL~HRVuQI~Im7B!digi~u-Fp1_SFHIO6GePEbw zb*ZFF|GbXk(Z;zW{^#BvcK`8kN!NdAI~4mC`-|f1O6ZuX|CD*WSoY+E;BwH0_UbDQ z8mezB9Ge7Bw51i#9!M^19P~=QczWa2 z+M8c%?N`z}m-w7fKD|-I*5cieQXodl-bQU@(;s+ia8_@h@J%GM*xis%MS8N26ymRtE*eE4*B^L|FM%g7R{b8w@6R%7g3j3I42`L% zy?8cU?9iM)x-gbldPsw(@ikY;7t*W9QTE-#z9QL8QRgNxw zPm_Q?t5(L|RO0po=*{AY6i{Qi5+#eUVYh7Q(fm&r|WkU z?N*KIf{F?iSY=*fNxTX_hAUWnK7}3_3o5wu09`;9&5hhvlrK7Xa2*({c$vdsDuKqm zg7Atci<0WD9{u=EcKo`8hoFZy7TTltkA<$N=|0EeJFev$kwr3gL4GmBL6d`BOzE8+ z>tZ`XLN|LX=>+*Fo;!ZqkX1JnN-iHBY$!I81~zCzMKw;1*+! z#CxCyppw<6FV#BP8Cf^wSfUkz$r# z7tJ;<&_>pRnMK(*X^TQhcFHVUPiN`X6pYtVh>b|~Opnt^%iz#)ra=LbY>qe(?#3Xl;y%CsylR@g$py7a{u;68zev*6~*H1uJUNC~lEY-MeO~crk zafwgKl0YMKM=jLO!B{){tvwAYkYy@cSy^kFB_IC6r z&3zbG`t<*00mhuhktny=mmb^}B&Y#}5HrhZuh179889()y&?s8@hphS})!u4l8?;xu*BsmHMW!fP zyp6sM;YAtJ?x-;z-D{{`Z6=h%kD9Y|*;vvKM>j7<5vz9WUp;lKAGLv&pAjF}{1Q6XRzM1y9ebs-nIcz*6$5X>xePxA zwaaVwl}y>)_-cdN%65xTO7F(I9-^6+-_8#s&z+s|RC?e4fnX%CqGhL9xTrq7){Qz? 
zv2aF;yVO$)GzOHRx^(}=W+BoZP0}T|UuWLe4>-Kp_?&$Zs(lFqfqO$MZXat6@mT6s z&{3pv7G?bzYTo(oBmR(S{AwU(TJqAjz#3IqaCyVc=_vO7w@ZKjiE6ukQ*l02unS-S)hM5b-BdL%-uanVl3@iGbc{sei^M+e-=E6P9Hnb z7dQ4(z8aNK%VOR*!v(OZr}FCEGXf>u{BArj&2lu}QkjCcDTC&zcb}DnQA%yj`Bsp% zXmzD?>oE;h=2LAG40+Ou7SK-E2csA1b{9SRA8S zhSa-czqT!N{GBkz!B*}|9}G^h26-oV$EA~YJ==*@@xa13{XGZl{Vu}@iAc|a#_Ucwa%Nlgnowb*ZZU6TT*m%oiY-1~&^gkNq#IwWipxUtqF`h9{9 zH#e&J*_SecG2_mDae=7(1mpoX3XF|Ug^%OJ7)nIv8E!nBew60PO%)9_O9Bx%gondm znoLYZ_r6UcnzH_D+;X>~_vcHI84|XCaR}EVB15~rQf5ePwmbx4#_)Jx>>=`%r^1SLDcbtOx_laoM-5ziIB{%4W0x)reU!smA#-31b ziuz!M28(*i7|Xo|5n*!*#>9DQ@!x4s$_nZxTL=lRu4M|(p}t)9i{;o!&HBwm-7g}Z z=9px#V6riq5%3i`@{c=ds=nh8k8o>bY2{4SRG;&pBM`E1t%PU-&SR?MyYfT6@2AKf zq^KIca&`#&bc^=<$=3$4stTU>K(b0wj&27mrHcdM%I#4*z*?nhER`LcXBH^Uv#nJo zg2U2uCro9l!&%u3Y-YM!^RFPLM`tZOva8d9G^c&XPnE~y={o)fIspf^YviZNbS^lO zJfN;{0(eGqjaQEMuEg|pm32FOHdE9xN{xW~cLL#pw^&`c(Vj8y)6U_n!;@yOG-m3+ zelxdeg@|Cp-)ko+HCnyJcInrl8q`#vSNPsq;vZuEx-YwPkMS#bJZWawZ{Uy;?8xyE zZy&;u`6yX=^gVZ#u^h>)lZ-0pQ1_}rs8hKqC09MBk65*0~ zk-^UL=S$i^VnO-3!~9SR$ip(2r<@>7IpBeas}Td(@VFe~<=jRD*>f-L!ce$^j@WCx z-bRa=tI~7`b-;)C`rnz@{fNUYc2OL0{at;&i_7B59AH+;IT~5H2ny-&s%}Ddc7h6= z6iVY~uf|V6gaUV}QeUnJWA10uxQD|oS!0ZdxJcxtBhvTj^?S~bumQ<^3R9P3QNf^B zw@V-&Y1%sEKL#bh*6PN~H%B)X@k`_jxH5e1{V@90yTwCO0dA(azMIQseq7M%BQ=-1 z7p-L4x!21&A&bM%);LDB{!kiA$@!^tm5x zaUhs_ilx;+_Y>JcI$wn^cohqMe#O^NhO(!=f88 zdx#6_iNDA>+;-;d1UkNF=+!TB#lS!^Y1O4Yr(Zss69IkcE%`kE0yz1Q)@!C+bdOKH zLpv_u_c#|{=!!;^2(e`oTFP|&LlN@^9%HDEi7_^x-FvhWIIdw69>xS5Itevu4_sje zg}8o3^YgJKG2#aR_4Q`}~tQV3=bsqPhE0@k8ie zZ0P(VL=@c}cwfP&e`spCVZzK*-5Z>H`ska`D(8bBhVE{9wRA&J?5H9;kS| z{VBo&-k+PLV+W2=*@!^=L_JmGa#wYbo4#oU^4(s|?m&z{e0gRLHrQ9Wo5f=|$SEs& z2C#aQazZtsmYS(jpT0-goHXXH z#xAJ!EuO;eNbX$Kl@aL ze4nK*_bc+vWc4!4KG5(9B#8b#UP% zf-oy{c`TOYgnOGx-BQ2G!OYPaopEl!cr6340To< zoT4>HHHO&l@ACboaqiP}$#Wcg+LZ7yBIY`kRhZ`8fKz`Ys#J3vT`?cYZqJUx;~2PG zxR3e4J8Pjd4N1(2$QaDhq%P)6G6Q}p_Ct@^n8irFR(khzaoS@m0=Zg5GfI14woMHS0=j(S$UTXAe(H?4VCR^uYt)&rPU-Wy36 z^X@D-i?aw5D+tZwPf7-kH4aE<27{wF953PvbeMcJ4nhoIBT)(@jqn%{q{>>K5yCoP zYoB3=A*w`&P7R|+EeAsQ1R-rVFDk{zalLD&QnwLi%Bzn zB$MR48YTvOg1HDcM$k>J#&v{5X^)?fxA=Dm08%cfuywL@CxT^Ez7U~sE3772GHgin z3=r~Yrf1F!W44I%m|rB}T^o!B&h(=@DDR`ij6se@r~JW$TiLVvGzKyINfpah-xpjJ zY~|a*OYV}Ub(D&jSA_v)n(5eqvYi6(^TH+O6g5nxJ7y&NI#^)8fAE#i{du+YJXQ=j z)@!i!_wY1X5`gk>DsSsy5TVlPt@mi$A8UYBpt4>7KuO$vWG5cvldVQLsOW=NiHUzn zc0B4H{dfP9KhF#IyokSJ{pwjiZ*5}!j(c(q=STD*svPOr0#2{ZiXFgq{n>i5P_Uk*K|&qDU+>YS*t^n+$ft;S=Els`v#}k2;}dU{G>`(;5Ad;VzXXKr4#C&%DY#?!hl2EB|Qxf z-pG4ghwka9zVH?cBBk>*$D0T6==;{wMY?4*xttsBvBY~Rr0fAQE16xq?HnWK`K zTGvRT&=xRTI2zYe2n@o<)NYzu#lufed?Ip5=6R=i7LT~p79IgrsOiTD6`O?csqKL8h$X0Fd18lUgpED| z<2xXbrt(v2j&$Vm2thr$Q_GI?k+4oOru;@C9f+Zk{sDe`3$nDh%Fe{|SKM0;GSh_m zE4SI-wIM|HBAcWVKw)XK8Dh`#A+`@NvTuv7Bu2uzzL)u=Vc-Lj8g}i@ zhO2Ro9i1{_3p)#BZVh`W_iOD;cm|8j%lX%MME9aturX6(t7H(Ca{nE(*BBM4(u z6w8Ek%Gki=6bkM3l6=xRq1JSl{UT9b{cG=lU%MvE+#CF*(-*}-MQ@mrnZG#cLQ zc0b!NtJW(4w-g{#c9qLT&i%Z)wfNZN(3b!c7l}?Wn?kfM$= zDZmEgH0Ca)r?qdOi6hsJ&zg+oT8J zhVC~XbC%bd?$LYpkKQV#h@tSUNBO(r?k(k8&n+Sv_Rm7NKx?lAuTl-ss~1n*wiDuQ zJbtMEoRWh0@jknQB;QGH?m|d7E4t7JWM-))e+9t)f8Z%1dH&yFpO|-}E05Q(h*O`S zken8gN>b}FDDWfARC7|2K6H#;=xWgVV8QyPV-wyN|B1L@IY2R)t_xlHv$`ypMCz@Q z^KGzhqsQ;qGj0^E|Hq=^C|^8ZiVbn^>#LVRT``R?>MN!luN8;MYfH7HTTo_ts%TH288- z54AxDnWNonIcOpDL(5%RtbwC5DF$_>v*$1XGtUD@NjhBRXUwJI9v8oPvajLgE*miO znmhJ-2=3*cuVY82Bo3~Ag?|Qg`nhB6bPTn>T=mSckG0pJOqC*#v!D@1KsrYGvJkQh z*=%Fn0({di`D$0>Ro|S?D7|!X`s1cYN8DS?%%m zi&iYc+E3m-ATF~e(GqgX-1Iz}b4kYf<~{O*ME-eZNjKE%V^b~k z%@*@1V5qmL%A{|I?1N~**QyW9!#HZw=69?&KOTLzcF;cI;*Mmk8pbe%3Q&++nL2;j 
zWM>op5KCg6%T#h6^62Caj?pkFd4G29@p7u`ZdZJuV$+nYnJ%C2ZJ*?47^U)4fC){i4ePNF4-ov{n z<6Nz`wET@}7rG(taMibwbvIiC<=VXd^_(g=!qlR+3SR13(qo3vPUdH7+tI;@wJU}kiO3QS^ z4?t~t!7B%~1DLLhl?qv1K5RV&(2FzbdtH|UGgSw|sj66BeIMsy95euYI#+cOF64Tq zVEc+&jn9kfbOA&DfD_#1d$s1I?6t)LDA0d>5BV^%3(V%IA3kgOXqC)jRP2l(RIq?$VuWZViV3sC1}%D|bToq=w{N^NJ5) zOR0C=_<-K zV)o+4xks0g&PRy#KyFQs|2#M0` zZ9k57XTccb4S5bDnuuS|Wh;DgaajIJ(lbP$IPk;!4;SLp#E|W?zD0B!T!C@vKEL#% z8mY;+X#FJRjzLquY}78VTnvUmyz!-GQ`Zd;c|cx@O!l1Vaon%dSedLH#~zZcsi`89 zr6c`fsAv6pOrJfSkXpA`uicdL >=<-ArJ;lieWx}}hsTB*v`wdjz0J(`QD9^$PP z|2ymJ)df31drD_^(O*y&0vI%^yvIYr(~bbexVGuFSA3&~+&t?wZm4mM_LxX}lJ#}~ zB`f{?S%uJ|1lE`e70#(iuG?Z^;8AgYG0``Cbd*=Q3GM9Zy~*uTvP5VdwbV?zzT}X7 z*K~iVkq^D}q%G{f%_sZ3Z~(lz8!rxK#*LW@_f^nzU&SqzVK7|4m0@x1*eXG+aH4OW zjK#$lVPoYOnSLRG3{npGhX)cu=!v+J^|wR{D97bT>R?;;WS2>zF+sjJqx3! zegc*XV6Z>|o5LF{BH zA7RGnleCiN0IX!obgl`-ucQ)f4*ngI_#6!C(hQ3=9dzcc9)z0xYD=%fJlA63HpvB} zzUr+4rXr-wW&(HeRv+T+6c=|bRS4+=S21GTS;sqrNU`)KjVI$ z`kb2;a#DtnNFA_@f8AZabAR>l_cCP4Ec{9~+3dJA-AvR=F?f&uwFmpg@{dN|r81(@ z{3r?4uw485bqj^xYR=WWbf#PPr?Kgn3?m*P z%Krpy^KjkR+X2>Y^ys|gQyfBSlkT+M;{U)offVyrRqKuBdPJ!eFG^8Ax;|}S{Q9T< z9|?z8Zk`jY^sI(5N`}KDBy0l~Zzf5mW0$(U*JE+e!~i9z)6MlwJ;^IS%C@aMjk=p? z=31Guu75m}9o-|W8qO;DkCQj;0{M47Y5+3OBRxT=%e4&8i~Cms;JEiGY-49wnm(dN z3G$(^|LZjMLTaDBYPa=^$izO70nICBT{YX9rw71QDzc`+7&mUx6(!bSxd;AWxtV^z zEBeGX2TD<)%z`Dke3pS1;mou=LVuJxeaPz;Z2JR8m%HDtM)C2l7sToW-(1OlTP^jW zdLwQa^{${~!A;vE1NS8rVObxB#gumnDiMrI8->MoH=s`<{X&XNQjLk)FsTouveljKSruuYUHg2sGl4pW^P)*!Ab-@S!| z*mm1>1co!IXD*hyl0;IpUL~bY+z!FsA%4afz6`YuS~KD2@GHrxVsp4z%z46W(mOo# zcVwQ^CE?qJ=V7;hFkxO$h5)hh6-t()IOZ^oKeYLtTvZv%g9YPuf9lEeJL|;E9v0h` zJR|^9wqCk{po=-Y>Qi{(b^AV`&F37aBkr@o1cms7s%qwvDD3e);|(dk zDr(4aUu#r5;a%YtG=*)A$_nF)vYowx7yxo@M9W=oCL38k3o_|KdEp9$+3e%sX($TLLrEQPL^}5l`JIm9_SS-H_DNZ%W}8bhjJ*Eh)O%fDlD%j z@Bri0u^Xthe+Ht+kz0&fFa&s#T~)eS3{VwZ*d#LCZE9f2I`OU`*tkPOn|XnseZ9~x z^2YFc01)HSC+)^+*-7(b82NPx`hx^NNA*`r@ED2f+0TpgBVM`7@JmyLtMPfb`-$2bs5KUcO1yoxPTtnjl+qv|8vTI7n2C1&G$KZ)8ku4WL4xf zRjv-0gH7O9^t;7(!EPw>i@L$R+Yf(!q!S8kv!7iV^H#Q-e43AWX;1o>80FANs=f-w zN8=mLMQOep9Xob>!b>`0g~^UBDAV_LRnJiR0g`3)`PKCNv=<09(^y#!TCFnJj6N& z*P(0G?4jXhQ`C^&j^}yw_ueZcm}e#I&Q8+otiQ(bXD5GUpTts@Liv15-(?aMc_|HI zm-mg%e7NHWqD%yyLy1DpXP$KKNdFG8=YQqkuZrQzsr2Uxz9SnhXY25}+ zpM48%Q$w;m1j|efg!oK#n#i9@eJKVmT|l9d%>qCgR-GY4g|@(6{)q>mD;+u=ArgQf%Q%^0&?T{4#+ z@$1Wv&hpov&?AGU>K+R2JKmq_N`wtwU7k#M-Zh+VH*aM3N}lE2<=l`US+x$d=ZcrW z-gnoAm_VkWj)ud6EAp7e4e9z2Pk0z0clVN~oBmSSY(t{L=aNWqZYs;D= zdjn?+(a3E4eZ2?EZup1MsIs0oi+)=5){n#|ARz=z@f2la=wFj=;(j8r4=i^@2L#Ah zFM1DBo?tz^3sXxeD(!ui-0Bo#=bdwAc#2L;K1zVVF+tSHreszRJKO~ZN)-)zg`QG? 
z?!5l$Y58xg=_@5t1p?(N4JsuVP-aDHJcZg1nPs#XN9(5{`@fZeVyKXlkNEW3O^Os0EP<1|z?~`X2PQLy{Pi5nK)OwkuHgIpg)S zN`Sw%T=XI(IFweRpp{+#uUu0-6v3Z%5wlf#KSXB2>BW3BaE*jUUOpyahktr+&|B71 zX+X~IrQ~Vg3AdD7dmvdw5Rm- zBRLn?8t6S46N9LT7-#w!OkVn^PIG*=Hl|b6Did1b0u(|~>zYl&qh=O>#Y(=;C$1Sd zlg%9q%_6K~snC9>m7_UA|C)PAeU6jX9#KlTYi>MW0VG#QXT zrl&Nb<#VkhX5zj7e;Gx@UX*KQAViY*x(sTi)1j>s?w8owHgC(kCljIbG1>-_C0Y3g4^D<+nXC%-8z9 z;Y#&ra1HK4$E1ADW*+gmZMq}_ljPIij--|$w0OZ}BGQ4_x8fDk5pRj#SoWnx5nTQg zN~D(1ycLG2hvY)#^LzC!T0Tk^ z@qqjd$yRg!pT&>k%>a_VpCvMr-$QQbCN~OyDUi%&{58Bn@| z`5={hv4c@=F}munt2I3HYK&28R<`HZ+0Vu3pBFfZWv3xS$d$^3VqV!fsbb;4Sx%m) zOkfG{H?v6*ea)%lpt}515~*N0gONnVSqP=&a!bp-NL+I2>qp1u;BT}SdHod^RQq4kWOK_mD=wjXP;g z6oBJmtTQv1lR^8LM8JhpZX6tTN(HF>8yI5_oNm)^ zR;Kqlp8UaIasN)z5c*=iSk0v2WLK(7aK-xoS7-4}T7lmHr@WrJ5MF7s1^9-zHvZFJim1F~V)cKlr!P)vAiC;-282Y{DbDul}VXUK^Fxzz5aZddz!Sk|yS z2ZgrXoPkl!vsUFRpZ>oCGSr~1r6>d++SbgcOD^hMe?^+E-bx`d$nHt1(%#y)XXZB2 zgppKhKOq2=+hR4U9FB?gy*f(}11R5q?N%v66ZSU>3YIPx#N`d?UrCU{d7+FU6E zviyDu8-?-qMtSV$>04ufT=PS%m-Xcdw+L*CpIg7dfMkFdver{pSX{YMc)7ne3FtH~ zMNehi&4D=LfwnY296YB$jy?C(vWL}(bCO#7#+N<lD_r@UGC70_p_NLkHiqnX+yj}yDe>V)`&1y0s4S}8O^lFLwps@&g=YuP z4xeaZ7M~as0iF8gANyQ@2nj+SxmYHVbiQ{2NlF=*)W#De&rgy}Dfo-xGS7RMkic~Hp|+7HLj zC!jgL&nE)lkNjdE`P;g`1!8bW=_awYT|ULtS+8 zYOG@ZZJuIiJhf$cPuuLveVFfdaEhq;c?8rU|EYopvLTEP`0;skH%RXFb5b0f!j*%T<@$qr+Ry{dCV#4H*(LZ^^AYQqFHON!mc^cj}aMA|6a}OfL`h zxR)Cncy*ODE~dDsW4D2sP};$i+d{`BGQ&uZA2UpNk!J$O@=a4sLC-#R=jN@a&R}4v zeRvf_lkq;-{w!$DNF+88uphOyy|jOZ0%c+;zn&_i6T4%M%X0NsK1{F|HG)DIRkZ&8 z=V^+yIwj>I{~uRx9Tn9Bt$))DJqSvdNFyyhqyhrcC5@5-(wzfCNlB-Kba!`2GZGTg z-8po=$M3y&{nmQ_V@;enYcXf`-p~F#&q4O2oCxP*k;(Y7V+9cpuRgBa`K?9MfIypC zFEYvtLiEKQ4*AekPUDn}tMUap4aB1#qdj#S0(ta8UMb3?|$nqzM_z%ot#Bbgh zibe@Z#st4CVRn+*Q!S15VCxssVM+OUXe((@-!zS@oe zmNAhv@tiW?6Fplg0lxIkA%zP6#+5{4ZKd?h*_#$RZ-m9#1g_isl1&zp1rrQ;#Vdj;YCv(v;cR)aiwkR}~B$OfjV%9PSZ|b*(yKatQ5RzU6 z!*wc#aShhzTX%}BRD>F@}e>4z~@r&@^qn4xGgmjC_GM9UbkmjAXKybj50r zM+Pba;M(&H=cbQ2QAhky&&p09$GN_OzHz_ish03p&a%@FMqfy|LSWP5YjGB3A{ zn-{!Rw+{a7m79&koiMNu>yQM8T{2zfiYu{^Bsq;>^)W!=e{9c_bi44icElWsl|wh3&VqPi^+>ryaWMnN{Rj9;kNQI2c#m&0K2U;zp6v20 z;;iUj9iQK)8eV?$)}kmxG?SXs8r~eN|=?w)oROud{rtwA= zBtmhXncEE(W=37D15M+$@u6asfbj}(Va)hY<>Ab9x~frCXlpf&>6M(7hzfNxa?Ovk z?YGT(v9q_hnUiG|eRnk0%#~jtzjK*9sq^U{?=F3ZMtgcQC-ai_ z0|0rDG_q0@uJ3z#!EIr`jz=n%j)6N|X9nVUhDy)HKBG$Vr2zMW$dXT~8H76=i2*ZT ztk#!S|6Pdd(yD6B`jpy&FU3kJBTVT^ZBr^&j$DfuP*a=VSBKbG!=bTu|AbTCno%QB z6GLK|Lai<6(Vld8vybT6?r+h<6qLwiLZ@f@EuDse<1l!9crzUrmw~qyIT1A%&UU+m zFHzQSCF_7DRqrR2y?xxlU>DFCc3@dy;pwlIC%`KH+>iCLRyvy9?vQLe%p9$M-Ob{D zwc^YK4%P8lG1&++d2}o<7@PcqD`Y0_3ayJHBo&KglO9MQVDqL)hmv23nsq2d+mK-O z>3(s7w=@RTsu!+f*VZ9vSFoHq>RO}hJbW>l$COf~m=UuNWxQC+Y`WACRdd)b69dL%$kVEZi6-NG-E|}`A!pmHuv>Nz!gYc~ETcH<4Q$yid|DEY zy1~Heds-#qT`{{VCPqBX(G392yr?vw^4<9WTI@5Bx3j{Q@|+BHbhx2~t=tg}IU%8m z!Q(2Bx{sf}xHsX0yfZM3kYLJ&K1s2=B4{R)Zqyp2*(R6cFJO2-NZS^s2*=-J-#Z1py&nO^x zBm;|SF6EUH0J*a7SVY+qLT_2=tMAIb!-=+fFPYo=ea3Nl z57CGBlL$6XrJSOs+YMifXG6oMtn31b;nxz_oSYOH?&x51KO=2lhaDEsgSf7Wn9Bt+ zXfX&PuU$za)od@f%RwN`@?HZYY|MD5Mr0N>TVS?GpYm&n{l^owyt5y4poXLCIv&>p z50KwIMZ9(*GBk#KhpubexED#=py@j}p>zuu!kA1mWyi99Gj#vC&Ah5S_6TY->!V?*r^%dJ+TtCw3#zYaQfawn7)&n*tG?o?^7<$O$q0r` zbwoPjpu3`omW`9V+Up5e8|swN{bY*nN$gl*u|-55ecC)i!gP2onM0k(aNAwjC1aem z6vM$TWxreLg~=<0iinO`X{7BMAUwLtCX0T1mC2*}vb0Cp!WUbrzP0dG9{W4l<4o=8 z=S*2>pLIwg=*Y-)d@?GPt3FMM=H$czZHQs`*k}~DdU6{y~)1K*LhO1~_ zTU?5AE9Y>Cmn0Ke7!>EVqMdiNlkc&aM*Xp*p=q}RD2#J4N8fu>nYPl%{7L$OYr-gE zYv=rh*UOw&2)FpUFE|1F{j=jA2NBE$ujnX>SX_nEnapya!6R1!Y4Z1?)avl{i}hIQ zg9GvP*hP*DPV$V5F907*CNre#ek}+S!Jt;Hzc)tdE*KVxAMW;|)u%ktj!na$H4XnP 
zUoMD}-QuhR%v@;MC3Xd8LDu+^*GmQ+n!(3DRNXWgf!(LCFQoIMH}^7f{z!fJsqnE4 zcH=U}KqO&B)ae+yEj|f9CX*a4g5OK(|D|aJ&>_|@WvqK;`Q0As)B9R&7*Mj_J8qij z1|*Z?bM+y!ycSxU9U2Yc^1qlSJ~vs8i-%WI?x)1WhW&q-O5lEXQC@WVYD)zh*nnLB z;z*17mW`GRWjQs5#K!l`gREu*0!GZ6GlzUoB%ufUmhO9k_7#Zao4HWw%r1iD{HkJn zu@$~E;}&%T3n%e;KUqRjMmoQi$6uErkJk^WA?H^YF2@UhkfZzxlYTK=xb6M;v1tXO z>Z6x`nH>0=ic;e-XTJozWGS)Qij@92Z&znFs>aKh_uBQY?G61u#KcKE>>g(yaob&n zT7^zLm!yylm0GZe_TT?cH?W-fL^lA{i5}~*eA#*b91lkRyH8Xkc1?-~GgTk7(M?N& z-DE$2CCzkbiih|35)PoM#u21zI5IKY>-3oHQ@ z8`xu2s9SQfn zGX`_k+dsoytZMH3jLr8tr0gC4)sRA1U47kO{^=Q40`Ugf^f?7C-9qkcUCY5kvlg>< zI6)@Ou`N|)3}HZy9GYwlZq&ypS~5yW?+R5J%j|-HI1cA$(rA5<&cmgY`nD`gym{z9 zrP;J|O@;_c5ZaQT2izQ}8blX5^E`?6)onD(K6Vb6W09wn@_~p}^tzVre!POt`I1Vl`ObpY z`4TX;O_0KthDm=C?KySC6>&Gzg)2LH97|M)Zj{!;nM1B zU}1DzMFPMFNC0_XIYBliDR)W3Ya68qb2>qFsdO;(#r)$F@nBgjd5l$l4z!(Lwu&`Gli2Ur^JT|0eau z&{dY>aPrv-0JRjmFqa3eMC$vd#;Zh|4Cu&27z^J}0^Q(D?0MnOh@o(|Po`*wT}#jq zi-a{>W~LuEG=5`uEfMBHD_B}Ql`z3Oocq**8gCtf9S2zg(Y@$%Qm^|UMp#pbp_Xse zl>=yIK{U@>cmY5As#qi;qul`P*@G(YCJ6l6y6c#rh3LD3zC|wl)||y!jZ3>?-ApT> zU8sNJ-uzvqO?$t}z|7~o;pNtf42n?c{<7wnKsHf`A~h)$jq}yeMqo0(d8Dn5B?tFS znvsADzGSON8aDiMd!|7udEeDHJ2jk$baRDo==)MCd{9p!yp);3OsTfI(#mvtD;bnl zL)0zteQs+IWcQJw23;!OU2e5krC5}pt1kMf^Jj~=06)n&{#nGWp{~o!M^kKz;0Ma9 zQUieaCJG25%*Cdc`%2Dp9{F;PNmbyjY{t8MXT@>uulBv~NV8-aE*!{JUJU`)!%J1_ zcN_2RZs#;A(emtxi@H@W+WGE`;dBPcf#IO%N)i!obV3%K8bQ{-BxxsomClG2R})Aag^sqlS=FanqGow?3isT`duTmX!IDUz(x!5(7cON~M=_3EEq zqFt!$VCa8qkza|#&=IOA_HCu}wQ35SJ^IM_g_P%dZY&l_w#Jckz9Y?}-EwQ}qNg}4 z%HDAG;U<019_!nPfo0QQN34U^oU)Q1KxwCXK_08l!($WT-jYyIRW1cILOkIvn+!o2 z_EYnX7uJl)0ne0n+IJ$7yIh%6Q-hvSxK$at58mD^$&)LY;e^oMTknwvd7|D?T)G{& z>8;gO2v?@ae$mter(@n1)AVkqjU?niif;lNO|hA8+CE?rz+dW(VxVn@S<#c!>AHYg zg)f#Gso%Bj7Onepo&I*%tcS|d5&LoFox=bL4jrI@CXhi*of2&qS3v}8IF%% z(HE$$TPrgwuNUZNMT;AkjQg)(CyVQA`b4zD_f?|Tc3TPPsYTD&0rS+UDnpZlIIMgk zlx`uY+@3_N?&j;}Nes~Sv=%0*R{K(=`yqFmt@_8_41EP9X&$Lwj)>zyZ$&SB1$>QN zxmTO%jX%2Uc@oIXrG6Huqc?p^!|gVqrXyK9taG7}%-ndqG3^AlY9 z-hsl5W!@XqQFK2YHEEgT%8t?0rM>~F_G(N%Iv5^g9v&_Eq5&m!rTg)Qoc>SycS&HlF%2}3chmtW}x zh?W0#H_+k6l03c2dLkdyiY+|S|0e!WA{g*2PT27w%F7D~hpE%5CyX*p@AT5d9Fs;3Hq)@y zw8sfEduaLg*Kzc%p*=7w7tSQAR%FWLR7GscNDKY31l$Ng=4DvfCPF}eer}D*jFWlqFodGHbH2+y@Ab?T8aDM) zEV1YGF>t@y??id_?zH&(4vuULJEgkXURz~1Z=Z*Rau=+Dn^5o77X^{~j2KdLW5%Mt z^gQ;Gv4C}0?V>>15>na^$#9303zpC3ohvI>~E=;7(lN!x>czSG33JZJPzHsBYcmb zcF117fS(sCUN{&39}7@VE%TLNTB0>W%TsJlr8dG}E&d@uyz!9b0((ZR@v?sd74ny7 zl@vBy@k3qgqK*ud`S>^9v2TVkOJ5|4t(>agf?Zpm7h`pDMv*1S&xKlwgH)AcP82Ph zsZn+p4`BJEyMOFpx&35YC@}4o1kFvv#}2?HjY+NPvZ8Kk(jY-7>)ltxQypV+QM|9BO8R6#BFkJHbt%Kn%GhFf?G;?kV^f+Xz z!&5HXb2=H4+w$@?K+AmW`*eG`#4Yp9_#t}90`b^9fUA@l^^VWU^Cm(@eH}%0Dk|C6 z-Apg%FmiD??*EP1O|Bf%HfP@tbN4N7dXhJ8G;3+?;hvtqd7t%|Z35L!0W;xZvkiRv zm0ZP87#ZZ8n=LI&vZt(_2HuI;RN5f0Y1a27$I~#QM0fg7jI}VFoN-V<;eV1Q{w9IV zP7o=(pjz)eiJR2G#<3a)=OX&*v`7?7MLRyI)-R_(geojC4fypCeTN9p%|WL7DhWU5 zT?C4E4rUECOv^`C!RXsi9QpuT8N+9?=4mT_C@v{jnPk{5p_c5=7k)avJ(r-OoDJKQ z%=l7x!54q~=C4(o9<3OlXo!Vi?KL^<*v{+i+UDPaH<%4)ER z1Us_NG$Uy4fdGJzhGzh5Mo-E7Qk(-MnOZy6kCo!q;TR-k9_bcMCp~~8yOY@*-?}$902>-cmc9^1xl1VSx z9#hx^CGkGtphMUs?}9t*7K@&X!%{h@MNlSR$A;Cs>O&FvUw8g*d> zS-N`*6Qx}c32h??m;Vfw2488#pjVqccTMci*7!Bf62FDlhwhB6MH7+K-+YD=55yeA z`h-Taw)(NgSEWU4=D${F)Rhqoa%T3heYwpYi*ei|N3(st=!F@hK9=Onqrp2L7VY9g zgmO32ykq#R$^Rh>kA3r2yPUV?bFMSfY#BpQsLtR3&WDn@I%7T&&(4`NP0*;;hp<~! 
zFVqr$HG%>$c}Q48g#Z%tLG(^}PyB#pm+Waq8`?59vU=A28!hO>nz!OYjnCqNJ~f z9hf6%e`)a6vBT-3-_V}hv?|Pfr-7N!0Be|4raNjaLsZ&IU#`1_MyP=C&AdQUP#jls zOg@`k1{|7HCSH)rL}$Nrv(`RO>+I`L5IY2ymJ9CCB3&)45hUn(hw6CY60HV(AIfVEq9#=2L}5oOMv|N??CRuZhd}F<=fE!Y{7BoBnk8u zZ^@5@Zo)mewNsh@YZk7NL`ZMmB)d7`{W98@DpTGD9)0^63_z@v5i&V@FS$ZzzI)<- zR94|A?4{@O7QK1{`Xre|$R^w(kadt&U@tYL8^xgTZ#ez|7Fz;oW?rHd&DeM>;XL?+oKqrt57@*Frd;fP{p9PSQiG9aL(o8T57KP|KZuxwr z24i}K$m*}typ0|GH^)3B$dqZ}ywwlPy39FH`WdG$YgBWvnvF;CwizioqdM)uV1Oa( za;7+k{ls)xFs36M>#6)Cu6yyeI@75_a9h&+^7l80zP1K|}wCg}Mjf zzLo@>*^@o*8xfdi)g+G%(7MSaJn;Qb0yY6F4$$)}Fg**QGrbE= zSh6PtyzPy#tzXcjnz^z!hH}6Yc|906Eu!CSJQUvpSqsi2vhm@=MBrvbPH^|DW}G;= zs}IbB#@JFX`J8Pt0>5K3#+xpa;;zeGM&|TLMfXMxy@Z9!BjjZrFr>;wOL8kH11iOm zwW_--G7vzfTYW^eG;&c{E0m;?P%n7Q_IG zXpCbWmDpj8WEqUaq~q$Ak@Y2)`sw_3mzhISP6Kzpn+!A-IO@qStSn&->J^K>U{=it z_;jJVS0H+<|GwSLn_yH#=u0cArY!&QYxw7zjKha$3cgAzla^<8cC+|Z2A@RU5fImZ zwwYNwTqwT$&Lm7`r~E<0dX44JxWu1N-o@#wNf(`|UhZVw3|+2f(pE@PUnlv^5Lfup z?z?=%N2_e}9oQUXHcPv@y%cls8+}C*NV9HXm$B&PHJ>wOLCAO-66|mtXj4iTkXNgR z0QSLz1~wcXlE5~E3dVokmfNajlX3V_yTd?IW$Wb*O0$)GZIy2hO;26YI^Rs zSunmFQ$TWT9M{=X_mx;=RA}r?nx%YvP$C_xoR=hryR10ZD!WZr?cQtVX+p<@CCf=^ zOm_K1wtWb_?Qf#*n}{z|;3L1aS{|+|KA7Fp)lbJwmpfX0wT1Fhf+Of}wsDef2uFZ& zJcI%|$tvArJ=xQ)0fF~#{3A`z`@IP=bRrB~F5PmeZ&U5c#D&1Ilv0EEjp7LXgj zS?R+wjoJ>^;t0PoC%SfrHzX)>p+I9IXP@34b{M@CP4bX@;H0~2dO;$o{0sl|tM-d= zRb6N5(*sPQucjS|SR}Nj3JUV9eCsoBxv;Z&N!8T)96%%nZ*pFB(}OwoGP!LsfX1`H zh8QMSVZ4F4mpKTiK5spw!f?TgQ!wV2@hQh^$AZE6zIjWJedrfkB+M1_DG#23w4S}wO_r7QSbDd%(gN-fgV^jk%&Y49 zul!dV&?%>}%Lru9CJEap~ryxk{`-{Q&Q)9lC+mVWR@ zC>A6w!#W>_ccl-KY%q1pwCT74B%W_|Sat`5arTzTq-y6r)t0y(Bw4?f(5Y8Z7Ft4s z^=Am+jX{I+$UF`4>*80(*vYJ@IAk&!6WyHyg7y%A9r7JG^t=;Vr#)Ir;EY*p}efG zG;G`#GJb3yg+6C_T)A}erLtP(5AY8s?53=Jleo&Ta8)`ul{6A*DsrBn!fDRq{-+=s zwfln`rKY3LJgsS#fNvAHE7@z>$EM4Lr#;+cP?!W8zjp>nau7SNa}whxl(A2kPyce>qFG z80t15vm8}FKDG3@MxbW9i!>BNHo}yC&8tt20e43JuQC_Wl~d2DjOxo>{byGFPn^H0 z`wiG!MiSA#v;yZk<^FG{Z*+X!#3$$8as{|r05gkEWT$*kQX*v_!&gucYx@gfx5Ne+e7)zxU8aUCBiG2t5N97 zXWJ;l&(&fcdgy;bUjace%iNCPw#x1gE@@k|=kd|ZDV)c!%2l=oW_`KUGoG@LAFFRV z?7wwXjYrzd{&?L=PT?d0_8H2S^3Lv~$y^pP9`bSV2^D{{j|n%bnth4GX?+kh2w`5f zLFzW6@GH%2cNPEw-82R<)`1eA`|>Ui+gse~fY{?PwBfJ(!BUwkftc=>iMSH=)?8vD zhwK_Zv~Mydq(NRCaEGU1EE`X*anuj3M-kgU=F$=mu^@9itAHQgxN+-pS@QbtApx_% z{k<4yx%DB`aS~2K=e>XFu{qxW48U8QQOtV2kF>C*vUOnVg-J&-!j^K62Gi#%%RLJR zPprmvJ2QNf2EeZ#`FtL1ITaN~WXXp-YZPRXiQ^39oIAT|n9O$26ICdC z#foID_IK%ObyPLMm#VHV%pJl3&vFVYH%R={B()R0iCCVD`;Gw#n|OsL#sn9!nAn>E zjPqWchh9Th)1IcySE@3>|%tlojn27YahWtPIeOx^x_R!F4pV@YM zPx-Ijlq+g5V_qK3He|dPOToCA zMeolGv<5R)I~#ET){w9?`Iv2Ckn=IH)9?Oc9Tw=E*mLWR#Pu)R8u_bVwkiQVJNWp4 zdRS(CB}H}=@&eJU9Uj%_P)~xZ+`OLIvt41B=G+KO(CRgEvI<)fM<0XVi>GvI-)VLv)YKWE(9#uUkp?TxosHXD?+=tQqDi1U-E zE#gc2y~`W*zapV*FRpLS*woYdbA2w$HTf1+^SfH)qe5+D;>#E&bi;;e8%cjeWDSe$ z6f!y^cSI)=g5$BH$WB5I{j+>NA0C};w(sg@lY!`FG7w($$EkxaI%-1nXQ)2TZG$)C z?@8{xhRFVBka?n;kD#KpfbId!A2*9&juuz__ZN{;1DY@5qlR9=TploFG~`}dO(DVoRgXCA0)#fQAt<7d8%8Opue2)2%T8><@=YaR8-Uw$dbZGF_RL-m+*BMGbH@mFNtj?y% z@IZaKkxT#f<-_M&PL21d7%~%ga<>?ubFf=gB`LIvRX@U6;vwFX@vckL6D-2zg+gHU zy?~ZRj^nGamLCBnPhao0R{BhLY_=pT7u+&}%7&TDA9QdwmkedgvXSlSy8xXdm zFIebI!l1iC+Z>20?>fl=N=mq5n_2M0?XC#UP(8XMqHT5mYY!}Grr54Uz465?sNR48 zjXV7<5z~0LRnV<#u8E3zI3Mp#cucVimgE`JO+*=A^9 zC+BG@OO{9hULC0}wW&OhCYEn}d$zYIy0;r*NOWL|z>;=K0|(`fIhbTSXY$(1tq|G>x&DY^@S$F=m@Po z+)h1%4&~nhn{(ps@_zCr^Kvh+@yCh@YPhwaZAH0;*zbS5O>yG6<2 zDvjthzPw%ckAFcQwuYi56>bXL1&%<7!?baLz?Xow1M_IEkHnLG85Qm!0xTH(y8oMy)#^-I4D#b+pd?rnnAJ<$qkfDki8R%;nYGxcT~_ z*SjVd8I_7anGb2T{!li)*TN3SX$3+OjL`ygG|Ax{pH+IpUR_43KC#@PR77-kcC5#c 
zM##n>7C7LY?nx;S*WIwbtNVvNKP#wGbd@t#wCmP)eAvhEQ@I=_4`I@v?^yV3b0`0< zMN{q!hI6j$kryGfrb9jQt_VI@)O~3zKEqJ=OSNZx#^{S_eyG_wmAQ>)EFYxL_*7HW zgn{78xl!5Yo}W@I!ZMgX>ibKC>m8k@alj>Xl4#VcA8NqDfes9s83DgpY%Q%QGnpy1 zCiL=wb$Wk-=@TF9X#ohw3|9c}6ixr>0Id?G1t(~jVN1z6XuQ{Aq;*dGMeNy`#|Cr+ z&z}F}3sUsZeQ$LRChz;uv~Ml|myol|&`3TSvwzXue4if6N)_~*kb^mFh!9ldEB&(; zDVrY5W`@|)dTE=aS*5DQPa<2l2Jd(6qmdVwdhXA)-pK4wKkh5_Kthi~8 z8fm(Xj6$R!1B(q-m!_^6avY22ifbwC{;q@cNxoOmK>})-iFoijXD`E@YU1bkj`8Q` zyyg*y<~bk&_Wd3CGpdVrnG*c$hk54bN1_jr>3#da)|$TrypvtuY?w-lJg)ia76@Gp zb`N{=(;v0e?c6?Rn-RjIJcYb!I(*VFgS@(PY@}XK06|&3eM;33oA02KI{pPHum2jC z+e?;|4|mz;c(juO{^cv*W7&X^hoku%Qyecko1Nu~b00tK?ho_S8kg*CD63G5Y*8b0 zG|$;8vB{hxXFRaZ5Wm~P-5<+0w3UXHt@~vdXue0jBu<4+dLr{Ed9o&L7mUDojf#oJ z$Z(GI;K#EJ96VE%2LQlJ>i1YPmT_DUFvSEXI2QgoWFsOPV(=Ea%MX_g{N}{JjXkP5 zpA)c6(W`9=fv9|fZMcxLpQaf$Ib9@Xy?TL+RWQnSN4iCu(+-y>`gUKTpE=GX;30m` zK%pol7ZRNN)}?y{~Qhvs# zFf!6h`o!7jy(#3pAtF268sjI%Ouwy=E9i{okKGWJIBJzI50W5u&er2rPDSy1r8KDL zXOO6Al~HeEpz#{00iLma=3RstxXs23`#S>mRq+EhH+q8G^R4dJz9NQ5X!ZTf;1d5seE#D`aqR(Voxf^vK4)X*3IP+A`6>g_#-6GvqbMj>H9|HB!di)0d{Zy_`=#&o zA?e|6)AbTs8<_B+`=Hp2(Fg$~J^aI=lQlSD)YP3D}oNR?Cd-RE5!@RM$`4L zn>SZD;2r3g7}NPqAI?hHwTZU{TxrY~qKX8#BeH9^A+%HXYrc76e=wmL z$=46mo}Wmi-=&?yNtJ(=K3TV_g6bYCyB+HDh|ozIP!}8u2(&TAvU$Mnq9y0Dv>FzS z#M4C22dzMVlTuQl277gRICZZL3h$%u;zwSK&{g`HK}N1*#6&Q=n!lN&TkON_E}QR| zs7ce2rLPHH%1hnW%6k?kYf5niQG-_${NLOPlnfY@9+zN@_-y;j%^_poL5ELPnJSJP zFmRWLW{1eQ1W_?8I>DsRcO40#*)hC8HYM(uf7gGH0|6GV)zXoBns_>QZ~-|1in=}R z-|Z=ki=h*k_d-zR2p9O=KweitDi`AuZ=rt&%JLCqn~&}@l#QYfn)r8+U?}k;W*#u00#K{32RRM-OSW>b_j(!+Y{rnBH6g7->GTGqli?IQ4$x{9X26Pq2k-De z>Fy|RL4T@0dLg17m^d{Zv5P{YjBr=#!1R=opNm1zU-TB}c?nmO%0 zrBj(?`P_c)$Tv4ZDsKTZe_n%wktpL2ww~%A_n=*+eS=X6tVYwT_%mzfp04?lDiyyS z>$oB4*_YoIhu(=dp?BBV`4;_t#-ElMVl2XACe(sXmFAi*r1Lrwb#gTX8a70LNlg%v zqkW0`>&IcsG!ovkk#q??{BD#_u5&i2sOgde#_VoQH!sn?P8w%yUfl>NNfcfWn&)M^ zu%V4btl(OSF?oL#bP7vcSWWHxRed*=5%H3DDu;LaCc|*m4L_-JmAvC%ATp29`1_ba zN|6i!uFQlw>Wo-En+pFo>xp$P#Vke$bPuUzpWfXhIsX zX>IHIBEXiwgTC-mrjnIz38mM%)7MP%3%QibyjWlPpY#+dO>^voA|Ls>4jTC{*nf&x zrmlmr?I@-0iy!3k#PPF-DAe5{2}OjU^AVLz0_j348r9zv zzsn&(`U}EnbW8mC8aIL9;`!isO+y+qPo*pijPeJaT?Kk?#TnbUy7}v+a=9p}&^l%2 zge4>xc87A&q98`LFeFDKk3gbu>_A&9OdG?DvgEC7JM=2%aO5|VIr+f`i4}wwZL^v6 zq_gvqm0{APU0d<2N$*UQW1xjv&*pmIiYN1~`a-I*UT~jqnyu7A=eUykQ(Ld+w~H71 z+|$+ydqN_UgRQEz=mVZ{&l$|Vt6V?6K^6-9*42$5$Kn18AM`9^$Z~tymJe*8kTS^E z4r9phw;#=9jHefp$TDmGAf9|TN~RZIt8XOCJ5u{WG`7qaUq_j%)~X|GY{!PyPPbwA~YyGEMa_jGmIq=XUoOu*MdGZSgJc35%mkD1 zOPYxHMCs16jb=OS*bh4C%-BxIemIuA?k`Xgl#b)oNjcu(zfwOF`q_TQgapk{1R|So zR>N~tNTV;}7EBmUzIRjDYh*ONJmLJ1mXI1r1TK!vk*{+)Uq54Q=A7)0W4H)zr7MdpGFl@0?t>xvl&q(r%krWk(F^~Qb1h6J2G=rSxd zzK~edQAQqg=zZbg$VE**8?||tx;l5XiTk=9)kcJIZMCwvj7G}Lt0LPLlwd2p7bMY! 
zo{Rre-q=t7Co0PGpE(ayRyqJn#KsC9~ z^Vp1gNvq#(rDb(?DXrtu*Z9mjGe~7z%w|^5|6An57?s+rl7-l(K^9PJbgNQ5$u4r> zRopVopIu4NROSWgE@ zki)qpYZP4RbX$2gf$4wZvDE2J#R&wwR-7|y5_76ggnqgU!h6A=MMirfNND>scjaQ7 zIs_9e?{XEJN4@9B+HjEf`Wt)d+jsf490@djX*g^6CK$zr^KEL2g?HQ`BoRlx-ujRv zyzMu-aKRDDz)yB3jxaPV#P3#j9Ks4}s4PKZN7B-$isyqy8635+w#j3Lbzvc0AhQr|)ln`Zq1%uMq?px;*}eCtTND-9iR6 zAPm%gs$sKS01aXBJU3}QX*q9omrpu@z!LGF!GcGW)S&i$$Kn65oE@qB?B&8)V6OgHN{ zN|$rM*)O!SjV&~9uWzR3eRMq>rWr{FbqoAApN{_hSY?D!wPrj2>m2ARhFb=naUOKJ zVc=TatrYxkL$P{JFp_x)f0&CEx9Fsm0bU4wJth};g==?KYJ;a0vnU(21?d=9nbY`4i44CPu4}ngu82eOh^vy zOzE#0kpyWDl9_1^qrn;liumrN!H*A2ZfuXEdbHkH%(d6ArKSn=E1CK4I(jk?S?i15 z*!EkL1nOr!(8YC&YY{-zO|&+CF9gDoGo|gPT#>+DcQ77}qItK5=!s9|!6HT2}LWBtk6ZkHv~_ zFt0VSQ`uVTITvwxo6Wv7F9LFY&V_YDMIK5j4L*IDcWwUKB;PvEJ>{8N4YtL#>hZa$ z#Cy~i)JCkMy1lN4Zw)QXk^Yd?7*>32m?>4I)V-!?t6JKMkP)FDrI>N)vhFSEL2;*Y z(&t&3(-GSxkt35(Te6#DBDx zDii;-qBz?5r`lG}vwF;9w!LK�!@*<-Q(?qdF(BHD;LC(kW3L!hUJ}yun*6Gci>18ek z{e57kzW4AIR5g7#bm2FLxoS^{eK;=U6kf26Msv&0yo!c4A49q!z_~=3tJ)o^kXzUK zTS9ci+X+Jm@pPmralI(bJCwSi1sWavnnyQIklkLV{(b>qGJyPKYzy|&=0<+*V*-usgd!uo5I4v4RjqMtVA4s zqz}#dy0r1(5avn<=@6}vXa^*E5NR24ppwtNlKI5zGsd2atkkIE#HIN?iBR9pbP~hj zQpeCEO|S;>mQcmmJj6Ex6{ao5g)4f21@m$hFEI68##()LjBIA$NCA5jwm7ucOAQ(g z!w=&9L!nLuP8F4|Sw_yLtNL&=XV(IRN?k=b+VwBf;X*_T?hn-^R-6VLts#8z{x@ot zN8Q`Go2jBES2PmK4_+AMP zx?QBipNDG9@ne}Jh!w1!oKEpyXxqdF))_^I+&RhJONCUW*rOZBQ6}_i|sU-QzXUhhBo8|^(M=9PFGrl&n zewI+=5R_40tiH|_@l*KCw`B41shc=3tDa1JYW;iA^QA&5$Zz$O-siK+K$3+-q2eXm z1w{cZmr=eNK$p2@J@CPc}3QQ!#g_t^rZrgyJuVe7jFr#3g~*D zeU|qbCWZalsO@$=dKcyc{=%!wQli^W+shjm?2Ng#i_d4bkfZN-0dy@5Y;l;hQ|@;O z-!=qc#gW09hdnBPF{+9`h%(}mwzd_XUH)7xb_nt;9UPH(AC6xAr6SLri)Y3p() z2pg(b&FiKJ?7Wk23wINMW-zhw)~Gb*SGjW|FuN{w%LLG~h1-;|AO>j5c&=?FY-nP62O`g@He*lHr&+;j`4>{Y)eu?IpXzWLBvE`H&d#qkx{`2q||Ge#8uo(WV(gv|BTj1ph$`7@M0?lAD zy)902%akroEw!Q+asW|GJ z5(W&IqI7I3(mA?Dh$x)`(v1=$q;oJrDTM(dEmBIObR!^*g47fk-QD?JedGDw_xqmv zFdi0ncii3oIp-JWM0xD|wsVh`ody}tp@7O8!?@ylZqVrQ$6FF{>52l%?-gh@ z+(MuED$^S$Hpj|D4A(m|G z89OF}e=U0AcnUpNWO4KE7B)}3AHV!KK`$_o`C;)QTeislUsjQRGYr1uYCC(uLAVU? z4L;Udl@bee0qE}*6oUcNJ{I3;-3>u9d_-Rz#)D4B?Wf+)s}C zM7e2A0h+pYJZIEC0y=s|-P_4)4Y5kSRMCGyMt~&ZP4v7}$bgA~O82erkBc(te_mjy zctLYv0sS?2u!I+$m>#?Ok(<1r1x&>3mG0(b`n~EN|FG}uTE66)^rBWu4nLd1C);6K zX%gA44_uVih0zSir>_dA`f&KaWQU zO|`UI&kCmRbi&ng(3HYZ5pD5PX%r7-FzA~8>Ph!6)n4uL+&A2E7InEdjv2EsP?MKE zo+^uXu+UMUQKupHKKk`S;Vo<}l|nco2S9jWdS|(>1GlHXK9B9rHGBb9ke0A~tOhIn zF4^2xj67_G_|O>c9`jQ`VwBOJ4nSaQfvR5GG8ogC)Nf?FTUTa=_R!Z5&akhf64n{; ztvcsMS)Ky|%b4Sd(h3%$l_#jfyd+fQja1V-crk${l{B*43odhOm@urgVN;!Gyx%d! z^S1h>@%)nv35wDR$2_qcyYE|pu{PJ8nbd}S4pH4Njm)d&F{c1J@l{@etVv*ac)*0! zen;Q;+;W1pG?!aeGUDfi9Cz zf3uKYie@fNH~QkdUr zxLuv*6F5p+77u@~)!PBBdjF7ALQ%WXgf~RDEp(@DLmIs@^K<v^t>UdYpetkmP|OPeiIts&8nAu#um44sb^D1~6bJ3evrM=C~t z^$F;+(1?mH7EIJY)HQ3bs1gD1Zsl;*p za)v`*V2?N}`oZYeW0q{mZ)JsdA4z6U`2AT|N*s@_tx+0;9+O@s^A

XV`Jdj=o2DMT3w~78zY(UDM6dG#Q1eubMzy6W^ix}h z2dXIfu@Y5x3@ubG{WN=4u-{N93n#;c)-&E|=AZmM3-%u5m=Qb@MdvSs5hJPIH_8;M zdU3CdQYxGr4(RsQABciH8CmogE;b=!CIAZm#Y@3qhXj+~4@KAO)wC~P8;Hfr*IJLS zs~qbpLmS<0_vR2lcbdAr0-E(g%fI|dAyoOPvH&^TyMO+RqvOO0MYNgNIdB^bu4KLo z)}W^d{^UHJT}etzT7|luBpNi)e#RG65%_$FxrstPEROO=A>UE7kT^E*cP2MpJ2nz* za1{hi!x?)s}so{{RPoy)Vqa9h|^!5A!_>McC@=u;y~fu_#PWBSs>g>D~{0G zpiTH=$(Mo?muo}CJQg@db)yhLh82n^!f+<>1-ZK>{>TNc8cd!UV^-DKMRE!KDM=^ zixAYa>uUE2DkrW?wBvOsASsBv!%5iY;h1HfddDPF#b^fpPUc>Q)=OX3(o>OGu2hK) z#up_Uv(1 z=`70%fdH3P>}D>6>lrbI3(3^p`AU#&teI6oJdAyfY?3ZJoUf-p=3ux=P9Qf(Y3*HN z6IQ*OS*nuTR>r7vlt1a}Hu{t9Qap3WkM(#uhaZXrGIw3EewhM%N{UnzF)dWTrU^bc z{(3e9MzIrTtrGprjDIeW-or0LX-Gw#S*$Bf?ZYn~vbKsaIlx&H81y3NLt`zj@TRDiHg{f7cRZ8It2NJ ziX@vhZ+PIkvqdW7NT(PveD7g-V^Xm--^XvNN-ISlrpyqBx00?+So?=mo-{nLbG@#8 z0MEh>Myk;X zHK9mH%AXid$38J>GQ`gft5a< zyCPX0lwvB9zrlncz`3&Y8u-dtoS8h%G3-?+SN`bCW_Zb%*r0GFPQ=KkrB|dC3Ta|- z@o1;s_U2f9g)(5l+$Ju+v3wgr*PL#%Xa6i&DtMvt9YwOSOQZq4`ge3Tj);wdW2Nn$ zITv72!bnrue{(~y#qXcu)TG7;4<>ec+~jMS=N1-Fn?JA6n;Nv>>V;5*{%z} z*QM^Q=dM;tU{iM0v-PzMyrXcy7nW zZkS2nF{aY{ND+h#B3_Ipfc7^_mN^A&*n&h#fUf3(qc&Zx6^J6esG9J8@D;CMzMda7 z^L}~XV{4$b;4$OGS+sPj55yi83J@)DZTz!b*x-1 z&q%IX3lYd$+Dp#&WYY z5kJGMq$Thy)v?*PG`CO*6QMu@ud%N(AMYcvq_}whOr0+CD}r`JoI*|x-%MXX%kJgC zu>{uCDe?}P5@?Jt%z5U9;N;a(jGt%}XBc`SXwg@LZF11!bHu69-40@{kDcG%Vblb< z`WDdRws|YF_b0%-J>OIR+-xcicp@v=ZYRpP0k;2YO_Rm}CBYFb-AShEEnPi^T})s< z6>E3_x!FtZ*dj`RJrc=pykvux2e`FU^o^%XQbP(i19I?Ci?8G8WKVBFF4F3g&c3;T zVpmxfX~(@4e2^?;Sv8FT9z8uDY7EYXzpw((wdGR$DK6-!1+|z;;WJDUEg?YUXdB-$#eWvUSDq>{SzB-=z4AZ;X8|wXZU3EucGeXu@9ZQFf4yrSL z4TpnZDLK`_?xDp{tB0tUMtvz8KU36EE|P_g7)W8|^s(7GlcHcY`0L(&j!ze)c~dFEOd2Gm;9@m; z0l0(&HA*Obx2}=x;fqT*(44+})G5#%08g+&z;RPceE#FyYhK9Ja=mfVTYE}?>AK-) z8FBYzQl>?OQIQ_H6@TvcD_;=qn4|^dVCgtkzBFBEd)6C}S6h|7$&hUsI>1g2R%Qxy zj)-OuaD~ER_^Whit%^rI!U-@IaoqM3ln8IS_Rf^)PZ!ucZy;c7>M>l(f+$wC^Y{QE zfRJG6sE?4N8L>kczSFw41-L7%)hZtE2mLjUVVK)=`=@pLoZkZ{svVy<{C;ZXKTY_h zJ}A%+mI`^k-<8%*Oqe_)kf9)gpR7J_WmOos`+)`Yjl7NMh0}>;p#fg}GNWQ_ZS6h@ zt-L{=>fSO*CeZ~n18VBes(Y?g(jQ?)t5Y>b9qPxb#ZT0Bcq8-H25>{#E`O1jLbddH z49++3qREou3i5L7z1Zt#$lKuVJl^60!s>IUI&z9!q5J=(1(-q|$Gtc~K2(SjidN?9}JAi3DI!%B3 zW%ORy?uNmU-#h3`TEHDXqi-k!C*4Y=2(D*wJdCeSvll2{sjwiDY#MdLt@AfMtR5r@ zN*)e6pG%hyKSO6xx-IX=q28h4;GReJ`2*ld9vu+IZD?Yna@dR+I_uR+a`p)N2y@CRa z4^|xgI+=3uwecfpa6 zIIJ5yAn$N=4wUr>^khqhR_&5s-x4W^qQoEbIT(SDVo8hx()01tAJR}QWWZMZZG6i$ zWW?Dh0D>53aP5vl(WbfGm|QkFaSS{_oCJgnbb7~)Ptb=w43@>q5&cm%{H8JAnMolD zQT$DZTC4H-x5npsAG?x1_20&{?&p2>ewsg>lX)t0OH)nmB$#mv@T3GC>65l2#kQaF z^=+Ubnx5PN2>ohxg<5fdq5H}Vu#TFX>r=|GlNPCL>EV5RG+}V$9!?q>U?x;TCWq7x?DYhgc-Zr!Sqr>5zODle z%6qyf7YF$9QMlDrnfA`$?cYD}NQgJ?><$b@$d?ZQ zGkKGZ6hak=EiW9{L*N$Tz8T7dHvqmpSy>?L3<#2RzkyPkfd#v|fk~B}AVet^;&8kX-a<*E*w0HmItpPx0r$bTr040hHc; zcor-PNC}ktT79j7kZk+5Z#6h zNib7~#xT9>wV;kg@7%M!hTWO8;9L{8k$K3K9~~L5fdt-L-bEPE6>WPwndk{;$Gx*? 
z=0-LfO92)-CxNbL6~@*}@fKmgGVV~vd>foh6({YAeHK#X#6U<~%IV_Sy<_t6b^aUq z07@HIyPpPZaz5+uFz?F_2%njElg?=sLcXx+g0$m|Z=3m6-;ns68f^n{VM#a5&^xOp zT85XYeu6QqXnx&zz6{(Djht|35B>V{r)591_JOjOEB(pw<8f8U`*DQLV^2UB6CRPg z*&Ehij%xM#ZgLY+NVOXQZ9P*EP#n(FAtl&v+hNMiU)bHbgp@LVHsjmC-_J#BRV7Hy z4*{eheE(`)UCyD!^Py4N6a@-i%8<|ib&WkXW=Rj<=_=5QTfj%bqcm>gqt50JU40sF zpP8!tC-EyB&m5nc_2RT@=EyBuF+jcjsLykizx5+KR4`s z^K$SNb%jJee_p{S=13D{5y0QPgt?M5M60q%@yMV z`(8sqcsaQ7c#hA;UVsS7v}JE;Ii&UYh?z>7!qT+T`1$!C9-_7N=vz>5<5}v@-$B5z zf?LK(*zG{#_Fd`QerhMb2L2Y&4M~i^>5*#+0s%uQhVH5C-#J!1@wJO|9|cufZhjY8iN~iI2>hg5!&Pr*aJB_2rwc}jKTB6i#KPSI{6Gc#?d07xK=rg=Y z#q-521zX8hR>eF-#0%uZqQg!pEFIUai1z@*KTJ5?D;qqsG$}(n_jBICb{&Eael?Td zctsDjak;2+Pd&y%pB=_?dzy)%4ecg|MPm)I!cV`(af;*lmi2uNUfG{uAJlLqpilWXCjDk z_ARS=pKkqutW8B=@~bI9Jp(em+}r(%hQFWXnNz-9@-HA7304`R>Lh>)j9<9S`p*xU zYRWqdIsqA524u_!i&;%^hb)n489F(K^5FH;u@MqeJqE;bPTmnS?D;cd2b++EH}v^n ztI8%O72Rf^bJfnfMEkM#Qb~pAKo4l0k9GJjG;)K)6W=GCuf7V!Wq@1UGWgQ@1`mZ= zml115F`Fi4h;v&*1a{khEX^{!P(Mk}RK?KaIYrltewC*-#V5&}DVu*u=X6`B(` zNq=Lmg#=+l>l@(B3U7un@B;z(f7B6& zI)+rlhkwz-!S3u2)=k}hZw{^K5Fqn>manW9Uf-oJ9>Oz}iXW>F8ftnVg^m6N){jmT zd)=AZULkyMA8_>PO3>$b-%{>jkB}(=T9-IZY_fROtAO&`o%mMXQc$U6R{|wb;w>qw zUp{}2VmB1X4E#xy{wr_~hTs~6A^_~ZYr5^onCG12?>D5MDAeJ{gcFta&j6UyaU^fi z@b?sRSPqy`&33PYIz^(l83>vhEQ}vWT?WB9FHfQ zXgbq#^tj}{r>Lk@>xIh2gNK@f2-^P`XX=f8{Y`?K83zj3<9bHuMd4PLl8K==HCSxE zy*|xN{*o8JW-j#1c(>zhp!de1yISp!f0-9q5{i5s8M>2OpGL8pwwoL78f?VrhHMZ2 znufX7t|4u25jD>(#_o`Syq)Rw&4Pki2za%O+(vZi5Sr~Mijndxsm#b$mKmKKA`~kZ zd2nhx%dgYMi3RPC9c88|;~KVvL^$LMC+WU*ENe5x+1+AQ3M-LmfRLxN+t z33QV0hsWaXq`_jG~-~7q!F@ zpLcWr{+u~t1e(^}EISTsvn>;?=FB2lf0?to5(eo76Y@C-T$JO8Ibz4>BYQnhEOQqk zcfu@Oueav3l486MHStDoJP>FkB0*#8hJno$8vY zln!Iy6M@J!a_K#mp=A}wi2;u-On?SUf4p_V4@4_w73)gIc0jha*jC!2U~!IyEBj_J z1@YY>6L{wZs_Jl+=CAPurr=3pE;Q!d{Jg%??n5Tt8oxP2+A{yP*hyPr5q`Lu?#J5B zUpig#Z5&YGPGX-wc2@RyAr&;$tUD=sfsf{Y4d}<;FyD2O*_E!-p(G&bl~9Zxx9p?f z7v~BikJ;j@8CVC~e&J5@D5Fu0s64G?LwQF4ZYn9YndbVExJ9}Va@z?bK3j38GCmD4 z@KAC|T_SGmgI#Bjri+jTL9(`9kzq(%|E%%*;kl?WxOI8*-dwJne4}8LL4<<5<*7{_ z7~XNK!O(rlLfF)0sBfdqq);STjPG(!n-cJlSL)_idh$pB!U1^kC|f;L*gvp0egtrr ztIK@2@EtdYU6_lEb04NQ0mZm>7$WvrFoRdmgX<0Ejq6h4)4>hjfm>F0irhd0EvdpM zQo|eDa@Ql>7=9qS4#lx2<;OBZxDx)Tgv}7bJjSbfO_{glYv;}=ZAX}eqFed~zp(nq zlH(Hkb9=-71cw8NyzNRSHDt|rTbn^AUcup@np3@q88T)g2~g;?=(B44yeTo&55JJo zAlQ;R_wJZ12%)~MLwN@e_q8J;D}Nj9TGXOjcYhKn{(F>@NW7asxD{zk#iS;z*sMi& z7jS6K_Lm%M;V5*iTdm16+jTsCdi}9>_Ri zM)$og7QnCTlj_HsAOLrbDap+Oa<9PI#dw&oT{XAol^5vT{M1B3hX_qkv!%<>STPZG$mrt?e^F35=n3k%pIt|x?vawH`_{pA}z{Y(V12_l7lR$VS#YsjY&U1Edn z+5R#3;+m&y0d?VwIO@7!_h~qb)U*^CD0hfSn`I?{T)}ETr1C)%vqGb7L1k$PYx}bn z)zd|Nz*uG4Ii@^O3(9}Y=OUV3KiRHSeyXrIdLn?!4tF3xZ*O)|2*i-7kx+}E0*8XsA%{}mPUEH!cAC0Wgxdlt(x&r zS+8#FCa}A;(TJP}o_3K6KP`n2)ufD52Nt(CAohcm7tnfPgy`Kk^Pks~7RT<(feWIXY(0E6SLuc~vky@R&Q7AMKLU zuMl4ke)`Arg4@Qz^n%3 z4==APRiRvy;f_c9BIX(q%pW&6Yz;!+;+%eOq@NfG7%H2S96Gy6I0x4cuC z;Sf$xy7r3#+46w$30EnO=Q;R<_CfD&IgwkvYz zYatkigWtrF%D25g#O3&>))gj=Z0oma?wBq8-sM3%h`UL$?d(-`-a=FF@nG7M%n=u4 zkgb`^;-9Xa=FsAvmz))P2z0$o&EERwV>$7i7l4qAqH0<^3&5hJ-61sNl6xHUQ#rkt)m{Uf8y6@Gc1Smk$eMv-S<69#Pbf<{) zpA9exkynfQmPoOe(H1yO^r?wd?@JtDGzL~^T}=)+5-{XGteKI;gJlZpol!&PrVDkj zQ{*`he&h=KH9$=~&$HX<3l;hC6#f9iW`Cwd!kG9JaQ<^89U7^83Bex znwz129T~ev&q4arF*-klC}Ej&PoMh#fd~cD>4=AA9Xij9Mnn9%%nbb4XF>#C&X)C} zQl>*&y@vP8h7=9gy*XYPAIoPKgwjqAC=~9oJpEM0gx1PR)gJqDq(GN>_3-o)9!O03 z;LFby!1`V^UPX53zQ{_cV7M92pajWULyp z&1Xx>kO=!3J-@E$Pyz9yI3b~3N%T-0?VbFzdJhJLSOhQ{5!mqvTt&U_KHBGnO3{IG zj?E+ORC*Jd^H(xm9BL=Qz`m7&bsaCUda< z-RvFZ=4iUigwx7#T#z`0I7RwC!>k8#JvKXxeMK23-cfPNL_}=;rTnh2@=1`b^!$xS 
zXK&-UtGh9z+7YBdyh+z$f`40vyJWwoxWjI0WY{Cr8o|p(^02woCIEnszpe$-o=A-Y zVQIf}8{m8g&r%LmCFo1wec{|S*Dt&6m1)^V;xn_(>EjY`;*bu?r!M=ov8?ne3%d?7 zgO@Sgn*dj&%(CBoqTO?#!Ir!G0eckic6}}guvJq&?P~%86BE>)HiPrWzo5h;>;fWL zQy;Q)C~u8>;`h{|8$gi-GVZe@3zaBnYX`0%Du7U|lF(x&4W(t@iU+h^#Y+O7LF@s% z{1+{^cH}=H_s-Ci7(4PXeXz+1mU>hOJVC}4s9#?u3#KHGgp9Nr72!X<7QUZ-<5!(^ zE7XiGhCL!cHaeYc4kh0R(7o@n`nV3hXnm)xh%&huZgR8JsqM{B&by(U`|HJId7Zaa zeL*JFaN~aYZ2Jh4A`t4hjpo)?_QyPWHY8mQ{osSye`lf?Lrmj6&bP70d+hIK;M%Me zYQbuKHM`FoI6XP?yuhaC;L$D-yzC)KLbQZjxh_@MfvV0iPhqfwJd0?^bNs#-A_m(l z4g-}RThjKa#D;j{J9lpDZN>FwqdjKB;LL_sn}n@ZP_QtF+vu;PsW0ba@`K$^aR-Y^LuGK+fu7!B%pcD(G zB90jiAw|w-X7{jfBgm`Ndla4~h>nKRKbi!r=by8(NxzA>1Lc1|#OhK`@gBp2rUMq} zml@eo;EIL{`&{-ilo2X;^xf;_rD6)%8IR8y4tc$*omJOP3d)&e6zSh%;Cl)3jOFkMm|@{!f-k>fEL2nRIOUH8wsfOt5np_ zpy;)4AAhE^gbh#B>((vi5JG_1wQMQV=iED_64L5C=n%N1E?21O#Z}URr9z!%1fB{2 zvflzg1c9IPp>z26PpTY`S=QckpIuJ;w5<&Nu{>7P&glAnFRhZ?4pOFY)P*ker)ca{ zHDG&+m(F>a+WE8>=vMz5cB+ghr_RQt{6aR^ z5NGvq#`XYOa4`+eq@J6g&rJ;xhMoVV1>mziU$F%7AjJNNfhLSLs~DGdGQEl zj=Q5n5(wj#L+V0NxYVt#^QKxj7p2fWc+i9|~T{!G~I%vY8WdV+fx4}o1z^aGbPv;?k_~Ary0h_hC zR=BXM#TT>8& zHb=Xj5Z(5>HS2Buq|y0y9u=%{WDmQPmKDY`T#e$G5to=3X#h0_kM2~(nnKo|MPecr z@QAL*9tv{bY9jlK&o zuavA1Dtx5y>m6CtC7qIu23qIqFH0`djL&%LJ^IP)ri(7@@W<&AT zwzwO~*KDl9YRQY!fT5_)K=roi1CzQxKTaOua3mZETh1^ltq0zFE4Y*+S&0Q*;IGwn;h}dv9 zD42q7Nf3dEcM|>PWAYuAV*HrqG{6PlcVVA+w-FNV_$fIl{O+h~nBn{@eHMG6l((Ha zq}MUJ(}?Lp_dN`0EuIFU2BG~k!bO%L-A#mTbMFI-f&jiRgl0QZk0ltcG3*PGWNg&p zwhw7%8%Oc_NEVAQ7VU0^HcuQLP$#Jp@FA6jQ}eL>uijcJe>;74mCNufGjp?6Fc<6a zTRw1#ko~%C7Q;?;XUkT+cNQc@JM+gC>7WmJaQNrqgV~1!-^=_uF6x&4tsNe)YyCcT zrloCA#)agEMO0324>En+_QCfHo)&2#%S5<~cBW7Z`0?|5uHAFzgH2y@ZE844VqAPz zlt7;^&GLYDB>*(Ft?dPFB*pdd+6jtIfuU8*d9> z)e1LEIQ`q&sFp&A9|Ap&Gj>TvZ2Rd;3D0FwJAI#zI4T~p6P3!Fr))%&56+ZGPhYd3 zJ0v*LCkLx&+z3Ly?u<|BCZx!X$PYQ>XrZ-^gic{Yc&Lw(cNNo)eF1pmOeT)r3BovMpZsJchNJh^;EzqH-4$5`1rbsQ%C^!z%huwAA(LTU3zH=lW zl^2;5p@u;FeCjL534Rlkj`+N_qP?& z=rga%!Ab~`%tREAe)P*B%B1B=fJ3ZSL%s=Ey|NU3Ac^_B#e`~IfA7ahW$6rHE^Yo` z6+03JAt{Ke6Cfq^RCI9oQzV?{2OYg1iTSoor7-IOKRxh=tX;^cc={0oPOi_z)5GLB zx!b(qEX1PMi}av%op>&i>GVifkXGr%ye?!dAy6e&j0~2^BKBoVsnL_)#cx(3oP|8! 
z?1D%kv7h0np=3EXjGr0hT1=F;n9m7PpEhIj1nBq!AAz%go9%1gD8r9iMB$VB_?(JO!EaOb zN$>kX=yI1-Kmil}0;z@zk&qG4v+v3|nJ<)OU1vcuiP)n_M~S+$e_cPU{M~gC1fm^9 z>YPlMZ%%$cf3=O!u&`)Ax1F%eiFUwcs#6r7$Dwf=L;THVnrx|MTA7*qk*i7`a*|mznAVtsX{?ZD`GR~-CuQ(mK8m=; zekeuj_A#i9(vQ-<>GF4XNcj~Yl^xqgLyjAgM6NVI*9DiPw%5_!P^Lb4OTU+RtzP2x z2SVl;o^F%&P-5OjX|t`0yU>qFX8pg7sOEZD77?X?$Z;k7+2y8P>qPKJN<>1Hq?w3~ zZYz+#ee+WM;{caOiwS!4A@66s?nAx#-xhFWfmxpnI!pd7H0sUpzXOP9$&&%yi2Lz^m5~X}yZh7^WPclp{HGvl37ogz9_+$3w2AM*}w$xw- zpXzW%3c8ObcHXD^m2FRYh@99)DXGx?s*AnvX!XHbCEwl)A!uG~ zD|7U=)N$p#IA-sEi+R*d#gk^6C$ChDTL*{ZlVn(jXw1OJm_*C(BWKYC^I`PT)f*>v zk+Q&OX5Vx|_1{Y#n_#-9gCHE1E&$;40B-NUdOo~lfCG2V3gLoU-`A79{2y*EC zcPbu=SgcdTMOG|{kZjGieLTiEY)qQ?u95kAK#d=&gEToCm z1^c}9|Fg|Hjo^_yq*}zoO(76P#ZNo|v}iJ?*z6jLMiFLl2?xwR>tPZfSlWj;4Y}pP-`nD)RMt){=Vjk#{avfqhJ=%)`rTyrqt1lD0gU%e0~ zw;)?n`&4!_yDFIM>TI(E|H(f{P9}A>twZFDLO|@Lp{mzPr`hhv6YV zI-At(#gj+j>~ai4U9DsbT(66I?{v6DFv;+{p6l)Y*@b_^pMLjvcj?)8iFem4;9Cr8 zyjx2~Z@P8l$2!Y{sAvx`BHK$QP1`TQ znD8vEAqf-`$ICAE)-*tB z%N>9Ouy4l6;JpwQ_U*z{Mb{XC&N7iH`dzz72>Of#8dQGV+4i|bK=S@UmM^PzY?!cpsYRMRr*08uJA0}L6=E7`3%^FH;W-9N3 zXM_|Owpu{8J#!Mx3YY?=k{*?jw_l~ib118Zys5Q)TI9GITE2^jOrjzW5japYGfJGm z2UcM%E}cur`>9Bb^9Vb1r+Mb6l>_a$TGBgs*$upHM+YIlg2QyVuF)S!0-zn7xlzVy znCs%@5C`K{dZ^a;Sixw1XLOX0@jJ7JNeFNU^wWjBUS2m?{W$f;2i==DkmdVp z>}3cpMK&$n#+#^x%ZdcTE?Z?k;hr7o{2R5SAuv!|h^+>zsH3{vbl_6L07I`6KE%OW zK9b1%4t|c~jK7NR87bhW1bmf68bCJ-=ib=k0lAYCZVsSfjz#_K&&DJFkeQng?E9&X z_ZrZfp4IX3v_SJ4sT#!Iee~C$f}bm%iiU}k`TdQVVRBT?dd8gQAr^;9O*#=`QVI!D))eK0L;w70+irqaS9Tf96C2u8evKLU9li87 z15(2Bfpv#Y0az-4WHE+rFn(F~*)s5#b;s#t;M?Ga^eTEtlI15RvV!M;ln=;JEmvJz z{w9Qfx2I1D0a2js;pX5HkSzc2&6UM5h1>#5N7O>=P{uJ ziazetON_k9yloah1u1Q;GB|ZR2LRF%EG9w^VCItl^d25J?B6^wGdo+rWZS3c! zbOvj1Kp6ZbhD#7hrVu9f3CNxRqY&|5VycU(G513Hk{#rz8~l`SK$-~Giq)dThg7RG z>sq~%fjmQ|X*C0$%&T#Bl}2Z7^y~hRc3T9)We`X+CZI6j83B47Kxi^a+VRXBGM z0O`(pQb`+!l=C$yzp6QuFH~W)0;EIA?9DnW5_3x=k6^_|Sz4uaNSbzSxziO6=*5*L zb#L%}mav_9U`Be4tS|-Y^aKK!q>nicr|+W;UjXez)D;Vlh$*h{K3(GL`OV|&79Qkl zEm9jSr;kap%Y=VAxXsn4^|x2VVI+oi><yB^>YIr_UvOXje=fD1>#E}iW(l_q-fzzwm}!!0 z4W+}k`=v#a=;_}YJBKhl$^AjBPw)5?C1*uj$mUt|Of{F~NmO28Q{~w2s>-MLVQ;j; zXzoL-62CoD;!VBT9N@7fa^4S@0g;W~wy~XFxTtm6T3WKpGJa^7YTn>N#zS~Re7Boa z$-_c`B*J#Mgg@hfD{&g@!z@vfh&y}G5hl8RRNLPhoRx%1Dq#nkAkC}*^x7ojeBkf> ze6Ql1l7w%l;pv>b>(2*%jitFGj`L z3AFXQExMssTDl$Ly7G-tg&Uu!c|M6d-Z02mRoBSbdCv2`=lQ(<00Tbz{kDDY z`@Zh$y5keJE{gwbTNUsU+xOyhsNUwPgoROD2~| zA!+dI*^X2uZ5kiiHlqd4viTZZ5(WyBg?i;y)%EhYR6orN7{#gxARO57VFC&C-xB;$ zX|>nr!5+B_;e)|jV_Y1lyTHmf@>HKo5G7#dS?L2GWpr9G7n1d^?YHL@+)S(H9MU>z zt_w`nk7Eq*+-MPXT^Nd#dulH%JHC++1{p!}UH)3J4U`B8T99fT=L7dLSS;s*9V@CZ zP?0(LE@;1-bqdiS+}rQq)H-WRZrVG4x{v|!i`*0Zf=Ue8;`zYtLkmG5p9+Q(l)zzD z$@FzCP=f>% z!m1*dR(c}Z7XICi_)4rIY{BXFy=Q%2ZEbG?%Polwd?B~ZpV{*1Pa(7^mF6169t067 z(NClmbABdxtFabD3#~;yRjb@i6~udsku{BfZFH0Cbh^TT=6fUj4=Vz8zoRK?)ATh;(>AC9;DinKOeS-+!_d9$qCUZp)QHl_lQY_{)M! 
z2B_bWHOJK}B4q!zr~g=&i4L{vx2PSTz)0Ly7Gn)-CH#bH2F8I4Sk}vX_5d zcA%0TLG$m+GEi6M4#+%$&+X_IsrcOhp}S`l3JN%8sM5R_p1B)4@uNSf6AQMc|0raI zR5A%VgW#1U$nRG+RAHGtSDh^jLT@5{tM=Ecor(^3ZXJSY3&9Xp6K;!9^#{)uSmB9f z@mtOoT%QSg|t6d^YnCz%=<{1s>-834eV+*kmbQs{@%)v_A(s^)}~VCJp; zFnZw$c2=K;v1{+M@Iezs2-Vg!>436*DR3Z0KOE-l|9B8W{iQk8l<9FRRi>)s@qM9V`@npxB_ z8NH-U6?9#Kf%sF%2x1>ta(>JLj#Z$+XM!O?-W{Jd`L)6O3J2%`s~J$5NvfXT<2LSs zJZ)GD+XHApo-PbIauw<+&0hB<5YpkTBgCHyLR&Q~THi#gTT!B^02Nhfw}lK+bXn=6 zC;fWlR&I@skVFFodJ{0Nc<1B@SA3RT%!F`JL3g@~k4DvTngcgGimrpDevcJhzZzuk zx(o%64sq&ju$@Yv)z85AmW$lUJ`D&jVw$AAV!T zj{%-0S1iTIPdZBV;A?keJZ~>a6Mfrad}9X`oVi+=Ar)@mBy`j4@Tg}L%6>)`f27P2 z-X|@Y{?la&aE_~Vc>fwAc-{epCb)yZyo=^stvehwT$q(z6RR&`UYF%R%C?KNnfQZ zr?-FS{R8InWD?$-u}d1~#*q`0sC}{fxk!FZ%bh*A-{Ou$_cNO)R6>P94{6~|D-+fb zXu|Tt3yN*(o`>07r%WK;v!6(BHIs@**z1+RvaU%wqN;2l9pu{>w0{JBIM%Oa04{JD z+X7`0TtM4rv{cJ^>(@WJfuK4cx~>FoH3_z5OU0@zi*R`w-R?8L)Tn(Xms>jm2xW;* zhnGJ!)t~XZk++;Ne!yfw;HP4q89yoot*PIEvT$*FICj>AuTCWzz@y;5e);u;K`IFuZcG_$iJA{fJeTlX`@-U8bg3a%gfsj}k2(c2`yUbeifsH-xt z{c1hj?0s*qBnJ~AhDK|rQdV4Ml)}ThQcw5u!k?ECrl;r%vGJW!e-i$GUHI4aEiO>I z=EXgEHf1DWn9Zx=(_&cf&(xH7q?GCXkP7gKF8WzqsPph=lDKcDv`+>=0r>= zJH@1SyixAw`IH({2pLNkuDHwAn#$}Rze$7=2``iQ2)X$W*ebFl+l^ZFo)v6sy6>-#G*fClF=e9}Btz}m0^?w1ea-7hKBHqc(-1JR&$#y@b`LPJV)r|Oh-_}A zyw9zXd5!u^dFtDrqT4X=(`<;}DnDi;4QK|GiWaIb_jd)Kdhd zb$-rMqDXtl;A!q4t<35o~l5+I)>QFht@tj zqOtGgJ%`@>;d8sH`Xms%CLN%Dcyrw!LbO?~=_ucJfF@JMQ?(JnX$_^iEBVoC%VI9Z zlrVWHlXMg;Gwl(0&4m@|C*>F^Zo*;c7ugDB>R_Zs4!q5i0O}b}o;9`K4f~=9IeNmV zVP$_Az8Rr!f4W7EZd`TFY^Y{#n%k1fZ1}_ow_5mMAkg`xO+SazusWZAUM6HC&$UZm z9^|VHtnL}|pbbB|#`HLqepjOMLOmIMhMp6@jJQ#jeu35kZ{WVb5()T_YG|CyEXL%7 z9)gN|I2ojLNQM{w>?StolwJ6WW87zdy&`9@=F89A3H|o@{x1(;t^(onRZvj4Xrp)}%g9aiq(L=18=drrgy#u*^ z%YrQKBL!50G2Bmo$rbC^{1^DOz6%2ovF6d93H~1_{5y18`wHT6C3G^Y1XZ*C1lS2k zl+{GxHL3{AmbjY@TX%?R@^9q*{_*^N({bJOMlCnp`9)lbWGk7=X8EgieAtoq6WP4% z^M&8(%uvC16MBEvd|AYiJYx*2E!PCleBZ)fcM@Ws8w~h?&dJBo*Z&KY|9K}NR|+x5H%tudQGhzmxcoS8t7CjfTYyk)@tpaK8RV5DN7PqHHz`P>+*ZY!q7E+~ zSVefTDaCa>4ucR!yC*TjUl_o3YRB=z!*zO6O3#MbxW4Z16L0$hW0sR2$>vBGGhPXX zY4O(Z~jS&usHmR;D9MN`0TjqIl?D;zWQ zX+ts}e!3*0^$Iu;)i8B#H1h$*)D!uj-!KVyya0KJJuS*tIG^S{=$4u9#|lrA%J?aD zmHVd)r=j2FW9Uz3PUC86PBbtx6H!#q17CJ#_cChjv%EBTy{v%Eq0fJTwtAHj*u_;X z^k$hW)tDL+2FY>ARiewRrOzZN84qOJjX^ z85^_T_{p7Uu8n~{x4YNUzM9)-b=*a?0)ys7SrS{+gMjV-d3xwO>Udd$- zUjX|FslU1v`c5TRC`w+)P~e6K*yUIu@j=Z{x5vbmp;KXB;LlA#qj zh?t^eHgROhzNo3SKGU<_6l6YmARvjuYe7^W zrsXEPbgNp|jn8fpXMi}6AIU`2`S|qP=-;&f_l!~CCNZl}cTo!R5#Glj_!xuKR0EUQ zwkddgiQjOf{I|segAjn!d7#DtVfQZ-T{*P+gUIs(*u8Sd|K}A1N_xSWj+QHi ztTFQRujKnrKLE&^MERgI0FYnVlP8WM-s(=VOt$3yIF#W^_AWi#ffqKB`oREbFLUi3 znc6b6+~yaK2o~HDWda<5H#E-v+W%|$q=s{W#@t$tN9VMaB`$aRJF$@SSbq;uPNe{7 zqnUiB&KmBY3F`5>&J&c-o3_FJR>DXVH^qA}QgjchfVJ29oP|7o0165zcoA1M-m+5) z<&dfziqg!k9D+S3fiB@*-DDuwfqLBHdE%u+L0-RhB0q_unO|t;?XdQxM-SgnN8we$ z*ed&)>pKTeAeE&^TO)yU;@JBx1NQeL0*+-~cOQKh*>UL5Q=aJY#>E6csJG~<FV&^8Jbm(PKth4E4f>Oo3tn+EFemogW+$VD?r9qYfzcI+ z!s!)s!(Ueme=tf3*AtNUV*}b7O##DA3Zk9wn7D?wW8;VZ9CCh+ z7uHqNm4kv_fV^nt#}rL4 zp$8UCuPhJBM5MM}E6CFk%KEbhO7J6)y`L;!Dqacg|1KhRsjsaJ|+yIud7Cp-#tDihV9 zd`PN(@W*%3m`RJo?EZ)!u%}@feWDjb{dW@fBNH0VJkedZW4h*@L_^`MR$HKX!c^n& z^1xIlie3ST8AF??dLzU1Dgx)%k$#;u(134?4EU{LbI3v9A2STM7|;+L_fT`Rq4^)O z;B$P`VNr1g;kz~@H}-m&JRm$>u&&;-Qzvj~p6<=lamoL*D%NfP+xqZ1| z@NnlJ`Dfs}27HeOL#|$sWXSZ;0DCotjs@#|aQcnYh3$WVXTe#H{G@+@XJK6mnULFS z{NI6i?+XV^nebyzGu!y3vJVi{uX9=_{=)P?@Rk*?MCvgCPty&); zP9DOU+k2xH2a*Nw9aoAJ0s#yZKpau)Se+mRpJGk{Ke-vYwWEBK8mZuhzc_l<=0@IF zheXa4uBlDA-(?ozy?VeL{Ad9EO55b>t6_%%F_9!j^jwVfp^nu#=}(Z2{oQQ}^dRd> zBUj87v(THEI~=+VV6gY;Bts@@ArRun<{xkCL=J$dr}!+CEam)LF%_GK=Bq7xdND?P 
zJF2{VuH-BuH_U@^6!#R0-*>6LGLCn?az&G_W2`kYjYK}aL5f!ljfq!Ct*}}7STk5y zM$yvX^d823HkoJ+jqmUWKzlQguD{{jzxUHN@G{LxuC@&{e3cI!emsL+fAn(*!A<1& zsfgj#W4)B26t-~$pzsT~RNom?UavPv3D(Qg5^U+P0Y`mgK+H(f*$soL*9IX?hr@T{ zzXx5vK+fwr)TRfy92vjSN{%i%vQb}W@H}Ts#T8$^OEIf7l3(LT3rDW!sF#N?Xn3!) zqHXOFg2oTH#o_QzLSES0F1`psOf1}*9Gu#_c7OM>x~>y@%LLj$s26@oE@q(5jj1!n zU|oj%w!(**(=U&}7Vm53Q_q3-Iem1--F_&rYyXfL`P#4%qX5W8(+3zw{+7NxZ`Xx2pKw8Bg^q4mS0C?~HM>8LK}X$hUAijHKLyh>e_VRsj6?K5XHelfa< z-kL?urnx33L~~haaTNtG`M~yf-kg^?*?ectC6?*>eObft_|ma>kjLAB8#|%j2dkDAq6;7`etXU1h>kNnwME33Ah;z&TT#|-!5C84ghLDayKtRr9hKqqowF0O-N*Iww`DuB43-Hl0#oOi4a(+R7XnjRt}u|ZDBt|{Ot}8S@y_^F zfZnjZXKFP=orYR?ieE@7S377V&I+(cN?cETdB*)^2l?QZFDaI87Y;fk@4sez<6_DV zbl81=JnnZ!u4zaLFm8$aEsyR0WoezJq&;O`CCS(Y$sAd6bgrg!_#-;n&ci{s?a9{O zl!iDUdxtmxQD6KQZ3l$tK9{(j6@YTZ}LJY|0n^8Ngt zw37d5sX3cWBsqvz{AVB$3-t`#fMymyWrct8n}SY$X@-`9kXr+yQ61cq0l3x$tp@Kk zc4q1u5i(Xb0qw<%DIL-EnqN0txb)1Oepq={i|tY?rtv?Ul42U- zRIad}{RKWlFGA;oj3+4ke2ncbAKG~M621cn>9(gi1_HV&%a!bu26k#%6q&03Oq;(sR8JWnhBef4-{kNuvb6^}tfz|FMJ~>pX7% zilTZ?J{Q7=<9YRm*DQIza&Gn5*pt`0RL}jcEgV0%IL@0N8w;7=eem%G+>R+GOhA(S z?M9JqqZ|43F7JfH$5~Hfj`4h6#Msg-YSaET|3!7%xutk)hE92n-1#&~g?uJ92}pGZ z;Msg>;kimH?^SY@LQEIoaIM1 zeXpgOmL{mb>>2&x=zeoUz`a-ANpv@sQryiXY)NsfR7y`lYERv!x=498CEKR&M%~;} zyBi~q4WfWJ6MM=AYPO-^ClUPdscY)o7XBG_z0Z-{uIx?ioKYD#H^2I-AGfbfJxg6M z+Eb3%k=3vb%DNN+eW9yn%?G>v>fjU$RHv*ywb!;6i(l0#$ldtrG@D{DqeU9Bp3MoQ zm0&DHD)>?@-gzW;y_;x*C#Enfsm}*ZMEn)NaR!KATpVHgQ}yo%`wu0DBNzdHI|;@D@4@4aK$?i z3nEx|?of0J+=sd4T*x$=*2#RcEE%bN0^%v!IqL2V-l_uTKa&kxLPlKVatiX5Xo1KJ zpRcj6y8gKS20ip-8?Fxm?|cfQ=}0^`kTo$QV(NOFA~icx*KCHLDq*s=T&B3VII{m6#|99I-2Um!9U-;`v{XOuPDamg1Ph zqE5D5Y5)m&AA6E;T@RQxRpQp9|6VoidRWow51vxYj=HN`MU`z(`1tCz%Hh4BTk?ZW z46IrG!>{(IYnWH-)ILLwq&bcKq*H_SgaOq{Kjbw#;;@}2|MF*ukJzQ%kk@QJ_nw)| zsZH)whXYu4+PDa}&p^WJn-&CCggZ@rmbpOIxl*L7@<{80d5*4FdNxphZh{e@d~=x@ z&Z}k=tC~*oGhSClsIA{nwy0m|qI!YrYyRrhNT}y1u)_J#vE#+{ba&eDj%)FGu>y6` zSQUYxSV1Jhf!mXw3-Kb!fjj5-U6SXgwUVc; zCeW_vbZ64?>-kl+&UWf3t;Rcy;S)CCfGta=!i#+u@1||=cmt7K4@i3KpFh<&WcVq- z-*2e&+!miRVE9_WwEi#poR@Bvz7&{$4|ujdj(0b`?Jihe@KTkHD5}Plz5EK*mqlD3 z58TBTL++wyYjm)pn{#m$;jM1q6AKB#oks@(0N1FoYsw^KsNro7f?Iht4D9~l0za4; zQD?ejAi|1Bdtb&xtQU`iKZ|O?Ld}%9|KJhdQkkT;ILI{bXNwP)I`in2j2-k#$rxcL ze<&$(iM@uwH?Btwybq&gN|_Lj@Q!T09kV3cqj1p922t{DzL!~Mql_MozH2|d-xbGPK!8h4cE{(#)PtD68yIKW;=)PnmwSe*%Pi+nRIWR z@_8y`HSZEQUOw8pV>W6iyWRWT8$b%*=x?u-s6>+drL-NGf!)k*R}=*iE*kLQUSg9Ha97vJO&`kR0>7d_n_Vsj<%VL{=a=p(@A7s`*!uCpBf z7Z5-sgMzLOH5^O5l*LsHSF5L4oNGB85=X7>zF<_D3mddLJqH2AA`Xr1BMwMWm7B%k z_r-Ncs^7U6GzW==TrUk(o*R69jrQG|q413bDK*$jG--(Jmzyf}bK+Vvm!pPm%Z<*25pXbv(*pMciNjudSIg`*IcQRR`q{ zpr%hDQm8;JUjhIJWQC;%JJpN8R2JReokl>DokxkM!OYfFHf`zWhv(>Ka=pv1O`(2N zoLJN;P=UjB18%JxpU^{T6ZVA^F>V68ra&64@5ruy-yDFSVxjbU?_*aQW_r@OVGF0! 
zXUuk07w#dNVUaf`RMMo*Wh#Y)-h^$vre?jA=CUk~hJ@HGv-(BGq!}sYF~KAt?;2{Z zQK+WNRNkQrqCk0Fgsz4<*R1UiS$s!Z=MOIHpKMzMzWj1c1`z!i-#vXz>X&Dz7jQE& zb%HhiNWgD3LtvfrZ-At5U1CR&m0 z6vD;gk~C?P%|Tu5U_jMNGg5c6_hM2UR@2CpqD#vTx_J&Zmv6XSr1L1A;(1v~UXaBK zr_#B!KA9gmIv?~f4;ghfgzfxPzh#vA1e3uapWB9tz4-W1^`Qdxbh-A zzVUe8$?Nj1H21Ibyx!uW>SI>2QR&xT+S-q(+d=qWWN>bpFdV@)GuA37iv25YH@>>V zdfyFjHFdU1`zNV1F@ll$;K0d1DG6=5Cqobx;}C{R7blG|%@Dvx=3l3<8SbkzC;+fO zmr?;!haE&|uL1SZ)5@O(ExnlLTFGrE7SaIs=vN>wlF>C20o!oTOSszyhJrzZ^A6oo z7hyOzl^KJB+Vh720w5IGz{hhTypLl1&E3e;E@e}zGkBKZD^nO)QkBWT-c$Z2m!>Mu zH?4NO8)$H2YC4*@hp!LoRq>GsY1&q2;)me$$lb`n_3F3u8R2_Nms|ZRcG=34PP?SB zXZPmjWpto=<7x}WB+@Kk{RNx;D!}%+$8aDLhs5iLeHSu9D)2dq4$+}%Mfr6AKs)SJAC)N z|L$41$NHS7edS7Wt|V0i_j9LODvB}G*r$hT3{#D%hdm?gtW(sBnv1GHlJKW4zq}?s zMRu(DV@FdR-7u0!ruL@`WvBWsf+FrneP0Oklokb2hJAM`z@hYGuRf{$6ArLg0;x#l z@`cHNlYBU)b{OQr&C>~PB>`Zp(*9-wz608mpY;e7@<{0bwP&nS`E;ilAUmkgH`Hn{ zy)$L>pxO7G%+^r?kjMQRkS`ZRr$TE-kXGxRtH9E`!aLA2dj?jEt`J8Nj7x)Z$Zk+Y z!fra=*X@F8yO7Z4LNr4!7U{YBzCfi@C~q=AT^t$2fg;eNb!W?NnNE(Kv-$8z*YxuW z1YI9(jzRe?p970w!_yYm8qO=Sg^~?iQ%m%bOg983*q}U7Gn{C`L*|V9 zE{#KoUnd~5DW}H^4MJW-yJfoU@SzRtG~DzV_*R>OGAsN@;8wl{E5$u3%T8KC;YY0K zjS2LCAvOZXyd9pV4;~8H6*b;h+g=yI@Rz)rA*c#e?O}wD=Uq>q;UBE;`IR)RC_(}) zm6vm)8s~XS4?O#1mvv4p&#W8PamrWdAM@nh82^1DPe zWUs;?kR5K-Y{;dC4bY`uH&ogicLD|qXLA`;E1kMPg3d0-e`erDV>y&d<_{vIUMxY; zQ3`V+$UX-K)+rz1lbl6VeNzagT2}Lj ztGJzj#2P#n44V7{rF(+qXb5%n3XJ{M%AT>DziAw!AWJ`A$V$DN=3HyRt`33UJhyq! zP1K<>NM)Jm!#fmf56IyHZCdNH`}`|1|Ui=sE!dGGl7SIN&D* zWlokF>hGOP zo4!TqQ`7wb>31Nb#|fkjyIh*T@rDJ{FBg;E{y6+!03=wj&y>_xdmYYd69TIyX~kl9B*OXa|9x$jTzW9bntL? z{bgnP*;DY)PZ#7YLZ`+CYf1Rno-ByN%;gD$_SbV;yIocTzcf(9XQ* zq;eT9{c=K-ck?|JX{V@LM%6pq+rMPedBfiBqL^d+T}{sv z>qWii2|>EEij}pSoEH6S2M;%aDfdPDI{1v!3RP>xOgcKGSZ;=E3(lznA7&MC-4!Ov z5};VL3~Xwj-JRa3Vn&U6=O?TeK_{C;V(KTi`s*iMeK##T>;t`34t(iu^l0FMHL=y#AJQP~H_%yb21=lMz6qQ@(T zf$cY!hjP1*karHY`FL&hTxNO?nM~RbnIJRI*k`8*65zEb^n#h^Yq)7g=mZ1V%HfsX zy_*?);jWa@A9H>tkeX8Q)x+g}xV_1x!1*fIfs_^K~U7VA+^F#Jf*RT53sXtDg(e(7|sV$1i;jNPVl^<9?ucv-4Cqa_LiYE#7U>LH4B1Cb4a2>2qq? 
[GIT binary patch data omitted: base85-encoded PNG payload, including the new image file docs/images/arch/PaddleSpeech_Server_class_diagram.png (literal 160139 bytes).]
zAp84Yoc;Zm2}EC!p_-UG411e1M~NF$GlJ5c6CfZixEbvQm)C|kfz$$Dx!U*pke7KN z-ru+*$H5?&2$gnL7TNpjf` zp-+0!r%G(n!fuNQwVr@)hd3M=*`hOnh$ABl)OiPU-u9sq{W|5g=1$1#LtZAru=?)bPkjEBjge!T_H74K;YroF3+zPpV7L=o z9b{J~izAE2NX#`DMScDoM@AxU`vl_1$Q?h|B?tRaiT+XDa>=9*d>Vqy(SHk0C^pj3 zxd+)b(7KIxfY=YPyF#DCeaOodkdD~3sVR1D2@cHQS>LnL@LMG>HLyF0x2ikf`TbE_ zPB{+zlzhIdJ7jm?q}}X?@6={Y?8DcFB5L}>HpA!XjLTA*fP0hE5Ef~ePQe2)kTZIs z*JoLu7Q_e!K?e$L>u71I?ImeAFh;}B>uaixKx8Dg_`|zgh>Xn6H8WCs*}pFFbn;@F z_G&^O_%s-qFs;^CQ}jZ1@z6`><1m@;lK`eAj6M6K2t*1fXA%C+`l5CNrghTI!YQdbzKe}3q0ihoV$ zr`aP`hF%`Trl`!hI(^V!&^zF)pnL0@DJuslj5|AM==A3|2BFu{@u6R;XbKG=IV76k z+>Z!=pmGJx>xw9Oi%$P(1|)|@vndDHG$%y!cVuB^B9AW{I$t*j8HR|@$~KstWNfBl z4+?I!P#G~BECD+A`gM<1=6U*$t1vXX`Da*R+~J>Lg}<`9S`1vz|4(&0AhNW(u~Zd~ zy}``q42F!psZMk#__rEGbi`n|yGjDf=<-_t9p8dwHLuTx7;bZaJOvQk#Ihkjj;Q}~ zFq%w1PFAI~hV36Q>s}6f_*BF^h$XCxj|p^L4JKLNX{5eVh%2&3PxgHpH&P1<>iNIh z;PB~3(`R$vjppm+FA$uiY|ygyraaiwcc=o%VWqBmgL`LQLxA{#yZg0Y>1W1Gyy)7x zjh3$(t0`K4C`L3K6+YP9mlXzSM6oLKeG5#lg>*HDsMDg8$ih;9H{+m1=5rVevd@f% zHX;;a{57IS|Di_dl*X4g|Glx=0dGCvtp~jI|4DEC6Da$iLt#Q~w85 z9@8+7?8Kq7KHAjcc0YhUR}|C&O5`Xp%-@7i{xS!X{UnbC!7)vUr;UMt18Luh%t=Ap zkZi;17a~6#M~3VR(ijLG*%PhJ{V0d`+TZ^obAz-pvV+-ZU5d}0B+ z%f_&uREy^&>;64?NsUJ$_uPeyF=W@b6P9a9I$cnAr{73Qe5Vlp1Ax;yPDyJGO0yLf z5v1EH!2s6jz5L*r1gIzxU*H563F=sYRZ5!oBnDscfD*&bI^9od)`Q8fckcBd8`a|k zvr*?|fG&5oIq)y1zfKmgKogK7uKXaD5oEu89Jh!%m0#>qiA$7UQZ#NcH4KK0QgV6c}$9)%kp8xz7lhvQy%vzUHtUs zaf1i6k2adG5__RSNQO2*%r zjO|{xg!#_tNM5vCe){^nWMy>8mgFe6hUUHUcaZeWU}qA(d48^C0n8)?mC@<9wb*o! zwS(LK>7SCp1Wlrjj=-DRWJlSKs%?NbSZpHBqb+P|?u<1Ixm)GI&r>=TGzH?cim2d| ztraasa7g}}$Ez)X{Sy1^d`}`>lBh0^m9@oI$DH}#0EkT!I5@*gCL1g|fW45yrfrN4 zRf-2Ms~O;v?vUv=tFxTim?~HA0rWcr%*p|q$OKq&dozJoH;m}|ChM>~ot6?W^#IM@ zo=f*Yj?L>-+V0F$&6ehP02O`S1p>T-0a%vvZyJeme&=-ppV7ybczUFrAgBjCPkiAt zyNyDfom3%RwoCm{Ifm8mrr7i=T03;paewuGQNgVf@g>S2YnWf+?JD05lk9)m>eo6)v)4PY1#f7l_jyMJr(=GG`f?bTL|gK z?5J1E8QrBM@u<)jDM0h-v&YzLF$E@)_V=V)h7}7ud!CR^Ov$$KS>K?EQ-_SZoT6!A zpkP4|1WG9SGnPv`QC@YP@fQGC#`d##V7bFZSnd!7!JwMzfNrNE$~@^;!mj~pLlAbH z`y?~^?KF;-EALi1(xq&2qVC;udFj+6Csj2Iztl36Xl!gBI~U4OIVyLdHKWs}kLh?y z_H5(AEgHmm%)!-3R#hmX%do2sHDMODMEja(0_BXQf)TvNl(5Z zXPTzC(0dDZs!)C-Fy)|YgBBst*i{bMl6moCU~5Mu2}PjGQzQ1sce<-{+|gclJyr)P zA-;P>m)*Pw-Fd2jqX!sRiDjtst<&y|e5y;6LmW73=%*_qDhi!~iPE(#1^=3Yp`R0Q zwMoq$aKtRlN2AVKC6K|ybO$n4wE*d79^RVPWS4f>i38P9xVH2j+ncAQSrSn*os{Up zJz$i>e3PhDn7yP+UAfIpsSB##)WUDhx>iY+4oMZ8+ZKCa(%AQEivaJzFOMki0$p&r z*HYr*NxRkL+*O{{@T%u?BUUnA4Jvgd+ox;>WmFiHMy4A>Nue+ zHll^Niq=Of)DW&cn)eUicE34-GtyuZOXbPk{Flm}{8TUhyV1SOVEWy7nFe`FDj1vv zt=B21JQIP%XRy2s#F{;ca!Zc4(nn!H6;jFYtbBL+0qX5(E)!}?p~p=*}Y2smL^171Oh7yOXf`4fb~Mhz=uhT2v01@;RJSEr*Tryz*+0phoDa{WLr1isvujIogXwi^ znW(K)}-={=a1ZW>J3B4W)W}9?5h;{P#JNs0$6MGjWAZNP5N zllZzMUCEwSzw1?iS^7I8lC8*(RJx`AQa1aS`OmnKXYag@%cnlbSpT7PR}*zvlAjiy zDuDT4@h7nuL{e7{d_&#Yx2r0#wKTzf6+>8U0QgY=Q7~t-S)sG4G+eTG_~$EXL1xti zwCv*COEGnt%ED;Ey)r@bUdk4Msm!$yl`FUA4?iS#=QG$16Q4+=iiZU-bWo6YLDaM)yS2nQmPi{K$ERfrPE8>^B5d z(!6j+V!WbucwUnSvqZs_s~Nmxw)0&CggK6!NTRZMty!oh2*#=_Tx%xlSwCl9h& z_X&IgNkdUYbpM>YoV3x?4Olvcel-(N^Fd6(8d~lym#|1L&cL`LZ&_EoVH9n`)qYgc ztwAqFeZsig0Eh&*CRkkSK(UvKf@A935P&>V_R^4F=~kJ>z+RD-v`{rx#rh(cBWX|aKbfQ( zCi1o7go8lJF!F9pT|W8D8){Vx@Gg&r-bH_80Qisemrz8`*_E`pQvrMLv9k0{Mw3&X z96Dlhl+XfY&K-`hTjC2i87A!+%dPBBcy2qO*bIecs%s&aoeSHKxps#WjQ0Wkuna}e z;!1Vwj)SRg6~!UTu_kajdoI6ik|8@MpCyCTDUXlY9v_>kVF`s2h@0sKwy~*~8!W`S zN?kev%a=~gOgE#&XIVGKf8fLeW{{=)!~24TmUL<-wkFhBIxicxR}=5H0yMZ8SQ#IZ zl`e^r9XA0$YP&)d?c1jZf~)vBT?D>ETGV!3KUW zF3yE({m58Lr&I%RLnSTkv9i(D-1D_{JUQ(Z$!peZ0s)!I>8iydiKNUOnq>59FpCPaO@eEmmXnrbKOMBEE*k6jr 
zhNXBK@2>{C*PhJY(H1>=+lnO3FExGc0iajmS)9nUw#Z8q@gvGqq&*w4cg=SSd8JYY zJh!e#w>hmFGWN#OBn+D21DZ_qk@aC-Gc;^TnX+vwUK5Vn1%ui>sW1rwpl)TLwn?kt zd#%o2tK9pSnG<*xhYi@&G(mR|M@@SE0f_7>3HTOi!l~f&ZS%+$>XA17Nf`t z&!R3-nY;);h<|{iiC1y_4%nyqjG_!<7C5hH^IiT7k+UOt!>ABvXSpKt{%X~2Y`8v+ z2D&dh3PU)ekXji0d)&^ysA{cm1lo9Oc7yglD5cJgXK)2uvcu+hFH?h@$N)7?TH*;8 zU9Vg3me+kTQ|@Eceq^19>t_PD!6K|l8P!6-CXh}WKnnl7FmQm^^FM^wqhWZatHD|$ zrkK6VWJ*V~yBm$GBC^%q@?eTTfrPAaa}p;HMx8%eHZ#TIPX?%So4%kLMEO$yh)-a% zl6VePLBJYc!07?KZdYV8yPNxJaL#=%!}v~fjc^9^EORsdm;?u_%hBN%SxG9O5=D|} zrxBe9&Jay%3};1zoi*fd-yLu;4=!~nPm<5~2DJuFu~{8cQr=c_b%w@gL^&h%zRc=s zF&g>~tD14X|K~A?Vfw^skal2G9_mm2h)2F3@alNMuHgfutN~Ki*Xr5-Eu^gf0+<(8 zG7TtI3@BCnw^yq8m+|gEO!Ge^rU5E0|CwB6){7u&+L_pig+pvzYA>ofA{dgUd~hme zKVoIgi1EL9t~l?tA#nPjb2Gtlb?x0>&I4BK@wtP`idU=bA2fZ-J;GNlfWaeh(rfw# z1bu+E5t|DOKXyixs{5eg!{3lJARd@gGnlOM4 zI`SK2H6WHWAeJ;BmK5r zoiw!|`H*0Jtjura|3NlvSE<1_nW_JP)b&6pKM=~_94I?%``=l1IFR8Q&|>`$)?)o* zStKKgVT4Al9`RjA4G2T7%l#$j|W6q zfgs-)!0uD;OyTG2KOdrL+%-Xg>86YhIX}w!EpbHq*%g@5ppifRc?DEun;!L9A>R1g z&YiUugBPASM^Ok516^c~`i#cLPnlGzDZ=q2F}l%euwmO0h$yK$XCm};p9$KRKtWGN z*o{Hh{5PKIV-dh#_$}BaSDf6L#J&Vo0*l{3QN7pgFLTSBdnURvK5{0WVrq^@*w+Ae zgFPaOZ$^VsWoYc*P>ccal=INCGmI#bbh!)=CsSK%pw{IVKmcYNxQ|M)Y;qZuWT!k5 zbzTP~bK#}oaAuERBRq6&bGV0silcAufWJ3eTp#INh8MP4tN_UMEK2k%$532uw~r}f z%uDX6u=CC(q8|KsgRqC3lGLXAc9)t-a#8N_QJp>S$oe=E?%z)MF&i_uk110Jw7K{i zqTH(|U5Y6F`LzyQ!w1{)6_6DM-i$Y#<7O*H3!x-e8LWR*W@bK|j5BJ6q~vHAT-`H7 zc!)QxVg$Nc<iZ^g@P`_q<~YklE~MSI^e2R!a`+f zVJ+mUTF8Gzh?to%RAh`W+9S^@dM3F*`6@o}wLlkeZuPr(P3Mutz)WBs=pC9_N z6G}fMmBM9@UGp|tL%5@WI#0ppCN+xIO0AcdQKe;vQ^hF-l9W>PHfuDxjoc{%f->YX z^c&8P>xh3XTR3rzSK#GA`^C4rM1IMm>p2hO`q$LC^GP6DL@me_r-gsZ*dS z)W94(JcIqqBB7F#fZEubPTX}H4$p_%Qaz0qKPW@>eNeR!itZp$`7-r-=fbIoqR+dS zTGgNbOoC4)>@>}chtdpf+o{T*+H`S+<`&}Df@#hXb@guXJ&Z2* zn!slts(_G8;|JNFsQ;K#0YQaqPWttT+z-f7En(PtkCy#MSwG#qqD(=|(CsOxHEJfzQEK?19%02eTmHhX% zPem46D@ND4TwB-wr+Yn^bJJnBD6iyqUhRE(fi?bimpfPINc_Q(=&{gdB* zetyw0op{0S@EBC>M&VfD#p8KbEo%a0%VnsHjVc|Xd-lvJuwqNCT&k8CwQUQS+SAj6 zYtXvN_;eDR`N_|UcgKDsCqA8O5rc0-s<2~f>Prb%r#>!s6q1~CtIC_A7Gd=(d_7v= zcqOzz=B^+9U>J7tloHj8ReSa1WNvQJij=X4x9*2MKtaX}doXiUB`$%i=LSUqL=!dx zNtYicWf!s$^&=W2%3x1VStzH7`4CNWjQG8w?zWKl`8jRDwt1)`dh*weces{e+Yx1? 
zG3w5OTE?13m^IuML}r%h#t@3qcps|_Vir4e8TN-id*MsmHJcn=1$Es&r5dk`EWp+8 zanUVY2TnS-t5Q%Y47oU#jG#iMZt(|xpBAM9hchN84MxYs^t0b;RNpSsMM6b>s9!86l~%>J)_c!+E-(>Wio7#} zFz#Dl#ru>+oAF96L+qrMYrn|j+3)TrOb@KN=8|hv8+jgoMbsq3Tjc{IgILh~{8yod z*<6+qYDCZ23tJ(`_CK3@)CFpZrI(jg!U&4lX~T^ru!o z{2k{xD@!%_hRh7nL1$T?+F4Hew%7k?0PqEFAiJ&;Lir}S`bQoOE*z7n^wtGm-ro4~ z##=|2(!Pd*;%#*<<4TYMR@GX ziTr;R6YPa@oClW{{8m|ZvD{!UD7Kg(1l#DGbG7mX8C0G&_2wvpZlKKBv~)?eJEsbJ znBKu)A)$>WRwm=HEmok={Gc$}Mu{Yh)_R_=E(O|zXT7iy(L1QQm=$0<;nAv|xbLkj znFJW_K^->qLoJXsahF6!x9L)%wSIE=;pv?Fb}?!(uj{V3HV@Gxafgt8*@uW4bpyiF zsbx<;j>R{dfrR=^8Xi~r>Fc4|qgTh&4zH4fGJL#^o1SFs{(?4LD|FdDJt zYa5V0B__N(#sfzLZ*wUUCHexTI%B*zXmKJaM?CrXRRUFXhkCEdMt*n3$f~oH5zLQM z5Zzf1)w!c<@At8@hafZd>DoZ!d+CvIxLC(5{^$sF?Sf3bT8pd4ipC+nyRT5+#!e{> z+Yd#QD7RL%KgNTRyjGK(E8|W9Vx=;2y=Y&;S{hV7!k3m6>`@|%qUBDVn>rdNhvV{H zs>mPsyg;~`#3b87E%YP=G&lOHAk|OjhOVid(1(A5RV=!Dk@H$R#4yC5Bx&4jE#>py za)#fv(}9*Ic=AWqYmhJd{pfg}+ zj{?k(t@TI!obRHfR@ehc;u2hQzDqU-+yuvve|Y=vm!?7vUv~DXrrK(KQ1*MAy=tew z(TFQw8@v1<#}0+Fgu1kP@wz{XvS)mYO;t(FNX!^O$lvClf`Z~^T&~IOhqwQJDHV!O z^A-o6d9AkvNOXM}nBsvc9@xb{N(&F{;=e6q`h%$(Fx20>XMDH5|0YA-M~Nan))DdO@VN##6+jl26PPqwqt5oGFqemIl>1phsFe2re$*dZL* zgySHq@B)+wZuiH`yuWHz{$aAH+{FV<=I6FB191di^)wJiz$QV0flzVaG%#=)7&!3# zAn-pBD*nq*v5$(ya|}iTa4J@@JR=~TYy_l6j$=ZeR^d&>$D=_RoJK#4$rjsr((Hm+ z2F?%Ms34Ct=7&5|dMZ8q-`?ZmD`FW#>h+Zy>~-RliSl$E*-9YCrr4Kbqjp0{-d@E2 z|6OzUy($ir0)KLp#$K9hE<5)4Ps1sIbknF*Ddn1tEK6{`FN5$s3jD6Uy9mjb?o$J( zci6NuG*B>m0;+upMo2TWm;Eu#00L?t(`#eEKTdrBMBQII&zNtnIrLkf)Iy5Kz8!cs zJK(5RP_TRG3D4jEc|$i}kP@;O9!)~$npd-6A2`@9&$v&B;t##w82KP6B)pq%^hG@a z=#YcfLm#dyntl~`*XmX!F#B>bAg7V9N^7F-Bm9d7dv(oU3T~US3y2@C_v(S0;HAL< z-{1aEdhsduX0-xgVoTMqlMSX@zv_|yFi2F`MUyU4XPW=>HZe)U6}?V}3dMZilMT|% z;3lwEy5xtq|EZ%4kckOs{g{z@0^pgoWfz}J!v&H1`U?LrNT8I!gLhupAiZE&mjRM^55K?)$&>EW z(w9ot+B8Tuk2+(^qISrZYIPAQ7C&DoJq7k0f0^f+IKBMSw5i-ypC^mEX3o8s632g3 zML)bo*2r#C5hj{hld57xwFT0kW^eof8t9jgT3jc;6n~0i6MjJ;1*Bj{Y@yMLrihT6 ztE?+}Et5%km$6)C>CYFEs=#jZ*i2CA@OxE7AMW8c^Vrpt5gTR@J3MTABh;?bL`iN# zk(ldDa<3gCbdooTYVikT6ZQBbuzgyw^?^ZW@CzE5Uka9;FJ@7enx(ORzZTddE$bxf z&XjjK3x2+k+zT4KQHssixYyhFMhvEhT%M{$Jik3>qame2YHQX&NsGY;Qcx_tPmp~< z&mgY&19-W4MhYZm4qMoGxNoFI%X*#sYPx$>{jPKL<~2O>MB^pJS94GgE90Lo1b5QU zDPVT16jke2N^R34w8I9ba$qV4cI6M-m)tAq7OE(l0k@M@9)d66?KLA?eoLts)n(>~s>#`ZOjj5_YKd{k&AcSD*Huk{X$mI(Wb(zWNS-pSlVwEugGu}9 zJd&{cl0>uPVu^27PHzx!I%%w*jN>;i1MUO>emn8yqtplB5jDjVw%IhS-3q)+^TwY{ zK9WS?XV4{j1eam1SnuLmyoWJe?NzlgW>eEF)ow15-ILmDiaAyK-Px z4j9<~p$0abOgCwqprtT)=(v@G&Y|mTDjC#DV@7W-@JZ7-H22)A1Z2nHDs)-{IXw*$N3FX7%~0qO7ogKz3ZzfJzc{{*!Xh->=0ZsHGy8J1hr@ipb?xS%+T}(>HCQyrLkm601=2XNAl~gdHQ>*WRSz z|3?p@VX@9VWxWM0J(R|yy%DBe7GAxRr=rFvL2ehUF0&X8-ahd>0r6RhpQ9GF6W(7j zsZ5X})8eY06Ry(=JT}m5o9Qxahuq?Zk^ED;u-qESYxZ^1mYYnCkYAw8?|EU25R5<3 z-V!HC$~Dmd%~n!v;1y4M8ua%BCObm*R9aWrNxo?S+;7$nl0Elkx^?YzT0wwB6@5BL zlD)GnRRiHHDyGnep-rZi?r9yeILCHEal0qgo0Xi~&8)=k7pSVSnj%&((iC~%U(b+p zgGO;>OK!XMU!e|8YV^a|VOEAGK)lmp|IT;Vw0&=>XXB2(HI*qGIqrk0PhWVyhLNt; z_v?8U`0;56)99!j8XGT}K&>QuCMZ=@@Ev-+=|g)yIEa`YY3pRmNqLC^k2hom_lsBt zb$+_caNvmpqdKs126hLqN&_}$z^)GXj(_eSO{BbR0_*Wg`Qc<1O)f1U$t14wy#3rG zT4&SABaX{uL#NIFA=4lQ=fOSU2JBLZy88lPdb- z9CRDY@+j7W35i|3SPlt$tk6!_@wE4}Fg1A-O*#p?FVD39SFr=1(y(y`pb+<`*T3f_ z!adg zg>{E*U*mRjk;3`u8jnWJ)My+rKhEtMBLfMh;q9WO(|_0Tuzfq zD7uF~-u=`*J+!Uk5ZimXFJbOPvH{Ka)#}Zk>B7wM*W|vlZ&S8lU5W2HJBA^z@(m*G zwrIp?7j5+;yeJFm|1Z(%q{p&oR7i*&%hT#CHA@qws~ zi1r=m?NJ~d4)v?DPB7{Y$_m)c&g=iZ%#Tovo;^_|e3%!-@~Tr*XuQ!*#tw{Z5zoG} z6IsE_Z62S@@W&?R1jPHVT#-=wywTKo?7MSHS98XrBi5apIjTXiX3fF(vYFF|UAasd zv}Mg5;o6m>CaR!=9$a3q96r-CHS3OIiam1e{iwJXmAQAck?C6(mFCIr%7!(tvy>eU zi&VdV>{40Qw^uH^4nFclnabSk<slpqb 
z?$iG^b*g74T+eLE9uxN>mRa6)HZV_l$)0n|;n2MUWAiBKGyGZ)__Yh+qKK%uexQpO#{(q-cm4St!q5rZE%_Zk7*f40Q;?8Lb< zk-_?3-v(1SKJ#TccK6)!9>G*gP3}vP=^UNHEJvbci16IX#GQH)8?oxOSanj|y+#Ur zk=gjVC={x1oR2;bHahuKS(+oG8nXAAhfb?JUfva&G`w!5qS-+o`~!z|4FUH*NK?6{ zu=X8skaqvu;KMyRh@e}Tm1b>|JH&c9Dd*s@it>&_xf4j^<_5MMrfrfdbUD|1FMoyk z!WWq(nSkzAm`bx*-xxRn^;q=6ot;BIhwWyMxMicRWU0B6xpG=d`sD>Y_<><75jdUg zM1}34mxaT?Ecd+)KHLZRHp45*nRfPP(sP>>??tUtPfoQz1DDVCs`h|Z%E~lHtnlz1 znhJc8gS0x;jVtS^3!Y*p>jDdApQdVv=3OJ$X`DN0pRqsJt8MD#AsSPz&qm+}PRvmT z(|JSseHGzv(>L7q$j6oH5;wnv%C4WW={F}GwP6G_dN@eE|!kYVfqm&K6Vkm zuj8Yb-?zONtwkEvWfQHn)!#wi{>)aZAtyqIXuW)gf1rX0PUlN}P2;Pm?``nm_RR}W zJ8DCTebG3W7(_VuQo*6U=VhZ0VZ^PC&adF672%s!widp~4XF=h!C*i1#qV9YJ`|I{ z@>J)Bh?Pi`^Ux13#(FPQer7ja>C|ui+hp5x5}eK+d`;u4sPAp?;eH`nJj9`$y)40~ z<`Ca&)31Ho7w(m(jy>7k#OV}0y zB=U~*cJbrhW6V5Rx$Kc6Rz7(V(s=m_U3gn-YfM7_nYa4tL>o=9=-%TZv%RiSeYo2`Nd}v`?^!`c65Fe*B_f*JIbM*Jr=NebLuU^U*r9>oVIX)I2p$F^{(*>pAmaaT7x9Z> zd*WG>3$HF{CSHw7)7|bDV%%dBV0`!v=usR}N;A^|A&J|{X6gzA92j)WL-#p=lPHW& zmRHK!mTSHc-Dc-iM}lxf90dXx7{z?pQ(05-kI|6|y5;EDMLLMry;CnE{y+BKJE+O7 zYx|atBBG!oA_PT6MVcT;4T_3hfLJL~f|Q`rL?B2@j3|h7QK}FS5erK1kbv|O5Rno> zl`4b|2_fy<0k8Xh%KgmyywCT3^ZoPATyu>xI)UtS@3q%Dj^A46G|&g;->LbPIClx= zPT#G|wiL=m^}b>!LoLka)r7SWm^$Y;`17Z_Y}e6fQub}K>6N6x&sLx?rp1*l+F6u< zU!r+pvOV(Q?TFT}I7m0Trx2B<9Ot@vYS?k>tUfa3VS%E`F9pQ-fba2i)uWjD|6!SyPi{!_^d zAPrehMyJ;mVv&FJlQ?{20J647Z~VM2_j#L(A~p>*yBlRzj?uow$q56l0L#^ZQ{1}7(J$#RC5y$v+V+@ZMSF0z{r+-5RFOK%Y^Pk${j#=a zbiyeFBXiA}D9HY)CbCq0CO-goM=?@0BW5WA=hnJq~TCuN&qa@r$5wQwDw;%tk4ZGO5evaW=Bn_YZA zp=0JVY%LaL>+wCpP~*3~MdEZI3mky33w2?l`&n`JH@<^`+=}D-YYA27T(|k@CiEBX z%Y|%Zg_{Z;GN>#1Wg3>0eFT|uKG#KU4HW#h>&U36k28)}r?k9ktWz)CqH&ew&VuFE z)jzTeJliQAPST@zPU=(Z?DeXMqj`L`jak#r_{yOhDooMn6V*JNZNN7j=qGuhS7WRf zgcqBizPYsQ3ViDl3LE#rr7nA)8q9#M6=F+tT>zV@;KeLU>PAm{>UW>%pi14)Ph>t% zJ0gTee;o1GAt}y7-vzjP7Zt&lsfZHvo?U$~T414-xmi6T~@p%pX`93OEqr`)GL z$oFs;6ls}D<3z|~{^z<1JO6BK-!49cO0@P)J@_pJtzO1Kw$7F-AzBd4*fS$Sre5>% zNj=OmZ8hUq@hIb|)lWjCyNCRwf?7XWKgyx$vGo{RP5HahG}&$$ z(sKIrlI~?@cKnyM+Z&(0v9xRfElP?t>G!dBbGL0AwOL-E>zw`E%M?20#n`?rIn%XQ zpy0!Ol&)G4qb>y^I;hcI!RRJVWQaB-Q3jEnD@h;W&&6`~8!71p?^E_*DaIHk!H*jG zwi_mE?D>?4J*b+F8){W;u!N6&c-ESg-+m9fnlWyf!W3bD0WqV;5<)kmFyGT}L3%sR zlXbTz+^Sr#p#-xz)qR*drZ6CNWmN;Sb`aq%WK@V;zS zE-wBB;?Ew(muY(VDCP)!$}qpLim8}S)qv5Zj9H=J;a6=x`sW#Z{~tXAn6p$h1X}Q# zCD3sXBZHh3d-m6J*z+s2ip|6J_+K417>Vk>qTZ8GIuVcezn>NjPbq~fGxS8ntBkWQ z_L39sklPQ3B)}cVWe_tj2h?m>)&a9d%H*cW0YvZ@we|m>fAhwVafuzv?M^0Zt=Hkz z9m|P#7y*cH+MJY-vsy#Q(|WTQTEM+P3jerqJ_n;ol~p<^6JI`$tg2Y}#+~1Q7}8UI zwQ-#U{kcv&@BNQXoq}rAa3rL!ne}X{pj`w<4m)?GR^fbRfc0j0t@>BtRe?-DGxifl z&}H(AH^S;qs`x%l*oyWqe%1EaLL5~R{)WQEV0v_sw_N-mgYAFgpy!iTmmdozek~sK zod9nf4LXy*+HH~4c|~(JaJ6|LrJE&J`1ATiZiA$9Ho& zHH$+&ehJ`((Hgu$wIGY4hg0$Bn)e|uL){@2OMe`80r zSVLLlVw@M6!Xs+=9p8vr8wtjPIj@f7K|iOD0GeM?=IY#VjsvmTvUORip~D69t?uA~O)AG7b4CAx-o93FG)6R{6&S?=jAaH2sFeI2dx( zPNWex8ll-b9tVr)*3b0nvFgbx>bu`DPT)}}9TTREg~x@;c;6o2UUn!etOO%~AvxiMbK7Y7vJarg3 zUUvDvUpw8WyOxabE{D7M*ND}Dw?^Au4BziyV(zeK`G#Z*G;+j5Zf#RecuE95mE`g#&c9P((_zNg*4`?2$Z$!`J*1ssGp{AHQsR}-T( z9DE0cv8-uR++P%kN2}7rm*T-wb`24y)Ab_}G;Qbsw;Dk;!O@j|t4gP# z&xL+0l=0iR^elOZ#iYQqE6JyZz)OBx+fmHr*j2kzu`nc6WlG`(*1|yx8tacjN-aqK zg@bnRF6l?+si93yDHuQMc(XX!*&09@Bwpn?wex#1i--z!!4De*2%(YQGndmn3or!?$&?G4q({GhO9B8Jf(JSbVDL(Zl!Ij3G@f`S7p*58n zsG$MZ6ra6hcK*&?ZBh{bg|qqHx1CBE;MKgh_K!sfjyZnWuRO_crqJff+fQQGei2A39+Hl& z^oC9ja4rgLhXdn6l7#c|surpC$XKDV(TQ_pUOnXR!QPf4XnToO)JjQ=@#*x7ZEp4c zAUqHTZ;DY?%$C8+c2#u^bS!NhtK^eZY}WA$<~nyK61?9ftT0uS5&g>(t7?NcrG~H# zwikh)uPGgQq&{z65Dlf)DIhx%cE0|q!r?15r19e^5od~lhPBS(1($b2glY?3JaZ7ivn-xhsd3RFOq|%-9zlvVG4^^GOR{6t_o 
z(|^4N!n6SaLXzcG=9=U|7yTwzE9`*#|LZ}`PcV=Ks12@16lh89CIgBIV~}3wKDrTso(b~+^y4~@}C++ z5M3s^OkrM5#0K6a@7(<0$TPqA^S_@HC{5LJ-8O7SbSajoRXeOD@%X77b&Qh6jf~;LEFV% zVz=*a85ri>_oT0+X@VwWe8BoxSf=xX8$xQk7OVwMo_&@%8({NrZDHi2^8pcsPvqmY zR|0yF#;Q&=W39!foqo^unA|PpMR`a;jCYK{VKQ44T9DHpGv7dJ1wQ5ai|*Slu6ptq zukSg1I{tKzWgCv_P?xtybL$wbXmsmgt+P+XMIJ>qHbzV5AL?u`FMU#MWr|uq zs`dx7c8!;>ZAA<-(nMdrMps0AS<~oK6<#o8P=CY|32NR9N1h{OZco%&>m|V3G^yel zx2{3iS928!49s*$^*GhLe#MEu2v=Mw^+ozbF5|76kgViX>Vo z3F5z9HkYvnz#*^ae{UcuH>;UPue^!xmAbaB+5goF(_4k1<d4AX07Z4?-MuDvmT7oYC|BfF?Zaw-rds3g(`INE_JRG zgl6fXRx$k&Lv4-iCQj7S4 ztfLgDhI}b&q#?!+^5|IU42~jGJix$=O%UfUxmGgT#!~#|Jy2`@opRoX)drEq4+*D> zk2bKVZ^u$c+nf(pQV_(#&Kn4uyIfR>)8DWShrYOBK3rzc865-X%(+bH6lj62ZA)x_ z?KE_)FDD8n zQ~Xq46{G1BhvaZIklsx3{nNBjG-0lMKBZTRmAW2t)aaoP))`U$<1g7Gan`=~Zu1Qt zl(pc+1J7QsbgY9>rG7y11ObWet;T_M$>msvW! zG<_G4I@fuSAlZMwae(_tUbWg#+r$?u1!%6*q|AK9@)b`hRP(Fjg6N~%TntEr8w`(S0SZCPH0Xof;Mz^Vx0P_#qJ`Y48=t7#vHqm^1uZ zN{tvxOma2;iXG`5!}JWS|bcf_T8c9)A*($VDY=@em6DN!I7s^+($IXCF@8P>FKGoV&y|XpI;| z#2BeA;t>|ytX*%~Ws?Rb&54=MZ^srIt7qhS_ub2k9j~xlxx9=IbC$wUd3RtjkA8&+ zKCG5a8t$p|cP<`45S@#3^o19;ONa7>j;tFS%$U7(h>>)W)4xjBVbZ4}h0X1K3vlrR zHyQqRGkkv~Yuz4*IAMYN)FOgI?CsEjk=gYWcM=A5px63f@DB0UnagV1ZnT{H)gx?l zGidF?o%`k=HtNI|`9Ly7_%i%yv&Biq$fX!K_=B3Lm1ne{m|X0us-+#57!1Y)7|#1p zp}g*t7V}wnuG1I}yuCoKr|lvwcQT$=DCug=+*t29>KEU%5Q+CQPU*!iBYx;U%zE=`Z-QhznD&hDtOnBshI z2I771><$aJu_ZqSZZr})n2f?onCX1#I(6b$AjN$6*-xE-5dqkP`L7E@xrH1syTFqA zwomPI2!+q&`GTq+vIZw}&+B2cY26lXf_-jWR9*x7gU_rH7B?X*`3Mp^w_Yl(kB7^`^7a$63wc$6T%1 zD8@{X|BBe4(o3T$u9Ihj#%rq9%VxeB{LdcUoTte;N$juKF)$noR5 zTm)U1+xez^0e^`(aF|fWRJI?sI-Gtne5t(uy0+QHCYcI1`(O#g0xUTNMr}6zF~7c3 zzbaCxU)>l3WQjV@wKw@^i|_s>7)4rFZ@G!j zosXXWOB+BKT7I;cCO7984QB)l=Befy%=CH{3|xnR6c{5yze5l@N#ux)w~92+CG(Uu zC3_DhFn!tx0Zb1>LgpM-d$q`16Ud`q!HiI)az&KuX#B3Y1~Tyb_!AIYYrW<+D8 z=c2Z#+fo?uwKnAwF$QOeya8kf(>{lX@(5*8B(w8blae56Qbr%+8%xuiFrF;edZj*+ zWe}q*U#_Iy(7aaiZX&}=tHFc_O5g0X-+1NWVgg*;+dAz#myp&lsk`M{m@7tcxo^G! 
z!A|mX`uF}2RO?r~8xr?cdC6{d5m`Uu$yn{tw0h8{2IY91>FIwzC3=2l(nU#gFmpm| zfiRACPQ{|bVZyE0g7Q1ih!Mv|qNtCI+5MA#{hD(nH4<%KZw=+JC^%N7X^l$~=K1<4 zgcUzQOyUR}OL4CkyN5(v#6ng35a1OVGHQd5(q;LQ7JYjAC_?X6XYrvbLdH5H$4_Lm zZGD7I?m1w|#gtUYyefrKOo|}q@Leg=QV9b3u)c}y2EW%#Q6aiFF<($NW$9#Gol9NM z&$SVt0RPcwVafHdkRSz;vEJ5H3xb7N_bCA#m_ z|Ipx1bzo^tl5|*NE$eGPLs-kFLV1^Vg*^>ZZV$6aN)Ev0j-&6$(6yFLET$&KicgGE zU=LfkN?ji|{~WFNMu*YS$l zdv8u3)mreFu?6?~3+mfenG)xe)-8uXOrSvgkGTZS$jlP4(V>SwF+#qLQOA$?kK@N$pD9OG-0+ z6w#unjyH)tR}_RBBfA>p_jINTd4B8UBH*&HYor?>)jP{9?6KCQxXpYiCU^CdLEY}D z+%v*Tby`-UdqWzbvjv^|n`;Mq1$dSg3!f=9t3wkKWFF6q|y%R~FOV%@F`(FQ5|pA_Q0iEW#|2;Lc8Pq0!wwWj%+0%dpXjs|38^XNwFum4%D>n7{X|uPD=Wb zM(lgzP8=m4Ntj#Kp^*Z!`o|*Uk`f36_M%;qf}o9IPy}O2BL$ao?S1Hq(vzw%{xP-o z#`d-a@(H!}m-BEb7En99eIu$P6w@Vj-gZGaryaPc_?dcXsffbGT?GvfH3w62SJCt= zXe|?5THi;vx$2?)C&K)-)PJ5BIeNFq2xOBV{Z2K{b2(-F;?`>TC9Lx)WKj=Ik9@S7 zd9wk%&jo%2IO=(#Yq=6}r0f#>%>nD)b@WXc=vZI0!;Gny&##`gLw%DiOtwE`vkY!0 z?yt_s1@kQ6g+;tuyA{c{y1Z^_pz^^pzPakMN-XTY{W(ofW9t~Z8RW{K|1v(Vt*xQe zzx7R>fNBzpu9QTf&8H*{sUGY86&h!LI4aV7?L(y4=iJQ$*m7xHM@mtelX~VM;;7pC zT!>7{-Km!oqF97-)KJBW0GyyE)9+uEr?l2Gcvbe4Gz3{i{0uE-lyg1k2kKPe!6C43 z=xyCjNa`w#wQf2&y0BZA>7NSS-RSr=8f*bO28*<$g)CJZlC0Qg{_zY)dv3|o$osbR zt5Pg!$!E~6|HIp(4Cq#64(p1 zldub_fV(6$)NaoJZ?>6>5MPE#w8PrZ1VuLcpHEef!ZFCiaxY>#*iNy;P+&{!xoUq; z;wF>Oj_iKGyknvh`=UBDBljY1-WWFvmx8RX9VwwN=3K-X|@cR3-Xvty}JviH#A$frpX1O|Utps29Ffa@dp^5RXd>mE{U8N-XlKoNA#Ge}Ds z{3~iRfraY<&4W=}d6up*fvDgfh_vqc{!)v1rY~sgqu7z_?xp*{T1VRLIeF{9-e0eg z{A!c~8!S#wzg;paj^PCm&_I0lmC;>i@~G7fIl@F(?sy1{n(~#+$QyK#velz7-q=|j zi|Kk%#=3~wra50?1A(Wmk8%CQmehw{yXK=yK;sK>7P4>OB}IWEYP*N);5^PREx8x7 zvC$lKuCh`CcPs_Fmg7fzims<_v5?%+wH5dHzPtdaDQBBc;G%Usq=r1UyX;ya9~bA2 zu!@oN!T^Q6p#5CKlFz`7vC+C*rWrESBU}A>xK{t&m5+^K{%Z=T?asTRpkq;b!rZ-; zkd_HM2|tYLkpFT}*|DAjRSj6@tBlvC*7rUHGHVRN8JbYnipBaM|DUjX8Lx#;vSN}i zL^8g&4_$qB#XKOtVPzd&%zn{E>ZtAn(EStT-naAcKbB_hYogxZx79=f+oI9+jHmsP zjoz9oWZjd4IH00k2mL!lf%|_K@=~Q!sYw zH7so)jc$#7%Fm!?1V~YUM%pgtM;Y*zSqpsAAnDio`KA2~cIAh`ZzcjV{fJplw$Ma) zwP1jO_CRMeOnHBs!BBATx~LW{04%2-#z>X$EEG@MhB=?jj7PsJh%v#0YM?Ga>uLAZ5T9wcfd+Oukc^ zC}OOoR0`B_;nqHgn(G@H637T8g*mzJgzRn1F+lrQ7lR`GeYv*upv;87wZL8Soksiu z@(tYVi38If$~cp8TT1@;NC|1iu^n1ubb`Wx7P)dJ%}%_nt+C;xf4ihwYoZ9}kPd`M za&~=H3FHOgH!Pj1P_YGm%ORjs3a&1^HO0^kK37_{&QU&w={!QqS?3L$xSC>hy&jV7 zBgX=&XrCib@q)}uHV&yL-XU2LI-1~^B31x#TBbc6_gqTZtsAgb4qgSb{`U7?-_t_} zns}G4?NlJRbRiB1`JT>Xm%vs>@eYdtV5_>^wMfB_b#-DDIZ*OX4eG|f4N=lQNNo9H zX$Mbm*jp?|gIGwe{GOTdxNq*&^2w2{I!Qhz2%Pvo*nxIpk?1p-j0a}p@3KMOmDZd) zHT{m3qr@XFVS00cy}2N)VsNSt6*ZUZY9k* zZcIt_0bDX*<(ggGLq&wo_F`NoD|P7UR*?fx&Gg>h)pgB1N*^k$CK_hNH9S9<+n{;E z1dr}7KM*9QWc!rp)-$6@eqWvH`vs8$5`L{IJR5WAc=3ho9l51#H36R3Br)9QlzkdT z>+AUuNJw(3mzRw}&QI`(tEiFhiQDz$shyE=p9AHn?Ry*!gEXW4a+!>oh_~Be=XNe0 zRH}08`xXMfcVKrCPXf0E4g^NUl)<%K z(3LBTKLtUq^sF1yJF7|?S^57yc$7c2s{G^{&~uY>osQ`lx}H;(lDz!IWXqN0r4(hq? zd-^|piNmy+{wsN>H_$KwtSIx}_d9rRa6=c^+)(H8I*;-U!w_*moMV21kkQKmTc*^U zw$}PXDDOQVPHo7)vv=IT^}2!FGrfy|^MUNJc0iJrrS}(Y+k40+u2~6TMGh*9k9LiV zJ*76_83PvholkGD*pgn*_REytdhn>JBDxTU;+q4A@I(BC^WeZ&=~|^pz1>Swad*zn@LmX&*oQ*-hbtg}Q_` zuKD=UXB%{h#;)&c*R~hbN80w^G~hlzJgTQV+R%`xP)-nrS~pvlT{h42d;J&+-8cW? z_buMa_iCIK4G>gCu0p>{JUS9#`e!Hw60_ zqf;3xx!XA)jgD53yBCoOgKIgi3a!r*KI)(WT{rC!0O&A~I*a#-7x-&f?b#jCm`TA1(|q>W^hmMPWDLs))^UfLf48W~yUevrS=O!QM! 
zoyvj68=HXH!KxM1Eege%@x8$iS!t{M;|3e2)P4M$wV~fuMN05LHSC^G2ecC3iJAIH z0xs&v1`k062j!RC4I?&-7?0)d?TGsU=##6VUTNFR4QtR^o1o$zPD7Td7plEm zD4=z$M^;r@8+^_4Te#zyk+T?JBhh=&x{dY>ooQYuA~nyI9?uXk|HZehP9zCHhsOp_ zb?s)8WWNm_2W)%|*8lw)|MzK3{eQNp_BfsSuNX+-_wi|ZVF{(E_{;B&lw*!oI=;V> zyshKC+jIQXyjnh9DNaoN{z2s#l1HLjz&-W2Ox8xExhV2{6{X~BGBA|C$`xaV+z+)q zdC39%m2Ze{7Ay3g2s_J`jL+dhM>VYnfIF0=Yh~a6`|nu*4g+gSG|{|Tq1`WJR6&|> zt-|5caZW(ucE)X?`CbR;G0SW8+|K*C@;494c0XvVSVtMN|kFxbXEyX*0oCjl8X}}!mMK=R% zo~&RW!X9VfjAYV}eT}~Vf*q6HL@wEVIFg8-5#?Dj*s^k~9E40lmcXsX7mR}s{}tF+ zBoDa8(|gOGM1+qD6{4=B2o6fWg5r5sWQg?16pYOrmd3ICO+chok{BQqd}|}E9`}@{vTz2Dzn1Y1{zinLni+!0@D$8?Y zA628E9oCB4_SXLHZ#k*lS6%O`UOPr3<}iw=#@vnM&uRf5M|@Ym)R>nW%j>V`Rm2Ho*j^BoyU=K zKURBSZrzq1_`XFw;+{r6fG!Xrx?E(c#R|PKy&4Qc06gj0qce1yjXZ~qWn*xgRPzI5 z4ud_+V*PE}UZOh~utDgB9QuC&D-PL+tseq(CJ%d)Tm-ZpdsqL9LwRd6(3J5AYcSH7 zE1kwk65jnCmi~)%AGZcjTVSd>si439an-inXJgmU=N4-xdVW)P{dluMXlSy>0JI7H zgUutr3fFSkUt@f|rwgtrU!-+RjHWjhHEOJsu8(&!$(vDX(;=RO(*@cSya#=2Y^<@D zd0VTh?2E0jR>y~UmiU~dQ2zxUIGnKwo3g)-`G4c<*rGm2KaM=&c?HT?Is1OpakM8I z%tY9B^96fM0Eta_Jnu+ke~S&)$37YIk6{MvPG&nA=z4BpwfDzE^cAIX$29o& zOB4Fg^=NZI$MH5Lz~e4yOn*ox35u;aAQRaXY+j4J6PU*$rfggLy#Jiz8~30D#XIL* ztq0qfOr+Y(F@@|T+CtzID8`D#PTzk;xc~V}na{DNug5mRw9e)|Qo95igzV0aw^%+J zc{cGeGiBOrC)|fE9g>5z23^595d4xTy>_FM^L2QfUK1t5>%`DnOMr(+DP=@SNC~&_bpX z=jc=CEsAiqEqWHcV336uGUxBfC{3VADsq;!PKs1G?;wluuUWLa_R)hg1y&xXz+j#i zZ-i(!XE}3G{~d<+K(|p>cZli6T207^wQlqGNS}}2B`vBF86n{asCO@(CGqEltS40s z0M(k^)&@jRGQ6<`{yqVI-+b5EH7f+6j%UnbZ=M*CGQX$;U8uh$B%rh@MkZ;bCYYA( zQVh7Q$pLAcz{wn4J%SSXV>lqP`>+?npbYtH2b=&U@4)OguLeKPHndF40LbW3jaB4m;sK0B zM$8qBk9U4t9#|{)ZQS_KS!_VbcO9#$B8W`D8b?4mn8qD^a_-z4?>bT#0@1C8Su4}z;bQX{9)*e zN)*$a^I2XOcRLX^qy)^K;-d1N|k_poCU=iA6v;3MJnk81!`VlH% z??>Wgpw!Dm)9j>RN#746*9NpH{hbFM4WsX^0{g>DDG;X~4uKA80YB!q>nlB}NkFK1 zmMMrwncq!&_Ji?7XSlw$H`mU>!P^DLNwv}=DnHrt{CLl;{m|w`yPk_XC6s59ZjkFw zxLT@a0&|dXAYi2fKK=8fpt&24fta2jnOR)9?x6v2yZ#t!@P^-VbFZ#jog@I<4YC2> zIhu5&mTeo9?czd_{(g% zb%Z&On7wRZJy>v%M6TWOmagI(1yZznJ|O+dNFee8;q7#MMnc?!8(Vx2Dh7J?-U%+v z+~RKzi6T524aW~L$!fUrhZZL}&`C!g?cN1_`D$99eMLsy*}TIyv(*oN4mpSS!<&NZ zBP-HJdAHEje*nv6DrKx$HaLK_3}x8?qykl{#2-nL2eqi+1(@ILX*Sy~-XC^RgVGPu zx+Is_*p8Cux8H*$53#2cG;@HF`@4ky_#{{`5FJp)>GtDc6pQH<4rm$3w!GqE{O z-P(EIha%gWpM@Q2$*L>RW9QAA|1E7T2yK|?x!)f;a~ec}gx%})*ae3cy>wgD^8>ys zTj79uRQ}JbpVRb}x-h}=lPytL9p&ZLnt^W`^H{Ye!y+`DgSJHM8GK}lrgQFHd8XiB z%|<`rUkbY)&UDbK)St8iV3{Hn;@$7AUshYq=AN-D847|Y0wpq2d_Pe~giupU;H}Tw z@&|oqO51bM{=1O?(RYEhlcq=b>H)$%r!$Ik<+cZisYGDmid`JDIx;FvFZjL2YsUBA zi|!R{#fH&%!q2&DUUDO(bNsuIJ|NpUtE=PO#rJWC&YDd=2vn1*yz|bB`73M&)#LkF zzKn4dSO3T7MlaqMe$8Sp^Zx69mU(|8%s%?U_U@YOhxr~ zi>{Vt*HuE#g$G;?ifsk5#1l9lXv%eK_qsjD2dXxj2*3AEX5)fvc=A&1ZuNxC^Wshv z$GNm`v^mn~&s1+|5BXMu@gVHR(#hb@c=lwgjZ|ScojDxqWB#xyqqSnW)uFYZ2~l7F zN{EvhUm4B21m&N*lBm*TD3PPQ8oWc<4XfRxwM^$;p0IFpx`=#dHF5;~NJ&oEn)7)A zIesMKsV*IW8vzt`8U%jE6+q^$Jpr|(?UDO&r2uE+~^yFz)UTSB+Sg zU1{xhO{>L2NtPZh$xWx#hN3M!I%k`@)cOdy-awwYr^o zOR0E>7cb&hJ>SeCIYOO|IrZ2eXf9H?JE&Gs+UU`)YTkj)cBU4^g2p+_>rP%A<8~oa z@fPA&y?vl8YI~-$ z^X)~;0=7^7OK0>>zqCYhZY6<`CtuIyCP1z{7E+l@%ya&{JzN}O+2R|4{Oq3ox~_GN zfsNdkcylB5&i9+Ij6=6I+066^`k4FW8iXIAk@Ah%lKXpX=j5czZ1|6}S=04G0pAS4 zwS6=RQ%rk!JK$=$_+&OI`%!xijVm!{7D#Oy`76xbf_$1lZXlx)quoMcjVd1iZS!JB z2nyTVY8!Vw0d%7bIEQ=7;2hrz8WtLm_EGpu{>k6XhD99I3%61qlFwDP8O$}9HOwVS z6b&|x&)~Yb74r|wbdT0Ows0wYofIAUgEh}$;8q+8%|?+O?@vEhN=JedrffzT%mfZx zt5-RFK7ffXVLsql z9ojC#K6Sgl{97U5Dy7-e@1l-MgV`#ZPI0LP-8=WXt-P(4#}!b7tekJ#eqVRKhkh@# zUrohTCE@%yt{w;*K{=bkhTlbT`#kso??BjKn5L!i=zdYuBdW`>WJKRRaHADn4*eMp z+21w|YZQiFJ(ki-Uuuhd+vhb(e37DqiRSNFQ4iKtJ?7ta%QV5O+q~g*^Bi=ciVF`!Y4s1P-BR@ieC{Ht>LbzS 
z+&#G`@x|CxIpt16<%hPMiO>Onw3Z!P87c*R^u!iNcAc2ed zdLu#bot|j{R94)8W8^63OO)vrS4*k85i7~_{tcE7s(C(I*$sgT!lsDms_Czi%)Mk6 z8L9s=cl!@8n_)sSpgvE7Ab#{wQ+l$?v}=xY>3Hl0a9TEn6EkM#8F!n>?d;BuP1OcG zEW=1$ok8g*4z&x&*op#RE|(@HmmJjgU70$2LB;iM|33TOdxmzg6Nqb@H(M|C>B0PF zbo_UqGprax8J?ow8?HO}Y;B3tUj2FFo(Y(UIFiMDc5;#H*{=}1f+y1R(-Af{kfzNZ z+^7Nl$526gRM#(X$hM8e&A?2PW=lZ3f;~wn+Q2Q~dj*Y5g#TJwbWMunGk%WT*{H^g zrVD+e=OcfKz~3=OB4&%R1(wKLvqoE;iwJPnSj}!z6q7pHsjJF4D}6ESfm1f<)_Tc% zmR7M+G%HiS5@r!7Qq8J(nyLeT@90Re=c4*}B~s2pv%-av^FsDSRr{`%9FI2Q|3Mr- z*e~DO*w9KRm2>PggxQ81$-C7b>I0=V8J8uAZ?5+$g+b%bQ?t(di&U}P_S^USqiYYA zjb1sM2c#gmZ5gnzi<`0Sk5uE8-MYd*rsvmBstavbD4RM&`6`@6Nr_uXiAj&pXdZs{ zT<2c@+Hg?iB>GEz_{s3;fC!-KB%1z1R$$lRsknJ8EwSP;5@CWISgVGcp_?`OU|sBf z%)9#KwPaJk>cJ-6%B{CghQi{F%Pkk3^pCx1pV(O8V4A{-xY;!@kAiKcLeZ(cU)ftvQQ&^ohe|7NBN+bP}HD7j zgP7~dWB5nF&1hh6+TcOErKH8~g~cu#U!J9W?o6X3xTMe6E;aJ-kzo{#IMi=+7shLl zaz|_V@@P=KpqoHtp2IqBLp7VSBMaRDBQ1Riv(GB7pgv6UtSHGsdWJplr%h)!D1Js(OYVPXi zGOY#MCD|6(Y7Do+?v;QauAy;-h{Od>OFs`OZC>kRGdD;rt}a#3Y6Fo)I z(&$BIiz`F@y}R`3*UkP50X0{jZ&*OBy$YdVIP6Dp`is66ptxary~E#OMHHGYd=iJk zSuZfkY1dh~v)NPB=$_}UZ*X}ytsm~RUIscn@1OVNn}MLez=VuhRt~e2lbR+O)@a9` zA$ymz^TDC050}I_5h1Y!#-yyZ!}IMFu_wy>5o-m_Cq5tM50c91mnRU&b7!JPZh&fa zeD(&%#}tG~277HDhgo@)o|VUr@hk=C>g;Z$Sc1lz2FW4$Yp?RofUZ8voNXMRO;Cu^ zkh8NtBE6&Fx&>@;8_>;(E9~~8dcsriN86k#|gw8m~UF-;|HHT7pnO($4S1m z_ms+8BLIsKZcHg$A9l=@uCj~MeNYFevr*nB#S-NaZZ&t_d+Sbsd4xt@O>GM+e-Xx? z{h+&YFJ+hfdT~>SQ{JaZK4Hui*{e@j$}4o^-L;_Fdd7cKBeGkvTS428iA*7+SZX+` z(R~2`q&Z`SA#>Uw0q@K!IY^4B7c~Br4nj<%+08|fPS%E*kOtk;r~vQcpBl|}rQ3A2 zg3i>tdDN~Z%oD97$TA&>C3F2dp7JE1k-|bhk&qNaLosm<#>wOqadZ`AI zVEdNaX4mT}#EB%glvo;o>k{X1zuzbQVBjG;pB6h2%kuzsYn?c7Sqk#HV#gU*RipXG zhN@9)2*>`GJ0>5^1YfhmG=(R2p~+x~KA~on@cjmt&uT$-Ik!JfDPps%YK;eQw3HZ? z8zlC@L#ss#9_P>^4}0%!r>Gh(9++_RUSzRP(3Q<7Y*MVdx6^vMQAibU z=#`HH4=@F?gc@(#y(oga!oSH@3i{DOmp|x_%pKnAKwwYnElddN%Tz-Pg3 zH!eC&zpgX|)v^us1XX4xBZOgAt|NX-3~+5E1JfAi0qKY{hqO&H(f--D{l=Hr%dem+3m<-1JRaV`dl z1~(dx(GVP?RSCi=>{8L%a4t8m0Z${rEG+G8xCb>jpy1Y4$z$Q?kOU zigl++4dsu6&=TE#PJ~~>`hRAhi1oiP@1Y!SabeP%*KR&g^W(|a^^rMe;b0)}tz>W( zY-qAYTA(Md-Jac;$G#E%jZhB=z++7+2RQTkUg%yOpzy$778`&LYNHm%xQ4UZ+=co1 zbCs633tN~aqY>h}Z?=8)i*!KtWq{ZV?c&I93pgPhwyLAMEP1?{|_HJ}sJYIf?O zfoj1tf&VG(-VpwAIx6!3`pkbAp5PE`-d{vQf$StRJG=d(Lzmp=lsjyO*%_Iksmpnn zn>p{D~Y530%*8qAobg$^=bptU? 
zzs}x_r>$U+!_QkT4dLRBq|nfCtJ~kgB(?jp@|sV;L38okw9=7^eH?7nn0ng~Y5l&a z(s;OZGDVpT&!^=ZJ0(2Zd%H0lDw~z_B*AAQL6)E!F8-Pn`m!Lof;p|HP|tTpU4F{%X*dj2y^Wb1pNqk4cyY}d>{(vUzW_sALiGMq zU}PSI!KMxV#zSmQ8zNT=&lUP#-1)skN%{k-BBJRJPhzu`1o;+aKCZicIoMZ%szfv0 zm|4U=#DQ#&MmGgaF#k+q#jkI=P=}FqgtVhcf9uy)E=8k`ErBmvA&0o_g%5H{h~z|S zB?7Vy-<5AL70(9QTS0|5M+5zoPFQkbb}3;E@sU%_00}@n35ebvos@q7CVQ*LD2oM( zg`YlgtGM1QboA9%c+dL00p3`{Sv!MTPw?f%Lx0X}L>l(kE*jk-1>%gVi{)DaMS{5l ztKp~6rUAbw+qUC00bH@!LNn;sPQO+@&WcAs9R5lcS)iLm`Xauf??2b|;l!Kl@iphxpzS2#1XO z?r4$d^HzN=@e%t#`0_oCRMKua*hZE(F3r z0W<}e&^J6CpnR7yy$T7Dwm>kt%+fm#sDxmH+U+um4$Zrl*%ss^1~nMAPw5+5jQ=g* z=2-IZdg{Y}JfY;KF;n%?sB?t}Q{B^5t&Cibb+R^WOg;S^h*5Cs2?eHq2>v9(Bcp)CQmkbGs&+<8zchV5VPig*3zPp%zP8_=-gV;l1voE(4l1t0y- z=H2^$Wb?9NjqojhXE-tO^!FB-GrDZ+Kv(K-{^~J?p$CT69cZ*=Ud@(fC%v1lW}FYO zk@r|gHs({$!e28WH=vthLv8jM&7bzm|Hjn>?NL`>QuUrbHax^u$Zg7Y*{p+^(7+}3 z6pk74obi;n#Foza!FSJ|W~O|BEaap3_293$%v&MTUSka;*v7ED0OqN7A+~|X<9&PI_jlfNKEHF$=X3shnx>w)x9|P^Ue|SB_f2W^Tfi3=qW>T&L741gJd)(X zYtfHgO1c@P@=I~=s`boq{fuX4cr3ag1Ct45YV|;B(b8kt){~H15pww}PcuM~CWgj+ zgyBXSt*xpk#Z~|WP1XhjimWXK90CA*&t)0x+N@j4A*Y;Jk%X@Ym2Ei9^2GsRhYl z!Zi4PEAbr6w(S|PYHP8BAWG)&gR2x=yOMcx-(ASCRws}DOtWn6%aUf0L}0T zzKs(BP~0O(!@Pc|*up;5tNFT-#s2Fh7B4O@13tt9I*TAv{_9h=(n7nI=b1lyX%!I4 zD0>to8TTxNOTBLfv*~~sINAcpAA4Z`lqS;f>@pRysb~f+P)Xw1en7VV zYs&S=!qSq0{TcL}0rub*Kl&9e1u^-d+tmlT9iM|kqwREZBKCDEOnNp5oab%+7&?<;{cw*?>#_7EpD zj-Tw45PHyI=X>1QcuGIPbf?sO#JD(E=(xkO_+&IC820>ZF|Ko~=p(j{wZr z!0**{l~t30T$?s3^Hp|)Zv|LUc5X=$5aRn?E6cB1$tri82gjk`td`(V0iH7jbs0F7 zwf1)?X|h==Ffvm&_Ky;J9xz*+2=-1F2ZGMUfc^Du!y=<#4LhlSSCW2sP_p^J&8F&t z=Ll_O$B0Hq2KD*zF%XuR<;qW$V_04n4&^Gy?DEcOu-G;#ea~!-)1pD6O&_SlhpKkz zAELme7~X*$W>Exz>tF9b=?Od{hqL)*Cx=IOtM&2<*Wv-kO>m1XSMIa^k>akz~H9r#1`&nq} z+P_j$U)?DYjbME2bo}vv=d#;V!f{>^c)q0rQXKU{IhUNy%sz0ByxCTvqAN7xBs%QQ z-Lq#Py{Z+kqx&MWPb(ziZsnoq{-0$U~fzv{e`6ts+qB$X-&zjQPQw1 zEG%|pe8Vn7QN{$|u-$dbigt|L!!7J$p1OB^NR1KFx|cdfyOFyDSV7jN+ME7i`5Txx2U|cXVOss)^xi5`qdgh{qp?ABNdu|et-3f_{GpYviFu! zh9UhWSIqISMSs5NAo$sgmGtOV%bfIY@V|FoU+H3^-V5Hb@+hL;6nx_mMYV#@&M)Ys|vOs{W5^ITZ%Z(;Hj&p4XeXIQ4bX z8hJRG|4zGR0|Bw_=Q9HF&*7X~M8SClY+Rh4x)Q{5$AOcVwlRS!<0>?ueM#)&9nDL# zNIF6T{v*#*0G7{b3l`G!x4gl$m90f>g>}b(tyYJsiU{_F>qw&Ay1mqTU!y(5(9??z z?Dhe`E#?K^_>FZ%u5{4_R{K65%MS`Sp8($HNP!csoD7`-@&E;)=%NK%p4)fq;={Yy zw^%6Q7MC0QEqe|(@#BD80cc(nGO+{>l_I$A<(R}vCaX=q;ACDpo%E>7EpHru!@k5O ziDN&lHGxf_k25qX=NqJ-OTZA`&ax>y{HDo&lZ9Ha`C*$qR3&yUU&TEC z%RXb@33&H3R*7Wd=xQ`jQhaNj@wxsXc+}aX+_0(%&Q}keg-T_7G?@n=k#ecz=mLP{ zZYGabttRg_shn3ThiRH(cHh*g3hK|r#qU$AvM?Ii=A0Zoi`619-300eF{!p`9*C*e zan}Sszyy1n>I);>0>}9rQ6-kyZT$?V{q?1?lNMJ7VLo5)Z-1w_wRf$B4TGyVgZq)O zV^1J#OT)+WUFK4&Yk+;$iw@>WonP1wVqB%#=;UR?(iuX$p=ra^WOTTs^9i$Kt4aZ# zO}J5CfTMJf7i~-5y?t4S20?!hQX#;3omZjA5)i`+iX)5;furi_=r#8XVdt}z{gxkc zE!!Lg2aRf6ed7lQFQL+2mE@vx!Xx|YS=H0zKOY=tLO37B^IIdL34Sk7*RhSP1A|a) zvK0_4*+Ach(XDad%)71tg4(uABtB>T)R`L;n^YJ$Yz)#K`$J}5FM2np>C7+pVi{ha zV4v(*XzWt{Yut)hV5POUT>eusHI1|H7bw?uN#CpBK?b_tU8d*AfWDo};?(-hfYGCpQGh7Kk zGvvz$gQS3o8(?MkG@CD>dy(iMYh+$>4#;2LS$YpM9@>mG?806C86t>C+w$e{oxb?C zLilNK>gXzTH1iI^HOWd|&DC0PQzV~8RTS5qSJq*f7A41W-ft}~)%ZPj4p$yI2 zDysp-N(WH@b`@Pb^DRoxunlyAitQF&OZ3pmuliy>!(NFh(etk#wN>|&=35BjdcTlv ztUfEoO)q=Xau%lex~e&aQX&*ZN%x}@A5gk7V{&OS1!nV zY6U>owAnD!_h9=9i@cP;aj4d4nP9p-CP+R~JcyuzJHLDAEEyb0oUA53=fsN~j961F z;i?VrVBd5402vrSZ8keHuXR^ob?ssxQk1WLsm`giB@NcEf%4qVTQ{%Edc&?r@7cRw z(eA)%QGV#)CN>GbF$zG`$CuME2*VK)_Y9TnUid;u=*ln3`f?p2gTbYV#NrBF@*?Iy zvQQ`|FHltq7BamhCIARA*}PYDQurO%_HTa);?`L2P}! 
zCJiYcA1dqhA59G$0q$AjjZBW51m#Q|ndWcNS{lniFd4vfWNd4q$DYv0fU3*k!_2m1luUAIK17m$I|(&^;J zx)p+7LtATYht3Ft9=E5z1m9hwyj+Dj8K;(-Of#)(Y+CApjtB0%+hFDl3|D`(C!2Ti8bz z3flWXSwXO?jeR%Y>g6rqB-rM2KAD%FtujH;CipH4h^jP$T<69NM_g*IVCP81NpCwJEv3GLE5lO7;{y*$ zhQXOmM_#^qg;0Kvu+BEt_bofbbF7@-tGATG15W)^=;I>OPi@~(H13#)@>rYoJ8l0O z)YDykpG!cug~`bG&8e5Ja1mzbe^gt@&(sW7L+7lE5QScAG@FpcNvMEdtmJkK;ydEw z-9H3_l(+>=1B%k4CvSC$*&~pSD16sY%S=F5Z`3Ga0ov*rZ11cL2yrhv6Qh-V5-O)99L+p1dvUxbz- zn!R{%uXCWeJo*x)Woi-gI(aVYXjvMW2$*|a*;OI z&r6F5P25}RA1<*%2J=z3d-iE&{xe{Pv-IsQJaO2LMtkEm|61FU>`&`o`pJQ_tl-#u zT$~F>`n3sp%=;{AaoNPLrA`Oq+b?ZaF}Jy45VLxXjQB~2wz&2*dzasaUUKhVs z)6oVOU1#3V2z*kN#&(PFrvSIr7R z?R;vc2}P1^a1q3*uKaxEXy{G10PsvC=j{wx@;U{xez3f!Lfk1Nu(k>>mz%hB1K{Oe zdfVQN=|tj5_*Zf91Kg2+hQ&61{)kGzz(Pt|ZUD`R$Pev|GW zHU@^6=tGZ`9}o-Lvq`pCYkSE(QSp-^>wsO%`=Y;V)()>V2}=IiZ)4lTb6n(zJj(Lq zftLEV#z7!Fi{u`08o2NUs_@!t54$I6A*{|QDPW2d9_Z5#_@;KrtWOgTfct>d~HT`j^8t0X5Ds>ILkg; z+j*qro!?p2w5ZQrq=hela~y*f zXyFp+b=%`bbnZems;G3Xx}4>OM|}P2`b;WE*U>XxB>@IU(#0h9@7*jSCi3#$%Y*w# z3I`K}_t;d3OYHx4E+IXPRr=)@j+cVkXEZ`KZ>>3ZMJsGO&;FAVG3-B%eNBHDuyrfk zTzgv^&aKS=Q)S@QJ5sEdfta2xw)<3mYfkyPUgh=SNqhYnKDKvocp<0Hq~Yd zFC%rnlbAV!x*;#E)I-myt$PKmc2B$yPZvz!k99U?uxj3Rp5R-jb|~jPhWKNzO$T*D z1$T3)NKk`0gU5gxz2kXrfCP*T%vj$m>ojlLY%*VcO^*Gv$j4>itNf}&C!M#c&Pp8Z zTO!t*pHH#g)d-8oO>qB`$jZL#U8=Q9>9t1sUZl4kN%D$MOGO&M%;6MQ7E`2&Ow&cH@kLGI9diCy>RUT}Ixi5kE91-!3aZGmGe zgHV$QM9^cwT!B`zmW;{MBpvjEPW;2~CDDO+k?V&S?roAJn~(DG8k5=ACa1^c6l?cR ztbKveHCrg|(E;XECHAC5wAVEO3=sQtCm!uSk4InTGCtfOso#BP*fBHp#HBIad~k~9 zQ=^9sOqaWITwu;2$*9iQvOehe6~=qFVCkrdO!)zaS0}JaJ*P0yp95(RgNn5eeEgUw zm$d&2`Ks}b?c-$bXkYD9D9LejRPc4xUfF#aQ6V zn@I0ic4vrS<+eDE@MH0bX9Zi5S9Hhgh&qR`Lksq~i9^e8-sR3+LIoXrvd;-+I4*KD z2E8(s*}Hifcsl3pa!{6P(ZF)xMcg_sDM&(v!b@}V8&g><@3S+EAE7YIN{dLFic67J z63ds4$KZC-fsb%Jb#UOV?7b%TedTbz1Kjm+w8KZSMES3E`!OqBnU8?Lxl-ACU1&L~ zw35WCP}TPt@eSFo_mjifSGKZggf^b*yY*4s7Dg+|0mfIa1GAa+cU~5F0GS$_Mhxe^ zn_GRtirlFRWU%>xNx^}EUp#=y!2p|om&zzl=7=eWKdAK;z8zUSFQC`8ZEzZ#2-Hxn zO6u_A()2A+n+X{550 zb?d3s+4{D&qw?_-q{MNo0F74mE#nFEk$^xX_H}_?f5&sOH4-yH;ilC@b-Yh4X5`d{ zn$-dGc?ENt#;%?wZ>PXlDdak&z3uJ*Lq_^+4n4SO<@shWFDLvfv7Af4rjb=s>cIbe zby`mh-F#V>)*WyIt zcCUCeaE)(ngeB$@fl!jCivpYBU-c-98^{c}!tgq48;Vo>>AGxjSLUO-c;dNfJHNJM zCw!t?d>eF%sRs+(Ty7XLp53;9ceUjrxenx2EvqWzYs$7KckXZ=0mkNr7_MY;-F+wQ zT+m!Vu77YaWff@DjVB^W5-bcaztO3Z$wRaWWXjIz)~P*I_u1RyY~!Cxy?Wn z4jw`fYVTq zH@c!Ho-j>lO%4v+dLtQe`q5|AEp<|x*pbV_Kt^@y%Df8j7$Pd}bZ#si$)?*0;(~tl zRRwldNSW=e5A~!OdS|QW-gSNK5jQ#4cLPLc6={{LHhRvyiF05t6-(+&JMj-4=o@jhsLp z>Y`1UQLbB+&~>8@uq_Mzw8$e`4&KwJB?|qr(N;~>myJPmW!N1?ZR%-hyG&`(_$1C3 zfD{x63WJv=*~+h0Jg*7^=M%0=4eO)T)%gNnU?=S{0Es-AE1!w*N&CR2vZQo;LJ^mx znlk#?>v7Y?ezU~TPsR#?jk8Cw`>6dy-r|D8^<`7h^bWa8C`Y>Qnjpw5BR(=b=(V7O zV=)+A=M%J=C*(oW$`8VxbO!UE?4aDyi%07(2?sHON-89Wi?xXt(nI|_nah>< z6%?ST4AE&@-T2&YSR!ILbc0kYQe1$DeGGHbU7*)O-uer>HUh+W`9y!k<7hdvQ zpLo@l#J5sxe(zR~vqF@#cM@_QBA9Z98c&gl{%OE4$LnfUn-AS;-%23<2mJUD{kyYl zRm!Hs$(}BxtcB;}m9nqUxmZ?v=jf7iKv(Muz7_QmiO(82W2pya>!>&G_wHs97={weSjd9bx(i z))Dwq3pOC>u`rZX`77j#C9Ot#Z9MB^PjI!(Val_{*XEDj^Z}YHmG;xp6hXgB1eD-) zbMJmzD%WFc0V;r83K==a5IIha9DB9wMsY0vyPhxkoJ}k_FQRb@Vu^&~j42G`-Sg`L zy2M`b^tjz-hX;(LhObeTrlVn3z4%bZRB$^)j@*4JwcrM$noHD1z;F3w^t@J6doMUd zxX?2|&f}j6QhgzN9wy&JJX}{&3Nm7NV$boDi{|A`*iG-y`KLYCo_6K$yvGB4wfur% zrQ7B_o@b6dZws^&fZz6Voo;*Mxbzl7rPpaxl_(rS?fD@$bT71L;z!7`;GS;+@i)|B zNG}W(vU>arq_|v;BCfQ%;WO>o^jgt)9^JeDxo-^%FM~yAFOZxxxE3)HqI{50%jHty zuP%((eQq#^YOkbTL{Zx^J%?H<7e6`CA$W$C;ORBfDBjx$Pn*9lYT!5_!?z7(Y*21O z(Neost5f#+!c{31{ZS5%t(7oxOlVQ#>KG5!_^Iw!gO(bu7_eo1`hLlWer>vU?Y$J1 zr)vJenX=lzt5wCwC85A8z2sR+qk#GZ-{A1eEVx9B!*v94(KIo%#G58`sr*gPoCu6| 
zE*M6uiRKw+OD~_Oq_w!^p&lPVs*dU&nd*_$o1QByowQe5myVeup1P4#d^@K}%X3*! zc&00=jH+>#R1tGWP$$haI{IBAn?x7#wc-}Ll=UrE!f_4oJqDj2k1K3CCi9#wthueZ z9u7THkVoF$HE~@j47|eDF|_=m#js5W&$h)extJkQ7rIE}bQ>4G8<}g~RQ@^nGgm-g zb=GI=vZ|n~QeUoBMk1xKSBJo4%T@UHMlP53gQ8!nLkr10Q`U;^e(VvRKIXQ*=$5Te zLtt`t}^7$cuCOU#i**b5C6OVI?gj@N6n#HTO6LgfND7$2aCg`m~@6vkZ}*`&*=E( zq{IB!ONYod8RJ%fdC6XVIFwpvv6ptm=BqNd_>=P_N4@)ODJ{OV!sD#bRgYB5&> z8y|c$QFE#^27B8RWMLh?4lioovoK2UeJzAIhEOWZV5@Wc-7R?N(@8q?je3wea7~Xs z1J_>mPt(Ha8iFohKbu%S=Vw{N_Y<5zb}0T9$|0eA&DTaix74a-rmH0hHXWy|jshu} zfm?neLV4iU;i`6D@JDlg_ixELvakU${6W&#hX(D6S4#*xBWDNVLNKy%#5U35)|MV>vcI3hvVrL7u1LCuCN>t6 zCZL+Q1<^w1c_~9(H0|Z$mjh|ZGx4zAXeEvoZkprUmY{mK*3+J^pRb?B*r*}&DphZ9 z45SiSD>-*dJTnSVmy7vfDeE083G{ls(^k{|1$nT(i$M$$G>W*IG`K3gE%BPyKptIZ&8Jl`xupT?yioZH}t}n1dkTJ%Ne+R3LX2 zhGg0?ID%eMm!J9jnW8FC&HKNRRP0|U2_ra*%eZJQCJs?x>%Ee^iqAkaX&e=s{Lk>6 z@O7>56k;Kxdsz@$RG^8%iwbS3G=^rmuD_sF`99VUlbSc*v09JJKUCOU`mFo!+Ln5#T7{=J`fS_>bd)yg&@E3G zIk~W%-*FP#PB3n319<_zM<-??#2*q|WNRFy1#1l0si7q6*a+C7s-p;U-0o+eEOUr# z^7;Uis!tSCv~psOpbHqmp8QY195$OTt4@8fz>Anmw6{3SLh~|uw<~9rCH{RH`_+u@ z$SSXZ+$Irduacu6f9}m)^_iXs4tWU6hk0&uXFxwD7V~gmD3m@aA*Yi>gD)uQW9D*+ zY`Xw1B6H=r2(%;_Y0M>g7z@%5|EdpCT=-nti;LN$8^e}Fay{494*6*}(~UXSxl&3l zN&5&km7_dsg7;wAzU?804RXpRlfG+zZ-t_Qc`f4>>fGo@ly;{>Gg4#M`T{ zw=~Xzogt6$qt6;7a$YA^%U2X?R!mBJKdDM$fzaN1@g7fw)nZ9$Wo|91$HcxRjr=sT zISqJAv^66AF+m`MN^${`GOu;#N@`Excr>D_r0xrV%vSq>IVv*#dL9y?^5gzqJHRR1 z%oYOqdD|}KpXrd9b}xAq+Ir$E0p?QDF&w@+Q@J>heL)%cq?&pK+g&MzoEFO7E&)ol zV4#%fz)T)zMX`B1ZW8dK`8iGF%a>pc{8-1G4gZ{m%ei10p8Cqm@h$ygMKBtuwp;VS zOzh{uF4wGh$^%=!B#DuFA563w`LgoV>waL?KGCmo_wYKXf~^@c>kp=#nAycHSH>=B(i_T(bmmtY6dV6@$zKer zD$Ro_f=HxWiLd86R5aw#iR+@7L~#*mjh|Gba0{``q~zKddPYr~KZuctT(r0wF&y_v z)!>>)N`Xc*kYpH1=67m;1n5T(<~rkwYkW5+e!UmHX4Glx4KS(}`97 zgc^;3YR)Gj2#0o$rb}pj0$5@>1aTr+5!I<9Q|ER_i=2D_L8L{4>&6(l6}h4)u+N;2 zyj{CR8qm$&(UzWm$uK|xUq{KttY;ZjW)VUb!Ffx}TcR&GyJIN76TfuDZ!a^EI?=)r zUP^qqw*h`AzW0jkx*x}hy}5OD&~D8Mz=z2lW6P=k+qiWNu0#%D1q#Lm1uxVeF)Mmp zA?S!FJwy5k){#`0ww~}Jr0KLMR&CZ%FMHa!$A|2dYQ60geU}nG<|X~cfHjRgafUZN zKj}K7&xO?LQ&p&}zP8XALFQaB^du2yAW2Ae5fCE1@tv`OMZhB-WddN~x+$%?Y#YO? 
z6zE6u!9gpvEWAh1+Dmg?EC8xXmIr#P4AXxB$7JW;H;Uknl1I=TB{h_^rHV127Rjdo zC!i%BU_NmRUMWAid3%h8;Z_i|4ey>klnpI+b_2V(P1_~5Z%LxLotJwYZ_d7X+E&Qe zMt^5j=>Vs<&0kypL1|~Lc_|Sy*?QhE4I%lpTvR#<__QzOc zl7Brs5|bR)alvHDMNK`!@~~+x^wZ2;__3Y`l0?XzV55z}fuM9wcKw06{WPWW-y%^Y z*Ds^m;`(6zx50EdEBDB1^R_o$JI|TomccJR_UZiTjv+AFfgyIcxT{1A8tC8_0^2Pm zI>)Ddsoh3K@;1SA$UK@nmG_`c1I1M!c7uCmJhJF5hkq@;S4;T9ZO@4M<5tehA9&IM z)Nj%}yZ;xVGSJNe!OUCy8)9K{4rLP=2y$n3C-F*vx1#;M=Z`=+%zG#`c4+F#hk>MOAJ6;Z15qGVE}5+L z;0`|6Wdlr>lFMxUcBkWjeZcg}kCwpFc!aT8CK_NCsQ@nzf-D$(3>hbXIj4H{yrX;y zrh~ar+$fF?D`oH-Wq2c~!8==wT_W0Hv^UgUv^S(qun+1L9fm;()*pH6zvf{77LZ}I zsqSi;UfZrT5a0J~*+2=NuTVpR1H;F*l=OTBs6ltnkg?Q>-Q&~uj(R3-ra=w@5082d zc>~vjQu2+#BA$cArrX&-G!hA@Tp0HS}M0?;c`* zb$;2|wd0v7*AgH2dn;5(&Z#%8Ik%V3&OB;{JT!ni(*=rVc2S1J`JK z-@=jp8_`yb8=(U7D!_jrdjWXP6-)qRy|=}3{vJ=4`mhGNNs)YWnx(&&C-xHYx#Vdy zzPDQ9f*^uBFRdUM;m`YME0H8kCEq>AoZ%n;`+YqYW2u(1zA!U-9!kZna(O^DnD7pX zNOT<<=Soorh9^%F`T?&d|&!l;dI=6bs_`)#kD zge9=tK49Bkr3h5-*$Z{R+G-nvb7~G;d;>=T5$6Gj`69g*ReL8ELemuZ$oL3ywFr;n zY1ZJjB8kH*?V+r*HZoG+w7+$eWa(pRfns+m->o?ZAuN>EJ%alD{pZ8~Lr;M5ex1X^TQ#)lS4!Erky0b5c< z5;Mo%D0`b+==Tm#Gdp+Y!~U!l)pbxi@x-DI@(1RknVAmUP1p!70L8NY@<>zjm;-#GrFP=3b(0lXF zuRs1Xg@%fqbQh;#m{=+WnOv~&nEv|Pz#_hPQfTAntI>-6&b4KW>cZ5r zF$4h^_;{&@$+1X7SAjk1t(ZI|d9P;{uqg2@vzl97N_IHM!PsBBbRiF6nS(O)|7TEW zf@4#PnjKwiGD+6^eUTvK=n2Y4AE)`Hvp-Q>-)`l1v7wFSOw2cu_}vNVqX@2jntN!_ zr%LP4H#|B4$g#!!&P5vi`(|O)qX?n*QA#wgyX<-mdh9timr(+OUpx6}(lPLT;cY|A z?WPsXNUC|DsVkSTpbfQVQ={-dAyq5xUSt%`cm<_GhKQo0Kp=xsu|&|#t@S;jw-q%0 z1iZ!|DBOe)+jFZVY1HTEu7S-h+f5hZWuG$NcJuWcx7xl!Do9T$e&u*0`U^k3DTA;7KNK;!byk?Y2t*L?GVT0v|yT`(u%ZRLCF{Y-XnVMWUrd z_&|z2{ZVk>g727}+B{fKlx4vuJq;=B4e*4|!nJTJk1o!wj>cqD>DK3Ivx=xS&}ZOp zpW>qEQ1>aVL!1yuS%9H~a@hHaHP4-I-gD9(fLpR+kpFzV6h%K+&qWkcu49gwP~h(H zUzhW5fL^>C)2Wb@Fl+H``3<9@T1GjR0#)NeG`|9EQ4jjrSL8!aFg&_Q)ow)7ZJ%qXc+_>Y;fj~v%NJ@^EP2|r>x4Vss@FuA9sxT?VXtC+!c%!-ELsp<9h zaC(Oo#99J$pGp6Z15E&UjXoNp+0Kn8CNpxVPx!EM#nj4%e1IpMM(r^TG9wdZV_(N$ z?i=C~=qneG9M>n*z41Cy$@{*Tp}z|Ra2JN-?R1K8oabt%mDP_9<=C;G&m^ts*U$p> zU-etjzvrF@eMhiYb3;Chw(kWL^l2*^T0lb!ge|nf=Pj@bz$TE#D51Awa%BjVP=E>w zLo?^EDh&fg94}KxKNDueN#GLBmD-D$KWq9pXF>xd$v?K2yjR3Y={CdZ$8Jmv=AcPa z@ObG1t-#YL>zk5nWa!h|4Cou6=mcrrFkIQ>8^z6ah2=JiTlk5E%f+>BF#|Sy=2t_N zSY+gBuNdfk*RMIMFyGe&-Z!frmqt2!1%zrn6o`|u`d!<* zGoWt}p!W@~!IjK5-si?jKAM$0cn$r}IzbHD-khFxkP4XFn-ol2IrW(8L}dE!m5dkN z+9M6NtKZS6&JC>Or}X=ul_&AgrxSGO8_dP#vsF;hV&i>)$K3YP>De{(KiBz5=zZe% zJG?gD2Zw2YsywDbh)jQM)SE>Ar2Qw#Lhn5)tRjlw&Cfek=av{YlS02)@u>$7eM+E1 zI7}Mnr+9}8SzwgJ)=+%3^t(v*+z6S1|@=6v9R=D?j zoy1_+joO}3{(=zsv1&(NZYA&i4J=ZiZzw{c3m2zx=c{lU8pKU2*%+8)S%MI?_p6Y) z5JLJx0t>D9c9@O9el`}Tr-GxjJ{d;=Q1Vzhh@xC(~g;6sbw_e@$ zC%z(L|F*l->uqYXX(SPYh0h5iHcR|tS7Po3hq<+nNsiGJ6xKGMVxh&}BR;zdEf{_B z-Yvvuam)}74IsyvG1*e)gpq}52deR4YWcqS8mjA(DObbwM6l3fD_Z>o$q~=f)@a{A zBEH&#Ig29Y1Sa4K;IWnlPm6DpfqHaKU(E3t(cx7UY z!K8oNZJCosTWGTxvx&DTnn&9GVjDmcx9&eFg5C72{w6sBJdV?OHtl_W5fz)h#f~4l z`DAPZo@e_BtFtKRF=ZWM1)2H7W+Ck(@Qo(Qve3A#DbKE;4~yM9Lf-qC{Ok#a?UCSd z<|uC6BePKoD!{Z)SMnsb@_kXfEF@0mhz-{j#X^s*XdxN*DLhZz@tZXyKH> ziT}d)MeL%c2$|z$c%vW|dTd2&%3bBf^E^22@QuW$y*FMHb@kc3vv?lx_=N`Fq2DgX zeB;xv4z(L^6a;TPaVmc^^{?^ALyym(H@=$Smfv`z5={H+8c$p`-G8j>C2d9;o@SCja_-OE}i>8D@i@jTEq%ryAJYkkLHgYB{IM%ukmZyF$-fa zLP-4;t|$WO?Cq5AG23XI-+9|aukP_vcNalr1Pj(VXU0>%#R7!tJ)!~%w}pT z?X>%2NuFe(g~)K&UHfYmXrrJBp+q+G2ChqOFMsCPiTKzU^!S|Wz3GTj1%Sigqy6HK zCYlm6_dh9600L=s06>h>bx@95J1Ue=4e&^fBn4_>Er7S>P=BaRb@k0~$;`w8FZ!ap z^e9$+@A)0H^M8hD8D@3k@40=@6lW}TV_#O5UXWn+Au9T(Hp=lm_#a^e1X))kX0nB( z%Y4l{tQ?@eUXuVa+vDF5iA)+;8DUZ(e_P{!%c$Q`#ygasjO^?)T|XWQkQn)OIYnr} z(>t5bUD+&4=C~Vv@fa3*Z2fDvYe{^+7e5|~1I;u@5+I?lR;Iylwxh7km61Xof0Vmu 
zbu#h@To|RX7O4SiIELlc5b4J&Y;ib}Oh3CR@}^!Y*e^r`4#v3z{U`$Zi_wsC7xf;x zWT?kn^~~nOo%tC}|2pvK|9ole!XRqdISFyNlb8Z?GM|WCiXN&uu-2o&?Q#2U6qNnK zdbADln2e)H21C#TRQUUhEd6XrciB|a`+z#`)yL4sMfCYq4%|vlX_hr~ySFt!JaQ4j zrEJ`CL(>tPpPoUbr{n4&YzXjiS9G#T8L8`6Lfb(#T7kOn@2Wb?#N)PsdUMtNkEwkr zh@r)i5x54p(Vci8tBBc_5BnX{&Y=F|k-3^K^Mx2PS5rJ@C8QqwG9z#%@bG>@7ec3- zTn;X=dIWHroGj>ycqF|6LqG5Jc-mneET5nZ260kyFLH0_l zyE4Z~Fku$sX%yUTSk%N5+`$(==Fj)}2~P zz1#1#%-rqAKOHQ74@FUj&s|X2%^PphzJWxJm8sPO2?$pYME(vkf5HVgD3_og5Q+*k zhF)LvxR)zv9VGcGzIvc<1TsP2xX@){xXw(=TOy$1i3TK4YJVxXnG8DAF*qvf>j*QA zay-B#3n6LrqD5;p&m$*P8Ns zxg+ln9mcoc$!2rf7)Oao#L`sHxXtv$i{S&H80hObFXo8wsx z0`UH{GhjUORyPF|Fo{6~eZIdE++jKW#0_{V6hgPal|Z)gfUZh2Jbc+)y|LOG{DW2| z2YHzKR;PQS8aDymL|@2LF}khLqd&DDI9n?~#|7+@oj;s+G0V$Dl&?%@XMY$%k1jn) ztazPO2i=nQ#4KR>hi4Rmv3z0;_@(+~n*-z*4jjZztdB*&U1^yqOdy9voUD6wC+*F5 zzzIRHO~|;ybu@@B+($r^31l#9CLli~myxPvBPW17R1N^;*4vHiMCQx7kP<;or*N|7xiR83DLLu@98&bkwJ)dd_;I_(0Po0sAJqd%8X zbjAP^XN)PoQ>w#&xNT)LSgSeFsgZ+df(kC!ulNOmNpmf-z?Sdoy3Sgd0JY)m_z}9t z=lJk7aOIuR1c;ZfS`3qm)2_lneJKFyOG51()8pcmOd?4uyBLi3D7D(U*`1;L>Ku?< z7Ev&B`kCrDvtF*9pyYgG>49hCGj;d1m~FjzRyxidq6VE;Y@^?sUAbR;aXh~`U8e@**< z1E;<65G`q2Sg*kVH_r`Zv0dJM(!-^CaHa1G$f3mdWv!d9XQi-Q*2l1e{*I(~oRhkT zNfzn2RnjpC0nJ>w3DD9A4vZ+6Yy>kYwtYkNbDn@85C-*_{eAEsMDKsw-)X=Ithf#2 z6sD3s)M~PP-Vk(P;)BUeL0svix_Z~z?m58Q7<6x@+6ZD#zgTIb<~J&h%Ke>SD)T#$ zYX{~!#QAOXDLAbN^{FKSItK$GREc*Oo~Wv|4PssMAH@3q3V?_7CsKdvPo^R#fMJ*Z z>JZqTaudo|gx>#Ztl6X3SG$GgTaKfBJbiVfuS0p+eW1Mr+%#ceE1Vw9IT5*y+-qW= z+E};NCrSTUv`cLMbj6c<477s!!qEk_Oq3AV!b;A}6I+#zdlfVFS>n)yI9RX8Yh!_I zMOz4e@Fd5PrcHGT*gP~PkU1mT7yr)*vJO67dS5ohoro)+EMKtzg8pL8Ez8i42R8g} zvz-!d5;0iD3Q>hJf&%^&ZySeP%V)DxyWGTLpwnR$c`Ph6wUmn8OR-_shL(3LMIO{! zJADsK?TAsrq)ddeX|EfQ**t#=5Uibev&W~5Sebwz6V^GJx}Jh@Loiu&tADcU_)H@% z0zys2_X&)4o_%Atw4Ltd0?^4Ljy_W+0%YcI)4#I=x}g~Ab^+A#53jP;4Bq;6U_bwz zT;0%Qx%gqA&h}+R0Z^o)2VF{)68(YE?mFsWDK!=g>}U+8#M;0m^fj73JFr< zyK4DK2h*XyZ+$a@0qSik;m1L~#zCNf%(_#NiyH=xAWPlq0GXMZKRqNWtm{6mHF?Te zjmoioz`0oR9_9{%?^cA@8ep!C&p}*G)uKCnSLEA&-*BGEi$)K+X2?x0+2M0ir8s38>}+wl7dR0$v8GKA$(U36>2c zZBBuRv=3)$qk$e<`Qu0~N23ZjfkPxGg|Ir!sRYcBGBO7tx`{VoNK?5=@HTq$UGfMu z&!-y&?hiUUaqrYxG|&_&3!qvw7?5O>@FSly$*k~l%bk`2rcmmnFe38WA8H^E&#>B3 zqAZnUXMrd}GI=y+AWu)X>hiWbU%Y*OEw-}FR)QOn zr-H)gXt1hVh>l^fC{S6BgJd<4`-m<>%JBrN)G zTZNOkXgYDJ4qpnowLHtl1lQ2(^=N+=Sv*9VDI-ontxQ$gRgUe?7mHFH3wycKx0Kvy z>&@hSYLo~mofmK*3ybiGq))?SJ_XT$*rP-dTpD!?hKG673)Qfx&j(urs~xEkQk2iW z)j2F0L{6VB`Dp*HS0|EY@Vh&vadickU@)6WQ$D z7{r)1mQF?hiGF4cu=6r~>h_HXq@^QPt7sswW=ipGDc?lZ3&e1bU`0BUUr(eoj&eLz;`<__#m4j5T@T_=64Q_JhKj*xs5s)`r-;&B5hMbtM3Eb|#! z;@0aYQQ%r1_5qP`Bkaoct9b-ukmhEH?v;)~oO4$7oZkDxm%}iB8z{E*p09VP(-=Qe zzcRC*YdV_=SfI{ItcBY$bXW1Kp*|os_2|0NbFS;59JN8Vy1@W234ws=6|;v3fn{*z zjYY8sxE|($)9eX1khDtNd)K#n-?=}OGfwZw)qE5vd%X&NBprBJL{0k>8J|NFh$J3R zbl7$gqIpfAupf(iCR|H-#{@ley+F9p7i;rZ|4fj$4i$+8GJ2&i z4P!v{^}my_Oi1m`2~rg1gfMWY86l}qX2ciZ#3t9*eLBZQcMW2|nUwY4c#t><4VQqxOx^2a z1sjmG3`|>9K=j;L$oowRbx-N4SGJzVivl!!f=>92d)MUy5Hy^=^}T8a9{O~Q4t)c_ z>^t!R+Pj|@ZEy-*hPn@4N|0H$t(`?}XlWvj#z0vo-Pmhq-v%R;$|cMqa4@!pztp%>xG$M9zT}lw(y5~_?C{l+iB3pV|1R)j-QV}xFSOEVQkJB)%D2? zgrJ=uTH7Jmd762&BrJ9@*44Gpp}kfxCI9zmQZ%*rQ|*eGtU{1?Ew3zoCOK zBo%FY)o6MxFQ-Wi^W#?Z_~VBXOn_MH*1^_4c%d7t3PneR842bwv?+j4{n=J8`ge9y`@xb6F%zL^Gn zjHfF+a%cx718AmL=$Gb=W^Mw_ykhs|;xs($*ohGO%ahw8>!C*xW;4&*^wl$)xd@v1 ztGd7(G?V*Kt0?w{x=k5g;jDuqhVmE4)J9TiwwFkx|AjrZqHXR!q%fiWYX5_cf6!3? 
zItVp5(E5t;)!Evk9s+eA(xFEPc}G2#IqS@9W)^5>>}CFj$;eUm^jK#y-)6Zdmrx>i z&wc=^j!lRDK-2iK&?gOUTiegc)1Z$h=@7oA!fd8+Sn$k7Ge>O&(Hb{dOuGI8eov#I zdBA+<{gbs9HwbR<&H_vRRzl?He?Vv8oxdB-kA+&*;P$M|Sc(RHOrR@(h6ADobZti1 zZN>&<`%@+c*3)-o7ggb(gI@vBV2Heks~k**fpuQ}@&n7v#a{p@exwiW#s zLR&ZYuVc+*pQA4bJG|dmB_DU<*^bIT(_H&&BH$Ike=7?OXLO_W&}ZgwZUe&!g_;re zUB5wTJo9H#n4mN>1l5sLeuL5*SamBJg20%wc07sQM1o0c1WU5egdT8a>Z5-{jOsK_ z3U5;j#D>{qh1D}dfx{M6-1qGAU`h>C#JD2Pf+?1%`63P@K_Q7I8A zp+`le6X~7Mdk+K>QqKR`vA^%RXWnzZv(~+LeQV8fZ5d|ve)fLqul+Fs?)KVAYXk(` z&b*z(oo6I-O6h-e4Kz}C_tu_!k1=r7AB4P~2G6P)(Ab#2W{nHy+}b-<*>dQT@lz6T zk@My7jGBk|rw0$rVP(SQMIPcKa?t#f_4*SbR`6!u!|F1!)n`BVXFtFO01mxbVGD`% z71*1d3u1z<+-GwMQ*K=d-JFd$l6#z+aJh}Ies;!EFZ|yJk59`YEO_;7E^BI$n(;$C zx@@Pe@PGXPYvhv+sTzVtPRQGtu45SHT1x-p)1yAid$y`+GuFscABDUU(`V@oYHUmk_2*{^5q>AY+^oU;XBcr(XC82hM{I z8r*P8SW`c!t$B=$aZB0}dDen8^2Np-CW1zO$laN)d3x@?Qv4rNHrB}NE6&AY1G5?$ zSy?|^GpO+u8R{_!$iH~&!66J1kY60aKit^xCD(@$D1t`51$}k=!pFP0=wtu#-H}KG zN(bziDd~5puq2on_ja?fbNI10tB=ng(%&7F`S#I-=}+eYmUQ9Kc$)T zldxJw{Xt%86451u2Y|)}1{ytkw0p%G?@`(h7VH;7OmK<8f?*Zd;1+N5a=E>!g zuV8Gz3EhFU! z@$oy(Q_RG8i{mfflC_L#V{%Y<$fEe+ujWTguOL@(kI}%5{hqk;kF+f<(7c_%e;XfF z+JF=_$>|pj1Nq-^E0HYnZ~wP;4suOn({prtt7APb4^m*V$@}&Y4-kKUENpdGT=;AU z#;RXfB(C-(C?!`0aXS!75M%;|p*J`K{5H9tz@yvGV=opPBQR?tro^d&F{IKT(hB}g z%*ZIPH?n?cKfF+937BzMOyCuC6PCy=G-&myx%HQS!OrwUThMz9iI)2&6vJ?#%)##O zCh%+;oVJztkYPhcQ%8EYJu7&W_Iq;#?;dklW}`*t;MBE{fdYax|4gB|#_iEIc_2b8 zFj;{Q_d{bryF#SUjqx-4X3XxpHxcum2l4?E)UjqGk|ouNHg1aCnm?O^NlOJD)|PDG zg0o<*>H0Ni(^hEoij-3?8?^yu!F*;~`qoow#niq6q;BW0hm?7jof}iJYybf<=7IlBB}QW*!zm^NG@#{;rjmSq zxQ(%<{p_Qf=*&b|zti3lo5eP$-dnrf~Dj>H=o6fZhzT0KHL+)QvYjhwxWef*t9@?3->E}gYJn&hPd+klNX-N?AH9T zYG$&~iDn<_wxpf2B=y`9`M`kh%y0Lc`+VyAH$R0dn1-bVy>sz(9xJzCy(yWo>-=jW zu028`VjvRigkP38$teZ|`{T3kyea2y2l2X#V)1UT%L4sgF6+Ahc5Zko_$|a&J`Q#r z8CfR&p<7cpcI}PdRa%qUsTYnL?ioUI$yI+`W}pY@_rAHI2>ak$o&NS3Sr@tF-q{rz z(f*;jpT|Hyytr0|zP5=Ieq;#qW}fpze3j%$>4Tu0MU$>u1K_ar!UsKT`|$8Vndv>( z<~GA_{6=CccMB^=zBYahRd#Nb5-aDLT~rPkEQ6{DEXZS*SXg0_>Gf74D^TL_LMorz z?uS^rb}!@rJ-)Tbd1xF-C?!lAkybEOTkgR>4eNmSNr}gQAt%Z>E<=&+?Vzz&z;uK~-N^-~Xs-cdhW{qmRNy&9 zq~k@N+v-+Io)`W7=7-xD%}-ChP1&@&;c-CV&KXoNlO&9Nbx}PY2>I)oh*FG7pLM?ogH}eO@`A3`@CX2r=j0xsI3h1f!#s-L}*ubo{rLF%un=cG$&vvTp z5l9l=fNIq@4duy^i$fTJ?F&duQ_{&)2(emwnrq=Q)Br|E8C9sBErL8=C**ubIqf*= z7>e=vK(|7sayRfBL|^OHr=Ln~l_6Zp8R5zChveL#^6c|U0FB)$()#Q&p%`9u6BLh^ zMu}YE?@^gCH-Hu?>`1`WQum|eqx&f9=R`GB^EU7ud0@IDtZ5{i#16MxI>$q)?P*iC zq-SJ#HHqd}e=KGUVzKphbnerBTN(GD-&6`Y#C_!OM4Uw#-@q2ozW4xO+`e8q02Uw3 zBLnQ`B|*PVr$^gxIvR!J-?2i^g`hjxPyCjoB?#6!RsqY888_v8YP_P)Uxm~`cftlJ z%|3^qj|?MC?=t2{qY78+-+6o(pDJ?ta8>Z-;(eJ=qCt0gv&^_vLe;A=D7LH{gon^g zyECJjIQEf2|BBRCh26xj2;gi2&%1dHurU>Ss1nP4oz$jf028};$haiNe9Brb_^3Q~&DVskw&n8^ka|tp}6=Lhrqy4Ajy32 zOF)QuJGFACzyHcNRZpy#Fua&291KEL91$phq{vjgq{b*GPHZ)>1!=j~uX%c{uEoUZ`#sAJz6z_w#U zz|oN!UqU?mQe}54&AHDub|{Kp1;vt;wvRR3>K-1_x?U|LWV|!I&=Kkus!G#?L%cr@ zR)Gm9p6zX6&n6NizKw{;?5usWWzIO7I_jcL(LxNsDz?r8>Vu- z@wGq;jQBY?f{*;KmUi#mmqSH8i{?zeE0|Pw3rOl|cfHoajs%HcISzZ(rX%zD#GmH# z|26Bf_VB*$yAY(Le5!sihP;}w(t5235s8u;JsOg0zSi3ur0cOXoV-1+b0MB(yRXWg zXZ0f$z~m{UK%Qb31AP2vPe#Y!Yl1;c0nWJm13`UrR2hD>QkKr?OX}WEV56-tLl);* z_-XQ#HVErmbJq3-yK<+h(}3|!i>$#canBd#lV|!uRCP)Uh!^DK1~P>#|Bk$>zvUrb zwsnhi#h0sk`^H~BqrTC;eda>E-l$NQI?*hx8OkRjF#>YqKQH@*zX0I+5dhcP$US-K z-?=CMA>lpm0UgP?5CPg|>w3tJJ@*?@1~o8oN-2 zob_3g-`gp*7R!6*_(DY#>2v3 z7GI05s|Ka*(YVs7mzTkOKKMIZp7#e^F3C~fNERPDlHKj{W5)*^W5@hgnC=3g!GEB; zh?(Ub$MeQ_59J1go~wFqT-oU6VjeOSBH;A)v0x@RmVOVp?=1Ym_<8%?1b zyu{m;oxpNgmirP~L?G+h*6x9Lr@E4EEFThRqw_9^H3(fpc?dfDS_8P5^FziycL`k_ zluSi@&X#^2^TQKbJL(X%OwRyp#1v zlg$jos->L6IzYcdb+d6EopErI&WCU(b}DDE 
z3h1kZ*R)I5DWm)@GVkpn=`2l5QK(GHJr;cQ32v!=J8RAWlLxWVwQnClT@Qx$KGx*e zOsHI4wRv>VmQXuhkCFFBA|c55#F9;fmVQwCozNc;5yFjq6h0NwduWcz@Yi$THGXp_ zPrOiuH|EJtBuY|ewdl|{>N_|0TaY7MObB3A+Q=fv=P4hG$6*^JAm@k8FzwonJ4lJp zs5oTLUA)wQw?kmQ!i+?Rl=&pn&9dNloP1IHNVc4GD-{=+tf7rwhV)#A9j z-Kx|Sg&o9Mi|kuBtEMS3K?CJGsm%0Dnv*4jMej8TcC4wZI_F8d)~SU*6-6aM2_PN4 zK6efzXHd*eEyrJLykln%uZT4CI3%(+Z(o>>*t}ZdiD6cKF4tKa=_20SiaND>7GH!g znAglw>Rf9N{)D!U^FY)IrLd%u=fN6;l5Wt9dz+r!K4Ns{;?(!s_&)(VvN+N=W32zq zKv7)Y_`V^xWiKY1{0oSVktsQ)tidd6A!Rv*Zr)g|h=Rrp#0p&>6qZ4l14X&NSW7M{@1hX${&#aw(Zv1Mvigh| zPv?BU@^J_QiDj}_G?L*<0%U5OsGhUE=WtEZWX@A^%Am*e77`R;g4YnwFxWIAcEgc! zoQ4u2!eQ|V+pBz$t+oC|BT(Rn%fQq9;j4LUpKxiOJK>u=#SCb;OU6+tZFV;+gjs7_ zuqYz4Ja~w* zl6*S-)zF?Z90);s`~F?IYI%58!RBJQ#D+=jn=Ob$mSmcpLz1O7mo+9aK{OYH5Y1he zpH#p)gxjm%4pI`Lo!;2d3~uH`RlXyA+%U+6*(A5P)}2Fv5?30EDqH*;ao)SqmnF>r$<|}(si0$ zYm#_-)UsU25W2dmm?@YP%0cM!ZBNqr<%RlkOfDApO&FQ_90YinmI7uD%!T*TaHpyw z(P#8=i?6~JK{P^$>1*6{XQ~WY9ATIs^4&?a)Sgo*$dl=+2IWB3c~g{UYC+P06je`V zfxlK)LzJB_so{nY^Il``Wna$^6xgYeI9VjisUL%u!&HdEOiZh*@s~0ej);yIiaBne zH|uZbgPS`NN3Uvj6LQ*jeZ`LAcf?D!uI_ma_;v{mDZha#aX*q?cYq)lY;MMfSV(7%s1 zU)%XC)iMWSu!H?ieO5I}Jpp}|=ZI78U_}4Sb}$uvhwEMy5|*qt=Sow{TfG(4Gv6EM zy~_|0E>msAVIgbp_P1Vzp}w$He>hW{IP|#btgxf*U}?B&{MyOI>*@}VD-%0vhO*4u zHt)nEYrFcARdT_seA99}@^WP%#k$MKPEb(p;*dS$AsFaAd(d}M*6r96LYD|SrXNYS z!lN?vnNXpwXsbmKAJQ#1U~Nm6CfQ zIFZTjs)%z_frefXii?FR0$&u&zFI@XcC>raZ_I!~5??l>e zZYkg&4p@<`|=VWW&iMk>|g&(d!Tu#J`;wu6Z zy^Oe8weuPkgpfK#rZ3*9*CI57?aqPBfgEt5o}0d*iJdnn3hrA03@>n4b-O?ct2x4& za3_~IZ#N_%k;1>VZ#+Wa@9G)TzbWbJ!SBcAYfoGamIz&B10A*4ljEYGU-&t9nR8?3tSL# ziC_67+6_Z4omnf2d$(5E1E%*Z13A|U9nk%3FWdctFm#yAgAO%hj+?4%AJQ_?gb0S! zXH@V&=rCKZaMc5B@8dT;^c2&sjT8z7^_Q5qTCuX<{FSWCc>r6zoW-3m#_e}}mnE|?9^0`>y}Vz8KsO{|fJ_vU45-;OXn zKm-Tg?PV;P6gbG4O6-rwsjOSYT8k9lzpa>4d9BI%C*CN?TAEgxO;hq31ubMXPBqkB|;`cp!=qL0?%J8!#=iD$pn z;EF55|JCxp!VsMp=kY>uazZ&N7bPSV;AYRkeDkEGvo@SQC3SS!@>oO+kSFV1`w4A+ zI_81@-SJ<@B7=Du5{FN(ASuMMZjIKC?Sf3rzvsL^(dNYa`dfG{wjsdml82;a*q zcWBp(GUKNC^?TjC;+JKUe^`?XNk7fYoP>+_w6nE;Segq-Fwi0Q2G)~^adoM0)NrBP zZngIj?r+e5{$-RSjv#&idMhdHe${ZlI@97UV0eX?3oI)_U`;M0{WLFg5`LHR6q3Na z3>X*OFNMF_RB*orAZpa?Z1ubTNb+h00>sJvsIn?XfRwe~+8bVSJ3Mvng6|Ghfi*dh zk75V}=H;>8A29@2I6)vEnU}FX=`mqS_%~J|6q zI?lUyNLS#<1~MaTb;afvJT#ZLNNJSYte`HMGq?{i5+Zpo`Y~Shu#xJ$;qilyK8f!7 zJqsd__0yd%5UBn1%Y9;@vF0K!)D93^E}hA z+0t8(mobkpW$Je^mcW|a{!26lQRTacOAwgRj?Bwu7+ofLGy8==Z}ZFqjF1;u}h-MU9^Zz{Paxfra;zliyASTxPms_vX7kN_$XosTz_WFwdI zBaRbXZsTXh4(VZwf%E!%THmx&X5eTYE{_l8{rL14=x>kd;>6A z6mKGHGGymDG+C|CQ9FW{aMh&>(Ipg)+&-+ z|C03pebh^h^95^SJT#Hof9{yQMP5#Bk2b9zgMankfp5HZXkk2QZCju5etv%ry2LbhaXUWU#qPKj?wH6wX9ITxYw{kfIEuif z%)<0Un6zjcG2?eZ8}w9=m|&8Mj8lvAzNJkVK^vd6(ZYEsw&a%t6YP$W%F}uT^0z)Q zwQ2qzH_Tfxab6;R{?h`JoZi^r5C|{2KFIdkAi4--ChtB^2a=-uL+zwDf^e1Y@s&^f z0(f6drVZ`dm41ZuJ5By4>pyu@ZsAMO=?_0}?@pM!DLJ}jWKWj6A?Q~|#V4f_JJ_2k zJJ$mb8uYp&r|qG=$fZ+}2Rz(hXpHALGOtirc-J34aHfqC7E>FJK_D z*T+5eYpyjI-FPSw*()}V zxs9QT&MBPF*{9~T{F5+_C&27~`0~4*7=1Db{fVY7gK~~sL4I@$TwS#^~8JK4IrbrPasnOJ-2t+#A?of$n_nX5P6WfHKmAj&IieW0LJQhgB zE0DZ*bW%CP8;%&-))n|TrSP$ZlUW^up9>rs;4IQMxNB9^W2L!JSmp-W!^-)j(>)kJ zt7Kf_u;HBv%FbXFXyAl^XksL99Jl-Xaqf8g9~lfk;}`-kl|cEP%O2SFU=P#`s0%X8 zfVVhdWjH}zrX~f`T)7xumiUF@Zy9s{aRm7#4df|mn5T~h?cpfx_lyrgS~QkciOF{) zT*uro`Hp#k&|DHd^Hm$XMp@ACy$!RXpgYDd(V}hRqwF?pQ~PC5`uA8g$eM#qEhdCI zdMCfvp1P z`~R~d>AyvQKLQN?Un>Hn^gupA@Ya3ZBh%w(a8T32o zT&g_lvhcegP9$~iEQZ^+OcsHxnS&G^;i=@$N8w#)d*}Cs@WXne3b0ZA_@i z@O!xN&y?tYm;P41tqzj2?}vFPza8t)#GOh70wKc28mf|XxpTnu7@mkqqCg?3^h%pN z8Bs&_9xPOJg?G)ZW!3qL=XUN&hw@8XVF%amf@xUeJngv`7=TZsDxHf8G95BXF_D;yRRBx<}u$s>XT 
z%J0vX56|{A_M#@NxT(Z#$=_m6{@4@|JN{?sk?_rbrboWwtQdk4N^zjI+InAeN7Gza zm6+nX!4xPUdEW)cY0R7~HgNq+5}7y>;f9FD8Rew5Eg@!AD6b;(7!)U5-#c8QEeFM> z$4{m+KbO(WL;W1=8$Qo;Jt-s2g(C(1NjvrBMq+qAL>a}!#4H%jV#!5$g54JW1RZNS_9QlY1O$#7)v8hx7zk;`v#aYNaXX|4V6?@thut~dwrLG1N@ zzavyT+ddAwnSx_UK)t*)>2<<-%IT%1C#N--BAV60@#;m05wA#21{Kh08w-AqDK6!` zeN9b$`F?Gsm_9lb3Ob2O+Ru3J(_acPySO0>J8IYM3gLwb)j9Wp58HZldbYA_(CqL$x_bMUdO($sdCvyhB7R$^H64X9+bR#sIfY`ip2Swl|^*5;#!J zzqo4DzRnqeSk;kpkowXIF*sv-1*F$^uo5d5Y`^>b%}@~rloe6VSeHdkmkX5oDb4ps zM=b;G)je1pfUoiqqFNCh1x+?};1eccs{!w3 zcLH@JieNRexxUYZmIozQPC77Jpd4`omkiahOnlgtSjz8CXvLI05e3P(Y`HYk+S-SP zM}u!HmIiA7h4fbkBJ^EwJa5hDhS~%XEaCX4>}#>n;?;q_)iL-di3>GdUy3*XeQWil zcUx-$mS*E1x&Kt_a}H7#ne$KTTiq#fuswk4dniXaUWCt9)^liE<8AANuGA{S6BCz* zgNn5a$j4tERaaZ?jS9Z?uvqnlOc+a-Gj(Zl5GnE*0!d3|!wEaqpMTix4J1XgpBG?K zgzxnX{!suyl=X(FNt<-1wWGQm_Y}}GQ$f}2C^BzD%`GMwaxLtHKNW5Qp{P;UaNk6q z+DljVOyoz?zuL#9?YRQOAX;;LiXTm`IAI2P7p8-??Wz=Se!i({?6ptxgTLlU|@zNhRJ_>R?)0`G_PgNh4XTm__cU@1UTW z#U()t*`IYF;oEjmwWJ{~uH_^f7#|#_*7*@SLFkRIh+cI@pwqGkSNhw6wsAUmRUF;;u4|S7$TXPj^L6q7JE^it@yf^0GQ7++)lEGQVmTR|NTzGSQ8u&;z_2sM3Cy@4|4eyw``*DGY-oEW>)V zpE`1X-3?9%)aJ3A}~H^v=3>7ebLs#hT#mCyt- z?c_;XS-MJJg2*0*WpPL7P}@*ADMS!O18x92{(lP%Hg!< zJ6E`#>1WeqnU&+QC7Qw?z)F z(X^-P5V*MM^o^oseMe`F2aK$Zbsq?esjpeIQ%A-J7uBhDGb)Eg-uP}B+A>>1)~H0B zlUtU-(J3sHXcWTT*%@aT+8vu-O60BZBCn0kQarV-XhbhIOm>4}&^`M;+mP)93x}&| zt4okh)IMoswS*T5@?7$}&~T~Q^8RrSsw=>14|Zl&r4e&IItHWCRE>^}t6i8C=F>5F zoj%~@>d%i{jXhm?`&wck?m^qTmsir%D6|Wgc#B^x8JpcKvX!qV9qQ>>-5smyd64Ag zutXA9a=B%*0{0{Nv`P^yu>_Xxi@hFnwNT61)XmoG-d-WD8YMzO%lC<(UOk{kpD`1t43mO6@l2H%G-5at-32qDtchABDtg573q9OGwpb;ai2o zt~Gp0M|HATDb(?lDk^E5Lb7;8&M-qVnL;O&$D+p~LqTiB1cJRiD-U14qu-up@rv|} zk^~t??J1yXmQw_S#i->tY;=#?f<)IjU2ch|eZG-DiRDW&L&{%8f5Q;Ys3u;?h#w#oO>U!}Ep1eMfG_1JMlN);9)=DU z>NR?xKn~I7=d;TzebsH*jjE;r!9`2cAW(d=QY$+d!>XR5j{X3)`v$oe4aSHo6g`bEDL(@xD z=eCWu@W0F_W+e8TG3f8=zi9S@B!TqVqJjTj10HS|9*!QYz@Oc1<@EF6R{Xn%qZAMo z2B2{K#qvDS+JdL%eR{DTc^$(FWB{@mri* z_t?TQNTG95ZK@}1C)z-$NMxW;7ghw79btmzyJ)oyQ3i5Fy<2vmDY{Xr>vKv5t>mj! z8!pJt^Lj{ljIk}R@|@R(LUC8Ms-;BZ+M}gw#y0U+CmVOi7*Jn~-@7+{LQ++MSl<5H zX$l8k=2Y%SIo}U!3aK4X#h4<_Nlt3XMTt*$xmyYqC zlTVT!jI5hZC8`DD7FBKJ%M2Z+G=$I7AEr;ifr05uS9xVLes4>}jH)3z6dY9GE-83D zz9eEMQRBJO4ZPBJx7*?zo9Pb*IlClqI?JUwp?BsRglr8G_2NxT1tGTy`D@I~5M&98 z4Ie_0PHo`XvGh=3REpnSTleoGx;ameHGxYOK{Pg&!#bZ`6 zYh%7-!<%wF>T^!;Jiek{FL&uz$KlR9mLAdEih>x8`U~g;D%ZdJbi1;9n({~4+^j2- z=dN7~HQKkzFCbtWu?WA0Bc|9!72D{SIiNrLKHN>bjViNf^&-yx*H)R9H^mwBMf7E%!PC!6$A<`XUEMqO?gGjKKSJv0`j9#=e( zi26kihq9<>ze`M~ry6g+Qeai>sdqGNImc2K4{KGarWXj@fl>V`YA?jJ^Z&^h3VG&|Z zq$Grn%rgjyf4>M;bN@9CP8MX^a2CgOL9WP{cWi9-sWxh~;YH4sBRO(4Q{Zt~9AA~N ztc)&Gp8l&ys|@Yt_@3ERV~J`7j=W)B{eb?OP^$`<#L^XlgX)V!#9%=Wvq+ko1P7b? 
zKsfpofG@E+wPm%O-{6MMejK+1x0O2TNL6&>Gbz-%T5wtfRe@`;!XN2N3*w{gqT0Ze zH5_TlqUOm_Q>(xVX_AQ!+5w_aIk{k^%d%oH(UC6zN=1(WDCRLP?4>gV)iZh?Fb20 z@gjwZ*1cwR=}@a{drTDd@3pF>Uf)xHJ>-Gq$_d1o+gjYw2e_+A8-+I+V9}Vx3gdmQ zTLWLHVykV7??<_i6TYT047^TFs3)ZZjFqT|b7Wt(c|WJ_IOJ?N&@K5`XU?COMlCSO zXwTklj}7DKQX5Gd*vu>7*hQZHcupqX(i(DGX={ClM*(x$Wu#qF^WJ%!Y0ocTJp{Rn zJ!m})%4A8gqCOZFG2q#g?Z0dZE=uh>LH1|8;G?0!Z4(`lXICey(YjS9a-$^U97yOX zrNgk!mV`Rn-j=J&_w3gj9z5dc{N*mgCG=qe0e&aNc!VZY-iBr*qVG{gZfJYSrY;&g zpQ|5S@00eGzIm>dC~+y-m@paBH zBvIB2_C&aJC9uCKKORBn6n{L+8}C7+{kZrHx3cuu(Mmd`&}7$Ls~TyMYdYA-*pfct-tk$ z)k$tUN@d30d|t$Dq-5`@yNSk{I1zprbp(os63@T4Z7;0C#Y+0X=Q>#OF_0S(<6Ojb zkCuBx*TS=lbB<^(-wH8&w)gKgy@BAomWjgGKAdNp2iSAKrH8A6&7YKuIB~2+AH3)A z^T3>5>#HeM*R=6=TkN4mtvm~~F%_^DAxjWd*Gg?fI>68l)7g7731#nbOgJ{X=SvD^TPB90^ zV81h=g7jm>yZnH&^w#{kL3j`DULqPry%A~Ydx<`}l$dL1e^Zv5SJ!uiqQ@EbtSdAM zC&&4DAH;iYNdlMhHt}xxwuA&)km@F!rxH}0Z6lWGv>X731|gr~99U(>Leaf`o@I6b zkA^qhiC;Y+^QF2;MD{#P)^|e+^EE%f)~#1ws>7@IUUmC;2Xn~{begGlII0zj9V3Jp zA#We4lg7|)DYOkYURw>ri>Viq^@_ST?SWT_*+!cs0lWsCXt6e`&6P#dabxSIU=!)c zOD4=LPU@Fb(go|)a=WuhIcf*Yn;!=czsNxqTXTG&fOifmNk(eUx&|aRyEXxwTEl#kd`s;i2VhE;wv=@Bqy4?3>mSvWV@B-EoS+>lJt$~ z6ehNIxdv-15%aPn)NMFx<{4Xzd0)~x63>8DSHt{+vO|+6>}M81D>~I4tZwoZaa8e$ z=L4XWld+7&whSGPv9aY0kKcAX;Gp)A6LIczXmK;zcZWJ(>gSGvfTK=FA>Vy2A$ zOu5vH3H>Vl_S#0I#Bv`ux%9-OoP*YzW6?pn?b)q4{#Az3$m3=*n9vOV4$D{CT$+;) zKR_&zK_h*fY4{bKb~L}Fp30aozjSNUs-WHjlLqpplr>MocArNW__i!=eiAzU4{nii zw|KE9pOmpN13UT?bfuZK9Wb&^$r&|^L|q=6Eja@Re{9^al>3pe_{Q0y_-@#}y&J4E z3N4$NhjJ|oI>STiyXZHB#I{KXq*tUTwX5_ey(6(dJ2CRWMLW|tPc&j43y?vbEzU0s zZT2$W`=&)Iu{*+qRu6@fbIkf|d{gqfcv;|PX#-dJ`EJ{zyY?&SiQ~aSNV{)=dw2}U zgbYGuHVkVVBttUIaE?XmafRYK3W2yL4|Js50H?MPK+4ABLehRbyz>s!o{RaJf)7r z=WcICCk;J)Yx_L?!qEtFISZJN?foN z_prSc8}?Fy7g+NB1**}Yu3f8^ZR(&?MNVx%gwXi5lvv=f|sa*NnR36*`A;A=<4M$gH5x#eX z6Wd|WYxZJVWSA5WKD5OV5ubRebBB~#pV;Glh0`a`*>)zOA3Tnh{TE08!G~mlxwXoc z+M3+9eZ~U}Kie~fI_Y`h-(yOPr<%Y!5jPOvc(x0m-)Y!Z&QnQ;Sv&42|E}4O$?yMU ztD!JU0FJz;Q%7LqPXPnS6C{yysK(nl-=yY~hl6J4qd$Tp>Rks1!vAq1p z7h4AJ&!uQtLMfV>m&m|)_D3V<2qPq30(-+hama(3-*CwJDj_x;A5ZoBEn^2)<(3bi z^RaomCd4?GUbh?mEN&2DC)4$5^XhXb272M#8i=hou~O=Jm(eyzb108BF?R0Hrd%!4 zA9p^=%ilr^9PI?$`)P&omwsMff8nU!=~J?a4L9wRNY$#nlo3FA2K;ESuWoVEyej$W zPQ#}GEm1fKTHw6Yx{T*@V9}nx`|>z!?b-izjiD4QWg~dWp{__?G4bPUlkVqyn6`SW z60B+>^-9|2SQ*Z*ORw%0m83#ee57A`f4biqet>>$(<0jf;ITa~)Q{JXDxj!~fKHJq z44W+&ZZ_v?Vn$>Sgu9qAf$3uLkhi)3k{Gtr#m))7fb`hxBeOVni<&tTsZNs=_^=^u zkMGH08Ts8yn2mF=HlbfL@JQ%?KURK{A#+1H38Xf+Z)Cb_9;5OKzN<3Am>0 zje~)tB0Rz(#_Vi!WTJxLv{37Hx=}>s5;WvsyTTzI~pkW3)#*A4OtvY@c4R93>2F&j}>veD2*ThR?dg@2}Q|{Ex9u^mDt*|VIC2X zl8s-`LG83%Xv7A-M&1xPcrfEVfo9VMCC2w6LH#m zU|V+CM0FA|Sfk`3Pq(F!qHu}(iPJv4TY&>VzsW-TXxN+7?HMp%ZXg|q6a4aB>kMtv zHpzx|vkvU{&LP0caq5C<0XmlHzPGWVZ;oh<`#rk=Ddk-GSl4C|uD84T{c}96?{mkt zA4|j9vDL6GZ9`;%IYL!JGz{^!+{i7AxdQ;S{Cw2LaQG2A+tQ_mDC3;go8+Nl3%y@UgEUHb`70|1x!;UCiDFX&LWixLK!jrfpy5-vpjvW+X*oY zBROHN`+eTIO@1>@DGRHr7sVFiDPQsWLi?w47M!xekH!0~T^-M;Nq|GUYw>fwH2v~f zd8>Z@iTTV9+8sK(G4|F7AW<5&P#xzzw$1+gol%yD$vCrjrmuG0pLl{J%2;q0)#(EkTaN5+2>)t6{)t?mw(bqg z>rTehMt7WhX>m1X8^I5h|bK5IOe#AB1`ofHAJYeRhEieT#`2C%qqjq@43;zmf3(d+Uiw-TFJ=Z>K9{X<6d11+BQB*(2s;zj z9lnojUtnQErX_)@t_G+!alZ<4?N&PMEvzhuoFL_-en;OW&w&0dVa#TkWasjBS>j2< z_i7X}kc4v?P)SR`DbSiYF>VKjozBExDIf*n*iR~D`+?zW)g6qb z0lcFUq|L6ZkK>hNejeAh9-tS+CGT)hjo02oitr=X%)}r@7rMD>H}ex=`SY@I3)uRQ zM#hYL-BmAXRG0?6W@&t`cjKY{TMz}M5H{2^SJk6bqBh7iv$sVs;KGv3epKMZHglnl zJat%$T6!9wQ0q+H8;|ZKLA5l0fIyv?OM?q@9cCb5(bkHT`Vs$b!_nlZnPn&!$t;pU zNh+}Nw=n2FrLuLyB7Ns_4~(2VXl?U2Ge||B4%-iZgq*54ip+2IIZhoe))GwtB$-Mh z*-LN$YAvm8oOAU;BrP0tX+1$>>AT2|b>05A<|w~Ts;TQ!a4a}fOlDnnCwrdg1%{Wm 
zfpKiSgKN=GxI7-&zYjN8u^fYOssS>qw7U$9SmsAwJ0~Xd6YydP7b_HBAEVC4=!T{- zY5k&#J97A@o?;#IJLdpTOOj0;gK4&|IfBB_vHXg$BNAQmk4)GWX1{pDF2sL8WP`NcZ z^84!bMjauoF$V!)NmR2>=L|txqD^F%gcKSTUqdAkf_nqf8N`)ex}K_D!j1Aj2&rzY zg8k|uC(CiP*ZkB3hnIs| zpbf}gjZ9osA{1E>KWII(8-YTP;kejl^^WFP{5V+NPjx!A!Bq51|CSI|e#;TaT-wcD z#6Xb@Wjbzt=_SOM{9)(OHU<0x7x$1$fon$ETXQzbc0LVBzaq7eR&M4bGDFA`P$4@j zKl=5)lQ_Djr%TNKQ%gU|WNM(HA^{(=ffM5tE$@7Zw>MUcGm==A&oXi&xMs4#Zdaeo zM{)#_ux$#&D+d?+K>2nKb00X%vh!ZjdxbE{E1qEyDOn_B2rS2!?3rQcc#mI2eewY# zGRyY6tsW}bb|@q+xaQ4ht@`3&Yt&1IL=$kfb(c-m$^;(SF5Pj@V}C}*P3vUi)? zjO84w@=0vA&mo+aQ_VTNYNctV@3c4Wc@{N_+&xKKNaM6eB{`<-^Q_v0_fN8MqqVj= zyYY8CW8uerCfQYr>%rcrz5#xqkI&;43h?05L6zYq* zdkWawd!UC-c`0Ih_wFe1XJ{&lG#gn0UK)@De>D4gl$?$4xgYJE?r9w|;_w}noAw=* zJIj+cHT4>}6|#H9DLFv@@#Z+v!)%=e70seDfp8YBg0tum*Qb|nsX9mHwxD@Mssv}Q zto^hmy)&h&J+9ZarQ~aC* z^b}Rc%__YqK zMLzx%WlcWKs`OoD7L#1uu8)f>Xd`HMjN=u@!t2FBKv8(EXF3N$E-jc-b5J0l=;PzQ zT;-KAojMC6IG(EPjuzDz{QegXJrxU_^FR-k>KC%whBHrqaHheRz%(Ek)!xI4ioSg7 zZt~>WT#3prC2dpTo6KtfxK~pF4ghNcaS6IlbOUjS^o7;R%VE=f0NU(Trc=MV(nlH>`ymo{ez}(pLFA0E+5sKs-=$>7@jDl1lqUxF?iJ?o1hB#!J0i4fKYHFxN7Gx8PK~c zFO6hE-M_*0X4bBITVS5|{@l(fay|}MO`l+XQ2&?t!K>o&K#lSoEtz4R{nk3mI1Rp9 zE7-)Zsv_Lh;(lfXI*9gNdb~eOI2HQ#7OZbC)=Cf0935RqzLuF9#azX{BVxFX&eXmU zuB)$qbnOT&_%nr#*iU=EH2+q}8D*@D4Ik)TXUm@vV(04A?YKX9JXe+5sZGOmLk!^4 z-XT|~@v`gqNM}eGB`Leb$ZhDUm};Q4|5Hs$zZF0_$J#=S`bV_2noOPJR4Z}g@i(=f zQh|ci?wi@YXO7Z&!*G>#gghDROK$cxs>9EPWrnNLmu<9Wy4{S^6@NgD8t3#9Bwl1z zKl&o=Jm|$_#XGaSI8L8%BNgLwVt`j=pB#7nyjVk~sQGN~e&zcYj1WE{_XxB}uo-OR zd}L+Nv|uJFD_~{JFSjZbha=b~1f6AW^KcI-xZyPu5CBY#agqB72DUJ~PjBrxk=aR#BukVvaBKhj=IU=~>(&Y4V`VosoKM=4geKeXnaCO6&}w z)ET;!#87Y>AEoMOpuKIr zL3oWB(Zm>L9fVN^WLBCoWk^F(q|J;NVr5fEqBfZuV?(1Qlr+(~1h}pk+Q4T}72e4m zw2YDJK1Q;%n(p968BDcqpIu#mn+ox8PzuJF0{ALDiL^iPQ&y zIGeR2RI_lA&_I7rnY?78T=_Oh&I%%?!IvJKp)!<_B}K9ySX~? z=Qz>vA*u1EUY;@oe`ZGJV)DV2c&kjxf(k9rmGYL!^d775M@n;w>5sXVSHO` zlxhV3b(n`gbUIFJ#a>-f$+3PP-G2R^w4pZz%)A?|pNaPwoTGkG{5As5L@cCBFJ84j zm0{AzRGi%7=F=*-`)DNBzX_4Vq$k|{jd$dn8uydCK82pV`@??D z4tgHzs`{FBj{0W8Hp$LrrPYF5-xpy+OA{n8QCws@0=e}a_PIL2%x5_DGs{Rz)Zip+ zdP6?4!rU$s{gnApZr;RQ-3Hh`T3vSJS!;+Ih_TFOF-|pPmx8tbG(B z9$uPWN_*aowzOqLiA{HrI#W00k%vU}Vg%?(1MzO8Wwy?X#xl~8pnIsp>tyRDgtkwP z0FlyWW2*p$%NL6((0sU+y2UIL#RW{36d9Wx2E1VFyax7aQ1%D}ULHs$^b^e*2Go7{ z33KRJoAf~bRhAmhHkc7;pVUuQu@9;??>8FV=fDphwjU_HHWGnX@!TXHL6hClvwYCk z(BaM1pft~Vs?C@B6Cpl!t{lLfrIKVLkwB1!L0}M{MZfo#f)X z&Poq;^(C@>eA9Z28MMI-^Kj!vi72Skgx^!~7dm0J@q9k?pOU3=hL(gQ95S|!xhg6x zfCssrYI$|R4=EM%Uk=7pi1<-2^(BAp9-{2mhFn?u<;E>LAx-#N%IB~gJ5;n+os=}3B2Y}lA>pX4rp+Dv@aKh~NmBuIdHq~l zN=lv8Iqi!zP2(2YU3&VxW+eS|ZZCmWQ$K1x$+5WpT%;BBva6;eFRy+TzxZKdtb-3< zOM9Ox5QG8!|D(Mtji++!+ZvRp$5g3=NHiFdsR-4Ph|)wMHpk31lqq48C_om}dRkw4=_312hkts#Pd(lQllp`<7DXG?ps}rJXy+kf z=;J&mymR_FvO%TOAw6C_*yJOZ5`T5_!@BA4kr8}{!QJ*L?KnKHZzdkZxY%2Nz$VkT z6BzVdTecTf)5v*@eq@8z#BiO)!+OCnpPJ79*g&$>Tcish5O@J5q4_miu~^; z{7&GL-S1uXynsfa<9E-A&hwhWk8xy`S20G0rv2ID{r1`#-nMB<5&iK)<9J#M=wqKM z5!O%N*AKHRe}&!>w#hp(78!OyQ}^>clN#$C0yzv43Cj24;+itOI&&y%VdFpj3T%p7 zAE=E3Yo|upoHNF}kJ7k=eXk?UJcwB@cq&||S6=NIp~`eh-TP!N?np%@;t+ukWGad? 
z$~go8{IAP@vgP95L_>`w%k|u^+8-LSTTb$g`FS6{z^mfX zt7(%kQM}fA|YX;GX9=?{YFA$`mJv^z3qwMwY{Zt>8AubiGOzbiM`%sP_#)%ga z+5zm+-wN0mWe4g4MdNNq%NtA*1dXMtuLI?j2fwM;kT$+Sa_QdXEoMC@gp|DcAa`n_ zos3~yXFB=za!zVdjj4Blyf|cyP=_a}^qw-B*vqo&!fNI0{>a57vv_OeF@IkSgS7p+ zX4d{`9U7DTHE{Y#I;pFITF$oiP{Vo>HRS?K?!V-rP%nZM9bVgZ8xbk==>HW(B8p22 z>RB#YjqpsmwGu%3VYekIDdd(+A7#lCz=x|7TNF!au32lUJ@R zqB|AaV>(X`AwbIlcxjuSv}BNrNEZAS(y7=T)0uEKy0fyaw2V$IlZQQrV>&16V;-wa zb(X?!16V|7*+2631XKKOX?I(>jc9QCStZ_UGSO3EPzG`hEa^SNkF;ycb^{7;&7g_+ zB>cMjBkzZAhyjS-T#lCwP&RCjM|fOLAy_E# z?fmOIi9{1xw{6dmXL)<10l#6qS6}UjTR!h+$193oa4x~HHdiw~AiX&MRtg_MjcqEz z`pWPjKtEt60C9v8q!yOENaa|A()|?K#sKFnzziA|tEC;@l zFdW?}DG&|Jg%_K;_prQoS`L1A1Zk0ym4OEjmMzz92V`h3 z5l*|+Bg|7@sj!G$6%3*rPrT0oMO1mpw8`QW0*KGVObtmxkJm{|$+wGlrqJZLs5dYEWj^Thf_8zCgM=IUY zKr??=vg1cpDxLfgmcZMFi-RB%oMtp`s7~)D(7;DLukL*qMKgIrf=T6~!vr7rZH|xB zYKN(639CMO{Uk5Y>0A_0xw@qRd7<^U;X_{Zr-wXA0#SKTkvdUVfS^p*X=4!We20s+ zA50|5!v~GQ2P%icti}>cNmIUAEYC{urOm0Z6&5?haxQ`ocrFa4R=yLw#T26aa|bK>>FnzH}tCOAeAy1xEt>nU-#1TEwz%(^V$z*7jriE!&?GsJglDY7r$@+Arn(7PR{&Uvlcn zUrQKw63mJ&dzCX_aL8&DM@}N8*-H)+6%Hhf=R3&8yLjxu)o|+P2;dSX5~sm@z;G_L z{PY=N)d~kk`iTWokp1th(K>F6t>Pa+qoqoMXKWP1-gCvrsW*z0lEsg4)B*D06^2%OLPlB70)I1s@0(_+u0?So(6vu zJFa_u(vS~vyF&N46g|oE#ebII5;_uzMT6OK72Da#N%zxm0@P}0{kA+RTF}f%sbwq| z3zoYL>#~OeABGVbGI>>~y2E;zZfLnD4;IqUat)i*giSY{&TSbWaFW0!XV?w4?@35>qI|qb>CeT-A9)XGnNVji&~14$?P`ns)e{7E zDy6Qq=Mn}g5lBc%0n7l`#GYpzV|9$BnrXDUb=w+lqkOYPsMTOsc%!9Ot5$zTON|9f zwcY9wo5j%s1T9N&dbcmeHq``)*a8CC+DAsBhlI;{GZmWz~xw_y5=Iu%P>YV zI*|`OUokyr1-maaH!MZ^x$ux;TY3h^)T}0B1p&TXXhW$H;l&N>k@@Fo3}}VKV;)~G zv!VYqM;1LTW5dB=6n$v-sCynhRLf{jk;7aHfFuc+N%uB1=Uzi+_vty4g@>{~roUb} zHM<{{6Gnq|-O-#n*NPi?(44%D0cQWQ2<7GRRWjZc)Ev3uSn4R8D8M6ArlZ#2z&%a?MWy3g!id$n zM^{jOPCav$#T9sLKaAY7qld^YU}#1wQLE}?AsiK}^WR9Qd!oSCTobq@n*#@jKu?ll z#sdzf@KD-s73UinTj-cZYv#iU9Uofi&QXOtJkOEpf04~ix?DCurAN;BVj?Gcu%h5) zhFV67^7tB_8Y*duEPGbMjf2D6^dUVL_c!>U!?^WD4$27<+GH1?yts^kLk1ZHJy=gW zF(5-h8|abnP-H|zW(aL|8&Kg836hj$S=tzsi#H(wJMZ!(gGKJ+yw)U|+vVtwS-$%3 zNM>TP=zKA>mD>WTTTIOD?-R6+DcKKnFS@C>5)F~2;Lys$=a8o0wd3ad!B!2CY03*Inkhyt3CXAA zVQ`e4z9~Wt=`L2v3w^keapxrmRxpgUNwnE!Pd?sXbvq#XIHuoet~nsI#*unBOaJ|V z?bm;orl;RfXNC{cQ-h!F{q*9t22z>z%pL9$u%}|L4|ErV8?vAwTHHxieG{~}&tP#O zovA=FP@?+6j%@w=t(@jeE!74KTIB_Ck9-F}$|@a9n9CO;L6OWDRCi%2ua19aY?r+n ztsXjU4IQ)W1tjQbhi9VygzuTPXtyXf1_WGw)%xDQX|7Nyimqy`**9RYSjhf7^;+(& z#^%hWs*Qf}EZBFk6Ami^vXBqqVgAxM?J|#YO&mSt8%SGvhA(-b+ec`19@I-Iz(=X* zcXurUkh|ICg}Rk`Q>x)_eBIhU@raCma|X zGg^2kQ|7lb9oqM*#{A*CT@2qna2pxDjW3-(s2#^>yxJL^k@>o?cab>Y=I*agF+G$9 zbS5Sd!Miu$@|Ot!rl}O2K^6vTMqV}JGcpw3LV)`2!~tUX3!}n9huih_72k0mX88pV zAwU#S(RkVGfuz{D@#=#|0C!%Z>!a+tr391bfS@ce%#gR14zdwUmR>pdfM5cYrz=}P z@O7};9810OG>LE3T&BV|bPJpIq6C+JSM&^q(6$`{Ar2O|p;7um)LW{IH20}cUh2hM zv$gq>a5#)%zx~IrWOLuHb1mfFnInTv&mUB1SM9JA%|PlqVbbz`fL8DNQngcQd1dsM zBdzS^#0?#D<0a${`(OnMBki*%`+@cyP#qM=;(WLM9WK6}-OSegqkq2$AS&bR+U7Vs zq#eB4k_r7Ixot*W$WZDbZ#boYCiCiKD3nWm%VlWJoC%$1kX1&|=hpLh0EM6~zPw?H z_j+7504mp|0LrhqUB2c_y3(cccUO9@eZDZ)2lyxEqOKujI1=W7zAvtm281jt#e{8?I3kd^TyjF zBo)rua7F4ZpMaQ+gc&g(Quu-W3C;+b?%<`}Zpra{!HcIYB?q!(zTi z7)6dAAadlFwe!Hr3#naIef=n`H3!nvfTEYdMPtPQrEhdg=2tl%VvfJ1LYXy)R)Mll zA!TcFd`TVH!HR$@zJ)^x1AwEc^$im>l)aCrYXUua2QO)kT;c(bF_bsVmrs zRZ2%De30V1bTe<$PyHH18-VuX9QD~8%~U+l1z&w7iyXq5)V8wG;lCvY=9WitU-i}? 
zp}OAJaJ~h8;R0uJfd~5knMIpv*Zw}1oBf`yqTxEA6SL(`u;-y(SycwxWb`&1 z)gr*UJ5&-^_zTnGMN2_ZZGUT(0_;gqOV%T}r(16Dvg7kovW~w_dm(k^PrP=>UBvK~ z9qsSnwgiDqN4>w5UGOW%%kdRr4A=fmw;3RuR(tV1pOXYjDHRYjxmq0Q=A7;Wz^&Mvm3jq|uDwSCGcB=n-l<5Z`!p;mf>uzan=XK0p`@O&N z-VvF69)9`EB@;+#7m5y{%?RJWSHr`s!CQ}+xq*>_sR|K{jSatxIp&K)X0JJ6VMG#O z=EgEkU??I^vZw-`m2!7U?R$Z0OMX>ZR?03lkTcb|e7ME{fQ&`nR6o~X9+*jea<$)7 zoxg(lD`$;vECNpsr@6R}xsh)1pv&10BYLan0NH%*a3joOV^5T`qR?;D0?YHE+VXXU z%%4!n8Qwq84SX*JctZ{=9#@s&otjCTuEc}VIWKM|mqLJ6Dl2OSBf<<+#pUI`LINrg zD(l^sKQSWJf=MJf-t}?xd6&#>nRhCEBTf*RbTz#VzS1ZEHX3%fHb$3rm|(xOJ}fxfmzr94R@mi>$kgW z7`bX`t;EBmGS3Ire}c`SDA|rY2T7l~3-y?w*svITNdPDc!ku^WF$50-QZsg|$8%cbH8IL19W+vWjo((x26u=IiZMOlM?=T3%^Id8IY7p)U?8tob9i-_&O*l%nZ>?nr zMz(NGFfZvo(9~|c`u4yMAJWf^Ou;0CeZtwlWYNXvQZ@@+Y0s61H$6LE9x2rYx?&{X z)Kx_>@iJ-7j4MaB|I8jkNCvubuN=r{4Y-OWnrQO8I*&I~Aq4eCa_+DNfd~Kg)m?|c z19{Wz8yU*OA&-ui^Nc`!%#8g)>Yl%{a=;wC7QJ8BF<*ZVWadzHDpKdzb3QbUrtlEk zvTFa_vb?Q3n@=;9GDcNF_R>sD>+Wl)?msnqb-od@KbO>9uR78V=5{j+?xmV|k@lYg z)f&@}0Ie|;O$tCT>?#W(?5ddg6Og|&e(unqW(wah%$;(W0s6kd{LrvSY6uBFG(QI@ggT89b0DgemYF(Umue6JOacCTL&&PVuLwNvaCSn4Z#G^{H$U;!L&tQ%dXc7 z=|~1nTBUn6*UZUvPbd< zuCH^{BombSsuc3CT(*KCz?@&mwP4W_ky5!J(2x*_mAP=pIZC96Y!j%SlZ)4K+lG=? zMw_a26TMP_o)Tu^S|tqC=4@A^`C<@MoJ5`5W`AQfm1|nY%g}0}76Jn^Qm$DUsQf+{ zwb=07PnYUiv68&LbVlm;)v>X)vzx0nd@pF31$AacBi}$Ve^S19cs}431yiNjf~mq( zUxNc^l>^9eN(a0tS(Nz<#Z32}ADWYyyPiOWjf%d%tMeCW;Er{B54?~IhXpz=qzFIM zp)GKlA)x!KBldF#5%yu|Ka@~@{ir8Uk$ie)7*MUoHKDd2!{|1_GS%7AHLJ6m$KU?k zrTY*no3dI{UlCXahyw=cY!&tDLd2~dnyBpw|2-A2vtFpz5i0P`xTU{)=_Bk7B#?AK zQDJaBfISji1@(C~T6vkh0#TQ}%_EOP6JR&Bzc%PIZ{SCF1weFc{DF?uAyMTA-D&n! z5~{YFQN6&K@!3Wu_qOp5u$>I`(7ii{gse z*9CQm-Uh}=qM8;Go*pK;aGd|Q)O*1fqV|G6a(wedHVeLRw&mB~aE}YV@Vg!k^EkeF zE-m=N1z-4w=RbFNh#9epZbv;Y+orqq)K*}LuXCzNIz$QbDqf+vea6(hrx^r4%Gn<< z8mql>1&FGE+-nal=iUQ}e8Qm79tRb5!2X(ePv7{p1KV^jv+w61mdOFUUK;CZSy6!( z0)2ru=uK+>k{I&4e$hf^^b`39`+6&~fbUli$^OcVEHrU!3jl#a3g1+-Y@IU3HZEJ}1<*(Yb_j*O?Z)F1i OX&gMNntZ_6_kRGG-Q`{Y literal 0 HcmV?d00001 From 3da6d7e7340864a1d8bb062835536d1f1eb576df Mon Sep 17 00:00:00 2001 From: Honei <915574300@qq.com> Date: Fri, 28 Jan 2022 12:31:22 +0800 Subject: [PATCH 09/22] [vector]add voxceleb1 data prepare scripts (#1409) * add voxceleb1 data prepare scripts * add voxceleb1 vox1_test_wav.zip md5sum * optimize the voxceleb1 data prepare logic * voxceleb1 data prepare: adjust the code a little --- .pre-commit-config.yaml | 7 +- dataset/voxceleb/README.md | 10 ++ dataset/voxceleb/voxceleb1.py | 173 ++++++++++++++++++++++++++++++++++ 3 files changed, 187 insertions(+), 3 deletions(-) create mode 100644 dataset/voxceleb/README.md create mode 100644 dataset/voxceleb/voxceleb1.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2f80e46b..60f0b92f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,12 @@ +repos: - repo: https://github.com/pre-commit/mirrors-yapf.git - sha: v0.16.0 + rev: v0.16.0 hooks: - id: yapf files: \.py$ exclude: (?=third_party).*(\.py)$ - repo: https://github.com/pre-commit/pre-commit-hooks - sha: a11d9314b22d8f8c7556443875b731ef05965464 + rev: a11d9314b22d8f8c7556443875b731ef05965464 hooks: - id: check-merge-conflict - id: check-symlinks @@ -31,7 +32,7 @@ - --jobs=1 exclude: (?=third_party).*(\.py)$ - repo : https://github.com/Lucas-C/pre-commit-hooks - sha: v1.0.1 + rev: v1.0.1 hooks: - id: forbid-crlf files: \.md$ diff --git a/dataset/voxceleb/README.md b/dataset/voxceleb/README.md new file mode 100644 index 00000000..3efb3519 --- /dev/null +++ b/dataset/voxceleb/README.md @@ -0,0 +1,10 @@ +# 
[VoxCeleb](http://www.robots.ox.ac.uk/~vgg/data/voxceleb/) +VoxCeleb is an audio-visual dataset consisting of short clips of human speech, extracted from interview videos uploaded to YouTube。 + +VoxCeleb contains speech from speakers spanning a wide range of different ethnicities, accents, professions and ages. +All speaking face-tracks are captured "in the wild", with background chatter, laughter, overlapping speech, pose variation and different lighting conditions. +VoxCeleb consists of both audio and video. Each segment is at least 3 seconds long. + +The dataset consists of two versions, VoxCeleb1 and VoxCeleb2. Each version has it's own train/test split. For each we provide YouTube URLs, face detections and tracks, audio files, cropped face videos and speaker meta-data. There is no overlap between the two versions. + +more info in details refers to http://www.robots.ox.ac.uk/~vgg/data/voxceleb/ diff --git a/dataset/voxceleb/voxceleb1.py b/dataset/voxceleb/voxceleb1.py new file mode 100644 index 00000000..2c6dff22 --- /dev/null +++ b/dataset/voxceleb/voxceleb1.py @@ -0,0 +1,173 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Prepare VoxCeleb1 dataset + +create manifest files. +Manifest file is a json-format file with each line containing the +meta data (i.e. audio filepath, transcript and audio duration) +of each audio file in the data set. + +researchers should download the voxceleb1 dataset yourselves +through google form to get the username & password and unpack the data +""" +import argparse +import codecs +import glob +import json +import os +import subprocess +from pathlib import Path + +import soundfile + +from utils.utility import check_md5sum +from utils.utility import download +from utils.utility import unzip + +# all the data will be download in the current data/voxceleb directory default +DATA_HOME = os.path.expanduser('.') + +# if you use the http://www.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/ as the download base url +# you need to get the username & password via the google form + +# if you use the https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a as the download base url, +# you need use --no-check-certificate to connect the target download url + +BASE_URL = "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a" +DATA_LIST = { + "vox1_dev_wav_partaa": "e395d020928bc15670b570a21695ed96", + "vox1_dev_wav_partab": "bbfaaccefab65d82b21903e81a8a8020", + "vox1_dev_wav_partac": "017d579a2a96a077f40042ec33e51512", + "vox1_dev_wav_partad": "7bb1e9f70fddc7a678fa998ea8b3ba19", + "vox1_test_wav.zip": "185fdc63c3c739954633d50379a3d102", +} + +TARGET_DATA = "vox1_dev_wav_parta* vox1_dev_wav.zip ae63e55b951748cc486645f532ba230b" + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument( + "--target_dir", + default=DATA_HOME + "/voxceleb1/", + type=str, + help="Directory to save the voxceleb1 dataset. 
(default: %(default)s)") +parser.add_argument( + "--manifest_prefix", + default="manifest", + type=str, + help="Filepath prefix for output manifests. (default: %(default)s)") + +args = parser.parse_args() + + +def create_manifest(data_dir, manifest_path_prefix): + print("Creating manifest %s ..." % manifest_path_prefix) + json_lines = [] + data_path = os.path.join(data_dir, "wav", "**", "*.wav") + total_sec = 0.0 + total_text = 0.0 + total_num = 0 + speakers = set() + for audio_path in glob.glob(data_path, recursive=True): + audio_id = "/".join(audio_path.split("/")[-3:]) + utt2spk = audio_path.split("/")[-3] + duration = soundfile.info(audio_path).duration + text = "" + json_lines.append( + json.dumps( + { + "utt": audio_id, + "utt2spk": str(utt2spk), + "feat": audio_path, + "feat_shape": (duration, ), + "text": text # compatible with asr data format + }, + ensure_ascii=False)) + + total_sec += duration + total_text += len(text) + total_num += 1 + speakers.add(utt2spk) + + with codecs.open(manifest_path_prefix, 'w', encoding='utf-8') as f: + for line in json_lines: + f.write(line + "\n") + + manifest_dir = os.path.dirname(manifest_path_prefix) + # data_dir_name refer to voxceleb1, which is used to distingush the voxceleb2 dataset info + data_dir_name = Path(data_dir).name + meta_path = os.path.join(manifest_dir, data_dir_name) + ".meta" + with codecs.open(meta_path, 'w', encoding='utf-8') as f: + print(f"{total_num} utts", file=f) + print(f"{len(speakers)} speakers", file=f) + print(f"{total_sec / (60 * 60)} h", file=f) + print(f"{total_text} text", file=f) + print(f"{total_text / total_sec} text/sec", file=f) + print(f"{total_sec / total_num} sec/utt", file=f) + + +def prepare_dataset(base_url, data_list, target_dir, manifest_path, + target_data): + data_dir = os.path.join(target_dir, "voxceleb1") + if not os.path.exists(target_dir): + os.mkdir(target_dir) + + # wav directory already exists, it need do nothing + if not os.path.exists(os.path.join(target_dir, "wav")): + # download all dataset part + for zip_part in data_list.keys(): + download_url = base_url + "/" + zip_part + " --no-check-certificate " + download( + url=download_url, + md5sum=data_list[zip_part], + target_dir=target_dir) + + # pack the all part to target zip file + all_target_part, target_name, target_md5sum = target_data.split() + target_name = os.path.join(target_dir, target_name) + if not os.path.exists(target_name): + pack_part_cmd = "cat {}/{} > {}/{}".format( + target_dir, all_target_part, target_dir, target_name) + subprocess.call(pack_part_cmd, shell=True) + + # check the target zip file md5sum + if not check_md5sum(target_name, target_md5sum): + raise RuntimeError("{} MD5 checkssum failed".format(target_name)) + else: + print("Check {} md5sum successfully".format(target_name)) + + # unzip the all zip file + unzip(target_name, target_dir) + unzip(os.path.join(target_dir, "vox1_test_wav.zip"), target_dir) + + # create the manifest file + create_manifest( + data_dir=args.target_dir, manifest_path_prefix=args.manifest_prefix) + + +def main(): + if args.target_dir.startswith('~'): + args.target_dir = os.path.expanduser(args.target_dir) + + prepare_dataset( + base_url=BASE_URL, + data_list=DATA_LIST, + target_dir=args.target_dir, + manifest_path=args.manifest_prefix, + target_data=TARGET_DATA) + + print("Manifest prepare done!") + + +if __name__ == '__main__': + main() From 8891621e2ca8a44635c8936700c8ac10d3f7c117 Mon Sep 17 00:00:00 2001 From: Honei <915574300@qq.com> Date: Fri, 28 Jan 2022 16:34:48 +0800 Subject: 
[PATCH 10/22] [vector]voxceleb1 data prepare: compatible kaldi trial and data split dir (#1412) * compatible kaldi trial and data split dir * remove unuseless code --- dataset/voxceleb/voxceleb1.py | 59 ++++++++++++++++++++++------------- 1 file changed, 37 insertions(+), 22 deletions(-) diff --git a/dataset/voxceleb/voxceleb1.py b/dataset/voxceleb/voxceleb1.py index 2c6dff22..ce744751 100644 --- a/dataset/voxceleb/voxceleb1.py +++ b/dataset/voxceleb/voxceleb1.py @@ -45,15 +45,26 @@ DATA_HOME = os.path.expanduser('.') # you need use --no-check-certificate to connect the target download url BASE_URL = "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a" -DATA_LIST = { + +# dev data +DEV_LIST = { "vox1_dev_wav_partaa": "e395d020928bc15670b570a21695ed96", "vox1_dev_wav_partab": "bbfaaccefab65d82b21903e81a8a8020", "vox1_dev_wav_partac": "017d579a2a96a077f40042ec33e51512", "vox1_dev_wav_partad": "7bb1e9f70fddc7a678fa998ea8b3ba19", - "vox1_test_wav.zip": "185fdc63c3c739954633d50379a3d102", } +DEV_TARGET_DATA = "vox1_dev_wav_parta* vox1_dev_wav.zip ae63e55b951748cc486645f532ba230b" + +# test data +TEST_LIST = {"vox1_test_wav.zip": "185fdc63c3c739954633d50379a3d102"} +TEST_TARGET_DATA = "vox1_test_wav.zip vox1_test_wav.zip 185fdc63c3c739954633d50379a3d102" -TARGET_DATA = "vox1_dev_wav_parta* vox1_dev_wav.zip ae63e55b951748cc486645f532ba230b" +# kaldi trial +# this trial file is organized by kaldi according the official file, +# which is a little different with the official trial veri_test2.txt +KALDI_BASE_URL = "http://www.openslr.org/resources/49/" +TRIAL_LIST = {"voxceleb1_test_v2.txt": "29fc7cc1c5d59f0816dc15d6e8be60f7"} +TRIAL_TARGET_DATA = "voxceleb1_test_v2.txt voxceleb1_test_v2.txt 29fc7cc1c5d59f0816dc15d6e8be60f7" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( @@ -69,7 +80,6 @@ parser.add_argument( args = parser.parse_args() - def create_manifest(data_dir, manifest_path_prefix): print("Creating manifest %s ..." % manifest_path_prefix) json_lines = [] @@ -79,7 +89,7 @@ def create_manifest(data_dir, manifest_path_prefix): total_num = 0 speakers = set() for audio_path in glob.glob(data_path, recursive=True): - audio_id = "/".join(audio_path.split("/")[-3:]) + audio_id = "-".join(audio_path.split("/")[-3:]) utt2spk = audio_path.split("/")[-3] duration = soundfile.info(audio_path).duration text = "" @@ -99,14 +109,17 @@ def create_manifest(data_dir, manifest_path_prefix): total_num += 1 speakers.add(utt2spk) + # data_dir_name refer to dev or test + # voxceleb1 is given explicit in the path + data_dir_name = Path(data_dir).name + manifest_path_prefix = manifest_path_prefix + "." + data_dir_name with codecs.open(manifest_path_prefix, 'w', encoding='utf-8') as f: for line in json_lines: f.write(line + "\n") manifest_dir = os.path.dirname(manifest_path_prefix) - # data_dir_name refer to voxceleb1, which is used to distingush the voxceleb2 dataset info - data_dir_name = Path(data_dir).name - meta_path = os.path.join(manifest_dir, data_dir_name) + ".meta" + meta_path = os.path.join(manifest_dir, "voxceleb1." 
+ + data_dir_name) + ".meta" with codecs.open(meta_path, 'w', encoding='utf-8') as f: print(f"{total_num} utts", file=f) print(f"{len(speakers)} speakers", file=f) @@ -115,10 +128,8 @@ def create_manifest(data_dir, manifest_path_prefix): print(f"{total_text / total_sec} text/sec", file=f) print(f"{total_sec / total_num} sec/utt", file=f) - def prepare_dataset(base_url, data_list, target_dir, manifest_path, target_data): - data_dir = os.path.join(target_dir, "voxceleb1") if not os.path.exists(target_dir): os.mkdir(target_dir) @@ -126,7 +137,7 @@ def prepare_dataset(base_url, data_list, target_dir, manifest_path, if not os.path.exists(os.path.join(target_dir, "wav")): # download all dataset part for zip_part in data_list.keys(): - download_url = base_url + "/" + zip_part + " --no-check-certificate " + download_url = " --no-check-certificate " + base_url + "/" + zip_part download( url=download_url, md5sum=data_list[zip_part], @@ -136,8 +147,8 @@ def prepare_dataset(base_url, data_list, target_dir, manifest_path, all_target_part, target_name, target_md5sum = target_data.split() target_name = os.path.join(target_dir, target_name) if not os.path.exists(target_name): - pack_part_cmd = "cat {}/{} > {}/{}".format( - target_dir, all_target_part, target_dir, target_name) + pack_part_cmd = "cat {}/{} > {}".format(target_dir, all_target_part, + target_name) subprocess.call(pack_part_cmd, shell=True) # check the target zip file md5sum @@ -147,13 +158,11 @@ def prepare_dataset(base_url, data_list, target_dir, manifest_path, print("Check {} md5sum successfully".format(target_name)) # unzip the all zip file - unzip(target_name, target_dir) - unzip(os.path.join(target_dir, "vox1_test_wav.zip"), target_dir) + if target_name.endswith(".zip"): + unzip(target_name, target_dir) # create the manifest file - create_manifest( - data_dir=args.target_dir, manifest_path_prefix=args.manifest_prefix) - + create_manifest(data_dir=target_dir, manifest_path_prefix=manifest_path) def main(): if args.target_dir.startswith('~'): @@ -161,13 +170,19 @@ def main(): prepare_dataset( base_url=BASE_URL, - data_list=DATA_LIST, - target_dir=args.target_dir, + data_list=DEV_LIST, + target_dir=os.path.join(args.target_dir, "dev"), manifest_path=args.manifest_prefix, - target_data=TARGET_DATA) + target_data=DEV_TARGET_DATA) - print("Manifest prepare done!") + prepare_dataset( + base_url=BASE_URL, + data_list=TEST_LIST, + target_dir=os.path.join(args.target_dir, "test"), + manifest_path=args.manifest_prefix, + target_data=TEST_TARGET_DATA) + print("Manifest prepare done!") if __name__ == '__main__': main() From 89e69ee10ee02b875af663146bc46fcf095e812a Mon Sep 17 00:00:00 2001 From: TianYuan Date: Sat, 29 Jan 2022 10:09:36 +0800 Subject: [PATCH 11/22] [TTS]fix tacotron2 dygraph to static (#1414) * fix tacotron2 dygraph to static , test=tts * fix tacotron2 dygraph to static , test=tts * simplify synthesize_e2e.py , test=tts --- examples/csmsc/tts0/local/inference.sh | 51 +++++++++ examples/csmsc/tts0/local/synthesize_e2e.sh | 5 +- paddlespeech/t2s/exps/inference.py | 2 +- paddlespeech/t2s/exps/synthesize_e2e.py | 16 +-- .../t2s/models/new_tacotron2/tacotron2.py | 1 + .../t2s/modules/tacotron2/attentions.py | 16 +-- paddlespeech/t2s/modules/tacotron2/decoder.py | 104 ++++++++++-------- paddlespeech/t2s/modules/tacotron2/encoder.py | 11 +- 8 files changed, 132 insertions(+), 74 deletions(-) create mode 100755 examples/csmsc/tts0/local/inference.sh diff --git a/examples/csmsc/tts0/local/inference.sh b/examples/csmsc/tts0/local/inference.sh 
new file mode 100755 index 00000000..e417d748 --- /dev/null +++ b/examples/csmsc/tts0/local/inference.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +train_output_path=$1 + +stage=0 +stop_stage=0 + +if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then + python3 ${BIN_DIR}/../inference.py \ + --inference_dir=${train_output_path}/inference \ + --am=tacotron2_csmsc \ + --voc=pwgan_csmsc \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=${train_output_path}/pd_infer_out \ + --phones_dict=dump/phone_id_map.txt +fi + +# for more GAN Vocoders +# multi band melgan +if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then + python3 ${BIN_DIR}/../inference.py \ + --inference_dir=${train_output_path}/inference \ + --am=tacotron2_csmsc \ + --voc=mb_melgan_csmsc \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=${train_output_path}/pd_infer_out \ + --phones_dict=dump/phone_id_map.txt +fi + +# style melgan +# style melgan's Dygraph to Static Graph is not ready now +if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then + python3 ${BIN_DIR}/../inference.py \ + --inference_dir=${train_output_path}/inference \ + --am=tacotron2_csmsc \ + --voc=style_melgan_csmsc \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=${train_output_path}/pd_infer_out \ + --phones_dict=dump/phone_id_map.txt +fi + +# hifigan +if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then + python3 ${BIN_DIR}/../inference.py \ + --inference_dir=${train_output_path}/inference \ + --am=tacotron2_csmsc \ + --voc=hifigan_csmsc \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=${train_output_path}/pd_infer_out \ + --phones_dict=dump/phone_id_map.txt +fi \ No newline at end of file diff --git a/examples/csmsc/tts0/local/synthesize_e2e.sh b/examples/csmsc/tts0/local/synthesize_e2e.sh index fe5d11d4..c957df87 100755 --- a/examples/csmsc/tts0/local/synthesize_e2e.sh +++ b/examples/csmsc/tts0/local/synthesize_e2e.sh @@ -22,8 +22,9 @@ if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then --lang=zh \ --text=${BIN_DIR}/../sentences.txt \ --output_dir=${train_output_path}/test_e2e \ - --inference_dir=${train_output_path}/inference \ - --phones_dict=dump/phone_id_map.txt + --phones_dict=dump/phone_id_map.txt \ + --inference_dir=${train_output_path}/inference + fi # for more GAN Vocoders diff --git a/paddlespeech/t2s/exps/inference.py b/paddlespeech/t2s/exps/inference.py index 37afd0ab..c3510bea 100644 --- a/paddlespeech/t2s/exps/inference.py +++ b/paddlespeech/t2s/exps/inference.py @@ -33,7 +33,7 @@ def main(): default='fastspeech2_csmsc', choices=[ 'speedyspeech_csmsc', 'fastspeech2_csmsc', 'fastspeech2_aishell3', - 'fastspeech2_vctk' + 'fastspeech2_vctk', 'tacotron2_csmsc' ], help='Choose acoustic model type of tts task.') parser.add_argument( diff --git a/paddlespeech/t2s/exps/synthesize_e2e.py b/paddlespeech/t2s/exps/synthesize_e2e.py index 8ebfcfe7..8fca935a 100644 --- a/paddlespeech/t2s/exps/synthesize_e2e.py +++ b/paddlespeech/t2s/exps/synthesize_e2e.py @@ -178,10 +178,7 @@ def evaluate(args): am_inference = jit.to_static( am_inference, input_spec=[InputSpec([-1], dtype=paddle.int64)]) - paddle.jit.save(am_inference, - os.path.join(args.inference_dir, args.am)) - am_inference = paddle.jit.load( - os.path.join(args.inference_dir, args.am)) + elif am_name == 'speedyspeech': if am_dataset in {"aishell3", "vctk"} and args.speaker_dict: am_inference = jit.to_static( @@ -200,10 +197,13 @@ def evaluate(args): InputSpec([-1], dtype=paddle.int64) ]) - paddle.jit.save(am_inference, - os.path.join(args.inference_dir, args.am)) - am_inference = 
paddle.jit.load( - os.path.join(args.inference_dir, args.am)) + elif am_name == 'tacotron2': + am_inference = jit.to_static( + am_inference, input_spec=[InputSpec([-1], dtype=paddle.int64)]) + + paddle.jit.save(am_inference, os.path.join(args.inference_dir, args.am)) + am_inference = paddle.jit.load( + os.path.join(args.inference_dir, args.am)) # vocoder voc_inference = jit.to_static( diff --git a/paddlespeech/t2s/models/new_tacotron2/tacotron2.py b/paddlespeech/t2s/models/new_tacotron2/tacotron2.py index 6a6d1073..bd4129fb 100644 --- a/paddlespeech/t2s/models/new_tacotron2/tacotron2.py +++ b/paddlespeech/t2s/models/new_tacotron2/tacotron2.py @@ -432,6 +432,7 @@ class Tacotron2(nn.Layer): # inference h = self.enc.inference(x) + if self.spk_num is not None: sid_emb = self.sid_emb(spk_id.reshape([-1])) h = h + sid_emb diff --git a/paddlespeech/t2s/modules/tacotron2/attentions.py b/paddlespeech/t2s/modules/tacotron2/attentions.py index 710e326d..af7a94f3 100644 --- a/paddlespeech/t2s/modules/tacotron2/attentions.py +++ b/paddlespeech/t2s/modules/tacotron2/attentions.py @@ -157,7 +157,7 @@ class AttLoc(nn.Layer): paddle.Tensor previous attention weights (B, T_max) """ - batch = len(enc_hs_pad) + batch = paddle.shape(enc_hs_pad)[0] # pre-compute all h outside the decoder loop if self.pre_compute_enc_h is None or self.han_mode: # (utt, frame, hdim) @@ -172,33 +172,30 @@ class AttLoc(nn.Layer): dec_z = dec_z.reshape([batch, self.dunits]) # initialize attention weight with uniform dist. - if att_prev is None: + if paddle.sum(att_prev) == 0: # if no bias, 0 0-pad goes 0 - att_prev = 1.0 - make_pad_mask(enc_hs_len) att_prev = att_prev / enc_hs_len.unsqueeze(-1) # att_prev: (utt, frame) -> (utt, 1, 1, frame) # -> (utt, att_conv_chans, 1, frame) - att_conv = self.loc_conv(att_prev.reshape([batch, 1, 1, self.h_length])) # att_conv: (utt, att_conv_chans, 1, frame) -> (utt, frame, att_conv_chans) att_conv = att_conv.squeeze(2).transpose([0, 2, 1]) # att_conv: (utt, frame, att_conv_chans) -> (utt, frame, att_dim) att_conv = self.mlp_att(att_conv) - - # dec_z_tiled: (utt, frame, att_dim) + # dec_z_tiled: (utt, frame, att_dim) dec_z_tiled = self.mlp_dec(dec_z).reshape([batch, 1, self.att_dim]) # dot with gvec # (utt, frame, att_dim) -> (utt, frame) - e = self.gvec( - paddle.tanh(att_conv + self.pre_compute_enc_h + - dec_z_tiled)).squeeze(2) + e = paddle.tanh(att_conv + self.pre_compute_enc_h + dec_z_tiled) + e = self.gvec(e).squeeze(2) # NOTE: consider zero padding when compute w. 
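+        # positions beyond each utterance's real length are masked to -inf, so they get no attention weight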
if self.mask is None: self.mask = make_pad_mask(enc_hs_len) + e = masked_fill(e, self.mask, -float("inf")) # apply monotonic attention constraint (mainly for TTS) if last_attended_idx is not None: @@ -211,7 +208,6 @@ class AttLoc(nn.Layer): # utt x hdim c = paddle.sum( self.enc_h * w.reshape([batch, self.h_length, 1]), axis=1) - return c, w diff --git a/paddlespeech/t2s/modules/tacotron2/decoder.py b/paddlespeech/t2s/modules/tacotron2/decoder.py index fc15adfd..3622fd7a 100644 --- a/paddlespeech/t2s/modules/tacotron2/decoder.py +++ b/paddlespeech/t2s/modules/tacotron2/decoder.py @@ -15,7 +15,6 @@ """Tacotron2 decoder related modules.""" import paddle import paddle.nn.functional as F -import six from paddle import nn from paddlespeech.t2s.modules.tacotron2.attentions import AttForwardTA @@ -59,7 +58,7 @@ class Prenet(nn.Layer): super().__init__() self.dropout_rate = dropout_rate self.prenet = nn.LayerList() - for layer in six.moves.range(n_layers): + for layer in range(n_layers): n_inputs = idim if layer == 0 else n_units self.prenet.append( nn.Sequential(nn.Linear(n_inputs, n_units), nn.ReLU())) @@ -78,7 +77,7 @@ class Prenet(nn.Layer): Batch of output tensors (B, ..., odim). """ - for i in six.moves.range(len(self.prenet)): + for i in range(len(self.prenet)): # F.dropout 引入了随机, tacotron2 的 dropout 是不能去掉的 x = F.dropout(self.prenet[i](x)) return x @@ -129,7 +128,7 @@ class Postnet(nn.Layer): """ super().__init__() self.postnet = nn.LayerList() - for layer in six.moves.range(n_layers - 1): + for layer in range(n_layers - 1): ichans = odim if layer == 0 else n_chans ochans = odim if layer == n_layers - 1 else n_chans if use_batch_norm: @@ -196,7 +195,7 @@ class Postnet(nn.Layer): Batch of padded output tensor. (B, odim, Tmax). """ - for i in six.moves.range(len(self.postnet)): + for i in range(len(self.postnet)): xs = self.postnet[i](xs) return xs @@ -360,7 +359,7 @@ class Decoder(nn.Layer): # define lstm network prenet_units = prenet_units if prenet_layers != 0 else odim self.lstm = nn.LayerList() - for layer in six.moves.range(dlayers): + for layer in range(dlayers): iunits = idim + prenet_units if layer == 0 else dunits lstm = nn.LSTMCell(iunits, dunits) if zoneout_rate > 0.0: @@ -437,47 +436,50 @@ class Decoder(nn.Layer): # initialize hidden states of decoder c_list = [self._zero_state(hs)] z_list = [self._zero_state(hs)] - for _ in six.moves.range(1, len(self.lstm)): - c_list += [self._zero_state(hs)] - z_list += [self._zero_state(hs)] + for _ in range(1, len(self.lstm)): + c_list.append(self._zero_state(hs)) + z_list.append(self._zero_state(hs)) prev_out = paddle.zeros([paddle.shape(hs)[0], self.odim]) # initialize attention - prev_att_w = None + prev_att_ws = [] + prev_att_w = paddle.zeros(paddle.shape(hlens)) + prev_att_ws.append(prev_att_w) self.att.reset() # loop for an output sequence outs, logits, att_ws = [], [], [] for y in ys.transpose([1, 0, 2]): if self.use_att_extra_inputs: - att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w, + att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_ws[-1], prev_out) else: - att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w) + att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_ws[-1]) prenet_out = self.prenet( prev_out) if self.prenet is not None else prev_out xs = paddle.concat([att_c, prenet_out], axis=1) # we only use the second output of LSTMCell in paddle _, next_hidden = self.lstm[0](xs, (z_list[0], c_list[0])) z_list[0], c_list[0] = next_hidden - for i in six.moves.range(1, len(self.lstm)): + for i in range(1, 
len(self.lstm)): # we only use the second output of LSTMCell in paddle _, next_hidden = self.lstm[i](z_list[i - 1], (z_list[i], c_list[i])) z_list[i], c_list[i] = next_hidden zcs = (paddle.concat([z_list[-1], att_c], axis=1) if self.use_concate else z_list[-1]) - outs += [ - self.feat_out(zcs).reshape([paddle.shape(hs)[0], self.odim, -1]) - ] - logits += [self.prob_out(zcs)] - att_ws += [att_w] + outs.append( + self.feat_out(zcs).reshape([paddle.shape(hs)[0], self.odim, -1 + ])) + logits.append(self.prob_out(zcs)) + att_ws.append(att_w) # teacher forcing prev_out = y - if self.cumulate_att_w and prev_att_w is not None: + if self.cumulate_att_w and paddle.sum(prev_att_w) != 0: prev_att_w = prev_att_w + att_w # Note: error when use += else: prev_att_w = att_w + prev_att_ws.append(prev_att_w) # (B, Lmax) logits = paddle.concat(logits, axis=1) # (B, odim, Lmax) @@ -552,6 +554,7 @@ class Decoder(nn.Layer): .. _`Deep Voice 3`: https://arxiv.org/abs/1710.07654 """ # setup + assert len(paddle.shape(h)) == 2 hs = h.unsqueeze(0) ilens = paddle.shape(h)[0] @@ -561,13 +564,16 @@ class Decoder(nn.Layer): # initialize hidden states of decoder c_list = [self._zero_state(hs)] z_list = [self._zero_state(hs)] - for _ in six.moves.range(1, len(self.lstm)): - c_list += [self._zero_state(hs)] - z_list += [self._zero_state(hs)] + for _ in range(1, len(self.lstm)): + c_list.append(self._zero_state(hs)) + z_list.append(self._zero_state(hs)) prev_out = paddle.zeros([1, self.odim]) # initialize attention - prev_att_w = None + prev_att_ws = [] + prev_att_w = paddle.zeros([ilens]) + prev_att_ws.append(prev_att_w) + self.att.reset() # setup for attention constraint @@ -579,6 +585,7 @@ class Decoder(nn.Layer): # loop for an output sequence idx = 0 outs, att_ws, probs = [], [], [] + prob = paddle.zeros([1]) while True: # updated index idx += self.reduction_factor @@ -589,7 +596,7 @@ class Decoder(nn.Layer): hs, ilens, z_list[0], - prev_att_w, + prev_att_ws[-1], prev_out, last_attended_idx=last_attended_idx, backward_window=backward_window, @@ -599,19 +606,20 @@ class Decoder(nn.Layer): hs, ilens, z_list[0], - prev_att_w, + prev_att_ws[-1], last_attended_idx=last_attended_idx, backward_window=backward_window, forward_window=forward_window, ) - att_ws += [att_w] + att_ws.append(att_w) prenet_out = self.prenet( prev_out) if self.prenet is not None else prev_out xs = paddle.concat([att_c, prenet_out], axis=1) # we only use the second output of LSTMCell in paddle _, next_hidden = self.lstm[0](xs, (z_list[0], c_list[0])) + z_list[0], c_list[0] = next_hidden - for i in six.moves.range(1, len(self.lstm)): + for i in range(1, len(self.lstm)): # we only use the second output of LSTMCell in paddle _, next_hidden = self.lstm[i](z_list[i - 1], (z_list[i], c_list[i])) @@ -619,38 +627,38 @@ class Decoder(nn.Layer): zcs = (paddle.concat([z_list[-1], att_c], axis=1) if self.use_concate else z_list[-1]) # [(1, odim, r), ...] - outs += [self.feat_out(zcs).reshape([1, self.odim, -1])] + outs.append(self.feat_out(zcs).reshape([1, self.odim, -1])) + + prob = F.sigmoid(self.prob_out(zcs))[0] + probs.append(prob) - # [(r), ...] 
- probs += [F.sigmoid(self.prob_out(zcs))[0]] if self.output_activation_fn is not None: prev_out = self.output_activation_fn( outs[-1][:, :, -1]) # (1, odim) else: prev_out = outs[-1][:, :, -1] # (1, odim) - if self.cumulate_att_w and prev_att_w is not None: + if self.cumulate_att_w and paddle.sum(prev_att_w) != 0: prev_att_w = prev_att_w + att_w # Note: error when use += else: prev_att_w = att_w + prev_att_ws.append(prev_att_w) if use_att_constraint: last_attended_idx = int(att_w.argmax()) - # check whether to finish generation - if sum(paddle.cast(probs[-1] >= threshold, - 'int64')) > 0 or idx >= maxlen: + if prob >= threshold or idx >= maxlen: # check mininum length if idx < minlen: continue - # (1, odim, L) - outs = paddle.concat(outs, axis=2) - if self.postnet is not None: - # (1, odim, L) - outs = outs + self.postnet(outs) - # (L, odim) - outs = outs.transpose([0, 2, 1]).squeeze(0) - probs = paddle.concat(probs, axis=0) - att_ws = paddle.concat(att_ws, axis=0) break + # (1, odim, L) + outs = paddle.concat(outs, axis=2) + if self.postnet is not None: + # (1, odim, L) + outs = outs + self.postnet(outs) + # (L, odim) + outs = outs.transpose([0, 2, 1]).squeeze(0) + probs = paddle.concat(probs, axis=0) + att_ws = paddle.concat(att_ws, axis=0) if self.output_activation_fn is not None: outs = self.output_activation_fn(outs) @@ -685,9 +693,9 @@ class Decoder(nn.Layer): # initialize hidden states of decoder c_list = [self._zero_state(hs)] z_list = [self._zero_state(hs)] - for _ in six.moves.range(1, len(self.lstm)): - c_list += [self._zero_state(hs)] - z_list += [self._zero_state(hs)] + for _ in range(1, len(self.lstm)): + c_list.append(self._zero_state(hs)) + z_list.append(self._zero_state(hs)) prev_out = paddle.zeros([paddle.shape(hs)[0], self.odim]) # initialize attention @@ -702,14 +710,14 @@ class Decoder(nn.Layer): prev_out) else: att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w) - att_ws += [att_w] + att_ws.append(att_w) prenet_out = self.prenet( prev_out) if self.prenet is not None else prev_out xs = paddle.concat([att_c, prenet_out], axis=1) # we only use the second output of LSTMCell in paddle _, next_hidden = self.lstm[0](xs, (z_list[0], c_list[0])) z_list[0], c_list[0] = next_hidden - for i in six.moves.range(1, len(self.lstm)): + for i in range(1, len(self.lstm)): z_list[i], c_list[i] = self.lstm[i](z_list[i - 1], (z_list[i], c_list[i])) # teacher forcing diff --git a/paddlespeech/t2s/modules/tacotron2/encoder.py b/paddlespeech/t2s/modules/tacotron2/encoder.py index b2ed30d1..80c213a1 100644 --- a/paddlespeech/t2s/modules/tacotron2/encoder.py +++ b/paddlespeech/t2s/modules/tacotron2/encoder.py @@ -14,7 +14,6 @@ # Modified from espnet(https://github.com/espnet/espnet) """Tacotron2 encoder related modules.""" import paddle -import six from paddle import nn @@ -88,7 +87,7 @@ class Encoder(nn.Layer): if econv_layers > 0: self.convs = nn.LayerList() - for layer in six.moves.range(econv_layers): + for layer in range(econv_layers): ichans = (embed_dim if layer == 0 and input_layer == "embed" else econv_chans) if use_batch_norm: @@ -130,6 +129,7 @@ class Encoder(nn.Layer): direction='bidirectional', bias_ih_attr=True, bias_hh_attr=True) + self.blstm.flatten_parameters() else: self.blstm = None @@ -157,7 +157,7 @@ class Encoder(nn.Layer): """ xs = self.embed(xs).transpose([0, 2, 1]) if self.convs is not None: - for i in six.moves.range(len(self.convs)): + for i in range(len(self.convs)): if self.use_residual: xs += self.convs[i](xs) else: @@ -167,7 +167,8 @@ class 
Encoder(nn.Layer): if not isinstance(ilens, paddle.Tensor): ilens = paddle.to_tensor(ilens) xs = xs.transpose([0, 2, 1]) - self.blstm.flatten_parameters() + # for dygraph to static graph + # self.blstm.flatten_parameters() # (B, Tmax, C) # see https://www.paddlepaddle.org.cn/documentation/docs/zh/faq/train_cn.html#paddletorch-nn-utils-rnn-pack-padded-sequencetorch-nn-utils-rnn-pad-packed-sequenceapi xs, _ = self.blstm(xs, sequence_length=ilens) @@ -191,6 +192,6 @@ class Encoder(nn.Layer): """ xs = x.unsqueeze(0) - ilens = paddle.to_tensor([x.shape[0]]) + ilens = paddle.shape(x)[0] return self.forward(xs, ilens)[0][0] From 1b0c034134005adbe2f3754dc8b301ca044d6613 Mon Sep 17 00:00:00 2001 From: TianYuan Date: Sat, 29 Jan 2022 03:32:08 +0000 Subject: [PATCH 12/22] update wavernn, test=tts --- examples/csmsc/voc6/conf/default.yaml | 11 ++-- paddlespeech/t2s/audio/__init__.py | 1 + paddlespeech/t2s/audio/codec.py | 51 +++++++++++++++++++ paddlespeech/t2s/datasets/vocoder_batch_fn.py | 35 +++---------- paddlespeech/t2s/models/wavernn/wavernn.py | 14 ++--- 5 files changed, 72 insertions(+), 40 deletions(-) create mode 100644 paddlespeech/t2s/audio/codec.py diff --git a/examples/csmsc/voc6/conf/default.yaml b/examples/csmsc/voc6/conf/default.yaml index 2c838fb9..e7696cf4 100644 --- a/examples/csmsc/voc6/conf/default.yaml +++ b/examples/csmsc/voc6/conf/default.yaml @@ -12,7 +12,6 @@ n_mels: 80 # Number of mel basis. fmin: 80 # Minimum freq in mel basis calculation. (Hz) fmax: 7600 # Maximum frequency in mel basis calculation. (Hz) mu_law: True # Recommended to suppress noise if using raw bitsexit() -peak_norm: True ########################################################### @@ -22,13 +21,14 @@ model: rnn_dims: 512 # Hidden dims of RNN Layers. fc_dims: 512 bits: 9 # Bit depth of signal - aux_context_window: 2 + aux_context_window: 2 # Context window size for auxiliary feature. + # If set to 2, previous 2 and future 2 frames will be considered. aux_channels: 80 # Number of channels for auxiliary feature conv. # Must be the same as num_mels. upsample_scales: [4, 5, 3, 5] # Upsampling scales. Prodcut of these must be the same as hop size, same with pwgan here - compute_dims: 128 - res_out_dims: 128 - res_blocks: 10 + compute_dims: 128 # Dims of Conv1D in MelResNet. + res_out_dims: 128 # Dims of output in MelResNet. + res_blocks: 10 # Number of residual blocks. mode: RAW # either 'raw'(softmax on raw bits) or 'mold' (sample from mixture of logistics) inference: gen_batched: True # whether to genenate sample in batch mode @@ -42,7 +42,6 @@ inference: batch_size: 64 # Batch size. batch_max_steps: 4500 # Length of each audio in batch. Make sure dividable by hop_size. num_workers: 2 # Number of workers in DataLoader. -valid_size: 50 ########################################################### # OPTIMIZER SETTING # diff --git a/paddlespeech/t2s/audio/__init__.py b/paddlespeech/t2s/audio/__init__.py index 7747b794..0deefc8b 100644 --- a/paddlespeech/t2s/audio/__init__.py +++ b/paddlespeech/t2s/audio/__init__.py @@ -12,5 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. from .audio import AudioProcessor +from .codec import * from .spec_normalizer import LogMagnitude from .spec_normalizer import NormalizerBase diff --git a/paddlespeech/t2s/audio/codec.py b/paddlespeech/t2s/audio/codec.py new file mode 100644 index 00000000..2a759ce4 --- /dev/null +++ b/paddlespeech/t2s/audio/codec.py @@ -0,0 +1,51 @@ +# Copyright (c) 2020 PaddlePaddle Authors. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import math + +import numpy as np +import paddle + + +# x: [0: 2**bit-1], return: [-1, 1] +def label_2_float(x, bits): + return 2 * x / (2**bits - 1.) - 1. + + +#x: [-1, 1], return: [0, 2**bits-1] +def float_2_label(x, bits): + assert abs(x).max() <= 1.0 + x = (x + 1.) * (2**bits - 1) / 2 + return x.clip(0, 2**bits - 1) + + +# y: [-1, 1], mu: 2**bits, return: [0, 2**bits-1] +# see https://en.wikipedia.org/wiki/%CE%9C-law_algorithm +# be careful the input `mu` here, which is +1 than that of the link above +def encode_mu_law(x, mu): + mu = mu - 1 + fx = np.sign(x) * np.log(1 + mu * np.abs(x)) / np.log(1 + mu) + return np.floor((fx + 1) / 2 * mu + 0.5) + + +# from_labels = True: +# y: [0: 2**bit-1], mu: 2**bits, return: [-1,1] +# from_labels = False: +# y: [-1, 1], return: [-1, 1] +def decode_mu_law(y, mu, from_labels=True): + # TODO: get rid of log2 - makes no sense + if from_labels: + y = label_2_float(y, math.log2(mu)) + mu = mu - 1 + x = paddle.sign(y) / mu * ((1 + mu)**paddle.abs(y) - 1) + return x diff --git a/paddlespeech/t2s/datasets/vocoder_batch_fn.py b/paddlespeech/t2s/datasets/vocoder_batch_fn.py index b1d22db9..d969a1d3 100644 --- a/paddlespeech/t2s/datasets/vocoder_batch_fn.py +++ b/paddlespeech/t2s/datasets/vocoder_batch_fn.py @@ -11,35 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import math - import numpy as np import paddle - -def label_2_float(x, bits): - return 2 * x / (2**bits - 1.) - 1. - - -def float_2_label(x, bits): - assert abs(x).max() <= 1.0 - x = (x + 1.) * (2**bits - 1) / 2 - return x.clip(0, 2**bits - 1) - - -def encode_mu_law(x, mu): - mu = mu - 1 - fx = np.sign(x) * np.log(1 + mu * np.abs(x)) / np.log(1 + mu) - return np.floor((fx + 1) / 2 * mu + 0.5) - - -def decode_mu_law(y, mu, from_labels=True): - # TODO: get rid of log2 - makes no sense - if from_labels: - y = label_2_float(y, math.log2(mu)) - mu = mu - 1 - x = paddle.sign(y) / mu * ((1 + mu)**paddle.abs(y) - 1) - return x +from paddlespeech.t2s.audio.codec import encode_mu_law +from paddlespeech.t2s.audio.codec import float_2_label +from paddlespeech.t2s.audio.codec import label_2_float class Clip(object): @@ -195,10 +172,12 @@ class WaveRNNClip(Clip): Returns ---------- Tensor - Auxiliary feature batch (B, C, T'), where - T = (T' - 2 * aux_context_window) * hop_size. + Input signal batch (B, 1, T). Tensor Target signal batch (B, 1, T). + Tensor + Auxiliary feature batch (B, C, T'), where + T = (T' - 2 * aux_context_window) * hop_size. 
""" # check length diff --git a/paddlespeech/t2s/models/wavernn/wavernn.py b/paddlespeech/t2s/models/wavernn/wavernn.py index f30879ed..fcf39a48 100644 --- a/paddlespeech/t2s/models/wavernn/wavernn.py +++ b/paddlespeech/t2s/models/wavernn/wavernn.py @@ -20,7 +20,7 @@ import paddle from paddle import nn from paddle.nn import functional as F -from paddlespeech.t2s.datasets.vocoder_batch_fn import decode_mu_law +from paddlespeech.t2s.audio.codec import decode_mu_law from paddlespeech.t2s.modules.losses import sample_from_discretized_mix_logistic from paddlespeech.t2s.modules.nets_utils import initialize from paddlespeech.t2s.modules.upsample import Stretch2D @@ -28,7 +28,7 @@ from paddlespeech.t2s.modules.upsample import Stretch2D class ResBlock(nn.Layer): def __init__(self, dims): - super(ResBlock, self).__init__() + super().__init__() self.conv1 = nn.Conv1D(dims, dims, kernel_size=1, bias_attr=False) self.conv2 = nn.Conv1D(dims, dims, kernel_size=1, bias_attr=False) self.batch_norm1 = nn.BatchNorm1D(dims) @@ -205,7 +205,7 @@ class WaveRNN(nn.Layer): if self.mode == 'RAW': self.n_classes = 2**bits elif self.mode == 'MOL': - self.n_classes = 30 + self.n_classes = 10 * 3 else: RuntimeError('Unknown model mode value - ', self.mode) @@ -333,7 +333,7 @@ class WaveRNN(nn.Layer): # (T, C_aux) -> (1, C_aux, T) c = paddle.transpose(c, [1, 0]).unsqueeze(0) T = paddle.shape(c)[-1] - wave_len = (T - 1) * self.hop_length + wave_len = T * self.hop_length # TODO remove two transpose op by modifying function pad_tensor c = self.pad_tensor( c.transpose([0, 2, 1]), pad=self.aux_context_window, @@ -396,6 +396,8 @@ class WaveRNN(nn.Layer): posterior = F.softmax(logits, axis=1) distrib = paddle.distribution.Categorical(posterior) # corresponding operate [np.floor((fx + 1) / 2 * mu + 0.5)] in enocde_mu_law + # distrib.sample([1])[0].cast('float32'): [0, 2**bits-1] + # sample: [-1, 1] sample = 2 * distrib.sample([1])[0].cast('float32') / ( self.n_classes - 1.) - 1. output.append(sample) @@ -418,9 +420,9 @@ class WaveRNN(nn.Layer): output = output[0] # Fade-out at the end to avoid signal cutting out suddenly - fade_out = paddle.linspace(1, 0, 20 * self.hop_length) + fade_out = paddle.linspace(1, 0, 10 * self.hop_length) output = output[:wave_len] - output[-20 * self.hop_length:] *= fade_out + output[-10 * self.hop_length:] *= fade_out self.train() From f428ec44319582a46241b3d8e0c7114ab6377890 Mon Sep 17 00:00:00 2001 From: huangyuxin Date: Sat, 5 Feb 2022 07:16:42 +0000 Subject: [PATCH 13/22] change log of cli/asr/infer --- paddlespeech/cli/asr/infer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/paddlespeech/cli/asr/infer.py b/paddlespeech/cli/asr/infer.py index 64b32520..6e14e0d6 100644 --- a/paddlespeech/cli/asr/infer.py +++ b/paddlespeech/cli/asr/infer.py @@ -185,8 +185,9 @@ class ASRExecutor(BaseExecutor): """ Download and returns pretrained resources path of current task. 
""" - assert tag in pretrained_models, 'Can not find pretrained resources of {}.'.format( - tag) + support_models = list(pretrained_models.keys()) + assert tag in pretrained_models, 'The model "{}" you want to use has not been supported, please choose other models.\nThe support models includes:\n\t\t{}\n'.format( + tag, '\n\t\t'.join(support_models)) res_path = os.path.join(MODEL_HOME, tag) decompressed_path = download_and_decompress(pretrained_models[tag], From 81264a108dce7d679c8ca34ff208303619fec2ec Mon Sep 17 00:00:00 2001 From: lizi <49679880@qq.com> Date: Tue, 8 Feb 2022 18:00:02 +0800 Subject: [PATCH 14/22] add Chinese doc for tts quick_start, test=doc --- docs/source/tts/quick_start_cn.md | 204 ++++++++++++++++++++++++++++++ 1 file changed, 204 insertions(+) create mode 100644 docs/source/tts/quick_start_cn.md diff --git a/docs/source/tts/quick_start_cn.md b/docs/source/tts/quick_start_cn.md new file mode 100644 index 00000000..7d473061 --- /dev/null +++ b/docs/source/tts/quick_start_cn.md @@ -0,0 +1,204 @@ +# 语音合成快速开始 +这些PaddleSpeech中的样例主要按数据集分类,我们主要使用的TTS数据集有: + +* CSMCS (普通话单发音人) +* AISHELL3 (普通话多发音人) +* LJSpeech (英文单发音人) +* VCTK (英文多发音人) + +PaddleSpeech的TTS模型具有以下映射关系: + +* tts0 - Tactron2 +* tts1 - TransformerTTS +* tts2 - SpeedySpeech +* tts3 - FastSpeech2 +* voc0 - WaveFlow +* voc1 - Parallel WaveGAN +* voc2 - MelGAN +* voc3 - MultiBand MelGAN +* voc4 - Style MelGAN +* voc5 - HiFiGAN +* vc0 - Tactron2 Voice Clone with GE2E +* vc1 - FastSpeech2 Voice Clone with GE2E + +## 快速开始 + +让我们以 FastSpeech2 + Parallel WaveGAN 和 CSMSC 数据集 为例. [examples/csmsc](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc) + +### 用CSMSC数据集训练Parallel WaveGAN + +- 进入目录 + ```bash + cd examples/csmsc/voc1 + ``` +- 设置环境变量 + ```bash + source path.sh + ``` + **在你开始做任何事情之前,必须先做这步** + 将 `MAIN_ROOT` 设置为项目目录. 使用 `parallelwave_gan` 模型作为 `MODEL`. + +- 运行 + ```bash + bash run.sh + ``` + 这只是一个演示,请确保源数据已经准备好,并且在下一个 `步骤` 之前每个 `步骤` 都运行正常. +### 用CSMSC数据集训练FastSpeech2 + +- 进入目录 + ```bash + cd examples/csmsc/tts3 + ``` + +- 设置环境变量 + ```bash + source path.sh + ``` + **在你开始做任何事情之前,必须先做这步** + 将 `MAIN_ROOT` 设置为项目目录. 使用 `fastspeech2` 模型作为 `MODEL`. + +- 运行 + ```bash + bash run.sh + ``` + 这只是一个演示,请确保源数据已经准备好,并且在下一个 `步骤` 之前每个 `步骤` 都运行正常. 
+ +`run.sh` 中主要包括以下步骤: + +- 设置路径。 +- 预处理数据集, +- 训练模型。 +- 从`metadata.jsonl`中合成波形 +- 从文本文件合成波形。(在声学模型中) +- 使用静态模型进行推理。(可选) + +有关更多详细信息,请参见examples中的`README.md` + +## TTS流水线 +本节介绍如何使用TTS提供的预训练模型,并对其进行推理。 + +TTS中的预训练模型在压缩包中提供。将其解压缩以获得如下文件夹: +**Acoustic Models:** + +```text +checkpoint_name +├── default.yaml +├── snapshot_iter_*.pdz +├── speech_stats.npy +├── phone_id_map.txt +├── spk_id_map.txt (optimal) +└── tone_id_map.txt (optimal) +``` +**Vocoders:** +```text +checkpoint_name +├── default.yaml +├── snapshot_iter_*.pdz +└── stats.npy +``` +- `default.yaml` 存储用于训练模型的配置。 +- `snapshot_iter_*.pdz` 是检查点文件,其中`*`是它经过训练的步骤。 +- `*_stats.npy` 是特征的统计文件,如果它在训练前已被标准化。 +- `phone_id_map.txt` 是音素到音素ID的映射关系。 +- `tone_id_map.txt` 是在训练声学模型之前分割音调和拼音时,音调到音调ID的映射关系。(例如在csmsc/speedyspeech的示例中) +- `spk_id_map.txt` 是多speaker声学模型中speaker到spk_ids的映射关系。 + +下面的示例代码显示了如何使用模型进行预测。 +### Acoustic Models声学模型(文本到频谱图) +下面的代码显示了如何使用“FastSpeech2”模型。加载预训练模型后,使用它和normalizer对象构建预测对象,然后使用`fastspeech2_inferencet(phone_ids)`生成频谱图,频谱图可进一步用于使用声码器合成原始音频。 + +```python +from pathlib import Path +import numpy as np +import paddle +import yaml +from yacs.config import CfgNode +from paddlespeech.t2s.models.fastspeech2 import FastSpeech2 +from paddlespeech.t2s.models.fastspeech2 import FastSpeech2Inference +from paddlespeech.t2s.modules.normalizer import ZScore +# examples/fastspeech2/baker/frontend.py +from frontend import Frontend + +# 加载预训练模型 +checkpoint_dir = Path("fastspeech2_nosil_baker_ckpt_0.4") +with open(checkpoint_dir / "phone_id_map.txt", "r") as f: + phn_id = [line.strip().split() for line in f.readlines()] +vocab_size = len(phn_id) +with open(checkpoint_dir / "default.yaml") as f: + fastspeech2_config = CfgNode(yaml.safe_load(f)) +odim = fastspeech2_config.n_mels +model = FastSpeech2( + idim=vocab_size, odim=odim, **fastspeech2_config["model"]) +model.set_state_dict( + paddle.load(args.fastspeech2_checkpoint)["main_params"]) +model.eval() + +# 加载特征文件 +stat = np.load(checkpoint_dir / "speech_stats.npy") +mu, std = stat +mu = paddle.to_tensor(mu) +std = paddle.to_tensor(std) +fastspeech2_normalizer = ZScore(mu, std) + +# 构建预测对象 +fastspeech2_inference = FastSpeech2Inference(fastspeech2_normalizer, model) + +# load Chinese Frontend +frontend = Frontend(checkpoint_dir / "phone_id_map.txt") + +# 构建一个中文前端 +sentence = "你好吗?" 
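+# 说明:前端会把输入文本转成音素 id,返回的 phone_ids 是一个列表,
+# 长文本可能被切分成多个片段,所以下面对每个片段分别推理,再把梅尔频谱拼接成完整结果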
+input_ids = frontend.get_input_ids(sentence, merge_sentences=True) +phone_ids = input_ids["phone_ids"] +flags = 0 +# 构建预测对象加载中文前端,对中文文本前端的输出进行分段 +for part_phone_ids in phone_ids: + with paddle.no_grad(): + temp_mel = fastspeech2_inference(part_phone_ids) + if flags == 0: + mel = temp_mel + flags = 1 + else: + mel = paddle.concat([mel, temp_mel]) +``` + +### Vcoder声码器(谱图到波形) +下面的代码显示了如何使用` Parallel WaveGAN` 模型。像上面的例子一样,加载预训练模型后,使用它和normalizer对象构建预测对象,然后使用 `pwg_inference(mel)`生成原始音频(wav格式)。 + +```python +from pathlib import Path +import numpy as np +import paddle +import soundfile as sf +import yaml +from yacs.config import CfgNode +from paddlespeech.t2s.models.parallel_wavegan import PWGGenerator +from paddlespeech.t2s.models.parallel_wavegan import PWGInference +from paddlespeech.t2s.modules.normalizer import ZScore + +# 加载预训练模型 +checkpoint_dir = Path("parallel_wavegan_baker_ckpt_0.4") +with open(checkpoint_dir / "pwg_default.yaml") as f: + pwg_config = CfgNode(yaml.safe_load(f)) +vocoder = PWGGenerator(**pwg_config["generator_params"]) +vocoder.set_state_dict(paddle.load(args.pwg_params)) +vocoder.remove_weight_norm() +vocoder.eval() + +# 加载特征文件 +stat = np.load(checkpoint_dir / "pwg_stats.npy") +mu, std = stat +mu = paddle.to_tensor(mu) +std = paddle.to_tensor(std) +pwg_normalizer = ZScore(mu, std) + +# 加载预训练模型构造预测对象 +pwg_inference = PWGInference(pwg_normalizer, vocoder) + +# 频谱图到波形 +wav = pwg_inference(mel) +sf.write( + audio_path, + wav.numpy(), + samplerate=fastspeech2_config.fs) +``` \ No newline at end of file From 06e8bdf0d7b1fc2491fbbe98fa895a77288e9ac9 Mon Sep 17 00:00:00 2001 From: lizi <49679880@qq.com> Date: Tue, 8 Feb 2022 18:05:24 +0800 Subject: [PATCH 15/22] add Chinese doc for "FastSpeech2 with CSMSC", test=doc --- examples/csmsc/tts3/README_cn.md | 274 +++++++++++++++++++++++++++++++ 1 file changed, 274 insertions(+) create mode 100644 examples/csmsc/tts3/README_cn.md diff --git a/examples/csmsc/tts3/README_cn.md b/examples/csmsc/tts3/README_cn.md new file mode 100644 index 00000000..9794dc28 --- /dev/null +++ b/examples/csmsc/tts3/README_cn.md @@ -0,0 +1,274 @@ +# 用CSMSC数据集训练FastSpeech2模型 +This example contains code used to train a [Fastspeech2](https://arxiv.org/abs/2006.04558) model with [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html). + +本用例包含用于训练 [Fastspeech2](https://arxiv.org/abs/2006.04558) 模型的代码,使用 [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html)数据集。 + +## 数据集 +### 下载并解压 +从 [官方网站](https://test.data-baker.com/data/index/source)下载数据集 + +### 获取MFA结果并解压 +我们使用 [MFA](https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner) 去获得 fastspeech2 的音素持续时间。 +你们可以从这里下载 [baker_alignment_tone.tar.gz](https://paddlespeech.bj.bcebos.com/MFA/BZNSYP/with_tone/baker_alignment_tone.tar.gz), 或参考 [mfa example](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/mfa) 训练你自己的模型of our repo. + +## 开始 +假设数据集的路径是 `~/datasets/BZNSYP`. +假设CSMSC的MFA结果路径为 `./baker_alignment_tone`. +运行下面的命令会进行如下操作: + +1. **设置原路径**。 +2. 对数据集进行预处理。 +3. 训练模型 +4. 合成波形 + - 从 `metadata.jsonl`合成波形。 + - 从文本文件合成波形。 +5. 
使用静态模型进行推理。 +```bash +./run.sh +``` +您可以选择要运行的一系列阶段,或者将 `stage` 设置为 `stop-stage`以仅使用一个阶段,例如,运行以下命令只会预处理数据集。 +```bash +./run.sh --stage 0 --stop-stage 0 +``` +### 数据预处理 +```bash +./local/preprocess.sh ${conf_path} +``` +当它完成时。将在当前目录中创建`dump`文件夹。转储文件夹的结构如下所示。 + +```text +dump +├── dev +│ ├── norm +│ └── raw +├── phone_id_map.txt +├── speaker_id_map.txt +├── test +│ ├── norm +│ └── raw +└── train + ├── energy_stats.npy + ├── norm + ├── pitch_stats.npy + ├── raw + └── speech_stats.npy +``` +The dataset is split into 3 parts, namely `train`, `dev`, and` test`, each of which contains a `norm` and `raw` subfolder. The raw folder contains speech、pitch and energy features of each utterance, while the norm folder contains normalized ones. The statistics used to normalize features are computed from the training set, which is located in `dump/train/*_stats.npy`. + +数据集分为三个部分,即`train`、`dev`和`test`,每个部分都包含一个`norm`和`raw`子文件夹。原始文件夹包含每个话语的语音、音调和能量特征,而`norm`文件夹包含规范化的特征。用于规范化特征的统计数据是从`dump/train/*_stats.npy`中的训练集计算出来的。 + +此外,还有一个`metadata.jsonl` 在每个子文件夹中。它是一个类似表格的文件,包含音素、文本长度、语音长度、持续时间、语音特征路径、音调特征路径、能量特征路径、说话人和每个话语的id。 + +### 模型训练 +```bash +CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} +``` +`./local/train.sh` calls `${BIN_DIR}/train.py`. +以下是完整的帮助信息。 + +```text +usage: train.py [-h] [--config CONFIG] [--train-metadata TRAIN_METADATA] + [--dev-metadata DEV_METADATA] [--output-dir OUTPUT_DIR] + [--ngpu NGPU] [--phones-dict PHONES_DICT] + [--speaker-dict SPEAKER_DICT] [--voice-cloning VOICE_CLONING] + +Train a FastSpeech2 model. + +optional arguments: + -h, --help show this help message and exit + --config CONFIG fastspeech2 config file. + --train-metadata TRAIN_METADATA + training data. + --dev-metadata DEV_METADATA + dev data. + --output-dir OUTPUT_DIR + output dir. + --ngpu NGPU if ngpu=0, use cpu. + --phones-dict PHONES_DICT + phone vocabulary file. + --speaker-dict SPEAKER_DICT + speaker id map file for multiple speaker model. + --voice-cloning VOICE_CLONING + whether training voice cloning model. +``` +1. `--config` 是一个yaml格式的配置文件,用于覆盖默认配置,位于 `conf/default.yaml`. +2. `--train-metadata` 和 `--dev-metadata` 应为`dump`文件夹中`train`和`dev`下的规范化元数据文件 +3. `--output-dir` 是保存结果的目录。 检查点保存在此目录中的 `checkpoints/`目录下。 +4. `--ngpu` 要使用的GPU数,如果ngpu==0,则使用cpu。 +5. 
`--phones-dict` 是音素词汇表文件的路径。 + +### 合成 +我们使用 [parallel wavegan](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc/voc1) 作为神经声码器(vocoder)。 +从 [pwg_baker_ckpt_0.4.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/pwgan/pwg_baker_ckpt_0.4.zip) 下载预训练的parallel wavegan模型并将其解压。 + +```bash +unzip pwg_baker_ckpt_0.4.zip +``` +Parallel WaveGAN 检查点包含如下文件。 +```text +pwg_baker_ckpt_0.4 +├── pwg_default.yaml # 用于训练 parallel wavegan 的默认配置 +├── pwg_snapshot_iter_400000.pdz # parallel wavegan 的模型参数 +└── pwg_stats.npy # 训练平行波形时用于规范化谱图的统计数据 +``` +`./local/synthesize.sh` 调用 `${BIN_DIR}/../synthesize.py` 即可从 `metadata.jsonl`中合成波形。 + +```bash +CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} +``` +```text +usage: synthesize.py [-h] + [--am {speedyspeech_csmsc,fastspeech2_csmsc,fastspeech2_ljspeech,fastspeech2_aishell3,fastspeech2_vctk}] + [--am_config AM_CONFIG] [--am_ckpt AM_CKPT] + [--am_stat AM_STAT] [--phones_dict PHONES_DICT] + [--tones_dict TONES_DICT] [--speaker_dict SPEAKER_DICT] + [--voice-cloning VOICE_CLONING] + [--voc {pwgan_csmsc,pwgan_ljspeech,pwgan_aishell3,pwgan_vctk,mb_melgan_csmsc}] + [--voc_config VOC_CONFIG] [--voc_ckpt VOC_CKPT] + [--voc_stat VOC_STAT] [--ngpu NGPU] + [--test_metadata TEST_METADATA] [--output_dir OUTPUT_DIR] + +Synthesize with acoustic model & vocoder + +optional arguments: + -h, --help show this help message and exit + --am {speedyspeech_csmsc,fastspeech2_csmsc,fastspeech2_ljspeech,fastspeech2_aishell3,fastspeech2_vctk} + Choose acoustic model type of tts task. + --am_config AM_CONFIG + Config of acoustic model. Use deault config when it is + None. + --am_ckpt AM_CKPT Checkpoint file of acoustic model. + --am_stat AM_STAT mean and standard deviation used to normalize + spectrogram when training acoustic model. + --phones_dict PHONES_DICT + phone vocabulary file. + --tones_dict TONES_DICT + tone vocabulary file. + --speaker_dict SPEAKER_DICT + speaker id map file. + --voice-cloning VOICE_CLONING + whether training voice cloning model. + --voc {pwgan_csmsc,pwgan_ljspeech,pwgan_aishell3,pwgan_vctk,mb_melgan_csmsc} + Choose vocoder type of tts task. + --voc_config VOC_CONFIG + Config of voc. Use deault config when it is None. + --voc_ckpt VOC_CKPT Checkpoint file of voc. + --voc_stat VOC_STAT mean and standard deviation used to normalize + spectrogram when training voc. + --ngpu NGPU if ngpu == 0, use cpu. + --test_metadata TEST_METADATA + test metadata. + --output_dir OUTPUT_DIR + output dir. 
+``` +`./local/synthesize_e2e.sh` 调用 `${BIN_DIR}/../synthesize_e2e.py`,即可从文本文件中合成波形。 + +```bash +CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize_e2e.sh ${conf_path} ${train_output_path} ${ckpt_name} +``` +```text +usage: synthesize_e2e.py [-h] + [--am {speedyspeech_csmsc,fastspeech2_csmsc,fastspeech2_ljspeech,fastspeech2_aishell3,fastspeech2_vctk}] + [--am_config AM_CONFIG] [--am_ckpt AM_CKPT] + [--am_stat AM_STAT] [--phones_dict PHONES_DICT] + [--tones_dict TONES_DICT] + [--speaker_dict SPEAKER_DICT] [--spk_id SPK_ID] + [--voc {pwgan_csmsc,pwgan_ljspeech,pwgan_aishell3,pwgan_vctk,mb_melgan_csmsc}] + [--voc_config VOC_CONFIG] [--voc_ckpt VOC_CKPT] + [--voc_stat VOC_STAT] [--lang LANG] + [--inference_dir INFERENCE_DIR] [--ngpu NGPU] + [--text TEXT] [--output_dir OUTPUT_DIR] + +Synthesize with acoustic model & vocoder + +optional arguments: + -h, --help show this help message and exit + --am {speedyspeech_csmsc,fastspeech2_csmsc,fastspeech2_ljspeech,fastspeech2_aishell3,fastspeech2_vctk} + Choose acoustic model type of tts task. + --am_config AM_CONFIG + Config of acoustic model. Use deault config when it is + None. + --am_ckpt AM_CKPT Checkpoint file of acoustic model. + --am_stat AM_STAT mean and standard deviation used to normalize + spectrogram when training acoustic model. + --phones_dict PHONES_DICT + phone vocabulary file. + --tones_dict TONES_DICT + tone vocabulary file. + --speaker_dict SPEAKER_DICT + speaker id map file. + --spk_id SPK_ID spk id for multi speaker acoustic model + --voc {pwgan_csmsc,pwgan_ljspeech,pwgan_aishell3,pwgan_vctk,mb_melgan_csmsc} + Choose vocoder type of tts task. + --voc_config VOC_CONFIG + Config of voc. Use deault config when it is None. + --voc_ckpt VOC_CKPT Checkpoint file of voc. + --voc_stat VOC_STAT mean and standard deviation used to normalize + spectrogram when training voc. + --lang LANG Choose model language. zh or en + --inference_dir INFERENCE_DIR + dir to save inference models + --ngpu NGPU if ngpu == 0, use cpu. + --text TEXT text to synthesize, a 'utt_id sentence' pair per line. + --output_dir OUTPUT_DIR + output dir. +``` +1. `--am` 声学模型格式是否符合 {model_name}_{dataset} +2. `--am_config`, `--am_checkpoint`, `--am_stat` 和 `--phones_dict` 是声学模型的参数,对应于fastspeech2预训练模型中的4个文件。 +3. `--voc` 声码器(vocoder)格式是否符合 {model_name}_{dataset} +4. `--voc_config`, `--voc_checkpoint`, `--voc_stat` 是声码器的参数,对应于parallel wavegan预训练模型中的3个文件。 +5. `--lang` 对应模型的语言可以是 `zh` 或 `en`. +6. `--test_metadata` 应为`dump`文件夹中`test`下的规范化元数据文件、 +7. `--text` 是文本文件,其中包含要合成的句子。 +8. `--output_dir` 是保存合成音频文件的目录。 +9. `--ngpu` 要使用的GPU数,如果ngpu==0,则使用cpu。 + +### 推理 +在合成之后,我们将在`${train_output_path}/inference`中得到fastspeech2和pwgan的静态模型 +`./local/inference.sh` 调用 `${BIN_DIR}/inference.py`为fastspeech2+pwgan综合提供了一个paddle静态模型推理示例。 + +```bash +CUDA_VISIBLE_DEVICES=${gpus} ./local/inference.sh ${train_output_path} +``` + +## 预训练模型 +预先训练的FastSpeech2模型,在音频边缘没有空白音频: +- [fastspeech2_nosil_baker_ckpt_0.4.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_nosil_baker_ckpt_0.4.zip) +- [fastspeech2_conformer_baker_ckpt_0.5.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_conformer_baker_ckpt_0.5.zip) + +静态模型可以在这里下载 [fastspeech2_nosil_baker_static_0.4.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_nosil_baker_static_0.4.zip). 
+ +Model | Step | eval/loss | eval/l1_loss | eval/duration_loss | eval/pitch_loss| eval/energy_loss +:-------------:| :------------:| :-----: | :-----: | :--------: |:--------:|:---------: +default| 2(gpu) x 76000|1.0991|0.59132|0.035815|0.31915|0.15287| +conformer| 2(gpu) x 76000|1.0675|0.56103|0.035869|0.31553|0.15509| + +FastSpeech2检查点包含下列文件。 +```text +fastspeech2_nosil_baker_ckpt_0.4 +├── default.yaml # 用于训练fastspeech2的默认配置 +├── phone_id_map.txt # 训练fastspeech2时的音素词汇文件 +├── snapshot_iter_76000.pdz # 模型参数和优化器状态 +└── speech_stats.npy # 训练fastspeech2时用于规范化频谱图的统计数据 +``` +您可以使用以下脚本通过使用预训练的fastspeech2和并行wavegan模型为 `${BIN_DIR}/../sentences.txt` 合成句子 +```bash +source path.sh + +FLAGS_allocator_strategy=naive_best_fit \ +FLAGS_fraction_of_gpu_memory_to_use=0.01 \ +python3 ${BIN_DIR}/../synthesize_e2e.py \ + --am=fastspeech2_csmsc \ + --am_config=fastspeech2_nosil_baker_ckpt_0.4/default.yaml \ + --am_ckpt=fastspeech2_nosil_baker_ckpt_0.4/snapshot_iter_76000.pdz \ + --am_stat=fastspeech2_nosil_baker_ckpt_0.4/speech_stats.npy \ + --voc=pwgan_csmsc \ + --voc_config=pwg_baker_ckpt_0.4/pwg_default.yaml \ + --voc_ckpt=pwg_baker_ckpt_0.4/pwg_snapshot_iter_400000.pdz \ + --voc_stat=pwg_baker_ckpt_0.4/pwg_stats.npy \ + --lang=zh \ + --text=${BIN_DIR}/../sentences.txt \ + --output_dir=exp/default/test_e2e \ + --inference_dir=exp/default/inference \ + --phones_dict=fastspeech2_nosil_baker_ckpt_0.4/phone_id_map.txt +``` From 5e34cdbd6ea22d74237f22a21d02148358e028a9 Mon Sep 17 00:00:00 2001 From: lizi <49679880@qq.com> Date: Tue, 8 Feb 2022 19:26:28 +0800 Subject: [PATCH 16/22] Modify typesetting, test=doc --- docs/source/tts/quick_start_cn.md | 28 ++++++++-------- examples/csmsc/tts3/README_cn.md | 56 +++++++++++++++---------------- 2 files changed, 41 insertions(+), 43 deletions(-) diff --git a/docs/source/tts/quick_start_cn.md b/docs/source/tts/quick_start_cn.md index 7d473061..f44887ae 100644 --- a/docs/source/tts/quick_start_cn.md +++ b/docs/source/tts/quick_start_cn.md @@ -6,7 +6,7 @@ * LJSpeech (英文单发音人) * VCTK (英文多发音人) -PaddleSpeech的TTS模型具有以下映射关系: +PaddleSpeech 的 TTS 模型具有以下映射关系: * tts0 - Tactron2 * tts1 - TransformerTTS @@ -25,7 +25,7 @@ PaddleSpeech的TTS模型具有以下映射关系: 让我们以 FastSpeech2 + Parallel WaveGAN 和 CSMSC 数据集 为例. [examples/csmsc](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc) -### 用CSMSC数据集训练Parallel WaveGAN +### 用 CSMSC 数据集训练 Parallel WaveGAN - 进入目录 ```bash @@ -55,27 +55,27 @@ PaddleSpeech的TTS模型具有以下映射关系: source path.sh ``` **在你开始做任何事情之前,必须先做这步** - 将 `MAIN_ROOT` 设置为项目目录. 使用 `fastspeech2` 模型作为 `MODEL`. + 将 `MAIN_ROOT` 设置为项目目录. 使用 `fastspeech2` 模型作为 `MODEL` 。 - 运行 ```bash bash run.sh ``` - 这只是一个演示,请确保源数据已经准备好,并且在下一个 `步骤` 之前每个 `步骤` 都运行正常. 
+ 这只是一个演示,请确保源数据已经准备好,并且在下一个 `步骤` 之前每个 `步骤` 都运行正常。 `run.sh` 中主要包括以下步骤: - 设置路径。 - 预处理数据集, - 训练模型。 -- 从`metadata.jsonl`中合成波形 +- 从 `metadata.jsonl` 中合成波形 - 从文本文件合成波形。(在声学模型中) - 使用静态模型进行推理。(可选) -有关更多详细信息,请参见examples中的`README.md` +有关更多详细信息,请参见 examples 中的 `README.md` -## TTS流水线 -本节介绍如何使用TTS提供的预训练模型,并对其进行推理。 +## TTS 流水线 +本节介绍如何使用 TTS 提供的预训练模型,并对其进行推理。 TTS中的预训练模型在压缩包中提供。将其解压缩以获得如下文件夹: **Acoustic Models:** @@ -99,13 +99,13 @@ checkpoint_name - `default.yaml` 存储用于训练模型的配置。 - `snapshot_iter_*.pdz` 是检查点文件,其中`*`是它经过训练的步骤。 - `*_stats.npy` 是特征的统计文件,如果它在训练前已被标准化。 -- `phone_id_map.txt` 是音素到音素ID的映射关系。 -- `tone_id_map.txt` 是在训练声学模型之前分割音调和拼音时,音调到音调ID的映射关系。(例如在csmsc/speedyspeech的示例中) -- `spk_id_map.txt` 是多speaker声学模型中speaker到spk_ids的映射关系。 +- `phone_id_map.txt` 是音素到音素 ID 的映射关系。 +- `tone_id_map.txt` 是在训练声学模型之前分割音调和拼音时,音调到音调 ID 的映射关系。(例如在 csmsc/speedyspeech 的示例中) +- `spk_id_map.txt` 是多发音人声学模型中`发音人`到`spk_ids`的映射关系。 下面的示例代码显示了如何使用模型进行预测。 -### Acoustic Models声学模型(文本到频谱图) -下面的代码显示了如何使用“FastSpeech2”模型。加载预训练模型后,使用它和normalizer对象构建预测对象,然后使用`fastspeech2_inferencet(phone_ids)`生成频谱图,频谱图可进一步用于使用声码器合成原始音频。 +### Acoustic Models 声学模型(文本到频谱图) +下面的代码显示了如何使用 `FastSpeech2` 模型。加载预训练模型后,使用它和 normalizer 对象构建预测对象,然后使用 `fastspeech2_inferencet(phone_ids)` 生成频谱图,频谱图可进一步用于使用声码器合成原始音频。 ```python from pathlib import Path @@ -163,7 +163,7 @@ for part_phone_ids in phone_ids: ``` ### Vcoder声码器(谱图到波形) -下面的代码显示了如何使用` Parallel WaveGAN` 模型。像上面的例子一样,加载预训练模型后,使用它和normalizer对象构建预测对象,然后使用 `pwg_inference(mel)`生成原始音频(wav格式)。 +下面的代码显示了如何使用 `Parallel WaveGAN` 模型。像上面的例子一样,加载预训练模型后,使用它和 normalizer 对象构建预测对象,然后使用 `pwg_inference(mel)` 生成原始音频( wav 格式)。 ```python from pathlib import Path diff --git a/examples/csmsc/tts3/README_cn.md b/examples/csmsc/tts3/README_cn.md index 9794dc28..2e9c27af 100644 --- a/examples/csmsc/tts3/README_cn.md +++ b/examples/csmsc/tts3/README_cn.md @@ -1,15 +1,14 @@ -# 用CSMSC数据集训练FastSpeech2模型 -This example contains code used to train a [Fastspeech2](https://arxiv.org/abs/2006.04558) model with [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html). +# 用 CSMSC 数据集训练 FastSpeech2 模型 -本用例包含用于训练 [Fastspeech2](https://arxiv.org/abs/2006.04558) 模型的代码,使用 [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html)数据集。 +本用例包含用于训练 [Fastspeech2](https://arxiv.org/abs/2006.04558) 模型的代码,使用 [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html) 数据集。 ## 数据集 ### 下载并解压 -从 [官方网站](https://test.data-baker.com/data/index/source)下载数据集 +从 [官方网站](https://test.data-baker.com/data/index/source) 下载数据集 ### 获取MFA结果并解压 我们使用 [MFA](https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner) 去获得 fastspeech2 的音素持续时间。 -你们可以从这里下载 [baker_alignment_tone.tar.gz](https://paddlespeech.bj.bcebos.com/MFA/BZNSYP/with_tone/baker_alignment_tone.tar.gz), 或参考 [mfa example](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/mfa) 训练你自己的模型of our repo. +你们可以从这里下载 [baker_alignment_tone.tar.gz](https://paddlespeech.bj.bcebos.com/MFA/BZNSYP/with_tone/baker_alignment_tone.tar.gz), 或参考 [mfa example](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/mfa) 训练你自己的模型。 ## 开始 假设数据集的路径是 `~/datasets/BZNSYP`. @@ -20,13 +19,13 @@ This example contains code used to train a [Fastspeech2](https://arxiv.org/abs/2 2. 对数据集进行预处理。 3. 训练模型 4. 合成波形 - - 从 `metadata.jsonl`合成波形。 + - 从 `metadata.jsonl` 合成波形。 - 从文本文件合成波形。 5. 
使用静态模型进行推理。 ```bash ./run.sh ``` -您可以选择要运行的一系列阶段,或者将 `stage` 设置为 `stop-stage`以仅使用一个阶段,例如,运行以下命令只会预处理数据集。 +您可以选择要运行的一系列阶段,或者将 `stage` 设置为 `stop-stage` 以仅使用一个阶段,例如,运行以下命令只会预处理数据集。 ```bash ./run.sh --stage 0 --stop-stage 0 ``` @@ -34,7 +33,7 @@ This example contains code used to train a [Fastspeech2](https://arxiv.org/abs/2 ```bash ./local/preprocess.sh ${conf_path} ``` -当它完成时。将在当前目录中创建`dump`文件夹。转储文件夹的结构如下所示。 +当它完成时。将在当前目录中创建 `dump` 文件夹。转储文件夹的结构如下所示。 ```text dump @@ -53,17 +52,16 @@ dump ├── raw └── speech_stats.npy ``` -The dataset is split into 3 parts, namely `train`, `dev`, and` test`, each of which contains a `norm` and `raw` subfolder. The raw folder contains speech、pitch and energy features of each utterance, while the norm folder contains normalized ones. The statistics used to normalize features are computed from the training set, which is located in `dump/train/*_stats.npy`. -数据集分为三个部分,即`train`、`dev`和`test`,每个部分都包含一个`norm`和`raw`子文件夹。原始文件夹包含每个话语的语音、音调和能量特征,而`norm`文件夹包含规范化的特征。用于规范化特征的统计数据是从`dump/train/*_stats.npy`中的训练集计算出来的。 +数据集分为三个部分,即 `train` 、 `dev` 和 `test` ,每个部分都包含一个 `norm` 和 `raw` 子文件夹。原始文件夹包含每个话语的语音、音调和能量特征,而 `norm` 文件夹包含规范化的特征。用于规范化特征的统计数据是从 `dump/train/*_stats.npy` 中的训练集计算出来的。 -此外,还有一个`metadata.jsonl` 在每个子文件夹中。它是一个类似表格的文件,包含音素、文本长度、语音长度、持续时间、语音特征路径、音调特征路径、能量特征路径、说话人和每个话语的id。 +此外,还有一个 `metadata.jsonl` 在每个子文件夹中。它是一个类似表格的文件,包含音素、文本长度、语音长度、持续时间、语音特征路径、音调特征路径、能量特征路径、说话人和每个话语的 id。 ### 模型训练 ```bash CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} ``` -`./local/train.sh` calls `${BIN_DIR}/train.py`. +`./local/train.sh` 调用 `${BIN_DIR}/train.py` 。 以下是完整的帮助信息。 ```text @@ -91,15 +89,15 @@ optional arguments: --voice-cloning VOICE_CLONING whether training voice cloning model. ``` -1. `--config` 是一个yaml格式的配置文件,用于覆盖默认配置,位于 `conf/default.yaml`. -2. `--train-metadata` 和 `--dev-metadata` 应为`dump`文件夹中`train`和`dev`下的规范化元数据文件 -3. `--output-dir` 是保存结果的目录。 检查点保存在此目录中的 `checkpoints/`目录下。 -4. `--ngpu` 要使用的GPU数,如果ngpu==0,则使用cpu。 +1. `--config` 是一个 yaml 格式的配置文件,用于覆盖默认配置,位于 `conf/default.yaml`. +2. `--train-metadata` 和 `--dev-metadata` 应为 `dump` 文件夹中 `train` 和 `dev` 下的规范化元数据文件 +3. `--output-dir` 是保存结果的目录。 检查点保存在此目录中的 `checkpoints/` 目录下。 +4. `--ngpu` 要使用的 GPU 数,如果 ngpu==0,则使用 cpu 。 5. `--phones-dict` 是音素词汇表文件的路径。 ### 合成 我们使用 [parallel wavegan](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc/voc1) 作为神经声码器(vocoder)。 -从 [pwg_baker_ckpt_0.4.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/pwgan/pwg_baker_ckpt_0.4.zip) 下载预训练的parallel wavegan模型并将其解压。 +从 [pwg_baker_ckpt_0.4.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/pwgan/pwg_baker_ckpt_0.4.zip) 下载预训练的 parallel wavegan 模型并将其解压。 ```bash unzip pwg_baker_ckpt_0.4.zip @@ -214,25 +212,25 @@ optional arguments: output dir. ``` 1. `--am` 声学模型格式是否符合 {model_name}_{dataset} -2. `--am_config`, `--am_checkpoint`, `--am_stat` 和 `--phones_dict` 是声学模型的参数,对应于fastspeech2预训练模型中的4个文件。 +2. `--am_config`, `--am_checkpoint`, `--am_stat` 和 `--phones_dict` 是声学模型的参数,对应于 fastspeech2 预训练模型中的 4 个文件。 3. `--voc` 声码器(vocoder)格式是否符合 {model_name}_{dataset} -4. `--voc_config`, `--voc_checkpoint`, `--voc_stat` 是声码器的参数,对应于parallel wavegan预训练模型中的3个文件。 -5. `--lang` 对应模型的语言可以是 `zh` 或 `en`. -6. `--test_metadata` 应为`dump`文件夹中`test`下的规范化元数据文件、 +4. `--voc_config`, `--voc_checkpoint`, `--voc_stat` 是声码器的参数,对应于 parallel wavegan 预训练模型中的 3 个文件。 +5. `--lang` 对应模型的语言可以是 `zh` 或 `en` 。 +6. `--test_metadata` 应为 `dump` 文件夹中 `test` 下的规范化元数据文件、 7. `--text` 是文本文件,其中包含要合成的句子。 8. `--output_dir` 是保存合成音频文件的目录。 -9. 
`--ngpu` 要使用的GPU数,如果ngpu==0,则使用cpu。 +9. `--ngpu` 要使用的GPU数,如果 ngpu==0,则使用 cpu 。 ### 推理 -在合成之后,我们将在`${train_output_path}/inference`中得到fastspeech2和pwgan的静态模型 -`./local/inference.sh` 调用 `${BIN_DIR}/inference.py`为fastspeech2+pwgan综合提供了一个paddle静态模型推理示例。 +在合成之后,我们将在 `${train_output_path}/inference` 中得到 fastspeech2 和 pwgan 的静态模型 +`./local/inference.sh` 调用 `${BIN_DIR}/inference.py` 为 fastspeech2 + pwgan 综合提供了一个 paddle 静态模型推理示例。 ```bash CUDA_VISIBLE_DEVICES=${gpus} ./local/inference.sh ${train_output_path} ``` ## 预训练模型 -预先训练的FastSpeech2模型,在音频边缘没有空白音频: +预先训练的 FastSpeech2 模型,在音频边缘没有空白音频: - [fastspeech2_nosil_baker_ckpt_0.4.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_nosil_baker_ckpt_0.4.zip) - [fastspeech2_conformer_baker_ckpt_0.5.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_conformer_baker_ckpt_0.5.zip) @@ -246,12 +244,12 @@ conformer| 2(gpu) x 76000|1.0675|0.56103|0.035869|0.31553|0.15509| FastSpeech2检查点包含下列文件。 ```text fastspeech2_nosil_baker_ckpt_0.4 -├── default.yaml # 用于训练fastspeech2的默认配置 -├── phone_id_map.txt # 训练fastspeech2时的音素词汇文件 +├── default.yaml # 用于训练 fastspeech2 的默认配置 +├── phone_id_map.txt # 训练 fastspeech2 时的音素词汇文件 ├── snapshot_iter_76000.pdz # 模型参数和优化器状态 -└── speech_stats.npy # 训练fastspeech2时用于规范化频谱图的统计数据 +└── speech_stats.npy # 训练 fastspeech2 时用于规范化频谱图的统计数据 ``` -您可以使用以下脚本通过使用预训练的fastspeech2和并行wavegan模型为 `${BIN_DIR}/../sentences.txt` 合成句子 +您可以使用以下脚本通过使用预训练的 fastspeech2 和 parallel wavegan 模型为 `${BIN_DIR}/../sentences.txt` 合成句子 ```bash source path.sh From 6b35b36e318ab26fabb99de46886c5f3b703456b Mon Sep 17 00:00:00 2001 From: lizi <49679880@qq.com> Date: Tue, 8 Feb 2022 19:31:49 +0800 Subject: [PATCH 17/22] thank myself, test=doc --- README.md | 1 + README_cn.md | 1 + 2 files changed, 2 insertions(+) diff --git a/README.md b/README.md index d5e56d37..23124231 100644 --- a/README.md +++ b/README.md @@ -539,6 +539,7 @@ You are warmly welcome to submit questions in [discussions](https://github.com/P - Many thanks to [mymagicpower](https://github.com/mymagicpower) for the Java implementation of ASR upon [short](https://github.com/mymagicpower/AIAS/tree/main/3_audio_sdks/asr_sdk) and [long](https://github.com/mymagicpower/AIAS/tree/main/3_audio_sdks/asr_long_audio_sdk) audio files. - Many thanks to [JiehangXie](https://github.com/JiehangXie)/[PaddleBoBo](https://github.com/JiehangXie/PaddleBoBo) for developing Virtual Uploader(VUP)/Virtual YouTuber(VTuber) with PaddleSpeech TTS function. - Many thanks to [745165806](https://github.com/745165806)/[PaddleSpeechTask](https://github.com/745165806/PaddleSpeechTask) for contributing Punctuation Restoration model. +- Many thanks to [kslz](https://github.com/745165806) for supplementary Chinese documents. Besides, PaddleSpeech depends on a lot of open source repositories. See [references](./docs/source/reference.md) for more information. 
diff --git a/README_cn.md b/README_cn.md index 1ba1e8a3..4ce4ade9 100644 --- a/README_cn.md +++ b/README_cn.md @@ -548,6 +548,7 @@ year={2021} - 非常感谢 [mymagicpower](https://github.com/mymagicpower) 采用PaddleSpeech 对 ASR 的[短语音](https://github.com/mymagicpower/AIAS/tree/main/3_audio_sdks/asr_sdk)及[长语音](https://github.com/mymagicpower/AIAS/tree/main/3_audio_sdks/asr_long_audio_sdk)进行 Java 实现。 - 非常感谢 [JiehangXie](https://github.com/JiehangXie)/[PaddleBoBo](https://github.com/JiehangXie/PaddleBoBo) 采用 PaddleSpeech 语音合成功能实现 Virtual Uploader(VUP)/Virtual YouTuber(VTuber) 虚拟主播。 - 非常感谢 [745165806](https://github.com/745165806)/[PaddleSpeechTask](https://github.com/745165806/PaddleSpeechTask) 贡献标点重建相关模型。 +- 非常感谢 [kslz](https://github.com/kslz) 补充中文文档。 此外,PaddleSpeech 依赖于许多开源存储库。有关更多信息,请参阅 [references](./docs/source/reference.md)。 From be2fc2cc117aea3f66022c78cb5abc5a7f94dd31 Mon Sep 17 00:00:00 2001 From: lizi <49679880@qq.com> Date: Tue, 8 Feb 2022 19:36:06 +0800 Subject: [PATCH 18/22] Modify typesetting, test=doc --- docs/source/tts/quick_start.md | 1 + docs/source/tts/quick_start_cn.md | 1 + examples/csmsc/tts3/README.md | 1 + examples/csmsc/tts3/README_cn.md | 1 + 4 files changed, 4 insertions(+) diff --git a/docs/source/tts/quick_start.md b/docs/source/tts/quick_start.md index 3180d80a..bddee778 100644 --- a/docs/source/tts/quick_start.md +++ b/docs/source/tts/quick_start.md @@ -1,3 +1,4 @@ +([简体中文](./quick_start_cn.md)|English) # Quick Start of Text-to-Speech The examples in PaddleSpeech are mainly classified by datasets, the TTS datasets we mainly used are: * CSMCS (Mandarin single speaker) diff --git a/docs/source/tts/quick_start_cn.md b/docs/source/tts/quick_start_cn.md index f44887ae..c14fccd5 100644 --- a/docs/source/tts/quick_start_cn.md +++ b/docs/source/tts/quick_start_cn.md @@ -1,3 +1,4 @@ +(简体中文|[English](./quick_start.md)) # 语音合成快速开始 这些PaddleSpeech中的样例主要按数据集分类,我们主要使用的TTS数据集有: diff --git a/examples/csmsc/tts3/README.md b/examples/csmsc/tts3/README.md index 13d291b5..04c6a5da 100644 --- a/examples/csmsc/tts3/README.md +++ b/examples/csmsc/tts3/README.md @@ -1,3 +1,4 @@ +([简体中文](./README_cn.md)|English) # FastSpeech2 with CSMSC This example contains code used to train a [Fastspeech2](https://arxiv.org/abs/2006.04558) model with [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html). 
diff --git a/examples/csmsc/tts3/README_cn.md b/examples/csmsc/tts3/README_cn.md index 2e9c27af..25931ecb 100644 --- a/examples/csmsc/tts3/README_cn.md +++ b/examples/csmsc/tts3/README_cn.md @@ -1,3 +1,4 @@ +(简体中文|[English](./README.md)) # 用 CSMSC 数据集训练 FastSpeech2 模型 本用例包含用于训练 [Fastspeech2](https://arxiv.org/abs/2006.04558) 模型的代码,使用 [Chinese Standard Mandarin Speech Copus](https://www.data-baker.com/open_source.html) 数据集。 From 348a1a33bf3d31aa48b88f526b454a60e6888298 Mon Sep 17 00:00:00 2001 From: TianYuan Date: Tue, 8 Feb 2022 20:13:33 +0800 Subject: [PATCH 19/22] update tacotron2 voice cloning in aishell3 with new tacotron2, test=tts (#1419) --- examples/aishell3/vc0/README.md | 141 +- examples/aishell3/vc0/conf/default.yaml | 86 + examples/aishell3/vc0/local/preprocess.sh | 72 +- examples/aishell3/vc0/local/synthesize.sh | 22 + examples/aishell3/vc0/local/train.sh | 12 +- examples/aishell3/vc0/local/voice_cloning.sh | 30 +- examples/aishell3/vc0/path.sh | 2 +- examples/aishell3/vc0/run.sh | 30 +- examples/aishell3/vc1/README.md | 2 +- examples/aishell3/vc1/local/voice_cloning.sh | 16 +- examples/csmsc/tts0/README.md | 6 +- paddlespeech/t2s/datasets/am_batch_fn.py | 41 + .../t2s/exps/fastspeech2/gen_gta_mel.py | 4 +- .../t2s/exps/fastspeech2/preprocess.py | 4 +- paddlespeech/t2s/exps/fastspeech2/train.py | 4 +- .../gan_vocoder/parallelwave_gan/train.py | 3 +- .../t2s/exps/gan_vocoder/preprocess.py | 4 +- .../t2s/exps/new_tacotron2/preprocess.py | 4 +- paddlespeech/t2s/exps/new_tacotron2/train.py | 53 +- .../t2s/exps/speedyspeech/gen_gta_mel.py | 4 +- .../t2s/exps/speedyspeech/normalize.py | 4 +- .../t2s/exps/speedyspeech/preprocess.py | 4 +- paddlespeech/t2s/exps/speedyspeech/train.py | 4 +- paddlespeech/t2s/exps/synthesize.py | 15 +- paddlespeech/t2s/exps/tacotron2/__init__.py | 13 - paddlespeech/t2s/exps/tacotron2/config.py | 75 - paddlespeech/t2s/exps/tacotron2/ljspeech.py | 91 - paddlespeech/t2s/exps/tacotron2/preprocess.py | 98 - .../t2s/exps/tacotron2/synthesize.ipynb | 342 -- paddlespeech/t2s/exps/tacotron2/synthesize.py | 103 - paddlespeech/t2s/exps/tacotron2/train.py | 220 - .../t2s/exps/transformer_tts/normalize.py | 3 + .../exps/{fastspeech2 => }/voice_cloning.py | 175 +- .../t2s/exps/voice_cloning/__init__.py | 13 - .../voice_cloning/tacotron2_ge2e/__init__.py | 13 - .../voice_cloning/tacotron2_ge2e/aishell3.py | 89 - .../tacotron2_ge2e/chinese_g2p.py | 42 - .../voice_cloning/tacotron2_ge2e/config.py | 81 - .../tacotron2_ge2e/extract_mel.py | 95 - .../voice_cloning/tacotron2_ge2e/lexicon.txt | 4150 ----------------- .../preprocess_transcription.py | 257 - .../tacotron2_ge2e/process_wav.py | 94 - .../voice_cloning/tacotron2_ge2e/train.py | 263 -- .../tacotron2_ge2e/voice_cloning.py | 166 - paddlespeech/t2s/models/__init__.py | 1 - .../t2s/models/new_tacotron2/tacotron2.py | 2 +- paddlespeech/t2s/models/tacotron2.py | 1074 ----- paddlespeech/t2s/utils/__init__.py | 4 + .../exps/ge2e/speaker_verification_dataset.py | 6 - utils/compute_statistics.py | 4 +- 50 files changed, 515 insertions(+), 7526 deletions(-) create mode 100644 examples/aishell3/vc0/conf/default.yaml create mode 100755 examples/aishell3/vc0/local/synthesize.sh delete mode 100644 paddlespeech/t2s/exps/tacotron2/__init__.py delete mode 100644 paddlespeech/t2s/exps/tacotron2/config.py delete mode 100644 paddlespeech/t2s/exps/tacotron2/ljspeech.py delete mode 100644 paddlespeech/t2s/exps/tacotron2/preprocess.py delete mode 100644 paddlespeech/t2s/exps/tacotron2/synthesize.ipynb delete mode 100644 
paddlespeech/t2s/exps/tacotron2/synthesize.py delete mode 100644 paddlespeech/t2s/exps/tacotron2/train.py rename paddlespeech/t2s/exps/{fastspeech2 => }/voice_cloning.py (57%) delete mode 100644 paddlespeech/t2s/exps/voice_cloning/__init__.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/__init__.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/aishell3.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/chinese_g2p.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/config.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/extract_mel.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/lexicon.txt delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/preprocess_transcription.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/process_wav.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/train.py delete mode 100644 paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/voice_cloning.py delete mode 100644 paddlespeech/t2s/models/tacotron2.py diff --git a/examples/aishell3/vc0/README.md b/examples/aishell3/vc0/README.md index 91d32619..21cd0aa2 100644 --- a/examples/aishell3/vc0/README.md +++ b/examples/aishell3/vc0/README.md @@ -1,94 +1,119 @@ + # Tacotron2 + AISHELL-3 Voice Cloning -This example contains code used to train a [Tacotron2 ](https://arxiv.org/abs/1712.05884) model with [AISHELL-3](http://www.aishelltech.com/aishell_3). The trained model can be used in Voice Cloning Task, We refer to the model structure of [Transfer Learning from Speaker Verification to Multispeaker Text-To-Speech Synthesis](https://arxiv.org/pdf/1806.04558.pdf). The general steps are as follows: -1. Speaker Encoder: We use Speaker Verification to train a speaker encoder. Datasets used in this task are different from those used in Tacotron2 because the transcriptions are not needed, we use more datasets, refer to [ge2e](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/ge2e). -2. Synthesizer: We use the trained speaker encoder to generate speaker embedding for each sentence in AISHELL-3. This embedding is an extra input of Tacotron2 which will be concated with encoder outputs. -3. Vocoder: We use WaveFlow as the neural Vocoder, refer to [waveflow](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/ljspeech/voc0). +This example contains code used to train a [Tacotron2](https://arxiv.org/abs/1712.05884) model with [AISHELL-3](http://www.aishelltech.com/aishell_3). The trained model can be used in Voice Cloning Task, We refer to the model structure of [Transfer Learning from Speaker Verification to Multispeaker Text-To-Speech Synthesis](https://arxiv.org/pdf/1806.04558.pdf). The general steps are as follows: +1. Speaker Encoder: We use Speaker Verification to train a speaker encoder. Datasets used in this task are different from those used in `Tacotron2` because the transcriptions are not needed, we use more datasets, refer to [ge2e](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/ge2e). +2. Synthesizer: We use the trained speaker encoder to generate speaker embedding for each sentence in AISHELL-3. This embedding is an extra input of `Tacotron2` which will be concated with encoder outputs. +3. 
Vocoder: We use [Parallel Wave GAN](http://arxiv.org/abs/1910.11480) as the neural Vocoder, refer to [voc1](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/aishell3/voc1). + +## Dataset +### Download and Extract +Download AISHELL-3. +```bash +wget https://www.openslr.org/resources/93/data_aishell3.tgz +``` +Extract AISHELL-3. +```bash +mkdir data_aishell3 +tar zxvf data_aishell3.tgz -C data_aishell3 +``` +### Get MFA Result and Extract +We use [MFA2.x](https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner) to get durations for aishell3_fastspeech2. +You can download from here [aishell3_alignment_tone.tar.gz](https://paddlespeech.bj.bcebos.com/MFA/AISHELL-3/with_tone/aishell3_alignment_tone.tar.gz), or train your MFA model reference to [mfa example](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/mfa) (use MFA1.x now) of our repo. + +## Pretrained GE2E Model +We use pretrained GE2E model to generate speaker embedding for each sentence. + +Download pretrained GE2E model from here [ge2e_ckpt_0.3.zip](https://bj.bcebos.com/paddlespeech/Parakeet/released_models/ge2e/ge2e_ckpt_0.3.zip), and `unzip` it. ## Get Started Assume the path to the dataset is `~/datasets/data_aishell3`. -Assume the path to the MFA result of AISHELL-3 is `./alignment`. -Assume the path to the pretrained ge2e model is `ge2e_ckpt_path=./ge2e_ckpt_0.3/step-3000000` +Assume the path to the MFA result of AISHELL-3 is `./aishell3_alignment_tone`. +Assume the path to the pretrained ge2e model is `./ge2e_ckpt_0.3`. + Run the command below to 1. **source path**. 2. preprocess the dataset. 3. train the model. -4. start a voice cloning inference. +4. synthesize waveform from `metadata.jsonl`. +5. start a voice cloning inference. ```bash ./run.sh ``` -You can choose a range of stages you want to run, or set `stage` equal to `stop-stage` to use only one stage, for example, run the following command will only preprocess the dataset. +You can choose a range of stages you want to run, or set `stage` equal to `stop-stage` to use only one stage, for example, running the following command will only preprocess the dataset. ```bash ./run.sh --stage 0 --stop-stage 0 ``` ### Data Preprocessing ```bash -CUDA_VISIBLE_DEVICES=${gpus} ./local/preprocess.sh ${input} ${preprocess_path} ${alignment} ${ge2e_ckpt_path} +CUDA_VISIBLE_DEVICES=${gpus} ./local/preprocess.sh ${conf_path} ${ge2e_ckpt_path} ``` -#### Generate Speaker Embedding - Use pretrained GE2E (speaker encoder) to generate speaker embedding for each sentence in AISHELL-3, which has the same file structure with wav files and the format is `.npy`. - -```bash -if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then - python3 ${BIN_DIR}/../ge2e/inference.py \ - --input=${input} \ - --output=${preprocess_path}/embed \ - --ngpu=1 \ - --checkpoint_path=${ge2e_ckpt_path} -fi +When it is done. A `dump` folder is created in the current directory. The structure of the dump folder is listed below. +```text +dump +├── dev +│ ├── norm +│ └── raw +├── embed +│ ├── SSB0005 +│ ├── SSB0009 +│ ├── ... +│ └── ... +├── phone_id_map.txt +├── speaker_id_map.txt +├── test +│ ├── norm +│ └── raw +└── train + ├── norm + ├── raw + └── speech_stats.npy ``` +The `embed` contains the generated speaker embedding for each sentence in AISHELL-3, which has the same file structure with wav files and the format is `.npy`. The computing time of utterance embedding can be x hours. 
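+
+A minimal sketch like the one below (not one of the provided scripts) can be used to inspect a generated embedding; the layout under `dump/embed` follows the speaker/utterance structure of the corpus, and the vector size is expected to match `spk_embed_dim` (256) in `conf/default.yaml`.
+```python
+import numpy as np
+from pathlib import Path
+
+# pick an arbitrary generated speaker embedding: dump/embed/<speaker>/<utterance>.npy
+embed_path = next(Path("dump/embed").rglob("*.npy"))
+embed = np.load(embed_path)
+# each utterance maps to one fixed-length vector, e.g. shape (256,), dtype float32
+print(embed_path, embed.shape, embed.dtype)
+```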
-#### Process Wav -There is silence in the edge of AISHELL-3's wavs, and the audio amplitude is very small, so, we need to remove the silence and normalize the audio. You can the silence remove method based on volume or energy, but the effect is not very good, We use [MFA](https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner) to get the alignment of text and speech, then utilize the alignment results to remove the silence. - -We use Montreal Force Aligner 1.0. The label in aishell3 includes pinyin,so the lexicon we provided to MFA is pinyin rather than Chinese characters. And the prosody marks(`$` and `%`) need to be removed. You should preprocess the dataset into the format which MFA needs, the texts have the same name with wavs and have the suffix `.lab`. -We use [lexicon.txt](https://github.com/PaddlePaddle/PaddleSpeech/blob/develop/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/lexicon.txt) as the lexicon. +The dataset is split into 3 parts, namely `train`, `dev`, and` test`, each of which contains a `norm` and `raw` subfolder. The raw folder contains speech features of each utterance, while the norm folder contains normalized ones. The statistics used to normalize features are computed from the training set, which is located in `dump/train/*_stats.npy`. -You can download the alignment results from here [alignment_aishell3.tar.gz](https://paddlespeech.bj.bcebos.com/MFA/AISHELL-3/alignment_aishell3.tar.gz), or train your MFA model reference to [mfa example](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/other/mfa) (use MFA1.x now) of our repo. +Also, there is a `metadata.jsonl` in each subfolder. It is a table-like file that contains phones, text_lengths, speech_lengths, durations, the path of speech features, speaker, and id of each utterance. +The preprocessing step is very similar to that one of [tts0](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc/tts0), but there is one more `ge2e/inference` step here. +### Model Training +`./local/train.sh` calls `${BIN_DIR}/train.py`. ```bash -if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then - echo "Process wav ..." - python3 ${BIN_DIR}/process_wav.py \ - --input=${input}/wav \ - --output=${preprocess_path}/normalized_wav \ - --alignment=${alignment} -fi +CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} ``` +The training step is very similar to that one of [tts0](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc/tts0), but we should set `--voice-cloning=True` when calling `${BIN_DIR}/train.py`. -#### Preprocess Transcription -We revert the transcription into `phones` and `tones`. It is worth noting that our processing here is different from that used for MFA, we separated the tones. This is a processing method, of course, you can only segment initials and vowels. - +### Synthesizing +We use [parallel wavegan](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/aishell3/voc1) as the neural vocoder. +Download pretrained parallel wavegan model from [pwg_aishell3_ckpt_0.5.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/pwgan/pwg_aishell3_ckpt_0.5.zip) and unzip it. 
```bash -if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then - python3 ${BIN_DIR}/preprocess_transcription.py \ - --input=${input} \ - --output=${preprocess_path} -fi +unzip pwg_aishell3_ckpt_0.5.zip ``` -The default input is `~/datasets/data_aishell3/train`,which contains `label_train-set.txt`, the processed results are `metadata.yaml` and `metadata.pickle`. the former is a text format for easy viewing, and the latter is a binary format for direct reading. -#### Extract Mel -```python -if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then - python3 ${BIN_DIR}/extract_mel.py \ - --input=${preprocess_path}/normalized_wav \ - --output=${preprocess_path}/mel -fi +Parallel WaveGAN checkpoint contains files listed below. +```text +pwg_aishell3_ckpt_0.5 +├── default.yaml # default config used to train parallel wavegan +├── feats_stats.npy # statistics used to normalize spectrogram when training parallel wavegan +└── snapshot_iter_1000000.pdz # generator parameters of parallel wavegan ``` - -### Model Training +`./local/synthesize.sh` calls `${BIN_DIR}/../synthesize.py`, which can synthesize waveform from `metadata.jsonl`. ```bash -CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${preprocess_path} ${train_output_path} +CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} ``` +The synthesizing step is very similar to that one of [tts0](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/csmsc/tts0), but we should set `--voice-cloning=True` when calling `${BIN_DIR}/../synthesize.py`. -Our model removes stop token prediction in Tacotron2, because of the problem of the extremely unbalanced proportion of positive and negative samples of stop token prediction, and it's very sensitive to the clip of audio silence. We use the last symbol from the highest point of attention to the encoder side as the termination condition. - -In addition, to accelerate the convergence of the model, we add `guided attention loss` to induce the alignment between encoder and decoder to show diagonal lines faster. ### Voice Cloning +Assume there are some reference audios in `./ref_audio` +```text +ref_audio +├── 001238.wav +├── LJ015-0254.wav +└── audio_self_test.mp3 +``` +`./local/voice_cloning.sh` calls `${BIN_DIR}/../voice_cloning.py` + ```bash -CUDA_VISIBLE_DEVICES=${gpus} ./local/voice_cloning.sh ${ge2e_params_path} ${tacotron2_params_path} ${waveflow_params_path} ${vc_input} ${vc_output} +CUDA_VISIBLE_DEVICES=${gpus} ./local/voice_cloning.sh ${conf_path} ${train_output_path} ${ckpt_name} ${ge2e_params_path} ${ref_audio_dir} ``` -## Pretrained Model -[tacotron2_aishell3_ckpt_0.3.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/tacotron2/tacotron2_aishell3_ckpt_0.3.zip). diff --git a/examples/aishell3/vc0/conf/default.yaml b/examples/aishell3/vc0/conf/default.yaml new file mode 100644 index 00000000..16a4a60c --- /dev/null +++ b/examples/aishell3/vc0/conf/default.yaml @@ -0,0 +1,86 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### + +fs: 24000 # sr +n_fft: 2048 # FFT size (samples). +n_shift: 300 # Hop size (samples). 12.5ms +win_length: 1200 # Window length (samples). 50ms + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. + +# Only used for feats_type != raw + +fmin: 80 # Minimum frequency of Mel basis. +fmax: 7600 # Maximum frequency of Mel basis. +n_mels: 80 # The number of mel basis. 
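+
+# NOTE: these feature extraction settings are expected to match the ones used to
+# train the vocoder applied at synthesis time (pwg_aishell3 in this example),
+# otherwise the generated mel spectrograms cannot be converted to waveforms correctly.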
+ +########################################################### +# DATA SETTING # +########################################################### +batch_size: 64 +num_workers: 2 + +########################################################### +# MODEL SETTING # +########################################################### +model: # keyword arguments for the selected model + embed_dim: 512 # char or phn embedding dimension + elayers: 1 # number of blstm layers in encoder + eunits: 512 # number of blstm units + econv_layers: 3 # number of convolutional layers in encoder + econv_chans: 512 # number of channels in convolutional layer + econv_filts: 5 # filter size of convolutional layer + atype: location # attention function type + adim: 512 # attention dimension + aconv_chans: 32 # number of channels in convolutional layer of attention + aconv_filts: 15 # filter size of convolutional layer of attention + cumulate_att_w: True # whether to cumulate attention weight + dlayers: 2 # number of lstm layers in decoder + dunits: 1024 # number of lstm units in decoder + prenet_layers: 2 # number of layers in prenet + prenet_units: 256 # number of units in prenet + postnet_layers: 5 # number of layers in postnet + postnet_chans: 512 # number of channels in postnet + postnet_filts: 5 # filter size of postnet layer + output_activation: null # activation function for the final output + use_batch_norm: True # whether to use batch normalization in encoder + use_concate: True # whether to concatenate encoder embedding with decoder outputs + use_residual: False # whether to use residual connection in encoder + dropout_rate: 0.5 # dropout rate + zoneout_rate: 0.1 # zoneout rate + reduction_factor: 1 # reduction factor + spk_embed_dim: 256 # speaker embedding dimension + spk_embed_integration_type: concat # how to integrate speaker embedding + + +########################################################### +# UPDATER SETTING # +########################################################### +updater: + use_masking: True # whether to apply masking for padded part in loss calculation + bce_pos_weight: 5.0 # weight of positive sample in binary cross entropy calculation + use_guided_attn_loss: True # whether to use guided attention loss + guided_attn_loss_sigma: 0.4 # sigma of guided attention loss + guided_attn_loss_lambda: 1.0 # strength of guided attention loss + + +########################################################## +# OPTIMIZER SETTING # +########################################################## +optimizer: + optim: adam # optimizer type + learning_rate: 1.0e-03 # learning rate + epsilon: 1.0e-06 # epsilon + weight_decay: 0.0 # weight decay coefficient + +########################################################### +# TRAINING SETTING # +########################################################### +max_epoch: 200 +num_snapshots: 5 + +########################################################### +# OTHER SETTING # +########################################################### +seed: 42 \ No newline at end of file diff --git a/examples/aishell3/vc0/local/preprocess.sh b/examples/aishell3/vc0/local/preprocess.sh index 5bf88066..069cf94c 100755 --- a/examples/aishell3/vc0/local/preprocess.sh +++ b/examples/aishell3/vc0/local/preprocess.sh @@ -1,36 +1,72 @@ #!/bin/bash -stage=0 +stage=3 stop_stage=100 -input=$1 -preprocess_path=$2 -alignment=$3 -ge2e_ckpt_path=$4 +config_path=$1 +ge2e_ckpt_path=$2 +# gen speaker embedding if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then python3 
${MAIN_ROOT}/paddlespeech/vector/exps/ge2e/inference.py \ - --input=${input}/wav \ - --output=${preprocess_path}/embed \ + --input=~/datasets/data_aishell3/train/wav/ \ + --output=dump/embed \ --checkpoint_path=${ge2e_ckpt_path} fi +# copy from tts3/preprocess if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then - echo "Process wav ..." - python3 ${BIN_DIR}/process_wav.py \ - --input=${input}/wav \ - --output=${preprocess_path}/normalized_wav \ - --alignment=${alignment} + # get durations from MFA's result + echo "Generate durations.txt from MFA results ..." + python3 ${MAIN_ROOT}/utils/gen_duration_from_textgrid.py \ + --inputdir=./aishell3_alignment_tone \ + --output durations.txt \ + --config=${config_path} fi if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then - python3 ${BIN_DIR}/preprocess_transcription.py \ - --input=${input} \ - --output=${preprocess_path} + # extract features + echo "Extract features ..." + python3 ${BIN_DIR}/preprocess.py \ + --dataset=aishell3 \ + --rootdir=~/datasets/data_aishell3/ \ + --dumpdir=dump \ + --dur-file=durations.txt \ + --config=${config_path} \ + --num-cpu=20 \ + --cut-sil=True \ + --spk_emb_dir=dump/embed fi if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then - python3 ${BIN_DIR}/extract_mel.py \ - --input=${preprocess_path}/normalized_wav \ - --output=${preprocess_path}/mel + # get features' stats(mean and std) + echo "Get features' stats ..." + python3 ${MAIN_ROOT}/utils/compute_statistics.py \ + --metadata=dump/train/raw/metadata.jsonl \ + --field-name="speech" +fi + +if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then + # normalize and covert phone to id, dev and test should use train's stats + echo "Normalize ..." + python3 ${BIN_DIR}/normalize.py \ + --metadata=dump/train/raw/metadata.jsonl \ + --dumpdir=dump/train/norm \ + --speech-stats=dump/train/speech_stats.npy \ + --phones-dict=dump/phone_id_map.txt \ + --speaker-dict=dump/speaker_id_map.txt + + python3 ${BIN_DIR}/normalize.py \ + --metadata=dump/dev/raw/metadata.jsonl \ + --dumpdir=dump/dev/norm \ + --speech-stats=dump/train/speech_stats.npy \ + --phones-dict=dump/phone_id_map.txt \ + --speaker-dict=dump/speaker_id_map.txt + + python3 ${BIN_DIR}/normalize.py \ + --metadata=dump/test/raw/metadata.jsonl \ + --dumpdir=dump/test/norm \ + --speech-stats=dump/train/speech_stats.npy \ + --phones-dict=dump/phone_id_map.txt \ + --speaker-dict=dump/speaker_id_map.txt fi diff --git a/examples/aishell3/vc0/local/synthesize.sh b/examples/aishell3/vc0/local/synthesize.sh new file mode 100755 index 00000000..98430280 --- /dev/null +++ b/examples/aishell3/vc0/local/synthesize.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +config_path=$1 +train_output_path=$2 +ckpt_name=$3 + +FLAGS_allocator_strategy=naive_best_fit \ +FLAGS_fraction_of_gpu_memory_to_use=0.01 \ +python3 ${BIN_DIR}/../synthesize.py \ + --am=tacotron2_aishell3 \ + --am_config=${config_path} \ + --am_ckpt=${train_output_path}/checkpoints/${ckpt_name} \ + --am_stat=dump/train/speech_stats.npy \ + --voc=pwgan_aishell3 \ + --voc_config=pwg_aishell3_ckpt_0.5/default.yaml \ + --voc_ckpt=pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ + --voc_stat=pwg_aishell3_ckpt_0.5/feats_stats.npy \ + --test_metadata=dump/test/norm/metadata.jsonl \ + --output_dir=${train_output_path}/test \ + --phones_dict=dump/phone_id_map.txt \ + --speaker_dict=dump/speaker_id_map.txt \ + --voice-cloning=True diff --git a/examples/aishell3/vc0/local/train.sh b/examples/aishell3/vc0/local/train.sh index f062cbbf..c775fcad 100755 --- 
a/examples/aishell3/vc0/local/train.sh +++ b/examples/aishell3/vc0/local/train.sh @@ -1,9 +1,13 @@ #!/bin/bash -preprocess_path=$1 +config_path=$1 train_output_path=$2 python3 ${BIN_DIR}/train.py \ - --data=${preprocess_path} \ - --output=${train_output_path} \ - --ngpu=1 \ No newline at end of file + --train-metadata=dump/train/norm/metadata.jsonl \ + --dev-metadata=dump/dev/norm/metadata.jsonl \ + --config=${config_path} \ + --output-dir=${train_output_path} \ + --ngpu=2 \ + --phones-dict=dump/phone_id_map.txt \ + --voice-cloning=True \ No newline at end of file diff --git a/examples/aishell3/vc0/local/voice_cloning.sh b/examples/aishell3/vc0/local/voice_cloning.sh index 3fe3de76..79831f3f 100755 --- a/examples/aishell3/vc0/local/voice_cloning.sh +++ b/examples/aishell3/vc0/local/voice_cloning.sh @@ -1,14 +1,24 @@ #!/bin/bash -ge2e_params_path=$1 -tacotron2_params_path=$2 -waveflow_params_path=$3 -vc_input=$4 -vc_output=$5 +config_path=$1 +train_output_path=$2 +ckpt_name=$3 +ge2e_params_path=$4 +ref_audio_dir=$5 -python3 ${BIN_DIR}/voice_cloning.py \ +FLAGS_allocator_strategy=naive_best_fit \ +FLAGS_fraction_of_gpu_memory_to_use=0.01 \ +python3 ${BIN_DIR}/../voice_cloning.py \ + --am=tacotron2_aishell3 \ + --am_config=${config_path} \ + --am_ckpt=${train_output_path}/checkpoints/${ckpt_name} \ + --am_stat=dump/train/speech_stats.npy \ + --voc=pwgan_aishell3 \ + --voc_config=pwg_aishell3_ckpt_0.5/default.yaml \ + --voc_ckpt=pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ + --voc_stat=pwg_aishell3_ckpt_0.5/feats_stats.npy \ --ge2e_params_path=${ge2e_params_path} \ - --tacotron2_params_path=${tacotron2_params_path} \ - --waveflow_params_path=${waveflow_params_path} \ - --input-dir=${vc_input} \ - --output-dir=${vc_output} \ No newline at end of file + --text="凯莫瑞安联合体的经济崩溃迫在眉睫。" \ + --input-dir=${ref_audio_dir} \ + --output-dir=${train_output_path}/vc_syn \ + --phones-dict=dump/phone_id_map.txt diff --git a/examples/aishell3/vc0/path.sh b/examples/aishell3/vc0/path.sh index dfae49af..9cdbe256 100755 --- a/examples/aishell3/vc0/path.sh +++ b/examples/aishell3/vc0/path.sh @@ -9,5 +9,5 @@ export PYTHONDONTWRITEBYTECODE=1 export PYTHONIOENCODING=UTF-8 export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH} -MODEL=voice_cloning/tacotron2_ge2e +MODEL=new_tacotron2 export BIN_DIR=${MAIN_ROOT}/paddlespeech/t2s/exps/${MODEL} diff --git a/examples/aishell3/vc0/run.sh b/examples/aishell3/vc0/run.sh index 870360c1..64f4ee3b 100755 --- a/examples/aishell3/vc0/run.sh +++ b/examples/aishell3/vc0/run.sh @@ -3,25 +3,20 @@ set -e source path.sh -gpus=0 +gpus=0,1 stage=0 stop_stage=100 -input=~/datasets/data_aishell3/train -preprocess_path=dump -alignment=./alignment +conf_path=conf/default.yaml +train_output_path=exp/default +ckpt_name=snapshot_iter_482.pdz +ref_audio_dir=ref_audio # not include ".pdparams" here ge2e_ckpt_path=./ge2e_ckpt_0.3/step-3000000 -train_output_path=output + # include ".pdparams" here ge2e_params_path=${ge2e_ckpt_path}.pdparams -tacotron2_params_path=${train_output_path}/checkpoints/step-1000.pdparams -# pretrained model -# tacotron2_params_path=./tacotron2_aishell3_ckpt_0.3/step-450000.pdparams -waveflow_params_path=./waveflow_ljspeech_ckpt_0.3/step-2000000.pdparams -vc_input=ref_audio -vc_output=syn_audio # with the following command, you can choose the stage range you want to run # such as `./run.sh --stage 0 --stop-stage 0` @@ -30,15 +25,20 @@ source ${MAIN_ROOT}/utils/parse_options.sh || exit 1 if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then # prepare data - 
CUDA_VISIBLE_DEVICES=${gpus} ./local/preprocess.sh ${input} ${preprocess_path} ${alignment} ${ge2e_ckpt_path} || exit -1 + CUDA_VISIBLE_DEVICES=${gpus} ./local/preprocess.sh ${conf_path} ${ge2e_ckpt_path} || exit -1 fi if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then - CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${preprocess_path} ${train_output_path} || exit -1 + # train model, all `ckpt` under `train_output_path/checkpoints/` dir + CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} || exit -1 fi if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then - CUDA_VISIBLE_DEVICES=${gpus} ./local/voice_cloning.sh ${ge2e_params_path} ${tacotron2_params_path} ${waveflow_params_path} ${vc_input} ${vc_output} || exit -1 + # synthesize, vocoder is pwgan + CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1 fi - +if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then + # synthesize, vocoder is pwgan + CUDA_VISIBLE_DEVICES=${gpus} ./local/voice_cloning.sh ${conf_path} ${train_output_path} ${ckpt_name} ${ge2e_params_path} ${ref_audio_dir} || exit -1 +fi diff --git a/examples/aishell3/vc1/README.md b/examples/aishell3/vc1/README.md index d5745bc3..8a566089 100644 --- a/examples/aishell3/vc1/README.md +++ b/examples/aishell3/vc1/README.md @@ -114,7 +114,7 @@ ref_audio ├── LJ015-0254.wav └── audio_self_test.mp3 ``` -`./local/voice_cloning.sh` calls `${BIN_DIR}/voice_cloning.py` +`./local/voice_cloning.sh` calls `${BIN_DIR}/../voice_cloning.py` ```bash CUDA_VISIBLE_DEVICES=${gpus} ./local/voice_cloning.sh ${conf_path} ${train_output_path} ${ckpt_name} ${ge2e_params_path} ${ref_audio_dir} diff --git a/examples/aishell3/vc1/local/voice_cloning.sh b/examples/aishell3/vc1/local/voice_cloning.sh index 6a50826e..2a8864ba 100755 --- a/examples/aishell3/vc1/local/voice_cloning.sh +++ b/examples/aishell3/vc1/local/voice_cloning.sh @@ -8,13 +8,15 @@ ref_audio_dir=$5 FLAGS_allocator_strategy=naive_best_fit \ FLAGS_fraction_of_gpu_memory_to_use=0.01 \ -python3 ${BIN_DIR}/voice_cloning.py \ - --fastspeech2-config=${config_path} \ - --fastspeech2-checkpoint=${train_output_path}/checkpoints/${ckpt_name} \ - --fastspeech2-stat=dump/train/speech_stats.npy \ - --pwg-config=pwg_aishell3_ckpt_0.5/default.yaml \ - --pwg-checkpoint=pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ - --pwg-stat=pwg_aishell3_ckpt_0.5/feats_stats.npy \ +python3 ${BIN_DIR}/../voice_cloning.py \ + --am=fastspeech2_aishell3 \ + --am_config=${config_path} \ + --am_ckpt=${train_output_path}/checkpoints/${ckpt_name} \ + --am_stat=dump/train/speech_stats.npy \ + --voc=pwgan_aishell3 \ + --voc_config=pwg_aishell3_ckpt_0.5/default.yaml \ + --voc_ckpt=pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ + --voc_stat=pwg_aishell3_ckpt_0.5/feats_stats.npy \ --ge2e_params_path=${ge2e_params_path} \ --text="凯莫瑞安联合体的经济崩溃迫在眉睫。" \ --input-dir=${ref_audio_dir} \ diff --git a/examples/csmsc/tts0/README.md b/examples/csmsc/tts0/README.md index 3f3b4a39..b030a51c 100644 --- a/examples/csmsc/tts0/README.md +++ b/examples/csmsc/tts0/README.md @@ -44,15 +44,13 @@ dump │ ├── norm │ └── raw └── train - ├── energy_stats.npy ├── norm - ├── pitch_stats.npy ├── raw └── speech_stats.npy ``` -The dataset is split into 3 parts, namely `train`, `dev`, and` test`, each of which contains a `norm` and `raw` subfolder. The raw folder contains speech、pitch and energy features of each utterance, while the norm folder contains normalized ones. 
The statistics used to normalize features are computed from the training set, which is located in `dump/train/*_stats.npy`. +The dataset is split into 3 parts, namely `train`, `dev`, and` test`, each of which contains a `norm` and `raw` subfolder. The raw folder contains speech features of each utterance, while the norm folder contains normalized ones. The statistics used to normalize features are computed from the training set, which is located in `dump/train/*_stats.npy`. -Also, there is a `metadata.jsonl` in each subfolder. It is a table-like file that contains phones, text_lengths, speech_lengths, durations, the path of speech features, the path of pitch features, the path of energy features, speaker, and the id of each utterance. +Also, there is a `metadata.jsonl` in each subfolder. It is a table-like file that contains phones, text_lengths, speech_lengths, durations, the path of speech features, speaker, and the id of each utterance. ### Model Training ```bash diff --git a/paddlespeech/t2s/datasets/am_batch_fn.py b/paddlespeech/t2s/datasets/am_batch_fn.py index 2fcb46d9..655e06e3 100644 --- a/paddlespeech/t2s/datasets/am_batch_fn.py +++ b/paddlespeech/t2s/datasets/am_batch_fn.py @@ -46,6 +46,47 @@ def tacotron2_single_spk_batch_fn(examples): return batch +def tacotron2_multi_spk_batch_fn(examples): + # fields = ["text", "text_lengths", "speech", "speech_lengths"] + text = [np.array(item["text"], dtype=np.int64) for item in examples] + speech = [np.array(item["speech"], dtype=np.float32) for item in examples] + text_lengths = [ + np.array(item["text_lengths"], dtype=np.int64) for item in examples + ] + speech_lengths = [ + np.array(item["speech_lengths"], dtype=np.int64) for item in examples + ] + + text = batch_sequences(text) + speech = batch_sequences(speech) + + # convert each batch to paddle.Tensor + text = paddle.to_tensor(text) + speech = paddle.to_tensor(speech) + text_lengths = paddle.to_tensor(text_lengths) + speech_lengths = paddle.to_tensor(speech_lengths) + + batch = { + "text": text, + "text_lengths": text_lengths, + "speech": speech, + "speech_lengths": speech_lengths, + } + # spk_emb has a higher priority than spk_id + if "spk_emb" in examples[0]: + spk_emb = [ + np.array(item["spk_emb"], dtype=np.float32) for item in examples + ] + spk_emb = batch_sequences(spk_emb) + spk_emb = paddle.to_tensor(spk_emb) + batch["spk_emb"] = spk_emb + elif "spk_id" in examples[0]: + spk_id = [np.array(item["spk_id"], dtype=np.int64) for item in examples] + spk_id = paddle.to_tensor(spk_id) + batch["spk_id"] = spk_id + return batch + + def speedyspeech_single_spk_batch_fn(examples): # fields = ["phones", "tones", "num_phones", "num_frames", "feats", "durations"] phones = [np.array(item["phones"], dtype=np.int64) for item in examples] diff --git a/paddlespeech/t2s/exps/fastspeech2/gen_gta_mel.py b/paddlespeech/t2s/exps/fastspeech2/gen_gta_mel.py index 13569b99..3fded29b 100644 --- a/paddlespeech/t2s/exps/fastspeech2/gen_gta_mel.py +++ b/paddlespeech/t2s/exps/fastspeech2/gen_gta_mel.py @@ -29,6 +29,7 @@ from paddlespeech.t2s.datasets.preprocess_utils import merge_silence from paddlespeech.t2s.models.fastspeech2 import FastSpeech2 from paddlespeech.t2s.models.fastspeech2 import StyleFastSpeech2Inference from paddlespeech.t2s.modules.normalizer import ZScore +from paddlespeech.t2s.utils import str2bool def evaluate(args, fastspeech2_config): @@ -196,9 +197,6 @@ def main(): parser.add_argument( "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") - def str2bool(str): - return True 
if str.lower() == 'true' else False - parser.add_argument( "--cut-sil", type=str2bool, diff --git a/paddlespeech/t2s/exps/fastspeech2/preprocess.py b/paddlespeech/t2s/exps/fastspeech2/preprocess.py index b874b3a7..fd6da2cb 100644 --- a/paddlespeech/t2s/exps/fastspeech2/preprocess.py +++ b/paddlespeech/t2s/exps/fastspeech2/preprocess.py @@ -35,6 +35,7 @@ from paddlespeech.t2s.datasets.preprocess_utils import get_input_token from paddlespeech.t2s.datasets.preprocess_utils import get_phn_dur from paddlespeech.t2s.datasets.preprocess_utils import get_spk_id_map from paddlespeech.t2s.datasets.preprocess_utils import merge_silence +from paddlespeech.t2s.utils import str2bool def process_sentence(config: Dict[str, Any], @@ -203,9 +204,6 @@ def main(): parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--cut-sil", type=str2bool, diff --git a/paddlespeech/t2s/exps/fastspeech2/train.py b/paddlespeech/t2s/exps/fastspeech2/train.py index 1dfa575a..d71292b3 100644 --- a/paddlespeech/t2s/exps/fastspeech2/train.py +++ b/paddlespeech/t2s/exps/fastspeech2/train.py @@ -38,6 +38,7 @@ from paddlespeech.t2s.training.extensions.visualizer import VisualDL from paddlespeech.t2s.training.optimizer import build_optimizers from paddlespeech.t2s.training.seeding import seed_everything from paddlespeech.t2s.training.trainer import Trainer +from paddlespeech.t2s.utils import str2bool def train_sp(args, config): @@ -182,9 +183,6 @@ def main(): default=None, help="speaker id map file for multiple speaker model.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--voice-cloning", type=str2bool, diff --git a/paddlespeech/t2s/exps/gan_vocoder/parallelwave_gan/train.py b/paddlespeech/t2s/exps/gan_vocoder/parallelwave_gan/train.py index a7881d6b..46ff67e1 100644 --- a/paddlespeech/t2s/exps/gan_vocoder/parallelwave_gan/train.py +++ b/paddlespeech/t2s/exps/gan_vocoder/parallelwave_gan/train.py @@ -41,6 +41,7 @@ from paddlespeech.t2s.training.extensions.snapshot import Snapshot from paddlespeech.t2s.training.extensions.visualizer import VisualDL from paddlespeech.t2s.training.seeding import seed_everything from paddlespeech.t2s.training.trainer import Trainer +from paddlespeech.t2s.utils import str2bool def train_sp(args, config): @@ -204,8 +205,6 @@ def train_sp(args, config): def main(): # parse args and config and redirect to train_sp - def str2bool(str): - return True if str.lower() == 'true' else False parser = argparse.ArgumentParser( description="Train a ParallelWaveGAN model.") diff --git a/paddlespeech/t2s/exps/gan_vocoder/preprocess.py b/paddlespeech/t2s/exps/gan_vocoder/preprocess.py index 782fbdf2..47d0a292 100644 --- a/paddlespeech/t2s/exps/gan_vocoder/preprocess.py +++ b/paddlespeech/t2s/exps/gan_vocoder/preprocess.py @@ -30,6 +30,7 @@ from yacs.config import CfgNode from paddlespeech.t2s.data.get_feats import LogMelFBank from paddlespeech.t2s.datasets.preprocess_utils import get_phn_dur from paddlespeech.t2s.datasets.preprocess_utils import merge_silence +from paddlespeech.t2s.utils import str2bool def process_sentence(config: Dict[str, Any], @@ -165,9 +166,6 @@ def main(): parser.add_argument( "--dur-file", default=None, type=str, help="path to durations.txt.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--cut-sil", type=str2bool, diff --git a/paddlespeech/t2s/exps/new_tacotron2/preprocess.py 
b/paddlespeech/t2s/exps/new_tacotron2/preprocess.py index 5fc6b590..ffbeaad9 100644 --- a/paddlespeech/t2s/exps/new_tacotron2/preprocess.py +++ b/paddlespeech/t2s/exps/new_tacotron2/preprocess.py @@ -33,6 +33,7 @@ from paddlespeech.t2s.datasets.preprocess_utils import get_input_token from paddlespeech.t2s.datasets.preprocess_utils import get_phn_dur from paddlespeech.t2s.datasets.preprocess_utils import get_spk_id_map from paddlespeech.t2s.datasets.preprocess_utils import merge_silence +from paddlespeech.t2s.utils import str2bool def process_sentence(config: Dict[str, Any], @@ -179,9 +180,6 @@ def main(): parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--cut-sil", type=str2bool, diff --git a/paddlespeech/t2s/exps/new_tacotron2/train.py b/paddlespeech/t2s/exps/new_tacotron2/train.py index 20f73f0c..a77331e7 100644 --- a/paddlespeech/t2s/exps/new_tacotron2/train.py +++ b/paddlespeech/t2s/exps/new_tacotron2/train.py @@ -27,6 +27,7 @@ from paddle.io import DataLoader from paddle.io import DistributedBatchSampler from yacs.config import CfgNode +from paddlespeech.t2s.datasets.am_batch_fn import tacotron2_multi_spk_batch_fn from paddlespeech.t2s.datasets.am_batch_fn import tacotron2_single_spk_batch_fn from paddlespeech.t2s.datasets.data_table import DataTable from paddlespeech.t2s.models.new_tacotron2 import Tacotron2 @@ -37,6 +38,7 @@ from paddlespeech.t2s.training.extensions.visualizer import VisualDL from paddlespeech.t2s.training.optimizer import build_optimizers from paddlespeech.t2s.training.seeding import seed_everything from paddlespeech.t2s.training.trainer import Trainer +from paddlespeech.t2s.utils import str2bool def train_sp(args, config): @@ -60,33 +62,38 @@ def train_sp(args, config): # dataloader has been too verbose logging.getLogger("DataLoader").disabled = True + fields = [ + "text", + "text_lengths", + "speech", + "speech_lengths", + ] + + converters = { + "speech": np.load, + } + if args.voice_cloning: + print("Training voice cloning!") + collate_fn = tacotron2_multi_spk_batch_fn + fields += ["spk_emb"] + converters["spk_emb"] = np.load + else: + print("single speaker tacotron2!") + collate_fn = tacotron2_single_spk_batch_fn + # construct dataset for training and validation with jsonlines.open(args.train_metadata, 'r') as reader: train_metadata = list(reader) train_dataset = DataTable( data=train_metadata, - fields=[ - "text", - "text_lengths", - "speech", - "speech_lengths", - ], - converters={ - "speech": np.load, - }, ) + fields=fields, + converters=converters, ) with jsonlines.open(args.dev_metadata, 'r') as reader: dev_metadata = list(reader) dev_dataset = DataTable( data=dev_metadata, - fields=[ - "text", - "text_lengths", - "speech", - "speech_lengths", - ], - converters={ - "speech": np.load, - }, ) + fields=fields, + converters=converters, ) # collate function and dataloader train_sampler = DistributedBatchSampler( @@ -100,7 +107,7 @@ def train_sp(args, config): train_dataloader = DataLoader( train_dataset, batch_sampler=train_sampler, - collate_fn=tacotron2_single_spk_batch_fn, + collate_fn=collate_fn, num_workers=config.num_workers) dev_dataloader = DataLoader( @@ -108,7 +115,7 @@ def train_sp(args, config): shuffle=False, drop_last=False, batch_size=config.batch_size, - collate_fn=tacotron2_single_spk_batch_fn, + collate_fn=collate_fn, num_workers=config.num_workers) print("dataloaders done!") @@ -166,6 +173,12 @@ def main(): 
parser.add_argument( "--phones-dict", type=str, default=None, help="phone vocabulary file.") + parser.add_argument( + "--voice-cloning", + type=str2bool, + default=False, + help="whether training voice cloning model.") + args = parser.parse_args() with open(args.config) as f: diff --git a/paddlespeech/t2s/exps/speedyspeech/gen_gta_mel.py b/paddlespeech/t2s/exps/speedyspeech/gen_gta_mel.py index b6440fd6..31b7d2ea 100644 --- a/paddlespeech/t2s/exps/speedyspeech/gen_gta_mel.py +++ b/paddlespeech/t2s/exps/speedyspeech/gen_gta_mel.py @@ -30,6 +30,7 @@ from paddlespeech.t2s.frontend.zh_frontend import Frontend from paddlespeech.t2s.models.speedyspeech import SpeedySpeech from paddlespeech.t2s.models.speedyspeech import SpeedySpeechInference from paddlespeech.t2s.modules.normalizer import ZScore +from paddlespeech.t2s.utils import str2bool def evaluate(args, speedyspeech_config): @@ -213,9 +214,6 @@ def main(): parser.add_argument( "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--cut-sil", type=str2bool, diff --git a/paddlespeech/t2s/exps/speedyspeech/normalize.py b/paddlespeech/t2s/exps/speedyspeech/normalize.py index a427c469..249a4d6d 100644 --- a/paddlespeech/t2s/exps/speedyspeech/normalize.py +++ b/paddlespeech/t2s/exps/speedyspeech/normalize.py @@ -23,6 +23,7 @@ from sklearn.preprocessing import StandardScaler from tqdm import tqdm from paddlespeech.t2s.datasets.data_table import DataTable +from paddlespeech.t2s.utils import str2bool def main(): @@ -55,9 +56,6 @@ def main(): default=1, help="logging level. higher is more logging. (default=1)") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--use-relative-path", type=str2bool, diff --git a/paddlespeech/t2s/exps/speedyspeech/preprocess.py b/paddlespeech/t2s/exps/speedyspeech/preprocess.py index 9ff77144..db888fba 100644 --- a/paddlespeech/t2s/exps/speedyspeech/preprocess.py +++ b/paddlespeech/t2s/exps/speedyspeech/preprocess.py @@ -33,6 +33,7 @@ from paddlespeech.t2s.datasets.preprocess_utils import get_phn_dur from paddlespeech.t2s.datasets.preprocess_utils import get_phones_tones from paddlespeech.t2s.datasets.preprocess_utils import get_spk_id_map from paddlespeech.t2s.datasets.preprocess_utils import merge_silence +from paddlespeech.t2s.utils import str2bool def process_sentence(config: Dict[str, Any], @@ -190,9 +191,6 @@ def main(): parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--cut-sil", type=str2bool, diff --git a/paddlespeech/t2s/exps/speedyspeech/train.py b/paddlespeech/t2s/exps/speedyspeech/train.py index 448cd7bb..de0d308b 100644 --- a/paddlespeech/t2s/exps/speedyspeech/train.py +++ b/paddlespeech/t2s/exps/speedyspeech/train.py @@ -38,6 +38,7 @@ from paddlespeech.t2s.training.extensions.visualizer import VisualDL from paddlespeech.t2s.training.optimizer import build_optimizers from paddlespeech.t2s.training.seeding import seed_everything from paddlespeech.t2s.training.trainer import Trainer +from paddlespeech.t2s.utils import str2bool def train_sp(args, config): @@ -186,9 +187,6 @@ def main(): parser.add_argument( "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--use-relative-path", type=str2bool, diff --git 
a/paddlespeech/t2s/exps/synthesize.py b/paddlespeech/t2s/exps/synthesize.py index 02bfcb15..e6cc630a 100644 --- a/paddlespeech/t2s/exps/synthesize.py +++ b/paddlespeech/t2s/exps/synthesize.py @@ -25,6 +25,7 @@ from yacs.config import CfgNode from paddlespeech.s2t.utils.dynamic_import import dynamic_import from paddlespeech.t2s.datasets.data_table import DataTable from paddlespeech.t2s.modules.normalizer import ZScore +from paddlespeech.t2s.utils import str2bool model_alias = { # acoustic model @@ -97,6 +98,9 @@ def evaluate(args): fields = ["utt_id", "phones", "tones"] elif am_name == 'tacotron2': fields = ["utt_id", "text"] + if args.voice_cloning: + print("voice cloning!") + fields += ["spk_emb"] test_dataset = DataTable(data=test_metadata, fields=fields) @@ -178,7 +182,11 @@ def evaluate(args): mel = am_inference(phone_ids, tone_ids) elif am_name == 'tacotron2': phone_ids = paddle.to_tensor(datum["text"]) - mel = am_inference(phone_ids) + spk_emb = None + # multi speaker + if args.voice_cloning and "spk_emb" in datum: + spk_emb = paddle.to_tensor(np.load(datum["spk_emb"])) + mel = am_inference(phone_ids, spk_emb=spk_emb) # vocoder wav = voc_inference(mel) sf.write( @@ -199,7 +207,7 @@ def main(): default='fastspeech2_csmsc', choices=[ 'speedyspeech_csmsc', 'fastspeech2_csmsc', 'fastspeech2_ljspeech', - 'fastspeech2_aishell3', 'fastspeech2_vctk', 'tacotron2_csmsc' + 'fastspeech2_aishell3', 'fastspeech2_vctk', 'tacotron2_csmsc', 'tacotron2_aishell3' ], help='Choose acoustic model type of tts task.') parser.add_argument( @@ -225,9 +233,6 @@ def main(): parser.add_argument( "--speaker_dict", type=str, default=None, help="speaker id map file.") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--voice-cloning", type=str2bool, diff --git a/paddlespeech/t2s/exps/tacotron2/__init__.py b/paddlespeech/t2s/exps/tacotron2/__init__.py deleted file mode 100644 index abf198b9..00000000 --- a/paddlespeech/t2s/exps/tacotron2/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/paddlespeech/t2s/exps/tacotron2/config.py b/paddlespeech/t2s/exps/tacotron2/config.py deleted file mode 100644 index 0ce2df36..00000000 --- a/paddlespeech/t2s/exps/tacotron2/config.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from yacs.config import CfgNode as CN - -_C = CN() -_C.data = CN( - dict( - batch_size=32, # batch size - valid_size=64, # the first N examples are reserved for validation - sample_rate=22050, # Hz, sample rate - n_fft=1024, # fft frame size - win_length=1024, # window size - hop_length=256, # hop size between ajacent frame - fmax=8000, # Hz, max frequency when converting to mel - fmin=0, # Hz, min frequency when converting to mel - n_mels=80, # mel bands - padding_idx=0, # text embedding's padding index - )) - -_C.model = CN( - dict( - vocab_size=37, # set this according to the frontend's vocab size - n_tones=None, - reduction_factor=1, # reduction factor - d_encoder=512, # embedding & encoder's internal size - encoder_conv_layers=3, # number of conv layer in tacotron2 encoder - encoder_kernel_size=5, # kernel size of conv layers in tacotron2 encoder - d_prenet=256, # hidden size of decoder prenet - d_attention_rnn=1024, # hidden size of the first rnn layer in tacotron2 decoder - d_decoder_rnn=1024, # hidden size of the second rnn layer in tacotron2 decoder - d_attention=128, # hidden size of decoder location linear layer - attention_filters=32, # number of filter in decoder location conv layer - attention_kernel_size=31, # kernel size of decoder location conv layer - d_postnet=512, # hidden size of decoder postnet - postnet_kernel_size=5, # kernel size of conv layers in postnet - postnet_conv_layers=5, # number of conv layer in decoder postnet - p_encoder_dropout=0.5, # droput probability in encoder - p_prenet_dropout=0.5, # droput probability in decoder prenet - p_attention_dropout=0.1, # droput probability of first rnn layer in decoder - p_decoder_dropout=0.1, # droput probability of second rnn layer in decoder - p_postnet_dropout=0.5, # droput probability in decoder postnet - d_global_condition=None, - use_stop_token=True, # wherther to use binary classifier to predict when to stop - use_guided_attention_loss=False, # whether to use guided attention loss - guided_attention_loss_sigma=0.2 # sigma in guided attention loss - )) - -_C.training = CN( - dict( - lr=1e-3, # learning rate - weight_decay=1e-6, # the coeff of weight decay - grad_clip_thresh=1.0, # the clip norm of grad clip. - plot_interval=1000, # plot attention and spectrogram - valid_interval=1000, # validation - save_interval=1000, # checkpoint - max_iteration=500000, # max iteration to train - )) - - -def get_cfg_defaults(): - """Get a yacs CfgNode object with default values for my_project.""" - # Return a clone so that the defaults will not be altered - # This is for the "local variable" use pattern - return _C.clone() diff --git a/paddlespeech/t2s/exps/tacotron2/ljspeech.py b/paddlespeech/t2s/exps/tacotron2/ljspeech.py deleted file mode 100644 index 08db2a64..00000000 --- a/paddlespeech/t2s/exps/tacotron2/ljspeech.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pickle -from pathlib import Path - -import numpy as np -from paddle.io import Dataset - -from paddlespeech.t2s.data.batch import batch_spec -from paddlespeech.t2s.data.batch import batch_text_id - - -class LJSpeech(Dataset): - """A simple dataset adaptor for the processed ljspeech dataset.""" - - def __init__(self, root): - self.root = Path(root).expanduser() - records = [] - with open(self.root / "metadata.pkl", 'rb') as f: - metadata = pickle.load(f) - for mel_name, text, ids in metadata: - mel_name = self.root / "mel" / (mel_name + ".npy") - records.append((mel_name, text, ids)) - self.records = records - - def __getitem__(self, i): - mel_name, _, ids = self.records[i] - mel = np.load(mel_name) - return ids, mel - - def __len__(self): - return len(self.records) - - -class LJSpeechCollector(object): - """A simple callable to batch LJSpeech examples.""" - - def __init__(self, padding_idx=0, padding_value=0., padding_stop_token=1.0): - self.padding_idx = padding_idx - self.padding_value = padding_value - self.padding_stop_token = padding_stop_token - - def __call__(self, examples): - texts = [] - mels = [] - text_lens = [] - mel_lens = [] - - for data in examples: - text, mel = data - text = np.array(text, dtype=np.int64) - text_lens.append(len(text)) - mels.append(mel) - texts.append(text) - mel_lens.append(mel.shape[1]) - - # Sort by text_len in descending order - texts = [ - i for i, _ in sorted( - zip(texts, text_lens), key=lambda x: x[1], reverse=True) - ] - mels = [ - i for i, _ in sorted( - zip(mels, text_lens), key=lambda x: x[1], reverse=True) - ] - - mel_lens = [ - i for i, _ in sorted( - zip(mel_lens, text_lens), key=lambda x: x[1], reverse=True) - ] - - mel_lens = np.array(mel_lens, dtype=np.int64) - text_lens = np.array(sorted(text_lens, reverse=True), dtype=np.int64) - - # Pad sequence with largest len of the batch - texts, _ = batch_text_id(texts, pad_id=self.padding_idx) - mels, _ = batch_spec(mels, pad_value=self.padding_value) - mels = np.transpose(mels, axes=(0, 2, 1)) - - return texts, mels, text_lens, mel_lens diff --git a/paddlespeech/t2s/exps/tacotron2/preprocess.py b/paddlespeech/t2s/exps/tacotron2/preprocess.py deleted file mode 100644 index 480b3331..00000000 --- a/paddlespeech/t2s/exps/tacotron2/preprocess.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import argparse -import os -import pickle -from pathlib import Path - -import numpy as np -import tqdm - -from paddlespeech.t2s.audio import AudioProcessor -from paddlespeech.t2s.audio import LogMagnitude -from paddlespeech.t2s.datasets import LJSpeechMetaData -from paddlespeech.t2s.exps.tacotron2.config import get_cfg_defaults -from paddlespeech.t2s.frontend import EnglishCharacter - - -def create_dataset(config, source_path, target_path, verbose=False): - # create output dir - target_path = Path(target_path).expanduser() - mel_path = target_path / "mel" - os.makedirs(mel_path, exist_ok=True) - - meta_data = LJSpeechMetaData(source_path) - frontend = EnglishCharacter() - processor = AudioProcessor( - sample_rate=config.data.sample_rate, - n_fft=config.data.n_fft, - n_mels=config.data.n_mels, - win_length=config.data.win_length, - hop_length=config.data.hop_length, - fmax=config.data.fmax, - fmin=config.data.fmin) - normalizer = LogMagnitude() - - records = [] - for (fname, text, _) in tqdm.tqdm(meta_data): - wav = processor.read_wav(fname) - mel = processor.mel_spectrogram(wav) - mel = normalizer.transform(mel) - ids = frontend(text) - mel_name = os.path.splitext(os.path.basename(fname))[0] - - # save mel spectrogram - records.append((mel_name, text, ids)) - np.save(mel_path / mel_name, mel) - if verbose: - print("save mel spectrograms into {}".format(mel_path)) - - # save meta data as pickle archive - with open(target_path / "metadata.pkl", 'wb') as f: - pickle.dump(records, f) - if verbose: - print("saved metadata into {}".format(target_path / "metadata.pkl")) - - print("Done.") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="create dataset") - parser.add_argument( - "--config", - type=str, - metavar="FILE", - help="extra config to overwrite the default config") - parser.add_argument( - "--input", type=str, help="path of the ljspeech dataset") - parser.add_argument( - "--output", type=str, help="path to save output dataset") - parser.add_argument( - "--opts", - nargs=argparse.REMAINDER, - help="options to overwrite --config file and the default config, passing in KEY VALUE pairs" - ) - parser.add_argument( - "-v", "--verbose", action="store_true", help="print msg") - - config = get_cfg_defaults() - args = parser.parse_args() - if args.config: - config.merge_from_file(args.config) - if args.opts: - config.merge_from_list(args.opts) - config.freeze() - print(config.data) - - create_dataset(config, args.input, args.output, args.verbose) diff --git a/paddlespeech/t2s/exps/tacotron2/synthesize.ipynb b/paddlespeech/t2s/exps/tacotron2/synthesize.ipynb deleted file mode 100644 index cc424311..00000000 --- a/paddlespeech/t2s/exps/tacotron2/synthesize.ipynb +++ /dev/null @@ -1,342 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## TTS with Tacotron2 + Waveflow" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import paddle\n", - "from matplotlib import pyplot as plt\n", - "from IPython import display as ipd\n", - "%matplotlib inline\n", - "\n", - "from paddlespeech.t2s.utils import display\n", - "from paddlespeech.t2s.utils import layer_tools\n", - "paddle.set_device(\"gpu:0\")\n", - "\n", - "import sys\n", - "sys.path.append(\"../..\")\n", - "import examples" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Tacotron2: synthesizer model\n", - "\n", - "Tacotron2 is used here as a phonemes to spectrogram 
model. Here we will use an alternative config. In this config, the tacotron2 model does not have a binary classifier to predict whether the generation should stop.\n", - "\n", - "Instead, the peak position is used as the criterion. When the peak position of the attention reaches the end of the encoder outputs, it implies that the content is exhausted. So we stop the generated after 10 frames." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from paddlespeech.t2s.models.tacotron2 import Tacotron2\n", - "from paddlespeech.t2s.frontend import EnglishCharacter" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "data:\n", - " batch_size: 32\n", - " fmax: 8000\n", - " fmin: 0\n", - " hop_length: 256\n", - " n_fft: 1024\n", - " n_mels: 80\n", - " padding_idx: 0\n", - " sample_rate: 22050\n", - " valid_size: 64\n", - " win_length: 1024\n", - "model:\n", - " attention_filters: 32\n", - " attention_kernel_size: 31\n", - " d_attention: 128\n", - " d_attention_rnn: 1024\n", - " d_decoder_rnn: 1024\n", - " d_encoder: 512\n", - " d_global_condition: None\n", - " d_postnet: 512\n", - " d_prenet: 256\n", - " encoder_conv_layers: 3\n", - " encoder_kernel_size: 5\n", - " guided_attention_loss_sigma: 0.2\n", - " n_tones: None\n", - " p_attention_dropout: 0.1\n", - " p_decoder_dropout: 0.1\n", - " p_encoder_dropout: 0.5\n", - " p_postnet_dropout: 0.5\n", - " p_prenet_dropout: 0.5\n", - " postnet_conv_layers: 5\n", - " postnet_kernel_size: 5\n", - " reduction_factor: 1\n", - " use_guided_attention_loss: True\n", - " use_stop_token: False\n", - " vocab_size: 37\n", - "training:\n", - " grad_clip_thresh: 1.0\n", - " lr: 0.001\n", - " max_iteration: 500000\n", - " plot_interval: 1000\n", - " save_interval: 1000\n", - " valid_interval: 1000\n", - " weight_decay: 1e-06\n" - ] - } - ], - "source": [ - "from examples.tacotron2 import config as tacotron2_config\n", - "synthesizer_config = tacotron2_config.get_cfg_defaults()\n", - "synthesizer_config.merge_from_file(\"configs/alternative.yaml\")\n", - "print(synthesizer_config)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[checkpoint] Rank 0: loaded model from ../../pretrained/tacotron2/tacotron2_ljspeech_ckpt_0.3_alternative/step-50000.pdparams\n" - ] - } - ], - "source": [ - "frontend = EnglishCharacter()\n", - "model = Tacotron2.from_pretrained(\n", - " synthesizer_config, \"../../pretrained/tacotron2/tacotron2_ljspeech_ckpt_0.3_alternative/step-50000\")\n", - "model.eval()" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - " 36%|███▋ | 365/1000 [00:01<00:02, 256.89it/s]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "content exhausted!\n" - ] - } - ], - "source": [ - "sentence = \"Life was like a box of chocolates, you never know what you're gonna get.\" \n", - "sentence = paddle.to_tensor(frontend(sentence)).unsqueeze(0)\n", - "\n", - "with paddle.no_grad():\n", - " outputs = model.infer(sentence)\n", - "mel_output = outputs[\"mel_outputs_postnet\"][0].numpy().T\n", - "alignment = outputs[\"alignments\"][0].numpy().T" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZIAAAEYCAYAAAB2qXBEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3de5xcdZ3n/9f7nKrq+yWdhCY3SLgblQAygKCAgAquP5jZUQd0d+M8WJlx1NHxssDM6qi7s4Pjb1DHZXQyiuKsI+JtwBkUWQRUHDDhFm7BhEDIPeTe6U53V9X57B/ndLrS6Ut1d1XX6c7nmUc9+pxT5/Lp09X9zfl8bzIznHPOuYkKah2Ac8656c0LEuecc5PiBYlzzrlJ8YLEOefcpHhB4pxzblK8IHHOOTcpVStIJJ0q6YmS135JH5HUIeleSWuTr7OqFYNzzrnq01T0I5EUApuBc4EPALvN7CZJNwCzzOz6qgfhnHOuKqaqIHkL8JdmdoGk54GLzWyrpHnAA2Z26mjH51Rn9TRVPU7n3FFEIJUkZcKQqCFLoVG0dxwgwAgVESoiMtEU9KH4sEOKiO6oDoCsighDGNs25+naXRAV8NY3Ndmu3cWy9390dd89ZnZ5Ja5drswUXedq4DvJcqeZbU2WtwGdYx1cTxPn6tJqxeacOwopk0G53KH1YFY7Pa9dwI4zs/zeH/ySxqCftkwP7WEPXcV6zmtYT52KhBr8z3dXlOXhgycSWcD87B6yKpBTkQ9e9WLF4ty5u8gj9ywse//svBfmVOziZap6QSIpB1wJ3Dj0PTMzScM+Ekm6DrgOoJ7GqsbonHPpZRQtqnUQo5qKJ5IrgMfMbHuyvl3SvJLU1o7hDjKzFcAKgFZ1+IBgzrnKkFAuR9DcBMfMpvvEWWy+KMTm9ZLZkKFxKzz2nqVobxcEAdbbh8KAO7sWY/15sAiL4j9Jymbi82QyUChAvoAVCuw6uK1i4RoQke4/gVPR/PcaBtNaAHcBy5Pl5cCdUxCDc85NW9E4/tVCVZ9IJDUBbwb+qGTzTcAdkq4FNgDvqmYMzjk3nRlGMeWjtFe1IDGzbmD2kG27AK85d865MqU9tTVVrbacc672JDIL5lOc287+E5t55cyA4JQDhGuamXNnjtbn9hDs2ktx9x4sXwCLYLinASUteyODfAEgrj8BCMPB9yvAgKIXJM455ybDn0icc85NmAH5o7mOxDnnqko6MvWUpJUUhvHXTPxnTg0NbLv6NP7Hx77Bqu4T+MGLy2j4+SwWrNhL4eWnAbAwJEo6KSoQhIMdFoksTnUpgEAQGVbIU9zff0RYZuX3RB+LYZ7acs45NwkGxXSXI16QOOdcmsUdEtPNCxLn3PQ1kNYaaCV1aD0ABYQL53HwxDlsuizHf3nb/dz6+BK+/OplEBnzeQEsolAsHjrOCgWsZJ1CYfTrS0deu+JEkcq1AqsGL0iccy7FjLh6Js28IHHOuZTzJxLnnKuCoL4eMzvsv+tBexvWOZv9r2rj4OyAPWcWIBLZPfDQu8/g1A1rifr74zTUaCmp4VqDlb4HoAAFilNhCuKJSgZadVWu0VbSIdELEuecc5MQmRckzjnnJsifSJxzzk2KIfIW1jqMUXlB4pybdpTJsOHjZ3HJVY/yjo5VBIpYnDnA2nwbTx48nnu2L2X37ll03NtM68sF6h94nKi39/CTjNZcN6nzUBjGvdiLxbgupPRYK3Jo4sLSnuwV7NUO/kTinHNu0kTRpmIOwonzgsQ551Is7tnuBYlzzk2MhMKQ/EXL2HpeHflX97C4cxfZoEj7P0S8eOUsPrfvPCxfGJJ62sxxtunQallDjJQ06yWKz2VRGWmq4eYeqXAHQk9tOeecmzAzT20555ybpMifSJxzrkwSQXMzwZwODp44h6BgbD+nnsuv/nd+sPosWn7TSM+WenL7i7T/+lkKXV2VHyyxnHRWSbwKQwjDeGreQ9srF07casufSJxzzk2Yp7acc85Ngrfacs45NymG6Pee7c45NzJlcyiXRbkcam9l89sXELx5F7ObdtCWO8gLv13M6tfByfboYaPyRplM3FS3Uj3JJ1LXYoYVCkdOgFXpapuUp7aqGp2kdknfl7RG0nOSXi+pQ9K9ktYmX2dVMwbnnJvOBirby33VQrWv+iXgp2Z2GrAMeA64AbjPzE4G7kvWnXPODcMQRSv/VQtVS21JagMuBN4LYGb9QL+kq4CLk91uAx4Arq9WHM65lApCglyWYO4cotmtdC9qpvuYkMxBo/0LTeSe20tPUZy6d3U8gRUcln6yyOKJpWyUSahmiKO5sn0J8ArwDUnLgEeBDwOdZrY12Wcb0DncwZKuA64DqKeximE651x6mZH65r/VjC4DnAV8xczOBLoZksay+L8Zw/5XwsxWmNnZZnZ2lroqhumcc2kmonG8aqGaTySbgE1m9kiy/n3igmS7pHlmtlXSPGBHFWNwzqVNEBI0NVJ87Qm8fHETjefvJJfpZc+BgN5tTSz4OeQef4HCvv3x/iOlrSzConT/T70SjKP4icTMtgEbJZ2abLoUeBa4C1iebFsO3FmtGJxzbiZIe6utavcj+RDwbUk5YD3wh8SF1x2SrgU2AO+qcgzOOTdtGSKqUWusclW1IDGzJ4Czh3nr0mpe1zmXMhJhSwu2eD6985rZ8boc/W1Gbi+0f76J3IZdtHVtwQ72Eh08SLGMVlgKQ1CQ9EeMZmzLLQPylu6+4+mOzjnnjnryia2cc85NnJH+IVK8IHHOuZTzJxLn3FFJ2RxYBGFIeMxcDp7ayf7jc8igbo8x+9kiLat3UNy8lWJk8ZzrNo66jjAZEbcwc+tHIJ5qt5JPJJIuJx6+KgS+ZmY3DXn/OOJRR9qTfW4ws7tHO6cXJM45l3KV6kciKQRuAd5M3NdvpaS7zOzZkt3+O3CHmX1F0lLgbmDxaOdNd+LNOeeOcvHEVhXr2X4OsM7M1ifjH94OXDXMJVuT5TZgy1gn9ScS51xlBSGZ+cey79yF9BwTsGdZkbqOgxQLRWyL0bAjYP4vDhCu3URhz754jnSVOfCiFM9BkrD+/hmd1oqNe6rdOZJWlayvMLMVyfICYGPJe5uAc4cc/2ngZ5I+BDQBl411QS9InHMuxeJWW+OqbN9pZsP13yvXNcA3zexvJb0e+CdJrzGzaKQDvCBxzrmUq+DQJ5uBRSXrC5Ntpa4FLgcws3+XVA/MYZRxEb0gcc5NnkSm8xgKxx1D93GNvPLOgxw3Zwu7drfTvLKFBV8zwk1bKe7aEw+2WChw2AS55aanzA5NrWv9M7u11oAKD5GyEjhZ0hLiAuRq4N1D9nmZePSRb0p6FVBPPCXIiLwgcc65FDODvIUVOpcVJH0QuIe4ae+tZvaMpM8Cq8zsLuBjwD9K+jPizNp7zUYvsb0gcc65lKvkoI1Jn5C7h2z7VMnys8AF4zmnFyTOOZdicWor3T01vCBxzo1PMpIvgC2ez64zZ9H19gN86czbeaj7FFbuPp7oH44juH01xyUtTQ0oVDyOgJ
k86m8pHyLFOefchE2g+e+U84LEOedSzVNbzrmZQkK5HMHxC+l6zRwOzA/pWhIRZYyOnzVz8zXLkoEXt9Ay9qgakzfQI/4oUMbQJzXlBYlzzqWYGRQ9teWcc24yPLXlnJu+gpCgvo5gdgf9J8xl3+J6upaI3mMKqFhkzqqA2U/shbUbiAqTaJdV7qCNQx0FLbYq3LO9Krwgcc65FDOg4E8kzjnnJsNTW8656SUIUTZD0NwEx8zm5Svn0jfLKMwqEBw02p6HJd/dA1t3EHUfJCrkj0wxjSdVdWiOkaOjc+G4mae2nHPOTcLADIlpVtWCRNJLQBdQBApmdrakDuC7xHMAvwS8y8z2VDMO55ybztL+RDIVibc3mdkZJTN23QDcZ2YnA/cl684554YxMERKua9aqEVq6yrg4mT5NuAB4PoaxOGcA5AI6uoIjj2G3efPZ//xAf2zjHDxAU6fv4Xup2fRuCHDggehccNueGUP0d59cS/2kYynrqNksio3vKP9icSIJ5F/VNJ1ybZOM9uaLG8DOoc7UNJ1klZJWpWnr8phOudcOg30Izman0jeYGabJR0D3CtpTembZmaShv2vi5mtAFYAtKrDm3I4545aR3Vlu5ltTr7ukPQj4Bxgu6R5ZrZV0jxGmVDeOVdFQUjwmpM5/bY1vLphHaHW8vcvXkTPi3PI7QppvbuZA490cOraxw6lsYpmR81AialhR3FqS1KTpJaBZeAtwNPAXcDyZLflwJ3VisE556Y7AwpRUParFqr5RNIJ/Ejx/14ywD+b2U8lrQTukHQtsAF4VxVjcM65ae2oHmvLzNYDy4bZvgu4tFrXdc4NL5zdAZ1z6Dm+ja7jMuw9zYiaigT/8XiePthJtL+L5t4NnMIGsAhIUlkwmM4a6IWevA94b/QpYEdrQeKcc64yZkRlu6RjiSvKDVhpZtuqGpVzzjkgfuCb9qktSf8V+BTwc0DAlyV91sxurXZwzrkKkAgaG+k+/yS2XBBSbIogMpo2Biz86V4KGzbF09aWy9NaU24mpLY+AZyZ1G0gaTbwa8ALEuecq7qZUdm+i3jgxQFdyTbnnHNTYCY8kawDHpF0J3EdyVXAakkfBTCzm6sYn3POHdUGBm1Ms3IKkheS14CBDoQtlQ/HOVcRQUjQUE/QOZeouZ6+Y5t5+fciWp7JMOsho+mFPdjGrUTdPeXXjyjp7DZQRzJa/UgQDi6bT1g1KZb+2zdmQWJmnwGQ1GhmPdUPyTnn3AADiimfanfM6CS9XtKzwJpkfZmkv696ZM4552CGjP77ReCtxGNkYWZPSrqwqlE558ZFmQxWLKIwJOw8hmc/uZBrzn2Y1zU9xbZCG/e+spRX/XErxTXrwIxxz/4x3jlDxtOc2I1p2qe2AMxsow4f8dM/Jc45N0VmQqutjZLOB0xSFvgw8Fx1w3LOOQfJw+AMKEj+GPgSsADYDPwM+JNqBuWcG4M0mO+QUC6H9fYRtLfRdfZCml7K8NjXl/HY47lkLpEdYNvHPpdLpZnQ/PdUM3tP6QZJFwAPVSck55xzpdJezpfTpuzLZW5zzjlXBWYq+1ULIz6RSHo9cD4wd6AXe6IVCIc/yjnnXCUZtSsgyjVaaisHNCf7lPZi3w+8o5pBOedGIaEwhDAkaG2F9hbWfLKdH194C2vzc/noQ4s55X1PYPn+I44bNkeS9ryJI+0/oRELEjN7EHhQ0jfNbAOApABoNrP9UxWgc84d1QwsqtwTiaTLiRtQhcDXzOymYfZ5F/Dp+Oo8aWbvHu2c5dSR/LWkVklNwNPAs5I+Md7gnXPOTUyl6kgkhcAtwBXAUuAaSUuH7HMycCNwgZm9GvjIWPGV02prqZntl/Qe4CfADcCjwOfLONY5N0HKZCAMsf44RZU5biE7bmng9tfeSrdl6AgKbC/mWNvfyd984Wo+/v7LsL4+TgmfPjKtBSOmsJTJYJEdPmHVKPu7qVfBH8U5wDozWw8g6XbiEd2fLdnnfcAtZrYnvrbtGOuk5TyRZJOOiL8L3GVmedKfsnPOuRnBGPcTyRxJq0pe15WcbgGwsWR9U7Kt1CnAKZIekvRwkgobVTlPJP8AvAQ8CfxC0vHEFe7OOeeqzYDxtdraaWZnT+KKGeBk4GJgIfHf/dea2d7RDhiVmf0d8HclmzZIetMkgnTOjUWCMCR/wWvY8oY6eo/rh0ic/L/6+cCjl0JkWCF/KOcxVw8TDeQ/CoVxXcfGs7+riQqmtjYDi0rWFybbSm0CHkmyTy9K+i1xwbJypJOWM4x8p6SvS/pJsr4UWD7O4J1zzk2UjeM1upXAyZKWSMoBV5OM7F7iX4ifRpA0hzjVtX60k5ZTR/JN4B5gfrL+W8qoxR8gKZT0uKR/TdaXSHpE0jpJ302+Geecc8Mqv35krFZbZlYAPkj8N/054A4ze0bSZyVdmex2D7ArmYfqfuATZrZrtPOWU0cyx8zukHTjQCCSxjOM/MBowa3J+ueAL5jZ7ZK+ClwLfGUc53OutgamVCgZNPEQs8PXS6anVRiCAhQGEARYvgAWxYMqJscFdXUEc2ZzcOk8+ltDdvx+L/m9IY0v5Jj7ZJ7wseeJ+vpGjmukHIgUxzK0ZZabHirYvMnM7gbuHrLtUyXLBnw0eZWlnCeSbkmzSb4VSecB+8o5uaSFwH8AvpasC7gE+H6yy23ErcGcc84Nx6bxWFslPkqcQztR0kPAXMofIuWLwH9jcIiV2cDe5PEKhm96BkDSZO06gHoay7ycc87NQNN4rC0AzOwxSRcBpwICnk9q80cl6e3ADjN7VNLF4w3MzFYAKwBa1eH9VpxzR6+U/wUcsyBJutS/DVic7P8WSZjZzWMcegFwpaS3AfXEdSRfAtolZZKnkuGanjmXbkPrIUZbL5nnPK4LKXDov2ESymRRGPcu3/3OM9n5OmPeaTvo7jvA/q4GTvmrPmzNM/F87Llc3AN9qKF1NiPFPJ451126pLwgKaeO5MfAe4nTUi0lr1GZ2Y1mttDMFhM3Mft5MkHW/QymxpYDd44/bOecO0oMdEgs91UD5dSRLDSz0yt4zeuB2yX9T+Bx4OsVPLdzzs04aR/2rJyC5CeS3mJmP5voRczsAeCBZHk98cBhzk0fYzWtHem9IIyb3JqhTDbujQ6ggHBWGxw7F6KI7pNmsfOtvdQ/00DdXbNoe3En83v3Utj+CkRxSsr6+gjq67HCMKms0uVyUl1D4x/P/m7qpfxHU05B8jDwo2QukjxxhbuZWevohznnnKuI6d5qC7gZeD3wVNJRxTnn3BRSyv/yllOQbASe9kLEzVjDpXaSnuAKQxQGqK0V+vNEPT2HHxqGBJ1zsV17IAyJenpQLgdRhBobUVMD9PVjvX1EJy7A6jJk128jmtvOmmvbOfE1m7lo7loe3r2E096bJdqxE6KIohlKptS1JLWFFPdqH0+rsXL4r3a6lTeGVk2VU5CsBx5IBm08NDZDGc1/nXPOTVrtWmOVq5yC5MXklUtezjnnplLKh0grp2f7Z6YiEOeccyOYrqktSV80s49I+jHDfBtmduUwhzmXH
sON0qsABcn2ZCRe1dfBMXMozGkmOFggqs/Q355jzylZ8i1QrDPybRHWXCTYlyHsFVEmrgAt1hnZ/SKqm0+xziCweJC9rEFoIMg294OM/MEsRKK5o4N8PqTtwYDsikZ+ta4N6992RF3FML90Vb1dLqXGP0PilBvtieSfkq///1QE4pxzbnjTttWWmT2aLJ5hZl8qfU/Sh4EHqxmYc865xHQtSEosJx5ssdR7h9nmXM0F9fUQhmA2OHFUZChQ3Cw3ac5LGKKWZqK2Jkxi8yXtHFjWS6bOaGroQ+qlu6uR4v4sQV9A4+aQsC+kd45RrDdUEASQ2ydUEBYa9fsDolz8XpCHTA9ku41Mb0ihXjRvLYBBlGsmu79I7pEnKXZ31/qWOTdpo9WRXAO8G1giqXRO3xZgd7UDc845F5u2qS3g18BWYA7wtyXbu4DV1QzKOedciela2W5mG4ANxMOjOJdOGmyBhUXxnB1maH4nL/3BMfzOFU/zPxfcTQB0RQE9liFvIf2EPHnweJ7oWkRXoY7st9o47XMHKLQ1kNlbRPu7OWb7CwBYoTDy9Sch5V0DXFrMkJ7tzjnnaskLEuecc5OhlD++jlqQJNPsfiuZ2dC52lNpZ8KQcE4HUWcHfXMa2PWaOnrO6aGzYz9dvVnq74UtN57EHz18GVYs+U20KD6+vg41NkB/nmMW7EG9/WS278T680T9/fHUuCNd3zsHuqmU8o/bqAWJmRUlHS8pZ2b9UxWUc865mGx6t9oasB54KGkCfKjRu4/+65xzU2S6ttoq8ULyCoj7kDjnnJtK0/2JZGD0X0mNZtYz1v7OVYsyGdTQQNDWijXUke9sZdtZjRxYZLDgIFGhl2PurqduS0jbizuxrh0Ud+8lioap6wAs3w9dXfHKvv1xc1wrrUsZ5rfX60ZcDaQ9tRWMtYOk10t6FliTrC+T9PdVj8w551zMxvGqgTELEuCLwFuBXQBm9iRwYTWDcs45l7DBCvdyXrVQVj8SM9soHVbZM3yuwLnJCEKCXBZIepMnvdUHBl3c9v5zqL9iB5fNf57O7H5e6p3NDx85m1lPhRxzRxE9tYaoPw8WUUiOLTsVNUL6y7lUmO6pLWCjpPMBk5SV9HHgubEOklQv6TeSnpT0jKSBupYlkh6RtE7SdyX59L3OOTeaGZDa+mPgA8ACYDNwRrI+lj7gEjNblhxzuaTzgM8BXzCzk4A9wLUTCdw5544W0z61ZWY7gXH3bDczAw4kq9nkZcAlxMPTA9wGfBr4ynjP72YWZXMEHe1Ex3VSrAvZ8bpGll39NDfO+ynzM3Fa9Yx7zqD9y7N4/NnFRLv3Eh04wCmsBAXxf8ZsSO/1cQUwTDt9b6Hl0iLlH8XR5iP5MqOEb2Z/OtbJkyFWHgVOAm4h7o+y18wGhlPdRPyk45xzbjjToGf7aKmtVcSFQD1wFrA2eZ0BlFWvYWZFMzsDWAicA5xWbmCSrpO0StKqPH3lHuacczNPyutIRpuP5DYASe8H3jDwFCHpq8Avx3MRM9sr6X7iuU3aJWWS8y0krncZ7pgVwAqAVnWkvDx2zrkqSvlfwHKa/84CWhmcXrc52TYqSXOBfFKINABvJq5ovx94B3A78Xzwd04gbpdSQX394ERQClA2AxIKw3jCqfp6or37DqvDCBob6b7wNF6+yjh36QvMyXWz4YVT2fjpU/jYv0PxQDdYxKmZ1RCIYrEYj8w7UIdhFWi66/UhLqXE9E5tDbgJeFzSNyXdBjwG/K8yjpsH3C9pNbASuNfM/hW4HviopHXAbODrEwvdOeeOEhVMbUm6XNLzSReMG0bZ7/clmaSzxzpnOa22viHpJ8C5SZjXm9m2Mo5bDZw5zPb1xPUlzjnnxlLByvakAdQtxBmiTcBKSXeZ2bND9msBPgw8Us55y50h8RzgjcmyAT8u8zh3tJAIW1qI+vritJMCFERYsYhyOchm0OxZYMb7HnqEixq2klX8QLyzWORdq49l3v/pYP+fh+x5pY8lrAYOH0LB8lWeEkfyFJdLp8p9LM8B1iX/oUfS7cBVwLND9vsfxFURnyjnpOUM2ngTccn0bPL6U0nlpLacc85VQuVSWwuAjSXrR3TBkHQWsMjM/q3c8Mp5InkbcIYlvb2SepLHgT8v9yLOOecmbpxzts+RtKpkfUXSCnbs60gBcDPw3vFcsNzUVjuDrbbaxnMBN8MFIWFbK9FJC+n5qy6uXriSjswB6pXnxOwuioiuKMe2Qjsb8x3sKzRy65svYsXGLYcNlDgniOdPK9Zy8ERPa7k0Gn//kJ1mNlIF+WZgUcn60C4YLcBrgAeSgXqPBe6SdKWZlRZOhymnIPlr4lZb9xO3RLsQGLGm3znnXGVVsPnvSuBkSUuIC5CrGRyyCjPbB8w5dF3pAeDjoxUiUF6rre8kJ/udZFNZrbacc85VSIUKEjMrSPogcA8QArea2TOSPgusMrO7JnLeMQsSSb8H/HzgApLaJf2umf3LRC7ophkJ5XIEJxwHr+ymuGs3CkN63n4W26/pZfmrHqEts4Nf7hG7P7GIO586gejAgThNNDAQ4hEpo41HXIaoOPzAiZOM3dNVbiaoZIdEM7sbuHvItk+NsO/F5ZyznA6Jf5k87gyceC/wl+Wc3DnnXAVM17G2SgxX2JRbSe+cc24yalhAlKucJ5JVkm6WdGLyupl4VGDnnHNVpnG+aqGcJ4sPAZ8Evpus30t5MyS6aU6ZDMpkCObO4eW3z6V3zhwKswqEzXlm/zSk8zv1/GrtGWh/N0QRmf4tFAfqR6D29RO1vr5zlZLyj3I5rba68ea+zjlXM2kf/becVlunAB8HFpfub2aXVC8s55xzh4xz5uipVk5q63vAV4GvcfgYem6mCkIynXMpzp/NgeOa2XNqSPYA1O8S0d4s9bszzH54B/byZop9fYMppCAsL500XLPcSjb99Wa/biaZBlPtllOQFMzsK1WPxDnn3PBmQEHyY0l/AvwIBidPN7PdIx/inHOuUmbCE8ny5GvpuPQGnFD5cFy1KZuDQBDZkfN7JOmlA79/NlsuL3LF6U/zq81LyN3TTue3VmP9eayQBwUULToyfTSZARcrmYrytJabaVL+kS6n1daSqQjEOefc8NL+RDJih0RJ/61k+Z1D3vOJrZxzbiqMZ3iUGhU4o/Vsv7pk+cYh711ehVicc84NJ+UFyWipLY2wPNy6q5XSEXZLm9AqGLbOwgp5guZmJFEsFg+NuqtMlmDxQvJfzdMWbKT3O8fz0p/lmNe1BoUhUaFQcpIR6kIkgro6rBjFdSkDcQAKhEUWfy0919Dvxes3nDuMSH9qa7SCxEZYHm7dOedctaT8L+5oBckySfuJC8SGZJlkvb7qkTnnnANAKX9SH7EgMbNwKgNxE1T6ATtsuYiyOYKGkjI/m4EgxHp6sIF9JRSGKJdFhSLZD9ZTXLOOOdoeN/EFrFiEIESBDqWqrFiEYZoAR729Q+KL02DJqQ59HfN7cc7FDJTyIVLKGUZ+QiQtknS/pGclPSPpw8n2Dkn3SlqbfJ1VrRicc25GSHlle9UK
EqAAfMzMlgLnAR+QtJR4JOH7zOxk4D58ZGHnnBuVrPxXLVRtpkMz2wpsTZa7JD0HLACuAi5OdrsNeAC4vlpxzHilLZ0Up54UhiibQS3NcLAXtTRjrU1QjOhdPIuX35rhLRc+wSc7H6QtyNFjeV4pii/uuJSX37c4Pp+VzKGugCCXxQqFuDd80spLYZaoPz/YOsxTU85VR8p/taZkylxJi4EzgUeAzqSQAdgGdE5FDM45Ny3NkNF/J0VSM/AD4CNmtl8lfR3MzKThb5Gk64DrAOpprHaYzjmXXkdzQSIpS1yIfNvMfphs3i5pnpltlTQP2DHcsWa2AlgB0KqOlN/GKSQRtrWy9T+9mr3L8lzy2uc4rXkrV7Wspr6kTN5dzHJv91I2981iX97oyvfTlu3lwfUtHP9148VPhby3542Hp6PUC6wZXD80ZW6RqLd42HbL92P56powZAwAABPNSURBVH6rzrnp0SGxmq22BHwdeM7Mbi556y4GRxReDtxZrRicc25GMCv/VQPVfCK5APjPwFOSnki2/TlwE3CHpGuBDcC7qhiDc85Ne2l/Iqlmq61fMfKYXJdW67rOOTej1LB/SLmmpNWWmzhlMug1p9B1UgsHOwJ65ovsGXvo+Mc88771PFvDLJt75vBzXYoVo8O6jquuDjU3xSuR0d0dcXK0hqg/TzTcJFTefNe5VNIk5oybCl6QOOdcyh21qS3nnHMVYKQ+W+AFSUopm4t7py+azws3Zjj12A1s2tdG75Y2Ov65jcZV6yke6EZhiBXyh6dQk7lJ7GAv6uvDIhtMeSkYY+RE51za+BOJc865yfGCxDnn3ERNhw6JXpCkQTLXhxUKEIT0/ofX8fJVEbdd8jXu3HMW+Y8uo+/BbcxlG3MhTls1NKDXLSVY+zLRgSieH6RUMvDiEVmskabJdc6lUw07GpbLCxLnnEs5fyJxzjk3OV6QuDFFRcwEQUhm0Xya1u/jVTcbf/2n5wOQ0W+JSkZNViZL1NsHK5+iGISD84EMNZAyG2i1lUyTO9wUueNSOgeKc67q/InEOefcxBlQTHdJ4gWJc86lXNqfSKo5Z7tzzrlKqOAw8pIul/S8pHWSbhjm/Y9KelbSakn3STp+rHN6QVJLQYiyOTKLFtL1rnPh3nmceedLvHBNBz3HtxH19hH19MTNgjX4o7JCyTzpI9WPAEEuS9DYSNBQT1BXF8/lHobxuTTSwMxl8PoR56aUrPzXqOeRQuAW4ApgKXCNpKVDdnscONvMTge+D/zNWPF5QeKcc2lm43yN7hxgnZmtN7N+4HbgqsMuZ3a/mfUkqw8DC8c6qdeROOdcisU928eVBZgjaVXJ+opk6nKABcDGkvc2AeeOcq5rgZ+MdUEvSKZSEKJshqC5CSKjcNpx7DuxgX0nBPTPirCvLeTRu/awpPAYli8cSltZX9+ELhf19kJvbyW/A+dcLYxvnNWdZnb2ZC8p6T8BZwMXjbWvFyTOOZdy43wiGc1mYFHJ+sJk2+HXky4D/gK4yMzG/J+s15E451yaVbaOZCVwsqQlknLA1cBdpTtIOhP4B+BKM9tRToj+RFJJ0qH5PpTLEbS2wtxZvPCe2Zx4/gZuOeEOQsG2Yh0hxj1dr+XuLa/GftPJCf/SR/CLJ4iSuUTKETQ1xWkvBVi+v8rfnHOuNio3aKOZFSR9ELgHCIFbzewZSZ8FVpnZXcDngWbge4r/Fr1sZleOdl4vSJxzLuUUVa7JvZndDdw9ZNunSpYvG+85vSBxzrk0M1DKJzX1gsQ559Iu5Z2AvSAZamBk25FGuE3qQRQM1oeggKChnsKrl7B2eR3/39mPs3z2L3mqbyHrejtZf+cb6b55IX9y31uwfCHumQ5gRhPraWL94dco80MT9fSk/gPmnKuAlP+aV63VlqRbJe2Q9HTJtg5J90pam3ydVa3rO+fcTCGzsl+1UM3mv98ELh+y7QbgPjM7GbgvWXfOOTeaCg7aWA1VS22Z2S8kLR6y+Srg4mT5NuAB4PpqxTBhw6W1Bpr2AspmUCZD0N4GxSLRsbPZdXobXceJ1jXwzPdO55OPNiZprALH5x+BqDjOzqll8LSWczOfMd6e7VNuqutIOs1sa7K8DegcaUdJ1wHXAdTTOAWhOedc+ojapazKVbPKdjMzaeRBj5NBxlYAtKoj3XfROeeqyQuSw2yXNM/MtkqaB5TV/X7KqLQlVtK7XAFBLgvZLHbq8Rw8tpFXzszwznc8yIl1z3Nibge/7jmZW587n/r7W5h3x/MUd+2mONk50SH1Hx7n3BRJ+d+CqR5r6y5gebK8HLhziq/vnHPTi4GKVvarFqrZ/Pc7wL8Dp0raJOla4CbgzZLWApcl684550ZzFLfaumaEty6t1jUnLAhRIMI5s7HWZqKWevaf3ELmYMTuV2XoX9bNgjl7MTtAoC4Kj85n5TtO5ZGXmrDCIjDjOJ4CaXIprQEpf4x1zk2l2hUQ5fKe7c45l2aGFyTOOecmyfuROOecmwzvR5JWEgpDCEOUyaD6OmhsoNjeyIHFTWw/DzpO2kvP+ll0/LyR4it1ND+3m2j9y5xgmykWi0c+bqb8h+2cm6ZS/rfl6C1InHNuOjCgghNbVYMXJM45l2reait9kl7jYccs1NpC1NJA37HNdC3MUqyD/lahIpz0z11kNh9gbt8uoq4DUCxSjOzQ/CPOOTdlvCBxzjk3YQYU091sywsS55xLtSQTkmJHTUESNDailmZ6lx3HK2fk4PV7WdS+lygqcrA/z84ts2h+PsfsZwo0/XYX0YZNFPr7h5+XpHQ55Y+czrkZIOV/Z46agsQ556Ylb7XlnHNu0vyJxDnn3KR4QVIlI82rDiiTjb/W16FMBjIZus9bwiunZ7AzuujdI467tRnWFwl37qF9307a2QxhGDfz7c9DVBz+upZMoJzyH6xzbqbwfiTOOecmw4DIW20555ybDH8iqRCJoK4OggA1NaFcln3nLWLHO3p532t/xX9seZIA2FBopSPsoV5F9kY5Qoy8hdy08W1sefgETvpMHlvzHFYsHjkJVblNe1P+Q3XOzTAp/5szfQoS55w7GplhxRHqbFPCCxLnnEs770cyeaec3sO//nQVW4sH2VJooNtyvNDfyZfXtHPMt1u4/9Nnct/62XGpXTqUwGGPg9s5ge2jTzSW8sdH59xRKuV/m6ZFQeKcc0ctM2+15ZxzbpL8iWTyfru6kbcvPBvlcgQN9Vi+gBrqWaRXIL+Z4t59ZZ9LdXUoDFFdHRSLhzohRn19KJMhOtiLAmGFwpHHZpLblRxjkcUdF4MQZTOHb0tamamhAcIA6zlI1NMzZnxh5zEwZxZs3Epx//7hv4dsjgNXnknTD38Tbyj3QyYRNDdjpy0m3LaHwuatgx0vk6mHD33fQRjH09GO6uuJdu7CCoX4fYkD7zyX9l+8SLS/C8sXwKIktTiBD7xEOLtjzHukTGbw/g4IwkP3G7P4ZxGI8NhOyGYobtqKFfIlJwkG058DsQ6cY4opkxn2cza4Q/I9SSiTJZzfifX0UnzllZEPqasjaG0ddZ+
BawctLdjx82DtBqLu7nEGX8aApck+QWMjWrII7d5HtGcvhGH8cx6Y12e89z753QpmtVPYum1chwb19US9vUeeMpsjaGrAFs4j6OqmsGFj+bE0Ng7evyoN5Gr+ROKcc27i0t+zvSZT/Um6XNLzktZJuqEWMTjn3LQwMPpvua8amPInEkkhcAvwZmATsFLSXWb27FTH4pxz00LKJ7aSTfEjk6TXA582s7cm6zcCmNlfj3RMqzrsXF06mMcvFlEYxrnXtlbs4EHsYC8EAUFHO7ZvP1aMUBhgvX2YGZYvENTXER08CGYomwOL4nqR/jiHrkBxPUgQEPX2Df7wFAzm4QfyuqV59mS7spm4viDJ+Sqbwwp5grq6Q9cgKsbXUIDl+4Ekb9vXd3jOHuLvsbWZ4q7dR9yToKWFwrITyW3cBWFI4aWNcUwD5xgpV5vcQ2AwPz/Qo18BQS57eA55oF6lvx/r6xu8fn19/D0NyW8HTU0Es9pBorjjFay/P743YRjXVYz0eQtCgob6wTqqYnHwvpdcEzj8Xo1mpIE9U5ImUCaDFYsEDQ3x5w0GP0MDSn+myT2ygfq8kp/HcPVbpfVf8bmG/zwEjY1YX198fFLHZJEdfu2xvo9CIf5ch+Fhn5NDSuuyFKBgcBQJG/hf9NDrjfGzClpaIJ+Pr93QQHTgQDxgq0WHf7ZHq4sZeH/gd7au7tD9D+fOxroOxD+jujqKB7oPrwsd6ZzAI3Yf+223hn1znFqD2XZe5q1l739v/juPmtnZlbh2uWpRR7IAKK3J2gScO3QnSdcB1wHU0zg1kTnnXNp4z/aJM7MVwAqIn0hqHI5zztWOp7aGXHACqS1JXcDzUxPhhM0BdtY6iDJMhzg9xsrwGCtjIjEeb2ZzK3FxST9NYijXTjO7vBLXLlctCpIM8FvgUmAzsBJ4t5k9M8oxq6Y65zde0yFGmB5xeoyV4TFWxnSIsdamPLVlZgVJHwTuAULg1tEKEeecc+lWkzoSM7sbuLsW13bOOVdZNemQOAErah1AGaZDjDA94vQYK8NjrIzpEGNNTXkdiXPOuZllujyROOecS6nUFyRpHZdL0kuSnpL0hKRVybYOSfdKWpt8nTXFMd0qaYekp0u2DRuTYn+X3NfVks6qYYyflrQ5uZdPSHpbyXs3JjE+L6n87r2Ti3GRpPslPSvpGUkfTran5l6OEmPa7mW9pN9IejKJ8zPJ9iWSHkni+a6kXLK9Lllfl7y/uIYxflPSiyX38oxke01+d1LNzFL7Im7V9QJwApADngSW1jquJLaXgDlDtv0NcEOyfAPwuSmO6ULgLODpsWIC3gb8BBBwHvBIDWP8NPDxYfZdmvzM64AlyWchnIIY5wFnJcstxM3Vl6bpXo4SY9rupYDmZDkLPJLcozuAq5PtXwXenyz/CfDVZPlq4Ls1jPGbwDuG2b8mvztpfqX9ieQcYJ2ZrTezfuB24KoaxzSaq4DbkuXbgN+dyoub2S+AoQNzjRTTVcC3LPYw0C5pXo1iHMlVwO1m1mdmLwLriD8TVWVmW83ssWS5C3iOeGif1NzLUWIcSa3upZnZgWQ1m7wMuAT4frJ96L0cuMffBy6VVJExqyYQ40hq8ruTZmkvSIYbl2u0X5apZMDPJD2ajAsG0GlmW5PlbUBnbUI7zEgxpe3efjBJE9xakhKseYxJauVM4v+lpvJeDokRUnYvJYWSngB2APcSPw3tNbOB0SlLYzkUZ/L+PmD2VMdoZgP38q+Se/kFSXVDYxwm/qNS2guSNHuDmZ0FXAF8QNKFpW9a/AycqiZxaYwp8RXgROAMYCvwt7UNJyapGfgB8BEzO2y6yrTcy2FiTN29NLOimZ0BLCR+CjqtxiEdYWiMkl4D3Egc6+8AHcD1NQwx1dJekGwGFpWsL0y21ZyZbU6+7gB+RPwLsn3gETf5uqN2ER4yUkypubdmtj35RY6Af2Qw5VKzGCVlif9Af9vMfphsTtW9HC7GNN7LAWa2F7gfeD1xOmigQ3RpLIfiTN5vA3bVIMbLk/ShmVkf8A1SdC/TJu0FyUrg5KSFR4648u2uGseEpCZJLQPLwFuAp4ljW57sthy4szYRHmakmO4C/kvSAuU8YF9J2mZKDckv/x7xvYQ4xquTljxLgJOB30xBPAK+DjxnZjeXvJWaezlSjCm8l3MltSfLDcQT2j1H/Mf6HcluQ+/lwD1+B/Dz5OlvqmNcU/KfBhHX4ZTey1T87qRGrWv7x3oRt5D4LXFe9S9qHU8S0wnELWCeBJ4ZiIs4l3sfsBb4v0DHFMf1HeJ0Rp44b3vtSDERtzi5JbmvTwFn1zDGf0piWE38SzqvZP+/SGJ8HrhiimJ8A3HaajXwRPJ6W5ru5Sgxpu1eng48nsTzNPCpZPsJxAXZOuB7QF2yvT5ZX5e8f0INY/x5ci+fBv4Pgy27avK7k+aX92x3zjk3KWlPbTnnnEs5L0icc85NihckzjnnJsULEuecc5PiBYlzzrlJ8YLEVZSkYjJS6jPJaKofk1S1z5mkxSoZSXiC5/jzIeu/nlxUo15rsaR3V+v8ztWCFySu0g6a2Rlm9mrijl1XAH9Z45gOKelNXeqwgsTMzq9iCIsBL0jcjOIFiasai4ePuY54EEElA+N9XtLKZCC8PxrYV9L1iud3eVLSTcm2MyQ9nOz7Iw3O//G6ZL8ngQ+UnGPY80u6WNIvJd0FPFsaY3KthuQp6tvJtgMlxz0o6U5J6yXdJOk9iueueErSicl+cyX9ILnuSkkXJNsv0uBcFo8noyHcBLwx2fZnY8T8C0n/pnj+kK9W88nOuUmpdY9If82sF3BgmG17iUfKvQ7478m2OmAV8dwYVwC/BhqT9wZ6jK8GLkqWPwt8sWT7hcny50nmNhnl/BcD3cCScmIeWE+O20s890cd8XhKn0ne+3BJPP9MPIgnwHHEw5YA/Bi4IFluBjLJOf+15FqjxdxL3AM8JB4194i5MfzlrzS8hnvMd65a3gKcLmlgjKU24jGfLgO+YWY9AGa2W1Ib0G5mDyb73gZ8LxkTqd3ieU0gHhLkijHO3w/8xuJ5OMZrpSXjKEl6AfhZsv0p4E3J8mXAUg1Om9GqeFTeh4CbkyedH5rZJh05tcZYMa9Prv0d4mFRvj/0BM7VmhckrqoknQAUiUfKFfAhM7tnyD6VmvZ1pPNfTPxEMhF9JctRyXrE4O9PAJxnZr1Djr1J0r8Rj4H10Ajf52gxDx2/yMczcqnkOVdXNZLmEk+j+r/NzIB7gPcrHv4cSacoHj35XuAPJTUm2zvMbB+wR9Ibk9P9Z+BBi4f53ivpDcn295RccqTzjyU/cMwE/Qz40MCKBuf2PtHMnjKzzxGPZH0a0EU8NW45MZ+jeOTrAPgD4FeTiNG5qvEnEldpDYpnmssCBeLU08Aw518jbrX0WDI09yvA75rZT5M/vqsk9QN3E7ekWg58NSlg1gN/mJznD4FbJRmDqaYRz19GzCuA1ZIeM7P3jLn3kf4UuEXSauLfqV8Afwx8RNKbiJ9eniGe5zsCiklDgW8CXxol5pXA/wZOIh
52/UcTiM25qvPRf51LoSS19XEze3utY3FuLJ7acs45Nyn+ROKcc25S/InEOefcpHhB4pxzblK8IHHOOTcpXpA455ybFC9InHPOTYoXJM455ybl/wG2SGZbPnFFOwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "

" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "fig = display.plot_alignment(alignment)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## WaveFlow: vocoder model\n", - "Generated spectrogram is converted to raw audio using a pretrained waveflow model." - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "from paddlespeech.t2s.models.waveflow import ConditionalWaveFlow" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "data:\n", - " batch_size: 8\n", - " clip_frames: 65\n", - " fmax: 8000\n", - " fmin: 0\n", - " hop_length: 256\n", - " n_fft: 1024\n", - " n_mels: 80\n", - " sample_rate: 22050\n", - " valid_size: 16\n", - " win_length: 1024\n", - "model:\n", - " channels: 128\n", - " kernel_size: [3, 3]\n", - " n_flows: 8\n", - " n_group: 16\n", - " n_layers: 8\n", - " sigma: 1.0\n", - " upsample_factors: [16, 16]\n", - "training:\n", - " lr: 0.0002\n", - " max_iteration: 3000000\n", - " save_interval: 10000\n", - " valid_interval: 1000\n" - ] - } - ], - "source": [ - "from examples.waveflow import config as waveflow_config\n", - "vocoder_config = waveflow_config.get_cfg_defaults()\n", - "print(vocoder_config)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[checkpoint] Rank 0: loaded model from ../../pretrained/waveflow/waveflow_ljspeech_ckpt_0.3/step-2000000.pdparams\n" - ] - } - ], - "source": [ - "vocoder = ConditionalWaveFlow.from_pretrained(\n", - " vocoder_config, \n", - " \"../../pretrained/waveflow/waveflow_ljspeech_ckpt_0.3/step-2000000\")\n", - "layer_tools.recursively_remove_weight_norm(vocoder)\n", - "vocoder.eval()" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "time: 9.412613868713379s\n" - ] - } - ], - "source": [ - "audio = vocoder.infer(paddle.transpose(outputs[\"mel_outputs_postnet\"], [0, 2, 1]))\n", - "wav = audio[0].numpy()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ipd.Audio(wav, rate=22050)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.7" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/paddlespeech/t2s/exps/tacotron2/synthesize.py b/paddlespeech/t2s/exps/tacotron2/synthesize.py deleted file mode 100644 index c73c32d2..00000000 --- a/paddlespeech/t2s/exps/tacotron2/synthesize.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import argparse -from pathlib import Path - -import numpy as np -import paddle -from matplotlib import pyplot as plt - -from paddlespeech.t2s.exps.tacotron2.config import get_cfg_defaults -from paddlespeech.t2s.frontend import EnglishCharacter -from paddlespeech.t2s.models.tacotron2 import Tacotron2 -from paddlespeech.t2s.utils import display - - -def main(config, args): - if args.ngpu == 0: - paddle.set_device("cpu") - elif args.ngpu > 0: - paddle.set_device("gpu") - else: - print("ngpu should >= 0 !") - - # model - frontend = EnglishCharacter() - model = Tacotron2.from_pretrained(config, args.checkpoint_path) - model.eval() - - # inputs - input_path = Path(args.input).expanduser() - sentences = [] - with open(input_path, "rt") as f: - for line in f: - line_list = line.strip().split() - utt_id = line_list[0] - sentence = " ".join(line_list[1:]) - sentences.append((utt_id, sentence)) - - if args.output is None: - output_dir = input_path.parent / "synthesis" - else: - output_dir = Path(args.output).expanduser() - output_dir.mkdir(exist_ok=True) - - for i, sentence in enumerate(sentences): - sentence = paddle.to_tensor(frontend(sentence)).unsqueeze(0) - outputs = model.infer(sentence) - mel_output = outputs["mel_outputs_postnet"][0].numpy().T - alignment = outputs["alignments"][0].numpy().T - - np.save(str(output_dir / f"sentence_{i}"), mel_output) - display.plot_alignment(alignment) - plt.savefig(str(output_dir / f"sentence_{i}.png")) - if args.verbose: - print("spectrogram saved at {}".format(output_dir / - f"sentence_{i}.npy")) - - -if __name__ == "__main__": - config = get_cfg_defaults() - - parser = argparse.ArgumentParser( - description="generate mel spectrogram with TransformerTTS.") - parser.add_argument( - "--config", - type=str, - metavar="FILE", - help="extra config to overwrite the default config") - parser.add_argument( - "--checkpoint_path", type=str, help="path of the checkpoint to load.") - parser.add_argument("--input", type=str, help="path of the text sentences") - parser.add_argument("--output", type=str, help="path to save outputs") - parser.add_argument( - "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") - parser.add_argument( - "--opts", - nargs=argparse.REMAINDER, - help="options to overwrite --config file and the default config, passing in KEY VALUE pairs" - ) - parser.add_argument( - "-v", "--verbose", action="store_true", help="print msg") - - args = parser.parse_args() - if args.config: - config.merge_from_file(args.config) - if args.opts: - config.merge_from_list(args.opts) - config.freeze() - print(config) - print(args) - - main(config, args) diff --git a/paddlespeech/t2s/exps/tacotron2/train.py b/paddlespeech/t2s/exps/tacotron2/train.py deleted file mode 100644 index 8198348f..00000000 --- a/paddlespeech/t2s/exps/tacotron2/train.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import time -from collections import defaultdict - -import numpy as np -import paddle -from paddle import distributed as dist -from paddle.io import DataLoader -from paddle.io import DistributedBatchSampler - -from paddlespeech.t2s.data import dataset -from paddlespeech.t2s.exps.tacotron2.config import get_cfg_defaults -from paddlespeech.t2s.exps.tacotron2.ljspeech import LJSpeech -from paddlespeech.t2s.exps.tacotron2.ljspeech import LJSpeechCollector -from paddlespeech.t2s.models.tacotron2 import Tacotron2 -from paddlespeech.t2s.models.tacotron2 import Tacotron2Loss -from paddlespeech.t2s.training.cli import default_argument_parser -from paddlespeech.t2s.training.experiment import ExperimentBase -from paddlespeech.t2s.utils import display -from paddlespeech.t2s.utils import mp_tools - - -class Experiment(ExperimentBase): - def compute_losses(self, inputs, outputs): - texts, mel_targets, plens, slens = inputs - - mel_outputs = outputs["mel_output"] - mel_outputs_postnet = outputs["mel_outputs_postnet"] - attention_weight = outputs["alignments"] - if self.config.model.use_stop_token: - stop_logits = outputs["stop_logits"] - else: - stop_logits = None - - losses = self.criterion(mel_outputs, mel_outputs_postnet, mel_targets, - attention_weight, slens, plens, stop_logits) - return losses - - def train_batch(self): - start = time.time() - batch = self.read_batch() - data_loader_time = time.time() - start - - self.optimizer.clear_grad() - self.model.train() - texts, mels, text_lens, output_lens = batch - outputs = self.model(texts, text_lens, mels, output_lens) - losses = self.compute_losses(batch, outputs) - loss = losses["loss"] - loss.backward() - self.optimizer.step() - iteration_time = time.time() - start - - losses_np = {k: float(v) for k, v in losses.items()} - # logging - msg = "Rank: {}, ".format(dist.get_rank()) - msg += "step: {}, ".format(self.iteration) - msg += "time: {:>.3f}s/{:>.3f}s, ".format(data_loader_time, - iteration_time) - msg += ', '.join('{}: {:>.6f}'.format(k, v) - for k, v in losses_np.items()) - self.logger.info(msg) - - if dist.get_rank() == 0: - for k, v in losses_np.items(): - self.visualizer.add_scalar(f"train_loss/{k}", v, self.iteration) - - @mp_tools.rank_zero_only - @paddle.no_grad() - def valid(self): - valid_losses = defaultdict(list) - for i, batch in enumerate(self.valid_loader): - texts, mels, text_lens, output_lens = batch - outputs = self.model(texts, text_lens, mels, output_lens) - losses = self.compute_losses(batch, outputs) - for k, v in losses.items(): - valid_losses[k].append(float(v)) - - attention_weights = outputs["alignments"] - self.visualizer.add_figure( - f"valid_sentence_{i}_alignments", - display.plot_alignment(attention_weights[0].numpy().T), - self.iteration) - self.visualizer.add_figure( - f"valid_sentence_{i}_target_spectrogram", - display.plot_spectrogram(mels[0].numpy().T), self.iteration) - self.visualizer.add_figure( - f"valid_sentence_{i}_predicted_spectrogram", - 
display.plot_spectrogram(outputs['mel_outputs_postnet'][0] - .numpy().T), self.iteration) - - # write visual log - valid_losses = {k: np.mean(v) for k, v in valid_losses.items()} - - # logging - msg = "Valid: " - msg += "step: {}, ".format(self.iteration) - msg += ', '.join('{}: {:>.6f}'.format(k, v) - for k, v in valid_losses.items()) - self.logger.info(msg) - - for k, v in valid_losses.items(): - self.visualizer.add_scalar(f"valid/{k}", v, self.iteration) - - def setup_model(self): - config = self.config - model = Tacotron2( - vocab_size=config.model.vocab_size, - d_mels=config.data.n_mels, - d_encoder=config.model.d_encoder, - encoder_conv_layers=config.model.encoder_conv_layers, - encoder_kernel_size=config.model.encoder_kernel_size, - d_prenet=config.model.d_prenet, - d_attention_rnn=config.model.d_attention_rnn, - d_decoder_rnn=config.model.d_decoder_rnn, - attention_filters=config.model.attention_filters, - attention_kernel_size=config.model.attention_kernel_size, - d_attention=config.model.d_attention, - d_postnet=config.model.d_postnet, - postnet_kernel_size=config.model.postnet_kernel_size, - postnet_conv_layers=config.model.postnet_conv_layers, - reduction_factor=config.model.reduction_factor, - p_encoder_dropout=config.model.p_encoder_dropout, - p_prenet_dropout=config.model.p_prenet_dropout, - p_attention_dropout=config.model.p_attention_dropout, - p_decoder_dropout=config.model.p_decoder_dropout, - p_postnet_dropout=config.model.p_postnet_dropout, - use_stop_token=config.model.use_stop_token) - - if self.parallel: - model = paddle.DataParallel(model) - - grad_clip = paddle.nn.ClipGradByGlobalNorm( - config.training.grad_clip_thresh) - optimizer = paddle.optimizer.Adam( - learning_rate=config.training.lr, - parameters=model.parameters(), - weight_decay=paddle.regularizer.L2Decay( - config.training.weight_decay), - grad_clip=grad_clip) - criterion = Tacotron2Loss( - use_stop_token_loss=config.model.use_stop_token, - use_guided_attention_loss=config.model.use_guided_attention_loss, - sigma=config.model.guided_attention_loss_sigma) - self.model = model - self.optimizer = optimizer - self.criterion = criterion - - def setup_dataloader(self): - args = self.args - config = self.config - ljspeech_dataset = LJSpeech(args.data) - - valid_set, train_set = dataset.split(ljspeech_dataset, - config.data.valid_size) - batch_fn = LJSpeechCollector(padding_idx=config.data.padding_idx) - - if not self.parallel: - self.train_loader = DataLoader( - train_set, - batch_size=config.data.batch_size, - shuffle=True, - drop_last=True, - collate_fn=batch_fn) - else: - sampler = DistributedBatchSampler( - train_set, - batch_size=config.data.batch_size, - shuffle=True, - drop_last=True) - self.train_loader = DataLoader( - train_set, batch_sampler=sampler, collate_fn=batch_fn) - - self.valid_loader = DataLoader( - valid_set, - batch_size=config.data.batch_size, - shuffle=False, - drop_last=False, - collate_fn=batch_fn) - - -def main_sp(config, args): - exp = Experiment(config, args) - exp.setup() - exp.resume_or_load() - exp.run() - - -def main(config, args): - if args.ngpu > 1: - dist.spawn(main_sp, args=(config, args), nprocs=args.ngpu) - else: - main_sp(config, args) - - -if __name__ == "__main__": - config = get_cfg_defaults() - parser = default_argument_parser() - args = parser.parse_args() - if args.config: - config.merge_from_file(args.config) - if args.opts: - config.merge_from_list(args.opts) - config.freeze() - print(config) - print(args) - - main(config, args) diff --git 
a/paddlespeech/t2s/exps/transformer_tts/normalize.py b/paddlespeech/t2s/exps/transformer_tts/normalize.py index 4bb77c79..87e975b8 100644 --- a/paddlespeech/t2s/exps/transformer_tts/normalize.py +++ b/paddlespeech/t2s/exps/transformer_tts/normalize.py @@ -130,6 +130,9 @@ def main(): "speech_lengths": item['speech_lengths'], "speech": str(speech_path), } + # add spk_emb for voice cloning + if "spk_emb" in item: + record["spk_emb"] = str(item["spk_emb"]) output_metadata.append(record) output_metadata.sort(key=itemgetter('utt_id')) output_metadata_path = Path(args.dumpdir) / "metadata.jsonl" diff --git a/paddlespeech/t2s/exps/fastspeech2/voice_cloning.py b/paddlespeech/t2s/exps/voice_cloning.py similarity index 57% rename from paddlespeech/t2s/exps/fastspeech2/voice_cloning.py rename to paddlespeech/t2s/exps/voice_cloning.py index 9fbd4964..d6733a94 100644 --- a/paddlespeech/t2s/exps/fastspeech2/voice_cloning.py +++ b/paddlespeech/t2s/exps/voice_cloning.py @@ -21,17 +21,43 @@ import soundfile as sf import yaml from yacs.config import CfgNode +from paddlespeech.s2t.utils.dynamic_import import dynamic_import from paddlespeech.t2s.frontend.zh_frontend import Frontend -from paddlespeech.t2s.models.fastspeech2 import FastSpeech2 -from paddlespeech.t2s.models.fastspeech2 import FastSpeech2Inference -from paddlespeech.t2s.models.parallel_wavegan import PWGGenerator -from paddlespeech.t2s.models.parallel_wavegan import PWGInference from paddlespeech.t2s.modules.normalizer import ZScore from paddlespeech.vector.exps.ge2e.audio_processor import SpeakerVerificationPreprocessor from paddlespeech.vector.models.lstm_speaker_encoder import LSTMSpeakerEncoder +model_alias = { + # acoustic model + "fastspeech2": + "paddlespeech.t2s.models.fastspeech2:FastSpeech2", + "fastspeech2_inference": + "paddlespeech.t2s.models.fastspeech2:FastSpeech2Inference", + "tacotron2": + "paddlespeech.t2s.models.new_tacotron2:Tacotron2", + "tacotron2_inference": + "paddlespeech.t2s.models.new_tacotron2:Tacotron2Inference", + # voc + "pwgan": + "paddlespeech.t2s.models.parallel_wavegan:PWGGenerator", + "pwgan_inference": + "paddlespeech.t2s.models.parallel_wavegan:PWGInference", +} + + +def voice_cloning(args): + # Init body. 
+ with open(args.am_config) as f: + am_config = CfgNode(yaml.safe_load(f)) + with open(args.voc_config) as f: + voc_config = CfgNode(yaml.safe_load(f)) + + print("========Args========") + print(yaml.safe_dump(vars(args))) + print("========Config========") + print(am_config) + print(voc_config) -def voice_cloning(args, fastspeech2_config, pwg_config): # speaker encoder p = SpeakerVerificationPreprocessor( sampling_rate=16000, @@ -57,40 +83,52 @@ def voice_cloning(args, fastspeech2_config, pwg_config): phn_id = [line.strip().split() for line in f.readlines()] vocab_size = len(phn_id) print("vocab_size:", vocab_size) - odim = fastspeech2_config.n_mels - model = FastSpeech2( - idim=vocab_size, odim=odim, **fastspeech2_config["model"]) - model.set_state_dict( - paddle.load(args.fastspeech2_checkpoint)["main_params"]) - model.eval() - - vocoder = PWGGenerator(**pwg_config["generator_params"]) - vocoder.set_state_dict(paddle.load(args.pwg_checkpoint)["generator_params"]) - vocoder.remove_weight_norm() - vocoder.eval() - print("model done!") + # acoustic model + odim = am_config.n_mels + # model: {model_name}_{dataset} + am_name = args.am[:args.am.rindex('_')] + am_dataset = args.am[args.am.rindex('_') + 1:] + + am_class = dynamic_import(am_name, model_alias) + am_inference_class = dynamic_import(am_name + '_inference', model_alias) + + if am_name == 'fastspeech2': + am = am_class( + idim=vocab_size, odim=odim, spk_num=None, **am_config["model"]) + elif am_name == 'tacotron2': + am = am_class(idim=vocab_size, odim=odim, **am_config["model"]) + + am.set_state_dict(paddle.load(args.am_ckpt)["main_params"]) + am.eval() + am_mu, am_std = np.load(args.am_stat) + am_mu = paddle.to_tensor(am_mu) + am_std = paddle.to_tensor(am_std) + am_normalizer = ZScore(am_mu, am_std) + am_inference = am_inference_class(am_normalizer, am) + am_inference.eval() + print("acoustic model done!") + + # vocoder + # model: {model_name}_{dataset} + voc_name = args.voc[:args.voc.rindex('_')] + voc_class = dynamic_import(voc_name, model_alias) + voc_inference_class = dynamic_import(voc_name + '_inference', model_alias) + voc = voc_class(**voc_config["generator_params"]) + voc.set_state_dict(paddle.load(args.voc_ckpt)["generator_params"]) + voc.remove_weight_norm() + voc.eval() + voc_mu, voc_std = np.load(args.voc_stat) + voc_mu = paddle.to_tensor(voc_mu) + voc_std = paddle.to_tensor(voc_std) + voc_normalizer = ZScore(voc_mu, voc_std) + voc_inference = voc_inference_class(voc_normalizer, voc) + voc_inference.eval() + print("voc done!") frontend = Frontend(phone_vocab_path=args.phones_dict) print("frontend done!") - stat = np.load(args.fastspeech2_stat) - mu, std = stat - mu = paddle.to_tensor(mu) - std = paddle.to_tensor(std) - fastspeech2_normalizer = ZScore(mu, std) - - stat = np.load(args.pwg_stat) - mu, std = stat - mu = paddle.to_tensor(mu) - std = paddle.to_tensor(std) - pwg_normalizer = ZScore(mu, std) - - fastspeech2_inference = FastSpeech2Inference(fastspeech2_normalizer, model) - fastspeech2_inference.eval() - pwg_inference = PWGInference(pwg_normalizer, vocoder) - pwg_inference.eval() - output_dir = Path(args.output_dir) output_dir.mkdir(parents=True, exist_ok=True) @@ -112,24 +150,23 @@ def voice_cloning(args, fastspeech2_config, pwg_config): # print("spk_emb shape: ", spk_emb.shape) with paddle.no_grad(): - wav = pwg_inference( - fastspeech2_inference(phone_ids, spk_emb=spk_emb)) + wav = voc_inference(am_inference(phone_ids, spk_emb=spk_emb)) sf.write( str(output_dir / (utt_id + ".wav")), wav.numpy(), - 
samplerate=fastspeech2_config.fs) + samplerate=am_config.fs) print(f"{utt_id} done!") # Randomly generate numbers of 0 ~ 0.2, 256 is the dim of spk_emb random_spk_emb = np.random.rand(256) * 0.2 random_spk_emb = paddle.to_tensor(random_spk_emb) utt_id = "random_spk_emb" with paddle.no_grad(): - wav = pwg_inference(fastspeech2_inference(phone_ids, spk_emb=spk_emb)) + wav = voc_inference(am_inference(phone_ids, spk_emb=random_spk_emb)) sf.write( str(output_dir / (utt_id + ".wav")), wav.numpy(), - samplerate=fastspeech2_config.fs) + samplerate=am_config.fs) print(f"{utt_id} done!") @@ -137,32 +174,53 @@ def main(): # parse args and config and redirect to train_sp parser = argparse.ArgumentParser(description="") parser.add_argument( - "--fastspeech2-config", type=str, help="fastspeech2 config file.") - parser.add_argument( - "--fastspeech2-checkpoint", + '--am', type=str, - help="fastspeech2 checkpoint to load.") + default='fastspeech2_aishell3', + choices=['fastspeech2_aishell3', 'tacotron2_aishell3'], + help='Choose acoustic model type of tts task.') parser.add_argument( - "--fastspeech2-stat", + '--am_config', type=str, - help="mean and standard deviation used to normalize spectrogram when training fastspeech2." - ) + default=None, + help='Config of acoustic model. Use default config when it is None.') parser.add_argument( - "--pwg-config", type=str, help="parallel wavegan config file.") - parser.add_argument( - "--pwg-checkpoint", + '--am_ckpt', type=str, - help="parallel wavegan generator parameters to load.") + default=None, + help='Checkpoint file of acoustic model.') parser.add_argument( - "--pwg-stat", + "--am_stat", type=str, - help="mean and standard deviation used to normalize spectrogram when training parallel wavegan." + default=None, + help="mean and standard deviation used to normalize spectrogram when training acoustic model." ) parser.add_argument( "--phones-dict", type=str, default="phone_id_map.txt", help="phone vocabulary file.") + # vocoder + parser.add_argument( + '--voc', + type=str, + default='pwgan_aishell3', + choices=['pwgan_aishell3'], + help='Choose vocoder type of tts task.') + + parser.add_argument( + '--voc_config', + type=str, + default=None, + help='Config of voc. Use default config when it is None.') + parser.add_argument( + '--voc_ckpt', type=str, default=None, help='Checkpoint file of voc.') + parser.add_argument( + "--voc_stat", + type=str, + default=None, + help="mean and standard deviation used to normalize spectrogram when training voc." + ) parser.add_argument( "--text", type=str, @@ -190,18 +248,7 @@ def main(): else: print("ngpu should >= 0 !") - with open(args.fastspeech2_config) as f: - fastspeech2_config = CfgNode(yaml.safe_load(f)) - with open(args.pwg_config) as f: - pwg_config = CfgNode(yaml.safe_load(f)) - - print("========Args========") - print(yaml.safe_dump(vars(args))) - print("========Config========") - print(fastspeech2_config) - print(pwg_config) - - voice_cloning(args, fastspeech2_config, pwg_config) + voice_cloning(args) if __name__ == "__main__": diff --git a/paddlespeech/t2s/exps/voice_cloning/__init__.py b/paddlespeech/t2s/exps/voice_cloning/__init__.py deleted file mode 100644 index abf198b9..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
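Note on the voice_cloning.py refactor above: instead of hard-coding FastSpeech2 and PWGGenerator, the script now resolves classes at runtime from a "{model_name}_{dataset}" identifier (e.g. "fastspeech2_aishell3") via the model_alias table of "module:ClassName" strings and dynamic_import. Below is a minimal sketch of that resolution convention, assuming a plain importlib-based resolver; the resolve() helper is illustrative only, while the patch itself calls paddlespeech.s2t.utils.dynamic_import.dynamic_import.

import importlib

# "module:ClassName" aliases, mirroring the model_alias entries added above
model_alias = {
    "fastspeech2": "paddlespeech.t2s.models.fastspeech2:FastSpeech2",
    "pwgan": "paddlespeech.t2s.models.parallel_wavegan:PWGGenerator",
}

def resolve(full_name: str, alias: dict):
    # "fastspeech2_aishell3" -> keep "fastspeech2", drop the "_aishell3" dataset suffix
    model_name = full_name[:full_name.rindex("_")]
    # split "module:ClassName" and import the class lazily
    module_path, class_name = alias[model_name].split(":")
    return getattr(importlib.import_module(module_path), class_name)

# usage (assuming paddlespeech is installed):
# am_class = resolve("fastspeech2_aishell3", model_alias)

The same convention covers the vocoder flag (--voc) and the inference wrappers, which are looked up by appending '_inference' to the resolved model name.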
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/__init__.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/__init__.py deleted file mode 100644 index abf198b9..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/aishell3.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/aishell3.py deleted file mode 100644 index da95582d..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/aishell3.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pickle -from pathlib import Path - -import numpy as np -from paddle.io import Dataset - -from paddlespeech.t2s.data import batch_spec -from paddlespeech.t2s.data import batch_text_id -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.preprocess_transcription import _phones -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.preprocess_transcription import _tones -from paddlespeech.t2s.frontend import Vocab - -voc_phones = Vocab(sorted(list(_phones))) -print("vocab_phones:\n", voc_phones) -voc_tones = Vocab(sorted(list(_tones))) -print("vocab_tones:\n", voc_tones) - - -class AiShell3(Dataset): - """Processed AiShell3 dataset.""" - - def __init__(self, root): - super().__init__() - self.root = Path(root).expanduser() - self.embed_dir = self.root / "embed" - self.mel_dir = self.root / "mel" - - with open(self.root / "metadata.pickle", 'rb') as f: - self.records = pickle.load(f) - - def __getitem__(self, index): - metadatum = self.records[index] - sentence_id = metadatum["sentence_id"] - speaker_id = sentence_id[:7] - phones = metadatum["phones"] - tones = metadatum["tones"] - phones = np.array( - [voc_phones.lookup(item) for item in phones], dtype=np.int64) - tones = np.array( - [voc_tones.lookup(item) for item in tones], dtype=np.int64) - mel = np.load(str(self.mel_dir / speaker_id / (sentence_id + ".npy"))) - embed = np.load( - str(self.embed_dir / speaker_id / (sentence_id + ".npy"))) - return phones, tones, mel, embed - - def __len__(self): - return len(self.records) - - -def collate_aishell3_examples(examples): - phones, tones, mel, embed = list(zip(*examples)) - - text_lengths = np.array([item.shape[0] for item in phones], dtype=np.int64) - spec_lengths = np.array([item.shape[1] for item in mel], dtype=np.int64) - T_dec = np.max(spec_lengths) - stop_tokens = ( - np.arange(T_dec) >= np.expand_dims(spec_lengths, -1)).astype(np.float32) - phones, _ = batch_text_id(phones) - tones, _ = batch_text_id(tones) - mel, _ = batch_spec(mel) - mel = np.transpose(mel, (0, 2, 1)) - embed = np.stack(embed) - # 7 fields - # (B, T), (B, T), (B, T, C), (B, C), (B,), (B,), (B, T) - return phones, tones, mel, embed, text_lengths, spec_lengths, stop_tokens - - -if __name__ == "__main__": - dataset = AiShell3("~/datasets/aishell3/train") - example = dataset[0] - - examples = [dataset[i] for i in range(10)] - batch = collate_aishell3_examples(examples) - - for field in batch: - print(field.shape, field.dtype) diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/chinese_g2p.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/chinese_g2p.py deleted file mode 100644 index 12de3bb7..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/chinese_g2p.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from typing import List -from typing import Tuple - -from pypinyin import lazy_pinyin -from pypinyin import Style - -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.preprocess_transcription import split_syllable - - -def convert_to_pinyin(text: str) -> List[str]: - """convert text into list of syllables, other characters that are not chinese, thus - cannot be converted to pinyin are splited. - """ - syllables = lazy_pinyin( - text, style=Style.TONE3, neutral_tone_with_five=True) - return syllables - - -def convert_sentence(text: str) -> List[Tuple[str]]: - """convert a sentence into two list: phones and tones""" - syllables = convert_to_pinyin(text) - phones = [] - tones = [] - for syllable in syllables: - p, t = split_syllable(syllable) - phones.extend(p) - tones.extend(t) - - return phones, tones diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/config.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/config.py deleted file mode 100644 index 8d8c9c4e..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/config.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from yacs.config import CfgNode as CN - -_C = CN() -_C.data = CN( - dict( - batch_size=32, # batch size - valid_size=64, # the first N examples are reserved for validation - sample_rate=22050, # Hz, sample rate - n_fft=1024, # fft frame size - win_length=1024, # window size - hop_length=256, # hop size between ajacent frame - fmax=8000, # Hz, max frequency when converting to mel - fmin=0, # Hz, min frequency when converting to mel - d_mels=80, # mel bands - padding_idx=0, # text embedding's padding index - )) - -_C.model = CN( - dict( - vocab_size=70, - n_tones=10, - reduction_factor=1, # reduction factor - d_encoder=512, # embedding & encoder's internal size - encoder_conv_layers=3, # number of conv layer in tacotron2 encoder - encoder_kernel_size=5, # kernel size of conv layers in tacotron2 encoder - d_prenet=256, # hidden size of decoder prenet - # hidden size of the first rnn layer in tacotron2 decoder - d_attention_rnn=1024, - # hidden size of the second rnn layer in tacotron2 decoder - d_decoder_rnn=1024, - d_attention=128, # hidden size of decoder location linear layer - attention_filters=32, # number of filter in decoder location conv layer - attention_kernel_size=31, # kernel size of decoder location conv layer - d_postnet=512, # hidden size of decoder postnet - postnet_kernel_size=5, # kernel size of conv layers in postnet - postnet_conv_layers=5, # number of conv layer in decoder postnet - p_encoder_dropout=0.5, # droput probability in encoder - p_prenet_dropout=0.5, # droput probability in decoder prenet - - # droput probability of first rnn layer in decoder - p_attention_dropout=0.1, - # droput probability of second rnn layer in decoder - p_decoder_dropout=0.1, - p_postnet_dropout=0.5, # droput probability in decoder postnet - guided_attention_loss_sigma=0.2, - d_global_condition=256, - 
- # whether to use a classifier to predict stop probability - use_stop_token=False, - # whether to use guided attention loss in training - use_guided_attention_loss=True, )) - -_C.training = CN( - dict( - lr=1e-3, # learning rate - weight_decay=1e-6, # the coeff of weight decay - grad_clip_thresh=1.0, # the clip norm of grad clip. - valid_interval=1000, # validation - save_interval=1000, # checkpoint - max_iteration=500000, # max iteration to train - )) - - -def get_cfg_defaults(): - """Get a yacs CfgNode object with default values for my_project.""" - # Return a clone so that the defaults will not be altered - # This is for the "local variable" use pattern - return _C.clone() diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/extract_mel.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/extract_mel.py deleted file mode 100644 index d12466f6..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/extract_mel.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import argparse -import multiprocessing as mp -from functools import partial -from pathlib import Path - -import numpy as np -import tqdm - -from paddlespeech.t2s.audio import AudioProcessor -from paddlespeech.t2s.audio.spec_normalizer import LogMagnitude -from paddlespeech.t2s.audio.spec_normalizer import NormalizerBase -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.config import get_cfg_defaults - - -def extract_mel(fname: Path, - input_dir: Path, - output_dir: Path, - p: AudioProcessor, - n: NormalizerBase): - relative_path = fname.relative_to(input_dir) - out_path = (output_dir / relative_path).with_suffix(".npy") - out_path.parent.mkdir(parents=True, exist_ok=True) - wav = p.read_wav(fname) - mel = p.mel_spectrogram(wav) - mel = n.transform(mel) - np.save(out_path, mel) - - -def extract_mel_multispeaker(config, input_dir, output_dir, extension=".wav"): - input_dir = Path(input_dir).expanduser() - fnames = list(input_dir.rglob(f"*{extension}")) - output_dir = Path(output_dir).expanduser() - output_dir.mkdir(parents=True, exist_ok=True) - - p = AudioProcessor(config.sample_rate, config.n_fft, config.win_length, - config.hop_length, config.d_mels, config.fmin, - config.fmax) - n = LogMagnitude(1e-5) - - func = partial( - extract_mel, input_dir=input_dir, output_dir=output_dir, p=p, n=n) - - with mp.Pool(16) as pool: - list( - tqdm.tqdm( - pool.imap(func, fnames), total=len(fnames), unit="utterance")) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Extract mel spectrogram from processed wav in AiShell3 training dataset." 
- ) - parser.add_argument( - "--config", - type=str, - help="yaml config file to overwrite the default config") - parser.add_argument( - "--input", - type=str, - default="~/datasets/aishell3/train/normalized_wav", - help="path of the processed wav folder") - parser.add_argument( - "--output", - type=str, - default="~/datasets/aishell3/train/mel", - help="path of the folder to save mel spectrograms") - parser.add_argument( - "--opts", - nargs=argparse.REMAINDER, - help="options to overwrite --config file and the default config, passing in KEY VALUE pairs" - ) - default_config = get_cfg_defaults() - - args = parser.parse_args() - if args.config: - default_config.merge_from_file(args.config) - if args.opts: - default_config.merge_from_list(args.opts) - default_config.freeze() - audio_config = default_config.data - - extract_mel_multispeaker(audio_config, args.input, args.output) diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/lexicon.txt b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/lexicon.txt deleted file mode 100644 index cc56b55d..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/lexicon.txt +++ /dev/null @@ -1,4150 +0,0 @@ -zhi1 zh iii1 -zhi2 zh iii2 -zhi3 zh iii3 -zhi4 zh iii4 -zhi5 zh iii5 -chi1 ch iii1 -chi2 ch iii2 -chi3 ch iii3 -chi4 ch iii4 -chi5 ch iii5 -shi1 sh iii1 -shi2 sh iii2 -shi3 sh iii3 -shi4 sh iii4 -shi5 sh iii5 -ri1 r iii1 -ri2 r iii2 -ri3 r iii3 -ri4 r iii4 -ri5 r iii5 -zi1 z ii1 -zi2 z ii2 -zi3 z ii3 -zi4 z ii4 -zi5 z ii5 -ci1 c ii1 -ci2 c ii2 -ci3 c ii3 -ci4 c ii4 -ci5 c ii5 -si1 s ii1 -si2 s ii2 -si3 s ii3 -si4 s ii4 -si5 s ii5 -a1 a1 -a2 a2 -a3 a3 -a4 a4 -a5 a5 -ba1 b a1 -ba2 b a2 -ba3 b a3 -ba4 b a4 -ba5 b a5 -pa1 p a1 -pa2 p a2 -pa3 p a3 -pa4 p a4 -pa5 p a5 -ma1 m a1 -ma2 m a2 -ma3 m a3 -ma4 m a4 -ma5 m a5 -fa1 f a1 -fa2 f a2 -fa3 f a3 -fa4 f a4 -fa5 f a5 -da1 d a1 -da2 d a2 -da3 d a3 -da4 d a4 -da5 d a5 -ta1 t a1 -ta2 t a2 -ta3 t a3 -ta4 t a4 -ta5 t a5 -na1 n a1 -na2 n a2 -na3 n a3 -na4 n a4 -na5 n a5 -la1 l a1 -la2 l a2 -la3 l a3 -la4 l a4 -la5 l a5 -ga1 g a1 -ga2 g a2 -ga3 g a3 -ga4 g a4 -ga5 g a5 -ka1 k a1 -ka2 k a2 -ka3 k a3 -ka4 k a4 -ka5 k a5 -ha1 h a1 -ha2 h a2 -ha3 h a3 -ha4 h a4 -ha5 h a5 -zha1 zh a1 -zha2 zh a2 -zha3 zh a3 -zha4 zh a4 -zha5 zh a5 -cha1 ch a1 -cha2 ch a2 -cha3 ch a3 -cha4 ch a4 -cha5 ch a5 -sha1 sh a1 -sha2 sh a2 -sha3 sh a3 -sha4 sh a4 -sha5 sh a5 -za1 z a1 -za2 z a2 -za3 z a3 -za4 z a4 -za5 z a5 -ca1 c a1 -ca2 c a2 -ca3 c a3 -ca4 c a4 -ca5 c a5 -sa1 s a1 -sa2 s a2 -sa3 s a3 -sa4 s a4 -sa5 s a5 -o1 o1 -o2 o2 -o3 o3 -o4 o4 -o5 o5 -bo1 b uo1 -bo2 b uo2 -bo3 b uo3 -bo4 b uo4 -bo5 b uo5 -po1 p uo1 -po2 p uo2 -po3 p uo3 -po4 p uo4 -po5 p uo5 -mo1 m uo1 -mo2 m uo2 -mo3 m uo3 -mo4 m uo4 -mo5 m uo5 -fo1 f uo1 -fo2 f uo2 -fo3 f uo3 -fo4 f uo4 -fo5 f uo5 -lo1 l o1 -lo2 l o2 -lo3 l o3 -lo4 l o4 -lo5 l o5 -e1 e1 -e2 e2 -e3 e3 -e4 e4 -e5 e5 -me1 m e1 -me2 m e2 -me3 m e3 -me4 m e4 -me5 m e5 -de1 d e1 -de2 d e2 -de3 d e3 -de4 d e4 -de5 d e5 -te1 t e1 -te2 t e2 -te3 t e3 -te4 t e4 -te5 t e5 -ne1 n e1 -ne2 n e2 -ne3 n e3 -ne4 n e4 -ne5 n e5 -le1 l e1 -le2 l e2 -le3 l e3 -le4 l e4 -le5 l e5 -ge1 g e1 -ge2 g e2 -ge3 g e3 -ge4 g e4 -ge5 g e5 -ke1 k e1 -ke2 k e2 -ke3 k e3 -ke4 k e4 -ke5 k e5 -he1 h e1 -he2 h e2 -he3 h e3 -he4 h e4 -he5 h e5 -zhe1 zh e1 -zhe2 zh e2 -zhe3 zh e3 -zhe4 zh e4 -zhe5 zh e5 -che1 ch e1 -che2 ch e2 -che3 ch e3 -che4 ch e4 -che5 ch e5 -she1 sh e1 -she2 sh e2 -she3 sh e3 -she4 sh e4 -she5 sh e5 -re1 r e1 -re2 r e2 -re3 r e3 -re4 r e4 -re5 r e5 -ze1 z e1 -ze2 z e2 -ze3 z e3 -ze4 z e4 -ze5 z e5 
-ce1 c e1 -ce2 c e2 -ce3 c e3 -ce4 c e4 -ce5 c e5 -se1 s e1 -se2 s e2 -se3 s e3 -se4 s e4 -se5 s e5 -ea1 ea1 -ea2 ea2 -ea3 ea3 -ea4 ea4 -ea5 ea5 -ai1 ai1 -ai2 ai2 -ai3 ai3 -ai4 ai4 -ai5 ai5 -bai1 b ai1 -bai2 b ai2 -bai3 b ai3 -bai4 b ai4 -bai5 b ai5 -pai1 p ai1 -pai2 p ai2 -pai3 p ai3 -pai4 p ai4 -pai5 p ai5 -mai1 m ai1 -mai2 m ai2 -mai3 m ai3 -mai4 m ai4 -mai5 m ai5 -dai1 d ai1 -dai2 d ai2 -dai3 d ai3 -dai4 d ai4 -dai5 d ai5 -tai1 t ai1 -tai2 t ai2 -tai3 t ai3 -tai4 t ai4 -tai5 t ai5 -nai1 n ai1 -nai2 n ai2 -nai3 n ai3 -nai4 n ai4 -nai5 n ai5 -lai1 l ai1 -lai2 l ai2 -lai3 l ai3 -lai4 l ai4 -lai5 l ai5 -gai1 g ai1 -gai2 g ai2 -gai3 g ai3 -gai4 g ai4 -gai5 g ai5 -kai1 k ai1 -kai2 k ai2 -kai3 k ai3 -kai4 k ai4 -kai5 k ai5 -hai1 h ai1 -hai2 h ai2 -hai3 h ai3 -hai4 h ai4 -hai5 h ai5 -zhai1 zh ai1 -zhai2 zh ai2 -zhai3 zh ai3 -zhai4 zh ai4 -zhai5 zh ai5 -chai1 ch ai1 -chai2 ch ai2 -chai3 ch ai3 -chai4 ch ai4 -chai5 ch ai5 -shai1 sh ai1 -shai2 sh ai2 -shai3 sh ai3 -shai4 sh ai4 -shai5 sh ai5 -zai1 z ai1 -zai2 z ai2 -zai3 z ai3 -zai4 z ai4 -zai5 z ai5 -cai1 c ai1 -cai2 c ai2 -cai3 c ai3 -cai4 c ai4 -cai5 c ai5 -sai1 s ai1 -sai2 s ai2 -sai3 s ai3 -sai4 s ai4 -sai5 s ai5 -ei1 ei1 -ei2 ei2 -ei3 ei3 -ei4 ei4 -ei5 ei5 -bei1 b ei1 -bei2 b ei2 -bei3 b ei3 -bei4 b ei4 -bei5 b ei5 -pei1 p ei1 -pei2 p ei2 -pei3 p ei3 -pei4 p ei4 -pei5 p ei5 -mei1 m ei1 -mei2 m ei2 -mei3 m ei3 -mei4 m ei4 -mei5 m ei5 -fei1 f ei1 -fei2 f ei2 -fei3 f ei3 -fei4 f ei4 -fei5 f ei5 -dei1 d ei1 -dei2 d ei2 -dei3 d ei3 -dei4 d ei4 -dei5 d ei5 -tei1 t ei1 -tei2 t ei2 -tei3 t ei3 -tei4 t ei4 -tei5 t ei5 -nei1 n ei1 -nei2 n ei2 -nei3 n ei3 -nei4 n ei4 -nei5 n ei5 -lei1 l ei1 -lei2 l ei2 -lei3 l ei3 -lei4 l ei4 -lei5 l ei5 -gei1 g ei1 -gei2 g ei2 -gei3 g ei3 -gei4 g ei4 -gei5 g ei5 -kei1 k ei1 -kei2 k ei2 -kei3 k ei3 -kei4 k ei4 -kei5 k ei5 -hei1 h ei1 -hei2 h ei2 -hei3 h ei3 -hei4 h ei4 -hei5 h ei5 -zhei1 zh ei1 -zhei2 zh ei2 -zhei3 zh ei3 -zhei4 zh ei4 -zhei5 zh ei5 -shei1 sh ei1 -shei2 sh ei2 -shei3 sh ei3 -shei4 sh ei4 -shei5 sh ei5 -zei1 z ei1 -zei2 z ei2 -zei3 z ei3 -zei4 z ei4 -zei5 z ei5 -ao1 au1 -ao2 au2 -ao3 au3 -ao4 au4 -ao5 au5 -bao1 b au1 -bao2 b au2 -bao3 b au3 -bao4 b au4 -bao5 b au5 -pao1 p au1 -pao2 p au2 -pao3 p au3 -pao4 p au4 -pao5 p au5 -mao1 m au1 -mao2 m au2 -mao3 m au3 -mao4 m au4 -mao5 m au5 -dao1 d au1 -dao2 d au2 -dao3 d au3 -dao4 d au4 -dao5 d au5 -tao1 t au1 -tao2 t au2 -tao3 t au3 -tao4 t au4 -tao5 t au5 -nao1 n au1 -nao2 n au2 -nao3 n au3 -nao4 n au4 -nao5 n au5 -lao1 l au1 -lao2 l au2 -lao3 l au3 -lao4 l au4 -lao5 l au5 -gao1 g au1 -gao2 g au2 -gao3 g au3 -gao4 g au4 -gao5 g au5 -kao1 k au1 -kao2 k au2 -kao3 k au3 -kao4 k au4 -kao5 k au5 -hao1 h au1 -hao2 h au2 -hao3 h au3 -hao4 h au4 -hao5 h au5 -zhao1 zh au1 -zhao2 zh au2 -zhao3 zh au3 -zhao4 zh au4 -zhao5 zh au5 -chao1 ch au1 -chao2 ch au2 -chao3 ch au3 -chao4 ch au4 -chao5 ch au5 -shao1 sh au1 -shao2 sh au2 -shao3 sh au3 -shao4 sh au4 -shao5 sh au5 -rao1 r au1 -rao2 r au2 -rao3 r au3 -rao4 r au4 -rao5 r au5 -zao1 z au1 -zao2 z au2 -zao3 z au3 -zao4 z au4 -zao5 z au5 -cao1 c au1 -cao2 c au2 -cao3 c au3 -cao4 c au4 -cao5 c au5 -sao1 s au1 -sao2 s au2 -sao3 s au3 -sao4 s au4 -sao5 s au5 -ou1 ou1 -ou2 ou2 -ou3 ou3 -ou4 ou4 -ou5 ou5 -pou1 p ou1 -pou2 p ou2 -pou3 p ou3 -pou4 p ou4 -pou5 p ou5 -mou1 m ou1 -mou2 m ou2 -mou3 m ou3 -mou4 m ou4 -mou5 m ou5 -fou1 f ou1 -fou2 f ou2 -fou3 f ou3 -fou4 f ou4 -fou5 f ou5 -dou1 d ou1 -dou2 d ou2 -dou3 d ou3 -dou4 d ou4 -dou5 d ou5 -tou1 t ou1 -tou2 t ou2 -tou3 t ou3 -tou4 t ou4 -tou5 t ou5 -nou1 n ou1 -nou2 n ou2 -nou3 
n ou3 -nou4 n ou4 -nou5 n ou5 -lou1 l ou1 -lou2 l ou2 -lou3 l ou3 -lou4 l ou4 -lou5 l ou5 -gou1 g ou1 -gou2 g ou2 -gou3 g ou3 -gou4 g ou4 -gou5 g ou5 -kou1 k ou1 -kou2 k ou2 -kou3 k ou3 -kou4 k ou4 -kou5 k ou5 -hou1 h ou1 -hou2 h ou2 -hou3 h ou3 -hou4 h ou4 -hou5 h ou5 -zhou1 zh ou1 -zhou2 zh ou2 -zhou3 zh ou3 -zhou4 zh ou4 -zhou5 zh ou5 -chou1 ch ou1 -chou2 ch ou2 -chou3 ch ou3 -chou4 ch ou4 -chou5 ch ou5 -shou1 sh ou1 -shou2 sh ou2 -shou3 sh ou3 -shou4 sh ou4 -shou5 sh ou5 -rou1 r ou1 -rou2 r ou2 -rou3 r ou3 -rou4 r ou4 -rou5 r ou5 -zou1 z ou1 -zou2 z ou2 -zou3 z ou3 -zou4 z ou4 -zou5 z ou5 -cou1 c ou1 -cou2 c ou2 -cou3 c ou3 -cou4 c ou4 -cou5 c ou5 -sou1 s ou1 -sou2 s ou2 -sou3 s ou3 -sou4 s ou4 -sou5 s ou5 -an1 an1 -an2 an2 -an3 an3 -an4 an4 -an5 an5 -ban1 b an1 -ban2 b an2 -ban3 b an3 -ban4 b an4 -ban5 b an5 -pan1 p an1 -pan2 p an2 -pan3 p an3 -pan4 p an4 -pan5 p an5 -man1 m an1 -man2 m an2 -man3 m an3 -man4 m an4 -man5 m an5 -fan1 f an1 -fan2 f an2 -fan3 f an3 -fan4 f an4 -fan5 f an5 -dan1 d an1 -dan2 d an2 -dan3 d an3 -dan4 d an4 -dan5 d an5 -tan1 t an1 -tan2 t an2 -tan3 t an3 -tan4 t an4 -tan5 t an5 -nan1 n an1 -nan2 n an2 -nan3 n an3 -nan4 n an4 -nan5 n an5 -lan1 l an1 -lan2 l an2 -lan3 l an3 -lan4 l an4 -lan5 l an5 -gan1 g an1 -gan2 g an2 -gan3 g an3 -gan4 g an4 -gan5 g an5 -kan1 k an1 -kan2 k an2 -kan3 k an3 -kan4 k an4 -kan5 k an5 -han1 h an1 -han2 h an2 -han3 h an3 -han4 h an4 -han5 h an5 -zhan1 zh an1 -zhan2 zh an2 -zhan3 zh an3 -zhan4 zh an4 -zhan5 zh an5 -chan1 ch an1 -chan2 ch an2 -chan3 ch an3 -chan4 ch an4 -chan5 ch an5 -shan1 sh an1 -shan2 sh an2 -shan3 sh an3 -shan4 sh an4 -shan5 sh an5 -ran1 r an1 -ran2 r an2 -ran3 r an3 -ran4 r an4 -ran5 r an5 -zan1 z an1 -zan2 z an2 -zan3 z an3 -zan4 z an4 -zan5 z an5 -can1 c an1 -can2 c an2 -can3 c an3 -can4 c an4 -can5 c an5 -san1 s an1 -san2 s an2 -san3 s an3 -san4 s an4 -san5 s an5 -en1 en1 -en2 en2 -en3 en3 -en4 en4 -en5 en5 -ben1 b en1 -ben2 b en2 -ben3 b en3 -ben4 b en4 -ben5 b en5 -pen1 p en1 -pen2 p en2 -pen3 p en3 -pen4 p en4 -pen5 p en5 -men1 m en1 -men2 m en2 -men3 m en3 -men4 m en4 -men5 m en5 -fen1 f en1 -fen2 f en2 -fen3 f en3 -fen4 f en4 -fen5 f en5 -den1 d en1 -den2 d en2 -den3 d en3 -den4 d en4 -den5 d en5 -nen1 n en1 -nen2 n en2 -nen3 n en3 -nen4 n en4 -nen5 n en5 -gen1 g en1 -gen2 g en2 -gen3 g en3 -gen4 g en4 -gen5 g en5 -ken1 k en1 -ken2 k en2 -ken3 k en3 -ken4 k en4 -ken5 k en5 -hen1 h en1 -hen2 h en2 -hen3 h en3 -hen4 h en4 -hen5 h en5 -zhen1 zh en1 -zhen2 zh en2 -zhen3 zh en3 -zhen4 zh en4 -zhen5 zh en5 -chen1 ch en1 -chen2 ch en2 -chen3 ch en3 -chen4 ch en4 -chen5 ch en5 -shen1 sh en1 -shen2 sh en2 -shen3 sh en3 -shen4 sh en4 -shen5 sh en5 -ren1 r en1 -ren2 r en2 -ren3 r en3 -ren4 r en4 -ren5 r en5 -zen1 z en1 -zen2 z en2 -zen3 z en3 -zen4 z en4 -zen5 z en5 -cen1 c en1 -cen2 c en2 -cen3 c en3 -cen4 c en4 -cen5 c en5 -sen1 s en1 -sen2 s en2 -sen3 s en3 -sen4 s en4 -sen5 s en5 -ang1 ang1 -ang2 ang2 -ang3 ang3 -ang4 ang4 -ang5 ang5 -bang1 b ang1 -bang2 b ang2 -bang3 b ang3 -bang4 b ang4 -bang5 b ang5 -pang1 p ang1 -pang2 p ang2 -pang3 p ang3 -pang4 p ang4 -pang5 p ang5 -mang1 m ang1 -mang2 m ang2 -mang3 m ang3 -mang4 m ang4 -mang5 m ang5 -fang1 f ang1 -fang2 f ang2 -fang3 f ang3 -fang4 f ang4 -fang5 f ang5 -dang1 d ang1 -dang2 d ang2 -dang3 d ang3 -dang4 d ang4 -dang5 d ang5 -tang1 t ang1 -tang2 t ang2 -tang3 t ang3 -tang4 t ang4 -tang5 t ang5 -nang1 n ang1 -nang2 n ang2 -nang3 n ang3 -nang4 n ang4 -nang5 n ang5 -lang1 l ang1 -lang2 l ang2 -lang3 l ang3 -lang4 l ang4 -lang5 l ang5 -gang1 g ang1 -gang2 g ang2 
-gang3 g ang3 -gang4 g ang4 -gang5 g ang5 -kang1 k ang1 -kang2 k ang2 -kang3 k ang3 -kang4 k ang4 -kang5 k ang5 -hang1 h ang1 -hang2 h ang2 -hang3 h ang3 -hang4 h ang4 -hang5 h ang5 -zhang1 zh ang1 -zhang2 zh ang2 -zhang3 zh ang3 -zhang4 zh ang4 -zhang5 zh ang5 -chang1 ch ang1 -chang2 ch ang2 -chang3 ch ang3 -chang4 ch ang4 -chang5 ch ang5 -shang1 sh ang1 -shang2 sh ang2 -shang3 sh ang3 -shang4 sh ang4 -shang5 sh ang5 -rang1 r ang1 -rang2 r ang2 -rang3 r ang3 -rang4 r ang4 -rang5 r ang5 -zang1 z ang1 -zang2 z ang2 -zang3 z ang3 -zang4 z ang4 -zang5 z ang5 -cang1 c ang1 -cang2 c ang2 -cang3 c ang3 -cang4 c ang4 -cang5 c ang5 -sang1 s ang1 -sang2 s ang2 -sang3 s ang3 -sang4 s ang4 -sang5 s ang5 -eng1 eng1 -eng2 eng2 -eng3 eng3 -eng4 eng4 -eng5 eng5 -beng1 b eng1 -beng2 b eng2 -beng3 b eng3 -beng4 b eng4 -beng5 b eng5 -peng1 p eng1 -peng2 p eng2 -peng3 p eng3 -peng4 p eng4 -peng5 p eng5 -meng1 m eng1 -meng2 m eng2 -meng3 m eng3 -meng4 m eng4 -meng5 m eng5 -feng1 f eng1 -feng2 f eng2 -feng3 f eng3 -feng4 f eng4 -feng5 f eng5 -deng1 d eng1 -deng2 d eng2 -deng3 d eng3 -deng4 d eng4 -deng5 d eng5 -teng1 t eng1 -teng2 t eng2 -teng3 t eng3 -teng4 t eng4 -teng5 t eng5 -neng1 n eng1 -neng2 n eng2 -neng3 n eng3 -neng4 n eng4 -neng5 n eng5 -leng1 l eng1 -leng2 l eng2 -leng3 l eng3 -leng4 l eng4 -leng5 l eng5 -geng1 g eng1 -geng2 g eng2 -geng3 g eng3 -geng4 g eng4 -geng5 g eng5 -keng1 k eng1 -keng2 k eng2 -keng3 k eng3 -keng4 k eng4 -keng5 k eng5 -heng1 h eng1 -heng2 h eng2 -heng3 h eng3 -heng4 h eng4 -heng5 h eng5 -zheng1 zh eng1 -zheng2 zh eng2 -zheng3 zh eng3 -zheng4 zh eng4 -zheng5 zh eng5 -cheng1 ch eng1 -cheng2 ch eng2 -cheng3 ch eng3 -cheng4 ch eng4 -cheng5 ch eng5 -sheng1 sh eng1 -sheng2 sh eng2 -sheng3 sh eng3 -sheng4 sh eng4 -sheng5 sh eng5 -reng1 r eng1 -reng2 r eng2 -reng3 r eng3 -reng4 r eng4 -reng5 r eng5 -zeng1 z eng1 -zeng2 z eng2 -zeng3 z eng3 -zeng4 z eng4 -zeng5 z eng5 -ceng1 c eng1 -ceng2 c eng2 -ceng3 c eng3 -ceng4 c eng4 -ceng5 c eng5 -seng1 s eng1 -seng2 s eng2 -seng3 s eng3 -seng4 s eng4 -seng5 s eng5 -er1 er1 -er2 er2 -er3 er3 -er4 er4 -er5 er5 -yi1 y i1 -yi2 y i2 -yi3 y i3 -yi4 y i4 -yi5 y i5 -bi1 b i1 -bi2 b i2 -bi3 b i3 -bi4 b i4 -bi5 b i5 -pi1 p i1 -pi2 p i2 -pi3 p i3 -pi4 p i4 -pi5 p i5 -mi1 m i1 -mi2 m i2 -mi3 m i3 -mi4 m i4 -mi5 m i5 -di1 d i1 -di2 d i2 -di3 d i3 -di4 d i4 -di5 d i5 -ti1 t i1 -ti2 t i2 -ti3 t i3 -ti4 t i4 -ti5 t i5 -ni1 n i1 -ni2 n i2 -ni3 n i3 -ni4 n i4 -ni5 n i5 -li1 l i1 -li2 l i2 -li3 l i3 -li4 l i4 -li5 l i5 -ji1 j i1 -ji2 j i2 -ji3 j i3 -ji4 j i4 -ji5 j i5 -qi1 q i1 -qi2 q i2 -qi3 q i3 -qi4 q i4 -qi5 q i5 -xi1 x i1 -xi2 x i2 -xi3 x i3 -xi4 x i4 -xi5 x i5 -ya1 y ia1 -ya2 y ia2 -ya3 y ia3 -ya4 y ia4 -ya5 y ia5 -dia1 d ia1 -dia2 d ia2 -dia3 d ia3 -dia4 d ia4 -dia5 d ia5 -lia1 l ia1 -lia2 l ia2 -lia3 l ia3 -lia4 l ia4 -lia5 l ia5 -jia1 j ia1 -jia2 j ia2 -jia3 j ia3 -jia4 j ia4 -jia5 j ia5 -qia1 q ia1 -qia2 q ia2 -qia3 q ia3 -qia4 q ia4 -qia5 q ia5 -xia1 x ia1 -xia2 x ia2 -xia3 x ia3 -xia4 x ia4 -xia5 x ia5 -yo1 y io1 -yo2 y io2 -yo3 y io3 -yo4 y io4 -yo5 y io5 -ye1 y ie1 -ye2 y ie2 -ye3 y ie3 -ye4 y ie4 -ye5 y ie5 -bie1 b ie1 -bie2 b ie2 -bie3 b ie3 -bie4 b ie4 -bie5 b ie5 -pie1 p ie1 -pie2 p ie2 -pie3 p ie3 -pie4 p ie4 -pie5 p ie5 -mie1 m ie1 -mie2 m ie2 -mie3 m ie3 -mie4 m ie4 -mie5 m ie5 -die1 d ie1 -die2 d ie2 -die3 d ie3 -die4 d ie4 -die5 d ie5 -tie1 t ie1 -tie2 t ie2 -tie3 t ie3 -tie4 t ie4 -tie5 t ie5 -nie1 n ie1 -nie2 n ie2 -nie3 n ie3 -nie4 n ie4 -nie5 n ie5 -lie1 l ie1 -lie2 l ie2 -lie3 l ie3 -lie4 l ie4 -lie5 l ie5 -jie1 j ie1 -jie2 j ie2 
-jie3 j ie3 -jie4 j ie4 -jie5 j ie5 -qie1 q ie1 -qie2 q ie2 -qie3 q ie3 -qie4 q ie4 -qie5 q ie5 -xie1 x ie1 -xie2 x ie2 -xie3 x ie3 -xie4 x ie4 -xie5 x ie5 -yai1 y ai1 -yai2 y ai2 -yai3 y ai3 -yai4 y ai4 -yai5 y ai5 -yao1 y au1 -yao2 y au2 -yao3 y au3 -yao4 y au4 -yao5 y au5 -biao1 b iau1 -biao2 b iau2 -biao3 b iau3 -biao4 b iau4 -biao5 b iau5 -piao1 p iau1 -piao2 p iau2 -piao3 p iau3 -piao4 p iau4 -piao5 p iau5 -miao1 m iau1 -miao2 m iau2 -miao3 m iau3 -miao4 m iau4 -miao5 m iau5 -fiao1 f iau1 -fiao2 f iau2 -fiao3 f iau3 -fiao4 f iau4 -fiao5 f iau5 -diao1 d iau1 -diao2 d iau2 -diao3 d iau3 -diao4 d iau4 -diao5 d iau5 -tiao1 t iau1 -tiao2 t iau2 -tiao3 t iau3 -tiao4 t iau4 -tiao5 t iau5 -niao1 n iau1 -niao2 n iau2 -niao3 n iau3 -niao4 n iau4 -niao5 n iau5 -liao1 l iau1 -liao2 l iau2 -liao3 l iau3 -liao4 l iau4 -liao5 l iau5 -jiao1 j iau1 -jiao2 j iau2 -jiao3 j iau3 -jiao4 j iau4 -jiao5 j iau5 -qiao1 q iau1 -qiao2 q iau2 -qiao3 q iau3 -qiao4 q iau4 -qiao5 q iau5 -xiao1 x iau1 -xiao2 x iau2 -xiao3 x iau3 -xiao4 x iau4 -xiao5 x iau5 -you1 y iou1 -you2 y iou2 -you3 y iou3 -you4 y iou4 -you5 y iou5 -miu1 m iou1 -miu2 m iou2 -miu3 m iou3 -miu4 m iou4 -miu5 m iou5 -diu1 d iou1 -diu2 d iou2 -diu3 d iou3 -diu4 d iou4 -diu5 d iou5 -niu1 n iou1 -niu2 n iou2 -niu3 n iou3 -niu4 n iou4 -niu5 n iou5 -liu1 l iou1 -liu2 l iou2 -liu3 l iou3 -liu4 l iou4 -liu5 l iou5 -jiu1 j iou1 -jiu2 j iou2 -jiu3 j iou3 -jiu4 j iou4 -jiu5 j iou5 -qiu1 q iou1 -qiu2 q iou2 -qiu3 q iou3 -qiu4 q iou4 -qiu5 q iou5 -xiu1 xiou1 -xiu2 xiou2 -xiu3 xiou3 -xiu4 xiou4 -xiu5 xiou5 -yan1 y ian1 -yan2 y ian2 -yan3 y ian3 -yan4 y ian4 -yan5 y ian5 -bian1 b ian1 -bian2 b ian2 -bian3 b ian3 -bian4 b ian4 -bian5 b ian5 -pian1 p ian1 -pian2 p ian2 -pian3 p ian3 -pian4 p ian4 -pian5 p ian5 -mian1 m ian1 -mian2 m ian2 -mian3 m ian3 -mian4 m ian4 -mian5 m ian5 -dian1 d ian1 -dian2 d ian2 -dian3 d ian3 -dian4 d ian4 -dian5 d ian5 -tian1 t ian1 -tian2 t ian2 -tian3 t ian3 -tian4 t ian4 -tian5 t ian5 -nian1 n ian1 -nian2 n ian2 -nian3 n ian3 -nian4 n ian4 -nian5 n ian5 -lian1 l ian1 -lian2 l ian2 -lian3 l ian3 -lian4 l ian4 -lian5 l ian5 -jian1 j ian1 -jian2 j ian2 -jian3 j ian3 -jian4 j ian4 -jian5 j ian5 -qian1 q ian1 -qian2 q ian2 -qian3 q ian3 -qian4 q ian4 -qian5 q ian5 -xian1 x ian1 -xian2 x ian2 -xian3 x ian3 -xian4 x ian4 -xian5 x ian5 -yin1 y in1 -yin2 y in2 -yin3 y in3 -yin4 y in4 -yin5 y in5 -bin1 b in1 -bin2 b in2 -bin3 b in3 -bin4 b in4 -bin5 b in5 -pin1 p in1 -pin2 p in2 -pin3 p in3 -pin4 p in4 -pin5 p in5 -min1 m in1 -min2 m in2 -min3 m in3 -min4 m in4 -min5 m in5 -din1 d in1 -din2 d in2 -din3 d in3 -din4 d in4 -din5 d in5 -nin1 n in1 -nin2 n in2 -nin3 n in3 -nin4 n in4 -nin5 n in5 -lin1 l in1 -lin2 l in2 -lin3 l in3 -lin4 l in4 -lin5 l in5 -jin1 j in1 -jin2 j in2 -jin3 j in3 -jin4 j in4 -jin5 j in5 -qin1 q in1 -qin2 q in2 -qin3 q in3 -qin4 q in4 -qin5 q in5 -xin1 x in1 -xin2 x in2 -xin3 x in3 -xin4 x in4 -xin5 x in5 -yang1 y iang1 -yang2 y iang2 -yang3 y iang3 -yang4 y iang4 -yang5 y iang5 -biang1 b iang1 -biang2 b iang2 -biang3 b iang3 -biang4 b iang4 -biang5 b iang5 -niang1 n iang1 -niang2 n iang2 -niang3 n iang3 -niang4 n iang4 -niang5 n iang5 -liang1 l iang1 -liang2 l iang2 -liang3 l iang3 -liang4 l iang4 -liang5 l iang5 -jiang1 j iang1 -jiang2 j iang2 -jiang3 j iang3 -jiang4 j iang4 -jiang5 j iang5 -qiang1 q iang1 -qiang2 q iang2 -qiang3 q iang3 -qiang4 q iang4 -qiang5 q iang5 -xiang1 x iang1 -xiang2 x iang2 -xiang3 x iang3 -xiang4 x iang4 -xiang5 x iang5 -ying1 y ing1 -ying2 y ing2 -ying3 y ing3 -ying4 y ing4 -ying5 y ing5 
-bing1 b ing1 -bing2 b ing2 -bing3 b ing3 -bing4 b ing4 -bing5 b ing5 -ping1 p ing1 -ping2 p ing2 -ping3 p ing3 -ping4 p ing4 -ping5 p ing5 -ming1 m ing1 -ming2 m ing2 -ming3 m ing3 -ming4 m ing4 -ming5 m ing5 -ding1 d ing1 -ding2 d ing2 -ding3 d ing3 -ding4 d ing4 -ding5 d ing5 -ting1 t ing1 -ting2 t ing2 -ting3 t ing3 -ting4 t ing4 -ting5 t ing5 -ning1 n ing1 -ning2 n ing2 -ning3 n ing3 -ning4 n ing4 -ning5 n ing5 -ling1 l ing1 -ling2 l ing2 -ling3 l ing3 -ling4 l ing4 -ling5 l ing5 -jing1 j ing1 -jing2 j ing2 -jing3 j ing3 -jing4 j ing4 -jing5 j ing5 -qing1 q ing1 -qing2 q ing2 -qing3 q ing3 -qing4 q ing4 -qing5 q ing5 -xing1 x ing1 -xing2 x ing2 -xing3 x ing3 -xing4 x ing4 -xing5 x ing5 -wu1 w u1 -wu2 w u2 -wu3 w u3 -wu4 w u4 -wu5 w u5 -bu1 b u1 -bu2 b u2 -bu3 b u3 -bu4 b u4 -bu5 b u5 -pu1 p u1 -pu2 p u2 -pu3 p u3 -pu4 p u4 -pu5 p u5 -mu1 m u1 -mu2 m u2 -mu3 m u3 -mu4 m u4 -mu5 m u5 -fu1 f u1 -fu2 f u2 -fu3 f u3 -fu4 f u4 -fu5 f u5 -du1 d u1 -du2 d u2 -du3 d u3 -du4 d u4 -du5 d u5 -tu1 t u1 -tu2 t u2 -tu3 t u3 -tu4 t u4 -tu5 t u5 -nu1 n u1 -nu2 n u2 -nu3 n u3 -nu4 n u4 -nu5 n u5 -lu1 l u1 -lu2 l u2 -lu3 l u3 -lu4 l u4 -lu5 l u5 -gu1 g u1 -gu2 g u2 -gu3 g u3 -gu4 g u4 -gu5 g u5 -ku1 k u1 -ku2 k u2 -ku3 k u3 -ku4 k u4 -ku5 k u5 -hu1 h u1 -hu2 h u2 -hu3 h u3 -hu4 h u4 -hu5 h u5 -zhu1 zh u1 -zhu2 zh u2 -zhu3 zh u3 -zhu4 zh u4 -zhu5 zh u5 -chu1 ch u1 -chu2 ch u2 -chu3 ch u3 -chu4 ch u4 -chu5 ch u5 -shu1 sh u1 -shu2 sh u2 -shu3 sh u3 -shu4 sh u4 -shu5 sh u5 -ru1 r u1 -ru2 r u2 -ru3 r u3 -ru4 r u4 -ru5 r u5 -zu1 z u1 -zu2 z u2 -zu3 z u3 -zu4 z u4 -zu5 z u5 -cu1 c u1 -cu2 c u2 -cu3 c u3 -cu4 c u4 -cu5 c u5 -su1 s u1 -su2 s u2 -su3 s u3 -su4 s u4 -su5 s u5 -wa1 w ua1 -wa2 w ua2 -wa3 w ua3 -wa4 w ua4 -wa5 w ua5 -gua1 g ua1 -gua2 g ua2 -gua3 g ua3 -gua4 g ua4 -gua5 g ua5 -kua1 k ua1 -kua2 k ua2 -kua3 k ua3 -kua4 k ua4 -kua5 k ua5 -hua1 h ua1 -hua2 h ua2 -hua3 h ua3 -hua4 h ua4 -hua5 h ua5 -zhua1 zh ua1 -zhua2 zh ua2 -zhua3 zh ua3 -zhua4 zh ua4 -zhua5 zh ua5 -chua1 ch ua1 -chua2 ch ua2 -chua3 ch ua3 -chua4 ch ua4 -chua5 ch ua5 -shua1 sh ua1 -shua2 sh ua2 -shua3 sh ua3 -shua4 sh ua4 -shua5 sh ua5 -wo1 w uo1 -wo2 w uo2 -wo3 w uo3 -wo4 w uo4 -wo5 w uo5 -duo1 d uo1 -duo2 d uo2 -duo3 d uo3 -duo4 d uo4 -duo5 d uo5 -tuo1 t uo1 -tuo2 t uo2 -tuo3 t uo3 -tuo4 t uo4 -tuo5 t uo5 -nuo1 n uo1 -nuo2 n uo2 -nuo3 n uo3 -nuo4 n uo4 -nuo5 n uo5 -luo1 l uo1 -luo2 l uo2 -luo3 l uo3 -luo4 l uo4 -luo5 l uo5 -guo1 g uo1 -guo2 g uo2 -guo3 g uo3 -guo4 g uo4 -guo5 g uo5 -kuo1 k uo1 -kuo2 k uo2 -kuo3 k uo3 -kuo4 k uo4 -kuo5 k uo5 -huo1 h uo1 -huo2 h uo2 -huo3 h uo3 -huo4 h uo4 -huo5 h uo5 -zhuo1 zh uo1 -zhuo2 zh uo2 -zhuo3 zh uo3 -zhuo4 zh uo4 -zhuo5 zh uo5 -chuo1 ch uo1 -chuo2 ch uo2 -chuo3 ch uo3 -chuo4 ch uo4 -chuo5 ch uo5 -shuo1 sh uo1 -shuo2 sh uo2 -shuo3 sh uo3 -shuo4 sh uo4 -shuo5 sh uo5 -ruo1 r uo1 -ruo2 r uo2 -ruo3 r uo3 -ruo4 r uo4 -ruo5 r uo5 -zuo1 z uo1 -zuo2 z uo2 -zuo3 z uo3 -zuo4 z uo4 -zuo5 z uo5 -cuo1 c uo1 -cuo2 c uo2 -cuo3 c uo3 -cuo4 c uo4 -cuo5 c uo5 -suo1 s uo1 -suo2 s uo2 -suo3 s uo3 -suo4 s uo4 -suo5 s uo5 -wai1 w uai1 -wai2 w uai2 -wai3 w uai3 -wai4 w uai4 -wai5 w uai5 -guai1 g uai1 -guai2 g uai2 -guai3 g uai3 -guai4 g uai4 -guai5 g uai5 -kuai1 k uai1 -kuai2 k uai2 -kuai3 k uai3 -kuai4 k uai4 -kuai5 k uai5 -huai1 h uai1 -huai2 h uai2 -huai3 h uai3 -huai4 h uai4 -huai5 h uai5 -zhuai1 zh uai1 -zhuai2 zh uai2 -zhuai3 zh uai3 -zhuai4 zh uai4 -zhuai5 zh uai5 -chuai1 ch uai1 -chuai2 ch uai2 -chuai3 ch uai3 -chuai4 ch uai4 -chuai5 ch uai5 -shuai1 sh uai1 -shuai2 sh uai2 -shuai3 sh uai3 -shuai4 sh uai4 
-shuai5 sh uai5 -wei1 w uei1 -wei2 w uei2 -wei3 w uei3 -wei4 w uei4 -wei5 w uei5 -dui1 d uei1 -dui2 d uei2 -dui3 d uei3 -dui4 d uei4 -dui5 d uei5 -tui1 t uei1 -tui2 t uei2 -tui3 t uei3 -tui4 t uei4 -tui5 t uei5 -gui1 g uei1 -gui2 g uei2 -gui3 g uei3 -gui4 g uei4 -gui5 g uei5 -kui1 k uei1 -kui2 k uei2 -kui3 k uei3 -kui4 k uei4 -kui5 k uei5 -hui1 h uei1 -hui2 h uei2 -hui3 h uei3 -hui4 h uei4 -hui5 h uei5 -zhui1 zh uei1 -zhui2 zh uei2 -zhui3 zh uei3 -zhui4 zh uei4 -zhui5 zh uei5 -chui1 ch uei1 -chui2 ch uei2 -chui3 ch uei3 -chui4 ch uei4 -chui5 ch uei5 -shui1 sh uei1 -shui2 sh uei2 -shui3 sh uei3 -shui4 sh uei4 -shui5 sh uei5 -rui1 r uei1 -rui2 r uei2 -rui3 r uei3 -rui4 r uei4 -rui5 r uei5 -zui1 z uei1 -zui2 z uei2 -zui3 z uei3 -zui4 z uei4 -zui5 z uei5 -cui1 c uei1 -cui2 c uei2 -cui3 c uei3 -cui4 c uei4 -cui5 c uei5 -sui1 s uei1 -sui2 s uei2 -sui3 s uei3 -sui4 s uei4 -sui5 s uei5 -wan1 w uan1 -wan2 w uan2 -wan3 w uan3 -wan4 w uan4 -wan5 w uan5 -duan1 d uan1 -duan2 d uan2 -duan3 d uan3 -duan4 d uan4 -duan5 d uan5 -tuan1 t uan1 -tuan2 t uan2 -tuan3 t uan3 -tuan4 t uan4 -tuan5 t uan5 -nuan1 n uan1 -nuan2 n uan2 -nuan3 n uan3 -nuan4 n uan4 -nuan5 n uan5 -luan1 l uan1 -luan2 l uan2 -luan3 l uan3 -luan4 l uan4 -luan5 l uan5 -guan1 g uan1 -guan2 g uan2 -guan3 g uan3 -guan4 g uan4 -guan5 g uan5 -kuan1 k uan1 -kuan2 k uan2 -kuan3 k uan3 -kuan4 k uan4 -kuan5 k uan5 -huan1 h uan1 -huan2 h uan2 -huan3 h uan3 -huan4 h uan4 -huan5 h uan5 -zhuan1 zh uan1 -zhuan2 zh uan2 -zhuan3 zh uan3 -zhuan4 zh uan4 -zhuan5 zh uan5 -chuan1 ch uan1 -chuan2 ch uan2 -chuan3 ch uan3 -chuan4 ch uan4 -chuan5 ch uan5 -shuan1 sh uan1 -shuan2 sh uan2 -shuan3 sh uan3 -shuan4 sh uan4 -shuan5 sh uan5 -ruan1 r uan1 -ruan2 r uan2 -ruan3 r uan3 -ruan4 r uan4 -ruan5 r uan5 -zuan1 z uan1 -zuan2 z uan2 -zuan3 z uan3 -zuan4 z uan4 -zuan5 z uan5 -cuan1 c uan1 -cuan2 c uan2 -cuan3 c uan3 -cuan4 c uan4 -cuan5 c uan5 -suan1 s uan1 -suan2 s uan2 -suan3 s uan3 -suan4 s uan4 -suan5 s uan5 -wen1 w uen1 -wen2 w uen2 -wen3 w uen3 -wen4 w uen4 -wen5 w uen5 -dun1 d uen1 -dun2 d uen2 -dun3 d uen3 -dun4 d uen4 -dun5 d uen5 -tun1 t uen1 -tun2 t uen2 -tun3 t uen3 -tun4 t uen4 -tun5 t uen5 -nun1 n uen1 -nun2 n uen2 -nun3 n uen3 -nun4 n uen4 -nun5 n uen5 -lun1 l uen1 -lun2 l uen2 -lun3 l uen3 -lun4 l uen4 -lun5 l uen5 -gun1 g uen1 -gun2 g uen2 -gun3 g uen3 -gun4 g uen4 -gun5 g uen5 -kun1 k uen1 -kun2 k uen2 -kun3 k uen3 -kun4 k uen4 -kun5 k uen5 -hun1 h uen1 -hun2 h uen2 -hun3 h uen3 -hun4 h uen4 -hun5 h uen5 -zhun1 zh uen1 -zhun2 zh uen2 -zhun3 zh uen3 -zhun4 zh uen4 -zhun5 zh uen5 -chun1 ch uen1 -chun2 ch uen2 -chun3 ch uen3 -chun4 ch uen4 -chun5 ch uen5 -shun1 sh uen1 -shun2 sh uen2 -shun3 sh uen3 -shun4 sh uen4 -shun5 sh uen5 -run1 r uen1 -run2 r uen2 -run3 r uen3 -run4 r uen4 -run5 r uen5 -zun1 z uen1 -zun2 z uen2 -zun3 z uen3 -zun4 z uen4 -zun5 z uen5 -cun1 c uen1 -cun2 c uen2 -cun3 c uen3 -cun4 c uen4 -cun5 c uen5 -sun1 s uen1 -sun2 s uen2 -sun3 s uen3 -sun4 s uen4 -sun5 s uen5 -wang1 w uang1 -wang2 w uang2 -wang3 w uang3 -wang4 w uang4 -wang5 w uang5 -guang1 g uang1 -guang2 g uang2 -guang3 g uang3 -guang4 g uang4 -guang5 g uang5 -kuang1 k uang1 -kuang2 k uang2 -kuang3 k uang3 -kuang4 k uang4 -kuang5 k uang5 -huang1 h uang1 -huang2 h uang2 -huang3 h uang3 -huang4 h uang4 -huang5 h uang5 -zhuang1 zh uang1 -zhuang2 zh uang2 -zhuang3 zh uang3 -zhuang4 zh uang4 -zhuang5 zh uang5 -chuang1 ch uang1 -chuang2 ch uang2 -chuang3 ch uang3 -chuang4 ch uang4 -chuang5 ch uang5 -shuang1 sh uang1 -shuang2 sh uang2 -shuang3 sh uang3 -shuang4 sh uang4 -shuang5 sh 
uang5 -weng1 w ung1 -weng2 w ung2 -weng3 w ung3 -weng4 w ung4 -weng5 w ung5 -dong1 d ung1 -dong2 d ung2 -dong3 d ung3 -dong4 d ung4 -dong5 d ung5 -tong1 t ung1 -tong2 t ung2 -tong3 t ung3 -tong4 t ung4 -tong5 t ung5 -nong1 n ung1 -nong2 n ung2 -nong3 n ung3 -nong4 n ung4 -nong5 n ung5 -long1 l ung1 -long2 l ung2 -long3 l ung3 -long4 l ung4 -long5 l ung5 -gong1 g ung1 -gong2 g ung2 -gong3 g ung3 -gong4 g ung4 -gong5 g ung5 -kong1 k ung1 -kong2 k ung2 -kong3 k ung3 -kong4 k ung4 -kong5 k ung5 -hong1 h ung1 -hong2 h ung2 -hong3 h ung3 -hong4 h ung4 -hong5 h ung5 -zhong1 zh ung1 -zhong2 zh ung2 -zhong3 zh ung3 -zhong4 zh ung4 -zhong5 zh ung5 -chong1 ch ung1 -chong2 ch ung2 -chong3 ch ung3 -chong4 ch ung4 -chong5 ch ung5 -rong1 r ung1 -rong2 r ung2 -rong3 r ung3 -rong4 r ung4 -rong5 r ung5 -zong1 z ung1 -zong2 z ung2 -zong3 z ung3 -zong4 z ung4 -zong5 z ung5 -cong1 c ung1 -cong2 c ung2 -cong3 c ung3 -cong4 c ung4 -cong5 c ung5 -song1 s ung1 -song2 s ung2 -song3 s ung3 -song4 s ung4 -song5 s ung5 -yu1 y v1 -yu2 y v2 -yu3 y v3 -yu4 y v4 -yu5 y v5 -nv1 n v1 -nv2 n v2 -nv3 n v3 -nv4 n v4 -nv5 n v5 -lv1 l v1 -lv2 l v2 -lv3 l v3 -lv4 l v4 -lv5 l v5 -ju1 j v1 -ju2 j v2 -ju3 j v3 -ju4 j v4 -ju5 j v5 -qu1 q v1 -qu2 q v2 -qu3 q v3 -qu4 q v4 -qu5 q v5 -xu1 x v1 -xu2 x v2 -xu3 x v3 -xu4 x v4 -xu5 x v5 -yue1 y ve1 -yue2 y ve2 -yue3 y ve3 -yue4 y ve4 -yue5 y ve5 -nue1 n ve1 -nue2 n ve2 -nue3 n ve3 -nue4 n ve4 -nue5 n ve5 -nve1 n ve1 -nve2 n ve2 -nve3 n ve3 -nve4 n ve4 -nve5 n ve5 -lue1 l ve1 -lue2 l ve2 -lue3 l ve3 -lue4 l ve4 -lue5 l ve5 -lve1 l ve1 -lve2 l ve2 -lve3 l ve3 -lve4 l ve4 -lve5 l ve5 -jue1 j ve1 -jue2 j ve2 -jue3 j ve3 -jue4 j ve4 -jue5 j ve5 -que1 q ve1 -que2 q ve2 -que3 q ve3 -que4 q ve4 -que5 q ve5 -xue1 x ve1 -xue2 x ve2 -xue3 x ve3 -xue4 x ve4 -xue5 x ve5 -yuan1 y van1 -yuan2 y van2 -yuan3 y van3 -yuan4 y van4 -yuan5 y van5 -juan1 j van1 -juan2 j van2 -juan3 j van3 -juan4 j van4 -juan5 j van5 -quan1 q van1 -quan2 q van2 -quan3 q van3 -quan4 q van4 -quan5 q van5 -xuan1 x van1 -xuan2 x van2 -xuan3 x van3 -xuan4 x van4 -xuan5 x van5 -yun1 y vn1 -yun2 y vn2 -yun3 y vn3 -yun4 y vn4 -yun5 y vn5 -jun1 j vn1 -jun2 j vn2 -jun3 j vn3 -jun4 j vn4 -jun5 j vn5 -qun1 q vn1 -qun2 q vn2 -qun3 q vn3 -qun4 q vn4 -qun5 q vn5 -xun1 x vn1 -xun2 x vn2 -xun3 x vn3 -xun4 x vn4 -xun5 x vn5 -yong1 y vng1 -yong2 y vng2 -yong3 y vng3 -yong4 y vng4 -yong5 y vng5 -jiong1 j vng1 -jiong2 j vng2 -jiong3 j vng3 -jiong4 j vng4 -jiong5 j vng5 -qiong1 q vng1 -qiong2 q vng2 -qiong3 q vng3 -qiong4 q vng4 -qiong5 q vng5 -xiong1 x vng1 -xiong2 x vng2 -xiong3 x vng3 -xiong4 x vng4 -xiong5 x vng5 -zhir1 zh iii1 &r -zhir2 zh iii2 &r -zhir3 zh iii3 &r -zhir4 zh iii4 &r -zhir5 zh iii5 &r -chir1 ch iii1 &r -chir2 ch iii2 &r -chir3 ch iii3 &r -chir4 ch iii4 &r -chir5 ch iii5 &r -shir1 sh iii1 &r -shir2 sh iii2 &r -shir3 sh iii3 &r -shir4 sh iii4 &r -shir5 sh iii5 &r -rir1 r iii1 &r -rir2 r iii2 &r -rir3 r iii3 &r -rir4 r iii4 &r -rir5 r iii5 &r -zir1 z ii1 &r -zir2 z ii2 &r -zir3 z ii3 &r -zir4 z ii4 &r -zir5 z ii5 &r -cir1 c ii1 &r -cir2 c ii2 &r -cir3 c ii3 &r -cir4 c ii4 &r -cir5 c ii5 &r -sir1 s ii1 &r -sir2 s ii2 &r -sir3 s ii3 &r -sir4 s ii4 &r -sir5 s ii5 &r -ar1 a1 &r -ar2 a2 &r -ar3 a3 &r -ar4 a4 &r -ar5 a5 &r -bar1 b a1 &r -bar2 b a2 &r -bar3 b a3 &r -bar4 b a4 &r -bar5 b a5 &r -par1 p a1 &r -par2 p a2 &r -par3 p a3 &r -par4 p a4 &r -par5 p a5 &r -mar1 m a1 &r -mar2 m a2 &r -mar3 m a3 &r -mar4 m a4 &r -mar5 m a5 &r -far1 f a1 &r -far2 f a2 &r -far3 f a3 &r -far4 f a4 &r -far5 f a5 &r -dar1 d a1 &r -dar2 d a2 &r -dar3 d a3 &r 
-dar4 d a4 &r -dar5 d a5 &r -tar1 t a1 &r -tar2 t a2 &r -tar3 t a3 &r -tar4 t a4 &r -tar5 t a5 &r -nar1 n a1 &r -nar2 n a2 &r -nar3 n a3 &r -nar4 n a4 &r -nar5 n a5 &r -lar1 l a1 &r -lar2 l a2 &r -lar3 l a3 &r -lar4 l a4 &r -lar5 l a5 &r -gar1 g a1 &r -gar2 g a2 &r -gar3 g a3 &r -gar4 g a4 &r -gar5 g a5 &r -kar1 k a1 &r -kar2 k a2 &r -kar3 k a3 &r -kar4 k a4 &r -kar5 k a5 &r -har1 h a1 &r -har2 h a2 &r -har3 h a3 &r -har4 h a4 &r -har5 h a5 &r -zhar1 zh a1 &r -zhar2 zh a2 &r -zhar3 zh a3 &r -zhar4 zh a4 &r -zhar5 zh a5 &r -char1 ch a1 &r -char2 ch a2 &r -char3 ch a3 &r -char4 ch a4 &r -char5 ch a5 &r -shar1 sh a1 &r -shar2 sh a2 &r -shar3 sh a3 &r -shar4 sh a4 &r -shar5 sh a5 &r -zar1 z a1 &r -zar2 z a2 &r -zar3 z a3 &r -zar4 z a4 &r -zar5 z a5 &r -car1 c a1 &r -car2 c a2 &r -car3 c a3 &r -car4 c a4 &r -car5 c a5 &r -sar1 s a1 &r -sar2 s a2 &r -sar3 s a3 &r -sar4 s a4 &r -sar5 s a5 &r -or1 o1 &r -or2 o2 &r -or3 o3 &r -or4 o4 &r -or5 o5 &r -bor1 b uo1 &r -bor2 b uo2 &r -bor3 b uo3 &r -bor4 b uo4 &r -bor5 b uo5 &r -por1 p uo1 &r -por2 p uo2 &r -por3 p uo3 &r -por4 p uo4 &r -por5 p uo5 &r -mor1 m uo1 &r -mor2 m uo2 &r -mor3 m uo3 &r -mor4 m uo4 &r -mor5 m uo5 &r -for1 f uo1 &r -for2 f uo2 &r -for3 f uo3 &r -for4 f uo4 &r -for5 f uo5 &r -lor1 l o1 &r -lor2 l o2 &r -lor3 l o3 &r -lor4 l o4 &r -lor5 l o5 &r -mer1 m e1 &r -mer2 m e2 &r -mer3 m e3 &r -mer4 m e4 &r -mer5 m e5 &r -der1 d e1 &r -der2 d e2 &r -der3 d e3 &r -der4 d e4 &r -der5 d e5 &r -ter1 t e1 &r -ter2 t e2 &r -ter3 t e3 &r -ter4 t e4 &r -ter5 t e5 &r -ner1 n e1 &r -ner2 n e2 &r -ner3 n e3 &r -ner4 n e4 &r -ner5 n e5 &r -ler1 l e1 &r -ler2 l e2 &r -ler3 l e3 &r -ler4 l e4 &r -ler5 l e5 &r -ger1 g e1 &r -ger2 g e2 &r -ger3 g e3 &r -ger4 g e4 &r -ger5 g e5 &r -ker1 k e1 &r -ker2 k e2 &r -ker3 k e3 &r -ker4 k e4 &r -ker5 k e5 &r -her1 h e1 &r -her2 h e2 &r -her3 h e3 &r -her4 h e4 &r -her5 h e5 &r -zher1 zh e1 &r -zher2 zh e2 &r -zher3 zh e3 &r -zher4 zh e4 &r -zher5 zh e5 &r -cher1 ch e1 &r -cher2 ch e2 &r -cher3 ch e3 &r -cher4 ch e4 &r -cher5 ch e5 &r -sher1 sh e1 &r -sher2 sh e2 &r -sher3 sh e3 &r -sher4 sh e4 &r -sher5 sh e5 &r -rer1 r e1 &r -rer2 r e2 &r -rer3 r e3 &r -rer4 r e4 &r -rer5 r e5 &r -zer1 z e1 &r -zer2 z e2 &r -zer3 z e3 &r -zer4 z e4 &r -zer5 z e5 &r -cer1 c e1 &r -cer2 c e2 &r -cer3 c e3 &r -cer4 c e4 &r -cer5 c e5 &r -ser1 s e1 &r -ser2 s e2 &r -ser3 s e3 &r -ser4 s e4 &r -ser5 s e5 &r -air1 ai1 &r -air2 ai2 &r -air3 ai3 &r -air4 ai4 &r -air5 ai5 &r -bair1 b ai1 &r -bair2 b ai2 &r -bair3 b ai3 &r -bair4 b ai4 &r -bair5 b ai5 &r -pair1 p ai1 &r -pair2 p ai2 &r -pair3 p ai3 &r -pair4 p ai4 &r -pair5 p ai5 &r -mair1 m ai1 &r -mair2 m ai2 &r -mair3 m ai3 &r -mair4 m ai4 &r -mair5 m ai5 &r -dair1 d ai1 &r -dair2 d ai2 &r -dair3 d ai3 &r -dair4 d ai4 &r -dair5 d ai5 &r -tair1 t ai1 &r -tair2 t ai2 &r -tair3 t ai3 &r -tair4 t ai4 &r -tair5 t ai5 &r -nair1 n ai1 &r -nair2 n ai2 &r -nair3 n ai3 &r -nair4 n ai4 &r -nair5 n ai5 &r -lair1 l ai1 &r -lair2 l ai2 &r -lair3 l ai3 &r -lair4 l ai4 &r -lair5 l ai5 &r -gair1 g ai1 &r -gair2 g ai2 &r -gair3 g ai3 &r -gair4 g ai4 &r -gair5 g ai5 &r -kair1 k ai1 &r -kair2 k ai2 &r -kair3 k ai3 &r -kair4 k ai4 &r -kair5 k ai5 &r -hair1 h ai1 &r -hair2 h ai2 &r -hair3 h ai3 &r -hair4 h ai4 &r -hair5 h ai5 &r -zhair1 zh ai1 &r -zhair2 zh ai2 &r -zhair3 zh ai3 &r -zhair4 zh ai4 &r -zhair5 zh ai5 &r -chair1 ch ai1 &r -chair2 ch ai2 &r -chair3 ch ai3 &r -chair4 ch ai4 &r -chair5 ch ai5 &r -shair1 sh ai1 &r -shair2 sh ai2 &r -shair3 sh ai3 &r -shair4 sh ai4 &r -shair5 sh ai5 &r -zair1 z ai1 &r 
-zair2 z ai2 &r -zair3 z ai3 &r -zair4 z ai4 &r -zair5 z ai5 &r -cair1 c ai1 &r -cair2 c ai2 &r -cair3 c ai3 &r -cair4 c ai4 &r -cair5 c ai5 &r -sair1 s ai1 &r -sair2 s ai2 &r -sair3 s ai3 &r -sair4 s ai4 &r -sair5 s ai5 &r -beir1 b ei1 &r -beir2 b ei2 &r -beir3 b ei3 &r -beir4 b ei4 &r -beir5 b ei5 &r -peir1 p ei1 &r -peir2 p ei2 &r -peir3 p ei3 &r -peir4 p ei4 &r -peir5 p ei5 &r -meir1 m ei1 &r -meir2 m ei2 &r -meir3 m ei3 &r -meir4 m ei4 &r -meir5 m ei5 &r -feir1 f ei1 &r -feir2 f ei2 &r -feir3 f ei3 &r -feir4 f ei4 &r -feir5 f ei5 &r -deir1 d ei1 &r -deir2 d ei2 &r -deir3 d ei3 &r -deir4 d ei4 &r -deir5 d ei5 &r -teir1 t ei1 &r -teir2 t ei2 &r -teir3 t ei3 &r -teir4 t ei4 &r -teir5 t ei5 &r -neir1 n ei1 &r -neir2 n ei2 &r -neir3 n ei3 &r -neir4 n ei4 &r -neir5 n ei5 &r -leir1 l ei1 &r -leir2 l ei2 &r -leir3 l ei3 &r -leir4 l ei4 &r -leir5 l ei5 &r -geir1 g ei1 &r -geir2 g ei2 &r -geir3 g ei3 &r -geir4 g ei4 &r -geir5 g ei5 &r -keir1 k ei1 &r -keir2 k ei2 &r -keir3 k ei3 &r -keir4 k ei4 &r -keir5 k ei5 &r -heir1 h ei1 &r -heir2 h ei2 &r -heir3 h ei3 &r -heir4 h ei4 &r -heir5 h ei5 &r -zheir1 zh ei1 &r -zheir2 zh ei2 &r -zheir3 zh ei3 &r -zheir4 zh ei4 &r -zheir5 zh ei5 &r -sheir1 sh ei1 &r -sheir2 sh ei2 &r -sheir3 sh ei3 &r -sheir4 sh ei4 &r -sheir5 sh ei5 &r -zeir1 z ei1 &r -zeir2 z ei2 &r -zeir3 z ei3 &r -zeir4 z ei4 &r -zeir5 z ei5 &r -aor1 au1 &r -aor2 au2 &r -aor3 au3 &r -aor4 au4 &r -aor5 au5 &r -baor1 b au1 &r -baor2 b au2 &r -baor3 b au3 &r -baor4 b au4 &r -baor5 b au5 &r -paor1 p au1 &r -paor2 p au2 &r -paor3 p au3 &r -paor4 p au4 &r -paor5 p au5 &r -maor1 m au1 &r -maor2 m au2 &r -maor3 m au3 &r -maor4 m au4 &r -maor5 m au5 &r -daor1 d au1 &r -daor2 d au2 &r -daor3 d au3 &r -daor4 d au4 &r -daor5 d au5 &r -taor1 t au1 &r -taor2 t au2 &r -taor3 t au3 &r -taor4 t au4 &r -taor5 t au5 &r -naor1 n au1 &r -naor2 n au2 &r -naor3 n au3 &r -naor4 n au4 &r -naor5 n au5 &r -laor1 l au1 &r -laor2 l au2 &r -laor3 l au3 &r -laor4 l au4 &r -laor5 l au5 &r -gaor1 g au1 &r -gaor2 g au2 &r -gaor3 g au3 &r -gaor4 g au4 &r -gaor5 g au5 &r -kaor1 k au1 &r -kaor2 k au2 &r -kaor3 k au3 &r -kaor4 k au4 &r -kaor5 k au5 &r -haor1 h au1 &r -haor2 h au2 &r -haor3 h au3 &r -haor4 h au4 &r -haor5 h au5 &r -zhaor1 zh au1 &r -zhaor2 zh au2 &r -zhaor3 zh au3 &r -zhaor4 zh au4 &r -zhaor5 zh au5 &r -chaor1 ch au1 &r -chaor2 ch au2 &r -chaor3 ch au3 &r -chaor4 ch au4 &r -chaor5 ch au5 &r -shaor1 sh au1 &r -shaor2 sh au2 &r -shaor3 sh au3 &r -shaor4 sh au4 &r -shaor5 sh au5 &r -raor1 r au1 &r -raor2 r au2 &r -raor3 r au3 &r -raor4 r au4 &r -raor5 r au5 &r -zaor1 z au1 &r -zaor2 z au2 &r -zaor3 z au3 &r -zaor4 z au4 &r -zaor5 z au5 &r -caor1 c au1 &r -caor2 c au2 &r -caor3 c au3 &r -caor4 c au4 &r -caor5 c au5 &r -saor1 s au1 &r -saor2 s au2 &r -saor3 s au3 &r -saor4 s au4 &r -saor5 s au5 &r -our1 ou1 &r -our2 ou2 &r -our3 ou3 &r -our4 ou4 &r -our5 ou5 &r -pour1 p ou1 &r -pour2 p ou2 &r -pour3 p ou3 &r -pour4 p ou4 &r -pour5 p ou5 &r -mour1 m ou1 &r -mour2 m ou2 &r -mour3 m ou3 &r -mour4 m ou4 &r -mour5 m ou5 &r -four1 f ou1 &r -four2 f ou2 &r -four3 f ou3 &r -four4 f ou4 &r -four5 f ou5 &r -dour1 d ou1 &r -dour2 d ou2 &r -dour3 d ou3 &r -dour4 d ou4 &r -dour5 d ou5 &r -tour1 t ou1 &r -tour2 t ou2 &r -tour3 t ou3 &r -tour4 t ou4 &r -tour5 t ou5 &r -nour1 n ou1 &r -nour2 n ou2 &r -nour3 n ou3 &r -nour4 n ou4 &r -nour5 n ou5 &r -lour1 l ou1 &r -lour2 l ou2 &r -lour3 l ou3 &r -lour4 l ou4 &r -lour5 l ou5 &r -gour1 g ou1 &r -gour2 g ou2 &r -gour3 g ou3 &r -gour4 g ou4 &r -gour5 g ou5 &r -kour1 k ou1 &r -kour2 k ou2 
&r -kour3 k ou3 &r -kour4 k ou4 &r -kour5 k ou5 &r -hour1 h ou1 &r -hour2 h ou2 &r -hour3 h ou3 &r -hour4 h ou4 &r -hour5 h ou5 &r -zhour1 zh ou1 &r -zhour2 zh ou2 &r -zhour3 zh ou3 &r -zhour4 zh ou4 &r -zhour5 zh ou5 &r -chour1 ch ou1 &r -chour2 ch ou2 &r -chour3 ch ou3 &r -chour4 ch ou4 &r -chour5 ch ou5 &r -shour1 sh ou1 &r -shour2 sh ou2 &r -shour3 sh ou3 &r -shour4 sh ou4 &r -shour5 sh ou5 &r -rour1 r ou1 &r -rour2 r ou2 &r -rour3 r ou3 &r -rour4 r ou4 &r -rour5 r ou5 &r -zour1 z ou1 &r -zour2 z ou2 &r -zour3 z ou3 &r -zour4 z ou4 &r -zour5 z ou5 &r -cour1 c ou1 &r -cour2 c ou2 &r -cour3 c ou3 &r -cour4 c ou4 &r -cour5 c ou5 &r -sour1 s ou1 &r -sour2 s ou2 &r -sour3 s ou3 &r -sour4 s ou4 &r -sour5 s ou5 &r -anr1 an1 &r -anr2 an2 &r -anr3 an3 &r -anr4 an4 &r -anr5 an5 &r -banr1 b an1 &r -banr2 b an2 &r -banr3 b an3 &r -banr4 b an4 &r -banr5 b an5 &r -panr1 p an1 &r -panr2 p an2 &r -panr3 p an3 &r -panr4 p an4 &r -panr5 p an5 &r -manr1 m an1 &r -manr2 m an2 &r -manr3 m an3 &r -manr4 m an4 &r -manr5 m an5 &r -fanr1 f an1 &r -fanr2 f an2 &r -fanr3 f an3 &r -fanr4 f an4 &r -fanr5 f an5 &r -danr1 d an1 &r -danr2 d an2 &r -danr3 d an3 &r -danr4 d an4 &r -danr5 d an5 &r -tanr1 t an1 &r -tanr2 t an2 &r -tanr3 t an3 &r -tanr4 t an4 &r -tanr5 t an5 &r -nanr1 n an1 &r -nanr2 n an2 &r -nanr3 n an3 &r -nanr4 n an4 &r -nanr5 n an5 &r -lanr1 l an1 &r -lanr2 l an2 &r -lanr3 l an3 &r -lanr4 l an4 &r -lanr5 l an5 &r -ganr1 g an1 &r -ganr2 g an2 &r -ganr3 g an3 &r -ganr4 g an4 &r -ganr5 g an5 &r -kanr1 k an1 &r -kanr2 k an2 &r -kanr3 k an3 &r -kanr4 k an4 &r -kanr5 k an5 &r -hanr1 h an1 &r -hanr2 h an2 &r -hanr3 h an3 &r -hanr4 h an4 &r -hanr5 h an5 &r -zhanr1 zh an1 &r -zhanr2 zh an2 &r -zhanr3 zh an3 &r -zhanr4 zh an4 &r -zhanr5 zh an5 &r -chanr1 ch an1 &r -chanr2 ch an2 &r -chanr3 ch an3 &r -chanr4 ch an4 &r -chanr5 ch an5 &r -shanr1 sh an1 &r -shanr2 sh an2 &r -shanr3 sh an3 &r -shanr4 sh an4 &r -shanr5 sh an5 &r -ranr1 r an1 &r -ranr2 r an2 &r -ranr3 r an3 &r -ranr4 r an4 &r -ranr5 r an5 &r -zanr1 z an1 &r -zanr2 z an2 &r -zanr3 z an3 &r -zanr4 z an4 &r -zanr5 z an5 &r -canr1 c an1 &r -canr2 c an2 &r -canr3 c an3 &r -canr4 c an4 &r -canr5 c an5 &r -sanr1 s an1 &r -sanr2 s an2 &r -sanr3 s an3 &r -sanr4 s an4 &r -sanr5 s an5 &r -benr1 b en1 &r -benr2 b en2 &r -benr3 b en3 &r -benr4 b en4 &r -benr5 b en5 &r -penr1 p en1 &r -penr2 p en2 &r -penr3 p en3 &r -penr4 p en4 &r -penr5 p en5 &r -menr1 m en1 &r -menr2 m en2 &r -menr3 m en3 &r -menr4 m en4 &r -menr5 m en5 &r -fenr1 f en1 &r -fenr2 f en2 &r -fenr3 f en3 &r -fenr4 f en4 &r -fenr5 f en5 &r -denr1 d en1 &r -denr2 d en2 &r -denr3 d en3 &r -denr4 d en4 &r -denr5 d en5 &r -nenr1 n en1 &r -nenr2 n en2 &r -nenr3 n en3 &r -nenr4 n en4 &r -nenr5 n en5 &r -genr1 g en1 &r -genr2 g en2 &r -genr3 g en3 &r -genr4 g en4 &r -genr5 g en5 &r -kenr1 k en1 &r -kenr2 k en2 &r -kenr3 k en3 &r -kenr4 k en4 &r -kenr5 k en5 &r -henr1 h en1 &r -henr2 h en2 &r -henr3 h en3 &r -henr4 h en4 &r -henr5 h en5 &r -zhenr1 zh en1 &r -zhenr2 zh en2 &r -zhenr3 zh en3 &r -zhenr4 zh en4 &r -zhenr5 zh en5 &r -chenr1 ch en1 &r -chenr2 ch en2 &r -chenr3 ch en3 &r -chenr4 ch en4 &r -chenr5 ch en5 &r -shenr1 sh en1 &r -shenr2 sh en2 &r -shenr3 sh en3 &r -shenr4 sh en4 &r -shenr5 sh en5 &r -renr1 r en1 &r -renr2 r en2 &r -renr3 r en3 &r -renr4 r en4 &r -renr5 r en5 &r -zenr1 z en1 &r -zenr2 z en2 &r -zenr3 z en3 &r -zenr4 z en4 &r -zenr5 z en5 &r -cenr1 c en1 &r -cenr2 c en2 &r -cenr3 c en3 &r -cenr4 c en4 &r -cenr5 c en5 &r -senr1 s en1 &r -senr2 s en2 &r -senr3 s en3 &r -senr4 s en4 &r 
-senr5 s en5 &r -angr1 ang1 &r -angr2 ang2 &r -angr3 ang3 &r -angr4 ang4 &r -angr5 ang5 &r -bangr1 b ang1 &r -bangr2 b ang2 &r -bangr3 b ang3 &r -bangr4 b ang4 &r -bangr5 b ang5 &r -pangr1 p ang1 &r -pangr2 p ang2 &r -pangr3 p ang3 &r -pangr4 p ang4 &r -pangr5 p ang5 &r -mangr1 m ang1 &r -mangr2 m ang2 &r -mangr3 m ang3 &r -mangr4 m ang4 &r -mangr5 m ang5 &r -fangr1 f ang1 &r -fangr2 f ang2 &r -fangr3 f ang3 &r -fangr4 f ang4 &r -fangr5 f ang5 &r -dangr1 d ang1 &r -dangr2 d ang2 &r -dangr3 d ang3 &r -dangr4 d ang4 &r -dangr5 d ang5 &r -tangr1 t ang1 &r -tangr2 t ang2 &r -tangr3 t ang3 &r -tangr4 t ang4 &r -tangr5 t ang5 &r -nangr1 n ang1 &r -nangr2 n ang2 &r -nangr3 n ang3 &r -nangr4 n ang4 &r -nangr5 n ang5 &r -langr1 l ang1 &r -langr2 l ang2 &r -langr3 l ang3 &r -langr4 l ang4 &r -langr5 l ang5 &r -gangr1 g ang1 &r -gangr2 g ang2 &r -gangr3 g ang3 &r -gangr4 g ang4 &r -gangr5 g ang5 &r -kangr1 k ang1 &r -kangr2 k ang2 &r -kangr3 k ang3 &r -kangr4 k ang4 &r -kangr5 k ang5 &r -hangr1 h ang1 &r -hangr2 h ang2 &r -hangr3 h ang3 &r -hangr4 h ang4 &r -hangr5 h ang5 &r -zhangr1 zh ang1 &r -zhangr2 zh ang2 &r -zhangr3 zh ang3 &r -zhangr4 zh ang4 &r -zhangr5 zh ang5 &r -changr1 ch ang1 &r -changr2 ch ang2 &r -changr3 ch ang3 &r -changr4 ch ang4 &r -changr5 ch ang5 &r -shangr1 sh ang1 &r -shangr2 sh ang2 &r -shangr3 sh ang3 &r -shangr4 sh ang4 &r -shangr5 sh ang5 &r -rangr1 r ang1 &r -rangr2 r ang2 &r -rangr3 r ang3 &r -rangr4 r ang4 &r -rangr5 r ang5 &r -zangr1 z ang1 &r -zangr2 z ang2 &r -zangr3 z ang3 &r -zangr4 z ang4 &r -zangr5 z ang5 &r -cangr1 c ang1 &r -cangr2 c ang2 &r -cangr3 c ang3 &r -cangr4 c ang4 &r -cangr5 c ang5 &r -sangr1 s ang1 &r -sangr2 s ang2 &r -sangr3 s ang3 &r -sangr4 s ang4 &r -sangr5 s ang5 &r -bengr1 b eng1 &r -bengr2 b eng2 &r -bengr3 b eng3 &r -bengr4 b eng4 &r -bengr5 b eng5 &r -pengr1 p eng1 &r -pengr2 p eng2 &r -pengr3 p eng3 &r -pengr4 p eng4 &r -pengr5 p eng5 &r -mengr1 m eng1 &r -mengr2 m eng2 &r -mengr3 m eng3 &r -mengr4 m eng4 &r -mengr5 m eng5 &r -fengr1 f eng1 &r -fengr2 f eng2 &r -fengr3 f eng3 &r -fengr4 f eng4 &r -fengr5 f eng5 &r -dengr1 d eng1 &r -dengr2 d eng2 &r -dengr3 d eng3 &r -dengr4 d eng4 &r -dengr5 d eng5 &r -tengr1 t eng1 &r -tengr2 t eng2 &r -tengr3 t eng3 &r -tengr4 t eng4 &r -tengr5 t eng5 &r -nengr1 n eng1 &r -nengr2 n eng2 &r -nengr3 n eng3 &r -nengr4 n eng4 &r -nengr5 n eng5 &r -lengr1 l eng1 &r -lengr2 l eng2 &r -lengr3 l eng3 &r -lengr4 l eng4 &r -lengr5 l eng5 &r -gengr1 g eng1 &r -gengr2 g eng2 &r -gengr3 g eng3 &r -gengr4 g eng4 &r -gengr5 g eng5 &r -kengr1 k eng1 &r -kengr2 k eng2 &r -kengr3 k eng3 &r -kengr4 k eng4 &r -kengr5 k eng5 &r -hengr1 h eng1 &r -hengr2 h eng2 &r -hengr3 h eng3 &r -hengr4 h eng4 &r -hengr5 h eng5 &r -zhengr1 zh eng1 &r -zhengr2 zh eng2 &r -zhengr3 zh eng3 &r -zhengr4 zh eng4 &r -zhengr5 zh eng5 &r -chengr1 ch eng1 &r -chengr2 ch eng2 &r -chengr3 ch eng3 &r -chengr4 ch eng4 &r -chengr5 ch eng5 &r -shengr1 sh eng1 &r -shengr2 sh eng2 &r -shengr3 sh eng3 &r -shengr4 sh eng4 &r -shengr5 sh eng5 &r -rengr1 r eng1 &r -rengr2 r eng2 &r -rengr3 r eng3 &r -rengr4 r eng4 &r -rengr5 r eng5 &r -zengr1 z eng1 &r -zengr2 z eng2 &r -zengr3 z eng3 &r -zengr4 z eng4 &r -zengr5 z eng5 &r -cengr1 c eng1 &r -cengr2 c eng2 &r -cengr3 c eng3 &r -cengr4 c eng4 &r -cengr5 c eng5 &r -sengr1 s eng1 &r -sengr2 s eng2 &r -sengr3 s eng3 &r -sengr4 s eng4 &r -sengr5 s eng5 &r -yir1 y i1 &r -yir2 y i2 &r -yir3 y i3 &r -yir4 y i4 &r -yir5 y i5 &r -bir1 b i1 &r -bir2 b i2 &r -bir3 b i3 &r -bir4 b i4 &r -bir5 b i5 &r -pir1 p i1 &r -pir2 p 
i2 &r -pir3 p i3 &r -pir4 p i4 &r -pir5 p i5 &r -mir1 m i1 &r -mir2 m i2 &r -mir3 m i3 &r -mir4 m i4 &r -mir5 m i5 &r -dir1 d i1 &r -dir2 d i2 &r -dir3 d i3 &r -dir4 d i4 &r -dir5 d i5 &r -tir1 t i1 &r -tir2 t i2 &r -tir3 t i3 &r -tir4 t i4 &r -tir5 t i5 &r -nir1 n i1 &r -nir2 n i2 &r -nir3 n i3 &r -nir4 n i4 &r -nir5 n i5 &r -lir1 l i1 &r -lir2 l i2 &r -lir3 l i3 &r -lir4 l i4 &r -lir5 l i5 &r -jir1 j i1 &r -jir2 j i2 &r -jir3 j i3 &r -jir4 j i4 &r -jir5 j i5 &r -qir1 q i1 &r -qir2 q i2 &r -qir3 q i3 &r -qir4 q i4 &r -qir5 q i5 &r -xir1 x i1 &r -xir2 x i2 &r -xir3 x i3 &r -xir4 x i4 &r -xir5 x i5 &r -yar1 y ia1 &r -yar2 y ia2 &r -yar3 y ia3 &r -yar4 y ia4 &r -yar5 y ia5 &r -diar1 d ia1 &r -diar2 d ia2 &r -diar3 d ia3 &r -diar4 d ia4 &r -diar5 d ia5 &r -liar1 l ia1 &r -liar2 l ia2 &r -liar3 l ia3 &r -liar4 l ia4 &r -liar5 l ia5 &r -jiar1 j ia1 &r -jiar2 j ia2 &r -jiar3 j ia3 &r -jiar4 j ia4 &r -jiar5 j ia5 &r -qiar1 q ia1 &r -qiar2 q ia2 &r -qiar3 q ia3 &r -qiar4 q ia4 &r -qiar5 q ia5 &r -xiar1 x ia1 &r -xiar2 x ia2 &r -xiar3 x ia3 &r -xiar4 x ia4 &r -xiar5 x ia5 &r -yor1 y io1 &r -yor2 y io2 &r -yor3 y io3 &r -yor4 y io4 &r -yor5 y io5 &r -yer1 y ie1 &r -yer2 y ie2 &r -yer3 y ie3 &r -yer4 y ie4 &r -yer5 y ie5 &r -bier1 b ie1 &r -bier2 b ie2 &r -bier3 b ie3 &r -bier4 b ie4 &r -bier5 b ie5 &r -pier1 p ie1 &r -pier2 p ie2 &r -pier3 p ie3 &r -pier4 p ie4 &r -pier5 p ie5 &r -mier1 m ie1 &r -mier2 m ie2 &r -mier3 m ie3 &r -mier4 m ie4 &r -mier5 m ie5 &r -dier1 d ie1 &r -dier2 d ie2 &r -dier3 d ie3 &r -dier4 d ie4 &r -dier5 d ie5 &r -tier1 t ie1 &r -tier2 t ie2 &r -tier3 t ie3 &r -tier4 t ie4 &r -tier5 t ie5 &r -nier1 n ie1 &r -nier2 n ie2 &r -nier3 n ie3 &r -nier4 n ie4 &r -nier5 n ie5 &r -lier1 l ie1 &r -lier2 l ie2 &r -lier3 l ie3 &r -lier4 l ie4 &r -lier5 l ie5 &r -jier1 j ie1 &r -jier2 j ie2 &r -jier3 j ie3 &r -jier4 j ie4 &r -jier5 j ie5 &r -qier1 q ie1 &r -qier2 q ie2 &r -qier3 q ie3 &r -qier4 q ie4 &r -qier5 q ie5 &r -xier1 x ie1 &r -xier2 x ie2 &r -xier3 x ie3 &r -xier4 x ie4 &r -xier5 x ie5 &r -yair1 y ai1 &r -yair2 y ai2 &r -yair3 y ai3 &r -yair4 y ai4 &r -yair5 y ai5 &r -yaor1 y au1 &r -yaor2 y au2 &r -yaor3 y au3 &r -yaor4 y au4 &r -yaor5 y au5 &r -biaor1 b iau1 &r -biaor2 b iau2 &r -biaor3 b iau3 &r -biaor4 b iau4 &r -biaor5 b iau5 &r -piaor1 p iau1 &r -piaor2 p iau2 &r -piaor3 p iau3 &r -piaor4 p iau4 &r -piaor5 p iau5 &r -miaor1 m iau1 &r -miaor2 m iau2 &r -miaor3 m iau3 &r -miaor4 m iau4 &r -miaor5 m iau5 &r -fiaor1 f iau1 &r -fiaor2 f iau2 &r -fiaor3 f iau3 &r -fiaor4 f iau4 &r -fiaor5 f iau5 &r -diaor1 d iau1 &r -diaor2 d iau2 &r -diaor3 d iau3 &r -diaor4 d iau4 &r -diaor5 d iau5 &r -tiaor1 t iau1 &r -tiaor2 t iau2 &r -tiaor3 t iau3 &r -tiaor4 t iau4 &r -tiaor5 t iau5 &r -niaor1 n iau1 &r -niaor2 n iau2 &r -niaor3 n iau3 &r -niaor4 n iau4 &r -niaor5 n iau5 &r -liaor1 l iau1 &r -liaor2 l iau2 &r -liaor3 l iau3 &r -liaor4 l iau4 &r -liaor5 l iau5 &r -jiaor1 j iau1 &r -jiaor2 j iau2 &r -jiaor3 j iau3 &r -jiaor4 j iau4 &r -jiaor5 j iau5 &r -qiaor1 q iau1 &r -qiaor2 q iau2 &r -qiaor3 q iau3 &r -qiaor4 q iau4 &r -qiaor5 q iau5 &r -xiaor1 x iau1 &r -xiaor2 x iau2 &r -xiaor3 x iau3 &r -xiaor4 x iau4 &r -xiaor5 x iau5 &r -your1 y iou1 &r -your2 y iou2 &r -your3 y iou3 &r -your4 y iou4 &r -your5 y iou5 &r -miur1 m iou1 &r -miur2 m iou2 &r -miur3 m iou3 &r -miur4 m iou4 &r -miur5 m iou5 &r -diur1 d iou1 &r -diur2 d iou2 &r -diur3 d iou3 &r -diur4 d iou4 &r -diur5 d iou5 &r -niur1 n iou1 &r -niur2 n iou2 &r -niur3 n iou3 &r -niur4 n iou4 &r -niur5 n iou5 &r -liur1 l iou1 &r -liur2 l iou2 
&r -liur3 l iou3 &r -liur4 l iou4 &r -liur5 l iou5 &r -jiur1 j iou1 &r -jiur2 j iou2 &r -jiur3 j iou3 &r -jiur4 j iou4 &r -jiur5 j iou5 &r -qiur1 q iou1 &r -qiur2 q iou2 &r -qiur3 q iou3 &r -qiur4 q iou4 &r -qiur5 q iou5 &r -xiur1 xiou1 &r -xiur2 xiou2 &r -xiur3 xiou3 &r -xiur4 xiou4 &r -xiur5 xiou5 &r -yanr1 y ian1 &r -yanr2 y ian2 &r -yanr3 y ian3 &r -yanr4 y ian4 &r -yanr5 y ian5 &r -bianr1 b ian1 &r -bianr2 b ian2 &r -bianr3 b ian3 &r -bianr4 b ian4 &r -bianr5 b ian5 &r -pianr1 p ian1 &r -pianr2 p ian2 &r -pianr3 p ian3 &r -pianr4 p ian4 &r -pianr5 p ian5 &r -mianr1 m ian1 &r -mianr2 m ian2 &r -mianr3 m ian3 &r -mianr4 m ian4 &r -mianr5 m ian5 &r -dianr1 d ian1 &r -dianr2 d ian2 &r -dianr3 d ian3 &r -dianr4 d ian4 &r -dianr5 d ian5 &r -tianr1 t ian1 &r -tianr2 t ian2 &r -tianr3 t ian3 &r -tianr4 t ian4 &r -tianr5 t ian5 &r -nianr1 n ian1 &r -nianr2 n ian2 &r -nianr3 n ian3 &r -nianr4 n ian4 &r -nianr5 n ian5 &r -lianr1 l ian1 &r -lianr2 l ian2 &r -lianr3 l ian3 &r -lianr4 l ian4 &r -lianr5 l ian5 &r -jianr1 j ian1 &r -jianr2 j ian2 &r -jianr3 j ian3 &r -jianr4 j ian4 &r -jianr5 j ian5 &r -qianr1 q ian1 &r -qianr2 q ian2 &r -qianr3 q ian3 &r -qianr4 q ian4 &r -qianr5 q ian5 &r -xianr1 x ian1 &r -xianr2 x ian2 &r -xianr3 x ian3 &r -xianr4 x ian4 &r -xianr5 x ian5 &r -yinr1 y in1 &r -yinr2 y in2 &r -yinr3 y in3 &r -yinr4 y in4 &r -yinr5 y in5 &r -binr1 b in1 &r -binr2 b in2 &r -binr3 b in3 &r -binr4 b in4 &r -binr5 b in5 &r -pinr1 p in1 &r -pinr2 p in2 &r -pinr3 p in3 &r -pinr4 p in4 &r -pinr5 p in5 &r -minr1 m in1 &r -minr2 m in2 &r -minr3 m in3 &r -minr4 m in4 &r -minr5 m in5 &r -dinr1 d in1 &r -dinr2 d in2 &r -dinr3 d in3 &r -dinr4 d in4 &r -dinr5 d in5 &r -ninr1 n in1 &r -ninr2 n in2 &r -ninr3 n in3 &r -ninr4 n in4 &r -ninr5 n in5 &r -linr1 l in1 &r -linr2 l in2 &r -linr3 l in3 &r -linr4 l in4 &r -linr5 l in5 &r -jinr1 j in1 &r -jinr2 j in2 &r -jinr3 j in3 &r -jinr4 j in4 &r -jinr5 j in5 &r -qinr1 q in1 &r -qinr2 q in2 &r -qinr3 q in3 &r -qinr4 q in4 &r -qinr5 q in5 &r -xinr1 x in1 &r -xinr2 x in2 &r -xinr3 x in3 &r -xinr4 x in4 &r -xinr5 x in5 &r -yangr1 y iang1 &r -yangr2 y iang2 &r -yangr3 y iang3 &r -yangr4 y iang4 &r -yangr5 y iang5 &r -biangr1 b iang1 &r -biangr2 b iang2 &r -biangr3 b iang3 &r -biangr4 b iang4 &r -biangr5 b iang5 &r -niangr1 n iang1 &r -niangr2 n iang2 &r -niangr3 n iang3 &r -niangr4 n iang4 &r -niangr5 n iang5 &r -liangr1 l iang1 &r -liangr2 l iang2 &r -liangr3 l iang3 &r -liangr4 l iang4 &r -liangr5 l iang5 &r -jiangr1 j iang1 &r -jiangr2 j iang2 &r -jiangr3 j iang3 &r -jiangr4 j iang4 &r -jiangr5 j iang5 &r -qiangr1 q iang1 &r -qiangr2 q iang2 &r -qiangr3 q iang3 &r -qiangr4 q iang4 &r -qiangr5 q iang5 &r -xiangr1 x iang1 &r -xiangr2 x iang2 &r -xiangr3 x iang3 &r -xiangr4 x iang4 &r -xiangr5 x iang5 &r -yingr1 y ing1 &r -yingr2 y ing2 &r -yingr3 y ing3 &r -yingr4 y ing4 &r -yingr5 y ing5 &r -bingr1 b ing1 &r -bingr2 b ing2 &r -bingr3 b ing3 &r -bingr4 b ing4 &r -bingr5 b ing5 &r -pingr1 p ing1 &r -pingr2 p ing2 &r -pingr3 p ing3 &r -pingr4 p ing4 &r -pingr5 p ing5 &r -mingr1 m ing1 &r -mingr2 m ing2 &r -mingr3 m ing3 &r -mingr4 m ing4 &r -mingr5 m ing5 &r -dingr1 d ing1 &r -dingr2 d ing2 &r -dingr3 d ing3 &r -dingr4 d ing4 &r -dingr5 d ing5 &r -tingr1 t ing1 &r -tingr2 t ing2 &r -tingr3 t ing3 &r -tingr4 t ing4 &r -tingr5 t ing5 &r -ningr1 n ing1 &r -ningr2 n ing2 &r -ningr3 n ing3 &r -ningr4 n ing4 &r -ningr5 n ing5 &r -lingr1 l ing1 &r -lingr2 l ing2 &r -lingr3 l ing3 &r -lingr4 l ing4 &r -lingr5 l ing5 &r -jingr1 j ing1 &r -jingr2 j ing2 &r -jingr3 j ing3 
&r -jingr4 j ing4 &r -jingr5 j ing5 &r -qingr1 q ing1 &r -qingr2 q ing2 &r -qingr3 q ing3 &r -qingr4 q ing4 &r -qingr5 q ing5 &r -xingr1 x ing1 &r -xingr2 x ing2 &r -xingr3 x ing3 &r -xingr4 x ing4 &r -xingr5 x ing5 &r -wur1 w u1 &r -wur2 w u2 &r -wur3 w u3 &r -wur4 w u4 &r -wur5 w u5 &r -bur1 b u1 &r -bur2 b u2 &r -bur3 b u3 &r -bur4 b u4 &r -bur5 b u5 &r -pur1 p u1 &r -pur2 p u2 &r -pur3 p u3 &r -pur4 p u4 &r -pur5 p u5 &r -mur1 m u1 &r -mur2 m u2 &r -mur3 m u3 &r -mur4 m u4 &r -mur5 m u5 &r -fur1 f u1 &r -fur2 f u2 &r -fur3 f u3 &r -fur4 f u4 &r -fur5 f u5 &r -dur1 d u1 &r -dur2 d u2 &r -dur3 d u3 &r -dur4 d u4 &r -dur5 d u5 &r -tur1 t u1 &r -tur2 t u2 &r -tur3 t u3 &r -tur4 t u4 &r -tur5 t u5 &r -nur1 n u1 &r -nur2 n u2 &r -nur3 n u3 &r -nur4 n u4 &r -nur5 n u5 &r -lur1 l u1 &r -lur2 l u2 &r -lur3 l u3 &r -lur4 l u4 &r -lur5 l u5 &r -gur1 g u1 &r -gur2 g u2 &r -gur3 g u3 &r -gur4 g u4 &r -gur5 g u5 &r -kur1 k u1 &r -kur2 k u2 &r -kur3 k u3 &r -kur4 k u4 &r -kur5 k u5 &r -hur1 h u1 &r -hur2 h u2 &r -hur3 h u3 &r -hur4 h u4 &r -hur5 h u5 &r -zhur1 zh u1 &r -zhur2 zh u2 &r -zhur3 zh u3 &r -zhur4 zh u4 &r -zhur5 zh u5 &r -chur1 ch u1 &r -chur2 ch u2 &r -chur3 ch u3 &r -chur4 ch u4 &r -chur5 ch u5 &r -shur1 sh u1 &r -shur2 sh u2 &r -shur3 sh u3 &r -shur4 sh u4 &r -shur5 sh u5 &r -rur1 r u1 &r -rur2 r u2 &r -rur3 r u3 &r -rur4 r u4 &r -rur5 r u5 &r -zur1 z u1 &r -zur2 z u2 &r -zur3 z u3 &r -zur4 z u4 &r -zur5 z u5 &r -cur1 c u1 &r -cur2 c u2 &r -cur3 c u3 &r -cur4 c u4 &r -cur5 c u5 &r -sur1 s u1 &r -sur2 s u2 &r -sur3 s u3 &r -sur4 s u4 &r -sur5 s u5 &r -war1 w ua1 &r -war2 w ua2 &r -war3 w ua3 &r -war4 w ua4 &r -war5 w ua5 &r -guar1 g ua1 &r -guar2 g ua2 &r -guar3 g ua3 &r -guar4 g ua4 &r -guar5 g ua5 &r -kuar1 k ua1 &r -kuar2 k ua2 &r -kuar3 k ua3 &r -kuar4 k ua4 &r -kuar5 k ua5 &r -huar1 h ua1 &r -huar2 h ua2 &r -huar3 h ua3 &r -huar4 h ua4 &r -huar5 h ua5 &r -zhuar1 zh ua1 &r -zhuar2 zh ua2 &r -zhuar3 zh ua3 &r -zhuar4 zh ua4 &r -zhuar5 zh ua5 &r -chuar1 ch ua1 &r -chuar2 ch ua2 &r -chuar3 ch ua3 &r -chuar4 ch ua4 &r -chuar5 ch ua5 &r -shuar1 sh ua1 &r -shuar2 sh ua2 &r -shuar3 sh ua3 &r -shuar4 sh ua4 &r -shuar5 sh ua5 &r -wor1 w uo1 &r -wor2 w uo2 &r -wor3 w uo3 &r -wor4 w uo4 &r -wor5 w uo5 &r -duor1 d uo1 &r -duor2 d uo2 &r -duor3 d uo3 &r -duor4 d uo4 &r -duor5 d uo5 &r -tuor1 t uo1 &r -tuor2 t uo2 &r -tuor3 t uo3 &r -tuor4 t uo4 &r -tuor5 t uo5 &r -nuor1 n uo1 &r -nuor2 n uo2 &r -nuor3 n uo3 &r -nuor4 n uo4 &r -nuor5 n uo5 &r -luor1 l uo1 &r -luor2 l uo2 &r -luor3 l uo3 &r -luor4 l uo4 &r -luor5 l uo5 &r -guor1 g uo1 &r -guor2 g uo2 &r -guor3 g uo3 &r -guor4 g uo4 &r -guor5 g uo5 &r -kuor1 k uo1 &r -kuor2 k uo2 &r -kuor3 k uo3 &r -kuor4 k uo4 &r -kuor5 k uo5 &r -huor1 h uo1 &r -huor2 h uo2 &r -huor3 h uo3 &r -huor4 h uo4 &r -huor5 h uo5 &r -zhuor1 zh uo1 &r -zhuor2 zh uo2 &r -zhuor3 zh uo3 &r -zhuor4 zh uo4 &r -zhuor5 zh uo5 &r -chuor1 ch uo1 &r -chuor2 ch uo2 &r -chuor3 ch uo3 &r -chuor4 ch uo4 &r -chuor5 ch uo5 &r -shuor1 sh uo1 &r -shuor2 sh uo2 &r -shuor3 sh uo3 &r -shuor4 sh uo4 &r -shuor5 sh uo5 &r -ruor1 r uo1 &r -ruor2 r uo2 &r -ruor3 r uo3 &r -ruor4 r uo4 &r -ruor5 r uo5 &r -zuor1 z uo1 &r -zuor2 z uo2 &r -zuor3 z uo3 &r -zuor4 z uo4 &r -zuor5 z uo5 &r -cuor1 c uo1 &r -cuor2 c uo2 &r -cuor3 c uo3 &r -cuor4 c uo4 &r -cuor5 c uo5 &r -suor1 s uo1 &r -suor2 s uo2 &r -suor3 s uo3 &r -suor4 s uo4 &r -suor5 s uo5 &r -wair1 w uai1 &r -wair2 w uai2 &r -wair3 w uai3 &r -wair4 w uai4 &r -wair5 w uai5 &r -guair1 g uai1 &r -guair2 g uai2 &r -guair3 g uai3 &r -guair4 g uai4 &r -guair5 
g uai5 &r -kuair1 k uai1 &r -kuair2 k uai2 &r -kuair3 k uai3 &r -kuair4 k uai4 &r -kuair5 k uai5 &r -huair1 h uai1 &r -huair2 h uai2 &r -huair3 h uai3 &r -huair4 h uai4 &r -huair5 h uai5 &r -zhuair1 zh uai1 &r -zhuair2 zh uai2 &r -zhuair3 zh uai3 &r -zhuair4 zh uai4 &r -zhuair5 zh uai5 &r -chuair1 ch uai1 &r -chuair2 ch uai2 &r -chuair3 ch uai3 &r -chuair4 ch uai4 &r -chuair5 ch uai5 &r -shuair1 sh uai1 &r -shuair2 sh uai2 &r -shuair3 sh uai3 &r -shuair4 sh uai4 &r -shuair5 sh uai5 &r -weir1 w uei1 &r -weir2 w uei2 &r -weir3 w uei3 &r -weir4 w uei4 &r -weir5 w uei5 &r -duir1 d uei1 &r -duir2 d uei2 &r -duir3 d uei3 &r -duir4 d uei4 &r -duir5 d uei5 &r -tuir1 t uei1 &r -tuir2 t uei2 &r -tuir3 t uei3 &r -tuir4 t uei4 &r -tuir5 t uei5 &r -guir1 g uei1 &r -guir2 g uei2 &r -guir3 g uei3 &r -guir4 g uei4 &r -guir5 g uei5 &r -kuir1 k uei1 &r -kuir2 k uei2 &r -kuir3 k uei3 &r -kuir4 k uei4 &r -kuir5 k uei5 &r -huir1 h uei1 &r -huir2 h uei2 &r -huir3 h uei3 &r -huir4 h uei4 &r -huir5 h uei5 &r -zhuir1 zh uei1 &r -zhuir2 zh uei2 &r -zhuir3 zh uei3 &r -zhuir4 zh uei4 &r -zhuir5 zh uei5 &r -chuir1 ch uei1 &r -chuir2 ch uei2 &r -chuir3 ch uei3 &r -chuir4 ch uei4 &r -chuir5 ch uei5 &r -shuir1 sh uei1 &r -shuir2 sh uei2 &r -shuir3 sh uei3 &r -shuir4 sh uei4 &r -shuir5 sh uei5 &r -ruir1 r uei1 &r -ruir2 r uei2 &r -ruir3 r uei3 &r -ruir4 r uei4 &r -ruir5 r uei5 &r -zuir1 z uei1 &r -zuir2 z uei2 &r -zuir3 z uei3 &r -zuir4 z uei4 &r -zuir5 z uei5 &r -cuir1 c uei1 &r -cuir2 c uei2 &r -cuir3 c uei3 &r -cuir4 c uei4 &r -cuir5 c uei5 &r -suir1 s uei1 &r -suir2 s uei2 &r -suir3 s uei3 &r -suir4 s uei4 &r -suir5 s uei5 &r -wanr1 w uan1 &r -wanr2 w uan2 &r -wanr3 w uan3 &r -wanr4 w uan4 &r -wanr5 w uan5 &r -duanr1 d uan1 &r -duanr2 d uan2 &r -duanr3 d uan3 &r -duanr4 d uan4 &r -duanr5 d uan5 &r -tuanr1 t uan1 &r -tuanr2 t uan2 &r -tuanr3 t uan3 &r -tuanr4 t uan4 &r -tuanr5 t uan5 &r -nuanr1 n uan1 &r -nuanr2 n uan2 &r -nuanr3 n uan3 &r -nuanr4 n uan4 &r -nuanr5 n uan5 &r -luanr1 l uan1 &r -luanr2 l uan2 &r -luanr3 l uan3 &r -luanr4 l uan4 &r -luanr5 l uan5 &r -guanr1 g uan1 &r -guanr2 g uan2 &r -guanr3 g uan3 &r -guanr4 g uan4 &r -guanr5 g uan5 &r -kuanr1 k uan1 &r -kuanr2 k uan2 &r -kuanr3 k uan3 &r -kuanr4 k uan4 &r -kuanr5 k uan5 &r -huanr1 h uan1 &r -huanr2 h uan2 &r -huanr3 h uan3 &r -huanr4 h uan4 &r -huanr5 h uan5 &r -zhuanr1 zh uan1 &r -zhuanr2 zh uan2 &r -zhuanr3 zh uan3 &r -zhuanr4 zh uan4 &r -zhuanr5 zh uan5 &r -chuanr1 ch uan1 &r -chuanr2 ch uan2 &r -chuanr3 ch uan3 &r -chuanr4 ch uan4 &r -chuanr5 ch uan5 &r -shuanr1 sh uan1 &r -shuanr2 sh uan2 &r -shuanr3 sh uan3 &r -shuanr4 sh uan4 &r -shuanr5 sh uan5 &r -ruanr1 r uan1 &r -ruanr2 r uan2 &r -ruanr3 r uan3 &r -ruanr4 r uan4 &r -ruanr5 r uan5 &r -zuanr1 z uan1 &r -zuanr2 z uan2 &r -zuanr3 z uan3 &r -zuanr4 z uan4 &r -zuanr5 z uan5 &r -cuanr1 c uan1 &r -cuanr2 c uan2 &r -cuanr3 c uan3 &r -cuanr4 c uan4 &r -cuanr5 c uan5 &r -suanr1 s uan1 &r -suanr2 s uan2 &r -suanr3 s uan3 &r -suanr4 s uan4 &r -suanr5 s uan5 &r -wenr1 w uen1 &r -wenr2 w uen2 &r -wenr3 w uen3 &r -wenr4 w uen4 &r -wenr5 w uen5 &r -dunr1 d uen1 &r -dunr2 d uen2 &r -dunr3 d uen3 &r -dunr4 d uen4 &r -dunr5 d uen5 &r -tunr1 t uen1 &r -tunr2 t uen2 &r -tunr3 t uen3 &r -tunr4 t uen4 &r -tunr5 t uen5 &r -nunr1 n uen1 &r -nunr2 n uen2 &r -nunr3 n uen3 &r -nunr4 n uen4 &r -nunr5 n uen5 &r -lunr1 l uen1 &r -lunr2 l uen2 &r -lunr3 l uen3 &r -lunr4 l uen4 &r -lunr5 l uen5 &r -gunr1 g uen1 &r -gunr2 g uen2 &r -gunr3 g uen3 &r -gunr4 g uen4 &r -gunr5 g uen5 &r -kunr1 k uen1 &r -kunr2 k uen2 &r -kunr3 k 
uen3 &r -kunr4 k uen4 &r -kunr5 k uen5 &r -hunr1 h uen1 &r -hunr2 h uen2 &r -hunr3 h uen3 &r -hunr4 h uen4 &r -hunr5 h uen5 &r -zhunr1 zh uen1 &r -zhunr2 zh uen2 &r -zhunr3 zh uen3 &r -zhunr4 zh uen4 &r -zhunr5 zh uen5 &r -chunr1 ch uen1 &r -chunr2 ch uen2 &r -chunr3 ch uen3 &r -chunr4 ch uen4 &r -chunr5 ch uen5 &r -shunr1 sh uen1 &r -shunr2 sh uen2 &r -shunr3 sh uen3 &r -shunr4 sh uen4 &r -shunr5 sh uen5 &r -runr1 r uen1 &r -runr2 r uen2 &r -runr3 r uen3 &r -runr4 r uen4 &r -runr5 r uen5 &r -zunr1 z uen1 &r -zunr2 z uen2 &r -zunr3 z uen3 &r -zunr4 z uen4 &r -zunr5 z uen5 &r -cunr1 c uen1 &r -cunr2 c uen2 &r -cunr3 c uen3 &r -cunr4 c uen4 &r -cunr5 c uen5 &r -sunr1 s uen1 &r -sunr2 s uen2 &r -sunr3 s uen3 &r -sunr4 s uen4 &r -sunr5 s uen5 &r -wangr1 w uang1 &r -wangr2 w uang2 &r -wangr3 w uang3 &r -wangr4 w uang4 &r -wangr5 w uang5 &r -guangr1 g uang1 &r -guangr2 g uang2 &r -guangr3 g uang3 &r -guangr4 g uang4 &r -guangr5 g uang5 &r -kuangr1 k uang1 &r -kuangr2 k uang2 &r -kuangr3 k uang3 &r -kuangr4 k uang4 &r -kuangr5 k uang5 &r -huangr1 h uang1 &r -huangr2 h uang2 &r -huangr3 h uang3 &r -huangr4 h uang4 &r -huangr5 h uang5 &r -zhuangr1 zh uang1 &r -zhuangr2 zh uang2 &r -zhuangr3 zh uang3 &r -zhuangr4 zh uang4 &r -zhuangr5 zh uang5 &r -chuangr1 ch uang1 &r -chuangr2 ch uang2 &r -chuangr3 ch uang3 &r -chuangr4 ch uang4 &r -chuangr5 ch uang5 &r -shuangr1 sh uang1 &r -shuangr2 sh uang2 &r -shuangr3 sh uang3 &r -shuangr4 sh uang4 &r -shuangr5 sh uang5 &r -wengr1 w ung1 &r -wengr2 w ung2 &r -wengr3 w ung3 &r -wengr4 w ung4 &r -wengr5 w ung5 &r -dongr1 d ung1 &r -dongr2 d ung2 &r -dongr3 d ung3 &r -dongr4 d ung4 &r -dongr5 d ung5 &r -tongr1 t ung1 &r -tongr2 t ung2 &r -tongr3 t ung3 &r -tongr4 t ung4 &r -tongr5 t ung5 &r -nongr1 n ung1 &r -nongr2 n ung2 &r -nongr3 n ung3 &r -nongr4 n ung4 &r -nongr5 n ung5 &r -longr1 l ung1 &r -longr2 l ung2 &r -longr3 l ung3 &r -longr4 l ung4 &r -longr5 l ung5 &r -gongr1 g ung1 &r -gongr2 g ung2 &r -gongr3 g ung3 &r -gongr4 g ung4 &r -gongr5 g ung5 &r -kongr1 k ung1 &r -kongr2 k ung2 &r -kongr3 k ung3 &r -kongr4 k ung4 &r -kongr5 k ung5 &r -hongr1 h ung1 &r -hongr2 h ung2 &r -hongr3 h ung3 &r -hongr4 h ung4 &r -hongr5 h ung5 &r -zhongr1 zh ung1 &r -zhongr2 zh ung2 &r -zhongr3 zh ung3 &r -zhongr4 zh ung4 &r -zhongr5 zh ung5 &r -chongr1 ch ung1 &r -chongr2 ch ung2 &r -chongr3 ch ung3 &r -chongr4 ch ung4 &r -chongr5 ch ung5 &r -rongr1 r ung1 &r -rongr2 r ung2 &r -rongr3 r ung3 &r -rongr4 r ung4 &r -rongr5 r ung5 &r -zongr1 z ung1 &r -zongr2 z ung2 &r -zongr3 z ung3 &r -zongr4 z ung4 &r -zongr5 z ung5 &r -congr1 c ung1 &r -congr2 c ung2 &r -congr3 c ung3 &r -congr4 c ung4 &r -congr5 c ung5 &r -songr1 s ung1 &r -songr2 s ung2 &r -songr3 s ung3 &r -songr4 s ung4 &r -songr5 s ung5 &r -yur1 y v1 &r -yur2 y v2 &r -yur3 y v3 &r -yur4 y v4 &r -yur5 y v5 &r -nvr1 n v1 &r -nvr2 n v2 &r -nvr3 n v3 &r -nvr4 n v4 &r -nvr5 n v5 &r -lvr1 l v1 &r -lvr2 l v2 &r -lvr3 l v3 &r -lvr4 l v4 &r -lvr5 l v5 &r -jur1 j v1 &r -jur2 j v2 &r -jur3 j v3 &r -jur4 j v4 &r -jur5 j v5 &r -qur1 q v1 &r -qur2 q v2 &r -qur3 q v3 &r -qur4 q v4 &r -qur5 q v5 &r -xur1 x v1 &r -xur2 x v2 &r -xur3 x v3 &r -xur4 x v4 &r -xur5 x v5 &r -yuer1 y ve1 &r -yuer2 y ve2 &r -yuer3 y ve3 &r -yuer4 y ve4 &r -yuer5 y ve5 &r -nuer1 n ve1 &r -nuer2 n ve2 &r -nuer3 n ve3 &r -nuer4 n ve4 &r -nuer5 n ve5 &r -nver1 n ve1 &r -nver2 n ve2 &r -nver3 n ve3 &r -nver4 n ve4 &r -nver5 n ve5 &r -luer1 l ve1 &r -luer2 l ve2 &r -luer3 l ve3 &r -luer4 l ve4 &r -luer5 l ve5 &r -lver1 l ve1 &r -lver2 l ve2 &r -lver3 l ve3 &r -lver4 l 
ve4 &r -lver5 l ve5 &r -juer1 j ve1 &r -juer2 j ve2 &r -juer3 j ve3 &r -juer4 j ve4 &r -juer5 j ve5 &r -quer1 q ve1 &r -quer2 q ve2 &r -quer3 q ve3 &r -quer4 q ve4 &r -quer5 q ve5 &r -xuer1 x ve1 &r -xuer2 x ve2 &r -xuer3 x ve3 &r -xuer4 x ve4 &r -xuer5 x ve5 &r -yuanr1 y van1 &r -yuanr2 y van2 &r -yuanr3 y van3 &r -yuanr4 y van4 &r -yuanr5 y van5 &r -juanr1 j van1 &r -juanr2 j van2 &r -juanr3 j van3 &r -juanr4 j van4 &r -juanr5 j van5 &r -quanr1 q van1 &r -quanr2 q van2 &r -quanr3 q van3 &r -quanr4 q van4 &r -quanr5 q van5 &r -xuanr1 x van1 &r -xuanr2 x van2 &r -xuanr3 x van3 &r -xuanr4 x van4 &r -xuanr5 x van5 &r -yunr1 y vn1 &r -yunr2 y vn2 &r -yunr3 y vn3 &r -yunr4 y vn4 &r -yunr5 y vn5 &r -junr1 j vn1 &r -junr2 j vn2 &r -junr3 j vn3 &r -junr4 j vn4 &r -junr5 j vn5 &r -qunr1 q vn1 &r -qunr2 q vn2 &r -qunr3 q vn3 &r -qunr4 q vn4 &r -qunr5 q vn5 &r -xunr1 x vn1 &r -xunr2 x vn2 &r -xunr3 x vn3 &r -xunr4 x vn4 &r -xunr5 x vn5 &r -yongr1 y vng1 &r -yongr2 y vng2 &r -yongr3 y vng3 &r -yongr4 y vng4 &r -yongr5 y vng5 &r -jiongr1 j vng1 &r -jiongr2 j vng2 &r -jiongr3 j vng3 &r -jiongr4 j vng4 &r -jiongr5 j vng5 &r -qiongr1 q vng1 &r -qiongr2 q vng2 &r -qiongr3 q vng3 &r -qiongr4 q vng4 &r -qiongr5 q vng5 &r -xiongr1 x vng1 &r -xiongr2 x vng2 &r -xiongr3 x vng3 &r -xiongr4 x vng4 &r -xiongr5 x vng5 &r diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/preprocess_transcription.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/preprocess_transcription.py deleted file mode 100644 index ce117d42..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/preprocess_transcription.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import argparse -import pickle -import re -from pathlib import Path - -import tqdm -import yaml - -zh_pattern = re.compile("[\u4e00-\u9fa5]") - -_tones = {'', '', '', '0', '1', '2', '3', '4', '5'} - -_pauses = {'%', '$'} - -_initials = { - 'b', - 'p', - 'm', - 'f', - 'd', - 't', - 'n', - 'l', - 'g', - 'k', - 'h', - 'j', - 'q', - 'x', - 'zh', - 'ch', - 'sh', - 'r', - 'z', - 'c', - 's', -} - -_finals = { - 'ii', - 'iii', - 'a', - 'o', - 'e', - 'ea', - 'ai', - 'ei', - 'ao', - 'ou', - 'an', - 'en', - 'ang', - 'eng', - 'er', - 'i', - 'ia', - 'io', - 'ie', - 'iai', - 'iao', - 'iou', - 'ian', - 'ien', - 'iang', - 'ieng', - 'u', - 'ua', - 'uo', - 'uai', - 'uei', - 'uan', - 'uen', - 'uang', - 'ueng', - 'v', - 've', - 'van', - 'ven', - 'veng', -} - -_ernized_symbol = {'&r'} - -_specials = {'', '', '', ''} - -_phones = _initials | _finals | _ernized_symbol | _specials | _pauses - - -def is_zh(word): - global zh_pattern - match = zh_pattern.search(word) - return match is not None - - -def ernized(syllable): - return syllable[:2] != "er" and syllable[-2] == 'r' - - -def convert(syllable): - # expansion of o -> uo - syllable = re.sub(r"([bpmf])o$", r"\1uo", syllable) - # syllable = syllable.replace("bo", "buo").replace("po", "puo").replace("mo", "muo").replace("fo", "fuo") - # expansion for iong, ong - syllable = syllable.replace("iong", "veng").replace("ong", "ueng") - - # expansion for ing, in - syllable = syllable.replace("ing", "ieng").replace("in", "ien") - - # expansion for un, ui, iu - syllable = syllable.replace("un", "uen").replace("ui", - "uei").replace("iu", "iou") - - # rule for variants of i - syllable = syllable.replace("zi", "zii").replace("ci", "cii").replace("si", "sii")\ - .replace("zhi", "zhiii").replace("chi", "chiii").replace("shi", "shiii")\ - .replace("ri", "riii") - - # rule for y preceding i, u - syllable = syllable.replace("yi", "i").replace("yu", "v").replace("y", "i") - - # rule for w - syllable = syllable.replace("wu", "u").replace("w", "u") - - # rule for v following j, q, x - syllable = syllable.replace("ju", "jv").replace("qu", - "qv").replace("xu", "xv") - - return syllable - - -def split_syllable(syllable: str): - """Split a syllable in pinyin into a list of phones and a list of tones. - Initials have no tone, represented by '0', while finals have tones from - '1,2,3,4,5'. - - e.g. 
- - zhang -> ['zh', 'ang'], ['0', '1'] - """ - if syllable in _pauses: - # syllable, tone - return [syllable], ['0'] - - tone = syllable[-1] - syllable = convert(syllable[:-1]) - - phones = [] - tones = [] - - global _initials - if syllable[:2] in _initials: - phones.append(syllable[:2]) - tones.append('0') - phones.append(syllable[2:]) - tones.append(tone) - elif syllable[0] in _initials: - phones.append(syllable[0]) - tones.append('0') - phones.append(syllable[1:]) - tones.append(tone) - else: - phones.append(syllable) - tones.append(tone) - return phones, tones - - -def load_aishell3_transcription(line: str): - sentence_id, pinyin, text = line.strip().split("|") - syllables = pinyin.strip().split() - - results = [] - - for syllable in syllables: - if syllable in _pauses: - results.append(syllable) - elif not ernized(syllable): - results.append(syllable) - else: - results.append(syllable[:-2] + syllable[-1]) - results.append('&r5') - - phones = [] - tones = [] - for syllable in results: - p, t = split_syllable(syllable) - phones.extend(p) - tones.extend(t) - for p in phones: - assert p in _phones, p - return { - "sentence_id": sentence_id, - "text": text, - "syllables": results, - "phones": phones, - "tones": tones - } - - -def process_aishell3(dataset_root, output_dir): - dataset_root = Path(dataset_root).expanduser() - output_dir = Path(output_dir).expanduser() - output_dir.mkdir(parents=True, exist_ok=True) - - prosody_label_path = dataset_root / "label_train-set.txt" - with open(prosody_label_path, 'rt') as f: - lines = [line.strip() for line in f] - - records = lines[5:] - - processed_records = [] - for record in tqdm.tqdm(records): - new_record = load_aishell3_transcription(record) - processed_records.append(new_record) - print(new_record) - - with open(output_dir / "metadata.pickle", 'wb') as f: - pickle.dump(processed_records, f) - - with open(output_dir / "metadata.yaml", 'wt', encoding="utf-8") as f: - yaml.safe_dump( - processed_records, f, default_flow_style=None, allow_unicode=True) - - print("metadata done!") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Preprocess transcription of AiShell3 and save them in a compact file(yaml and pickle)." - ) - parser.add_argument( - "--input", - type=str, - default="~/datasets/aishell3/train", - help="path of the training dataset,(contains a label_train-set.txt).") - parser.add_argument( - "--output", - type=str, - help="the directory to save the processed transcription." - "If not provided, it would be the same as the input.") - args = parser.parse_args() - if args.output is None: - args.output = args.input - - process_aishell3(args.input, args.output) diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/process_wav.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/process_wav.py deleted file mode 100644 index 56d8e4c3..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/process_wav.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
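For reference, a condensed sketch of the syllable split performed by the deleted preprocess_transcription.py, using the "zhang" example from its docstring. Only a few of the rewrite rules are reproduced and the constant set is abbreviated, so treat this as an illustration of the idea rather than the full removed implementation.

import re

_initials = {'b', 'p', 'm', 'f', 'd', 't', 'n', 'l', 'g', 'k', 'h',
             'j', 'q', 'x', 'zh', 'ch', 'sh', 'r', 'z', 'c', 's'}

def convert(syllable: str) -> str:
    # a small subset of the expansion rules from the removed file
    syllable = re.sub(r"([bpmf])o$", r"\1uo", syllable)               # bo -> buo, ...
    syllable = syllable.replace("iong", "veng").replace("ong", "ueng")
    syllable = syllable.replace("ing", "ieng").replace("in", "ien")
    return syllable

def split_syllable(syllable: str):
    tone = syllable[-1]                   # trailing tone digit '0'..'5'
    base = convert(syllable[:-1])
    if base[:2] in _initials:             # try two-letter initials (zh/ch/sh) first
        return [base[:2], base[2:]], ['0', tone]
    if base[0] in _initials:
        return [base[0], base[1:]], ['0', tone]
    return [base], [tone]                 # zero-initial syllable

print(split_syllable("zhang1"))           # (['zh', 'ang'], ['0', '1'])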
-# See the License for the specific language governing permissions and -# limitations under the License. -import argparse -from functools import partial -from multiprocessing import Pool -from pathlib import Path - -import librosa -import numpy as np -import soundfile as sf -from praatio import textgrid -from tqdm import tqdm - - -def get_valid_part(fpath): - f = textgrid.openTextgrid(fpath, includeEmptyIntervals=True) - - start = 0 - phone_entry_list = f.tierDict['phones'].entryList - first_entry = phone_entry_list[0] - if first_entry.label == "sil": - start = first_entry.end - - last_entry = phone_entry_list[-1] - if last_entry.label == "sp": - end = last_entry.start - else: - end = last_entry.end - return start, end - - -def process_utterance(fpath, source_dir, target_dir, alignment_dir): - rel_path = fpath.relative_to(source_dir) - opath = target_dir / rel_path - apath = (alignment_dir / rel_path).with_suffix(".TextGrid") - opath.parent.mkdir(parents=True, exist_ok=True) - - start, end = get_valid_part(apath) - wav, _ = librosa.load(fpath, sr=22050, offset=start, duration=end - start) - normalized_wav = wav / np.max(wav) * 0.999 - sf.write(opath, normalized_wav, samplerate=22050, subtype='PCM_16') - # print(f"{fpath} => {opath}") - - -def preprocess_aishell3(source_dir, target_dir, alignment_dir): - source_dir = Path(source_dir).expanduser() - target_dir = Path(target_dir).expanduser() - alignment_dir = Path(alignment_dir).expanduser() - - wav_paths = list(source_dir.rglob("*.wav")) - print(f"there are {len(wav_paths)} audio files in total") - fx = partial( - process_utterance, - source_dir=source_dir, - target_dir=target_dir, - alignment_dir=alignment_dir) - with Pool(16) as p: - list( - tqdm(p.imap(fx, wav_paths), total=len(wav_paths), unit="utterance")) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Process audio in AiShell3, trim silence according to the alignment " - "files generated by MFA, and normalize volume by peak.") - parser.add_argument( - "--input", - type=str, - default="~/datasets/aishell3/train/wav", - help="path of the original audio folder in aishell3.") - parser.add_argument( - "--output", - type=str, - default="~/datasets/aishell3/train/normalized_wav", - help="path of the folder to save the processed audio files.") - parser.add_argument( - "--alignment", - type=str, - default="~/datasets/aishell3/train/alignment", - help="path of the alignment files.") - args = parser.parse_args() - - preprocess_aishell3(args.input, args.output, args.alignment) diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/train.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/train.py deleted file mode 100644 index ea5f12da..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/train.py +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
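A minimal sketch of the per-utterance step carried out by the deleted process_wav.py: load only the voiced span, peak-normalize, and write 16-bit PCM at 22050 Hz. It is illustrative only; the paths are hypothetical, and the start/end values stand in for the boundaries that get_valid_part() read from the TextGrid 'phones' tier (skipping a leading "sil" and a trailing "sp").

import librosa
import numpy as np
import soundfile as sf

def trim_and_normalize(in_path, out_path, start, end, sr=22050):
    # load just the non-silent segment [start, end) at the target sample rate
    wav, _ = librosa.load(in_path, sr=sr, offset=start, duration=end - start)
    # peak-normalize to just below full scale, as the removed script did
    wav = wav / np.max(wav) * 0.999
    sf.write(out_path, wav, samplerate=sr, subtype='PCM_16')

# hypothetical usage:
# trim_and_normalize("in.wav", "out.wav", start=0.12, end=3.47)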
-import time -from collections import defaultdict -from pathlib import Path - -import numpy as np -import paddle -from matplotlib import pyplot as plt -from paddle import distributed as dist -from paddle.io import DataLoader -from paddle.io import DistributedBatchSampler - -from paddlespeech.t2s.data import dataset -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.aishell3 import AiShell3 -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.aishell3 import collate_aishell3_examples -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.config import get_cfg_defaults -from paddlespeech.t2s.models.tacotron2 import Tacotron2 -from paddlespeech.t2s.models.tacotron2 import Tacotron2Loss -from paddlespeech.t2s.training.cli import default_argument_parser -from paddlespeech.t2s.training.experiment import ExperimentBase -from paddlespeech.t2s.utils import display -from paddlespeech.t2s.utils import mp_tools - - -class Experiment(ExperimentBase): - def compute_losses(self, inputs, outputs): - texts, tones, mel_targets, utterance_embeds, text_lens, output_lens, stop_tokens = inputs - - mel_outputs = outputs["mel_output"] - mel_outputs_postnet = outputs["mel_outputs_postnet"] - alignments = outputs["alignments"] - - losses = self.criterion(mel_outputs, mel_outputs_postnet, mel_targets, - alignments, output_lens, text_lens) - return losses - - def train_batch(self): - start = time.time() - batch = self.read_batch() - data_loader_time = time.time() - start - - self.optimizer.clear_grad() - self.model.train() - texts, tones, mels, utterance_embeds, text_lens, output_lens, stop_tokens = batch - outputs = self.model( - texts, - text_lens, - mels, - output_lens, - tones=tones, - global_condition=utterance_embeds) - losses = self.compute_losses(batch, outputs) - loss = losses["loss"] - loss.backward() - self.optimizer.step() - iteration_time = time.time() - start - - losses_np = {k: float(v) for k, v in losses.items()} - # logging - msg = "Rank: {}, ".format(dist.get_rank()) - msg += "step: {}, ".format(self.iteration) - msg += "time: {:>.3f}s/{:>.3f}s, ".format(data_loader_time, - iteration_time) - msg += ', '.join('{}: {:>.6f}'.format(k, v) - for k, v in losses_np.items()) - self.logger.info(msg) - - if dist.get_rank() == 0: - for key, value in losses_np.items(): - self.visualizer.add_scalar(f"train_loss/{key}", value, - self.iteration) - - @mp_tools.rank_zero_only - @paddle.no_grad() - def valid(self): - valid_losses = defaultdict(list) - for i, batch in enumerate(self.valid_loader): - texts, tones, mels, utterance_embeds, text_lens, output_lens, stop_tokens = batch - outputs = self.model( - texts, - text_lens, - mels, - output_lens, - tones=tones, - global_condition=utterance_embeds) - losses = self.compute_losses(batch, outputs) - for key, value in losses.items(): - valid_losses[key].append(float(value)) - - attention_weights = outputs["alignments"] - self.visualizer.add_figure( - f"valid_sentence_{i}_alignments", - display.plot_alignment(attention_weights[0].numpy().T), - self.iteration) - self.visualizer.add_figure( - f"valid_sentence_{i}_target_spectrogram", - display.plot_spectrogram(mels[0].numpy().T), self.iteration) - mel_pred = outputs['mel_outputs_postnet'] - self.visualizer.add_figure( - f"valid_sentence_{i}_predicted_spectrogram", - display.plot_spectrogram(mel_pred[0].numpy().T), self.iteration) - - # write visual log - valid_losses = {k: np.mean(v) for k, v in valid_losses.items()} - - # logging - msg = "Valid: " - msg += "step: {}, ".format(self.iteration) - msg += ', 
'.join('{}: {:>.6f}'.format(k, v) - for k, v in valid_losses.items()) - self.logger.info(msg) - - for key, value in valid_losses.items(): - self.visualizer.add_scalar(f"valid/{key}", value, self.iteration) - - @mp_tools.rank_zero_only - @paddle.no_grad() - def eval(self): - """Evaluation of Tacotron2 in autoregressive manner.""" - self.model.eval() - mel_dir = Path(self.output_dir / ("eval_{}".format(self.iteration))) - mel_dir.mkdir(parents=True, exist_ok=True) - for i, batch in enumerate(self.test_loader): - texts, tones, mels, utterance_embeds, *_ = batch - outputs = self.model.infer( - texts, tones=tones, global_condition=utterance_embeds) - - display.plot_alignment(outputs["alignments"][0].numpy().T) - plt.savefig(mel_dir / f"sentence_{i}.png") - plt.close() - np.save(mel_dir / f"sentence_{i}", - outputs["mel_outputs_postnet"][0].numpy().T) - print(f"sentence_{i}") - - def setup_model(self): - config = self.config - model = Tacotron2( - vocab_size=config.model.vocab_size, - n_tones=config.model.n_tones, - d_mels=config.data.d_mels, - d_encoder=config.model.d_encoder, - encoder_conv_layers=config.model.encoder_conv_layers, - encoder_kernel_size=config.model.encoder_kernel_size, - d_prenet=config.model.d_prenet, - d_attention_rnn=config.model.d_attention_rnn, - d_decoder_rnn=config.model.d_decoder_rnn, - attention_filters=config.model.attention_filters, - attention_kernel_size=config.model.attention_kernel_size, - d_attention=config.model.d_attention, - d_postnet=config.model.d_postnet, - postnet_kernel_size=config.model.postnet_kernel_size, - postnet_conv_layers=config.model.postnet_conv_layers, - reduction_factor=config.model.reduction_factor, - p_encoder_dropout=config.model.p_encoder_dropout, - p_prenet_dropout=config.model.p_prenet_dropout, - p_attention_dropout=config.model.p_attention_dropout, - p_decoder_dropout=config.model.p_decoder_dropout, - p_postnet_dropout=config.model.p_postnet_dropout, - d_global_condition=config.model.d_global_condition, - use_stop_token=config.model.use_stop_token, ) - - if self.parallel: - model = paddle.DataParallel(model) - - grad_clip = paddle.nn.ClipGradByGlobalNorm( - config.training.grad_clip_thresh) - optimizer = paddle.optimizer.Adam( - learning_rate=config.training.lr, - parameters=model.parameters(), - weight_decay=paddle.regularizer.L2Decay( - config.training.weight_decay), - grad_clip=grad_clip) - criterion = Tacotron2Loss( - use_stop_token_loss=config.model.use_stop_token, - use_guided_attention_loss=config.model.use_guided_attention_loss, - sigma=config.model.guided_attention_loss_sigma) - self.model = model - self.optimizer = optimizer - self.criterion = criterion - - def setup_dataloader(self): - args = self.args - config = self.config - aishell3_dataset = AiShell3(args.data) - - valid_set, train_set = dataset.split(aishell3_dataset, - config.data.valid_size) - batch_fn = collate_aishell3_examples - - if not self.parallel: - self.train_loader = DataLoader( - train_set, - batch_size=config.data.batch_size, - shuffle=True, - drop_last=True, - collate_fn=batch_fn) - else: - sampler = DistributedBatchSampler( - train_set, - batch_size=config.data.batch_size, - shuffle=True, - drop_last=True) - self.train_loader = DataLoader( - train_set, batch_sampler=sampler, collate_fn=batch_fn) - - self.valid_loader = DataLoader( - valid_set, - batch_size=config.data.batch_size, - shuffle=False, - drop_last=False, - collate_fn=batch_fn) - - self.test_loader = DataLoader( - valid_set, - batch_size=1, - shuffle=False, - drop_last=False, - 
collate_fn=batch_fn) - - -def main_sp(config, args): - exp = Experiment(config, args) - exp.setup() - exp.resume_or_load() - if not args.test: - exp.run() - else: - exp.eval() - - -def main(config, args): - if args.ngpu > 1: - dist.spawn(main_sp, args=(config, args), nprocs=args.ngpu) - else: - main_sp(config, args) - - -if __name__ == "__main__": - config = get_cfg_defaults() - parser = default_argument_parser() - parser.add_argument("--test", action="store_true") - args = parser.parse_args() - if args.config: - config.merge_from_file(args.config) - if args.opts: - config.merge_from_list(args.opts) - config.freeze() - print(config) - print(args) - - main(config, args) diff --git a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/voice_cloning.py b/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/voice_cloning.py deleted file mode 100644 index 4e6b8d36..00000000 --- a/paddlespeech/t2s/exps/voice_cloning/tacotron2_ge2e/voice_cloning.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import argparse -import os -from pathlib import Path - -import numpy as np -import paddle -import soundfile as sf -from matplotlib import pyplot as plt - -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.aishell3 import voc_phones -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.aishell3 import voc_tones -from paddlespeech.t2s.exps.voice_cloning.tacotron2_ge2e.chinese_g2p import convert_sentence -from paddlespeech.t2s.models.tacotron2 import Tacotron2 -from paddlespeech.t2s.models.waveflow import ConditionalWaveFlow -from paddlespeech.t2s.utils import display -from paddlespeech.vector.exps.ge2e.audio_processor import SpeakerVerificationPreprocessor -from paddlespeech.vector.models.lstm_speaker_encoder import LSTMSpeakerEncoder - - -def voice_cloning(args): - # speaker encoder - p = SpeakerVerificationPreprocessor( - sampling_rate=16000, - audio_norm_target_dBFS=-30, - vad_window_length=30, - vad_moving_average_width=8, - vad_max_silence_length=6, - mel_window_length=25, - mel_window_step=10, - n_mels=40, - partial_n_frames=160, - min_pad_coverage=0.75, - partial_overlap_ratio=0.5) - print("Audio Processor Done!") - - speaker_encoder = LSTMSpeakerEncoder( - n_mels=40, num_layers=3, hidden_size=256, output_size=256) - speaker_encoder.set_state_dict(paddle.load(args.ge2e_params_path)) - speaker_encoder.eval() - print("GE2E Done!") - - synthesizer = Tacotron2( - vocab_size=68, - n_tones=10, - d_mels=80, - d_encoder=512, - encoder_conv_layers=3, - encoder_kernel_size=5, - d_prenet=256, - d_attention_rnn=1024, - d_decoder_rnn=1024, - attention_filters=32, - attention_kernel_size=31, - d_attention=128, - d_postnet=512, - postnet_kernel_size=5, - postnet_conv_layers=5, - reduction_factor=1, - p_encoder_dropout=0.5, - p_prenet_dropout=0.5, - p_attention_dropout=0.1, - p_decoder_dropout=0.1, - p_postnet_dropout=0.5, - d_global_condition=256, - use_stop_token=False, ) - 
synthesizer.set_state_dict(paddle.load(args.tacotron2_params_path)) - synthesizer.eval() - print("Tacotron2 Done!") - - # vocoder - vocoder = ConditionalWaveFlow( - upsample_factors=[16, 16], - n_flows=8, - n_layers=8, - n_group=16, - channels=128, - n_mels=80, - kernel_size=[3, 3]) - vocoder.set_state_dict(paddle.load(args.waveflow_params_path)) - vocoder.eval() - print("WaveFlow Done!") - - output_dir = Path(args.output_dir) - output_dir.mkdir(parents=True, exist_ok=True) - - input_dir = Path(args.input_dir) - - # 因为 AISHELL-3 数据集中使用 % 和 $ 表示韵律词和韵律短语的边界,它们大约对应着较短和较长的停顿,在文本中可以使用 % 和 $ 来调节韵律。 - # 值得的注意的是,句子的有效字符集仅包含汉字和 %, $, 因此输入的句子只能包含这些字符。 - sentence = "每当你觉得%想要批评什么人的时候$你切要记着%这个世界上的人%并非都具备你禀有的条件$" - phones, tones = convert_sentence(sentence) - phones = np.array( - [voc_phones.lookup(item) for item in phones], dtype=np.int64) - tones = np.array([voc_tones.lookup(item) for item in tones], dtype=np.int64) - phones = paddle.to_tensor(phones).unsqueeze(0) - tones = paddle.to_tensor(tones).unsqueeze(0) - - for name in os.listdir(input_dir): - utt_id = name.split(".")[0] - ref_audio_path = input_dir / name - mel_sequences = p.extract_mel_partials(p.preprocess_wav(ref_audio_path)) - print("mel_sequences: ", mel_sequences.shape) - with paddle.no_grad(): - embed = speaker_encoder.embed_utterance( - paddle.to_tensor(mel_sequences)) - print("embed shape: ", embed.shape) - utterance_embeds = paddle.unsqueeze(embed, 0) - outputs = synthesizer.infer( - phones, tones=tones, global_condition=utterance_embeds) - mel_input = paddle.transpose(outputs["mel_outputs_postnet"], [0, 2, 1]) - alignment = outputs["alignments"][0].numpy().T - display.plot_alignment(alignment) - plt.savefig(str(output_dir / (utt_id + ".png"))) - - with paddle.no_grad(): - wav = vocoder.infer(mel_input) - wav = wav.numpy()[0] - sf.write(str(output_dir / (utt_id + ".wav")), wav, samplerate=22050) - - -def main(): - # parse args and config and redirect to train_sp - parser = argparse.ArgumentParser(description="") - parser.add_argument( - "--ge2e_params_path", type=str, help="ge2e params path.") - parser.add_argument( - "--tacotron2_params_path", type=str, help="tacotron2 params path.") - parser.add_argument( - "--waveflow_params_path", type=str, help="waveflow params path.") - - parser.add_argument( - "--ngpu", type=int, default=1, help="if ngpu=0, use cpu.") - - parser.add_argument( - "--input-dir", - type=str, - help="input dir of *.wav, the sample rate will be resample to 16k.") - parser.add_argument("--output-dir", type=str, help="output dir.") - - args = parser.parse_args() - - if args.ngpu == 0: - paddle.set_device("cpu") - elif args.ngpu > 0: - paddle.set_device("gpu") - else: - print("ngpu should >= 0 !") - - voice_cloning(args) - - -if __name__ == "__main__": - main() diff --git a/paddlespeech/t2s/models/__init__.py b/paddlespeech/t2s/models/__init__.py index 65227374..738943f8 100644 --- a/paddlespeech/t2s/models/__init__.py +++ b/paddlespeech/t2s/models/__init__.py @@ -17,6 +17,5 @@ from .melgan import * from .new_tacotron2 import * from .parallel_wavegan import * from .speedyspeech import * -from .tacotron2 import * from .transformer_tts import * from .waveflow import * diff --git a/paddlespeech/t2s/models/new_tacotron2/tacotron2.py b/paddlespeech/t2s/models/new_tacotron2/tacotron2.py index bd4129fb..da71077f 100644 --- a/paddlespeech/t2s/models/new_tacotron2/tacotron2.py +++ b/paddlespeech/t2s/models/new_tacotron2/tacotron2.py @@ -479,7 +479,7 @@ class Tacotron2(nn.Layer): elif self.spk_embed_integration_type == "concat": 
# concat hidden states with spk embeds spk_emb = F.normalize(spk_emb).unsqueeze(1).expand( - -1, paddle.shape(hs)[1], -1) + shape=[-1, paddle.shape(hs)[1], -1]) hs = paddle.concat([hs, spk_emb], axis=-1) else: raise NotImplementedError("support only add or concat.") diff --git a/paddlespeech/t2s/models/tacotron2.py b/paddlespeech/t2s/models/tacotron2.py deleted file mode 100644 index 01ea4f7d..00000000 --- a/paddlespeech/t2s/models/tacotron2.py +++ /dev/null @@ -1,1074 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import math - -import paddle -from paddle import nn -from paddle.fluid.layers import sequence_mask -from paddle.nn import functional as F -from paddle.nn import initializer as I -from tqdm import trange - -from paddlespeech.t2s.modules.conv import Conv1dBatchNorm -from paddlespeech.t2s.modules.losses import guided_attention_loss -from paddlespeech.t2s.utils import checkpoint - -__all__ = ["Tacotron2", "Tacotron2Loss"] - - -class LocationSensitiveAttention(nn.Layer): - """Location Sensitive Attention module. - - Reference: `Attention-Based Models for Speech Recognition `_ - - Parameters - ----------- - d_query: int - The feature size of query. - d_key : int - The feature size of key. - d_attention : int - The feature size of dimension. - location_filters : int - Filter size of attention convolution. - location_kernel_size : int - Kernel size of attention convolution. - """ - - def __init__(self, - d_query: int, - d_key: int, - d_attention: int, - location_filters: int, - location_kernel_size: int): - super().__init__() - - self.query_layer = nn.Linear(d_query, d_attention, bias_attr=False) - self.key_layer = nn.Linear(d_key, d_attention, bias_attr=False) - self.value = nn.Linear(d_attention, 1, bias_attr=False) - - # Location Layer - self.location_conv = nn.Conv1D( - 2, - location_filters, - kernel_size=location_kernel_size, - padding=int((location_kernel_size - 1) / 2), - bias_attr=False, - data_format='NLC') - self.location_layer = nn.Linear( - location_filters, d_attention, bias_attr=False) - - def forward(self, - query, - processed_key, - value, - attention_weights_cat, - mask=None): - """Compute context vector and attention weights. - - Parameters - ----------- - query : Tensor [shape=(batch_size, d_query)] - The queries. - processed_key : Tensor [shape=(batch_size, time_steps_k, d_attention)] - The keys after linear layer. - value : Tensor [shape=(batch_size, time_steps_k, d_key)] - The values. - attention_weights_cat : Tensor [shape=(batch_size, time_step_k, 2)] - Attention weights concat. - mask : Tensor, optional - The mask. Shape should be (batch_size, times_steps_k, 1). - Defaults to None. - - Returns - ---------- - attention_context : Tensor [shape=(batch_size, d_attention)] - The context vector. - attention_weights : Tensor [shape=(batch_size, time_steps_k)] - The attention weights. 
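The only non-deletion hunk in this part of the patch is the expand() change in new_tacotron2/tacotron2.py, which rewrites the torch-style positional call into a single keyword shape list (where -1 keeps a dimension), the form paddle.Tensor.expand accepts. A small stand-alone check of what the fixed line computes, with made-up shapes:

import paddle
import paddle.nn.functional as F

hs = paddle.zeros([2, 7, 384])        # stand-in for (batch, T_text, adim)
spk_emb = paddle.randn([2, 64])       # stand-in for (batch, spk_embed_dim)

# mirror the patched line: broadcast the speaker embedding over time
spk_emb = F.normalize(spk_emb).unsqueeze(1).expand(
    shape=[-1, paddle.shape(hs)[1], -1])
print(spk_emb.shape)                  # [2, 7, 64]

hs = paddle.concat([hs, spk_emb], axis=-1)
print(hs.shape)                       # [2, 7, 448]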
- """ - - processed_query = self.query_layer(paddle.unsqueeze(query, axis=[1])) - processed_attention_weights = self.location_layer( - self.location_conv(attention_weights_cat)) - # (B, T_enc, 1) - alignment = self.value( - paddle.tanh(processed_attention_weights + processed_key + - processed_query)) - - if mask is not None: - alignment = alignment + (1.0 - mask) * -1e9 - - attention_weights = F.softmax(alignment, axis=1) - attention_context = paddle.matmul( - attention_weights, value, transpose_x=True) - - attention_weights = paddle.squeeze(attention_weights, axis=-1) - attention_context = paddle.squeeze(attention_context, axis=1) - - return attention_context, attention_weights - - -class DecoderPreNet(nn.Layer): - """Decoder prenet module for Tacotron2. - - Parameters - ---------- - d_input: int - The input feature size. - - d_hidden: int - The hidden size. - - d_output: int - The output feature size. - - dropout_rate: float - The droput probability. - - """ - - def __init__(self, - d_input: int, - d_hidden: int, - d_output: int, - dropout_rate: float): - super().__init__() - - self.dropout_rate = dropout_rate - self.linear1 = nn.Linear(d_input, d_hidden, bias_attr=False) - self.linear2 = nn.Linear(d_hidden, d_output, bias_attr=False) - - def forward(self, x): - """Calculate forward propagation. - - Parameters - ---------- - x: Tensor [shape=(B, T_mel, C)] - Batch of the sequences of padded mel spectrogram. - - Returns - ------- - output: Tensor [shape=(B, T_mel, C)] - Batch of the sequences of padded hidden state. - - """ - - x = F.dropout(F.relu(self.linear1(x)), self.dropout_rate, training=True) - output = F.dropout( - F.relu(self.linear2(x)), self.dropout_rate, training=True) - return output - - -class DecoderPostNet(nn.Layer): - """Decoder postnet module for Tacotron2. - - Parameters - ---------- - d_mels: int - The number of mel bands. - - d_hidden: int - The hidden size of postnet. - - kernel_size: int - The kernel size of the conv layer in postnet. - - num_layers: int - The number of conv layers in postnet. - - dropout: float - The droput probability. - - """ - - def __init__(self, - d_mels: int, - d_hidden: int, - kernel_size: int, - num_layers: int, - dropout: float): - super().__init__() - self.dropout = dropout - self.num_layers = num_layers - - padding = int((kernel_size - 1) / 2) - - self.conv_batchnorms = nn.LayerList() - k = math.sqrt(1.0 / (d_mels * kernel_size)) - self.conv_batchnorms.append( - Conv1dBatchNorm( - d_mels, - d_hidden, - kernel_size=kernel_size, - padding=padding, - bias_attr=I.Uniform(-k, k), - data_format='NLC')) - - k = math.sqrt(1.0 / (d_hidden * kernel_size)) - self.conv_batchnorms.extend([ - Conv1dBatchNorm( - d_hidden, - d_hidden, - kernel_size=kernel_size, - padding=padding, - bias_attr=I.Uniform(-k, k), - data_format='NLC') for i in range(1, num_layers - 1) - ]) - - self.conv_batchnorms.append( - Conv1dBatchNorm( - d_hidden, - d_mels, - kernel_size=kernel_size, - padding=padding, - bias_attr=I.Uniform(-k, k), - data_format='NLC')) - - def forward(self, x): - """Calculate forward propagation. - - Parameters - ---------- - x: Tensor [shape=(B, T_mel, C)] - Output sequence of features from decoder. - - Returns - ------- - output: Tensor [shape=(B, T_mel, C)] - Output sequence of features after postnet. 
- - """ - - for i in range(len(self.conv_batchnorms) - 1): - x = F.dropout( - F.tanh(self.conv_batchnorms[i](x)), - self.dropout, - training=self.training) - output = F.dropout( - self.conv_batchnorms[self.num_layers - 1](x), - self.dropout, - training=self.training) - return output - - -class Tacotron2Encoder(nn.Layer): - """Tacotron2 encoder module for Tacotron2. - - Parameters - ---------- - d_hidden: int - The hidden size in encoder module. - - conv_layers: int - The number of conv layers. - - kernel_size: int - The kernel size of conv layers. - - p_dropout: float - The droput probability. - """ - - def __init__(self, - d_hidden: int, - conv_layers: int, - kernel_size: int, - p_dropout: float): - super().__init__() - - k = math.sqrt(1.0 / (d_hidden * kernel_size)) - self.conv_batchnorms = nn.LayerList([ - Conv1dBatchNorm( - d_hidden, - d_hidden, - kernel_size, - stride=1, - padding=int((kernel_size - 1) / 2), - bias_attr=I.Uniform(-k, k), - data_format='NLC') for i in range(conv_layers) - ]) - self.p_dropout = p_dropout - - self.hidden_size = int(d_hidden / 2) - self.lstm = nn.LSTM( - d_hidden, self.hidden_size, direction="bidirectional") - - def forward(self, x, input_lens=None): - """Calculate forward propagation of tacotron2 encoder. - - Parameters - ---------- - x: Tensor [shape=(B, T, C)] - Input embeddings. - - text_lens: Tensor [shape=(B,)], optional - Batch of lengths of each text input batch. Defaults to None. - - Returns - ------- - output : Tensor [shape=(B, T, C)] - Batch of the sequences of padded hidden states. - - """ - for conv_batchnorm in self.conv_batchnorms: - x = F.dropout( - F.relu(conv_batchnorm(x)), - self.p_dropout, - training=self.training) - - output, _ = self.lstm(inputs=x, sequence_length=input_lens) - return output - - -class Tacotron2Decoder(nn.Layer): - """Tacotron2 decoder module for Tacotron2. - - Parameters - ---------- - d_mels: int - The number of mel bands. - - reduction_factor: int - The reduction factor of tacotron. - - d_encoder: int - The hidden size of encoder. - - d_prenet: int - The hidden size in decoder prenet. - - d_attention_rnn: int - The attention rnn layer hidden size. - - d_decoder_rnn: int - The decoder rnn layer hidden size. - - d_attention: int - The hidden size of the linear layer in location sensitive attention. - - attention_filters: int - The filter size of the conv layer in location sensitive attention. - - attention_kernel_size: int - The kernel size of the conv layer in location sensitive attention. - - p_prenet_dropout: float - The droput probability in decoder prenet. - - p_attention_dropout: float - The droput probability in location sensitive attention. - - p_decoder_dropout: float - The droput probability in decoder. - - use_stop_token: bool - Whether to use a binary classifier for stop token prediction. 
- Defaults to False - """ - - def __init__(self, - d_mels: int, - reduction_factor: int, - d_encoder: int, - d_prenet: int, - d_attention_rnn: int, - d_decoder_rnn: int, - d_attention: int, - attention_filters: int, - attention_kernel_size: int, - p_prenet_dropout: float, - p_attention_dropout: float, - p_decoder_dropout: float, - use_stop_token: bool=False): - super().__init__() - self.d_mels = d_mels - self.reduction_factor = reduction_factor - self.d_encoder = d_encoder - self.d_attention_rnn = d_attention_rnn - self.d_decoder_rnn = d_decoder_rnn - self.p_attention_dropout = p_attention_dropout - self.p_decoder_dropout = p_decoder_dropout - - self.prenet = DecoderPreNet( - d_mels * reduction_factor, - d_prenet, - d_prenet, - dropout_rate=p_prenet_dropout) - - # attention_rnn takes attention's context vector has an - # auxiliary input - self.attention_rnn = nn.LSTMCell(d_prenet + d_encoder, d_attention_rnn) - - self.attention_layer = LocationSensitiveAttention( - d_attention_rnn, d_encoder, d_attention, attention_filters, - attention_kernel_size) - - # decoder_rnn takes prenet's output and attention_rnn's input - # as input - self.decoder_rnn = nn.LSTMCell(d_attention_rnn + d_encoder, - d_decoder_rnn) - self.linear_projection = nn.Linear(d_decoder_rnn + d_encoder, - d_mels * reduction_factor) - - self.use_stop_token = use_stop_token - if use_stop_token: - self.stop_layer = nn.Linear(d_decoder_rnn + d_encoder, 1) - - # states - temporary attributes - self.attention_hidden = None - self.attention_cell = None - - self.decoder_hidden = None - self.decoder_cell = None - - self.attention_weights = None - self.attention_weights_cum = None - self.attention_context = None - - self.key = None - self.mask = None - self.processed_key = None - - def _initialize_decoder_states(self, key): - """init states be used in decoder - """ - batch_size, encoder_steps, _ = key.shape - - self.attention_hidden = paddle.zeros( - shape=[batch_size, self.d_attention_rnn], dtype=key.dtype) - self.attention_cell = paddle.zeros( - shape=[batch_size, self.d_attention_rnn], dtype=key.dtype) - - self.decoder_hidden = paddle.zeros( - shape=[batch_size, self.d_decoder_rnn], dtype=key.dtype) - self.decoder_cell = paddle.zeros( - shape=[batch_size, self.d_decoder_rnn], dtype=key.dtype) - - self.attention_weights = paddle.zeros( - shape=[batch_size, encoder_steps], dtype=key.dtype) - self.attention_weights_cum = paddle.zeros( - shape=[batch_size, encoder_steps], dtype=key.dtype) - self.attention_context = paddle.zeros( - shape=[batch_size, self.d_encoder], dtype=key.dtype) - - self.key = key # [B, T, C] - # pre-compute projected keys to improve efficiency - self.processed_key = self.attention_layer.key_layer(key) # [B, T, C] - - def _decode(self, query): - """decode one time step - """ - cell_input = paddle.concat([query, self.attention_context], axis=-1) - - # The first lstm layer (or spec encoder lstm) - _, (self.attention_hidden, self.attention_cell) = self.attention_rnn( - cell_input, (self.attention_hidden, self.attention_cell)) - self.attention_hidden = F.dropout( - self.attention_hidden, - self.p_attention_dropout, - training=self.training) - - # Loaction sensitive attention - attention_weights_cat = paddle.stack( - [self.attention_weights, self.attention_weights_cum], axis=-1) - self.attention_context, self.attention_weights = self.attention_layer( - self.attention_hidden, self.processed_key, self.key, - attention_weights_cat, self.mask) - self.attention_weights_cum += self.attention_weights - - # The second lstm layer 
(or spec decoder lstm) - decoder_input = paddle.concat( - [self.attention_hidden, self.attention_context], axis=-1) - _, (self.decoder_hidden, self.decoder_cell) = self.decoder_rnn( - decoder_input, (self.decoder_hidden, self.decoder_cell)) - self.decoder_hidden = F.dropout( - self.decoder_hidden, - p=self.p_decoder_dropout, - training=self.training) - - # decode output one step - decoder_hidden_attention_context = paddle.concat( - [self.decoder_hidden, self.attention_context], axis=-1) - decoder_output = self.linear_projection( - decoder_hidden_attention_context) - if self.use_stop_token: - stop_logit = self.stop_layer(decoder_hidden_attention_context) - return decoder_output, self.attention_weights, stop_logit - return decoder_output, self.attention_weights - - def forward(self, keys, querys, mask): - """Calculate forward propagation of tacotron2 decoder. - - Parameters - ---------- - keys: Tensor[shape=(B, T_key, C)] - Batch of the sequences of padded output from encoder. - - querys: Tensor[shape(B, T_query, C)] - Batch of the sequences of padded mel spectrogram. - - mask: Tensor - Mask generated with text length. Shape should be (B, T_key, 1). - - Returns - ------- - mel_output: Tensor [shape=(B, T_query, C)] - Output sequence of features. - - alignments: Tensor [shape=(B, T_query, T_key)] - Attention weights. - """ - self._initialize_decoder_states(keys) - self.mask = mask - - querys = paddle.reshape( - querys, - [querys.shape[0], querys.shape[1] // self.reduction_factor, -1]) - start_step = paddle.zeros( - shape=[querys.shape[0], 1, querys.shape[-1]], dtype=querys.dtype) - querys = paddle.concat([start_step, querys], axis=1) - - querys = self.prenet(querys) - - mel_outputs, alignments = [], [] - stop_logits = [] - # Ignore the last time step - while len(mel_outputs) < querys.shape[1] - 1: - query = querys[:, len(mel_outputs), :] - if self.use_stop_token: - mel_output, attention_weights, stop_logit = self._decode(query) - else: - mel_output, attention_weights = self._decode(query) - mel_outputs.append(mel_output) - alignments.append(attention_weights) - if self.use_stop_token: - stop_logits.append(stop_logit) - - alignments = paddle.stack(alignments, axis=1) - mel_outputs = paddle.stack(mel_outputs, axis=1) - if self.use_stop_token: - stop_logits = paddle.concat(stop_logits, axis=1) - return mel_outputs, alignments, stop_logits - return mel_outputs, alignments - - def infer(self, key, max_decoder_steps=1000): - """Calculate forward propagation of tacotron2 decoder. - - Parameters - ---------- - keys: Tensor [shape=(B, T_key, C)] - Batch of the sequences of padded output from encoder. - - max_decoder_steps: int, optional - Number of max step when synthesize. Defaults to 1000. - - Returns - ------- - mel_output: Tensor [shape=(B, T_mel, C)] - Output sequence of features. - - alignments: Tensor [shape=(B, T_mel, T_key)] - Attention weights. 
- - """ - self._initialize_decoder_states(key) - self.mask = None # mask is not needed for single instance inference - encoder_steps = key.shape[1] - - # [B, C] - start_step = paddle.zeros( - shape=[key.shape[0], self.d_mels * self.reduction_factor], - dtype=key.dtype) - query = start_step # [B, C] - first_hit_end = None - - mel_outputs, alignments = [], [] - stop_logits = [] - for i in trange(max_decoder_steps): - query = self.prenet(query) - if self.use_stop_token: - mel_output, alignment, stop_logit = self._decode(query) - else: - mel_output, alignment = self._decode(query) - - mel_outputs.append(mel_output) - alignments.append(alignment) # (B=1, T) - if self.use_stop_token: - stop_logits.append(stop_logit) - - if self.use_stop_token: - if F.sigmoid(stop_logit) > 0.5: - print("hit stop condition!") - break - else: - if int(paddle.argmax(alignment[0])) == encoder_steps - 1: - if first_hit_end is None: - first_hit_end = i - elif i > (first_hit_end + 20): - print("content exhausted!") - break - if len(mel_outputs) == max_decoder_steps: - print("Warning! Reached max decoder steps!!!") - break - - query = mel_output - - alignments = paddle.stack(alignments, axis=1) - mel_outputs = paddle.stack(mel_outputs, axis=1) - if self.use_stop_token: - stop_logits = paddle.concat(stop_logits, axis=1) - return mel_outputs, alignments, stop_logits - return mel_outputs, alignments - - -class Tacotron2(nn.Layer): - """Tacotron2 model for end-to-end text-to-speech (E2E-TTS). - - This is a model of Spectrogram prediction network in Tacotron2 described - in `Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram - Predictions `_, - which converts the sequence of characters - into the sequence of mel spectrogram. - - Parameters - ---------- - vocab_size : int - Vocabulary size of phons of the model. - - n_tones: int - Vocabulary size of tones of the model. Defaults to None. If provided, - the model has an extra tone embedding. - - d_mels: int - Number of mel bands. - - d_encoder: int - Hidden size in encoder module. - - encoder_conv_layers: int - Number of conv layers in encoder. - - encoder_kernel_size: int - Kernel size of conv layers in encoder. - - d_prenet: int - Hidden size in decoder prenet. - - d_attention_rnn: int - Attention rnn layer hidden size in decoder. - - d_decoder_rnn: int - Decoder rnn layer hidden size in decoder. - - attention_filters: int - Filter size of the conv layer in location sensitive attention. - - attention_kernel_size: int - Kernel size of the conv layer in location sensitive attention. - - d_attention: int - Hidden size of the linear layer in location sensitive attention. - - d_postnet: int - Hidden size of postnet. - - postnet_kernel_size: int - Kernel size of the conv layer in postnet. - - postnet_conv_layers: int - Number of conv layers in postnet. - - reduction_factor: int - Reduction factor of tacotron2. - - p_encoder_dropout: float - Droput probability in encoder. - - p_prenet_dropout: float - Droput probability in decoder prenet. - - p_attention_dropout: float - Droput probability in location sensitive attention. - - p_decoder_dropout: float - Droput probability in decoder. - - p_postnet_dropout: float - Droput probability in postnet. - - d_global_condition: int - Feature size of global condition. Defaults to None. If provided, The - model assumes a global condition that is concatenated to the encoder - outputs. 
- - """ - - def __init__(self, - vocab_size, - n_tones=None, - d_mels: int=80, - d_encoder: int=512, - encoder_conv_layers: int=3, - encoder_kernel_size: int=5, - d_prenet: int=256, - d_attention_rnn: int=1024, - d_decoder_rnn: int=1024, - attention_filters: int=32, - attention_kernel_size: int=31, - d_attention: int=128, - d_postnet: int=512, - postnet_kernel_size: int=5, - postnet_conv_layers: int=5, - reduction_factor: int=1, - p_encoder_dropout: float=0.5, - p_prenet_dropout: float=0.5, - p_attention_dropout: float=0.1, - p_decoder_dropout: float=0.1, - p_postnet_dropout: float=0.5, - d_global_condition=None, - use_stop_token=False): - super().__init__() - - std = math.sqrt(2.0 / (vocab_size + d_encoder)) - val = math.sqrt(3.0) * std # uniform bounds for std - self.embedding = nn.Embedding( - vocab_size, d_encoder, weight_attr=I.Uniform(-val, val)) - if n_tones: - self.embedding_tones = nn.Embedding( - n_tones, - d_encoder, - padding_idx=0, - weight_attr=I.Uniform(-0.1 * val, 0.1 * val)) - self.toned = n_tones is not None - - self.encoder = Tacotron2Encoder(d_encoder, encoder_conv_layers, - encoder_kernel_size, p_encoder_dropout) - - # input augmentation scheme: concat global condition to the encoder output - if d_global_condition is not None: - d_encoder += d_global_condition - self.decoder = Tacotron2Decoder( - d_mels, - reduction_factor, - d_encoder, - d_prenet, - d_attention_rnn, - d_decoder_rnn, - d_attention, - attention_filters, - attention_kernel_size, - p_prenet_dropout, - p_attention_dropout, - p_decoder_dropout, - use_stop_token=use_stop_token) - self.postnet = DecoderPostNet( - d_mels=d_mels * reduction_factor, - d_hidden=d_postnet, - kernel_size=postnet_kernel_size, - num_layers=postnet_conv_layers, - dropout=p_postnet_dropout) - - def forward(self, - text_inputs, - text_lens, - mels, - output_lens=None, - tones=None, - global_condition=None): - """Calculate forward propagation of tacotron2. - - Parameters - ---------- - text_inputs: Tensor [shape=(B, T_text)] - Batch of the sequencees of padded character ids. - - text_lens: Tensor [shape=(B,)] - Batch of lengths of each text input batch. - - mels: Tensor [shape(B, T_mel, C)] - Batch of the sequences of padded mel spectrogram. - - output_lens: Tensor [shape=(B,)], optional - Batch of lengths of each mels batch. Defaults to None. - - tones: Tensor [shape=(B, T_text)] - Batch of sequences of padded tone ids. - - global_condition: Tensor [shape(B, C)] - Batch of global conditions. Defaults to None. If the - `d_global_condition` of the model is not None, this input should be - provided. - - use_stop_token: bool - Whether to include a binary classifier to predict the stop token. - Defaults to False. 
- - Returns - ------- - outputs : Dict[str, Tensor] - - mel_output: output sequence of features (B, T_mel, C); - - mel_outputs_postnet: output sequence of features after postnet (B, T_mel, C); - - alignments: attention weights (B, T_mel, T_text); - - stop_logits: output sequence of stop logits (B, T_mel) - """ - # input of embedding must be int64 - text_inputs = paddle.cast(text_inputs, 'int64') - embedded_inputs = self.embedding(text_inputs) - if self.toned: - embedded_inputs += self.embedding_tones(tones) - - encoder_outputs = self.encoder(embedded_inputs, text_lens) - - if global_condition is not None: - global_condition = global_condition.unsqueeze(1) - global_condition = paddle.expand(global_condition, - [-1, encoder_outputs.shape[1], -1]) - encoder_outputs = paddle.concat([encoder_outputs, global_condition], - -1) - - # [B, T_enc, 1] - mask = sequence_mask( - text_lens, dtype=encoder_outputs.dtype).unsqueeze(-1) - if self.decoder.use_stop_token: - mel_outputs, alignments, stop_logits = self.decoder( - encoder_outputs, mels, mask=mask) - else: - mel_outputs, alignments = self.decoder( - encoder_outputs, mels, mask=mask) - mel_outputs_postnet = self.postnet(mel_outputs) - mel_outputs_postnet = mel_outputs + mel_outputs_postnet - - if output_lens is not None: - # [B, T_dec, 1] - mask = sequence_mask(output_lens).unsqueeze(-1) - mel_outputs = mel_outputs * mask # [B, T, C] - mel_outputs_postnet = mel_outputs_postnet * mask # [B, T, C] - outputs = { - "mel_output": mel_outputs, - "mel_outputs_postnet": mel_outputs_postnet, - "alignments": alignments - } - if self.decoder.use_stop_token: - outputs["stop_logits"] = stop_logits - - return outputs - - @paddle.no_grad() - def infer(self, - text_inputs, - max_decoder_steps=1000, - tones=None, - global_condition=None): - """Generate the mel sepctrogram of features given the sequences of character ids. - - Parameters - ---------- - text_inputs: Tensor [shape=(B, T_text)] - Batch of the sequencees of padded character ids. - - max_decoder_steps: int, optional - Number of max step when synthesize. Defaults to 1000. - - Returns - ------- - outputs : Dict[str, Tensor] - - mel_output: output sequence of sepctrogram (B, T_mel, C); - - mel_outputs_postnet: output sequence of sepctrogram after postnet (B, T_mel, C); - - stop_logits: output sequence of stop logits (B, T_mel); - - alignments: attention weights (B, T_mel, T_text). This key is only - present when `use_stop_token` is True. 
- """ - # input of embedding must be int64 - text_inputs = paddle.cast(text_inputs, 'int64') - embedded_inputs = self.embedding(text_inputs) - if self.toned: - embedded_inputs += self.embedding_tones(tones) - encoder_outputs = self.encoder(embedded_inputs) - - if global_condition is not None: - global_condition = global_condition.unsqueeze(1) - global_condition = paddle.expand(global_condition, - [-1, encoder_outputs.shape[1], -1]) - encoder_outputs = paddle.concat([encoder_outputs, global_condition], - -1) - if self.decoder.use_stop_token: - mel_outputs, alignments, stop_logits = self.decoder.infer( - encoder_outputs, max_decoder_steps=max_decoder_steps) - else: - mel_outputs, alignments = self.decoder.infer( - encoder_outputs, max_decoder_steps=max_decoder_steps) - - mel_outputs_postnet = self.postnet(mel_outputs) - mel_outputs_postnet = mel_outputs + mel_outputs_postnet - - outputs = { - "mel_output": mel_outputs, - "mel_outputs_postnet": mel_outputs_postnet, - "alignments": alignments - } - if self.decoder.use_stop_token: - outputs["stop_logits"] = stop_logits - - return outputs - - @classmethod - def from_pretrained(cls, config, checkpoint_path): - """Build a Tacotron2 model from a pretrained model. - - Parameters - ---------- - config: yacs.config.CfgNode - model configs - - checkpoint_path: Path or str - the path of pretrained model checkpoint, without extension name - - Returns - ------- - ConditionalWaveFlow - The model built from pretrained result. - """ - model = cls(vocab_size=config.model.vocab_size, - n_tones=config.model.n_tones, - d_mels=config.data.n_mels, - d_encoder=config.model.d_encoder, - encoder_conv_layers=config.model.encoder_conv_layers, - encoder_kernel_size=config.model.encoder_kernel_size, - d_prenet=config.model.d_prenet, - d_attention_rnn=config.model.d_attention_rnn, - d_decoder_rnn=config.model.d_decoder_rnn, - attention_filters=config.model.attention_filters, - attention_kernel_size=config.model.attention_kernel_size, - d_attention=config.model.d_attention, - d_postnet=config.model.d_postnet, - postnet_kernel_size=config.model.postnet_kernel_size, - postnet_conv_layers=config.model.postnet_conv_layers, - reduction_factor=config.model.reduction_factor, - p_encoder_dropout=config.model.p_encoder_dropout, - p_prenet_dropout=config.model.p_prenet_dropout, - p_attention_dropout=config.model.p_attention_dropout, - p_decoder_dropout=config.model.p_decoder_dropout, - p_postnet_dropout=config.model.p_postnet_dropout, - d_global_condition=config.model.d_global_condition, - use_stop_token=config.model.use_stop_token) - checkpoint.load_parameters(model, checkpoint_path=checkpoint_path) - return model - - -class Tacotron2Loss(nn.Layer): - """ Tacotron2 Loss module - """ - - def __init__(self, - use_stop_token_loss=True, - use_guided_attention_loss=False, - sigma=0.2): - """Tacotron 2 Criterion. - - Args: - use_stop_token_loss (bool, optional): Whether to use a loss for stop token prediction. Defaults to True. - use_guided_attention_loss (bool, optional): Whether to use a loss for attention weights. Defaults to False. - sigma (float, optional): Hyper-parameter sigma for guided attention loss. Defaults to 0.2. 
- """ - super().__init__() - self.spec_criterion = nn.MSELoss() - self.use_stop_token_loss = use_stop_token_loss - self.use_guided_attention_loss = use_guided_attention_loss - self.attn_criterion = guided_attention_loss - self.stop_criterion = nn.BCEWithLogitsLoss() - self.sigma = sigma - - def forward(self, - mel_outputs, - mel_outputs_postnet, - mel_targets, - attention_weights=None, - slens=None, - plens=None, - stop_logits=None): - """Calculate tacotron2 loss. - - Parameters - ---------- - mel_outputs: Tensor [shape=(B, T_mel, C)] - Output mel spectrogram sequence. - - mel_outputs_postnet: Tensor [shape(B, T_mel, C)] - Output mel spectrogram sequence after postnet. - - mel_targets: Tensor [shape=(B, T_mel, C)] - Target mel spectrogram sequence. - - attention_weights: Tensor [shape=(B, T_mel, T_enc)] - Attention weights. This should be provided when - `use_guided_attention_loss` is True. - - slens: Tensor [shape=(B,)] - Number of frames of mel spectrograms. This should be provided when - `use_guided_attention_loss` is True. - - plens: Tensor [shape=(B, )] - Number of text or phone ids of each utterance. This should be - provided when `use_guided_attention_loss` is True. - - stop_logits: Tensor [shape=(B, T_mel)] - Stop logits of each mel spectrogram frame. This should be provided - when `use_stop_token_loss` is True. - - Returns - ------- - losses : Dict[str, Tensor] - - loss: the sum of the other three losses; - - mel_loss: MSE loss compute by mel_targets and mel_outputs; - - post_mel_loss: MSE loss compute by mel_targets and mel_outputs_postnet; - - guided_attn_loss: Guided attention loss for attention weights; - - stop_loss: Binary cross entropy loss for stop token prediction. - """ - mel_loss = self.spec_criterion(mel_outputs, mel_targets) - post_mel_loss = self.spec_criterion(mel_outputs_postnet, mel_targets) - total_loss = mel_loss + post_mel_loss - if self.use_guided_attention_loss: - gal_loss = self.attn_criterion(attention_weights, slens, plens, - self.sigma) - total_loss += gal_loss - if self.use_stop_token_loss: - T_dec = mel_targets.shape[1] - stop_labels = F.one_hot(slens - 1, num_classes=T_dec) - stop_token_loss = self.stop_criterion(stop_logits, stop_labels) - total_loss += stop_token_loss - - losses = { - "loss": total_loss, - "mel_loss": mel_loss, - "post_mel_loss": post_mel_loss - } - if self.use_guided_attention_loss: - losses["guided_attn_loss"] = gal_loss - if self.use_stop_token_loss: - losses["stop_loss"] = stop_token_loss - return losses diff --git a/paddlespeech/t2s/utils/__init__.py b/paddlespeech/t2s/utils/__init__.py index ce3a4ef6..520c81a2 100644 --- a/paddlespeech/t2s/utils/__init__.py +++ b/paddlespeech/t2s/utils/__init__.py @@ -16,3 +16,7 @@ from . import display from . import layer_tools from . import mp_tools from . 
import scheduler + + +def str2bool(str): + return True if str.lower() == 'true' else False diff --git a/paddlespeech/vector/exps/ge2e/speaker_verification_dataset.py b/paddlespeech/vector/exps/ge2e/speaker_verification_dataset.py index 194eb7f2..ae6f6ad9 100644 --- a/paddlespeech/vector/exps/ge2e/speaker_verification_dataset.py +++ b/paddlespeech/vector/exps/ge2e/speaker_verification_dataset.py @@ -123,9 +123,3 @@ class Collate(object): frame_clips = [self.random_crop(mel) for mel in examples] batced_clips = np.stack(frame_clips) return batced_clips - - -if __name__ == "__main__": - mydataset = MultiSpeakerMelDataset( - Path("/home/chenfeiyu/datasets/SV2TTS/encoder")) - print(mydataset.get_example_by_index(0, 10)) diff --git a/utils/compute_statistics.py b/utils/compute_statistics.py index e8021c19..5b2a5606 100755 --- a/utils/compute_statistics.py +++ b/utils/compute_statistics.py @@ -22,6 +22,7 @@ from sklearn.preprocessing import StandardScaler from tqdm import tqdm from paddlespeech.t2s.datasets.data_table import DataTable +from paddlespeech.t2s.utils import str2bool def main(): @@ -41,9 +42,6 @@ def main(): help="path to save statistics. if not provided, " "stats will be saved in the above root directory with name stats.npy") - def str2bool(str): - return True if str.lower() == 'true' else False - parser.add_argument( "--use-relative-path", type=str2bool, From 0747600c955636bd9a14000d0eadc2be9b47667b Mon Sep 17 00:00:00 2001 From: TianYuan Date: Tue, 8 Feb 2022 20:20:38 +0800 Subject: [PATCH 20/22] [TTS]add ljspeech new tacotron2 (#1416) * add ljspeech new tacotron2, test=tts * update ljspeech waveflow's synthesize * add config, test=doc Co-authored-by: Hui Zhang --- examples/ljspeech/tts0/README.md | 89 ---------------------- examples/ljspeech/tts0/conf/default.yaml | 87 +++++++++++++++++++++ examples/ljspeech/tts0/local/preprocess.sh | 64 ++++++++++++++-- examples/ljspeech/tts0/local/synthesize.sh | 25 ++++-- examples/ljspeech/tts0/local/train.sh | 11 ++- examples/ljspeech/tts0/path.sh | 2 +- examples/ljspeech/tts0/run.sh | 20 +++-- examples/ljspeech/voc0/run.sh | 4 +- paddlespeech/t2s/exps/synthesize.py | 3 +- paddlespeech/t2s/exps/synthesize_e2e.py | 2 +- 10 files changed, 189 insertions(+), 118 deletions(-) delete mode 100644 examples/ljspeech/tts0/README.md create mode 100644 examples/ljspeech/tts0/conf/default.yaml diff --git a/examples/ljspeech/tts0/README.md b/examples/ljspeech/tts0/README.md deleted file mode 100644 index baaec818..00000000 --- a/examples/ljspeech/tts0/README.md +++ /dev/null @@ -1,89 +0,0 @@ -# Tacotron2 with LJSpeech -PaddlePaddle dynamic graph implementation of Tacotron2, a neural network architecture for speech synthesis directly from the text. The implementation is based on [Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions](https://arxiv.org/abs/1712.05884). - -## Dataset -We experiment with the LJSpeech dataset. Download and unzip [LJSpeech](https://keithito.com/LJ-Speech-Dataset/). - -```bash -wget https://data.keithito.com/data/speech/LJSpeech-1.1.tar.bz2 -tar xjvf LJSpeech-1.1.tar.bz2 -``` -## Get Started -Assume the path to the dataset is `~/datasets/LJSpeech-1.1`. -Run the command below to -1. **source path**. -2. preprocess the dataset. -3. train the model. -4. synthesize mels. -```bash -./run.sh -``` -You can choose a range of stages you want to run, or set `stage` equal to `stop-stage` to use only one stage, for example, running the following command will only preprocess the dataset. 
-```bash -./run.sh --stage 0 --stop-stage 0 -``` -### Data Preprocessing -```bash -./local/preprocess.sh ${conf_path} -``` -### Model Training -`./local/train.sh` calls `${BIN_DIR}/train.py`. -```bash -CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} -``` -Here's the complete help message. -```text -usage: train.py [-h] [--config FILE] [--data DATA_DIR] [--output OUTPUT_DIR] - [--checkpoint_path CHECKPOINT_PATH] [--ngpu NGPU] [--opts ...] - -optional arguments: - -h, --help show this help message and exit - --config FILE path of the config file to overwrite to default config - with. - --data DATA_DIR path to the dataset. - --output OUTPUT_DIR path to save checkpoint and logs. - --checkpoint_path CHECKPOINT_PATH - path of the checkpoint to load - --ngpu NGPU if ngpu == 0, use cpu. - --opts ... options to overwrite --config file and the default - config, passing in KEY VALUE pairs -``` - -If you want to train on CPU, just set `--ngpu=0`. -If you want to train on multiple GPUs, just set `--ngpu` as the num of GPU. -By default, training will be resumed from the latest checkpoint in `--output`, if you want to start a new training, please use a new `${OUTPUTPATH}` with no checkpoint. -And if you want to resume from another existing model, you should set `checkpoint_path` to be the checkpoint path you want to load. -**Note: The checkpoint path cannot contain the file extension.** - -### Synthesizing -`./local/synthesize.sh` calls `${BIN_DIR}/synthesize.py`, which synthesize **mels** from text_list here. -```bash -CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${train_output_path} ${ckpt_name} -``` -```text -usage: synthesize.py [-h] [--config FILE] [--checkpoint_path CHECKPOINT_PATH] - [--input INPUT] [--output OUTPUT] [--ngpu NGPU] - [--opts ...] [-v] - -generate mel spectrogram with TransformerTTS. - -optional arguments: - -h, --help show this help message and exit - --config FILE extra config to overwrite the default config - --checkpoint_path CHECKPOINT_PATH - path of the checkpoint to load. - --input INPUT path of the text sentences - --output OUTPUT path to save outputs - --ngpu NGPU if ngpu == 0, use cpu. - --opts ... options to overwrite --config file and the default - config, passing in KEY VALUE pairs - -v, --verbose print msg -``` -**Ps.** You can use [waveflow](https://github.com/PaddlePaddle/PaddleSpeech/tree/develop/examples/ljspeech/voc0) as the neural vocoder to synthesize mels to wavs. (Please refer to `synthesize.sh` in our LJSpeech waveflow example) - -## Pretrained Models -Pretrained Models can be downloaded from the links below. We provide 2 models with different configurations. - -1. This model uses a binary classifier to predict the stop token. [tacotron2_ljspeech_ckpt_0.3.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/tacotron2/tacotron2_ljspeech_ckpt_0.3.zip) - -2. This model does not have a stop token predictor. It uses the attention peak position to decide whether all the contents have been uttered. Also, guided attention loss is used to speed up training. 
This model is trained with `configs/alternative.yaml`.[tacotron2_ljspeech_ckpt_0.3_alternative.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/tacotron2/tacotron2_ljspeech_ckpt_0.3_alternative.zip)
diff --git a/examples/ljspeech/tts0/conf/default.yaml b/examples/ljspeech/tts0/conf/default.yaml
new file mode 100644
index 00000000..d76ebd43
--- /dev/null
+++ b/examples/ljspeech/tts0/conf/default.yaml
@@ -0,0 +1,87 @@
+# This configuration is for Paddle to train Tacotron 2. Compared to the
+# original paper, this configuration additionally uses the guided attention
+# loss to accelerate the learning of the diagonal attention. It requires
+# only a single GPU with 12 GB memory and it takes ~1 day to finish the
+# training on Titan V.
+
+###########################################################
+#                FEATURE EXTRACTION SETTING               #
+###########################################################
+fs: 22050          # Sampling rate.
+n_fft: 1024        # FFT size (samples).
+n_shift: 256       # Hop size (samples). 11.6ms
+win_length: null   # Window length (samples).
+                   # If set to null, it will be the same as fft_size.
+window: "hann"     # Window function.
+n_mels: 80         # Number of mel basis.
+fmin: 80           # Minimum freq in mel basis calculation. (Hz)
+fmax: 7600         # Maximum frequency in mel basis calculation. (Hz)
+
+###########################################################
+#                       DATA SETTING                      #
+###########################################################
+batch_size: 64
+num_workers: 2
+
+###########################################################
+#                       MODEL SETTING                     #
+###########################################################
+model: # keyword arguments for the selected model
+    embed_dim: 512            # char or phn embedding dimension
+    elayers: 1                # number of blstm layers in encoder
+    eunits: 512               # number of blstm units
+    econv_layers: 3           # number of convolutional layers in encoder
+    econv_chans: 512          # number of channels in convolutional layer
+    econv_filts: 5            # filter size of convolutional layer
+    atype: location           # attention function type
+    adim: 512                 # attention dimension
+    aconv_chans: 32           # number of channels in convolutional layer of attention
+    aconv_filts: 15           # filter size of convolutional layer of attention
+    cumulate_att_w: True      # whether to cumulate attention weight
+    dlayers: 2                # number of lstm layers in decoder
+    dunits: 1024              # number of lstm units in decoder
+    prenet_layers: 2          # number of layers in prenet
+    prenet_units: 256         # number of units in prenet
+    postnet_layers: 5         # number of layers in postnet
+    postnet_chans: 512        # number of channels in postnet
+    postnet_filts: 5          # filter size of postnet layer
+    output_activation: null   # activation function for the final output
+    use_batch_norm: True      # whether to use batch normalization in encoder
+    use_concate: True         # whether to concatenate encoder embedding with decoder outputs
+    use_residual: False       # whether to use residual connection in encoder
+    dropout_rate: 0.5         # dropout rate
+    zoneout_rate: 0.1         # zoneout rate
+    reduction_factor: 1       # reduction factor
+    spk_embed_dim: null       # speaker embedding dimension
+
+
+###########################################################
+#                     UPDATER SETTING                     #
+###########################################################
+updater:
+    use_masking: True             # whether to apply masking for padded part in loss calculation
+    bce_pos_weight: 5.0           # weight of positive sample in binary cross entropy calculation
+    use_guided_attn_loss: True    # whether to use guided attention loss
+    guided_attn_loss_sigma: 0.4   # sigma of guided attention loss
+    guided_attn_loss_lambda: 1.0  # strength of guided attention loss
+
+
+##########################################################
+#                   OPTIMIZER SETTING                    #
+##########################################################
+optimizer:
+    optim: adam               # optimizer type
+    learning_rate: 1.0e-03    # learning rate
+    epsilon: 1.0e-06          # epsilon
+    weight_decay: 0.0         # weight decay coefficient

+###########################################################
+#                    TRAINING SETTING                     #
+###########################################################
+max_epoch: 300
+num_snapshots: 5
+
+###########################################################
+#                       OTHER SETTING                     #
+###########################################################
+seed: 42
diff --git a/examples/ljspeech/tts0/local/preprocess.sh b/examples/ljspeech/tts0/local/preprocess.sh
index c39a3172..e0e4bc7a 100755
--- a/examples/ljspeech/tts0/local/preprocess.sh
+++ b/examples/ljspeech/tts0/local/preprocess.sh
@@ -1,8 +1,62 @@
 #!/bin/bash
 
-preprocess_path=$1
+stage=0
+stop_stage=100
 
-python3 ${BIN_DIR}/preprocess.py \
-    --input=~/datasets/LJSpeech-1.1 \
-    --output=${preprocess_path} \
-    -v \
\ No newline at end of file
+config_path=$1
+
+if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
+    # get durations from MFA's result
+    echo "Generate durations.txt from MFA results ..."
+    python3 ${MAIN_ROOT}/utils/gen_duration_from_textgrid.py \
+        --inputdir=./ljspeech_alignment \
+        --output=durations.txt \
+        --config=${config_path}
+fi
+
+if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
+    # extract features
+    echo "Extract features ..."
+    python3 ${BIN_DIR}/preprocess.py \
+        --dataset=ljspeech \
+        --rootdir=~/datasets/LJSpeech-1.1/ \
+        --dumpdir=dump \
+        --dur-file=durations.txt \
+        --config=${config_path} \
+        --num-cpu=20 \
+        --cut-sil=True
+fi
+
+if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
+    # get features' stats(mean and std)
+    echo "Get features' stats ..."
+    python3 ${MAIN_ROOT}/utils/compute_statistics.py \
+        --metadata=dump/train/raw/metadata.jsonl \
+        --field-name="speech"
+
+fi
+
+if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
+    # normalize and convert phone to id, dev and test should use train's stats
+    echo "Normalize ..."
+ python3 ${BIN_DIR}/normalize.py \ + --metadata=dump/train/raw/metadata.jsonl \ + --dumpdir=dump/train/norm \ + --speech-stats=dump/train/speech_stats.npy \ + --phones-dict=dump/phone_id_map.txt \ + --speaker-dict=dump/speaker_id_map.txt + + python3 ${BIN_DIR}/normalize.py \ + --metadata=dump/dev/raw/metadata.jsonl \ + --dumpdir=dump/dev/norm \ + --speech-stats=dump/train/speech_stats.npy \ + --phones-dict=dump/phone_id_map.txt \ + --speaker-dict=dump/speaker_id_map.txt + + python3 ${BIN_DIR}/normalize.py \ + --metadata=dump/test/raw/metadata.jsonl \ + --dumpdir=dump/test/norm \ + --speech-stats=dump/train/speech_stats.npy \ + --phones-dict=dump/phone_id_map.txt \ + --speaker-dict=dump/speaker_id_map.txt +fi diff --git a/examples/ljspeech/tts0/local/synthesize.sh b/examples/ljspeech/tts0/local/synthesize.sh index 3f5f9c06..0d005820 100755 --- a/examples/ljspeech/tts0/local/synthesize.sh +++ b/examples/ljspeech/tts0/local/synthesize.sh @@ -1,11 +1,20 @@ #!/bin/bash -train_output_path=$1 -ckpt_name=$2 +config_path=$1 +train_output_path=$2 +ckpt_name=$3 -python3 ${BIN_DIR}/synthesize.py \ - --config=${train_output_path}/config.yaml \ - --checkpoint_path=${train_output_path}/checkpoints/${ckpt_name} \ - --input=${BIN_DIR}/../sentences_en.txt \ - --output=${train_output_path}/test \ - --ngpu=1 +FLAGS_allocator_strategy=naive_best_fit \ +FLAGS_fraction_of_gpu_memory_to_use=0.01 \ +python3 ${BIN_DIR}/../synthesize.py \ + --am=tacotron2_ljspeech \ + --am_config=${config_path} \ + --am_ckpt=${train_output_path}/checkpoints/${ckpt_name} \ + --am_stat=dump/train/speech_stats.npy \ + --voc=pwgan_ljspeech \ + --voc_config=pwg_ljspeech_ckpt_0.5/pwg_default.yaml \ + --voc_ckpt=pwg_ljspeech_ckpt_0.5/pwg_snapshot_iter_400000.pdz \ + --voc_stat=pwg_ljspeech_ckpt_0.5/pwg_stats.npy \ + --test_metadata=dump/test/norm/metadata.jsonl \ + --output_dir=${train_output_path}/test \ + --phones_dict=dump/phone_id_map.txt diff --git a/examples/ljspeech/tts0/local/train.sh b/examples/ljspeech/tts0/local/train.sh index a94f955a..f90db915 100755 --- a/examples/ljspeech/tts0/local/train.sh +++ b/examples/ljspeech/tts0/local/train.sh @@ -1,9 +1,12 @@ #!/bin/bash -preprocess_path=$1 +config_path=$1 train_output_path=$2 python3 ${BIN_DIR}/train.py \ - --data=${preprocess_path} \ - --output=${train_output_path} \ - --ngpu=1 \ \ No newline at end of file + --train-metadata=dump/train/norm/metadata.jsonl \ + --dev-metadata=dump/dev/norm/metadata.jsonl \ + --config=${config_path} \ + --output-dir=${train_output_path} \ + --ngpu=1 \ + --phones-dict=dump/phone_id_map.txt \ No newline at end of file diff --git a/examples/ljspeech/tts0/path.sh b/examples/ljspeech/tts0/path.sh index a37cd21e..9cdbe256 100755 --- a/examples/ljspeech/tts0/path.sh +++ b/examples/ljspeech/tts0/path.sh @@ -9,5 +9,5 @@ export PYTHONDONTWRITEBYTECODE=1 export PYTHONIOENCODING=UTF-8 export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH} -MODEL=tacotron2 +MODEL=new_tacotron2 export BIN_DIR=${MAIN_ROOT}/paddlespeech/t2s/exps/${MODEL} diff --git a/examples/ljspeech/tts0/run.sh b/examples/ljspeech/tts0/run.sh index 47c76c3d..c64fa888 100755 --- a/examples/ljspeech/tts0/run.sh +++ b/examples/ljspeech/tts0/run.sh @@ -3,13 +3,13 @@ set -e source path.sh -gpus=0 +gpus=0,1 stage=0 stop_stage=100 -preprocess_path=preprocessed_ljspeech -train_output_path=output -ckpt_name=step-35000 +conf_path=conf/default.yaml +train_output_path=exp/default +ckpt_name=snapshot_iter_201.pdz # with the following command, you can choose the stage range you want to run # such as `./run.sh --stage 0 
--stop-stage 0` @@ -18,16 +18,20 @@ source ${MAIN_ROOT}/utils/parse_options.sh || exit 1 if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then # prepare data - ./local/preprocess.sh ${preprocess_path} || exit -1 + ./local/preprocess.sh ${conf_path} || exit -1 fi if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then # train model, all `ckpt` under `train_output_path/checkpoints/` dir - CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${preprocess_path} ${train_output_path} || exit -1 + CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} || exit -1 fi if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then - # train model, all `ckpt` under `train_output_path/checkpoints/` dir - CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${train_output_path} ${ckpt_name} || exit -1 + # synthesize, vocoder is pwgan + CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1 fi +if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then + # synthesize_e2e, vocoder is pwgan + CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize_e2e.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1 +fi diff --git a/examples/ljspeech/voc0/run.sh b/examples/ljspeech/voc0/run.sh index ddd82cb4..b040c0b2 100755 --- a/examples/ljspeech/voc0/run.sh +++ b/examples/ljspeech/voc0/run.sh @@ -10,7 +10,7 @@ stop_stage=100 preprocess_path=preprocessed_ljspeech train_output_path=output # mel generated by Tacotron2 -input_mel_path=../tts0/output/test +input_mel_path=${preprocess_path}/mel_test ckpt_name=step-10000 # with the following command, you can choose the stage range you want to run @@ -28,5 +28,7 @@ if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then fi if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then + mkdir -p ${preprocess_path}/mel_test + cp ${preprocess_path}/mel/LJ050-001*.npy ${preprocess_path}/mel_test/ CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${input_mel_path} ${train_output_path} ${ckpt_name} || exit -1 fi diff --git a/paddlespeech/t2s/exps/synthesize.py b/paddlespeech/t2s/exps/synthesize.py index e6cc630a..d6dd7af1 100644 --- a/paddlespeech/t2s/exps/synthesize.py +++ b/paddlespeech/t2s/exps/synthesize.py @@ -207,7 +207,8 @@ def main(): default='fastspeech2_csmsc', choices=[ 'speedyspeech_csmsc', 'fastspeech2_csmsc', 'fastspeech2_ljspeech', - 'fastspeech2_aishell3', 'fastspeech2_vctk', 'tacotron2_csmsc', 'tacotron2_aishell3' + 'fastspeech2_aishell3', 'fastspeech2_vctk', 'tacotron2_csmsc', + 'tacotron2_ljspeech', 'tacotron2_aishell3' ], help='Choose acoustic model type of tts task.') parser.add_argument( diff --git a/paddlespeech/t2s/exps/synthesize_e2e.py b/paddlespeech/t2s/exps/synthesize_e2e.py index 8fca935a..d615f4f5 100644 --- a/paddlespeech/t2s/exps/synthesize_e2e.py +++ b/paddlespeech/t2s/exps/synthesize_e2e.py @@ -285,7 +285,7 @@ def main(): choices=[ 'speedyspeech_csmsc', 'speedyspeech_aishell3', 'fastspeech2_csmsc', 'fastspeech2_ljspeech', 'fastspeech2_aishell3', 'fastspeech2_vctk', - 'tacotron2_csmsc' + 'tacotron2_csmsc', 'tacotron2_ljspeech' ], help='Choose acoustic model type of tts task.') parser.add_argument( From 1a2e2d28b0aedb5099aa89a703c4eeff88cc34a7 Mon Sep 17 00:00:00 2001 From: lizi <49679880@qq.com> Date: Tue, 8 Feb 2022 20:57:12 +0800 Subject: [PATCH 21/22] Modify typesetting, test=doc --- docs/source/tts/quick_start_cn.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/tts/quick_start_cn.md b/docs/source/tts/quick_start_cn.md index c14fccd5..39bf3d0a 100644 --- 
a/docs/source/tts/quick_start_cn.md +++ b/docs/source/tts/quick_start_cn.md @@ -43,7 +43,7 @@ PaddleSpeech 的 TTS 模型具有以下映射关系: ```bash bash run.sh ``` - 这只是一个演示,请确保源数据已经准备好,并且在下一个 `步骤` 之前每个 `步骤` 都运行正常. + 这只是一个演示,请确保源数据已经准备好,并且在下一个 `step` 之前每个 `step` 都运行正常. ### 用CSMSC数据集训练FastSpeech2 - 进入目录 @@ -62,7 +62,7 @@ PaddleSpeech 的 TTS 模型具有以下映射关系: ```bash bash run.sh ``` - 这只是一个演示,请确保源数据已经准备好,并且在下一个 `步骤` 之前每个 `步骤` 都运行正常。 + 这只是一个演示,请确保源数据已经准备好,并且在下一个 `step` 之前每个 `step` 都运行正常。 `run.sh` 中主要包括以下步骤: @@ -102,7 +102,7 @@ checkpoint_name - `*_stats.npy` 是特征的统计文件,如果它在训练前已被标准化。 - `phone_id_map.txt` 是音素到音素 ID 的映射关系。 - `tone_id_map.txt` 是在训练声学模型之前分割音调和拼音时,音调到音调 ID 的映射关系。(例如在 csmsc/speedyspeech 的示例中) -- `spk_id_map.txt` 是多发音人声学模型中`发音人`到`spk_ids`的映射关系。 +- `spk_id_map.txt` 是多发音人声学模型中 "发音人" 到 "spk_ids" 的映射关系。 下面的示例代码显示了如何使用模型进行预测。 ### Acoustic Models 声学模型(文本到频谱图) From ea29275acd46cf0b37187a78a97046a99ccfe617 Mon Sep 17 00:00:00 2001 From: TianYuan Date: Wed, 9 Feb 2022 06:39:25 +0000 Subject: [PATCH 22/22] fix dead links, test=doc --- docs/source/released_model.md | 8 ++++---- docs/source/tts/README.md | 4 ---- examples/thchs30/align0/README.md | 4 ++-- tests/benchmark/conformer/README.md | 2 +- 4 files changed, 7 insertions(+), 11 deletions(-) diff --git a/docs/source/released_model.md b/docs/source/released_model.md index 5d29968e..23309d8e 100644 --- a/docs/source/released_model.md +++ b/docs/source/released_model.md @@ -10,9 +10,9 @@ Acoustic Model | Training Data | Token-based | Size | Descriptions | CER | WER | [Conformer Offline Aishell ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/aishell/asr1/asr1_conformer_aishell_ckpt_0.1.1.model.tar.gz) | Aishell Dataset | Char-based | 284 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention rescoring | 0.056 |-| 151 h | [Conformer Offline Aishell ASR1](../../examples/aishell/asr1) [Transformer Aishell ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/aishell/asr1/asr1_transformer_aishell_ckpt_0.1.1.model.tar.gz) | Aishell Dataset | Char-based | 128 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: Attention rescoring | 0.0523 || 151 h | [Transformer Aishell ASR1](../../examples/aishell/asr1) [Ds2 Offline Librispeech ASR0 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr0/asr0_deepspeech2_librispeech_ckpt_0.1.1.model.tar.gz)| Librispeech Dataset | Char-based | 518 MB | 2 Conv + 3 bidirectional LSTM layers| - |0.0725| 960 h | [Ds2 Offline Librispeech ASR0](../../examples/librispeech/asr0) -[Conformer Librispeech ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr1/asr1_conformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 191 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention rescoring |-| 0.0337 | 960 h | [Conformer Librispeech ASR1](../../example/librispeech/asr1) -[Transformer Librispeech ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr1/asr1_transformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 131 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: Attention rescoring |-| 0.0381 | 960 h | [Transformer Librispeech ASR1](../../example/librispeech/asr1) -[Transformer Librispeech ASR2 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr2/asr2_transformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 131 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: JoinCTC w/ LM |-| 0.0240 | 960 h | [Transformer Librispeech 
ASR2](../../example/librispeech/asr2) +[Conformer Librispeech ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr1/asr1_conformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 191 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention rescoring |-| 0.0337 | 960 h | [Conformer Librispeech ASR1](../../examples/librispeech/asr1) +[Transformer Librispeech ASR1 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr1/asr1_transformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 131 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: Attention rescoring |-| 0.0381 | 960 h | [Transformer Librispeech ASR1](../../examples/librispeech/asr1) +[Transformer Librispeech ASR2 Model](https://paddlespeech.bj.bcebos.com/s2t/librispeech/asr2/asr2_transformer_librispeech_ckpt_0.1.1.model.tar.gz) | Librispeech Dataset | subword-based | 131 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: JoinCTC w/ LM |-| 0.0240 | 960 h | [Transformer Librispeech ASR2](../../examples/librispeech/asr2) ### Language Model based on NGram Language Model | Training Data | Token-based | Size | Descriptions @@ -66,7 +66,7 @@ GE2E + FastSpeech2 | AISHELL-3 |[ge2e-fastspeech2-aishell3](https://github.com/ Model Type | Dataset| Example Link | Pretrained Models :-------------:| :------------:| :-----: | :-----: PANN | Audioset| [audioset_tagging_cnn](https://github.com/qiuqiangkong/audioset_tagging_cnn) | [panns_cnn6.pdparams](https://bj.bcebos.com/paddleaudio/models/panns_cnn6.pdparams), [panns_cnn10.pdparams](https://bj.bcebos.com/paddleaudio/models/panns_cnn10.pdparams), [panns_cnn14.pdparams](https://bj.bcebos.com/paddleaudio/models/panns_cnn14.pdparams) -PANN | ESC-50 |[pann-esc50]("./examples/esc50/cls0")|[esc50_cnn6.tar.gz](https://paddlespeech.bj.bcebos.com/cls/esc50/esc50_cnn6.tar.gz), [esc50_cnn10.tar.gz](https://paddlespeech.bj.bcebos.com/cls/esc50/esc50_cnn10.tar.gz), [esc50_cnn14.tar.gz](https://paddlespeech.bj.bcebos.com/cls/esc50/esc50_cnn14.tar.gz) +PANN | ESC-50 |[pann-esc50](../../examples/esc50/cls0)|[esc50_cnn6.tar.gz](https://paddlespeech.bj.bcebos.com/cls/esc50/esc50_cnn6.tar.gz), [esc50_cnn10.tar.gz](https://paddlespeech.bj.bcebos.com/cls/esc50/esc50_cnn10.tar.gz), [esc50_cnn14.tar.gz](https://paddlespeech.bj.bcebos.com/cls/esc50/esc50_cnn14.tar.gz) ## Punctuation Restoration Models Model Type | Dataset| Example Link | Pretrained Models diff --git a/docs/source/tts/README.md b/docs/source/tts/README.md index 3de8901b..835db08e 100644 --- a/docs/source/tts/README.md +++ b/docs/source/tts/README.md @@ -71,7 +71,3 @@ Check our [website](https://paddlespeech.readthedocs.io/en/latest/tts/demo.html) #### GE2E 1. [ge2e_ckpt_0.3.zip](https://paddlespeech.bj.bcebos.com/Parakeet/ge2e_ckpt_0.3.zip) - -## License - -Parakeet is provided under the [Apache-2.0 license](LICENSE). 
diff --git a/examples/thchs30/align0/README.md b/examples/thchs30/align0/README.md index da56fffc..5195ab80 100644 --- a/examples/thchs30/align0/README.md +++ b/examples/thchs30/align0/README.md @@ -27,7 +27,7 @@ cd a0 应用程序会自动下载 THCHS-30数据集,处理成 MFA 所需的文件格式并开始训练,您可以修改 `run.sh` 中的参数 `LEXICON_NAME` 来决定您需要强制对齐的级别(word、syllable 和 phone) ## MFA 所使用的字典 --- -MFA 字典的格式请参考: [MFA 官方文档 Dictionary format ](https://montreal-forced-aligner.readthedocs.io/en/latest/dictionary.html) +MFA 字典的格式请参考: [MFA 官方文档](https://montreal-forced-aligner.readthedocs.io/en/latest/) phone.lexicon 直接使用的是 `THCHS-30/data_thchs30/lm_phone/lexicon.txt` word.lexicon 考虑到了中文的多音字,使用**带概率的字典**, 生成规则请参考 `local/gen_word2phone.py` `syllable.lexicon` 获取自 [DNSun/thchs30-pinyin2tone](https://github.com/DNSun/thchs30-pinyin2tone) @@ -39,4 +39,4 @@ word.lexicon 考虑到了中文的多音字,使用**带概率的字典**, 生 **syllabel 级别:** [syllable.lexicon](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/syllable/syllable.lexicon)、[对齐结果](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/syllable/thchs30_alignment.tar.gz)、[模型](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/syllable/thchs30_model.zip) **word 级别:** [word.lexicon](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/word/word.lexicon)、[对齐结果](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/word/thchs30_alignment.tar.gz)、[模型](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/word/thchs30_model.zip) -随后,您可以参考 [MFA 官方文档 Align using pretrained models](https://montreal-forced-aligner.readthedocs.io/en/stable/aligning.html#align-using-pretrained-models) 使用我们给您提供好的模型直接对自己的数据集进行强制对齐,注意,您需要使用和模型对应的 lexicon 文件,当文本是汉字时,您需要用空格把不同的**汉字**(而不是词语)分开 +随后,您可以参考 [MFA 官方文档](https://montreal-forced-aligner.readthedocs.io/en/latest/) 使用我们给您提供好的模型直接对自己的数据集进行强制对齐,注意,您需要使用和模型对应的 lexicon 文件,当文本是汉字时,您需要用空格把不同的**汉字**(而不是词语)分开 diff --git a/tests/benchmark/conformer/README.md b/tests/benchmark/conformer/README.md index 22e0009d..72242bad 100644 --- a/tests/benchmark/conformer/README.md +++ b/tests/benchmark/conformer/README.md @@ -1,5 +1,5 @@ ### Prepare the environment -Please follow the instructions shown in [here](../../docs/source/install.md) to install the Deepspeech first. +Please follow the instructions shown in [here](../../../docs/source/install.md) to install the Deepspeech first. ### File list └── benchmark # 模型名