From 472cf70ebdfaf4e4a1448f85e9458cd9cae73808 Mon Sep 17 00:00:00 2001
From: Hui Zhang
Date: Sat, 2 Oct 2021 14:15:43 +0000
Subject: [PATCH] refactor egs; add utils; add tools; rm notebook; add speechnn; more docs;

---
.bashrc | 15 +
.flake8 | 4 +
.gitignore | 7 +
.mergify.yml | 6 +
.notebook/Linear_test.ipynb | 605 --
.notebook/WarmupLR.ipynb | 339 --
.notebook/compute_cmvn_loader_test.ipynb | 793 ---
.notebook/dataloader.ipynb | 389 --
.../dataloader_with_tokens_tokenids.ipynb | 1204 ----
.notebook/hack_api_test.ipynb | 290 -
.notebook/jit_infer.ipynb | 672 ---
.notebook/layer_norm_test.ipynb | 229 -
.notebook/mask_and_masked_fill_test.ipynb | 449 --
.notebook/position_embeding_check.ipynb | 231 -
.notebook/python_test.ipynb | 1680 ------
.notebook/train_test.ipynb | 1887 -------
.notebook/u2_confermer_model_wenet.ipynb | 4608 ---
.notebook/u2_tansformer_model_espnet.ipynb | 1672 ------
.notebook/wenet_model.ipynb | 5015 -----
README.md | 32 +-
README_cn.md | 48 -
doc/images/multi_gpu_speedup.png | Bin 211490 -> 0 bytes
doc/images/tuning_error_surface.png | Bin 110461 -> 0 bytes
doc/src/benchmark.md | 16 -
doc/src/faq.md | 37 -
doc/src/reference.md | 3 -
doc/src/released_model.md | 9 -
doc/src/server.md | 34 -
docs/images/ds2offlineModel.png | Bin 0 -> 95607 bytes
docs/images/ds2onlineModel.png | Bin 0 -> 95445 bytes
{doc => docs}/src/augmentation.md | 0
{doc => docs}/src/data_preparation.md | 2 +-
docs/src/deepspeech_architecture.md | 190 +
{doc => docs}/src/feature_list.md | 22 +-
{doc => docs}/src/getting_started.md | 0
{doc => docs}/src/install.md | 5 +-
{doc => docs}/src/ngram_lm.md | 49 -
docs/src/reference.md | 8 +
docs/src/released_model.md | 28 +
env.sh | 4 +-
examples/1xt2x/.gitignore | 1 +
examples/1xt2x/README.md | 11 +
examples/1xt2x/aishell/.gitignore | 5 +
examples/1xt2x/aishell/conf/augmentation.json | 1 +
examples/1xt2x/aishell/conf/deepspeech2.yaml | 67 +
examples/1xt2x/aishell/local/data.sh | 70 +
.../1xt2x/aishell/local/download_lm_ch.sh | 21 +
.../1xt2x/aishell/local/download_model.sh | 25 +
examples/1xt2x/aishell/local/test.sh | 34 +
examples/1xt2x/aishell/path.sh | 16 +
examples/1xt2x/aishell/run.sh | 28 +
examples/1xt2x/baidu_en8k/.gitignore | 5 +
.../1xt2x/baidu_en8k/conf/augmentation.json | 1 +
.../1xt2x/baidu_en8k/conf/deepspeech2.yaml | 67 +
examples/1xt2x/baidu_en8k/local/data.sh | 86 +
.../1xt2x/baidu_en8k/local/download_lm_en.sh | 20 +
.../1xt2x/baidu_en8k/local/download_model.sh | 25 +
examples/1xt2x/baidu_en8k/local/test.sh | 34 +
examples/1xt2x/baidu_en8k/path.sh | 16 +
examples/1xt2x/baidu_en8k/run.sh | 28 +
examples/1xt2x/librispeech/.gitignore | 5 +
.../1xt2x/librispeech/conf/augmentation.json | 1 +
.../1xt2x/librispeech/conf/deepspeech2.yaml | 67 +
examples/1xt2x/librispeech/local/data.sh | 84 +
.../1xt2x/librispeech/local/download_lm_en.sh | 20 +
.../1xt2x/librispeech/local/download_model.sh | 25 +
examples/1xt2x/librispeech/local/test.sh | 34 +
examples/1xt2x/librispeech/path.sh | 15 +
examples/1xt2x/librispeech/run.sh | 27 +
examples/1xt2x/src_deepspeech2x/__init__.py | 370 ++
examples/1xt2x/src_deepspeech2x/bin/test.py | 56 +
.../1xt2x/src_deepspeech2x/models/__init__.py | 13 +
.../src_deepspeech2x/models/ds2/__init__.py | 17 +
.../models/ds2/deepspeech2.py | 314 ++
.../1xt2x/src_deepspeech2x/models/ds2/rnn.py | 334 ++
examples/1xt2x/src_deepspeech2x/test_model.py | 429 ++
examples/aug_conf/augmentation.json | 10 -
.../augmentation.json} | 10 +-
examples/cc-cedict/README.md | 58 +
examples/cc-cedict/path.sh | 2 +-
examples/chinese_g2p/README.md | 5 -
examples/dataset/aidatatang_200zh/.gitignore | 4 +
examples/dataset/aidatatang_200zh/README.md | 14 +
.../aidatatang_200zh/aidatatang_200zh.py | 153 +
examples/dataset/aishell/.gitignore | 4 +
examples/dataset/aishell/README.md | 3 +
examples/dataset/aishell/aishell.py | 52 +-
examples/dataset/aishell3/README.md | 3 +
examples/dataset/gigaspeech/.gitignore | 1 +
examples/dataset/gigaspeech/README.md | 10 +
examples/dataset/gigaspeech/gigaspeech.py | 13 +
examples/dataset/gigaspeech/run.sh | 14 +
examples/dataset/librispeech/.gitignore | 16 +-
examples/dataset/librispeech/librispeech.py | 25 +-
examples/dataset/magicdata/README.md | 15 +
examples/dataset/multi_cn/README.md | 11 +
examples/dataset/primewords/README.md | 6 +
examples/dataset/st-cmds/README.md | 1 +
examples/dataset/ted_en_zh/.gitignore | 6 +
examples/dataset/ted_en_zh/ted_en_zh.py | 116 +
examples/dataset/thchs30/.gitignore | 6 +
examples/dataset/thchs30/README.md | 55 +
examples/dataset/thchs30/thchs30.py | 186 +
examples/dataset/timit/.gitignore | 4 +
examples/dataset/timit/timit.py | 241 +
.../timit/timit_kaldi_standard_split.py | 108 +
examples/{chinese_g2p => g2p}/.gitignore | 0
examples/g2p/README.md | 3 +
examples/g2p/zh/README.md | 93 +
.../zh}/local/convert_transcription.py | 0
.../zh}/local/extract_pinyin_label.py | 0
.../zh}/local/ignore_sandhi.py | 0
.../zh}/local/prepare_dataset.sh | 0
examples/{chinese_g2p => g2p/zh}/path.sh | 2 +-
.../{chinese_g2p => g2p/zh}/requirements.txt | 0
examples/{chinese_g2p => g2p/zh}/run.sh | 8 +-
examples/ngram_lm/READEME.md | 3 +
examples/ngram_lm/README.md | 7 -
examples/ngram_lm/s0/.gitignore | 1 +
examples/ngram_lm/s0/README.md | 96 +
examples/ngram_lm/{ => s0}/data/README.md | 0
.../{ => s0}/data/custom_confusion.txt | 0
.../ngram_lm/{ => s0}/data/text_correct.txt | 0
.../ngram_lm/{ => s0}/local/build_zh_lm.sh | 0
.../ngram_lm/{ => s0}/local/download_lm_zh.sh | 0
.../{ => s0}/local/kenlm_score_test.py | 0
examples/ngram_lm/{ => s0}/path.sh | 4 +-
examples/ngram_lm/{ => s0}/requirements.txt | 0
examples/ngram_lm/{ => s0}/run.sh | 0
examples/punctuation_restoration/README.md | 3 +
examples/spm/README.md | 89 +
examples/spm/path.sh | 2 +-
examples/thchs30/README.md | 3 +
examples/thchs30/a0/README.md | 42 +
.../thchs30/a0/data/dict/syllable.lexicon | 2490 ++++
examples/thchs30/a0/local/data.sh | 53 +
examples/thchs30/a0/local/gen_word2phone.py | 114 +
.../thchs30/a0/local/reorganize_thchs30.py | 84 +
examples/thchs30/a0/path.sh | 13 +
examples/thchs30/a0/run.sh | 35 +
examples/tn/.gitignore | 1 +
examples/tn/README.md | 36 +
examples/tn/data/sentences.txt | 26 +
examples/tn/local/test_normalization.py | 29 +
examples/tn/path.sh | 8 +
examples/tn/run.sh | 26 +
hub/requirements.txt | 26 +
hub/setup_hub.sh | 66 +
requirements.txt | 9 +
setup.sh | 28 +-
speechnn/.gitignore | 1 +
speechnn/CMakeLists.txt | 56 +
speechnn/cmake/third_party.cmake | 197 +
speechnn/cmake/third_party/absl.cmake | 13 +
speechnn/cmake/third_party/boost.cmake | 49 +
speechnn/cmake/third_party/eigen.cmake | 53 +
speechnn/cmake/third_party/libsndfile.cmake | 11 +
speechnn/cmake/third_party/openfst.cmake | 26 +
.../third_party/openfst_lib_target.cmake | 31 +
speechnn/cmake/third_party/threadpool.cmake | 36 +
speechnn/cmake/third_party/version.cmake | 15 +
speechnn/core/transformers/.gitkeep | 0
speechnn/core/transformers/README.md | 9 +
speechnn/examples/.gitkeep | 0
speechnn/examples/CMakeLists.txt | 0
speechnn/speechnn/CMakeLists.txt | 0
speechnn/speechnn/decoder/CMakeLists.txt | 2 +
speechnn/speechnn/frontend/CMakeLists.txt | 0
.../speechnn/frontend/audio/CMakeLists.txt | 0
.../speechnn/frontend/text/CMakeLists.txt | 0
speechnn/speechnn/model/CMakeLists.txt | 0
speechnn/speechnn/nn/CMakeLists.txt | 0
speechnn/speechnn/protocol/CMakeLists.txt | 0
speechnn/speechnn/utils/CMakeLists.txt | 0
third_party/__init__.py | 0
third_party/nnAudio/.gitignore | 3 +
third_party/nnAudio/nnAudio/Spectrogram.py | 2443 ++++
third_party/nnAudio/nnAudio/__init__.py | 1 +
.../nnAudio/nnAudio/librosa_functions.py | 490 ++
third_party/nnAudio/nnAudio/utils.py | 535 ++
third_party/nnAudio/setup.py | 34 +
third_party/nnAudio/tests/parameters.py | 38 +
third_party/nnAudio/tests/test_spectrogram.py | 373 ++
third_party/paddle_audio/__init__.py | 0
third_party/paddle_audio/frontend/common.py | 201 +
third_party/paddle_audio/frontend/english.wav | Bin 0 -> 35824 bytes
third_party/paddle_audio/frontend/kaldi.py | 266 +
.../paddle_audio/frontend/kaldi_test.py | 533 ++
third_party/text_processing/__ini__.py | 1 +
third_party/text_processing/__init__.py | 0
.../text_processing/normalization/__init__.py | 42 +
.../normalization/char_convert.py | 15 +
.../normalization/chronology.py | 64 +
.../normalization/constants.py | 58 +
.../text_processing/normalization/num.py | 155 +
.../text_processing/normalization/phone.py | 31 +
.../normalization/quantifier.py | 18 +
.../normalization/sentence_split.py | 23 +
tools/Makefile | 69 +-
tools/extras/README.md | 11 +
tools/extras/install_gcc.sh | 17 +
tools/extras/install_kaldi.sh | 35 +
tools/extras/install_kenlm.sh | 9 +
tools/extras/install_liblbfgs.sh | 40 +
tools/extras/install_mfa.sh | 15 +
tools/extras/install_miniconda.sh | 19 +
tools/extras/install_mkl.sh | 277 +
tools/extras/install_ngram.sh | 24 +
tools/extras/install_openblas.sh | 39 +
tools/extras/install_openfst.sh | 25 +
tools/extras/install_pynini.sh | 14 +
tools/extras/install_srilm.sh | 91 +
tools/extras/srilm.patch | 17 +
utils/README.md | 4 +
utils/__init__.py | 13 +
utils/avg.sh | 28 +-
utils/avg_model.py | 45 +-
utils/build_kenlm_model_from_arpa.sh | 44 +
utils/build_vocab.py | 18 +-
utils/compute_mean_std.py | 4 +-
utils/dump_manifest.py | 63 +
utils/duration_from_maniefst.sh | 10 +
utils/filter.py | 53 +
utils/filter_scp.pl | 87 +
utils/format_data.py | 6 +-
utils/format_triplet_data.py | 96 +
utils/fst/add_lex_disambig.pl | 195 +
utils/fst/compile_lexicon_token_fst.sh | 88 +
utils/fst/ctc_token_fst.py | 49 +
utils/fst/ctc_token_fst_corrected.py | 78 +
utils/fst/eps2disambig.pl | 29 +
utils/fst/make_lexicon_fst.pl | 154 +
utils/fst/make_tlg.sh | 49 +
utils/fst/prepare_dict.py | 88 +
utils/fst/remove_oovs.pl | 42 +
utils/fst/rnnt_token_fst.py | 36 +
utils/fst/s2eps.pl | 27 +
utils/log.sh | 0
utils/manifest_key_value.py | 64 +
utils/ngram_train.sh | 2 +-
utils/parallel/run.pl | 356 ++
utils/parse_options.sh | 0
utils/pd_env_collect.sh | 167 +
utils/profile.sh | 0
utils/run.pl | 1 +
utils/score_sclite.sh | 126 +
utils/spk2utt_to_utt2spk.pl | 25 +
utils/split_data.sh | 79 +
utils/split_json.sh | 31 +
utils/split_scp.pl | 212 +
utils/tarball.sh | 17 +-
utils/train_arpa_with_kenlm.sh | 67 +
utils/utility.py | 97 +-
utils/utility.sh | 0
utils/utt2spk_to_spk2utt.pl | 38 +
255 files changed, 16230 insertions(+), 20389 deletions(-)
create mode 100644 .bashrc
delete mode 100644 .notebook/Linear_test.ipynb
delete mode 100644 .notebook/WarmupLR.ipynb
delete mode 100644 .notebook/compute_cmvn_loader_test.ipynb
delete mode 100644 .notebook/dataloader.ipynb
delete mode 100644 .notebook/dataloader_with_tokens_tokenids.ipynb
delete mode 100644 .notebook/hack_api_test.ipynb
delete mode 100644 .notebook/jit_infer.ipynb
delete mode 100644 .notebook/layer_norm_test.ipynb
delete mode 100644 .notebook/mask_and_masked_fill_test.ipynb
delete mode 100644 .notebook/position_embeding_check.ipynb
delete mode 100644 .notebook/python_test.ipynb
delete mode 100644 .notebook/train_test.ipynb
delete mode 100644 .notebook/u2_confermer_model_wenet.ipynb
delete mode 100644 .notebook/u2_tansformer_model_espnet.ipynb
delete mode 100644 .notebook/wenet_model.ipynb
delete mode 100644 README_cn.md
delete mode 100755 doc/images/multi_gpu_speedup.png
delete mode 100644 doc/images/tuning_error_surface.png
delete mode 100644 doc/src/benchmark.md
delete mode 100644 doc/src/faq.md
delete mode 100644 doc/src/reference.md
delete mode 100644 doc/src/released_model.md
delete mode 100644 doc/src/server.md
create mode 100644 docs/images/ds2offlineModel.png
create mode 100644 docs/images/ds2onlineModel.png
rename {doc => docs}/src/augmentation.md (100%)
rename {doc => docs}/src/data_preparation.md (98%)
create mode 100644 docs/src/deepspeech_architecture.md
rename {doc => docs}/src/feature_list.md (78%)
rename {doc => docs}/src/getting_started.md (100%)
rename {doc => docs}/src/install.md (95%)
rename {doc => docs}/src/ngram_lm.md (64%)
create mode 100644 docs/src/reference.md
create mode 100644 docs/src/released_model.md
create mode 100644 examples/1xt2x/.gitignore
create mode 100644 examples/1xt2x/README.md
create mode 100644 examples/1xt2x/aishell/.gitignore
create mode 100644 examples/1xt2x/aishell/conf/augmentation.json
create mode 100644 examples/1xt2x/aishell/conf/deepspeech2.yaml
create mode 100755 examples/1xt2x/aishell/local/data.sh
create mode 100755 examples/1xt2x/aishell/local/download_lm_ch.sh
create mode 100644 examples/1xt2x/aishell/local/download_model.sh
create mode 100755 examples/1xt2x/aishell/local/test.sh
create mode 100644 examples/1xt2x/aishell/path.sh
create mode 100755 examples/1xt2x/aishell/run.sh
create mode 100644 examples/1xt2x/baidu_en8k/.gitignore
create mode 100644 examples/1xt2x/baidu_en8k/conf/augmentation.json
create mode 100644 examples/1xt2x/baidu_en8k/conf/deepspeech2.yaml
create mode 100755 examples/1xt2x/baidu_en8k/local/data.sh
create mode 100755 examples/1xt2x/baidu_en8k/local/download_lm_en.sh
create mode 100644 examples/1xt2x/baidu_en8k/local/download_model.sh
create mode 100755 examples/1xt2x/baidu_en8k/local/test.sh
create mode 100644 examples/1xt2x/baidu_en8k/path.sh
create mode 100755 examples/1xt2x/baidu_en8k/run.sh
create mode 100644 examples/1xt2x/librispeech/.gitignore
create mode 100644 examples/1xt2x/librispeech/conf/augmentation.json
create mode 100644 examples/1xt2x/librispeech/conf/deepspeech2.yaml
create mode 100755 examples/1xt2x/librispeech/local/data.sh
create mode 100755 examples/1xt2x/librispeech/local/download_lm_en.sh
create mode 100644 examples/1xt2x/librispeech/local/download_model.sh
create mode 100755 examples/1xt2x/librispeech/local/test.sh
create mode 100644 examples/1xt2x/librispeech/path.sh
create mode 100755 examples/1xt2x/librispeech/run.sh
create mode 100644 examples/1xt2x/src_deepspeech2x/__init__.py
create mode 100644 examples/1xt2x/src_deepspeech2x/bin/test.py
create mode 100644 examples/1xt2x/src_deepspeech2x/models/__init__.py
create mode 100644 examples/1xt2x/src_deepspeech2x/models/ds2/__init__.py
create mode 100644 examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py
create mode 100644 examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py
create mode 100644 examples/1xt2x/src_deepspeech2x/test_model.py
delete mode 100644 examples/aug_conf/augmentation.json
rename examples/{aug_conf/augmentation.example.json => augmentation/augmentation.json} (91%)
delete mode 100644 examples/chinese_g2p/README.md
create mode 100644 examples/dataset/aidatatang_200zh/.gitignore
create mode 100644 examples/dataset/aidatatang_200zh/README.md
create mode 100644 examples/dataset/aidatatang_200zh/aidatatang_200zh.py
create mode 100644 examples/dataset/aishell/README.md
create mode 100644 examples/dataset/aishell3/README.md
create mode 100644 examples/dataset/gigaspeech/.gitignore
create mode 100644 examples/dataset/gigaspeech/README.md
create mode 100644 examples/dataset/gigaspeech/gigaspeech.py
create mode 100755 examples/dataset/gigaspeech/run.sh
create mode 100644 examples/dataset/magicdata/README.md
create mode 100644 examples/dataset/multi_cn/README.md
create mode 100644 examples/dataset/primewords/README.md
create mode 100644 examples/dataset/st-cmds/README.md
create mode 100644 examples/dataset/ted_en_zh/.gitignore
create mode 100644 examples/dataset/ted_en_zh/ted_en_zh.py
create mode 100644 examples/dataset/thchs30/.gitignore
create mode 100644 examples/dataset/thchs30/README.md
create mode 100644 examples/dataset/thchs30/thchs30.py
create mode 100644 examples/dataset/timit/.gitignore
create mode 100644 examples/dataset/timit/timit.py
create mode 100644 examples/dataset/timit/timit_kaldi_standard_split.py
rename examples/{chinese_g2p => g2p}/.gitignore (100%)
create mode 100644 examples/g2p/README.md
create mode 100644 examples/g2p/zh/README.md
rename examples/{chinese_g2p => g2p/zh}/local/convert_transcription.py (100%)
rename examples/{chinese_g2p => g2p/zh}/local/extract_pinyin_label.py (100%)
rename examples/{chinese_g2p => g2p/zh}/local/ignore_sandhi.py (100%)
rename examples/{chinese_g2p => g2p/zh}/local/prepare_dataset.sh (100%)
rename examples/{chinese_g2p => g2p/zh}/path.sh (82%)
rename examples/{chinese_g2p => g2p/zh}/requirements.txt (100%)
rename examples/{chinese_g2p => g2p/zh}/run.sh (80%)
create mode 100644 examples/ngram_lm/READEME.md
delete mode 100644 examples/ngram_lm/README.md
create mode 100644 examples/ngram_lm/s0/.gitignore
create mode 100644 examples/ngram_lm/s0/README.md
rename examples/ngram_lm/{ => s0}/data/README.md (100%)
rename examples/ngram_lm/{ => s0}/data/custom_confusion.txt (100%)
rename examples/ngram_lm/{ => s0}/data/text_correct.txt (100%)
rename examples/ngram_lm/{ => s0}/local/build_zh_lm.sh (100%)
rename examples/ngram_lm/{ => s0}/local/download_lm_zh.sh (100%)
rename examples/ngram_lm/{ => s0}/local/kenlm_score_test.py (100%)
rename examples/ngram_lm/{ => s0}/path.sh (67%)
rename examples/ngram_lm/{ => s0}/requirements.txt (100%)
rename examples/ngram_lm/{ => s0}/run.sh (100%)
create mode 100644 examples/punctuation_restoration/README.md
create mode 100644 examples/thchs30/README.md
create mode 100644 examples/thchs30/a0/README.md
create mode 100644 examples/thchs30/a0/data/dict/syllable.lexicon
create mode 100644 examples/thchs30/a0/local/data.sh
create mode 100644 examples/thchs30/a0/local/gen_word2phone.py
create mode 100644 examples/thchs30/a0/local/reorganize_thchs30.py
create mode 100644 examples/thchs30/a0/path.sh
create mode 100755 examples/thchs30/a0/run.sh
create mode 100644 examples/tn/.gitignore
create mode 100644 examples/tn/README.md
create mode 100644 examples/tn/data/sentences.txt
create mode 100644 examples/tn/local/test_normalization.py
create mode 100644 examples/tn/path.sh
create mode 100755 examples/tn/run.sh
create mode 100644 hub/requirements.txt
create mode 100644 hub/setup_hub.sh
create mode 100644 speechnn/.gitignore
create mode 100644 speechnn/CMakeLists.txt
create mode 100644 speechnn/cmake/third_party.cmake
create mode 100644 speechnn/cmake/third_party/absl.cmake
create mode 100644 speechnn/cmake/third_party/boost.cmake
create mode 100644 speechnn/cmake/third_party/eigen.cmake
create mode 100644 speechnn/cmake/third_party/libsndfile.cmake
create mode 100644 speechnn/cmake/third_party/openfst.cmake
create mode 100644 speechnn/cmake/third_party/openfst_lib_target.cmake
create mode 100644 speechnn/cmake/third_party/threadpool.cmake
create mode 100644 speechnn/cmake/third_party/version.cmake
create mode 100644 speechnn/core/transformers/.gitkeep
create mode 100644 speechnn/core/transformers/README.md
create mode 100644 speechnn/examples/.gitkeep
create mode 100644 speechnn/examples/CMakeLists.txt
create mode 100644 speechnn/speechnn/CMakeLists.txt
create mode 100644 speechnn/speechnn/decoder/CMakeLists.txt
create mode 100644 speechnn/speechnn/frontend/CMakeLists.txt
create mode 100644 speechnn/speechnn/frontend/audio/CMakeLists.txt
create mode 100644 speechnn/speechnn/frontend/text/CMakeLists.txt
create mode 100644 speechnn/speechnn/model/CMakeLists.txt
create mode 100644 speechnn/speechnn/nn/CMakeLists.txt
create mode 100644 speechnn/speechnn/protocol/CMakeLists.txt
create mode 100644 speechnn/speechnn/utils/CMakeLists.txt
create mode 100644 third_party/__init__.py
create mode 100644 third_party/nnAudio/.gitignore
create mode 100755 third_party/nnAudio/nnAudio/Spectrogram.py
create mode 100755 third_party/nnAudio/nnAudio/__init__.py
create mode 100755 third_party/nnAudio/nnAudio/librosa_functions.py
create mode 100644 third_party/nnAudio/nnAudio/utils.py
create mode 100755 third_party/nnAudio/setup.py
create mode 100644 third_party/nnAudio/tests/parameters.py
create mode 100644 third_party/nnAudio/tests/test_spectrogram.py
create mode 100644 third_party/paddle_audio/__init__.py
create mode 100644 third_party/paddle_audio/frontend/common.py
create mode 100644 third_party/paddle_audio/frontend/english.wav
create mode 100644 third_party/paddle_audio/frontend/kaldi.py
create mode 100644 third_party/paddle_audio/frontend/kaldi_test.py
create mode 100644 third_party/text_processing/__ini__.py
create mode 100644 third_party/text_processing/__init__.py
create mode 100644 third_party/text_processing/normalization/__init__.py
create mode 100644 third_party/text_processing/normalization/char_convert.py
create mode 100644 third_party/text_processing/normalization/chronology.py
create mode 100644 third_party/text_processing/normalization/constants.py
create mode 100644 third_party/text_processing/normalization/num.py
create mode 100644 third_party/text_processing/normalization/phone.py
create mode 100644 third_party/text_processing/normalization/quantifier.py
create mode 100644 third_party/text_processing/normalization/sentence_split.py
create mode 100644 tools/extras/README.md
create mode 100755 tools/extras/install_gcc.sh
create mode 100755 tools/extras/install_kaldi.sh
create mode 100755 tools/extras/install_kenlm.sh
create mode 100755 tools/extras/install_liblbfgs.sh
create mode 100755 tools/extras/install_mfa.sh
create mode 100755 tools/extras/install_miniconda.sh
create mode 100755 tools/extras/install_mkl.sh
create mode 100755 tools/extras/install_ngram.sh
create mode 100755 tools/extras/install_openblas.sh
create mode 100755 tools/extras/install_openfst.sh
create mode 100755 tools/extras/install_pynini.sh
create mode 100755 tools/extras/install_srilm.sh
create mode 100644 tools/extras/srilm.patch
create mode 100644 utils/README.md
create mode 100644 utils/__init__.py
create mode 100755 utils/build_kenlm_model_from_arpa.sh
create mode 100755 utils/dump_manifest.py
create mode 100755 utils/duration_from_maniefst.sh
create mode 100755 utils/filter.py
create mode 100755 utils/filter_scp.pl
create mode 100755 utils/format_triplet_data.py
create mode 100755 utils/fst/add_lex_disambig.pl
create mode 100755 utils/fst/compile_lexicon_token_fst.sh
create mode 100755 utils/fst/ctc_token_fst.py
create mode 100755 utils/fst/ctc_token_fst_corrected.py
create mode 100755 utils/fst/eps2disambig.pl
create mode 100755 utils/fst/make_lexicon_fst.pl
create mode 100755 utils/fst/make_tlg.sh
create mode 100755 utils/fst/prepare_dict.py
create mode 100755 utils/fst/remove_oovs.pl
create mode 100755 utils/fst/rnnt_token_fst.py
create mode 100755 utils/fst/s2eps.pl
mode change 100644 => 100755 utils/log.sh
create mode 100755 utils/manifest_key_value.py
create mode 100755 utils/parallel/run.pl
mode change 100644 => 100755 utils/parse_options.sh
create mode 100755 utils/pd_env_collect.sh
mode change 100644 => 100755 utils/profile.sh
create mode 120000 utils/run.pl
create mode 100755 utils/score_sclite.sh
create mode 100755 utils/spk2utt_to_utt2spk.pl
create mode 100755 utils/split_data.sh
create mode 100755 utils/split_json.sh
create mode 100755 utils/split_scp.pl
create mode 100755 utils/train_arpa_with_kenlm.sh
mode change 100644 => 100755 utils/utility.py
mode change 100644 => 100755 utils/utility.sh
create mode 100755 utils/utt2spk_to_spk2utt.pl

diff --git a/.bashrc b/.bashrc
new file mode 100644
index 000000000..8abbb3c7d
--- /dev/null
+++ b/.bashrc
@@ -0,0 +1,15 @@
+unset GREP_OPTIONS
+
+# https://zhuanlan.zhihu.com/p/33050965
+alias nvs='nvidia-smi'
+alias his='history'
+alias jobs='jobs -l'
+alias ports='netstat -tulanp'
+alias wget='wget -c'
+
+## Colorize the grep command output for ease of use (good for log files)##
+alias grep='grep --color=auto'
+alias egrep='egrep --color=auto'
+alias fgrep='fgrep --color=auto'
+
+
diff --git a/.flake8 b/.flake8
index 722899439..44685f23a 100644
--- a/.flake8
+++ b/.flake8
@@ -42,6 +42,10 @@ ignore =
 # these ignores are from flake8-comprehensions; please fix!
 C400,C401,C402,C403,C404,C405,C407,C411,C413,C414,C415
+
+per-file-ignores =
+    */__init__.py: F401
+
 # Specify the list of error codes you wish Flake8 to report.
select = E, diff --git a/.gitignore b/.gitignore index 6fa377222..cd2360e15 100644 --- a/.gitignore +++ b/.gitignore @@ -10,8 +10,15 @@ .ipynb_checkpoints *.npz *.done +*.whl tools/venv tools/kenlm tools/sox-14.4.2 tools/soxbindings +tools/montreal-forced-aligner/ +tools/Montreal-Forced-Aligner/ +tools/sctk +tools/sctk-20159b5/ + +*output/ diff --git a/.mergify.yml b/.mergify.yml index b11fd5c1f..03e57e14b 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -87,3 +87,9 @@ pull_request_rules: actions: label: add: ["Docker"] + - name: "auto add label=Deployment" + conditions: + - files~=^speechnn/ + actions: + label: + add: ["Deployment"] diff --git a/.notebook/Linear_test.ipynb b/.notebook/Linear_test.ipynb deleted file mode 100644 index 5c7370cf3..000000000 --- a/.notebook/Linear_test.ipynb +++ /dev/null @@ -1,605 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "academic-surname", - "metadata": {}, - "outputs": [], - "source": [ - "import paddle\n", - "from paddle import nn" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "fundamental-treasure", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv-dev/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], - "source": [ - "L = nn.Linear(256, 2048)\n", - "L2 = nn.Linear(2048, 256)" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "consolidated-elephant", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import torch\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "moderate-noise", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "float64\n", - "Tensor(shape=[2, 51, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[[-1.54171216, -2.61531472, -1.79881978, ..., -0.31395876, 0.56513089, -0.44516513],\n", - " [-0.79492962, 1.91157901, 0.66567147, ..., 0.54825783, -1.01471853, -0.84924090],\n", - " [-1.22556651, -0.36225814, 0.65063190, ..., 0.65726501, 0.05563191, 0.09009409],\n", - " ...,\n", - " [ 0.38615900, -0.77905393, 0.99732304, ..., -1.38463700, -3.32365036, -1.31089687],\n", - " [ 0.05579993, 0.06885809, -1.66662002, ..., -0.23346378, -3.29372883, 1.30561364],\n", - " [ 1.90676069, 1.95093191, -0.28849599, ..., -0.06860496, 0.95347673, 1.00475824]],\n", - "\n", - " [[-0.91453546, 0.55298805, -1.06146812, ..., -0.86378336, 1.00454640, 1.26062179],\n", - " [ 0.10223761, 0.81301165, 2.36865163, ..., 0.16821407, 0.29240361, 1.05408621],\n", - " [-1.33196676, 1.94433689, 0.01934209, ..., 0.48036841, 0.51585966, 1.22893548],\n", - " ...,\n", - " [-0.19558455, -0.47075930, 0.90796155, ..., -1.28598249, -0.24321797, 0.17734711],\n", - " [ 0.89819717, -1.39516675, 0.17138045, ..., 2.39761519, 1.76364994, -0.52177650],\n", - " [ 0.94122332, -0.18581429, 1.36099780, ..., 0.67647684, -0.04699665, 1.51205540]]])\n", - "tensor([[[-1.5417, -2.6153, -1.7988, ..., -0.3140, 0.5651, -0.4452],\n", - " [-0.7949, 1.9116, 0.6657, ..., 0.5483, -1.0147, -0.8492],\n", - " [-1.2256, -0.3623, 0.6506, ..., 0.6573, 0.0556, 0.0901],\n", - " ...,\n", - " [ 0.3862, -0.7791, 0.9973, ..., -1.3846, 
-3.3237, -1.3109],\n", - " [ 0.0558, 0.0689, -1.6666, ..., -0.2335, -3.2937, 1.3056],\n", - " [ 1.9068, 1.9509, -0.2885, ..., -0.0686, 0.9535, 1.0048]],\n", - "\n", - " [[-0.9145, 0.5530, -1.0615, ..., -0.8638, 1.0045, 1.2606],\n", - " [ 0.1022, 0.8130, 2.3687, ..., 0.1682, 0.2924, 1.0541],\n", - " [-1.3320, 1.9443, 0.0193, ..., 0.4804, 0.5159, 1.2289],\n", - " ...,\n", - " [-0.1956, -0.4708, 0.9080, ..., -1.2860, -0.2432, 0.1773],\n", - " [ 0.8982, -1.3952, 0.1714, ..., 2.3976, 1.7636, -0.5218],\n", - " [ 0.9412, -0.1858, 1.3610, ..., 0.6765, -0.0470, 1.5121]]])\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv-dev/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], - "source": [ - "x = np.random.randn(2, 51, 256)\n", - "print(x.dtype)\n", - "px = paddle.to_tensor(x, dtype='float32')\n", - "tx = torch.tensor(x, dtype=torch.float32)\n", - "print(px)\n", - "print(tx)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cooked-progressive", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "mechanical-prisoner", - "metadata": {}, - "outputs": [], - "source": [ - "data = np.load('enc_0_ff_out.npz', allow_pickle=True)\n", - "t_norm_ff = data['norm_ff']\n", - "t_ff_out = data['ff_out']\n", - "t_ff_l_x = data['ff_l_x']\n", - "t_ff_l_a_x = data['ff_l_a_x']\n", - "t_ff_l_a_l_x = data['ff_l_a_l_x']\n", - "t_ps = data['ps']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "indie-marriage", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "assured-zambia", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n", - "True\n", - "True\n", - "True\n" - ] - } - ], - "source": [ - "L.set_state_dict({'weight': t_ps[0].T, 'bias': t_ps[1]})\n", - "L2.set_state_dict({'weight': t_ps[2].T, 'bias': t_ps[3]})\n", - "\n", - "ps = []\n", - "for n, p in L.named_parameters():\n", - " ps.append(p)\n", - "\n", - "for n, p in L2.state_dict().items():\n", - " ps.append(p)\n", - " \n", - "for p, tp in zip(ps, t_ps):\n", - " print(np.allclose(p.numpy(), tp.T))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "committed-jacob", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "extreme-traffic", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "optimum-milwaukee", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "viral-indian", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n", - "True\n", - "True\n", - "True\n" - ] - } - ], - "source": [ - "# data = np.load('enc_0_ff_out.npz', allow_pickle=True)\n", - "# t_norm_ff = data['norm_ff']\n", - "# t_ff_out = data['ff_out']\n", - "# t_ff_l_x = data['ff_l_x']\n", - "# t_ff_l_a_x = data['ff_l_a_x']\n", - "# t_ff_l_a_l_x = data['ff_l_a_l_x']\n", - "# t_ps = data['ps']\n", - "TL = torch.nn.Linear(256, 2048)\n", - "TL2 = 
torch.nn.Linear(2048, 256)\n", - "TL.load_state_dict({'weight': torch.tensor(t_ps[0]), 'bias': torch.tensor(t_ps[1])})\n", - "TL2.load_state_dict({'weight': torch.tensor(t_ps[2]), 'bias': torch.tensor(t_ps[3])})\n", - "\n", - "# for n, p in TL.named_parameters():\n", - "# print(n, p)\n", - "# for n, p in TL2.named_parameters():\n", - "# print(n, p)\n", - "\n", - "ps = []\n", - "for n, p in TL.state_dict().items():\n", - " ps.append(p.data.numpy())\n", - " \n", - "for n, p in TL2.state_dict().items():\n", - " ps.append(p.data.numpy())\n", - " \n", - "for p, tp in zip(ps, t_ps):\n", - " print(np.allclose(p, tp))" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "skilled-vietnamese", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[ 0.67277956 0.08313607 -0.62761104 ... -0.17480263 0.42718208\n", - " -0.5787626 ]\n", - " [ 0.91516656 0.5393416 1.7159258 ... 0.06144593 0.06486575\n", - " -0.03350811]\n", - " [ 0.438351 0.6227843 0.24096036 ... 1.0912522 -0.90929437\n", - " -1.012989 ]\n", - " ...\n", - " [ 0.68631977 0.14240924 0.10763275 ... -0.11513516 0.48065388\n", - " 0.04070369]\n", - " [-0.9525228 0.23197874 0.31264272 ... 0.5312439 0.18773697\n", - " -0.8450228 ]\n", - " [ 0.42024016 -0.04561988 0.54541194 ... -0.41933843 -0.00436018\n", - " -0.06663495]]\n", - "\n", - " [[-0.11638781 -0.33566502 -0.20887226 ... 0.17423287 -0.9195841\n", - " -0.8161046 ]\n", - " [-0.3469874 0.88269687 -0.11887559 ... -0.15566081 0.16357468\n", - " -0.20766167]\n", - " [-0.3847657 0.3984318 -0.06963477 ... -0.00360622 1.2360432\n", - " -0.26811332]\n", - " ...\n", - " [ 0.08230796 -0.46158582 0.54582864 ... 0.15747628 -0.44790155\n", - " 0.06020184]\n", - " [-0.8095085 0.43163058 -0.42837143 ... 0.8627463 0.90656304\n", - " 0.15847842]\n", - " [-1.485811 -0.18216592 -0.8882585 ... 0.32596245 0.7822631\n", - " -0.6460344 ]]]\n", - "[[[ 0.67278004 0.08313602 -0.6276114 ... -0.17480245 0.42718196\n", - " -0.5787625 ]\n", - " [ 0.91516703 0.5393413 1.7159253 ... 0.06144581 0.06486579\n", - " -0.03350812]\n", - " [ 0.43835106 0.62278455 0.24096027 ... 1.0912521 -0.9092943\n", - " -1.0129892 ]\n", - " ...\n", - " [ 0.6863195 0.14240888 0.10763284 ... -0.11513527 0.48065376\n", - " 0.04070365]\n", - " [-0.9525231 0.23197863 0.31264275 ... 0.53124386 0.18773702\n", - " -0.84502304]\n", - " [ 0.42024007 -0.04561983 0.545412 ... -0.41933888 -0.00436005\n", - " -0.066635 ]]\n", - "\n", - " [[-0.11638767 -0.33566508 -0.20887226 ... 0.17423296 -0.9195838\n", - " -0.8161046 ]\n", - " [-0.34698725 0.88269705 -0.11887549 ... -0.15566081 0.16357464\n", - " -0.20766166]\n", - " [-0.3847657 0.3984319 -0.06963488 ... -0.00360619 1.2360426\n", - " -0.26811326]\n", - " ...\n", - " [ 0.08230786 -0.4615857 0.5458287 ... 0.15747619 -0.44790167\n", - " 0.06020182]\n", - " [-0.8095083 0.4316307 -0.42837155 ... 0.862746 0.9065631\n", - " 0.15847899]\n", - " [-1.485811 -0.18216613 -0.8882584 ... 
0.32596254 0.7822631\n", - " -0.6460344 ]]]\n", - "True\n", - "False\n" - ] - } - ], - "source": [ - "y = L(px)\n", - "print(y.numpy())\n", - "\n", - "ty = TL(tx)\n", - "print(ty.data.numpy())\n", - "print(np.allclose(px.numpy(), tx.detach().numpy()))\n", - "print(np.allclose(y.numpy(), ty.detach().numpy()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "incorrect-allah", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "prostate-cameroon", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "governmental-surge", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 0.04476918 0.554463 -0.3027508 ... -0.49600336 0.3751858\n", - " 0.8254095 ]\n", - " [ 0.95594174 -0.29528382 -1.2899452 ... 0.43718258 0.05584608\n", - " -0.06974669]]\n", - "[[ 0.04476918 0.5544631 -0.3027507 ... -0.49600336 0.37518573\n", - " 0.8254096 ]\n", - " [ 0.95594174 -0.29528376 -1.2899454 ... 0.4371827 0.05584623\n", - " -0.0697467 ]]\n", - "True\n", - "False\n", - "True\n" - ] - } - ], - "source": [ - "x = np.random.randn(2, 256)\n", - "px = paddle.to_tensor(x, dtype='float32')\n", - "tx = torch.tensor(x, dtype=torch.float32)\n", - "y = L(px)\n", - "print(y.numpy())\n", - "ty = TL(tx)\n", - "print(ty.data.numpy())\n", - "print(np.allclose(px.numpy(), tx.detach().numpy()))\n", - "print(np.allclose(y.numpy(), ty.detach().numpy()))\n", - "print(np.allclose(y.numpy(), ty.detach().numpy(), atol=1e-5))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "confidential-jacket", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "improved-civilization", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "5e7e7c9fde8350084abf1898cf52651cfc84b17a\n" - ] - } - ], - "source": [ - "print(paddle.version.commit)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "d1e2d3b4", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['__builtins__',\n", - " '__cached__',\n", - " '__doc__',\n", - " '__file__',\n", - " '__loader__',\n", - " '__name__',\n", - " '__package__',\n", - " '__spec__',\n", - " 'commit',\n", - " 'full_version',\n", - " 'istaged',\n", - " 'major',\n", - " 'minor',\n", - " 'mkl',\n", - " 'patch',\n", - " 'rc',\n", - " 'show',\n", - " 'with_mkl']" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dir(paddle.version)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "c880c719", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.1.0\n" - ] - } - ], - "source": [ - "print(paddle.version.full_version)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "f26977bf", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "commit: 5e7e7c9fde8350084abf1898cf52651cfc84b17a\n", - "None\n" - ] - } - ], - "source": [ - "print(paddle.version.show())" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "04ad47f6", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1.6.0\n" - ] - } - ], - "source": [ - "print(torch.__version__)" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "e1e03830", - 
"metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['__builtins__',\n", - " '__cached__',\n", - " '__doc__',\n", - " '__file__',\n", - " '__loader__',\n", - " '__name__',\n", - " '__package__',\n", - " '__spec__',\n", - " '__version__',\n", - " 'cuda',\n", - " 'debug',\n", - " 'git_version',\n", - " 'hip']" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dir(torch.version)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "4ad0389b", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'b31f58de6fa8bbda5353b3c77d9be4914399724d'" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "torch.version.git_version" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "7870ea10", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'10.2'" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "torch.version.cuda" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "db8ee5a7", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6321ec2a", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/WarmupLR.ipynb b/.notebook/WarmupLR.ipynb deleted file mode 100644 index 21abf9cbe..000000000 --- a/.notebook/WarmupLR.ipynb +++ /dev/null @@ -1,339 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "d6a0e098", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Union\n", - "\n", - "import torch\n", - "from torch.optim.lr_scheduler import _LRScheduler\n", - "\n", - "from typeguard import check_argument_types\n", - "\n", - "\n", - "class WarmupLR(_LRScheduler):\n", - " \"\"\"The WarmupLR scheduler\n", - " This scheduler is almost same as NoamLR Scheduler except for following\n", - " difference:\n", - " NoamLR:\n", - " lr = optimizer.lr * model_size ** -0.5\n", - " * min(step ** -0.5, step * warmup_step ** -1.5)\n", - " WarmupLR:\n", - " lr = optimizer.lr * warmup_step ** 0.5\n", - " * min(step ** -0.5, step * warmup_step ** -1.5)\n", - " Note that the maximum lr equals to optimizer.lr in this scheduler.\n", - " \"\"\"\n", - "\n", - " def __init__(\n", - " self,\n", - " optimizer: torch.optim.Optimizer,\n", - " warmup_steps: Union[int, float] = 25000,\n", - " last_epoch: int = -1,\n", - " ):\n", - " assert check_argument_types()\n", - " self.warmup_steps = warmup_steps\n", - "\n", - " # __init__() must be invoked before setting field\n", - " # because step() is also invoked in __init__()\n", - " super().__init__(optimizer, last_epoch)\n", - "\n", - " def __repr__(self):\n", - " return f\"{self.__class__.__name__}(warmup_steps={self.warmup_steps})\"\n", - "\n", - " def get_lr(self):\n", - " step_num = self.last_epoch + 1\n", - " return [\n", - " lr\n", - " * self.warmup_steps ** 0.5\n", - " * min(step_num ** -0.5, step_num * self.warmup_steps ** -1.5)\n", - " for lr in 
self.base_lrs\n", - " ]\n", - "\n", - " def set_step(self, step: int):\n", - " self.last_epoch = step" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "0d496677", - "metadata": {}, - "outputs": [], - "source": [ - "import torch.optim as optim\n", - "model = torch.nn.Linear(10, 200)\n", - "optimizer = optim.Adam(model.parameters())\n", - "scheduler = WarmupLR(optimizer, warmup_steps=25000)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "e3e3f3dc", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0 0.0 -1\n" - ] - } - ], - "source": [ - "infos = {}\n", - "start_epoch = infos.get('epoch', -1) + 1\n", - "cv_loss = infos.get('cv_loss', 0.0)\n", - "step = infos.get('step', -1)\n", - "print(start_epoch, cv_loss, step)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "dc3d550c", - "metadata": {}, - "outputs": [], - "source": [ - "scheduler.set_step(step)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "e527634e", - "metadata": {}, - "outputs": [], - "source": [ - "lrs=[]\n", - "for i in range(100000):\n", - " scheduler.step()\n", - " lrs.append(scheduler.get_lr())" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "f1452db9", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Collecting matplotlib\n", - " Downloading matplotlib-3.4.1-cp38-cp38-manylinux1_x86_64.whl (10.3 MB)\n", - "\u001b[K |████████████████████████████████| 10.3 MB 575 kB/s eta 0:00:01\n", - "\u001b[?25hCollecting kiwisolver>=1.0.1\n", - " Downloading kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl (1.2 MB)\n", - "\u001b[K |████████████████████████████████| 1.2 MB 465 kB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: pillow>=6.2.0 in /workspace/wenet/venv/lib/python3.8/site-packages (from matplotlib) (8.1.2)\n", - "Requirement already satisfied: numpy>=1.16 in /workspace/wenet/venv/lib/python3.8/site-packages (from matplotlib) (1.20.1)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /workspace/wenet/venv/lib/python3.8/site-packages (from matplotlib) (2.8.1)\n", - "Collecting cycler>=0.10\n", - " Downloading cycler-0.10.0-py2.py3-none-any.whl (6.5 kB)\n", - "Requirement already satisfied: pyparsing>=2.2.1 in /workspace/wenet/venv/lib/python3.8/site-packages (from matplotlib) (2.4.7)\n", - "Requirement already satisfied: six in /workspace/wenet/venv/lib/python3.8/site-packages (from cycler>=0.10->matplotlib) (1.15.0)\n", - "Installing collected packages: kiwisolver, cycler, matplotlib\n", - "Successfully installed cycler-0.10.0 kiwisolver-1.3.1 matplotlib-3.4.1\n" - ] - } - ], - "source": [ - "!pip install matplotlib\n", - "import matplotlib.pyplot as plt\n", - "\n", - "%matplotlib inline" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "0f36d04f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYoAAAD4CAYAAADy46FuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAAAqc0lEQVR4nO3deXxV1b338c8vCUkYkkAghJAEAhLQIJMEHHFCBa2KVkG0T7Wt1qet9ra1w9Xn3ufe1ld7b21tvVq1alut+mhJQK3Yqjig1SpCDgIyBiLTSZhCAglTyLSeP86GxjTDQZKc6ft+vXh5zjrrrLM2O+bL3mvv3zHnHCIiIu2JC/UEREQkvCkoRESkQwoKERHpkIJCREQ6pKAQEZEOJYR6Al1h0KBBLi8vL9TTEBGJKMuXL9/rnMvorF9UBEVeXh4+ny/U0xARiShmti2Yfjr1JCIiHVJQiIhIhxQUIiLSIQWFiIh0SEEhIiIdCioozGymmZWaWZmZ3d3G60lmVuS9vtTM8lq8do/XXmpmM1q0P2lme8xsTaux0s3sTTPb5P13wElsn4iInKROg8LM4oFHgMuBAuBGMyto1e1WYJ9zbhTwAHCf994CYC4wFpgJPOqNB/BHr621u4G3nXP5wNvecxERCZFgjiimAmXOuc3OuXpgHjCrVZ9ZwNPe4wXAdDMzr32ec+6oc24LUOaNh3PuPaC6jc9rOdbTwDXBb450p82VB3m3dE+opyEiPSyYoMgG/C2el3ttbfZxzjUCNcDAIN/bWqZzbqf3eBeQ2VYnM7vdzHxm5qusrAxiM+Rk3fS7pXzlqRLeWrc71FMRkR4U1ovZLvCtSm1+s5Jz7gnnXKFzrjAjo9M70OUkle05wK7aOgC+V7SSTysPhnhGItJTggmKCiC3xfMcr63NPmaWAKQBVUG+t7XdZpbljZUF6FxHGCj2lZMQZ7xy53kkJsRx+zM+DtQ1hHpaItIDggmKEiDfzEaYWSKBxemFrfosBG7xHl8PLPaOBhYCc72rokYA+cCyTj6v5Vi3AC8HMUfpRg1Nzbz4cTnTTxvMuJw0Hr7pDLZWHeZ7RatobtZX6YpEu06DwltzuBNYBKwHip1za83sXjO72uv2B2CgmZUBd+FdqeScWwsUA+uA14E7nHNNAGb2J2AJMMbMys3sVm+snwOXmtkm4BLvuYTQ4g172HuwnjmFgYPDs08ZyL9/4TTeWr+bB9/eFOLZiUh3s8A//CNbYWGhU/XY7nPb0yV8Ul7Dh3dfTEJ84N8Wzjl+uOATFiwv58G5E5k1sbNrFEQk3JjZcudcYWf9wnoxW0JvT20d75RWct3knOMhAWBm/Oza0zlzRDo/nP8JJVvbutJZRKKBgkI6tODjcpqa3fHTTi0lJcTz+Jcnk5Pem68/42PL3kMhmKGIdDcFhbTLOcd8XzlT89IZMahvm33690nkqa9MIc6Mrz61jOpD9T08SxHpbgoKaVfJ1n1s2XuIOVP++WiipeED+/K7myezo6aOrz/j40h9Uw/NUER6goJC2lXs89MvKYErxg3ptO/k4ek8eMNEVmzfxzefW059Y3MPzFBEeoKCQtp0oK6Bv36yk6smZNEnMbivVr98XBY/u3Yc75ZW8oP5usdCJFoE9xtAYs5fP9nJkYamNhexO3Lj1GHsP9zAfa9vIK13L+6dNZZAfUgRiVQKCmlTkc9P/uB+TMztf8Lv/eaFp7D/cD2Pv7eZ/n168f3LxnT9BEWkxygo5J9s2n2AFdv38+9fOO1zHw3cffmp1Bxp4DeLy0iMj+Pb0/O7eJYi0lMUFPJPin1+EuKMayZ9/rutAzfkjaO+sZlfvbkRM7jzYoWFSCRSUMhnBAoAVnDJaZkM6pd0UmPFxxm/nD0BgPvf2AgoLEQikYJCPuPt9XuoOlTPnCk5XTJe67AwM+64aFSXjC0iPUNBIZ8x3+cnMzWJ8/O77sugjoWFA365qJT6xma+e0m+roYSiRAKCjlud20d75Tu4RsXnPKZAoBdIT7OuH/2BBLijAff3kTNkQb+48oC4uIUFiLhTkEhxy1YXk6z44TvnQhWfJxx33XjSUnuxZMfbOFAXSP3XTeuy0NJRLqWgkKAYwUA/UwdkU5eOwUAu0JcnPF/rzyNtN69eOCtjRyoa+A3N00iKSG+2z5TRE6O/iknACzbUs3WqsPc0E1HEy2ZGd+5JJ//vKqAN9bt5qtPlVCr798WCVsKCgGg2FfuFQDM6rHP/Oq5I/j1nAks21LN9b/9kIr9R3rss0UkeAoK4UBdA6+u3slVE4bSO7FnTwF98Ywcnv7aVHbur+PaRz5gTUVNj36+iHROQSH8xSsAeEMn3zvRXc4dNYgF3zyHXvFxzHl8Ce9s2BOSeYhI2xQUQlGJn9GZ/ZiQkxayOYwZksJL3zqHkRl9ufXpEp5ZshXnVKZcJBwoKGLcxt0HWOnfz5zC3JDfADc4NZmi28/mojGD+Y+X13LPi6s52qhvyxMJNQVFjCsu8dMr3rj2JAoAdqW+SQk8cXMhd1x0CvNK/Nz0u6Xsqa0L9bREYpqCIobVNzbz0opAAcCBJ1kAsCvFxxk/nHEqj9x0But21HLVw39npX9/qKclErMUFDFs8YbdgQKAPXDvxOfxhfFZvPDNc0iICyxyF5f4Qz0lkZikoIhhxb5yhqQmc/7orisA2NUKhqbyyrfPo3D4AH70wid8v3gVh+sbQz0tkZiioIhRu2rqeLd0D9dNziY+zAvzpfdN5Nlbz+Rfpufz4opyZj38AWV7DoR6WiIxQ0ERo174OFAAcPbk8Dzt1Fp8nHHXpaN55mtTqT5Uz1W/+YCXVpSHeloiMUFBEYOccxT7/JzZzQUAu8O0/Axe/c40xuWk8b2iVfxowSoOHdWpKJHupKCIQUu3VLOt6nDI7sQ+WZmpyTx/25ncedEo5i8v54qH3mf5tn2hnpZI1FJQxKBin5+UpAQuP73nCgB2tYT4OH4wYwxFt59NY5Nj9mMf8us3N9LQ1BzqqYlEnaCCwsxmmlmpmZWZ2d1tvJ5kZkXe60vNLK/Fa/d47aVmNqOzMc1supl9bGYrzezvZqYvWO5CtccKAE7s+QKA3WHqiHRe++40rpmUzUNvb2L2Y0vYsvdQqKclElU6DQoziwceAS4HCoAbzaygVbdbgX3OuVHAA8B93nsLgLnAWGAm8KiZxXcy5m+BLznnJgLPA/9+Ulson/GXVTupa2juke+d6Cmpyb349ZyJPHzTJLbsPcQVD77PUx9soblZtaJEukIwRxRTgTLn3GbnXD0wD5jVqs8s4Gnv8QJgugUKB80C5jnnjjrntgBl3ngdjemAVO9xGrDj822atKXI52dMZgrjQ1gAsLtcOX4or393GmeOTOcnr6xjzuNL+LTyYKinJRLxggmKbKDlLbHlXlubfZxzjUANMLCD93Y05m3Aq2ZWDnwZ+HlbkzKz283MZ2a+ysrKIDZDSncdYJV/P3OmhL4AYHfJSuvNU1+Zwq9mT2DTnoNc/uD7/PbdT2nU2oXI5xaOi9nfA65wzuUATwG/bquTc+
4J51yhc64wIyN87ywOJ8W+8CoA2F3MjOsm5/DmXedz0ZgM7nt9A9c++iHrd9aGemoiESmYoKgAWp7QzvHa2uxjZgkEThlVdfDeNtvNLAOY4Jxb6rUXAecEtSXSoWMFAC8tyCS9b2Kop9MjBqck89j/mswjN53Bjv1HuPI3f+e/Xl2v+y5ETlAwQVEC5JvZCDNLJLA4vbBVn4XALd7j64HFLvCtMwuBud5VUSOAfGBZB2PuA9LMbLQ31qXA+s+/eXLM2+t3U32ontlRtIgdDDPjC+OzeOuuC5hTmMMT721m+q/+xmurd+qLkUSClNBZB+dco5ndCSwC4oEnnXNrzexewOecWwj8AXjWzMqAagK/+PH6FQPrgEbgDudcE0BbY3rtXwdeMLNmAsHxtS7d4hhV5PMHCgDmx+ZpugF9E/nvL45ndmEu//bSGr753MdcMDqDn1w9NuLuThfpaRYN/6oqLCx0Pp8v1NMIWztrjnDuzxfzrQtH8YMZY0I9nZBrbGrm2Y+28as3NlLf1Mw3LjiFb1wwkj6Jnf67SSSqmNly51xhZ/3CcTFbutgLy70CgIU5oZ5KWEiIj+Or547g7e9fwMyxQ3jo7U1cfP/fePHjct17IdIGBUWUa252FPvKOWtkOsMH6hRLS5mpyTx04yQWfONsMlOTuKt4Fdc8+gG+rdWhnppIWFFQRLmlW6rZXh25BQB7QmFeOi9961weuGECe2qPcv1jS7jj+Y/xVx8O9dREwoJOyka5+T4/KcmRXQCwJ8TFGddOymHG2CE8/rfNPP7ep7y5djdfOmsYd1w0ikFh9J3iIj1NRxRRrLaugVfX7OTqCUNJ7hX5BQB7Qp/EBL536Wje+cGFXDspm6c/3MoFv3iHX7+5kQN1DaGenkhIKCii2CurdgQKAOq00wnLSuvNfdeP543vXcAFYzJ46O1NnP+Ld/j9+5upa2gK9fREepSCIooVl/g5dUgK47KjrwBgTxk1uB+PfmkyC+88l9Oz0/jpX9dz0f3v8vzS7dQ3qn6UxAYFRZTasKuWVeU1zCmM3gKAPWl8Tn+evfVMnr/tTDJTk/k/L63mwl++w7NLtnK0UUcYEt0UFFGquKScXvHGNVFeALCnnTNqEC996xye+dpUsvr35v++vJYLfvEuf/xgi05JSdRSUEShQAHAci4rGBIzBQB7kplx/ugMFnzjbJ677UyGpffhx6+sY5q3hnG4XkUHJbro8tgo9Nb63ew73KA7sbuZmXHuqEGcO2oQH22u4qG3N/HTv67n4XfKuPms4dx8Tp4uq5WooKCIQkUlfrLSkpkWowUAQ+GskQM5a+RAlm+r5rG/beahxWU8/t5mrp+cw9enjVThQYloCooos2P/Ed7bVMmdF40iPk6L2D1t8vB0fndzOmV7DvL79zcz31fO88u2M3PsEG4/fySThg0I9RRFTpiCIsq8sLwc52D2ZN07EUqjBvfj59eN565LR/PHD7fy/z7axmtrdjE1L51bzsnjsrGZ9IrXEqFEBpUZjyLNzY4L73+XnAG9ef7rZ4V6OtLCwaONzFu2naeXbMVffYQhqcl8+ezhzJ2Sy0CtY0iIqMx4DPpoSxXbqw8zJ8a+xS4S9EtK4LZpI3n3Bxfxu5sLGTW4H79cVMrZP1/M94tXsbq8JtRTFGmXTj1Fkfm+clKSE5h5+pBQT0XaER9nXFqQyaUFmZTtOcDTH27jhY/LeeHjcs4Y1p8vnz2cy0/PUm0uCSs69RQlao40MPVnbzG7MIefXjMu1NORE1Bb18ACXznPLNnK1qrDpPXuxbWTsrlx6jDGDEkJ9fQkigV76klHFFHilVU7ONrYzA2Fw0I9FTlBqcm9+Np5I/jKOXl8tLmK55dt57ml2/jjh1s5Y1h/bpw6jCvHD6V3oo4yJDR0RBElrn7479Q3NvPad6aptlMUqDp4lBc/ruBPJdvZXHmIlOQErpmYzQ1Tchk7NFX7WLqEjihiyPqdtXxSXsN/XlWgXyBRYmC/JL5+/khumzaCZVuq+dOy7RT5/Dz70TbGZKZw3eRsZk3MJjM1OdRTlRigoIgCxT4/ifFxXDNRBQCjjZlx5siBnDlyID8+XM8rn+zkxY/L+a9XN/Dz1zZwXn4G152RzWUFQ3RqSrqNgiLCHW1s4s8rKrh0bCYDVAAwqvXvk8iXzxrOl88azqeVB3np4wpeWlHBd+atpF9SAl8Yl8UXz8hmSl46cborX7qQgiLCvbVuD/sON+jeiRhzSkY/fjBjDHddOpqPtlTx4scV/OWTHRT5AnW+rhyfxZXjhzI+J02nI+WkaTE7wt385DLKdh/g/X+9WLWdYtzh+kbeWLubv3yyg79trKShyTEsvQ9XTcjiqglDGZOZotCQz9BidgzYsf8I72+q5NsqAChAn8QErpmUzTWTsqk53MCitbt45ZMdPPa3zTzyzqeMGtyPq8YP5coJWZyS0S/U05UIoqCIYAuOFQDUaSdpJa1PL+ZMyWXOlFz2HjzKa2t28ZdVO/iftzfywFsbOXVICpeNHcKMsZkUZOlyW+mYTj1FqOZmxwX3v8Ow9D48d5sKAEpwdtXU8erqnby+dhe+rdU0O8hN782MgiHMOH0IZwwboKPTGKJTT1Huo81V+KuP8IPLxoR6KhJBhqQl87XzRvC180aw9+BR3lq3m0Vrd/HMkm38/u9bGNQviUsLMpl5+hDOHjmQxATVDRUFRcQq9vlJTU5gxlgVAJTPZ1C/JOZOHcbcqcM4UNfAO6WVLFq7i5dXVvCnZdtJSUrg/NEZXHTqYC4ck6GvdY1hQQWFmc0EHgTigd87537e6vUk4BlgMlAF3OCc2+q9dg9wK9AE/ItzblFHY1rgZOlPgdnee37rnHvo5DYzutQcaeC1NbuYU5irKqPSJVKSe3H1hKFcPWEodQ1NfFC2lzfW7uad0j38dfVOzGBCTn+mnzqYi04drDIiMabToDCzeOAR4FKgHCgxs4XOuXUtut0K7HPOjTKzucB9wA1mVgDMBcYCQ4G3zGy09572xvwKkAuc6pxrNrPBXbGh0WThsQKAU7SILV0vuVc800/LZPppmTQ3O9btrOXt9XtYXLqHX725kV+9uZEhqclcdGoGF5+aybmjBtInUScnolkwe3cqUOac2wxgZvOAWUDLoJgF/Nh7vAB42DsymAXMc84dBbaYWZk3Hh2M+U3gJudcM4Bzbs/n37zoVFzi57SsVMYOTQ31VCTKxcUZp2encXp2Gt+5JJ89B+p4t7SSdzbsYeHKHfxpmZ/EhDim5qUzLX8Q54/O4NQhul8j2gQTFNmAv8XzcuDM9vo45xrNrAYY6LV/1Oq9xwoStTfmKQSORq4FKgmcrtrUelJmdjtwO8CwYbFTWnvdjlpWV9TwYxUAlBAYnJLMnMJc5hTmUt/YTMnWahZv2MP7myr579c28N+vbSAjJYlpowYxbfQgzhuVQUaK1jYiXTgeLyYBdc65QjP7IvAkMK11J+fcE8ATELg8tmenGDrHCgDOUgFACbHEhDjOHTWIc0cNAgKX3r6/qZL3N+3l3Y2VvLiiA
oDTslI5P38Q0/IzKMwboHW1CBRMUFQQWDM4Jsdra6tPuZklAGkEFrU7em977eXAi97jl4CngphjTDja2MSfV1ZwmQoAShgakpbM7MJcZhfmHl/beG9TJe9v3MuTH2zh8fc2k5QQR2HeAM4eOZCzRg5kfE5/XYIbAYIJihIg38xGEPhlPhe4qVWfhcAtwBLgemCxc86Z2ULgeTP7NYHF7HxgGWAdjPln4CJgC3ABsPFzb12UeXPdbvarAKBEgJZrG9+6cBSHjjaybEs172/ay5LNVdz/RuB/69694gPBccpAzh45kHHZaSTEKzjCTadB4a053AksInAp65POubVmdi/gc84tBP4APOstVlcT+MWP16+YwCJ1I3CHc64JoK0xvY/8OfCcmX0POAjc1nWbG9mKSvxk9+99/FBfJFL0TUrgIu/SWoB9h+pZuqWKJZ9WsWRzFb94vRSAfkkJTDkeHIMoGJqqO8XDgEp4RIiK/Uc4777FfPvifO66dHTnbxCJIHsPHuWjzf8Ijs2VhwBISUrgjOEDmJI3gMK8dCbm9tcaRxdSCY8os8BXDsDsyTkhnolI1xvUL4krxw/lyvFDAdhdW8dHm6tYtqUa39Z9x09V9YoPnNKakpdO4fBAeKRrva7bKSgiQHOzY/5yP+eeMojc9D6hno5It8tMTWbWxOzjV/ftP1zP8m37KNm6D9/Wav74wVaeeG8zAKdk9A0Ehxcewwf20aXjXUxBEQGWbK6ifN8RfjhDBQAlNvXvk3j8bnGAuoYmVlfUULI1cMTx6uqdzCsJ3JqV3jeRibn9mZTbn0nDBjA+N43U5F6hnH7EU1BEABUAFPms5F7xTMlLZ0peOhA46t605yC+bdWs3L6fFf79LN4QKOpgBqMy+gXCY9gAJub2Z3RmP11ddQIUFGGu5nCgAODcKSoAKNKeuDhjzJAUxgxJ4UtnDgcCxTM/Kd/Piu37Wenfz1vrdzN/eWCtr09iPONz0piYGwiOCblpDElN1imrdigowtzCVRXUNzbr3gmRE5TWuxfT8jOYlp8BgHOObVWHWenfz4rt+1jp38/v399MY3Pgys9B/RI5PTuN8d79H+NyFB7HKCjCXJHPT0FWKqdnp4V6KiIRzczIG9SXvEF9uWZSYJG8rqGJtTtqWVNRw+qKGlaX1/Dexkq87GBQvyTGZacyLqc/47LTGJ+TRmZqcgi3IjQUFGFs7Y4a1lTU8pOrx4Z6KiJRKblXPJOHD2Dy8AHH247UN7FuZyA0VlfUsrpiP39rER4ZKUmM8446CrwqzjkDekf1kYeCIozN95WTmBDHrIlDQz0VkZjROzGeycPTmTw8/Xjb4fpG1u+s5ZPywJHHmooa3i3dczw8UpISOC0rldOyUjgtK5WCoamMzkyJmnVFBUWYqmto4qUVFcwYO4T+fXRDkUgo9UlM+KfwOFLfROnuA6zbUcv6nbWs21nLguXlHKpvAiDO4JSMfseD41iQDE6JvFNXCoow9ea63dQcaWBOoe7EFglHvRPjmZjbn4m5/Y+3NTc7tlcfZv3Of4TH8m37WLhqx/E+g/olcVpWCmMyUxg9JIXRmSnkD+5H36Tw/XUcvjOLccU+rwDgKSoAKBIp4uL+sWB++bis4+37D9ezfueB4+Gxfmctz360jaONzcf75Kb3ZkxmCvmZXohkpnDK4L4kJYT+9JWCIgyV7zvM38v28p3p+cSpcqZIxOvfJzFQEfeUgcfbmpod/urDlO4+wMZdByjdfYBNuw/ybmnl8Ut24+OMvIF9GO0Fx5ghKYzO7EfewL49esOggiIMLfBuCrpeBQBFolZ8i6OPllUX6hub2Vp1iI0tAmTDrgMsWrvr+OJ5YnwcIwb1ZdTgftx9+andXgNOQRFmmpsd833lnDdqEDkDVABQJNYkJsQdP4Jg/D/a6xqaKNtzMBAguw9Stucga3fU9Mg3BCoowsyHn1ZRsf8I/3r5qaGeioiEkeRe8ce/NbCnqSpWmCn2+Unr3YvLCjJDPRUREUBBEVZqDjfw+tpdXDNxaNTcqCMikU9BEUZePlYAcIoKAIpI+FBQhJGiEj9jh6YydqgKAIpI+FBQhIk1FTWs3VHLDTqaEJEwo6AIE/N9/kABwAnZoZ6KiMhnKCjCQF1DE39euYOZY4eQ1kff7Ssi4UVBEQbeOF4AUKedRCT8KCjCQHGJn5wBvTmnRR0YEZFwoaAIMX/1YT74dC+zJ+eqAKCIhCUFRYgdLwCo750QkTCloAih5mbHguWBAoDZ/XuHejoiIm1SUITQB5/upWL/ES1ii0hYU1CEULGvnP59enHZWBUAFJHwpaAIkf2H61m0dhfXTMwOi686FBFpT1BBYWYzzazUzMrM7O42Xk8ysyLv9aVmltfitXu89lIzm3ECYz5kZgc/53aFvZdX7ggUANRpJxEJc50GhZnFA48AlwMFwI1mVtCq263APufcKOAB4D7vvQXAXGAsMBN41MziOxvTzAqBASe5bWGtqMTP6dmpFAxNDfVUREQ6FMwRxVSgzDm32TlXD8wDZrXqMwt42nu8AJhuZua1z3POHXXObQHKvPHaHdMLkV8CPzq5TQtfaypqWLezlht0NCEiESCYoMgG/C2el3ttbfZxzjUCNcDADt7b0Zh3Agudczs7mpSZ3W5mPjPzVVZWBrEZ4aPYKwB4tQoAikgECKvFbDMbCswGftNZX+fcE865QudcYUZGRvdProvUNTTx5xUVXH66CgCKSGQIJigqgJbnSHK8tjb7mFkCkAZUdfDe9tonAaOAMjPbCvQxs7IgtyUiLFq7i9q6Ri1ii0jECCYoSoB8MxthZokEFqcXtuqzELjFe3w9sNg557z2ud5VUSOAfGBZe2M65/7qnBvinMtzzuUBh70F8qhR7POTm96bs0eqAKCIRIaEzjo45xrN7E5gERAPPOmcW2tm9wI+59xC4A/As96//qsJ/OLH61cMrAMagTucc00AbY3Z9ZsXXvzVh/mgrIq7Lh2tAoAiEjE6DQoA59yrwKut2v6jxeM6AmsLbb33Z8DPghmzjT79gplfpJi/vBwzuG6yCgCKSOQIq8XsaNbU7Fjg8zMtP0MFAEUkoigoesgHZXvZUVPHHJUTF5EIo6DoIcU+P/379OLSAhUAFJHIoqDoAfsO1fPG2t0qACgiEUlB0QNeXllBfZMKAIpIZFJQdDPnHEW+csZlp6kAoIhEJAVFN1tTUcv6nbXMmaKjCRGJTAqKblbs85OUEMfVE4aGeioiIp+LgqIb1TU08eeVXgHA3ioAKCKRSUHRjRat3cWBukaddhKRiKag6EZFJYECgGeNUAFAEYlcCopu4q8+zIefVjFncq4KAIpIRFNQdJP5Pr8KAIpIVFBQdIOmZseC5eWcn5/BUBUAFJEIp6DoBn8/XgBQi9giEvkUFN2g2OdnQJ9eXFIwONRTERE5aQqKLrbvUD1vrt3NNZNUAFBEooOCoou9tCJQAPAG3TshIlFCQdGFnHMU+/yMz0nj1CEqACgi0UFB0YVWV9SwYdcBLWKLSFRRUHShYwUAr1IBQBGJ
IgqKLlLX0MTLK3dwxbgsFQAUkaiioOgir6/xCgDqtJOIRBkFRRcpKvEzLL0PZ45ID/VURES6lIKiC2yvOsySzVXMKcxRAUARiToKii4wf7mfOBUAFJEopaA4SccLAI7OICtNBQBFJPooKE7S+5sq2akCgCISxRQUJ2m+r5z0volcclpmqKciItItFBQnofpQPW+s28U1E7NJTNBfpYhEp6B+u5nZTDMrNbMyM7u7jdeTzKzIe32pmeW1eO0er73UzGZ0NqaZPee1rzGzJ80sbO9ee2lFBQ1NTgUARSSqdRoUZhYPPAJcDhQAN5pZQatutwL7nHOjgAeA+7z3FgBzgbHATOBRM4vvZMzngFOBcUBv4LaT2sJu4pxjvs/PhJw0xgxJCfV0RES6TTBHFFOBMufcZudcPTAPmNWqzyzgae/xAmC6mZnXPs85d9Q5twUo88Zrd0zn3KvOAywDwvKa00/KvQKAOpoQkSgXTFBkA/4Wz8u9tjb7OOcagRpgYAfv7XRM75TTl4HX25qUmd1uZj4z81VWVgaxGV2r2OcnuZcKAIpI9AvnFdhHgfecc++39aJz7gnnXKFzrjAjI6NHJ3akvomFK3dwxelZpCaH7RKKiEiXSAiiTwXQ8vxKjtfWVp9yM0sA0oCqTt7b7phm9p9ABvC/g5hfj3t97U4OHG3UaScRiQnBHFGUAPlmNsLMEgksTi9s1WchcIv3+HpgsbfGsBCY610VNQLIJ7Du0O6YZnYbMAO40TnXfHKb1z2KSvwMH6gCgCISGzo9onDONZrZncAiIB540jm31szuBXzOuYXAH4BnzawMqCbwix+vXzGwDmgE7nDONQG0Nab3kY8B24AlgfVwXnTO3dtlW3yStlUd4qPN1fxwxhi8+YmIRLVgTj3hnHsVeLVV23+0eFwHzG7nvT8DfhbMmF57UHMKlfm+8kABwDPC8mIsEZEuF86L2WHnWAHAC0ZnMCQtOdTTERHpEQqKE/Depkp21aoAoIjEFgXFCZjv85PeN5HpKgAoIjFEQRGkqoNHeXPdbq6dpAKAIhJb9BsvSMcKAOq0k4jEGgVFEJxzFPv8TMjtrwKAIhJzFBRBWFVew8bdB7lBRxMiEoMUFEH4RwHArFBPRUSkxykoOnGkvolXVu7ginFZpKgAoIjEIAVFJ15bEygAqNNOIhKrFBSdKCrxkzewD1NVAFBEYpSCogNb9x5i6ZZqZhfmqgCgiMQsBUUH5i/3qwCgiMQ8BUU7jhUAvHDMYBUAFJGYpqBox3sbK9lde5Q5hTqaEJHYpqBoR1GJn4F9E7n4VBUAFJHYpqBoQ9XBo7y1XgUARURAQdGml1ZU0NjsmDNF906IiCgoWnHOUVTiZ2Juf0ZnqgCgiIiCopWV/v1s2nOQG3Q0ISICKCj+SbGvnN694rlyvAoAioiAguIzDtc38soqFQAUEWlJQdHCa6t3cfBoo047iYi0oKBoocjnZ8SgvkzJGxDqqYiIhA0FhWfL3kMs21LN7MIcFQAUEWlBQeGZ71MBQBGRtigogMamZl74uJyLxgwmM1UFAEVEWlJQAO9tChQAnK1vsRMR+ScKCgIFAAf1S2T6aYNDPRURkbAT80Gx9+BR3l6/h2snZdMrPub/OkRE/knM/2Z86eNAAUDdOyEi0raggsLMZppZqZmVmdndbbyeZGZF3utLzSyvxWv3eO2lZjajszHNbIQ3Rpk3ZuJJbmO7nHMU+/ycMaw/owarAKCISFs6DQoziwceAS4HCoAbzaygVbdbgX3OuVHAA8B93nsLgLnAWGAm8KiZxXcy5n3AA95Y+7yxu8UKrwDgHC1ii4i0K5gjiqlAmXNus3OuHpgHzGrVZxbwtPd4ATDdAnetzQLmOeeOOue2AGXeeG2O6b3nYm8MvDGv+dxb14n5Pn+gAOCEod31ESIiES+YoMgG/C2el3ttbfZxzjUCNcDADt7bXvtAYL83RnufBYCZ3W5mPjPzVVZWBrEZ/2xYel++cm4e/ZISPtf7RURiQcT+hnTOPQE8AVBYWOg+zxjfvPCULp2TiEg0CuaIogJoeRI/x2trs4+ZJQBpQFUH722vvQro743R3meJiEgPCiYoSoB872qkRAKL0wtb9VkI3OI9vh5Y7JxzXvtc76qoEUA+sKy9Mb33vOONgTfmy59/80RE5GR1eurJOddoZncCi4B44Enn3FozuxfwOecWAn8AnjWzMqCawC9+vH7FwDqgEbjDOdcE0NaY3kf+KzDPzH4KrPDGFhGRELHAP+IjW2FhofP5fKGehohIRDGz5c65ws76xfyd2SIi0jEFhYiIdEhBISIiHVJQiIhIh6JiMdvMKoFtn/Ptg4C9XTidSKBtjg3a5uh3sts73DmX0VmnqAiKk2FmvmBW/aOJtjk2aJujX09tr049iYhIhxQUIiLSIQWFV1gwxmibY4O2Ofr1yPbG/BqFiIh0TEcUIiLSIQWFiIh0KKaDwsxmmlmpmZWZ2d2hns+JMLNcM3vHzNaZ2Voz+47Xnm5mb5rZJu+/A7x2M7OHvG39xMzOaDHWLV7/TWZ2S4v2yWa22nvPQ95X1Yac973rK8zsL97zEWa21JtnkVe6Hq+8fZHXvtTM8lqMcY/XXmpmM1q0h93PhJn1N7MFZrbBzNab2dnRvp/N7Hvez/UaM/uTmSVH2342syfNbI+ZrWnR1u37tb3P6JBzLib/EChv/ikwEkgEVgEFoZ7XCcw/CzjDe5wCbAQKgF8Ad3vtdwP3eY+vAF4DDDgLWOq1pwObvf8O8B4P8F5b5vU1772Xh3q7vXndBTwP/MV7XgzM9R4/BnzTe/wt4DHv8VygyHtc4O3vJGCE93MQH64/EwS+O/4273Ei0D+a9zOBrz/eAvRusX+/Em37GTgfOANY06Kt2/dre5/R4VxD/T9BCH8YzwYWtXh+D3BPqOd1EtvzMnApUApkeW1ZQKn3+HHgxhb9S73XbwQeb9H+uNeWBWxo0f6ZfiHczhzgbeBi4C/e/wR7gYTW+5XA952c7T1O8PpZ6319rF84/kwQ+LbILXgXnrTef9G4nwkEhd/75Zfg7ecZ0bifgTw+GxTdvl/b+4yO/sTyqadjP4zHlHttEcc71J4ELAUynXM7vZd2AZne4/a2t6P28jbaQ+1/gB8Bzd7zgcB+51yj97zlPI9vm/d6jdf/RP8uQmkEUAk85Z1u+72Z9SWK97NzrgK4H9gO7CSw35YT3fv5mJ7Yr+19RrtiOSiigpn1A14Avuucq235mgv8kyFqrn82syuBPc655aGeSw9KIHB64rfOuUnAIQKnC46Lwv08AJhFICSHAn2BmSGdVAj0xH4N9jNiOSgqgNwWz3O8tohhZr0IhMRzzrkXvebdZpblvZ4F7PHa29vejtpz2mgPpXOBq81sKzCPwOmnB4H+Znbsa31bzvP4tnmvpwFVnPjfRSiVA+XOuaXe8wUEgiOa9/MlwBbnXKVzrgF4kcC+j+b9fExP7Nf2PqNdsRwUJUC+dyVFIoFFsIUhnlPQvCsY/gCsd879usVLC4FjVz7cQmDt4lj7zd7
VE2cBNd7h5yLgMjMb4P1L7jIC5293ArVmdpb3WTe3GCsknHP3OOdynHN5BPbXYufcl4B3gOu9bq23+djfxfVef+e1z/WulhkB5BNY+Au7nwnn3C7Ab2ZjvKbpBL6DPmr3M4FTTmeZWR9vTse2OWr3cws9sV/b+4z2hXLRKtR/CFxJsJHAFRD/Fur5nODczyNwyPgJsNL7cwWBc7NvA5uAt4B0r78Bj3jbuhoobDHW14Ay789XW7QXAmu89zxMqwXVEG//hfzjqqeRBH4BlAHzgSSvPdl7Xua9PrLF+//N265SWlzlE44/E8BEwOft6z8TuLolqvcz8BNggzevZwlcuRRV+xn4E4E1mAYCR4639sR+be8zOvqjEh4iItKhWD71JCIiQVBQiIhIhxQUIiLSIQWFiIh0SEEhIiIdUlCIiEiHFBQiItKh/w/uhegfvR+Q7QAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "xs = list(range(100000))\n", - "plt.plot(xs, lrs)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "4f4e282c", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/wenet/venv/lib/python3.8/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], - "source": [ - "from typing import Union\n", - "\n", - "from paddle.optimizer.lr import LRScheduler\n", - "from typeguard import check_argument_types\n", - "\n", - "class WarmupLR(LRScheduler):\n", - " \"\"\"The WarmupLR scheduler\n", - " This scheduler is almost same as NoamLR Scheduler except for following\n", - " difference:\n", - " NoamLR:\n", - " lr = optimizer.lr * model_size ** -0.5\n", - " * min(step ** -0.5, step * warmup_step ** -1.5)\n", - " WarmupLR:\n", - " lr = optimizer.lr * warmup_step ** 0.5\n", - " * min(step ** -0.5, step * warmup_step ** -1.5)\n", - " Note that the maximum lr equals to optimizer.lr in this scheduler.\n", - " \"\"\"\n", - "\n", - " def __init__(self,\n", - " warmup_steps: Union[int, float]=25000,\n", - " learning_rate=1.0,\n", - " last_epoch=-1,\n", - " verbose=False):\n", - " assert check_argument_types()\n", - " self.warmup_steps = warmup_steps\n", - " super().__init__(learning_rate, last_epoch, verbose)\n", - "\n", - " def __repr__(self):\n", - " return f\"{self.__class__.__name__}(warmup_steps={self.warmup_steps})\"\n", - "\n", - " def get_lr(self):\n", - " step_num = self.last_epoch + 1\n", - " return self.base_lr * self.warmup_steps**0.5 * min(\n", - " step_num**-0.5, step_num * self.warmup_steps**-1.5)\n", - "\n", - " def set_step(self, step: int):\n", - " self.step(step)" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "8c40b202", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-1\n" - ] - } - ], - "source": [ - "sc = WarmupLR(warmup_steps=25000, learning_rate=0.001)\n", - "print(step)\n", - "#sc.set_step(step)\n", - "sc.set_step(0)" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "ecbc7e37", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYoAAAD4CAYAAADy46FuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAAAqaUlEQVR4nO3de3xU9Z3/8dcnCUm4JIGEEAIBEiCAQW4SEG94F7QqagGhu9Varb9a3W51267+tr/dtrvdVevW1VardrVaa4WAN7QqKqJ4QchwvwYiAZMQICQQ7uT2/f0xB4xpLoMkmcnM+/l48GDmO99z5ns4Yd4553vOZ8w5h4iISHOigj0AEREJbQoKERFpkYJCRERapKAQEZEWKShERKRFMcEeQFvo3bu3y8zMDPYwREQ6lRUrVux1zqW21i8sgiIzMxOfzxfsYYiIdCpmtiOQfjr1JCIiLVJQiIhIixQUIiLSIgWFiIi0SEEhIiItCigozGyqmRWYWaGZ3dvE63FmNtd7fZmZZTZ47T6vvcDMpjRof8bM9pjZ+kbrSjazd81sq/d3r9PYPhEROU2tBoWZRQOPAVcCOcBsM8tp1O1WYJ9zbijwMPCAt2wOMAsYCUwFHvfWB/Cs19bYvcAi51w2sMh7LiIiQRLIEcVEoNA5t805Vw3MAaY16jMNeM57PB+41MzMa5/jnDvunCsCCr314ZxbAlQ28X4N1/UccF3gmyPtaVv5IT4o2BPsYYhIBwskKPoDxQ2el3htTfZxztUCVUBKgMs2luacK/Me7wLSmupkZrebmc/MfOXl5QFshpyuWU99xnf+mM+iTbuDPRQR6UAhPZnt/N+q1OQ3KznnnnLO5TrnclNTW70DXU7T1t0H2XPwOAA/mrOaz8sPBXlEItJRAgmKUmBAg+cZXluTfcwsBkgCKgJctrHdZpburSsd0LmOEJDnKyYmynj9rvPpEhPF7X/ycfBYTbCHJSIdIJCgyAeyzSzLzGLxT04vaNRnAXCz93g68L53NLAAmOVdFZUFZAPLW3m/huu6GXgtgDFKO6qpq+fllaVcdkYaozKSeOxbZ7G94gj35K2hvl5fpSsS7loNCm/O4S5gIbAJyHPObTCzX5rZtV63p4EUMysE7sG7Usk5twHIAzYCbwN3OufqAMzsRWApMNzMSszsVm9d9wOXm9lW4DLvuQTRok17qDhczcwJGQCcMySFf7nqDN7duJtH398a5NGJSHsz/y/+nVtubq5T9dj2c+uz+azfWcUn/3wJMdH+3y2cc/x43lpeWlnCI7PGMm1sa9coiEioMbMVzrnc1vqF9GS2BN/uA8dYXLCHb56VcTIkAMyM/7zhTCZmJfOTeWvJ397Ulc4iEg4UFNKil1aWUO9gZu6Av3ktLiaap749noxeXbn9Tz627z0chBGKSHtTUEiznHPM85UwMSuZzN7dm+zTs1ssf7xlAmbGLc/ms+9wdQePUkTam4JCmrW8qJKivYe5sYmjiYYGpXTnDzeNp3T/Ub73Jx9Hq+s6aIQi0hEUFNKsPF8JPeJiuHJU31b7jh+UzP/cOJYVX+zjBy+soKauvgNGKCIdQUEhTTp4rIY315VxzZh+dIsN7KvVrxqVzn9eP4rFBeX8eJ7usRAJF4F9AkjEeWNtGUdr6rhxQsunnRqbPXEg+45U8+DbBSR17cIvrh2Jvz6kiHRWCgpp0tz8Yoal9WBMRtIpL3vHhUPYf6SGp5Zso2fXLtxzxfB2GKGIdBQFhfyNLbsPsrp4Pz/7xhlf62jAzLjvyhFUHanh0fcLiY2J4q5LstthpCLSERQU8jfy8ovpEm1cP+7r323tvyFvFDV19Tz0zhbMjDsvHtqGoxSRjqKgkK+orq3nlVX+AoApPeJOa13RUcavZ4zBAb9eWACgsBDphBQU8hXvb97tLwDYyr0TgYqOMh6aMQbwh4UZ/OAihYVIZ6KgkK/I85XQNzGeycPa7sugToSFc44H3y6gptbxw0uH6mookU5CQSEn7ao6xgcFe7jjoiFER7Xth3h0lPHfM8cSEx3Fw+9toepoDT/7xhlEtfH7iEjbU1DISScKAM4Y3zannRqLjjIe/OZoEuJjeOaTIg4cq+H+G0Z9pSqtiIQeBYUAJwoAFnN2CwUA20JUlPGvV+eQ1LUL//PeVg4dq+WR2WOJi4lut/cUkdOjX+UEgGVFlWyvOHLKd2J/HWbGjy4bxr9encPbG3bx3WfzOaDv3xYJWQoKASDPV0xCXAxXnpneYe/53fOz+O8ZY1i2rZIZv1/Kzv1HO+y9RSRwCgrhwIkCgGP70TW2Y08BfXN8Bs/eMpGd+49y3WOfsL60qkPfX0Rap6AQ3lhTxrGa+ja7d+JUnZ/dm3l3nENMlHHjk0tZXLAnKOMQkaYpKIS5vmKGpyV8rQKAbWVE30ReufM8BqV057bnfDy/dDvOqUy5SChQUES4gl0HWVO8n5kTBgT9Bri0xHjyvn8OFw5L5f+9toH/+8o6qmv1BUgiwaagiHB5vtMvANiWesTF8IebcvnBRUN4cXkxs//wGXsOHgv2sEQimoIigp0oAHh5ThrJ3WODPZyToqOMn04dwe++NY6NOw9w7W8/YW3J/mAPSyRiKSgi2KJNu6k8XM2MIE1it+bq0f2Yf8c5REcZ059YyjxfcbCHJBKRFBQRLM9X7C8AmN12BQDb2sh+SSy46zxyB/XiJ/PX8uN5azhaXRfsYYlEFAVFhNpVdYwPt5QzfXxGmxcAbGspPeJ4/taz+eElQ3lpZQnTHvuYwj0Hgz0skYihoIhQJwsA5mYEeygBiY4y7rliOM/dMpGKQ9Vc+7tPeGVVSbCHJRIRFBQRqL7ekecrZtLgZAaltF8BwPYweVgqf/3hBZzZL4m7567hp/PXcPh4bbCHJRLWFBQRaPn2SnZ0UAHA9tA3KZ6/fO9sfnDREOatKOGqRz9i5Rf7gj0skbCloIhAefn+AoBTR3ZcAcC2FhMdxU+njmDO9yZRW+eY8cRSHn53C7V1ukFPpK0FFBRmNtXMCsys0MzubeL1ODOb672+zMwyG7x2n9deYGZTWlunmV1qZivNbLWZfWxm+oLlNnTgWA1vri/j2iAUAGwPZw9O4a0fXcC0Mf14ZNFWpj+xlKK9h4M9LJGw0mpQmFk08BhwJZADzDaznEbdbgX2OeeGAg8DD3jL5gCzgJHAVOBxM4tuZZ2/B/7OOTcW+Avws9PaQvmK19fsDGoBwPaQGN+F39w4lt/OHse28kNc9chHPPtJEfX1qhUl0hYCOaKYCBQ657Y556qBOcC0Rn2mAc95j+cDl5q/cNA0YI5z7rhzrggo9NbX0jodkOg9TgJ2fr1Nk6bk5Rczom8Co4NYALC9XDOmHwvvnszErGR+/vpGZj65lG3lh4I9LJFOL5Cg6A80vCW2xGtrso9zrhaoAlJaWLaldd4GvGlmJcC3gfubGpSZ3W5mPjPzlZeXB7AZsnnXAdaUVDEzN/gFANtLelJXnr1lAg/NGMOW3QeZ+shHPPHh55q7EDkNoTiZfTdwlXMuA/gj8JumOjnnnn
LO5TrnclNTQ/fO4lCSl19Cl2jjuhApANhezIzp4zN4754LuXh4Kve/tZkbfv8pm3cdCPbQRDqlQIKiFGh4QjvDa2uyj5nF4D9lVNHCsk22m1kqMMY5t8xrnwucG9CWSIv8BQBLuCKnb0gVAGxPfRLjeeLvx/O7b42jdN9RvvHox/znm5t034XIKQokKPKBbDPLMrNY/JPTCxr1WQDc7D2eDrzv/N86swCY5V0VlQVkA8tbWOc+IMnMhnnruhzY9PU3T054b9Nu9h2p6TR3YrcVM+Pq0f14754LmTE+g6eWbOOy33zIW+vK9MVIIgGKaa2Dc67WzO4CFgLRwDPOuQ1m9kvA55xbADwNPG9mhUAl/g9+vH55wEagFrjTOVcH0NQ6vfbvAS+ZWT3+4Phum25xhMrzFZOeFM8FIVwAsD316h7L/d8czYzcAfzs1fXc8cJKLhyWyi+uHUlm7851d7pIR7Nw+K0qNzfX+Xy+YA8jZJVVHeW8+9/nzouH8k9XDA/2cIKutq6ePy3dwW/e3UJ1XT3fv3AI379wMN1iW/29SSSsmNkK51xua/1CcTJb2thLK7wCgOPD596J0xETHcV3z89i0T9dyJSRfXl00VYueehDXl5ZonsvRJqgoAhz/gKAJZwzOIWBKd2CPZyQkpYYz29nj2Pe98+hT2Ic9+St4brHP8G3vTLYQxMJKQqKMLesqJIvKjtvAcCOMCEzmVd/cB6/mTmG3QeOMf2Jpdz5l5UUVx4J9tBEQoJOyoa5PF8xCfExTD2zb7CHEtKioowbzspg6pl9efLDbTy55HPe3bCbv580iDsvHkJKj7hgD1EkaHREEcaqjtbw5roypo3tR3yXzl8AsCN0i43h7suHsfjHF3H9uP48+2kRkx9czMPvbuHgsZpgD08kKBQUYez1NTs5XhteBQA7SnpSVx6YPpp37p7M5GGpPLJoKxf++gOe/riIYzX6zm6JLAqKMJbn8xcAHNU//AoAdpShfRL4/d+P57U7zyMnPZF/f2Mjlzz0AS8u/4LqWtWPksigoAhTm8oOsDbMCwB2pDEDevLn287mhdvOJjUxnvteXsfFD33Anz/bwfFaHWFIeFNQhKk8XzGx0VFcH+YFADvaeUN78+oPzuXZWybQJzGOn726ngsf/IDnPt2uU1ISthQUYeh4bR2vrirl8pFp9IqQAoAdycy4aHgfXr7jXP5869kMSO7Kvy3YwOQHF/P0x0UcrVZgSHjR5bFh6L2Ne9h3pEaT2O3MzDg/uzfnDU1h6bYKHl20lX9/YyO/e38rN52TyU3nDNJltRIWFBRhKM9XTL+keM4f2jvYQ4kIZsa5Q3pz7pDe5G+v5MkPP+eRRVt5csnnzBg/gNsuyGJQigoPSueloAgzO/cfZcnWcv7h4qFER2kSu6NNyExmQmYyhXsO8tSSbczNL+aFZTu48sx0bp88mDEDegZ7iCKnTEERZl5aUYJzMEOnnYJqaJ8EHpw+hh9fMZw/frqdP3+2g7+uK2NiVjLfOTeTK3LSiInWFKF0DiozHkbq6x0XPrSYAb268ZfvTQr2cKSBQ8drmbP8C579dDsl+47SLymev5s0iNkTB0bMNw5K6FGZ8Qj0WVEFxZVHVQAwBPWIi+G2Cwbz4U8u5g835ZKV2p1fLyxg0n8t4sfz1rC+tCrYQxRplk49hZG8fH8BwCkjVQAwVEVHGZfnpHF5Thpbdx/kuaXbeXllKfNXlDB+UC9uOmcQU0b2VW0uCSkKijBRdbSGt9bvYmbuAH3IdBLZaQn8x3Wj+MmUEcxfUcLzS7fzj3NW07NbF24Yl8HsiQPITksI9jBFFBThYoEKAHZaSV27cOv5WdxybiZLt1Xw4vIveP6z7TzzSRG5g3oxe+JArhqVTtdY/QIgwaHJ7DBxzW8/prbe8eYPz1dtpzBQceg4L68s5cXlX7Bt72ES4mO4YVx/Zk4YwMh+KvIobSPQyWwdUYSBjTsPsK60in+7JkchESZSesTxvcmDue2CLJYXVfLi8i94Mb+Y55buYETfBL55VgbTxvajT2J8sIcqEUBBEQZOFAC8bqwKAIYbM+PswSmcPTiFnx+p5vW1Zby0ooRfvbmJ/3prE5OHpXLDWRlckZOmuSlpNwqKTu54bR2vrlYBwEjQs1ss3540iG9PGsTn5Yd4eWUJr6ws5YcvriIhLoZvjE7nhrMyyB3UiyjdlS9tSEHRyb27cTf7j9RwoyaxI8qQ1B78ZMoI/uny4XxWVMFLK0pZsGYnc/L9db6uHtOPq0enM6p/kk5HymnTZHYnd9Mzy/l8zyGW/PRi1XaKcEeqa3lnw25eX7OTJVvLqalzDErpxjWj+3H1mHSGpyUoNOQrNJkdAUr3H+WjreX8wyXZCgmhW2wM143rz3Xj+lN1pIaFG3bx+tqdPP5BIb9bXEh2nx5cPbof14xJZ3Bqj2APVzoRBUUndrIA4PiMYA9FQkxSty7MnDCAmRMGsPfQcd5aV8bra8t4+L0tPPzeFkb0TWDKyL5MGdmXM9J1pCEt06mnTqq+3jH514sZlNKNF25TAUAJTFnVUd5ct4uF63eRv6MS52BgcjemjExjysi+nDVQE+GRRKeewtxn2yoo2XeUn0wZHuyhSCeSntSVW8/P4tbzsyg/eJz3Nu1m4YZdPPvpdv7wURGpCXFcnpPG1JF9mTQ4hdgY1Q0VBUWnNddXTKIKAMppSE2IY/bEgcyeOJADx2pYvHkPCzfs4tVVpfxl2RckxMcweVgql47ow0XD+6gcegQLKCjMbCrwCBAN/K9z7v5Gr8cBfwLGAxXAjc657d5r9wG3AnXAD51zC1tap/lPlv4HMMNb5vfOuUdPbzPDS9URfwHAWRNUAFDaRmJ8F6aN7c+0sf05VlPHx1v38s7GXSwuKOeva8swg3EDenLJiD5cMiJN8xoRptWgMLNo4DHgcqAEyDezBc65jQ263Qrsc84NNbNZwAPAjWaWA8wCRgL9gPfMbJi3THPr/A4wABjhnKs3sz5tsaHhZMGaUqpVAFDaSXyXaC7LSeOynDTq6x3rd1bx/uY9vL95Dw+9s4WH3tlCelI8F4/owyXD+3De0N4qWBjmAjmimAgUOue2AZjZHGAa0DAopgE/9x7PB37nHRlMA+Y4544DRWZW6K2PFtZ5B/At51w9gHNuz9ffvPA011dMTnoiZ/ZXcThpX1FRxuiMnozO6MmPLhvGngPH+KCgnEWbd/Oad4oqLiaKiVnJTM5O5YJhvXW/RhgKJCj6A8UNnpcAZzfXxzlXa2ZVQIrX/lmjZU8UJGpunUPwH41cD5TjP121tfGgzOx24HaAgQMHBrAZ4WHDzirWlx7g59fkBHsoEoH6JMafvOz2eG0dy4sqWby5nI+2lvOrNzfBm/65jwuyezM5O5XzhvYmNSEu2MOW0xSKk9lxwDHnXK6Z3QA8A1zQuJNz7ingKfBfHtuxQwyeeb4SfwHAcSoAKMEVFxPNBdmpXJCdCvgvvf1o614+2rqXxZv38PLKUgBy0hO5YJg/OHIzexEXo9NUnU0gQVGKf87ghAyvr
ak+JWYWAyThn9Ruadnm2kuAl73HrwB/DGCMEeFYTR2vrCrlipFp9OymK1AktKQndWVm7gBm5g6gvt6xYecBlmwtZ8mWcp75uIgnP9xGfJcocgclc86QFCYNTmF0RhJdonUJbqgLJCjygWwzy8L/YT4L+FajPguAm4GlwHTgfeecM7MFwF/M7Df4J7OzgeWAtbDOV4GLgSLgQmDL1966MPPuxt1UHa3hxgmaxJbQFhVljMpIYlRGEndePJTDx2tZVlTBki17+WxbBb9eWABAt9hoJmQmM2lwCucMSeHMfonEKDhCTqtB4c053AUsxH8p6zPOuQ1m9kvA55xbADwNPO9NVlfi/+DH65eHf5K6FrjTOVcH0NQ6vbe8H3jBzO4GDgG3td3mdm55vmL69+zKeUN6B3soIqeke1wMl4xI45IRaYD/G/yWFVXy2bYKln5ewQNvbwYgIS6GCVnJnOMFxxnpiapjFgJUwqOTKNl3hAseXMwPL8nm7suHtb6ASCdSfvC4PzS2VfDZ5xVs23sYgIT4GMYP6sWEzGRyB/VizICeuneoDamER5h5aYV/CmdGrgoASvhJTYjjmjH9uGZMPwB2VR3js20VLN9eSX5RJR8U+E9VdYk2RvVP8geHFx76wq72pyOKTuBEAcDMlO78+bbGVyaLhL99h6tZsWMf+Tsq8W3fx9qS/dTU+T+7hvbpwYTMXuQOSmZCZjIDkrvqPo4A6YgijCz1CgD+dOqIYA9FJCh6dY89ebc4+K8AXFtSRf72SnzbK3ljbRkvLvffmpXSPZaxA3oybmBPxg3sxeiMJBLiuwRz+J2egqITmJtfTFLXLlzh/ScRiXTxXaKZmJXMxKxkwH/UvWXPQfK372P1F/tZXbyPRZv9RR3MILtPDy88ejF2QE+GpSVokvwUKChCXNWRGt7esIvZKgAo0qyoKGNE30RG9E3k25MGAf7/O2tK9rPKC453Nu4mz1cCQPfYaEZlJJ0MjtEZSfRNjNcpq2YoKELca14BwBkqAChySpK6dWHysFQmD/PfOe6cY0fFEVYV+486VhXv5w9LtlFb75/r6N0jjlH9ExnVP4lRGT0Z1T+JtMQ4hQcKipCX5ytmZD8VABQ5XWZGZu/uZPbuzvXj/FcPHqupY8POA6wvrWJtSRXrS6v4cEs5XnbQu0ccozOSOLN/EqP7+28gTEuMD+JWBIeCIoSdKAD4i2tHBnsoImEpvks04wf1YvygXifbjlTXsqnsAOtKqlhb6g+PDwr2nAyP1IQ4Rvf3h0dOv0Ry0hPJ6BXeV1opKEJYXn4xsTFRTBvbL9hDEYkY3WJjGD8omfGDkk+2HamuZePOA6wrrWJdSRXrSqtY3CA8EuJjOKNvIjn9EjkjPYEz0hMZlpYQNvOKCooQdaymjldX72TKyL4qACgSZN1iY/w3+GV+NTwKdh1kU9lBNpZVsansIHm+Yo5U1wEQHWUM7t3dCw//n5z0xE5Zdl1BEaLeOVEAUJPYIiGpW2wM4wb2YtzAL09b1dc7vqg8wsayA2wqO8DGnQfIL6rktdU7T/bp3SOOM9ITGJ6WwLC+/r+z03rQLTZ0P45Dd2QRbp5XAPDcISnBHoqIBCgq6ssJ86tGpZ9s33e4mk27/MGxqewgm8oO8KeiHVTX1p/sMyC5qz880hIY3tf/9+DU7iHx/R0KihBUsu8IHxfu5R8vzSZKNwWJdHq9usdy7pDenNug8nNdvWNHxWG27D7Ilt2HKNh9kC27DvJBQfnJS3ajo4zMlG4ng+PEn8yUbh1ajl1BEYLmr/DfFDR9vAoAioSr6ChjcGoPBqf2YOqZX7ZX19ZTtPfwyeDYsvsgG3ce4K31uzhRmi82Ooqs3t0ZmtaDe6eOYEByt3Ydq4IixNTXO+b5Sjh/aG8yerXvzheR0BMbE8Xwvv7TT4z5sv1odR2Few55RyAHKdxziHUlVcTGtP+RhYIixHz6eQWl+49y75UqACgiX+rqlR0ZldHxN9/qOwdDzFyfvwDg5SoAKCIhQkERQvYfqWbhhl1cP65/2NyoIyKdn4IihLy2eqdXAFCT2CISOhQUISTPV8yZ/RMZ2U8FAEUkdCgoQsT60io27DzATN2JLSIhRkERIvJ8XgHAMf2DPRQRka9QUISAYzV1vLqqlKkj+5LUTd/tKyKhRUERAhZu2MWBY7XcOEGnnUQk9CgoQsA8XwkZvbpyzmAVABSR0KOgCLLiSn8BwBnjB6gAoIiEJAVFkM1fUYIZTNe9EyISohQUQVRX75i/wl8AsH/PrsEejohIkxQUQfTp53sp3X9Uk9giEtIUFEE0N7+Ynt1UAFBEQpuCIkj2H6nmnQ27uW5s/5D4qkMRkeYEFBRmNtXMCsys0MzubeL1ODOb672+zMwyG7x2n9deYGZTTmGdj5rZoa+5XSHv1VWlVNfVq2SHiIS8VoPCzKKBx4ArgRxgtpnlNOp2K7DPOTcUeBh4wFs2B5gFjASmAo+bWXRr6zSzXKDXaW5bSMvzlTCqfxI5/RKDPRQRkRYFckQxESh0zm1zzlUDc4BpjfpMA57zHs8HLjUz89rnOOeOO+eKgEJvfc2u0wuRXwM/Pb1NC13rS6vYWHaAmbokVkQ6gUCCoj9Q3OB5idfWZB/nXC1QBaS0sGxL67wLWOCcK2tpUGZ2u5n5zMxXXl4ewGaEjjxfMXExUVw7VgUARST0hdRktpn1A2YAv22tr3PuKedcrnMuNzU1tf0H10ZOFgA8sy9JXVUAUERCXyBBUQo0nHHN8Nqa7GNmMUASUNHCss21jwOGAoVmth3oZmaFAW5Lp3CyAKAmsUWkkwgkKPKBbDPLMrNY/JPTCxr1WQDc7D2eDrzvnHNe+yzvqqgsIBtY3tw6nXN/dc71dc5lOucygSPeBHnYyPMVMyC5K5NUAFBEOomY1jo452rN7C5gIRANPOOc22BmvwR8zrkFwNPA895v/5X4P/jx+uUBG4Fa4E7nXB1AU+ts+80LLcWVR/iksIJ7Lh+mAoAi0mm0GhQAzrk3gTcbtf1rg8fH8M8tNLXsr4BfBbLOJvr0CGR8ncU8rwDgN8fraicR6TxCajI7nNXVO+b7irkgO1UFAEWkU1FQdJBPCveys+qYJrFFpNNRUHSQub5ienXrwmU5fYI9FBGRU6Kg6AD7Dlfz7obdXDdOBQBFpPNRUHSAV1erAKCIdF4KinbmnGNufjGjM5I4I10FAEWk81FQtLP1pQfYvOsgM3Q0ISKdlIKinZ0sADimX7CHIiLytSgo2tGxmjpeXV3KlSoAKCKdmIKiHb29fhcHj9Uyc4JOO4lI56WgaEcnCwBmqQCgiHReCop28kXFET79vIKZ4weoAKCIdGoKinYyf0WxCgCKSFhQULSDunrHvBUlTM5OpZ8KAIpIJ6egaAcfF+6lrOoYN2oSW0TCgIKiHeTl+wsAXnqGCgCKSOenoGhjlYereWfjLq4fl6ECgCISFhQUbezVVaXU1DlmTtAktoiEBwVFG3LOkecrZkxGEiP6qgCgiIQHBUUbWldapQKAIhJ2FBRt6GQBwLEqACgi
4UNB0UaO1dTx2uqdXDUqncR4FQAUkfChoGgjJwsA6rSTiIQZBUUbmZtfzMDkbpydlRzsoYiItCkFRRvYUXGYpdsqmJmboQKAIhJ2FBRtYP6KEqJUAFBEwpSC4jTV1Tvmryhh8rBU0pNUAFBEwo+C4jR9tLWcsqpjmsQWkbCloDhNeb5ikrvHctkZacEeiohIu1BQnIbKw9W8u3E314/rT2yM/ilFJDwF9OlmZlPNrMDMCs3s3iZejzOzud7ry8wss8Fr93ntBWY2pbV1mtkLXvt6M3vGzEL27rVXThQA1GknEQljrQaFmUUDjwFXAjnAbDPLadTtVmCfc24o8DDwgLdsDjALGAlMBR43s+hW1vkCMAIYBXQFbjutLWwnzjnm+YoZM6Anw/smBHs4IiLtJpAjiolAoXNum3OuGpgDTGvUZxrwnPd4PnCpmZnXPsc5d9w5VwQUeutrdp3OuTedB1gOhOQ1p2tL/AUAZ+aG5PBERNpMIEHRHyhu8LzEa2uyj3OuFqgCUlpYttV1eqecvg283dSgzOx2M/OZma+8vDyAzWhbeb5i4rtEcc0YFQAUkfAWyjOwjwNLnHMfNfWic+4p51yucy43NTW1Qwd2tLqOBat3ctWZKgAoIuEvJoA+pUDD2doMr62pPiVmFgMkARWtLNvsOs3s34BU4P8EML4O9/aGMg4er2XmBE1ii0j4C+SIIh/INrMsM4vFPzm9oFGfBcDN3uPpwPveHMMCYJZ3VVQWkI1/3qHZdZrZbcAUYLZzrv70Nq99zM0vZlCKCgCKSGRo9YjCOVdrZncBC4Fo4Bnn3AYz+yXgc84tAJ4GnjezQqAS/wc/Xr88YCNQC9zpnKsDaGqd3ls+AewAlvrnw3nZOffLNtvi07Sj4jCfbavkJ1OG441PRCSsBXLqCefcm8Cbjdr+tcHjY8CMZpb9FfCrQNbptQc0pmCZ5/MKAJ6lq51EJDKE8mR2yDlRAPDCYan0TYoP9nBERDqEguIULNlazq4DKgAoIpFFQXEK8vKLSekey6UqACgiEURBEaCKQ8d5b5MKAIpI5NEnXoBOFgDUvRMiEmEUFAFwzpHnK2bsgJ4MS1MBQBGJLAqKAKwpqWLL7kOaxBaRiKSgCMCXBQDTgz0UEZEOp6BoxdHqOl5fvZOrRqWToAKAIhKBFBSteGu9vwDgjTrtJCIRSkHRirn5xWSmdGOiCgCKSIRSULRg+97DLCuqZEbuABUAFJGIpaBowbwVxSoAKCIRT0HRjNq6euavKOGi4X1UAFBEIpqCohkfbd3L7gPHmZmrowkRiWwKimbM9QoAXjJCBQBFJLIpKJqgAoAiIl/Sp2ATXllVSm2940YVABQRUVA05pxjbn4x4wb2JFsFAEVEFBSNrS7ez9Y9KgAoInKCgqKRPF8JXbtEc/VoFQAUEQEFxVccqa7l9TUqACgi0pCCooG31u3i0PFaTWKLiDSgoGhgrq+YrN7dmZDZK9hDEREJGQoKT9HewywvqmRGboYKAIqINKCg8MzzqQCgiEhTFBT4CwC+tLKEi4f3IS1RBQBFRBpSUABLtpaz+8BxZujeCRGRv6GgwF8AsHePWC49o0+whyIiEnIiPij2HjrOok17uH5cf7pER/w/h4jI34j4T8ZXVqoAoIhISwIKCjObamYFZlZoZvc28Xqcmc31Xl9mZpkNXrvPay8wsymtrdPMsrx1FHrrjD3NbWyWc448XzFnDezJ0D4qACgi0pRWg8LMooHHgCuBHGC2meU06nYrsM85NxR4GHjAWzYHmAWMBKYCj5tZdCvrfAB42FvXPm/d7WKVCgCKiLQqkCOKiUChc26bc64amANMa9RnGvCc93g+cKn571qbBsxxzh13zhUBhd76mlynt8wl3jrw1nnd1966VszzFfsLAI7p115vISLS6QUSFP2B4gbPS7y2Jvs452qBKiClhWWba08B9nvraO69ADCz283MZ2a+8vLyADbjbw1M7s53zsukR1zM11peRCQSdNpPSOfcU8BTALm5ue7rrOOOi4a06ZhERMJRIEcUpUDDk/gZXluTfcwsBkgCKlpYtrn2CqCnt47m3ktERDpQIEGRD2R7VyPF4p+cXtCozwLgZu/xdOB955zz2md5V0VlAdnA8ubW6S2z2FsH3jpf+/qbJyIip6vVU0/OuVozuwtYCEQDzzjnNpjZLwGfc24B8DTwvJkVApX4P/jx+uUBG4Fa4E7nXB1AU+v03vKfgTlm9h/AKm/dIiISJOb/Jb5zy83NdT6fL9jDEBHpVMxshXMut7V+EX9ntoiItExBISIiLVJQiIhIixQUIiLSorCYzDazcmDH11y8N7C3DYfTGWibI4O2Ofyd7vYOcs6lttYpLILidJiZL5BZ/3CibY4M2ubw11Hbq1NPIiLSIgWFiIi0SEHhFRaMMNrmyKBtDn8dsr0RP0chIiIt0xGFiIi0SEEhIiItiuigMLOpZlZgZoVmdm+wx3MqzGyAmS02s41mtsHM/tFrTzazd81sq/d3L6/dzOxRb1vXmtlZDdZ1s9d/q5nd3KB9vJmt85Z51Puq2qDzvnd9lZm94T3PMrNl3jjneqXr8crbz/Xal5lZZoN13Oe1F5jZlAbtIfczYWY9zWy+mW02s01mdk6472czu9v7uV5vZi+aWXy47Wcze8bM9pjZ+gZt7b5fm3uPFjnnIvIP/vLmnwODgVhgDZAT7HGdwvjTgbO8xwnAFiAHeBC412u/F3jAe3wV8BZgwCRgmdeeDGzz/u7lPe7lvbbc62veslcGe7u9cd0D/AV4w3ueB8zyHj8B3OE9/gHwhPd4FjDXe5zj7e84IMv7OYgO1Z8J/N8df5v3OBboGc77Gf/XHxcBXRvs3++E234GJgNnAesbtLX7fm3uPVoca7D/EwTxh/EcYGGD5/cB9wV7XKexPa8BlwMFQLrXlg4UeI+fBGY36F/gvT4beLJB+5NeWzqwuUH7V/oFcTszgEXAJcAb3n+CvUBM4/2K//tOzvEex3j9rPG+PtEvFH8m8H9bZBHehSeN91847mf8QVHsffjFePt5SjjuZyCTrwZFu+/X5t6jpT+RfOrpxA/jCSVeW6fjHWqPA5YBac65Mu+lXUCa97i57W2pvaSJ9mD7H+CnQL33PAXY75yr9Z43HOfJbfNer/L6n+q/RTBlAeXAH73Tbf9rZt0J4/3snCsFHgK+AMrw77cVhPd+PqEj9mtz79GsSA6KsGBmPYCXgB855w40fM35f2UIm+ufzexqYI9zbkWwx9KBYvCfnvi9c24ccBj/6YKTwnA/9wKm4Q/JfkB3YGpQBxUEHbFfA32PSA6KUmBAg+cZXlunYWZd8IfEC865l73m3WaW7r2eDuzx2pvb3pbaM5poD6bzgGvNbDswB//pp0eAnmZ24mt9G47z5LZ5rycBFZz6v0UwlQAlzrll3vP5+IMjnPfzZUCRc67cOVcDvIx/34fzfj6hI/Zrc+/RrEgOinwg27uSIhb/JNiCII8pYN4VDE8Dm5xzv2nw0gLgxJUPN+OfuzjRfpN39cQkoMo7/Fw
IXGFmvbzf5K7Af/62DDhgZpO897qpwbqCwjl3n3MuwzmXiX9/ve+c+ztgMTDd69Z4m0/8W0z3+juvfZZ3tUwWkI1/4i/kfiacc7uAYjMb7jVdiv876MN2P+M/5TTJzLp5YzqxzWG7nxvoiP3a3Hs0L5iTVsH+g/9Kgi34r4D4l2CP5xTHfj7+Q8a1wGrvz1X4z80uArYC7wHJXn8DHvO2dR2Q22Bd3wUKvT+3NGjPBdZ7y/yORhOqQd7+i/jyqqfB+D8ACoF5QJzXHu89L/ReH9xg+X/xtquABlf5hOLPBDAW8Hn7+lX8V7eE9X4GfgFs9sb1PP4rl8JqPwMv4p+DqcF/5HhrR+zX5t6jpT8q4SEiIi2K5FNPIiISAAWFiIi0SEEhIiItUlCIiEiLFBQiItIiBYWIiLRIQSEiIi36/zob5nVzA95IAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "lrs=[]\n", - "for i in range(100000):\n", - " sc.step()\n", - " lrs.append(sc.get_lr())\n", - "xs = list(range(100000))\n", - "plt.plot(xs, lrs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e613fe16", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f0fd9f40", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/compute_cmvn_loader_test.ipynb b/.notebook/compute_cmvn_loader_test.ipynb deleted file mode 100644 index 2b0a8b75f..000000000 --- a/.notebook/compute_cmvn_loader_test.ipynb +++ /dev/null @@ -1,793 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "purple-consequence", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x\n" - ] - }, - { - "data": { - "text/plain": [ - "'/home/ssd5/zhanghui/DeepSpeech2.x'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%cd ..\n", - "%pwd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "defensive-mason", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "patient-convention", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Namespace(delta_delta=False, feat_dim=80, manifest_path='examples/aishell/s1/data/manifest.train.raw', num_samples=-1, num_workers=16, output_path='data/librispeech/mean_std.npz', sample_rate=16000, specgram_type='fbank', stride_ms=10.0, window_ms=25.0)\n" - ] - } - ], - "source": [ - "import argparse\n", - "import functools\n", - "\n", - "from deepspeech.frontend.augmentor.augmentation import AugmentationPipeline\n", - "from deepspeech.frontend.featurizer.audio_featurizer import AudioFeaturizer\n", - "from deepspeech.frontend.normalizer import FeatureNormalizer\n", - "from deepspeech.utils.utility import add_arguments\n", - "from deepspeech.utils.utility import print_arguments\n", - "\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "# yapf: disable\n", - "add_arg('num_samples', int, -1, \"# of samples to for statistics.\")\n", - "add_arg('specgram_type', str,\n", - " 'fbank',\n", - " \"Audio feature type. 
diff --git a/.notebook/compute_cmvn_loader_test.ipynb b/.notebook/compute_cmvn_loader_test.ipynb deleted file mode 100644 index 2b0a8b75f..000000000 --- a/.notebook/compute_cmvn_loader_test.ipynb +++ /dev/null @@ -1,793 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "purple-consequence", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x\n" - ] - }, - { - "data": { - "text/plain": [ - "'/home/ssd5/zhanghui/DeepSpeech2.x'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%cd ..\n", - "%pwd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "defensive-mason", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "patient-convention", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Namespace(delta_delta=False, feat_dim=80, manifest_path='examples/aishell/s1/data/manifest.train.raw', num_samples=-1, num_workers=16, output_path='data/librispeech/mean_std.npz', sample_rate=16000, specgram_type='fbank', stride_ms=10.0, window_ms=25.0)\n" - ] - } - ], - "source": [ - "import argparse\n", - "import functools\n", - "\n", - "from deepspeech.frontend.augmentor.augmentation import AugmentationPipeline\n", - "from deepspeech.frontend.featurizer.audio_featurizer import AudioFeaturizer\n", - "from deepspeech.frontend.normalizer import FeatureNormalizer\n", - "from deepspeech.utils.utility import add_arguments\n", - "from deepspeech.utils.utility import print_arguments\n", - "\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "# yapf: disable\n", - "add_arg('num_samples',   int,  -1,    \"# of samples to use for statistics.\")\n", - "add_arg('specgram_type', str,\n", - "        'fbank',\n", - "        \"Audio feature type. Options: linear, mfcc, fbank.\",\n", - "        choices=['linear', 'mfcc', 'fbank'])\n", - "add_arg('feat_dim',      int, 80, \"Audio feature dim.\")\n", - "add_arg('delta_delta',   bool,\n", - "        False,\n", - "        \"Audio feature with delta delta.\")\n", - "add_arg('stride_ms',     float, 10.0,  \"stride length in ms.\")\n", - "add_arg('window_ms',     float, 25.0,  \"window length in ms.\")\n", - "add_arg('sample_rate',   int, 16000,  \"target sample rate.\")\n", - "add_arg('manifest_path', str,\n", - "        'examples/aishell/s1/data/manifest.train.raw',\n", - "        \"Filepath of manifest to compute normalizer's mean and stddev.\")\n", - "add_arg('num_workers',\n", - "        default=16,\n", - "        type=int,\n", - "        help='num of subprocess workers for processing')\n", - "add_arg('output_path',   str,\n", - "        'data/librispeech/mean_std.npz',\n", - "        \"Filepath to write mean and stddev to (.npz).\")\n", - "# yapf: enable\n", - "args = parser.parse_args([])\n", - "print(args)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "enormous-currency", - "metadata": {}, - "outputs": [], - "source": [ - "import random\n", - "\n", - "import numpy as np\n", - "import paddle\n", - "from paddle.io import DataLoader\n", - "from paddle.io import Dataset\n", - "\n", - "from deepspeech.frontend.audio import AudioSegment\n", - "from deepspeech.frontend.utility import load_cmvn\n", - "from deepspeech.frontend.utility import read_manifest\n", - "\n", - "class CollateFunc(object):\n", - "    ''' Collate function for AudioDataset\n", - "    '''\n", - "    def __init__(self):\n", - "        pass\n", - "    \n", - "    def __call__(self, batch):\n", - "        # reduce a batch of (D, T) feature matrices to (frame count, per-dim sum, per-dim sum of squares)\n", - "        mean_stat = None\n", - "        var_stat = None\n", - "        number = 0\n", - "        for feat in batch:\n", - "            sums = np.sum(feat, axis=1)\n", - "            if mean_stat is None:\n", - "                mean_stat = sums\n", - "            else:\n", - "                mean_stat += sums\n", - "\n", - "            square_sums = np.sum(np.square(feat), axis=1)\n", - "            if var_stat is None:\n", - "                var_stat = square_sums\n", - "            else:\n", - "                var_stat += square_sums\n", - "\n", - "            number += feat.shape[1]\n", - "        #return paddle.to_tensor(number), paddle.to_tensor(mean_stat), paddle.to_tensor(var_stat)\n", - "        return number, mean_stat, var_stat\n", - "\n", - "\n", - "class AudioDataset(Dataset):\n", - "    def __init__(self, manifest_path, feature_func, num_samples=-1, rng=None):\n", - "        self.feature_func = feature_func\n", - "        self._rng = rng\n", - "        manifest = read_manifest(manifest_path)\n", - "        if num_samples == -1:\n", - "            sampled_manifest = manifest\n", - "        else:\n", - "            sampled_manifest = self._rng.sample(manifest, num_samples)\n", - "        self.items = sampled_manifest\n", - "\n", - "    def __len__(self):\n", - "        return len(self.items)\n", - "\n", - "    def __getitem__(self, idx):\n", - "        key = self.items[idx]['feat']\n", - "        audioseg = AudioSegment.from_file(key)\n", - "        feat = self.feature_func(audioseg) #(D, T)\n", - "        return feat" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "armed-semester", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "process 1000 wavs,450739 frames\n", - "process 2000 wavs,887447 frames\n", - "process 3000 wavs,1354148 frames\n", - "process 4000 wavs,1816494 frames\n", - "process 5000 wavs,2359211 frames\n", - "process 6000 wavs,2828455 frames\n", - "process 7000 wavs,3276186 frames\n", - "process 8000 wavs,3692234 frames\n", - "process 9000 wavs,4139360 frames\n", - "process 10000 wavs,4591528 frames\n", - "process 11000 wavs,5020114 frames\n", - "process 12000 wavs,5459523 frames\n", - "process 13000 
wavs,5899534 frames\n", - "process 14000 wavs,6323242 frames\n", - "process 15000 wavs,6736597 frames\n", - "process 16000 wavs,7207686 frames\n", - "process 17000 wavs,7637800 frames\n", - "process 18000 wavs,8093004 frames\n", - "process 19000 wavs,8529518 frames\n", - "process 20000 wavs,8906022 frames\n", - "process 21000 wavs,9352652 frames\n", - "process 22000 wavs,9807495 frames\n", - "process 23000 wavs,10247938 frames\n", - "process 24000 wavs,10700011 frames\n", - "process 25000 wavs,11126134 frames\n", - "process 26000 wavs,11558061 frames\n", - "process 27000 wavs,12010359 frames\n", - "process 28000 wavs,12470938 frames\n", - "process 29000 wavs,12916013 frames\n", - "process 30000 wavs,13345816 frames\n", - "process 31000 wavs,13752365 frames\n", - "process 32000 wavs,14174801 frames\n", - "process 33000 wavs,14642170 frames\n", - "process 34000 wavs,15053557 frames\n", - "process 35000 wavs,15531890 frames\n", - "process 36000 wavs,16022711 frames\n", - "process 37000 wavs,16437688 frames\n", - "process 38000 wavs,16859517 frames\n", - "process 39000 wavs,17307676 frames\n", - "process 40000 wavs,17796629 frames\n", - "process 41000 wavs,18264151 frames\n", - "process 42000 wavs,18711898 frames\n", - "process 43000 wavs,19159890 frames\n", - "process 44000 wavs,19576435 frames\n", - "process 45000 wavs,19992793 frames\n", - "process 46000 wavs,20464449 frames\n", - "process 47000 wavs,20886021 frames\n", - "process 48000 wavs,21317318 frames\n", - "process 49000 wavs,21738034 frames\n", - "process 50000 wavs,22171890 frames\n", - "process 51000 wavs,22622238 frames\n", - "process 52000 wavs,23100734 frames\n", - "process 53000 wavs,23526901 frames\n", - "process 54000 wavs,23969746 frames\n", - "process 55000 wavs,24418691 frames\n", - "process 56000 wavs,24862546 frames\n", - "process 57000 wavs,25336448 frames\n", - "process 58000 wavs,25778435 frames\n", - "process 59000 wavs,26216199 frames\n", - "process 60000 wavs,26694692 frames\n", - "process 61000 wavs,27148978 frames\n", - "process 62000 wavs,27617088 frames\n", - "process 63000 wavs,28064946 frames\n", - "process 64000 wavs,28519843 frames\n", - "process 65000 wavs,28989722 frames\n", - "process 66000 wavs,29470156 frames\n", - "process 67000 wavs,29952931 frames\n", - "process 68000 wavs,30360555 frames\n", - "process 69000 wavs,30797929 frames\n", - "process 70000 wavs,31218227 frames\n", - "process 71000 wavs,31663934 frames\n", - "process 72000 wavs,32107468 frames\n", - "process 73000 wavs,32541943 frames\n", - "process 74000 wavs,33010702 frames\n", - "process 75000 wavs,33448082 frames\n", - "process 76000 wavs,33886812 frames\n", - "process 77000 wavs,34338108 frames\n", - "process 78000 wavs,34761495 frames\n", - "process 79000 wavs,35199730 frames\n", - "process 80000 wavs,35669630 frames\n", - "process 81000 wavs,36122402 frames\n", - "process 82000 wavs,36604561 frames\n", - "process 83000 wavs,37085552 frames\n", - "process 84000 wavs,37517500 frames\n", - "process 85000 wavs,37987196 frames\n", - "process 86000 wavs,38415721 frames\n", - "process 87000 wavs,38889467 frames\n", - "process 88000 wavs,39337809 frames\n", - "process 89000 wavs,39792342 frames\n", - "process 90000 wavs,40287946 frames\n", - "process 91000 wavs,40719461 frames\n", - "process 92000 wavs,41178919 frames\n", - "process 93000 wavs,41659635 frames\n", - "process 94000 wavs,42132985 frames\n", - "process 95000 wavs,42584564 frames\n", - "process 96000 wavs,43018598 frames\n", - "process 97000 wavs,43480662 frames\n", - "process 
98000 wavs,43973670 frames\n", - "process 99000 wavs,44448190 frames\n", - "process 100000 wavs,44935034 frames\n", - "process 101000 wavs,45379812 frames\n", - "process 102000 wavs,45821207 frames\n", - "process 103000 wavs,46258420 frames\n", - "process 104000 wavs,46743733 frames\n", - "process 105000 wavs,47206922 frames\n", - "process 106000 wavs,47683041 frames\n", - "process 107000 wavs,48122809 frames\n", - "process 108000 wavs,48594623 frames\n", - "process 109000 wavs,49086358 frames\n", - "process 110000 wavs,49525568 frames\n", - "process 111000 wavs,49985820 frames\n", - "process 112000 wavs,50428262 frames\n", - "process 113000 wavs,50897957 frames\n", - "process 114000 wavs,51344589 frames\n", - "process 115000 wavs,51774621 frames\n", - "process 116000 wavs,52243372 frames\n", - "process 117000 wavs,52726025 frames\n", - "process 118000 wavs,53170026 frames\n", - "process 119000 wavs,53614141 frames\n", - "process 120000 wavs,54071271 frames\n" - ] - } - ], - "source": [ - "\n", - "augmentation_pipeline = AugmentationPipeline('{}')\n", - "audio_featurizer = AudioFeaturizer(\n", - " specgram_type=args.specgram_type,\n", - " feat_dim=args.feat_dim,\n", - " delta_delta=args.delta_delta,\n", - " stride_ms=args.stride_ms,\n", - " window_ms=args.window_ms,\n", - " n_fft=None,\n", - " max_freq=None,\n", - " target_sample_rate=args.sample_rate,\n", - " use_dB_normalization=True,\n", - " target_dB=-20)\n", - "\n", - "def augment_and_featurize(audio_segment):\n", - " augmentation_pipeline.transform_audio(audio_segment)\n", - " return audio_featurizer.featurize(audio_segment)\n", - "\n", - "\n", - "collate_func = CollateFunc()\n", - "\n", - "dataset = AudioDataset(\n", - " args.manifest_path,\n", - " augment_and_featurize, \n", - " args.num_samples)\n", - "\n", - "batch_size = 20\n", - "data_loader = DataLoader(\n", - " dataset,\n", - " batch_size=batch_size,\n", - " shuffle=False,\n", - " num_workers=args.num_workers,\n", - " collate_fn=collate_func)\n", - "\n", - "with paddle.no_grad():\n", - " all_mean_stat = None\n", - " all_var_stat = None\n", - " all_number = 0\n", - " wav_number = 0\n", - " for i, batch in enumerate(data_loader()):\n", - " #for batch in data_loader():\n", - " number, mean_stat, var_stat = batch\n", - " if i == 0:\n", - " all_mean_stat = mean_stat\n", - " all_var_stat = var_stat\n", - " else:\n", - " all_mean_stat += mean_stat\n", - " all_var_stat += var_stat\n", - " all_number += number\n", - " wav_number += batch_size\n", - "\n", - " if wav_number % 1000 == 0:\n", - " print('process {} wavs,{} frames'.format(wav_number,\n", - " all_number))\n", - "\n", - "cmvn_info = {\n", - " 'mean_stat': list(all_mean_stat.tolist()),\n", - " 'var_stat': list(all_var_stat.tolist()),\n", - " 'frame_num': all_number\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "danish-executive", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'mean_stat': [-813852467.7953382, -769025957.9140725, -809499593.411409, -774700574.014532, -750961217.5896736, -760564397.2864963, -805662399.3771614, -843490965.4231446, -850242081.9416809, -857678651.504435, -879067453.9826999, -908602072.3856701, -936850957.7187386, -957242686.489041, -968425442.0916103, -972687545.5953809, -980383731.7683417, -991533337.6343704, -1001966818.1164789, -1010334169.7486078, -1016855066.9099333, -1022176245.7021623, -1025700476.4788507, -1030678878.3195274, -1037075963.124199, -1042705719.0195516, -1047422212.6492896, -1049003537.271861, 
-1050314833.7453628, -1050772191.0204058, -1050010034.9948177, -1050436065.1336465, -1053327181.7978873, -1058710548.2036785, -1065950852.4966162, -1071709705.0060445, -1077682778.259181, -1083371045.272074, -1089708906.2657735, -1096312217.7865202, -1101089858.8364556, -1104965332.4332569, -1107791702.5223634, -1109431075.2374773, -1110066333.0280604, -1110382732.0722318, -1110480306.3793216, -1110203297.7110727, -1109972534.3583376, -1109378081.8792782, -1108212059.413654, -1107235713.2041805, -1106973581.9280007, -1107352339.7860134, -1108730029.862537, -1110425202.83704, -1113220669.4552443, -1115887535.4870913, -1118105356.3628063, -1120001376.8503075, -1121135822.320366, -1122265971.8751016, -1123990217.401155, -1125786729.6230593, -1127784957.2745507, -1129180108.9033566, -1132000461.6688302, -1134675829.8190608, -1137652487.5164194, -1141755948.0463965, -1145340901.5468378, -1148637682.593287, -1151755522.470022, -1154981643.2268832, -1157417488.840151, -1161240429.0989249, -1165411128.671642, -1170521097.1034513, -1176307165.5109766, -1183456865.0039694, -1190535938.6591117, -1197946309.0472982, -1203596565.037139, -1207563038.1241052, -1209707561.5829782, -1211407066.2452552, -1211884576.9201162, -1212778872.005509, -1214041413.8080075, -1215367953.1745043, -1216850831.482193, -1217678325.5351057, -1218854289.54188, -1219325064.8610544, -1219080344.7580786, -1218541313.657531, -1217889833.2067819, -1216552930.1654336, -1216423777.4113154, -1216575252.225508, -1217075384.9826024, -1217391577.901724, -1217838974.57273, -1218131805.6054134, -1218294889.7465532, -1218566666.1755593, -1218790537.5519717, -1218748668.9956846, -1218603191.4941735, -1218004566.4348054, -1217312410.127734, -1217207493.9522285, -1217284002.3834674, -1217644312.51745, -1218039821.6444128, -1218721811.6269798, -1219121088.9265897, -1219014460.8090584, -1218530127.6776083, -1217952335.451711, -1217316073.8666434, -1217035380.1151958, -1216636431.2964456, -1216257015.2945514, -1215658496.1208403, -1215097272.0976632, -1214669859.2064147, -1214593853.4809475, -1214599475.7838447, -1214575440.823035, -1214158828.8008435, -1213482920.2673717, -1212476577.5897374, -1211251374.2198513, -1210284855.590475, -1209302456.065669, -1209106252.6625297, -1209373211.5146718, -1209689421.7984035, -1210021342.495856, -1210650609.3592312, -1211428521.3900626, -1212616111.4257205, -1213820075.2948189, -1215320588.7144456, -1217175082.2739282, -1219703351.4585004, -1222007827.120464, -1224637375.5900724, -1228367798.912171, -1234853879.862459, -1247222219.867692, -1268562808.1616178, -1302034822.9569275, -1347823631.0776038, -1402753916.9445229, -1458826717.3262982, -1505843092.0970414, -1534278782.249077, -1543955545.8994718, -1600409154.893352], 'var_stat': [12665413908.91729, 11145088801.244318, 12567119446.035736, 11758392758.06822, 11200687982.736668, 11551903443.711124, 12880777868.435602, 14084854368.236998, 14394011058.866192, 14678818621.277662, 15346278722.626339, 16268053979.757076, 17191705347.854794, 17877540386.548733, 18251857849.077663, 18392628178.710472, 18645534548.4045, 19018598212.22902, 19366711357.782673, 19655730286.72857, 19890681996.786858, 20094163350.461906, 20227774955.225887, 20423525628.66887, 20669928826.76939, 20882313568.247944, 21062392676.270527, 21126648821.879055, 21185210734.751118, 21209014745.520447, 21182293842.91236, 21197433134.875977, 21302147790.662144, 21504666657.651955, 21781818550.89697, 21996170165.145462, 22217169779.096275, 22431161762.176693, 22672708668.38104, 
22922683961.072956, 23101137011.201683, 23249680793.556847, 23358894817.24979, 23422895267.919228, 23449479198.303394, 23464433357.671055, 23469197140.124596, 23459013479.866177, 23447935341.542686, 23422585038.052387, 23375601301.949135, 23338397991.497776, 23329682884.21905, 23348002892.39853, 23406274659.89975, 23478242518.92228, 23592891371.876236, 23703885161.772205, 23797158601.65954, 23875230355.66992, 23918333664.3946, 23968582109.371258, 24040547318.081936, 24112364295.110058, 24189973697.612144, 24242165205.640236, 24364255205.82311, 24472408850.760197, 24590211203.05312, 24763026764.005527, 24909192634.69144, 25043438176.23281, 25167141466.500504, 25297108031.48665, 25395377064.0999, 25550930772.86505, 25721404827.10336, 25931101211.156487, 26168988710.098465, 26465528802.762875, 26760033029.443783, 27075408488.605213, 27316626931.655052, 27487275073.52796, 27579518448.2332, 27652308513.875782, 27673412508.45838, 27711509210.702576, 27767312240.641487, 27827464683.295334, 27894794590.957966, 27935988489.16511, 27992337099.891083, 28019655483.58796, 28014286886.252903, 27996189233.857716, 27973078840.875465, 27920045013.68706, 27917103211.22359, 27927566165.64652, 27953525818.61368, 27973386070.140022, 27999317832.502476, 28019494120.641834, 28033010746.452637, 28051086123.896503, 28066195174.191753, 28068570977.318798, 28064890246.85437, 28042424375.860577, 28015849655.869568, 28014812222.566605, 28021039053.959835, 28039270607.169422, 28058271295.10199, 28088976520.10178, 28107824988.74732, 28105633030.784756, 28087681357.818607, 28065484299.963837, 28039555887.004284, 28028214431.52875, 28011714871.929447, 27995603790.480755, 27970125897.561134, 27946436130.511288, 27929044772.5522, 27926612443.390316, 27926256324.387302, 27924771848.71099, 27905526922.390133, 27876268519.168198, 27832532606.552593, 27779497699.976765, 27737034351.907337, 27692129825.179924, 27684252911.371475, 27698882622.878677, 27712387157.27985, 27726474638.933037, 27752647691.051613, 27786197932.382797, 27836378752.662235, 27887415700.334576, 27949784230.702114, 28028117657.84245, 28136313097.200474, 28234098926.207996, 28345845477.25874, 28507222800.146496, 28793832339.90449, 29350765483.070816, 30328262350.231213, 31894930713.76519, 34093669067.422382, 36801959396.22739, 39638995447.49344, 42088579425.44825, 43616108982.85117, 44152063315.31461, 47464832889.5967], 'frame_num': 54129649}\n" - ] - } - ], - "source": [ - "print(cmvn_info)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "accurate-terminal", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "dominant-abuse", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " \n", - "process 1000 wavs,450240 frames\n", - " \n", - "process 2000 wavs,886411 frames\n", - " \n", - "process 3000 wavs,1352580 frames\n", - " \n", - "process 4000 wavs,1814397 frames\n", - " \n", - "process 5000 wavs,2356587 frames\n", - " \n", - "process 6000 wavs,2825310 frames\n", - " \n", - "process 7000 wavs,3272506 frames\n", - " \n", - "process 8000 wavs,3688045 frames\n", - " \n", - "process 9000 wavs,4134669 frames\n", - " \n", - "process 10000 wavs,4586357 frames\n", - " \n", - "process 11000 wavs,5014429 frames\n", - " \n", - "process 12000 wavs,5453334 frames\n", - " \n", - "process 13000 wavs,5892888 frames\n", - " \n", - "process 14000 wavs,6316059 frames\n", - " \n", - "process 15000 wavs,6728870 frames\n", - " \n", - "process 16000 
wavs,7199442 frames\n", - " \n", - "process 17000 wavs,7629055 frames\n", - " \n", - "process 18000 wavs,8083729 frames\n", - " \n", - "process 19000 wavs,8519732 frames\n", - " \n", - "process 20000 wavs,8895694 frames\n", - " \n", - "process 21000 wavs,9341778 frames\n", - " \n", - "process 22000 wavs,9796126 frames\n", - " \n", - "process 23000 wavs,10236057 frames\n", - " \n", - "process 24000 wavs,10687461 frames\n", - " \n", - "process 25000 wavs,11113082 frames\n", - " \n", - "process 26000 wavs,11544482 frames\n", - " \n", - "process 27000 wavs,11996273 frames\n", - " \n", - "process 28000 wavs,12456350 frames\n", - " \n", - "process 29000 wavs,12900895 frames\n", - " \n", - "process 30000 wavs,13330353 frames\n", - " \n", - "process 31000 wavs,13736568 frames\n", - " \n", - "process 32000 wavs,14158472 frames\n", - " \n", - "process 33000 wavs,14625316 frames\n", - " \n", - "process 34000 wavs,15036206 frames\n", - " \n", - "process 35000 wavs,15514001 frames\n", - " \n", - "process 36000 wavs,16004323 frames\n", - " \n", - "process 37000 wavs,16418799 frames\n", - " \n", - "process 38000 wavs,16840100 frames\n", - " \n", - "process 39000 wavs,17287752 frames\n", - " \n", - "process 40000 wavs,17776206 frames\n", - " \n", - "process 41000 wavs,18243209 frames\n", - " \n", - "process 42000 wavs,18690449 frames\n", - " \n", - "process 43000 wavs,19137940 frames\n", - " \n", - "process 44000 wavs,19553966 frames\n", - " \n", - "process 45000 wavs,19969813 frames\n", - " \n", - "process 46000 wavs,20440963 frames\n", - " \n", - "process 47000 wavs,20862022 frames\n", - " \n", - "process 48000 wavs,21292801 frames\n", - " \n", - "process 49000 wavs,21713004 frames\n", - " \n", - "process 50000 wavs,22146346 frames\n", - " \n", - "process 51000 wavs,22596172 frames\n", - " \n", - "process 52000 wavs,23074160 frames\n", - " \n", - "process 53000 wavs,23499823 frames\n", - " \n", - "process 54000 wavs,23942151 frames\n", - " \n", - "process 55000 wavs,24390566 frames\n", - " \n", - "process 56000 wavs,24833905 frames\n", - " \n", - "process 57000 wavs,25307270 frames\n", - " \n", - "process 58000 wavs,25748720 frames\n", - " \n", - "process 59000 wavs,26185964 frames\n", - " \n", - "process 60000 wavs,26663953 frames\n", - " \n", - "process 61000 wavs,27117720 frames\n", - " \n", - "process 62000 wavs,27585349 frames\n", - " \n", - "process 63000 wavs,28032693 frames\n", - " \n", - "process 64000 wavs,28487074 frames\n", - " \n", - "process 65000 wavs,28956462 frames\n", - " \n", - "process 66000 wavs,29436358 frames\n", - " \n", - "process 67000 wavs,29918569 frames\n", - " \n", - "process 68000 wavs,30325682 frames\n", - " \n", - "process 69000 wavs,30762528 frames\n", - " \n", - "process 70000 wavs,31182319 frames\n", - " \n", - "process 71000 wavs,31627526 frames\n", - " \n", - "process 72000 wavs,32070556 frames\n", - " \n", - "process 73000 wavs,32504534 frames\n", - " \n", - "process 74000 wavs,32972775 frames\n", - " \n", - "process 75000 wavs,33409637 frames\n", - " \n", - "process 76000 wavs,33847861 frames\n", - " \n", - "process 77000 wavs,34298647 frames\n", - " \n", - "process 78000 wavs,34721536 frames\n", - " \n", - "process 79000 wavs,35159236 frames\n", - " \n", - "process 80000 wavs,35628628 frames\n", - " \n", - "process 81000 wavs,36080909 frames\n", - " \n", - "process 82000 wavs,36562496 frames\n", - " \n", - "process 83000 wavs,37042976 frames\n", - " \n", - "process 84000 wavs,37474403 frames\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": 
[ - " \n", - "process 85000 wavs,37943596 frames\n", - " \n", - "process 86000 wavs,38371620 frames\n", - " \n", - "process 87000 wavs,38844874 frames\n", - " \n", - "process 88000 wavs,39292686 frames\n", - " \n", - "process 89000 wavs,39746715 frames\n", - " \n", - "process 90000 wavs,40241800 frames\n", - " \n", - "process 91000 wavs,40672817 frames\n", - " \n", - "process 92000 wavs,41131773 frames\n", - " \n", - "process 93000 wavs,41612001 frames\n", - " \n", - "process 94000 wavs,42084822 frames\n", - " \n", - "process 95000 wavs,42535878 frames\n", - " \n", - "process 96000 wavs,42969365 frames\n", - " \n", - "process 97000 wavs,43430890 frames\n", - " \n", - "process 98000 wavs,43923378 frames\n", - " \n", - "process 99000 wavs,44397370 frames\n", - " \n", - "process 100000 wavs,44883695 frames\n", - " \n", - "process 101000 wavs,45327968 frames\n", - " \n", - "process 102000 wavs,45768860 frames\n", - " \n", - "process 103000 wavs,46205602 frames\n", - " \n", - "process 104000 wavs,46690407 frames\n", - " \n", - "process 105000 wavs,47153089 frames\n", - " \n", - "process 106000 wavs,47628699 frames\n", - " \n", - "process 107000 wavs,48067945 frames\n", - " \n", - "process 108000 wavs,48539256 frames\n", - " \n", - "process 109000 wavs,49030485 frames\n", - " \n", - "process 110000 wavs,49469189 frames\n", - " \n", - "process 111000 wavs,49928968 frames\n", - " \n", - "process 112000 wavs,50370921 frames\n", - " \n", - "process 113000 wavs,50840090 frames\n", - " \n", - "process 114000 wavs,51286249 frames\n", - " \n", - "process 115000 wavs,51715786 frames\n", - " \n", - "process 116000 wavs,52184017 frames\n", - " \n", - "process 117000 wavs,52666156 frames\n", - " \n", - "process 118000 wavs,53109645 frames\n", - " \n", - "process 119000 wavs,53553253 frames\n", - " \n", - "process 120000 wavs,54009877 frames\n", - "{'mean_stat': [700612678.1184504, 704246512.9321843, 720430663.1822729, 754033269.0474415, 798737761.616614, 829467218.4204571, 851246702.9426627, 862261185.2661449, 859339943.6923889, 846303730.8696194, 832995109.605447, 823196536.6029147, 832626008.2569772, 845571326.1936859, 848801373.0562981, 846503549.328017, 836774344.5500796, 823481091.0445303, 820728368.2518216, 804571348.4957463, 795306095.0083207, 811729024.2415155, 805734803.5703195, 813076782.1959459, 806620199.406499, 809655573.8886961, 804371708.9347517, 809272248.6085774, 810322689.7490631, 814294131.1973915, 816262716.0476038, 816213124.2411841, 817158473.4380915, 821414211.5629157, 827408091.5728914, 834353896.0519086, 840094990.3467333, 842613218.6554606, 842070761.1727513, 834970952.5260613, 837020570.8200948, 829592602.7833654, 830116543.8893851, 829482316.3881509, 833397219.4597517, 839251633.3120549, 845475010.4718693, 852378426.7183967, 859563981.8633184, 866063840.5523493, 867790921.9978689, 868215100.5962687, 869683066.032885, 872467375.6674014, 873097681.1780069, 873025823.0543871, 869897292.7201596, 866386426.3869117, 863166726.7256871, 854653071.2244718, 842402803.9000899, 830838253.4144138, 830143002.3536818, 831492285.0310817, 833304371.8781006, 838896092.8621838, 843866088.9578133, 847316792.1429776, 851038022.3643295, 855931698.0149751, 859320543.9795249, 863031001.3470656, 868325062.1832993, 873626971.0115026, 878726636.924209, 884861725.972504, 886920281.5192285, 883056006.5094173, 863719240.7255149, 773378975.9476194], 'var_stat': [9237018652.657722, 9417257721.82426, 10105084297.159702, 11071318522.587782, 12422783727.426847, 13400306419.784964, 14148498843.406874, 
14576436982.89939, 14529009036.494726, 14105645932.596651, 13682988821.478252, 13413013425.088106, 13764134927.293928, 14233704806.737064, 14361631309.367067, 14281358385.45644, 13939662689.213865, 13496884231.929493, 13382566162.783987, 12871350930.6626, 12576198160.876635, 13051463889.56708, 12859205935.513906, 13053861416.098743, 12830323588.550724, 12886405923.897238, 12708529922.84171, 12847306110.231739, 12880398489.53404, 13002566299.565536, 13066708060.463543, 13064231286.858614, 13088983337.353497, 13221393824.891022, 13412425607.755072, 13631485149.777075, 13807797519.156103, 13877277485.033077, 13848613909.96762, 13609176326.2529, 13649815250.130072, 13397698404.696907, 13388964704.359968, 13354326914.968012, 13469861474.898457, 13652539440.283333, 13846837321.329163, 14062143714.601675, 14292571198.61228, 14504626563.299246, 14563864749.132776, 14579720287.991764, 14626700787.353922, 14716185568.128899, 14728532777.28015, 14719101187.113443, 14607945896.239174, 14478517828.531614, 14355110561.681187, 14057430280.249746, 13634284490.879377, 13248236002.494394, 13217602306.335958, 13257856701.946049, 13323688441.072674, 13515395318.023148, 13685827169.67645, 13811622609.426846, 13947347160.615082, 14115883822.884943, 14231204526.433033, 14356066668.651815, 14533604268.238445, 14708971788.69237, 14875667326.732443, 15079098318.79331, 15144888989.667963, 15002658970.504765, 14349232841.34513, 11544480117.013124], 'frame_num': 54068199}\n" - ] - } - ], - "source": [ - "import random\n", - "\n", - "import numpy as np\n", - "import paddle\n", - "from paddle.io import DataLoader\n", - "from paddle.io import Dataset\n", - "\n", - "from deepspeech.frontend.audio import AudioSegment\n", - "from deepspeech.frontend.utility import load_cmvn\n", - "from deepspeech.frontend.utility import read_manifest\n", - "\n", - "# https://github.com/PaddlePaddle/Paddle/pull/31481\n", - "class CollateFunc(object):\n", - " ''' Collate function for AudioDataset\n", - " '''\n", - " def __init__(self, feature_func):\n", - " self.feature_func = feature_func\n", - " \n", - " def __call__(self, batch):\n", - " mean_stat = None\n", - " var_stat = None\n", - " number = 0\n", - " for item in batch:\n", - " audioseg = AudioSegment.from_file(item['feat'])\n", - " feat = self.feature_func(audioseg) #(D, T)\n", - "\n", - " sums = np.sum(feat, axis=1)\n", - " if mean_stat is None:\n", - " mean_stat = sums\n", - " else:\n", - " mean_stat += sums\n", - "\n", - " square_sums = np.sum(np.square(feat), axis=1)\n", - " if var_stat is None:\n", - " var_stat = square_sums\n", - " else:\n", - " var_stat += square_sums\n", - "\n", - " number += feat.shape[1]\n", - " return number, mean_stat, var_stat\n", - "\n", - "\n", - "class AudioDataset(Dataset):\n", - " def __init__(self, manifest_path, num_samples=-1, rng=None, random_seed=0):\n", - " self._rng = rng if rng else np.random.RandomState(random_seed)\n", - " manifest = read_manifest(manifest_path)\n", - " if num_samples == -1:\n", - " sampled_manifest = manifest\n", - " else:\n", - " sampled_manifest = self._rng.choice(manifest, num_samples, replace=False)\n", - " self.items = sampled_manifest\n", - "\n", - " def __len__(self):\n", - " return len(self.items)\n", - "\n", - " def __getitem__(self, idx):\n", - " return self.items[idx]\n", - " \n", - " \n", - "augmentation_pipeline = AugmentationPipeline('{}')\n", - "audio_featurizer = AudioFeaturizer(\n", - " specgram_type=args.specgram_type,\n", - " feat_dim=args.feat_dim,\n", - " delta_delta=args.delta_delta,\n", - " 
stride_ms=args.stride_ms,\n", - "    window_ms=args.window_ms,\n", - "    n_fft=None,\n", - "    max_freq=None,\n", - "    target_sample_rate=args.sample_rate,\n", - "    use_dB_normalization=True,\n", - "    target_dB=-20)\n", - "\n", - "def augment_and_featurize(audio_segment):\n", - "    augmentation_pipeline.transform_audio(audio_segment)\n", - "    return audio_featurizer.featurize(audio_segment)\n", - "\n", - "\n", - "collate_func = CollateFunc(augment_and_featurize)\n", - "\n", - "dataset = AudioDataset(\n", - "    args.manifest_path,\n", - "    args.num_samples)\n", - "\n", - "batch_size = 20\n", - "data_loader = DataLoader(\n", - "    dataset,\n", - "    batch_size=batch_size,\n", - "    shuffle=False,\n", - "    num_workers=args.num_workers,\n", - "    collate_fn=collate_func)\n", - "\n", - "with paddle.no_grad():\n", - "    # accumulate (frame count, per-dim sum, per-dim sum of squares) over the whole manifest\n", - "    all_mean_stat = None\n", - "    all_var_stat = None\n", - "    all_number = 0\n", - "    wav_number = 0\n", - "    for i, batch in enumerate(data_loader):\n", - "        number, mean_stat, var_stat = batch\n", - "        if i == 0:\n", - "            all_mean_stat = mean_stat\n", - "            all_var_stat = var_stat\n", - "        else:\n", - "            all_mean_stat += mean_stat\n", - "            all_var_stat += var_stat\n", - "        all_number += number\n", - "        wav_number += batch_size\n", - "\n", - "        if wav_number % 1000 == 0:\n", - "            print('process {} wavs,{} frames'.format(wav_number,\n", - "                                                     all_number))\n", - "\n", - "cmvn_info = {\n", - "    'mean_stat': list(all_mean_stat.tolist()),\n", - "    'var_stat': list(all_var_stat.tolist()),\n", - "    'frame_num': all_number\n", - "}\n", - "print(cmvn_info)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "unlike-search", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -}
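The accumulated statistics are exactly what is needed to recover the per-dimension mean and standard deviation that FeatureNormalizer applies later: mean = mean_stat / frame_num and var = var_stat / frame_num - mean**2. A minimal numpy sketch of that conversion (the epsilon floor is an assumption for numerical safety, not taken from the repo):

import numpy as np

def stats_to_mean_std(cmvn_info, eps=1e-20):
    # per-dimension mean over all frames
    mean = np.array(cmvn_info['mean_stat']) / cmvn_info['frame_num']
    # E[x^2] - E[x]^2; clip tiny negatives caused by floating-point rounding
    var = np.array(cmvn_info['var_stat']) / cmvn_info['frame_num'] - mean**2
    std = np.sqrt(np.maximum(var, eps))
    return mean, std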
diff --git a/.notebook/dataloader.ipynb b/.notebook/dataloader.ipynb deleted file mode 100644 index 3de8f64a9..000000000 --- a/.notebook/dataloader.ipynb +++ /dev/null @@ -1,389 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "emerging-meter", - "metadata": {}, - "outputs": [], - "source": [ - "import math\n", - "import random\n", - "import tarfile\n", - "import logging\n", - "import numpy as np\n", - "from collections import namedtuple\n", - "from functools import partial\n", - "\n", - "import paddle\n", - "from paddle.io import Dataset\n", - "from paddle.io import DataLoader\n", - "from paddle.io import BatchSampler\n", - "from paddle.io import DistributedBatchSampler\n", - "from paddle import distributed as dist\n", - "\n", - "from data_utils.utility import read_manifest\n", - "from data_utils.augmentor.augmentation import AugmentationPipeline\n", - "from data_utils.featurizer.speech_featurizer import SpeechFeaturizer\n", - "from data_utils.speech import SpeechSegment\n", - "from data_utils.normalizer import FeatureNormalizer\n", - "\n", - "\n", - "from data_utils.dataset import (\n", - "    DeepSpeech2Dataset,\n", - "    DeepSpeech2DistributedBatchSampler,\n", - "    DeepSpeech2BatchSampler,\n", - "    SpeechCollator,\n", - ")" - ] - },
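The next cell defines create_dataloader; its inner padding_batch pads every (D, T) feature matrix in a batch to the longest T and zero-pads token ids to the longest transcript. For intuition, a standalone sketch of just the audio-padding step (toy shapes, hypothetical, not from the notebook):

import numpy as np

batch = [(np.ones((161, 3)), [5, 9]), (np.ones((161, 5)), [7])]  # (audio, text_ids) pairs
max_len = max(audio.shape[1] for audio, _ in batch)              # longest T in the batch -> 5
padded = []
for audio, _ in batch:
    buf = np.zeros([audio.shape[0], max_len], dtype='float32')
    buf[:, :audio.shape[1]] = audio  # left-aligned, zero tail
    padded.append(buf)
print(np.stack(padded).shape)  # (2, 161, 5): batch x feat_dim x padded_time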
 - { - "cell_type": "code", - "execution_count": 20, - "id": "excessive-american", - "metadata": {}, - "outputs": [], - "source": [ - "def create_dataloader(manifest_path,\t\n", - "                      vocab_filepath,\t\n", - "                      mean_std_filepath,\t\n", - "                      augmentation_config='{}',\t\n", - "                      max_duration=float('inf'),\t\n", - "                      min_duration=0.0,\t\n", - "                      stride_ms=10.0,\t\n", - "                      window_ms=20.0,\t\n", - "                      max_freq=None,\t\n", - "                      specgram_type='linear',\t\n", - "                      use_dB_normalization=True,\t\n", - "                      random_seed=0,\t\n", - "                      keep_transcription_text=False,\t\n", - "                      is_training=False,\t\n", - "                      batch_size=1,\t\n", - "                      num_workers=0,\t\n", - "                      sortagrad=False,\t\n", - "                      shuffle_method=None,\t\n", - "                      dist=False):\t\n", - "\n", - "    dataset = DeepSpeech2Dataset(\t\n", - "        manifest_path,\t\n", - "        vocab_filepath,\t\n", - "        mean_std_filepath,\t\n", - "        augmentation_config=augmentation_config,\t\n", - "        max_duration=max_duration,\t\n", - "        min_duration=min_duration,\t\n", - "        stride_ms=stride_ms,\t\n", - "        window_ms=window_ms,\t\n", - "        max_freq=max_freq,\t\n", - "        specgram_type=specgram_type,\t\n", - "        use_dB_normalization=use_dB_normalization,\t\n", - "        random_seed=random_seed,\t\n", - "        keep_transcription_text=keep_transcription_text)\t\n", - "\n", - "    if dist:\t\n", - "        batch_sampler = DeepSpeech2DistributedBatchSampler(\t\n", - "            dataset,\t\n", - "            batch_size,\t\n", - "            num_replicas=None,\t\n", - "            rank=None,\t\n", - "            shuffle=is_training,\t\n", - "            drop_last=is_training,\t\n", - "            sortagrad=is_training,\t\n", - "            shuffle_method=shuffle_method)\t\n", - "    else:\t\n", - "        batch_sampler = DeepSpeech2BatchSampler(\t\n", - "            dataset,\t\n", - "            shuffle=is_training,\t\n", - "            batch_size=batch_size,\t\n", - "            drop_last=is_training,\t\n", - "            sortagrad=is_training,\t\n", - "            shuffle_method=shuffle_method)\t\n", - "\n", - "    def padding_batch(batch, padding_to=-1, flatten=False, is_training=True):\t\n", - "        \"\"\"\t\n", - "        Padding audio features with zeros to make them have the same shape (or\t\n", - "        a user-defined shape) within one batch.\t\n", - "\n", - "        If ``padding_to`` is -1, the maximum shape in the batch will be used\t\n", - "        as the target shape for padding. 
Otherwise, `padding_to` will be the\t\n", - " target shape (only refers to the second axis).\t\n", - "\n", - " If `flatten` is True, features will be flatten to 1darray.\t\n", - " \"\"\"\t\n", - " new_batch = []\t\n", - " # get target shape\t\n", - " max_length = max([audio.shape[1] for audio, text in batch])\t\n", - " if padding_to != -1:\t\n", - " if padding_to < max_length:\t\n", - " raise ValueError(\"If padding_to is not -1, it should be larger \"\t\n", - " \"than any instance's shape in the batch\")\t\n", - " max_length = padding_to\t\n", - " max_text_length = max([len(text) for audio, text in batch])\t\n", - " # padding\t\n", - " padded_audios = []\t\n", - " audio_lens = []\t\n", - " texts, text_lens = [], []\t\n", - " for audio, text in batch:\t\n", - " padded_audio = np.zeros([audio.shape[0], max_length])\t\n", - " padded_audio[:, :audio.shape[1]] = audio\t\n", - " if flatten:\t\n", - " padded_audio = padded_audio.flatten()\t\n", - " padded_audios.append(padded_audio)\t\n", - " audio_lens.append(audio.shape[1])\t\n", - "\n", - " padded_text = np.zeros([max_text_length])\n", - " if is_training:\n", - " padded_text[:len(text)] = text\t# ids\n", - " else:\n", - " padded_text[:len(text)] = [ord(t) for t in text] # string\n", - " \n", - " texts.append(padded_text)\t\n", - " text_lens.append(len(text))\t\n", - "\n", - " padded_audios = np.array(padded_audios).astype('float32')\t\n", - " audio_lens = np.array(audio_lens).astype('int64')\t\n", - " texts = np.array(texts).astype('int32')\t\n", - " text_lens = np.array(text_lens).astype('int64')\t\n", - " return padded_audios, texts, audio_lens, text_lens\t\n", - "\n", - " loader = DataLoader(\t\n", - " dataset,\t\n", - " batch_sampler=batch_sampler,\t\n", - " collate_fn=partial(padding_batch, is_training=is_training),\t\n", - " num_workers=num_workers)\t\n", - " return loader" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "naval-brave", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'num_samples': 5, 'beam_size': 500, 'num_proc_bsearch': 8, 'num_conv_layers': 2, 'num_rnn_layers': 3, 'rnn_layer_size': 2048, 'alpha': 2.5, 'beta': 0.3, 'cutoff_prob': 1.0, 'cutoff_top_n': 40, 'use_gru': False, 'use_gpu': True, 'share_rnn_weights': True, 'infer_manifest': 'examples/aishell/data/manifest.dev', 'mean_std_path': 'examples/aishell/data/mean_std.npz', 'vocab_path': 'examples/aishell/data/vocab.txt', 'lang_model_path': 'models/lm/common_crawl_00.prune01111.trie.klm', 'model_path': 'examples/aishell/checkpoints/step_final', 'decoding_method': 'ctc_beam_search', 'error_rate_type': 'wer', 'specgram_type': 'linear'}\n" - ] - } - ], - "source": [ - "import sys\n", - "import argparse\n", - "import functools\n", - "from utils.utility import add_arguments, print_arguments\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "# yapf: disable\n", - "add_arg('num_samples', int, 5, \"# of samples to infer.\")\n", - "add_arg('beam_size', int, 500, \"Beam search width.\")\n", - "add_arg('num_proc_bsearch', int, 8, \"# of CPUs for beam search.\")\n", - "add_arg('num_conv_layers', int, 2, \"# of convolution layers.\")\n", - "add_arg('num_rnn_layers', int, 3, \"# of recurrent layers.\")\n", - "add_arg('rnn_layer_size', int, 2048, \"# of recurrent cells per layer.\")\n", - "add_arg('alpha', float, 2.5, \"Coef of LM for beam search.\")\n", - "add_arg('beta', float, 0.3, \"Coef of WC for beam search.\")\n", - 
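The `padding_batch` collator above zero-pads each `[D, T_i]` feature to the longest `T` in the batch and records the true lengths alongside. A minimal standalone sketch of the same idea, NumPy only, with hypothetical names (`pad_batch` is not part of the deleted notebook):

```python
import numpy as np

def pad_batch(feats, token_ids):
    """Zero-pad [D, T_i] features and [L_i] token-id lists to the batch maxima."""
    max_t = max(f.shape[1] for f in feats)      # longest time axis in the batch
    max_l = max(len(t) for t in token_ids)      # longest transcript in the batch
    audio = np.zeros((len(feats), feats[0].shape[0], max_t), dtype='float32')
    text = np.zeros((len(token_ids), max_l), dtype='int32')
    for i, (f, t) in enumerate(zip(feats, token_ids)):
        audio[i, :, :f.shape[1]] = f            # copy the real frames, keep a zero tail
        text[i, :len(t)] = t
    audio_lens = np.array([f.shape[1] for f in feats], dtype='int64')
    text_lens = np.array([len(t) for t in token_ids], dtype='int64')
    return audio, text, audio_lens, text_lens
```

The true lengths matter because the zero tail is otherwise indistinguishable from silence when the model or the CTC loss masks padded frames.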
"add_arg('cutoff_prob', float, 1.0, \"Cutoff probability for pruning.\")\n", - "add_arg('cutoff_top_n', int, 40, \"Cutoff number for pruning.\")\n", - "add_arg('use_gru', bool, False, \"Use GRUs instead of simple RNNs.\")\n", - "add_arg('use_gpu', bool, True, \"Use GPU or not.\")\n", - "add_arg('share_rnn_weights',bool, True, \"Share input-hidden weights across \"\n", - " \"bi-directional RNNs. Not for GRU.\")\n", - "add_arg('infer_manifest', str,\n", - " 'examples/aishell/data/manifest.dev',\n", - " \"Filepath of manifest to infer.\")\n", - "add_arg('mean_std_path', str,\n", - " 'examples/aishell/data/mean_std.npz',\n", - " \"Filepath of normalizer's mean & std.\")\n", - "add_arg('vocab_path', str,\n", - " 'examples/aishell/data/vocab.txt',\n", - " \"Filepath of vocabulary.\")\n", - "add_arg('lang_model_path', str,\n", - " 'models/lm/common_crawl_00.prune01111.trie.klm',\n", - " \"Filepath for language model.\")\n", - "add_arg('model_path', str,\n", - " 'examples/aishell/checkpoints/step_final',\n", - " \"If None, the training starts from scratch, \"\n", - " \"otherwise, it resumes from the pre-trained model.\")\n", - "add_arg('decoding_method', str,\n", - " 'ctc_beam_search',\n", - " \"Decoding method. Options: ctc_beam_search, ctc_greedy\",\n", - " choices = ['ctc_beam_search', 'ctc_greedy'])\n", - "add_arg('error_rate_type', str,\n", - " 'wer',\n", - " \"Error rate type for evaluation.\",\n", - " choices=['wer', 'cer'])\n", - "add_arg('specgram_type', str,\n", - " 'linear',\n", - " \"Audio feature type. Options: linear, mfcc.\",\n", - " choices=['linear', 'mfcc'])\n", - "# yapf: disable\n", - "args = parser.parse_args([])\n", - "print(vars(args))" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "bearing-physics", - "metadata": {}, - "outputs": [], - "source": [ - "batch_reader = create_dataloader(\n", - " manifest_path=args.infer_manifest,\n", - " vocab_filepath=args.vocab_path,\n", - " mean_std_filepath=args.mean_std_path,\n", - " augmentation_config='{}',\n", - " #max_duration=float('inf'),\n", - " max_duration=27.0,\n", - " min_duration=0.0,\n", - " stride_ms=10.0,\n", - " window_ms=20.0,\n", - " max_freq=None,\n", - " specgram_type=args.specgram_type,\n", - " use_dB_normalization=True,\n", - " random_seed=0,\n", - " keep_transcription_text=True,\n", - " is_training=False,\n", - " batch_size=args.num_samples,\n", - " sortagrad=True,\n", - " shuffle_method=None,\n", - " dist=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "classified-melissa", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "test Tensor(shape=[5, 6], dtype=int32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[22823, 26102, 20195, 37324, 0 , 0 ],\n", - " [22238, 26469, 23601, 22909, 0 , 0 ],\n", - " [20108, 26376, 22235, 26085, 0 , 0 ],\n", - " [36824, 35201, 20445, 25345, 32654, 24863],\n", - " [29042, 27748, 21463, 23456, 0 , 0 ]])\n", - "test raw 大时代里\n", - "test raw 煲汤受宠\n", - "audio len Tensor(shape=[5], dtype=int64, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [163, 167, 180, 186, 186])\n", - "test len Tensor(shape=[5], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [4, 4, 4, 6, 4])\n", - "audio Tensor(shape=[5, 161, 186], dtype=float32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[[ 1.11669052, 0.79015088, 0.93658292, ..., 0. , 0. , 0. ],\n", - " [ 0.83549136, 0.72643483, 0.83578080, ..., 0. , 0. , 0. ],\n", - " [-0.89155018, -0.18894747, -0.53357804, ..., 0. , 0. , 0. 
],\n", - " ...,\n", - " [ 0.33386710, -0.81240511, 0.12869737, ..., 0. , 0. , 0. ],\n", - " [-0.17537928, 0.58380985, 0.70696265, ..., 0. , 0. , 0. ],\n", - " [-0.84175998, 1.22041416, 0.07929770, ..., 0. , 0. , 0. ]],\n", - "\n", - " [[-0.35964420, 0.77392709, 0.71409988, ..., 0. , 0. , 0. ],\n", - " [-0.15990183, 0.42962283, 0.06222462, ..., 0. , 0. , 0. ],\n", - " [-0.31166190, -0.74864638, -0.52836996, ..., 0. , 0. , 0. ],\n", - " ...,\n", - " [-0.27546275, 0.32889456, 0.12410031, ..., 0. , 0. , 0. ],\n", - " [ 0.16264282, 0.49418071, -0.15960945, ..., 0. , 0. , 0. ],\n", - " [ 0.12476666, 0.00516864, 1.16021466, ..., 0. , 0. , 0. ]],\n", - "\n", - " [[ 0.90202141, 1.48541915, 0.92062062, ..., 0. , 0. , 0. ],\n", - " [ 0.82661545, 1.37171340, 0.86746097, ..., 0. , 0. , 0. ],\n", - " [-0.62287915, -0.48645937, 0.35041964, ..., 0. , 0. , 0. ],\n", - " ...,\n", - " [ 0.07376949, 0.07138316, 0.76355994, ..., 0. , 0. , 0. ],\n", - " [-0.32306790, 0.43247896, 1.27311838, ..., 0. , 0. , 0. ],\n", - " [-0.97667056, 0.60747612, 0.79181534, ..., 0. , 0. , 0. ]],\n", - "\n", - " [[ 0.72022128, 0.95428467, 0.92766261, ..., 0.29105374, -0.45564806, -0.62151009],\n", - " [ 0.42083180, 0.49279949, 0.82724041, ..., -0.17333922, -1.45363355, -0.61673522],\n", - " [-0.76116520, -0.84750438, -0.09512503, ..., -1.01497340, -1.42781055, -0.80859023],\n", - " ...,\n", - " [-0.23009977, 1.06155431, 1.09065628, ..., 0.25581080, 0.53794998, -1.22650719],\n", - " [-1.37693381, 0.30778193, 0.17152318, ..., 0.51650339, 0.25580606, 0.83097816],\n", - " [-1.62180591, 1.30567718, 1.09928656, ..., -0.77590007, 1.27712476, 0.53189957]],\n", - "\n", - " [[ 1.03205252, -0.51535392, 0.21077573, ..., 0.76618457, 1.27425683, 1.52250278],\n", - " [ 0.82059991, 0.43990925, 0.13090958, ..., 0.86662549, 1.01687658, 1.48495352],\n", - " [-0.75489789, -0.01997089, -0.65174174, ..., 0.09061214, -0.55211234, -0.01614586],\n", - " ...,\n", - " [ 0.50985396, 1.84555030, 0.79185146, ..., 1.13666189, 1.19898069, 1.98158395],\n", - " [ 1.98721015, 2.52385354, 1.11714780, ..., 0.19416514, 1.11329341, 0.64460152],\n", - " [ 2.69512844, 1.90993905, 0.50245082, ..., -0.50902629, 0.03333465, -1.24584770]]])\n" - ] - } - ], - "source": [ - "for idx, (audio, audio_len, text, text_len) in enumerate(batch_reader()):\n", - " print('test', text)\n", - " print(\"test raw\", ''.join( chr(i) for i in text[0][:int(text_len[0])] ))\n", - " print(\"test raw\", ''.join( chr(i) for i in text[-1][:int(text_len[-1])] ))\n", - " print('audio len', audio_len)\n", - " print('test len', text_len)\n", - " print('audio', audio)\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "unexpected-skating", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "minus-modern", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} \ No newline at end of file diff --git a/.notebook/dataloader_with_tokens_tokenids.ipynb b/.notebook/dataloader_with_tokens_tokenids.ipynb deleted file mode 100644 index 7d93dd009..000000000 --- 
a/.notebook/dataloader_with_tokens_tokenids.ipynb +++ /dev/null @@ -1,1204 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "medieval-monday", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x\n" - ] - }, - { - "data": { - "text/plain": [ - "'/workspace/DeepSpeech-2.x'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%cd ..\n", - "%pwd" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "emerging-meter", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:26: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " def convert_to_list(value, n, name, dtype=np.int):\n" - ] - } - ], - "source": [ - "import math\n", - "import random\n", - "import tarfile\n", - "import logging\n", - "import numpy as np\n", - "from collections import namedtuple\n", - "from functools import partial\n", - "\n", - "import paddle\n", - "from paddle.io import Dataset\n", - "from paddle.io import DataLoader\n", - "from paddle.io import BatchSampler\n", - "from paddle.io import DistributedBatchSampler\n", - "from paddle import distributed as dist\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "excessive-american", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "naval-brave", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. 
Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:93] register user softmax to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:97] register user log_softmax to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:101] register user sigmoid to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:105] register user log_sigmoid to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:109] register user relu to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:119] override cat of paddle if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:133] override item of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:144] override long of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:164] override new_full of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:179] override eq of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:185] override eq of paddle if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:195] override contiguous of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:212] override size of paddle.Tensor (`to_static` do not process `size` property, maybe some `paddle` api dependent on it), remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:223] register user view to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:233] register user view_as to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:259] register user masked_fill to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:277] register user masked_fill_ to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:288] register user fill_ to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:298] register user repeat to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:303] register user softmax to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:308] register user sigmoid to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:312] register user relu to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:322] register user type_as to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:337] register user to to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:346] register user float to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:356] register user tolist to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:371] register user glu to paddle.nn.functional, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:422] override ctc_loss 
of paddle.nn.functional if exists, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:428] register user Module to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:434] register user ModuleList to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:450] register user GLU to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:483] register user ConstantPad2d to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 06:32:09 __init__.py:489] register user export to paddle.jit, remove this when fixed!\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'num_samples': 5, 'beam_size': 500, 'num_proc_bsearch': 8, 'num_conv_layers': 2, 'num_rnn_layers': 3, 'rnn_layer_size': 2048, 'alpha': 2.5, 'beta': 0.3, 'cutoff_prob': 1.0, 'cutoff_top_n': 40, 'use_gru': False, 'use_gpu': True, 'share_rnn_weights': True, 'unit_type': 'char', 'spm_model_prefix': 'examples/tiny/s1/data/spm_bpe', 'infer_manifest': 'examples/tiny/s1/data/manifest.tiny', 'mean_std_path': 'examples/tiny/s1/data/mean_std.npz', 'vocab_path': 'examples/tiny/s1/data/vocab.txt', 'lang_model_path': 'models/lm/common_crawl_00.prune01111.trie.klm', 'model_path': 'examples/tiny/s1/checkpoints/step_final', 'decoding_method': 'ctc_beam_search', 'error_rate_type': 'wer', 'specgram_type': 'fbank', 'feat_dim': 80, 'delta_delta': False}\n" - ] - } - ], - "source": [ - "import sys\n", - "import argparse\n", - "import functools\n", - "from deepspeech.utils.utility import add_arguments, print_arguments\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "# yapf: disable\n", - "add_arg('num_samples', int, 5, \"# of samples to infer.\")\n", - "add_arg('beam_size', int, 500, \"Beam search width.\")\n", - "add_arg('num_proc_bsearch', int, 8, \"# of CPUs for beam search.\")\n", - "add_arg('num_conv_layers', int, 2, \"# of convolution layers.\")\n", - "add_arg('num_rnn_layers', int, 3, \"# of recurrent layers.\")\n", - "add_arg('rnn_layer_size', int, 2048, \"# of recurrent cells per layer.\")\n", - "add_arg('alpha', float, 2.5, \"Coef of LM for beam search.\")\n", - "add_arg('beta', float, 0.3, \"Coef of WC for beam search.\")\n", - "add_arg('cutoff_prob', float, 1.0, \"Cutoff probability for pruning.\")\n", - "add_arg('cutoff_top_n', int, 40, \"Cutoff number for pruning.\")\n", - "add_arg('use_gru', bool, False, \"Use GRUs instead of simple RNNs.\")\n", - "add_arg('use_gpu', bool, True, \"Use GPU or not.\")\n", - "add_arg('share_rnn_weights',bool, True, \"Share input-hidden weights across \"\n", - " \"bi-directional RNNs. 
Not for GRU.\")\n", - "add_arg('unit_type', str,\n", - " 'char',\n", - " \"Options: char, word, spm.\",\n", - " choices=['char', 'word', 'spm'])\n", - "add_arg('spm_model_prefix', str,\n", - " 'examples/tiny/s1/data/spm_bpe',\n", - " \"spm model prefix.\",)\n", - "add_arg('infer_manifest', str,\n", - " 'examples/tiny/s1/data/manifest.tiny',\n", - " \"Filepath of manifest to infer.\")\n", - "add_arg('mean_std_path', str,\n", - " 'examples/tiny/s1/data/mean_std.npz',\n", - " \"Filepath of normalizer's mean & std.\")\n", - "add_arg('vocab_path', str,\n", - " 'examples/tiny/s1/data/vocab.txt',\n", - " \"Filepath of vocabulary.\")\n", - "add_arg('lang_model_path', str,\n", - " 'models/lm/common_crawl_00.prune01111.trie.klm',\n", - " \"Filepath for language model.\")\n", - "add_arg('model_path', str,\n", - " 'examples/tiny/s1/checkpoints/step_final',\n", - " \"If None, the training starts from scratch, \"\n", - " \"otherwise, it resumes from the pre-trained model.\")\n", - "add_arg('decoding_method', str,\n", - " 'ctc_beam_search',\n", - " \"Decoding method. Options: ctc_beam_search, ctc_greedy\",\n", - " choices = ['ctc_beam_search', 'ctc_greedy'])\n", - "add_arg('error_rate_type', str,\n", - " 'wer',\n", - " \"Error rate type for evaluation.\",\n", - " choices=['wer', 'cer'])\n", - "add_arg('specgram_type', str,\n", - " 'fbank',\n", - " \"Audio feature type. Options: linear, mfcc.\",\n", - " choices=['linear', 'mfcc'])\n", - "add_arg('feat_dim', int, 80, \"mfcc or fbank feat dim.\")\n", - "add_arg('delta_delta', bool, False, \"delta delta\")\n", - "# yapf: disable\n", - "args = parser.parse_args([])\n", - "print(vars(args))" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "wired-principal", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'num_samples': 5, 'beam_size': 500, 'num_proc_bsearch': 8, 'num_conv_layers': 2, 'num_rnn_layers': 3, 'rnn_layer_size': 2048, 'alpha': 2.5, 'beta': 0.3, 'cutoff_prob': 1.0, 'cutoff_top_n': 40, 'use_gru': False, 'use_gpu': True, 'share_rnn_weights': True, 'unit_type': 'char', 'spm_model_prefix': 'examples/aishell/s1/data/spm_bpe', 'infer_manifest': 'examples/aishell/s1/data/manifest.test', 'mean_std_path': '', 'vocab_path': 'examples/aishell/s1/data/vocab.txt', 'lang_model_path': 'models/lm/common_crawl_00.prune01111.trie.klm', 'model_path': 'examples/aishell/s1/checkpoints/step_final', 'decoding_method': 'ctc_beam_search', 'error_rate_type': 'wer', 'specgram_type': 'fbank', 'feat_dim': 80, 'delta_delta': False}\n" - ] - } - ], - "source": [ - "import sys\n", - "import argparse\n", - "import functools\n", - "from deepspeech.utils.utility import add_arguments, print_arguments\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "# yapf: disable\n", - "add_arg('num_samples', int, 5, \"# of samples to infer.\")\n", - "add_arg('beam_size', int, 500, \"Beam search width.\")\n", - "add_arg('num_proc_bsearch', int, 8, \"# of CPUs for beam search.\")\n", - "add_arg('num_conv_layers', int, 2, \"# of convolution layers.\")\n", - "add_arg('num_rnn_layers', int, 3, \"# of recurrent layers.\")\n", - "add_arg('rnn_layer_size', int, 2048, \"# of recurrent cells per layer.\")\n", - "add_arg('alpha', float, 2.5, \"Coef of LM for beam search.\")\n", - "add_arg('beta', float, 0.3, \"Coef of WC for beam search.\")\n", - "add_arg('cutoff_prob', float, 1.0, \"Cutoff probability for pruning.\")\n", - "add_arg('cutoff_top_n', int, 
40, \"Cutoff number for pruning.\")\n", - "add_arg('use_gru', bool, False, \"Use GRUs instead of simple RNNs.\")\n", - "add_arg('use_gpu', bool, True, \"Use GPU or not.\")\n", - "add_arg('share_rnn_weights',bool, True, \"Share input-hidden weights across \"\n", - " \"bi-directional RNNs. Not for GRU.\")\n", - "add_arg('unit_type', str,\n", - " 'char',\n", - " \"Options: char, word, spm.\",\n", - " choices=['char', 'word', 'spm'])\n", - "add_arg('spm_model_prefix', str,\n", - " 'examples/aishell/s1/data/spm_bpe',\n", - " \"spm model prefix.\",)\n", - "add_arg('infer_manifest', str,\n", - " 'examples/aishell/s1/data/manifest.test',\n", - " \"Filepath of manifest to infer.\")\n", - "add_arg('mean_std_path', str,\n", - " '',\n", - " \"examples/aishell/s1/data/mean_std.npz, Filepath of normalizer's mean & std.\")\n", - "add_arg('vocab_path', str,\n", - " 'examples/aishell/s1/data/vocab.txt',\n", - " \"Filepath of vocabulary.\")\n", - "add_arg('lang_model_path', str,\n", - " 'models/lm/common_crawl_00.prune01111.trie.klm',\n", - " \"Filepath for language model.\")\n", - "add_arg('model_path', str,\n", - " 'examples/aishell/s1/checkpoints/step_final',\n", - " \"If None, the training starts from scratch, \"\n", - " \"otherwise, it resumes from the pre-trained model.\")\n", - "add_arg('decoding_method', str,\n", - " 'ctc_beam_search',\n", - " \"Decoding method. Options: ctc_beam_search, ctc_greedy\",\n", - " choices = ['ctc_beam_search', 'ctc_greedy'])\n", - "add_arg('error_rate_type', str,\n", - " 'wer',\n", - " \"Error rate type for evaluation.\",\n", - " choices=['wer', 'cer'])\n", - "add_arg('specgram_type', str,\n", - " 'fbank',\n", - " \"Audio feature type. Options: linear, mfcc.\",\n", - " choices=['linear', 'mfcc', 'fbank'])\n", - "add_arg('feat_dim', int, 80, \"mfcc or fbank feat dim.\")\n", - "add_arg('delta_delta', bool, False, \"delta delta\")\n", - "# yapf: disable\n", - "args = parser.parse_args([])\n", - "print(vars(args))" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "bearing-physics", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/scipy/fftpack/__init__.py:103: DeprecationWarning: The module numpy.dual is deprecated. Instead of using dual, use the functions directly from numpy or scipy.\n", - " from numpy.dual import register_func\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/scipy/special/orthogonal.py:81: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/numba/core/types/__init__.py:108: DeprecationWarning: `np.long` is a deprecated alias for `np.compat.long`. To silence this warning, use `np.compat.long` by itself. In the likely event your code does not need to work on Python 2 you can use the builtin `int` for which `np.compat.long` is itself an alias. Doing this will not modify any behaviour and is safe. 
When replacing `np.long`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " long_ = _make_signed(np.long)\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/numba/core/types/__init__.py:109: DeprecationWarning: `np.long` is a deprecated alias for `np.compat.long`. To silence this warning, use `np.compat.long` by itself. In the likely event your code does not need to work on Python 2 you can use the builtin `int` for which `np.compat.long` is itself an alias. Doing this will not modify any behaviour and is safe. When replacing `np.long`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " ulong = _make_unsigned(np.long)\n" - ] - } - ], - "source": [ - "from deepspeech.frontend.utility import read_manifest\n", - "from deepspeech.frontend.augmentor.augmentation import AugmentationPipeline\n", - "from deepspeech.frontend.featurizer.speech_featurizer import SpeechFeaturizer\n", - "from deepspeech.frontend.speech import SpeechSegment\n", - "from deepspeech.frontend.normalizer import FeatureNormalizer\n", - "\n", - "\n", - "from deepspeech.io.collator import SpeechCollator\n", - "from deepspeech.io.dataset import ManifestDataset\n", - "from deepspeech.io.sampler import (\n", - " SortagradDistributedBatchSampler,\n", - " SortagradBatchSampler,\n", - ")\n", - "from deepspeech.io import create_dataloader\n", - "batch_reader = create_dataloader(\n", - " manifest_path=args.infer_manifest,\n", - " unit_type=args.unit_type,\n", - " vocab_filepath=args.vocab_path,\n", - " mean_std_filepath=args.mean_std_path,\n", - " spm_model_prefix=args.spm_model_prefix,\n", - " augmentation_config='{}',\n", - " max_input_len=27.0,\n", - " min_input_len=0.0,\n", - " max_output_len=float('inf'),\n", - " min_output_len=0.0,\n", - " max_output_input_ratio=float('inf'),\n", - " min_output_input_ratio=0.0,\n", - " stride_ms=10.0,\n", - " window_ms=20.0,\n", - " max_freq=None,\n", - " specgram_type=args.specgram_type,\n", - " feat_dim=args.feat_dim,\n", - " delta_delta=args.delta_delta,\n", - " use_dB_normalization=True,\n", - " random_seed=0,\n", - " keep_transcription_text=True,\n", - " is_training=False,\n", - " batch_size=args.num_samples,\n", - " num_workers=0,\n", - " sortagrad=True,\n", - " shuffle_method=None,\n", - " dist=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "classified-melissa", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "fbank\n", - "[232 387 331 ... 249 249 262] int16\n", - "fbank\n", - "[-138 -219 -192 ... 
338 324 351] int16\n", - "fbank\n", - "[ 694 1175 1022 ... 553 514 627] int16\n", - "fbank\n", - "[-39 -79 -53 ... 139 172 99] int16\n", - "fbank\n", - "[-277 -480 -425 ... 758 767 739] int16\n", - "fbank\n", - "[ 399 693 609 ... 1291 1270 1291] int16\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py:354: DeprecationWarning: `np.object` is a deprecated alias for the builtin `object`. To silence this warning, use `object` by itself. Doing this will not modify any behavior and is safe. \n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " if arr.dtype == np.object:\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "fbank\n", - "[ -750 -1254 -1107 ... 2276 1889 2067] int16\n", - "fbank\n", - "[ -127 -199 -149 ... -5243 -5065 -5398] int16\n", - "fbank\n", - "[ 465 783 677 ... 980 903 1008] int16\n", - "fbank\n", - "[ 90 160 157 ... -2 -16 -21] int16\n", - "fbank\n", - "[ 213 345 295 ... 2483 2246 2501] int16\n", - "fbank\n", - "[ -86 -159 -131 ... 270 258 290] int16\n", - "fbank\n", - "[-1023 -1714 -1505 ... 1532 1596 1575] int16\n", - "fbank\n", - "[-366 -602 -527 ... 374 370 379] int16\n", - "fbank\n", - "[ 761 1275 1127 ... 369 413 295] int16\n", - "fbank\n", - "[382 621 550 ... 161 161 174] int16\n", - "fbank\n", - "[ -28 -91 -120 ... 28 34 11] int16\n", - "fbank\n", - "[ -5 -5 -5 ... 268 294 341] int16\n", - "fbank\n", - "[240 417 684 ... 267 262 219] int16\n", - "fbank\n", - "[131 206 194 ... 383 320 343] int16\n", - "test: Tensor(shape=[5, 7], dtype=int32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[31069, 21487, 29233, 30340, 20320, -1 , -1 ],\n", - " [20540, 24471, 19968, 25552, 30340, 26159, -1 ],\n", - " [36825, 20010, 31243, 24230, 26159, 32654, 30340],\n", - " [20108, 21040, 20108, -1 , -1 , -1 , -1 ],\n", - " [21435, 34892, 25919, 21270, -1 , -1 , -1 ]])\n", - "fbank\n", - "[1155 1890 1577 ... 1092 989 1130] int16\n", - "fbank\n", - "[296 358 296 ... 140 140 168] int16\n", - "fbank\n", - "[-50 -91 -63 ... 104 104 86] int16\n", - "fbank\n", - "[-37 -66 -50 ... -31 -45 -52] int16\n", - "fbank\n", - "[-401 -652 -547 ... -339 -307 -344] int16\n", - "fbank\n", - "[-21 -47 -51 ... 94 81 107] int16\n", - "fbank\n", - "[ 533 887 755 ... 3074 2853 3254] int16\n", - "fbank\n", - "[ 44 71 66 ... -628 -733 -601] int16\n", - "fbank\n", - "[ 50 86 79 ... 129 116 138] int16\n", - "fbank\n", - "[ 92 146 126 ... -208 -193 -179] int16\n", - "test raw: 祝可爱的你\n", - "test raw: 去行政化\n", - "audio len: Tensor(shape=[5], dtype=int64, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [184, 194, 196, 204, 207])\n", - "test len: Tensor(shape=[5], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [5, 6, 7, 3, 4])\n", - "audio: Tensor(shape=[5, 207, 80], dtype=float32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[[12.25633812, 12.61639309, 10.36936474, ..., 13.02949619, 11.51365757, 10.59789085],\n", - " [13.32148266, 13.41071606, 11.43800735, ..., 13.69783783, 12.83939362, 11.51259613],\n", - " [12.62640572, 12.53621101, 10.97212505, ..., 13.33757591, 12.32293034, 10.75493717],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. 
]],\n", - "\n", - " [[10.99619484, 11.35202599, 9.56922054 , ..., 9.94971657 , 9.88354111 , 9.55315971 ],\n", - " [10.44461155, 9.81688595 , 5.62538481 , ..., 10.60468388, 10.94417381, 9.42646980 ],\n", - " [10.23835754, 10.23407459, 7.99464273 , ..., 10.68097591, 9.91640091 , 10.04131031],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ]],\n", - "\n", - " [[14.10299397, 14.50298119, 12.87738323, ..., 12.62796497, 12.69949627, 11.43171215],\n", - " [13.85035992, 13.15289116, 10.66541386, ..., 13.34364223, 13.46972179, 11.02160740],\n", - " [13.19866467, 13.23537827, 11.65760899, ..., 12.72559357, 12.42716217, 11.74562359],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ]],\n", - "\n", - " [[12.85668373, 12.82431412, 11.68144703, ..., 14.10119247, 15.12791920, 13.68221378],\n", - " [13.19507027, 13.40244961, 11.43618393, ..., 13.32919979, 13.68267441, 12.73429012],\n", - " [13.02173328, 12.92082500, 11.44303989, ..., 12.77793121, 13.10915661, 11.77327728],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ]],\n", - "\n", - " [[12.90771198, 13.40234852, 13.01435471, ..., 13.80359459, 14.08088684, 13.17883396],\n", - " [14.06678009, 14.06943512, 12.52837276, ..., 13.66423225, 13.66300583, 13.60142994],\n", - " [12.58743191, 12.94520760, 11.75190544, ..., 14.28828907, 14.08229160, 13.02433395],\n", - " ...,\n", - " [16.20896912, 16.42283821, 14.94358730, ..., 12.91146755, 12.66766262, 11.76361752],\n", - " [13.49324894, 14.14653301, 13.16490936, ..., 13.23435783, 13.45378494, 12.60386276],\n", - " [15.56288910, 15.92445087, 14.90794277, ..., 13.43840790, 13.41075516, 12.55605984]]])\n" - ] - } - ], - "source": [ - "for idx, (audio, audio_len, text, text_len) in enumerate(batch_reader()):\n", - " print('test:', text)\n", - " print(\"test raw:\", ''.join( chr(i) for i in text[0][:int(text_len[0])] ))\n", - " print(\"test raw:\", ''.join( chr(i) for i in text[-1][:int(text_len[-1])] ))\n", - " print('audio len:', audio_len)\n", - " print('test len:', text_len)\n", - " print('audio:', audio)\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "unexpected-skating", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "minus-modern", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "fbank\n", - "[232 387 331 ... 249 249 262] int16\n", - "fbank\n", - "[-138 -219 -192 ... 338 324 351] int16\n", - "fbank\n", - "[ 694 1175 1022 ... 553 514 627] int16\n", - "fbank\n", - "[-39 -79 -53 ... 139 172 99] int16\n", - "fbank\n", - "[-277 -480 -425 ... 758 767 739] int16\n", - "fbank\n", - "test: Tensor(shape=[5, 7], dtype=int32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[2695, 505, 2332, 2553, 169, -1 , -1 ],\n", - " [ 230, 1237, 2 , 1556, 2553, 1694, -1 ],\n", - " [3703, 28 , 2739, 1172, 1694, 2966, 2553],\n", - " [ 70 , 355, 70 , -1 , -1 , -1 , -1 ],\n", - " [ 477, 3363, 1621, 412, -1 , -1 , -1 ]])\n", - "[ 399 693 609 ... 
1291 1270 1291] int16\n", - "test raw: ઇǹज৹©\n", - "test raw: ǝണٕƜ\n", - "test len: Tensor(shape=[5], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [5, 6, 7, 3, 4])\n", - "audio: Tensor(shape=[5, 207, 80], dtype=float32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[[12.25794601, 12.61855793, 10.37306023, ..., 13.12571049, 11.53678799, 10.32210350],\n", - " [13.32333183, 13.41336918, 11.44248962, ..., 13.65861225, 12.79308128, 11.31168747],\n", - " [12.62584686, 12.53506088, 10.96861362, ..., 13.32526493, 12.41560936, 10.71458912],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ]],\n", - "\n", - " [[11.00003052, 11.35529137, 9.56384087 , ..., 10.06063652, 10.16322994, 9.43149185 ],\n", - " [10.44556236, 9.81155300 , 5.49400425 , ..., 10.84116268, 11.02734756, 9.42253590 ],\n", - " [10.23620510, 10.23321152, 7.99466419 , ..., 10.93381882, 10.28395081, 10.00841141],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ]],\n", - "\n", - " [[14.10379314, 14.50375748, 12.87825108, ..., 12.68065739, 12.62359715, 11.53773308],\n", - " [13.84964657, 13.15079498, 10.67198086, ..., 13.24875164, 13.45796680, 10.97363472],\n", - " [13.19808197, 13.23482990, 11.65900230, ..., 12.70375061, 12.41395664, 11.88668156],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ]],\n", - "\n", - " [[12.85676289, 12.82410812, 11.67961884, ..., 14.12018299, 15.14850044, 13.80065727],\n", - " [13.19532776, 13.40243340, 11.43492508, ..., 13.29144669, 13.70278549, 12.67841339],\n", - " [13.02196407, 12.92111111, 11.43998623, ..., 12.71165752, 13.16518497, 11.92028046],\n", - " ...,\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. ],\n", - " [0. , 0. , 0. , ..., 0. , 0. , 0. 
]],\n", - "\n", - " [[12.90661621, 13.40162563, 13.01394463, ..., 13.84056377, 14.11240959, 13.21227264],\n", - " [14.06642914, 14.06922340, 12.52955723, ..., 13.55829811, 13.60157204, 13.50268650],\n", - " [12.58881378, 12.94780254, 11.75758171, ..., 14.29055786, 14.12165928, 13.02695847],\n", - " ...,\n", - " [16.20891571, 16.42290306, 14.94398117, ..., 12.86083794, 12.63515949, 11.67581463],\n", - " [13.49345875, 14.14656067, 13.16498375, ..., 13.28024578, 13.40956783, 12.70357513],\n", - " [15.56265163, 15.92387581, 14.90643024, ..., 13.45694065, 13.44703197, 12.81099033]]])\n", - "audio len: Tensor(shape=[5], dtype=int64, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [184, 194, 196, 204, 207])\n" - ] - } - ], - "source": [ - "keep_transcription_text=False\n", - "batch_reader = create_dataloader(\n", - " manifest_path=args.infer_manifest,\n", - " unit_type=args.unit_type,\n", - " vocab_filepath=args.vocab_path,\n", - " mean_std_filepath=args.mean_std_path,\n", - " spm_model_prefix=args.spm_model_prefix,\n", - " augmentation_config='{}',\n", - " max_input_len=27.0,\n", - " min_input_len=0.0,\n", - " max_output_len=float('inf'),\n", - " min_output_len=0.0,\n", - " max_output_input_ratio=float('inf'),\n", - " min_output_input_ratio=0.0,\n", - " stride_ms=10.0,\n", - " window_ms=20.0,\n", - " max_freq=None,\n", - " specgram_type=args.specgram_type,\n", - " feat_dim=args.feat_dim,\n", - " delta_delta=args.delta_delta,\n", - " use_dB_normalization=True,\n", - " random_seed=0,\n", - " keep_transcription_text=keep_transcription_text,\n", - " is_training=False,\n", - " batch_size=args.num_samples,\n", - " num_workers=0,\n", - " sortagrad=True,\n", - " shuffle_method=None,\n", - " dist=False)\n", - "for idx, (audio, audio_len, text, text_len) in enumerate(batch_reader()):\n", - " print('test:', text)\n", - " print(\"test raw:\", ''.join( chr(i) for i in text[0][:int(text_len[0])] ))\n", - " print(\"test raw:\", ''.join( chr(i) for i in text[-1][:int(text_len[-1])] ))\n", - " print('test len:', text_len)\n", - " print('audio:', audio)\n", - " print('audio len:', audio_len)\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "competitive-mounting", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "knowing-military", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'num_samples': 1, 'specgram_type': 'fbank', 'feat_dim': 80, 'delta_delta': False, 'stride_ms': 10.0, 'window_ms': 25.0, 'sample_rate': 16000, 'manifest_path': 'examples/aishell/s1/data/manifest.train', 'output_path': 'examples/aishell/s1/data/mean_std.npz'}\n" - ] - } - ], - "source": [ - "import sys\n", - "import argparse\n", - "import functools\n", - "from deepspeech.utils.utility import add_arguments, print_arguments\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "\n", - "add_arg('num_samples', int, 1, \"# of samples to for statistics.\")\n", - "add_arg('specgram_type', str, 'fbank',\n", - " \"Audio feature type. 
Options: linear, mfcc, fbank.\",\n", - " choices=['linear', 'mfcc', 'fbank'])\n", - "add_arg('feat_dim', int, 80, \"Audio feature dim.\")\n", - "add_arg('delta_delta', bool, False,\"Audio feature with delta delta.\")\n", - "add_arg('stride_ms', float, 10.0, \"stride length in ms.\")\n", - "add_arg('window_ms', float, 25.0, \"stride length in ms.\")\n", - "add_arg('sample_rate', int, 16000, \"target sample rate.\")\n", - "add_arg('manifest_path', str,\n", - " 'examples/aishell/s1/data/manifest.train',\n", - " \"Filepath of manifest to compute normalizer's mean and stddev.\")\n", - "add_arg('output_path', str,\n", - " 'examples/aishell/s1/data/mean_std.npz',\n", - " \"Filepath of write mean and stddev to (.npz).\")\n", - "args = parser.parse_args([])\n", - "print(vars(args))\n" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "unnecessary-province", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "from deepspeech.frontend.augmentor.augmentation import AugmentationPipeline\n", - "from deepspeech.frontend.featurizer.audio_featurizer import AudioFeaturizer\n", - "from deepspeech.frontend.normalizer import FeatureNormalizer\n", - "from deepspeech.frontend.audio import AudioSegment\n", - "from deepspeech.frontend.utility import load_cmvn\n", - "from deepspeech.frontend.utility import read_manifest\n", - "\n", - "\n", - "\n", - "def mean(args):\n", - " augmentation_pipeline = AugmentationPipeline('{}')\n", - " audio_featurizer = AudioFeaturizer(\n", - " specgram_type=args.specgram_type,\n", - " feat_dim=args.feat_dim,\n", - " delta_delta=args.delta_delta,\n", - " stride_ms=args.stride_ms,\n", - " window_ms=args.window_ms,\n", - " n_fft=None,\n", - " max_freq=None,\n", - " target_sample_rate=args.sample_rate,\n", - " use_dB_normalization=True,\n", - " target_dB=-20,\n", - " dither=0.0)\n", - "\n", - " def augment_and_featurize(audio_segment):\n", - " augmentation_pipeline.transform_audio(audio_segment)\n", - " return audio_featurizer.featurize(audio_segment)\n", - "\n", - " normalizer = FeatureNormalizer(\n", - " mean_std_filepath=None,\n", - " manifest_path=args.manifest_path,\n", - " featurize_func=augment_and_featurize,\n", - " num_samples=args.num_samples)\n", - " normalizer.write_to_file(args.output_path)\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "interested-camping", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0.00164795 0.00274658 0.00234985 ... 0.00177002 0.00177002 0.00186157]\n", - "[54. 90. 77. ... 58. 58. 61.]\n", - "29746\n", - "fbank\n", - "[54 90 77 ... 58 58 61] int16\n", - "(184, 80) float64\n", - "[[10.61737914 10.07708936 5.32487528 ... 10.2481839 8.89699394\n", - " 7.80671114]\n", - " [11.0440077 10.3180721 6.30866128 ... 11.23730926 10.35838868\n", - " 8.83860079]\n", - " [10.26930555 9.99636567 7.3296638 ... 10.45131595 9.69295303\n", - " 7.96168491]\n", - " ...\n", - " [10.14497345 9.88674207 6.73801138 ... 10.21580627 9.00343472\n", - " 8.75616521]\n", - " [ 9.97745961 9.67949736 7.90660425 ... 10.22436653 9.59456493\n", - " 7.69287184]\n", - " [ 6.47357374 7.76335491 7.75765843 ... 9.96522077 9.6226365\n", - " 8.16007108]]\n", - "(184, 80) float64\n", - "[[10.61737914 10.07708936 5.32487528 ... 10.2481839 8.89699394\n", - " 7.80671114]\n", - " [11.0440077 10.3180721 6.30866128 ... 11.23730926 10.35838868\n", - " 8.83860079]\n", - " [10.26930555 9.99636567 7.3296638 ... 
10.45131595 9.69295303\n", - " 7.96168491]\n", - " ...\n", - " [10.14497345 9.88674207 6.73801138 ... 10.21580627 9.00343472\n", - " 8.75616521]\n", - " [ 9.97745961 9.67949736 7.90660425 ... 10.22436653 9.59456493\n", - " 7.69287184]\n", - " [ 6.47357374 7.76335491 7.75765843 ... 9.96522077 9.6226365\n", - " 8.16007108]]\n" - ] - } - ], - "source": [ - "wav='/workspace/DeepSpeech-2.x/examples/aishell/s1/../../..//examples/dataset/aishell/data_aishell/wav/test/S0916/BAC009S0916W0426.wav'\n", - "test='祝可爱的你'\n", - "audio_featurizer = AudioFeaturizer(\n", - " specgram_type=args.specgram_type,\n", - " feat_dim=args.feat_dim,\n", - " delta_delta=args.delta_delta,\n", - " stride_ms=args.stride_ms,\n", - " window_ms=args.window_ms,\n", - " n_fft=None,\n", - " max_freq=None,\n", - " target_sample_rate=args.sample_rate,\n", - " use_dB_normalization=False,\n", - " target_dB=-20,\n", - " dither=0.0)\n", - "samples = AudioSegment.from_file(wav)\n", - "print(samples._samples)\n", - "print(samples._samples * 2**15)\n", - "print(len(samples._samples))\n", - "feat = audio_featurizer.featurize(samples, False, False)\n", - "feat = feat.T\n", - "print(feat.shape, feat.dtype)\n", - "print(feat)\n", - "\n", - "from python_speech_features import logfbank\n", - "max_freq = args.sample_rate / 2\n", - "fbank_feat = logfbank(\n", - " signal=samples.to('int16'),\n", - " samplerate=args.sample_rate,\n", - " winlen=0.001 * args.window_ms,\n", - " winstep=0.001 * args.stride_ms,\n", - " nfilt=args.feat_dim,\n", - " nfft=512,\n", - " lowfreq=20,\n", - " highfreq=max_freq,\n", - " preemph=0.97,\n", - " dither=0.0,\n", - " wintype='povey')\n", - "print(fbank_feat.shape, fbank_feat.dtype)\n", - "print(fbank_feat)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "numeric-analyst", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(184, 160)\n", - "[ 8.59522397 8.43148278 8.36414052 8.45487173 8.31761643 8.04843683\n", - " 8.01683696 7.6574614 7.95521932 8.22945157 10.20138275 9.0447775\n", - " 9.14763398 9.18184349 9.03801065 9.04852307 8.67706728 8.71894271\n", - " 9.54553655 9.19535135 8.76413076 8.47828946 8.52586143 8.49469288\n", - " 8.72461247 8.28562879 8.11581393 7.99922156 7.91023364 8.04142296\n", - " 7.89762773 7.76257636 8.32043745 8.01592886 8.34109665 8.90115454\n", - " 8.48246945 7.98658664 8.05745122 8.11384088 8.18864479 8.8091827\n", - " 11.8067711 13.25258218 14.44311795 13.90515283 14.00120623 13.99801252\n", - " 13.81595394 13.6379904 13.3574897 13.14933334 12.96518543 13.02601156\n", - " 12.70246737 12.54410834 12.15615068 11.86574681 11.67497882 10.79645481\n", - " 10.48150035 10.03758575 10.05637027 9.92891308 10.06923218 12.43382431\n", - " 12.71428321 14.33135052 13.94470959 14.29188291 14.11483993 14.03496606\n", - " 13.78167331 13.66701466 14.40308625 14.73934137 15.09569382 14.89565815\n", - " 15.10519995 14.94383582 15.03275563 15.42194679 15.29219967 15.41602274\n", - " 15.39242545 15.76836177 16.259222 16.47777231 17.03366795 17.46165793\n", - " 17.52596217 17.78844031 17.99878075 18.11446843 17.95761578 17.99900337\n", - " 17.86282737 17.7290163 17.47686504 17.43425516 17.07750485 16.64395242\n", - " 15.68217043 14.90058399 14.45645737 14.0405463 14.89549542 16.00405781\n", - " 16.27301689 16.37572895 16.31219037 16.31765447 16.44819716 16.36281089\n", - " 16.24932823 15.79302555 14.76361963 13.95761882 13.48917053 13.45543501\n", - " 13.00091327 13.13854248 13.74596395 13.86340629 14.00656109 13.77432101\n", - " 
13.64267001 13.35742634 13.23042234 12.97916104 12.80694468 12.70005006\n", - " 13.2802483 13.22644525 13.14579624 13.02536594 13.36511022 11.37167205\n", - " 12.11598045 12.47619798 12.83885973 11.63880287 11.42083924 11.08747705\n", - " 11.04093403 11.11263149 10.74353319 10.58734669 10.46180738 10.34157335\n", - " 9.63131146 9.70582692 9.29059204 8.94583657 8.66065094 8.46799095\n", - " 8.25064103 8.30239167 8.19463371 8.12104567 8.02731234 8.06412715\n", - " 7.84889951 7.73090283 7.74119562 7.85444657 7.80717312 7.7129933\n", - " 7.84087442 7.77907788 7.60660865 7.55051479 7.458385 7.496416\n", - " 7.69519793 7.49086759 7.32199493 8.01617458 7.58525375 7.06661122\n", - " 6.94653756 7.19874283 7.28515661 7.17574078]\n", - "(184,)\n", - "(184,)\n", - "[1.48370471 1.52174523 1.46984238 1.67010478 1.88757689 1.68825992\n", - " 1.74270259 1.55497318 1.29200818 1.68446481 1.88133219 1.97138928\n", - " 2.15910096 2.3149476 1.9820247 2.07694378 1.93498835 2.01493974\n", - " 2.39156824 2.02396518 1.69586449 1.63808752 1.64020228 1.43573473\n", - " 1.93092656 1.37466294 1.34704929 1.59600739 1.03960441 1.45276496\n", - " 1.59360131 1.57466343 1.89491479 1.79333746 1.32701974 1.49441767\n", - " 1.51466756 1.63497989 1.42858074 1.51135396 1.61077201 1.81066387\n", - " 1.83367783 2.3507094 2.87885378 3.26231227 2.1313117 1.98557548\n", - " 1.99105426 2.26150533 2.34298751 2.44621608 2.39201042 2.41226503\n", - " 2.5142992 3.03777565 2.81592295 2.75117863 2.78324175 2.68819666\n", - " 2.8945782 2.84464168 2.680973 2.78397395 2.47996808 1.71829563\n", - " 1.60636949 1.65992483 1.38122631 1.74831825 2.16006884 1.68076185\n", - " 1.69329487 1.44929837 1.63763312 1.80101076 2.01166253 2.03254244\n", - " 1.9583913 2.04542255 2.00859694 2.16600883 2.16095629 1.97541122\n", - " 2.13807632 2.06386436 2.2154187 2.84205688 2.54862449 2.64321545\n", - " 2.6805773 2.52300146 2.53209001 2.54682059 2.4521937 2.43155532\n", - " 2.42571275 2.23421289 2.23164529 2.23597192 2.14215121 2.10406703\n", - " 2.07962874 1.88506161 1.80092372 1.61156092 1.77426835 1.98765563\n", - " 2.0356793 1.87964187 1.779513 1.87187681 1.76463632 1.70978684\n", - " 1.76471778 1.75604749 1.62792552 1.73929352 1.6887024 1.8677704\n", - " 2.17342368 2.08166072 2.14567453 2.15936953 2.18351006 2.41010388\n", - " 2.26101752 2.25468001 2.23739715 2.15395133 2.04547813 1.92038843\n", - " 1.85491264 1.91905927 2.16709365 1.99924152 2.1850471 2.55461622\n", - " 2.72476673 1.69682926 1.73249614 2.06992695 2.1210591 1.66854454\n", - " 1.63907505 1.32203822 1.38992558 1.2436937 1.17932877 1.02963653\n", - " 1.26085036 1.16997132 1.09339504 1.14188689 1.18675772 1.31859788\n", - " 1.21746591 1.3872131 1.26095274 1.34885761 1.46633543 1.64506975\n", - " 1.36013821 1.45574721 1.43766588 1.65119054 1.57163772 1.55082968\n", - " 1.29413316 1.38351736 1.64234673 1.57186432 1.45381083 1.71204761\n", - " 1.51828607 1.30639985 1.32928395 1.49004237 1.6057589 1.81815735\n", - " 1.67784678 1.72180861 1.60703743 1.64850255]\n" - ] - } - ], - "source": [ - "a = np.hstack([feat, feat])\n", - "print(a.shape)\n", - "m = np.mean(a, axis=1)\n", - "print(m)\n", - "print(m.shape)\n", - "std = np.std(a, axis=1)\n", - "print(std.shape)\n", - "print(std)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "nonprofit-potato", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "hispanic-ethics", - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "import torchaudio\n", - 
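The cell above stacks one utterance's `[T, D]` fbank matrix side by side and reduces with `np.mean`/`np.std` over `axis=1`, which yields one statistic per frame. For CMVN the stats are conventionally accumulated the other way, one mean and std per feature dimension across all frames, which is presumably what the `FeatureNormalizer`/`mean_std.npz` path stores. A hedged sketch of that convention, with a hypothetical `cmvn_stats` helper that is not part of this patch:

```python
import numpy as np

def cmvn_stats(utterance_feats):
    """Per-dimension mean/std over all frames of all utterances.

    utterance_feats: iterable of [T_i, D] arrays (frames x feature bins).
    """
    stacked = np.concatenate(list(utterance_feats), axis=0)  # [sum(T_i), D]
    return stacked.mean(axis=0), stacked.std(axis=0)

# Applying CMVN to one utterance (the epsilon guards against zero variance):
# feat = (feat - mean) / (std + 1e-20)
```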
"import torchaudio.compliance.kaldi as kaldi\n", - "import torchaudio.sox_effects as sox_effects\n", - "from torch.nn.utils.rnn import pad_sequence\n", - "torchaudio.set_audio_backend(\"sox\")" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "changing-calvin", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([1, 29746])\n", - "tensor([[54., 90., 77., ..., 58., 58., 61.]])\n", - "(184, 80)\n", - "[[10.617376 10.077089 5.3248763 ... 10.248186 8.896992 7.8067265]\n", - " [11.044004 10.318072 6.3086634 ... 11.237308 10.358393 8.838616 ]\n", - " [10.269302 9.9963665 7.3296647 ... 10.451319 9.692951 7.9617033]\n", - " ...\n", - " [10.14497 9.886743 6.738012 ... 10.215809 9.0034275 8.756177 ]\n", - " [ 9.977456 9.679498 7.9066052 ... 10.224365 9.594568 7.6928873]\n", - " [ 6.4735703 7.7633557 7.7576594 ... 9.965221 9.622637 8.160085 ]]\n", - "-----------\n", - "[0.00164795 0.00274658 0.00234985 ... 0.00177002 0.00177002 0.00186157]\n", - "(184, 80)\n", - "[[-10.177039 -10.717326 -15.46954 ... -10.546229 -11.897424 -12.987689]\n", - " [ -9.750411 -10.476343 -14.485752 ... -9.557108 -10.436023 -11.955799]\n", - " [-10.525113 -10.798049 -13.46475 ... -10.343097 -11.101464 -12.832712]\n", - " ...\n", - " [-10.649446 -10.907673 -14.056403 ... -10.578607 -11.790988 -12.038239]\n", - " [-10.816959 -11.114918 -12.88781 ... -10.570049 -11.199847 -13.101528]\n", - " [-14.320845 -13.03106 -13.036756 ... -10.829194 -11.171779 -12.634331]]\n", - "**************\n", - "[0.00164795 0.00274658 0.00234985 ... 0.00177002 0.00177002 0.00186157]\n", - "[54. 90. 77. ... 58. 58. 61.] float32\n", - "(184, 80)\n", - "[[10.617376 10.077089 5.3248763 ... 10.248186 8.896992 7.8067265]\n", - " [11.044004 10.318072 6.3086634 ... 11.237308 10.358393 8.838616 ]\n", - " [10.269302 9.9963665 7.3296647 ... 10.451319 9.692951 7.9617033]\n", - " ...\n", - " [10.14497 9.886743 6.738012 ... 10.215809 9.0034275 8.756177 ]\n", - " [ 9.977456 9.679498 7.9066052 ... 10.224365 9.594568 7.6928873]\n", - " [ 6.4735703 7.7633557 7.7576594 ... 9.965221 9.622637 8.160085 ]]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel_launcher.py:1: UserWarning: torchaudio.backend.sox_backend.load_wav has been deprecated and will be removed from 0.9.0 release. 
Please use \"torchaudio.load\".\n", - " \"\"\"Entry point for launching an IPython kernel.\n" - ] - } - ], - "source": [ - "waveform, sample_rate = torchaudio.load_wav(wav)\n", - "print(waveform.shape)\n", - "print(waveform)\n", - "mat = kaldi.fbank(\n", - " waveform,\n", - " num_mel_bins=80,\n", - " frame_length=25,\n", - " frame_shift=10,\n", - " dither=0,\n", - " energy_floor=0.0,\n", - " sample_frequency=sample_rate\n", - " )\n", - "mat = mat.detach().numpy()\n", - "print(mat.shape)\n", - "print(mat)\n", - "\n", - "print('-----------')\n", - "print(samples._samples)\n", - "aud = torch.tensor(samples._samples).view(1, -1)\n", - "mat = kaldi.fbank(\n", - " aud,\n", - " num_mel_bins=80,\n", - " frame_length=25,\n", - " frame_shift=10,\n", - " dither=0,\n", - " energy_floor=0.0,\n", - " sample_frequency=sample_rate\n", - " )\n", - "mat = mat.detach().numpy()\n", - "print(mat.shape)\n", - "print(mat)\n", - "\n", - "print('**************')\n", - "print(samples._samples)\n", - "tmp = samples.to('int16').astype('float32')\n", - "print(tmp, tmp.dtype)\n", - "aud = torch.tensor(tmp).view(1, -1)\n", - "mat = kaldi.fbank(\n", - " aud,\n", - " num_mel_bins=80,\n", - " frame_length=25,\n", - " frame_shift=10,\n", - " dither=0,\n", - " energy_floor=0.0,\n", - " sample_frequency=sample_rate\n", - " )\n", - "mat = mat.detach().numpy()\n", - "print(mat.shape)\n", - "print(mat)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "buried-dependence", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "silver-printing", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "outer-space", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(29746,)\n", - "[54 90 77 ... 58 58 61]\n", - "(184, 80)\n", - "[[10.61737914 10.07708936 5.32487528 ... 10.2481839 8.89699394\n", - " 7.80671114]\n", - " [11.0440077 10.3180721 6.30866128 ... 11.23730926 10.35838868\n", - " 8.83860079]\n", - " [10.26930555 9.99636567 7.3296638 ... 10.45131595 9.69295303\n", - " 7.96168491]\n", - " ...\n", - " [10.14497345 9.88674207 6.73801138 ... 10.21580627 9.00343472\n", - " 8.75616521]\n", - " [ 9.97745961 9.67949736 7.90660425 ... 10.22436653 9.59456493\n", - " 7.69287184]\n", - " [ 6.47357374 7.76335491 7.75765843 ... 9.96522077 9.6226365\n", - " 8.16007108]]\n", - "(184, 13)\n", - "[[ 14.73775998 -13.30393391 5.85974818 ... -3.42359739 2.82785335\n", - " 8.86862748]\n", - " [ 15.31274834 -13.33671651 4.06537223 ... 8.15970347 2.15934846\n", - " 6.78353115]\n", - " [ 13.82218765 -13.39296404 6.8304843 ... 2.55332563 8.86724453\n", - " -0.05919222]\n", - " ...\n", - " [ 13.5837844 -13.42104892 11.21222354 ... 4.81477718 1.66627505\n", - " 5.59045842]\n", - " [ 13.75757034 -13.92626662 13.06074011 ... -0.46694046 5.56214833\n", - " 12.0785146 ]\n", - " [ 11.92813809 -15.9169855 8.78372271 ... 
-1.42014277 -3.25768086\n", - " 0.88337965]]\n" - ] - } - ], - "source": [ - "from python_speech_features import mfcc\n", - "from python_speech_features import delta\n", - "from python_speech_features import logfbank\n", - "import scipy.io.wavfile as iowav\n", - "\n", - "(rate,sig) = iowav.read(wav)\n", - "print(sig.shape)\n", - "print(sig)\n", - "\n", - "# note that generally nfilt=40 is used for speech recognition\n", - "fbank_feat = logfbank(sig,nfilt=80,lowfreq=20,dither=0,wintype='povey')\n", - "print(fbank_feat.shape)\n", - "print(fbank_feat)\n", - "\n", - "# the computed fbank coefficents of english.wav with dimension [110,23]\n", - "# [ 12.2865\t12.6906\t13.1765\t15.714\t16.064\t15.7553\t16.5746\t16.9205\t16.6472\t16.1302\t16.4576\t16.7326\t16.8864\t17.7215\t18.88\t19.1377\t19.1495\t18.6683\t18.3886\t20.3506\t20.2772\t18.8248\t18.1899\n", - "# 11.9198\t13.146\t14.7215\t15.8642\t17.4288\t16.394\t16.8238\t16.1095\t16.4297\t16.6331\t16.3163\t16.5093\t17.4981\t18.3429\t19.6555\t19.6263\t19.8435\t19.0534\t19.001\t20.0287\t19.7707\t19.5852\t19.1112\n", - "# ...\n", - "# ...\n", - "# the same with that using kaldi commands: compute-fbank-feats --dither=0.0\n", - "\n", - "mfcc_feat = mfcc(sig,dither=0,useEnergy=True,wintype='povey')\n", - "print(mfcc_feat.shape)\n", - "print(mfcc_feat)\n", - "\n", - "# the computed mfcc coefficents of english.wav with dimension [110,13]\n", - "# [ 17.1337\t-23.3651\t-7.41751\t-7.73686\t-21.3682\t-8.93884\t-3.70843\t4.68346\t-16.0676\t12.782\t-7.24054\t8.25089\t10.7292\n", - "# 17.1692\t-23.3028\t-5.61872\t-4.0075\t-23.287\t-20.6101\t-5.51584\t-6.15273\t-14.4333\t8.13052\t-0.0345329\t2.06274\t-0.564298\n", - "# ...\n", - "# ...\n", - "# the same with that using kaldi commands: compute-mfcc-feats --dither=0.0" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "sporting-school", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(184, 80)\n", - "[[-10.17703627 -10.71732606 -15.46954014 ... -10.54623152 -11.89742148\n", - " -12.98770428]\n", - " [ -9.75040771 -10.47634331 -14.48575413 ... -9.55710616 -10.43602673\n", - " -11.95581463]\n", - " [-10.52510987 -10.79804975 -13.46475161 ... -10.34309947 -11.10146239\n", - " -12.83273051]\n", - " ...\n", - " [-10.64944197 -10.90767335 -14.05640404 ... -10.57860915 -11.7909807\n", - " -12.03825021]\n", - " [-10.8169558 -11.11491806 -12.88781116 ... -10.57004889 -11.19985048\n", - " -13.10154358]\n", - " [-14.32084168 -13.03106051 -13.03675699 ... -10.82919465 -11.17177892\n", - " -12.63434434]]\n", - "(184, 13)\n", - "[[ -6.05665544 -13.30393391 5.85974818 ... -3.42359739 2.82785335\n", - " 8.86862748]\n", - " [ -5.48166707 -13.33671651 4.06537223 ... 8.15970347 2.15934846\n", - " 6.78353115]\n", - " [ -6.97222776 -13.39296404 6.8304843 ... 2.55332563 8.86724453\n", - " -0.05919222]\n", - " ...\n", - " [ -7.21063102 -13.42104892 11.21222354 ... 4.81477718 1.66627505\n", - " 5.59045842]\n", - " [ -7.03684508 -13.92626662 13.06074011 ... -0.46694046 5.56214833\n", - " 12.0785146 ]\n", - " [ -8.86627732 -15.9169855 8.78372271 ... 
-1.42014277 -3.25768086\n", - " 0.88337965]]\n" - ] - } - ], - "source": [ - "fbank_feat = logfbank(samples._samples,nfilt=80,lowfreq=20,dither=0,wintype='povey')\n", - "print(fbank_feat.shape)\n", - "print(fbank_feat)\n", - "\n", - "mfcc_feat = mfcc(samples._samples,dither=0,useEnergy=True,wintype='povey')\n", - "print(mfcc_feat.shape)\n", - "print(mfcc_feat)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "restricted-license", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "specialized-threat", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/hack_api_test.ipynb b/.notebook/hack_api_test.ipynb deleted file mode 100644 index f653084e6..000000000 --- a/.notebook/hack_api_test.ipynb +++ /dev/null @@ -1,290 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "breeding-haven", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x\n" - ] - }, - { - "data": { - "text/plain": [ - "'/home/ssd5/zhanghui/DeepSpeech2.x'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%cd ..\n", - "%pwd" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "appropriate-theta", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "LICENSE deepspeech examples\t\t requirements.txt tools\r\n", - "README.md docs\t libsndfile-1.0.28\t setup.sh\t utils\r\n", - "README_cn.md env.sh\t libsndfile-1.0.28.tar.gz tests\r\n" - ] - } - ], - "source": [ - "!ls" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "entire-bloom", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:26: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. 
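The python_speech_features path above agrees with the torchaudio/kaldi path only because the call sites match kaldi's conventions: `wintype='povey'`, `dither=0`, the default 25 ms / 10 ms framing, and an int16-range input. A hedged cross-check sketch, assuming the same patched python_speech_features build used above (the upstream PyPI package does not accept `dither`/`wintype`) and the `wav` path defined earlier in the notebook:

```python
import numpy as np
import scipy.io.wavfile as iowav
import torch
import torchaudio.compliance.kaldi as kaldi
from python_speech_features import logfbank

rate, sig = iowav.read(wav)  # int16 samples, matching the cells above
ta = kaldi.fbank(
    torch.tensor(sig, dtype=torch.float32).view(1, -1),
    num_mel_bins=80, dither=0.0, energy_floor=0.0,
    sample_frequency=rate).numpy()
psf = logfbank(sig, samplerate=rate, nfilt=80, lowfreq=20, dither=0,
               wintype='povey')
# One path runs in float32 and the other in float64, so compare with a
# tolerance rather than expecting bit-exact agreement.
print(np.abs(ta - psf).max())
```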
If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " def convert_to_list(value, n, name, dtype=np.int):\n", - "WARNING:root:override cat of paddle.Tensor if exists or register, remove this when fixed!\n", - "WARNING:root:register user masked_fill to paddle.Tensor, remove this when fixed!\n", - "WARNING:root:register user masked_fill_ to paddle.Tensor, remove this when fixed!\n", - "WARNING:root:register user repeat to paddle.Tensor, remove this when fixed!\n", - "WARNING:root:register user glu to paddle.nn.functional, remove this when fixed!\n", - "WARNING:root:register user GLU to paddle.nn, remove this when fixed!\n", - "WARNING:root:register user ConstantPad2d to paddle.nn, remove this when fixed!\n", - "WARNING:root:override ctc_loss of paddle.nn.functional if exists, remove this when fixed!\n" - ] - } - ], - "source": [ - "from deepspeech.modules import loss" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "governmental-aircraft", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], - "source": [ - "import paddle" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "proprietary-disaster", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - " paddle.VarBase>" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "paddle.Tensor.repeat" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "first-diagram", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "paddle.Tensor.size" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "intelligent-david", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "paddle.Tensor.cat" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "bronze-tenant", - "metadata": {}, - "outputs": [], - "source": [ - "a = paddle.to_tensor([12,32, 10, 12, 123,32 ,4])" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "balanced-bearing", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "7" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a.size" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "extreme-republic", - "metadata": {}, - "outputs": [], - "source": [ - "def size(xs: paddle.Tensor, *args: int) -> paddle.Tensor:\n", - " nargs = len(args)\n", - " assert (nargs <= 1)\n", - " s = paddle.shape(xs)\n", - " if nargs == 1:\n", - " return s[args[0]]\n", - " else:\n", - " return s\n", - "\n", - "# logger.warn(\n", - "# \"override size of paddle.Tensor if exists or register, remove this when fixed!\"\n", - "# )\n", - 
"paddle.Tensor.size = size" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "gross-addiction", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[1], dtype=int32, place=CPUPlace, stop_gradient=True,\n", - " [7])" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a.size(0)\n", - "a.size()" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "adverse-dining", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[1], dtype=int32, place=CPUPlace, stop_gradient=True,\n", - " [7])" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a.size()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "popular-potato", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/jit_infer.ipynb b/.notebook/jit_infer.ipynb deleted file mode 100644 index ba50d8743..000000000 --- a/.notebook/jit_infer.ipynb +++ /dev/null @@ -1,672 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x\n" - ] - }, - { - "data": { - "text/plain": [ - "'/home/ssd5/zhanghui/DeepSpeech2.x'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%cd ..\n", - "%pwd" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2021-03-26 02:55:23,873 - WARNING - register user softmax to paddle, remove this when fixed!\n", - "2021-03-26 02:55:23,875 - WARNING - register user sigmoid to paddle, remove this when fixed!\n", - "2021-03-26 02:55:23,875 - WARNING - register user relu to paddle, remove this when fixed!\n", - "2021-03-26 02:55:23,876 - WARNING - override cat of paddle if exists or register, remove this when fixed!\n", - "2021-03-26 02:55:23,876 - WARNING - override eq of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-03-26 02:55:23,877 - WARNING - override contiguous of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-03-26 02:55:23,877 - WARNING - override size of paddle.Tensor (`to_static` do not process `size` property, maybe some `paddle` api dependent on it), remove this when fixed!\n", - "2021-03-26 02:55:23,878 - WARNING - register user view to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,878 - WARNING - register user view_as to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,879 - WARNING - register user masked_fill to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,880 - WARNING - register user masked_fill_ to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,880 - WARNING - register user fill_ to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,881 - WARNING - register user repeat to paddle.Tensor, remove this 
when fixed!\n", - "2021-03-26 02:55:23,881 - WARNING - register user softmax to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,882 - WARNING - register user sigmoid to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,882 - WARNING - register user relu to paddle.Tensor, remove this when fixed!\n", - "2021-03-26 02:55:23,883 - WARNING - register user glu to paddle.nn.functional, remove this when fixed!\n", - "2021-03-26 02:55:23,883 - WARNING - override ctc_loss of paddle.nn.functional if exists, remove this when fixed!\n", - "2021-03-26 02:55:23,884 - WARNING - register user GLU to paddle.nn, remove this when fixed!\n", - "2021-03-26 02:55:23,884 - WARNING - register user ConstantPad2d to paddle.nn, remove this when fixed!\n", - "/home/ssd5/zhanghui/DeepSpeech2.x/tools/venv-dev/lib/python3.7/site-packages/scipy/fftpack/__init__.py:103: DeprecationWarning: The module numpy.dual is deprecated. Instead of using dual, use the functions directly from numpy or scipy.\n", - " from numpy.dual import register_func\n", - "/home/ssd5/zhanghui/DeepSpeech2.x/tools/venv-dev/lib/python3.7/site-packages/scipy/special/orthogonal.py:81: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,\n" - ] - } - ], - "source": [ - "import os\n", - "import time\n", - "import argparse\n", - "import functools\n", - "import paddle\n", - "import numpy as np\n", - "\n", - "from deepspeech.utils.socket_server import warm_up_test\n", - "from deepspeech.utils.socket_server import AsrTCPServer\n", - "from deepspeech.utils.socket_server import AsrRequestHandler\n", - "\n", - "from deepspeech.training.cli import default_argument_parser\n", - "from deepspeech.exps.deepspeech2.config import get_cfg_defaults\n", - "\n", - "from deepspeech.frontend.utility import read_manifest\n", - "from deepspeech.utils.utility import add_arguments, print_arguments\n", - "\n", - "from deepspeech.models.deepspeech2 import DeepSpeech2Model\n", - "from deepspeech.models.deepspeech2 import DeepSpeech2InferModel\n", - "from deepspeech.io.dataset import ManifestDataset\n", - "\n", - "\n", - "\n", - "from deepspeech.frontend.utility import read_manifest" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0.0\n", - "e7f28d6c0db54eb9c9a810612300b526687e56a6\n", - "OFF\n", - "OFF\n", - "commit: e7f28d6c0db54eb9c9a810612300b526687e56a6\n", - "None\n", - "0\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/ssd5/zhanghui/DeepSpeech2.x/tools/venv-dev/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. 
Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - }, - { - "data": { - "text/plain": [ - "['__builtins__',\n", - " '__cached__',\n", - " '__doc__',\n", - " '__file__',\n", - " '__loader__',\n", - " '__name__',\n", - " '__package__',\n", - " '__spec__',\n", - " 'commit',\n", - " 'full_version',\n", - " 'istaged',\n", - " 'major',\n", - " 'minor',\n", - " 'mkl',\n", - " 'patch',\n", - " 'rc',\n", - " 'show',\n", - " 'with_mkl']" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "print(paddle.__version__)\n", - "print(paddle.version.commit)\n", - "print(paddle.version.with_mkl)\n", - "print(paddle.version.mkl())\n", - "print(paddle.version.show())\n", - "print(paddle.version.patch)\n", - "dir(paddle.version)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "data:\n", - " augmentation_config: conf/augmentation.config\n", - " batch_size: 64\n", - " dev_manifest: data/manifest.dev\n", - " keep_transcription_text: False\n", - " max_duration: 27.0\n", - " max_freq: None\n", - " mean_std_filepath: examples/aishell/data/mean_std.npz\n", - " min_duration: 0.0\n", - " n_fft: None\n", - " num_workers: 0\n", - " random_seed: 0\n", - " shuffle_method: batch_shuffle\n", - " sortagrad: True\n", - " specgram_type: linear\n", - " stride_ms: 10.0\n", - " target_dB: -20\n", - " target_sample_rate: 16000\n", - " test_manifest: examples/aishell/data/manifest.test\n", - " train_manifest: data/manifest.train\n", - " use_dB_normalization: True\n", - " vocab_filepath: examples/aishell/data/vocab.txt\n", - " window_ms: 20.0\n", - "decoding:\n", - " alpha: 2.6\n", - " batch_size: 128\n", - " beam_size: 300\n", - " beta: 5.0\n", - " cutoff_prob: 0.99\n", - " cutoff_top_n: 40\n", - " decoding_method: ctc_beam_search\n", - " error_rate_type: cer\n", - " lang_model_path: data/lm/zh_giga.no_cna_cmn.prune01244.klm\n", - " num_proc_bsearch: 10\n", - "model:\n", - " num_conv_layers: 2\n", - " num_rnn_layers: 3\n", - " rnn_layer_size: 1024\n", - " share_rnn_weights: False\n", - " use_gru: True\n", - "training:\n", - " global_grad_clip: 5.0\n", - " lr: 0.0005\n", - " lr_decay: 0.83\n", - " n_epoch: 30\n", - " weight_decay: 1e-06\n", - "----------- Configuration Arguments -----------\n", - "checkpoint_path: examples/aishell/ckpt-loss2e-3-0.83-5/checkpoints/step-11725\n", - "config: examples/aishell/conf/deepspeech2.yaml\n", - "device: gpu\n", - "dump_config: None\n", - "export_path: None\n", - "host_ip: localhost\n", - "host_port: 8086\n", - "model_dir: None\n", - "model_file: examples/aishell/jit.model.pdmodel\n", - "nprocs: 1\n", - "opts: ['data.test_manifest', 'examples/aishell/data/manifest.test', 'data.mean_std_filepath', 'examples/aishell/data/mean_std.npz', 'data.vocab_filepath', 'examples/aishell/data/vocab.txt']\n", - "output: None\n", - "params_file: examples/aishell/jit.model.pdiparams\n", - "speech_save_dir: demo_cache\n", - "use_gpu: False\n", - "warmup_manifest: examples/aishell/data/manifest.test\n", - "------------------------------------------------\n" - ] - } - ], - "source": [ - "parser = default_argument_parser()\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "add_arg('host_ip', str,\n", - " 'localhost',\n", - " \"Server's IP address.\")\n", - "add_arg('host_port', 
int, 8086, \"Server's IP port.\")\n", - "add_arg('speech_save_dir', str,\n", - " 'demo_cache',\n", - " \"Directory to save demo audios.\")\n", - "add_arg('warmup_manifest', \n", - " str, \n", - " \"examples/aishell/data/manifest.test\", \n", - " \"Filepath of manifest to warm up.\")\n", - "add_arg(\n", - " \"--model_file\",\n", - " type=str,\n", - " default=\"examples/aishell/jit.model.pdmodel\",\n", - " help=\"Model filename, Specify this when your model is a combined model.\"\n", - ")\n", - "add_arg(\n", - " \"--params_file\",\n", - " type=str,\n", - " default=\"examples/aishell/jit.model.pdiparams\",\n", - " help=\n", - " \"Parameter filename, Specify this when your model is a combined model.\"\n", - ")\n", - "add_arg(\n", - " \"--model_dir\",\n", - " type=str,\n", - " default=None,\n", - " help=\n", - " \"Model dir, If you load a non-combined model, specify the directory of the model.\"\n", - ")\n", - "add_arg(\"--use_gpu\",type=bool,default=False, help=\"Whether use gpu.\")\n", - "\n", - "\n", - "args = parser.parse_args(\n", - " \"--checkpoint_path examples/aishell/ckpt-loss2e-3-0.83-5/checkpoints/step-11725 --config examples/aishell/conf/deepspeech2.yaml --opts data.test_manifest examples/aishell/data/manifest.test data.mean_std_filepath examples/aishell/data/mean_std.npz data.vocab_filepath examples/aishell/data/vocab.txt\".split()\n", - ")\n", - "\n", - "\n", - "config = get_cfg_defaults()\n", - "if args.config:\n", - " config.merge_from_file(args.config)\n", - "if args.opts:\n", - " config.merge_from_list(args.opts)\n", - "config.freeze()\n", - "print(config)\n", - "\n", - "args.warmup_manifest = config.data.test_manifest\n", - "\n", - "print_arguments(args)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "dataset = ManifestDataset(\n", - " config.data.test_manifest,\n", - " config.data.unit_type,\n", - " config.data.vocab_filepath,\n", - " config.data.mean_std_filepath,\n", - " augmentation_config=\"{}\",\n", - " max_duration=config.data.max_duration,\n", - " min_duration=config.data.min_duration,\n", - " stride_ms=config.data.stride_ms,\n", - " window_ms=config.data.window_ms,\n", - " n_fft=config.data.n_fft,\n", - " max_freq=config.data.max_freq,\n", - " target_sample_rate=config.data.target_sample_rate,\n", - " specgram_type=config.data.specgram_type,\n", - " feat_dim=config.data.feat_dim,\n", - " delta_delta=config.data.delat_delta,\n", - " use_dB_normalization=config.data.use_dB_normalization,\n", - " target_dB=config.data.target_dB,\n", - " random_seed=config.data.random_seed,\n", - " keep_transcription_text=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2021-03-26 02:55:57,930 - INFO - [checkpoint] Rank 0: loaded model from examples/aishell/ckpt-loss2e-3-0.83-5/checkpoints/step-11725.pdparams\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "layer summary:\n", - "encoder.conv.conv_in.conv.weight|[32, 1, 41, 11]|14432\n", - "encoder.conv.conv_in.bn.weight|[32]|32\n", - "encoder.conv.conv_in.bn.bias|[32]|32\n", - "encoder.conv.conv_in.bn._mean|[32]|32\n", - "encoder.conv.conv_in.bn._variance|[32]|32\n", - "encoder.conv.conv_stack.0.conv.weight|[32, 32, 21, 11]|236544\n", - "encoder.conv.conv_stack.0.bn.weight|[32]|32\n", - "encoder.conv.conv_stack.0.bn.bias|[32]|32\n", - "encoder.conv.conv_stack.0.bn._mean|[32]|32\n", - 
"encoder.conv.conv_stack.0.bn._variance|[32]|32\n", - "encoder.rnn.rnn_stacks.0.fw_fc.weight|[1312, 3072]|4030464\n", - "encoder.rnn.rnn_stacks.0.fw_bn.weight|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.fw_bn.bias|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.fw_bn._mean|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.fw_bn._variance|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.bw_fc.weight|[1312, 3072]|4030464\n", - "encoder.rnn.rnn_stacks.0.bw_bn.weight|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.bw_bn.bias|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.bw_bn._mean|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.bw_bn._variance|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.fw_cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.0.fw_cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.bw_cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.0.bw_cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.fw_rnn.cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.0.fw_rnn.cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.0.bw_rnn.cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.0.bw_rnn.cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.fw_fc.weight|[2048, 3072]|6291456\n", - "encoder.rnn.rnn_stacks.1.fw_bn.weight|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.fw_bn.bias|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.fw_bn._mean|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.fw_bn._variance|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.bw_fc.weight|[2048, 3072]|6291456\n", - "encoder.rnn.rnn_stacks.1.bw_bn.weight|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.bw_bn.bias|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.bw_bn._mean|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.bw_bn._variance|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.fw_cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.1.fw_cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.bw_cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.1.bw_cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.fw_rnn.cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.1.fw_rnn.cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.1.bw_rnn.cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.1.bw_rnn.cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.fw_fc.weight|[2048, 3072]|6291456\n", - "encoder.rnn.rnn_stacks.2.fw_bn.weight|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.fw_bn.bias|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.fw_bn._mean|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.fw_bn._variance|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.bw_fc.weight|[2048, 3072]|6291456\n", - "encoder.rnn.rnn_stacks.2.bw_bn.weight|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.bw_bn.bias|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.bw_bn._mean|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.bw_bn._variance|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.fw_cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.2.fw_cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.bw_cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.2.bw_cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.fw_rnn.cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.2.fw_rnn.cell.bias_hh|[3072]|3072\n", - "encoder.rnn.rnn_stacks.2.bw_rnn.cell.weight_hh|[3072, 1024]|3145728\n", - "encoder.rnn.rnn_stacks.2.bw_rnn.cell.bias_hh|[3072]|3072\n", - "decoder.ctc_lo.weight|[2048, 4300]|8806400\n", - "decoder.ctc_lo.bias|[4300]|4300\n", - "layer has 66 parameters, 
80148012 elements.\n" - ] - } - ], - "source": [ - "model = DeepSpeech2InferModel.from_pretrained(dataset, config,\n", - " args.checkpoint_path)\n", - "model.eval()" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "examples/aishell/jit.model.pdmodel\n", - "examples/aishell/jit.model.pdiparams\n", - "0\n", - "False\n" - ] - } - ], - "source": [ - "\n", - "from paddle.inference import Config\n", - "from paddle.inference import PrecisionType\n", - "from paddle.inference import create_predictor\n", - "\n", - "args.use_gpu=False\n", - "paddle.set_device('cpu')\n", - "\n", - "def init_predictor(args):\n", - " if args.model_dir is not None:\n", - " config = Config(args.model_dir)\n", - " else:\n", - " config = Config(args.model_file, args.params_file)\n", - "\n", - " if args.use_gpu:\n", - " config.enable_use_gpu(memory_pool_init_size_mb=1000, device_id=0)\n", - "# config.enable_tensorrt_engine(precision_mode=PrecisionType.Float32,\n", - "# use_calib_mode=True) # enable TensorRT inference with fp32 precision and int8 offline quantization\n", - " else:\n", - " # If not using mkldnn, you can set the BLAS thread count.\n", - " # The thread num should not be greater than the number of cores in the CPU.\n", - " config.set_cpu_math_library_num_threads(1)\n", - " config.enable_mkldnn()\n", - " \n", - " config.enable_memory_optim()\n", - " config.switch_ir_optim(True)\n", - " \n", - " print(config.model_dir())\n", - " print(config.prog_file())\n", - " print(config.params_file())\n", - " print(config.gpu_device_id())\n", - " print(args.use_gpu)\n", - " predictor = create_predictor(config)\n", - " return predictor\n", - "\n", - "def run(predictor, audio, audio_len):\n", - " # copy audio data to the input tensors\n", - " input_names = predictor.get_input_names()\n", - " for i, name in enumerate(input_names):\n", - " print(\"input:\", i, name)\n", - " \n", - " audio_tensor = predictor.get_input_handle('audio')\n", - " audio_tensor.reshape(audio.shape)\n", - " audio_tensor.copy_from_cpu(audio.copy())\n", - " \n", - " audiolen_tensor = predictor.get_input_handle('audio_len')\n", - " audiolen_tensor.reshape(audio_len.shape)\n", - " audiolen_tensor.copy_from_cpu(audio_len.copy())\n", - "\n", - " output_names = predictor.get_output_names()\n", - " for i, name in enumerate(output_names):\n", - " print(\"output:\", i, name)\n", - "\n", - " # do the inference\n", - " predictor.run()\n", - "\n", - " results = []\n", - " # get output data from the output tensors\n", - " output_names = predictor.get_output_names()\n", - " for i, name in enumerate(output_names):\n", - " output_tensor = predictor.get_output_handle(name)\n", - " output_data = output_tensor.copy_to_cpu()\n", - " results.append(output_data)\n", - "\n", - " return results\n", - "\n", - "\n", - "predictor = init_predictor(args)\n", - "\n", - "def file_to_transcript(filename):\n", - " print(filename)\n", - " feature = dataset.process_utterance(filename, \"\")\n", - " audio = np.array([feature[0]]).astype('float32') #[1, D, T]\n", - " audio_len = feature[0].shape[1]\n", - " audio_len = np.array([audio_len]).astype('int64') # [1]\n", - " \n", - " \n", - " i_probs = run(predictor, audio, audio_len)\n", - " print('jit:', i_probs[0], type(i_probs[0]))\n", - " \n", - " audio = paddle.to_tensor(audio)\n", - " audio_len = paddle.to_tensor(audio_len)\n", - " print(audio.shape)\n", - " print(audio_len.shape)\n", - " \n", - " #eouts, eouts_len = model.encoder(audio, audio_len)\n", - " #probs = 
model.decoder.softmax(eouts)\n", - " probs = model.forward(audio, audio_len)\n", - " print('paddle:', probs.numpy())\n", - " \n", - " flag = np.allclose(i_probs[0], probs.numpy())\n", - " print(flag)\n", - " \n", - " return probs\n", - "\n", - "# result_transcript = model.decode(\n", - "# audio,\n", - "# audio_len,\n", - "# vocab_list=dataset.vocab_list,\n", - "# decoding_method=config.decoding.decoding_method,\n", - "# lang_model_path=config.decoding.lang_model_path,\n", - "# beam_alpha=config.decoding.alpha,\n", - "# beam_beta=config.decoding.beta,\n", - "# beam_size=config.decoding.beam_size,\n", - "# cutoff_prob=config.decoding.cutoff_prob,\n", - "# cutoff_top_n=config.decoding.cutoff_top_n,\n", - "# num_processes=config.decoding.num_proc_bsearch)\n", - "# return result_transcript[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Warm-up Test Case %d: %s 0 /home/ssd5/zhanghui/DeepSpeech2.x/examples/aishell/../dataset/aishell/data_aishell/wav/test/S0764/BAC009S0764W0124.wav\n", - "/home/ssd5/zhanghui/DeepSpeech2.x/examples/aishell/../dataset/aishell/data_aishell/wav/test/S0764/BAC009S0764W0124.wav\n", - "input: 0 audio\n", - "input: 1 audio_len\n", - "output: 0 tmp_75\n", - "jit: [[[8.91786298e-12 4.45648032e-12 3.67572750e-09 ... 8.91767563e-12\n", - " 8.91573707e-12 4.64317296e-08]\n", - " [1.55950222e-15 2.62794089e-14 4.50423509e-12 ... 1.55944271e-15\n", - " 1.55891342e-15 9.99992609e-01]\n", - " [1.24638127e-17 7.61802427e-16 2.93265812e-14 ... 1.24633371e-17\n", - " 1.24587264e-17 1.00000000e+00]\n", - " ...\n", - " [4.37488240e-15 2.43676260e-12 1.98770514e-12 ... 4.37479896e-15\n", - " 4.37354747e-15 1.00000000e+00]\n", - " [3.89334696e-13 1.66754856e-11 1.42900388e-11 ... 3.89329492e-13\n", - " 3.89252270e-13 1.00000000e+00]\n", - " [1.00349985e-10 2.56293708e-10 2.91177582e-10 ... 1.00347876e-10\n", - " 1.00334095e-10 9.99998808e-01]]] \n", - "[1, 161, 522]\n", - "[1]\n", - "paddle: [[[8.91789680e-12 4.45649724e-12 3.67574149e-09 ... 8.91770945e-12\n", - " 8.91577090e-12 4.64319072e-08]\n", - " [1.55950222e-15 2.62794089e-14 4.50423509e-12 ... 1.55944271e-15\n", - " 1.55891342e-15 9.99992609e-01]\n", - " [1.24638599e-17 7.61805339e-16 2.93267472e-14 ... 1.24633842e-17\n", - " 1.24587735e-17 1.00000000e+00]\n", - " ...\n", - " [4.37488240e-15 2.43676737e-12 1.98770514e-12 ... 4.37479896e-15\n", - " 4.37354747e-15 1.00000000e+00]\n", - " [3.89336187e-13 1.66755481e-11 1.42900925e-11 ... 3.89330983e-13\n", - " 3.89253761e-13 1.00000000e+00]\n", - " [1.00349985e-10 2.56293708e-10 2.91177582e-10 ... 1.00347876e-10\n", - " 1.00334095e-10 9.99998808e-01]]]\n", - "False\n" - ] - } - ], - "source": [ - "manifest = read_manifest(args.warmup_manifest)\n", - "\n", - "for idx, sample in enumerate(manifest[:1]):\n", - " print(\"Warm-up Test Case %d: %s\", idx, sample['audio_filepath'])\n", - " start_time = time.time()\n", - " transcript = file_to_transcript(sample['audio_filepath'])\n", - " finish_time = time.time()\n", - "# print(\"Response Time: %f, Transcript: %s\" %\n", - "# (finish_time - start_time, transcript))\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(1, 161, 522) (1,)\n", - "input: 0 audio\n", - "input: 1 audio_len\n", - "output: 0 tmp_75\n", - "jit: [[[8.91789680e-12 4.45649724e-12 3.67574149e-09 ... 
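The `False` printed by the comparison above comes from `np.allclose` at its default tolerances; the jit and eager matrices shown agree to roughly six significant digits, which is the expected outcome when IR optimization reorders fp32 kernels. A sketch of a tolerance better matched to float32, reusing `i_probs` and `probs` from the cell above:

```python
import numpy as np

a, b = i_probs[0], probs.numpy()
print(np.abs(a - b).max())                      # worst-case elementwise gap
print(np.allclose(a, b, rtol=1e-4, atol=1e-6))  # fp32-appropriate tolerance
```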
8.91770945e-12\n", - " 8.91577090e-12 4.64319072e-08]\n", - " [1.55950222e-15 2.62794089e-14 4.50423509e-12 ... 1.55944271e-15\n", - " 1.55891342e-15 9.99992609e-01]\n", - " [1.24638599e-17 7.61805339e-16 2.93267472e-14 ... 1.24633842e-17\n", - " 1.24587735e-17 1.00000000e+00]\n", - " ...\n", - " [4.37488240e-15 2.43676737e-12 1.98770514e-12 ... 4.37479896e-15\n", - " 4.37354747e-15 1.00000000e+00]\n", - " [3.89336187e-13 1.66755481e-11 1.42900925e-11 ... 3.89330983e-13\n", - " 3.89253761e-13 1.00000000e+00]\n", - " [1.00349985e-10 2.56293708e-10 2.91177582e-10 ... 1.00347876e-10\n", - " 1.00334095e-10 9.99998808e-01]]]\n" - ] - } - ], - "source": [ - "def test(filename):\n", - " feature = dataset.process_utterance(filename, \"\")\n", - " audio = np.array([feature[0]]).astype('float32') #[1, D, T]\n", - " audio_len = feature[0].shape[1]\n", - " audio_len = np.array([audio_len]).astype('int64') # [1]\n", - " \n", - " print(audio.shape, audio_len.shape)\n", - "\n", - " i_probs = run(predictor, audio, audio_len)\n", - " print('jit:', i_probs[0])\n", - " return i_probs\n", - " \n", - "probs = test(sample['audio_filepath'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/.notebook/layer_norm_test.ipynb b/.notebook/layer_norm_test.ipynb deleted file mode 100644 index eac3566ff..000000000 --- a/.notebook/layer_norm_test.ipynb +++ /dev/null @@ -1,229 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 32, - "id": "academic-surname", - "metadata": {}, - "outputs": [], - "source": [ - "import paddle\n", - "from paddle import nn" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "id": "fundamental-treasure", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter containing:\n", - "Tensor(shape=[256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])\n", - "Parameter containing:\n", - "Tensor(shape=[256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [0., 0., 
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])\n" - ] - } - ], - "source": [ - "L = nn.LayerNorm(256, epsilon=1e-12)\n", - "for p in L.parameters():\n", - " print(p)" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "id": "consolidated-elephant", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "id": "moderate-noise", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "float64\n" - ] - } - ], - "source": [ - "x = np.random.randn(2, 51, 256)\n", - "print(x.dtype)" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "id": "cooked-progressive", - "metadata": {}, - "outputs": [], - "source": [ - "y = L(paddle.to_tensor(x, dtype='float32'))" - ] - }, - { - "cell_type": "code", - "execution_count": 48, - "id": "optimum-milwaukee", - "metadata": {}, - "outputs": [], - "source": [ - "import torch" - ] - }, - { - "cell_type": "code", - "execution_count": 49, - "id": "viral-indian", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Parameter containing:\n", - "tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", - " 1., 1., 1., 1.], requires_grad=True)\n", - "Parameter containing:\n", - "tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 
0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n", - " requires_grad=True)\n" - ] - } - ], - "source": [ - "TL = torch.nn.LayerNorm(256, eps=1e-12)\n", - "for p in TL.parameters():\n", - " print(p)" - ] - }, - { - "cell_type": "code", - "execution_count": 50, - "id": "skilled-vietnamese", - "metadata": {}, - "outputs": [], - "source": [ - "ty = TL(torch.tensor(x, dtype=torch.float32))" - ] - }, - { - "cell_type": "code", - "execution_count": 51, - "id": "incorrect-allah", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "False" - ] - }, - "execution_count": 51, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "np.allclose(y.numpy(), ty.detach().numpy())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "prostate-cameroon", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 52, - "id": "governmental-surge", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 52, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "x = np.random.randn(2, 256)\n", - "y = L(paddle.to_tensor(x, dtype='float32'))\n", - "ty = TL(torch.tensor(x, dtype=torch.float32))\n", - "np.allclose(y.numpy(), ty.detach().numpy())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "confidential-jacket", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/mask_and_masked_fill_test.ipynb b/.notebook/mask_and_masked_fill_test.ipynb deleted file mode 100644 index 265ec536b..000000000 --- a/.notebook/mask_and_masked_fill_test.ipynb +++ /dev/null @@ -1,449 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "primary-organic", - "metadata": {}, - "outputs": [], - "source": [ - "import torch" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "id": "stopped-semester", - "metadata": {}, - "outputs": [], - "source": [ - "def mask_finished_scores(score: torch.Tensor,\n", - " flag: torch.Tensor) -> torch.Tensor:\n", - " \"\"\"\n", - " If a sequence is finished, we only allow one alive branch. 
This function\n", - " aims to give one branch a zero score and the rest -inf score.\n", - " Args:\n", - " score (torch.Tensor): A real value array with shape\n", - " (batch_size * beam_size, beam_size).\n", - " flag (torch.Tensor): A bool array with shape\n", - " (batch_size * beam_size, 1).\n", - " Returns:\n", - " torch.Tensor: (batch_size * beam_size, beam_size).\n", - " \"\"\"\n", - " beam_size = score.size(-1)\n", - " zero_mask = torch.zeros_like(flag, dtype=torch.bool)\n", - " if beam_size > 1:\n", - " unfinished = torch.cat((zero_mask, flag.repeat([1, beam_size - 1])),\n", - " dim=1)\n", - " finished = torch.cat((flag, zero_mask.repeat([1, beam_size - 1])),\n", - " dim=1)\n", - " else:\n", - " unfinished = zero_mask\n", - " finished = flag\n", - " print(unfinished)\n", - " print(finished)\n", - " score.masked_fill_(unfinished, -float('inf'))\n", - " score.masked_fill_(finished, 0)\n", - " return score" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "id": "agreed-portuguese", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[ True],\n", - " [False]])\n", - "tensor([[-0.8841, 0.7381, -0.9986],\n", - " [ 0.2675, -0.7971, 0.3798]])\n", - "tensor([[ True, True],\n", - " [False, False]])\n" - ] - } - ], - "source": [ - "score = torch.randn((2, 3))\n", - "flag = torch.ones((2, 1), dtype=torch.bool)\n", - "flag[1] = False\n", - "print(flag)\n", - "print(score)\n", - "print(flag.repeat([1, 2]))" - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "id": "clean-aspect", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[False, True, True],\n", - " [False, False, False]])\n", - "tensor([[ True, False, False],\n", - " [False, False, False]])\n", - "tensor([[ 0.0000, -inf, -inf],\n", - " [ 0.2675, -0.7971, 0.3798]])\n", - "tensor([[ 0.0000, -inf, -inf],\n", - " [ 0.2675, -0.7971, 0.3798]])\n" - ] - } - ], - "source": [ - "r = mask_finished_scores(score, flag)\n", - "print(r)\n", - "print(score)" - ] - }, - { - "cell_type": "code", - "execution_count": 55, - "id": "thrown-airline", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[2, 1], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[True ],\n", - " [False]])\n", - "Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 2.05994511, 1.87704289, 0.01988174],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n", - "Tensor(shape=[2, 2], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[True , True ],\n", - " [False, False]])\n" - ] - } - ], - "source": [ - "import paddle\n", - "\n", - "score = paddle.randn((2, 3))\n", - "flag = paddle.ones((2, 1), dtype='bool')\n", - "flag[1] = False\n", - "print(flag)\n", - "print(score)\n", - "print(flag.tile([1, 2]))" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "id": "internal-patent", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[2, 3], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[False, True , True ],\n", - " [False, False, False]])\n", - "Tensor(shape=[2, 3], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[True , False, False],\n", - " [False, False, False]])\n", - "x Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 2.05994511, 1.87704289, 0.01988174],\n", - " [-0.40165186, 0.77547729, 
-0.64469045]])\n", - "2 Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 2.05994511, 1.87704289, 0.01988174],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n", - "3 Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 2.05994511, -inf. , -inf. ],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n", - "x Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 2.05994511, -inf. , -inf. ],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n", - "2 Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 2.05994511, -inf. , -inf. ],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n", - "3 Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 0. , -inf. , -inf. ],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n", - "Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[ 0. , -inf. , -inf. ],\n", - " [-0.40165186, 0.77547729, -0.64469045]])\n" - ] - } - ], - "source": [ - "paddle.bool = 'bool'\n", - "\n", - "def masked_fill(xs:paddle.Tensor, mask:paddle.Tensor, value:float):\n", - " print(xs)\n", - " trues = paddle.ones_like(xs) * value\n", - " assert xs.shape == mask.shape\n", - " xs = paddle.where(mask, trues, xs)\n", - " return xs\n", - "\n", - "def masked_fill_(xs:paddle.Tensor, mask:paddle.Tensor, value:float):\n", - " print('x', xs)\n", - " trues = paddle.ones_like(xs) * value\n", - " assert xs.shape == mask.shape\n", - " ret = paddle.where(mask, trues, xs)\n", - " print('2', xs)\n", - " paddle.assign(ret, output=xs)\n", - " print('3', xs)\n", - "\n", - "paddle.Tensor.masked_fill = masked_fill\n", - "paddle.Tensor.masked_fill_ = masked_fill_\n", - "\n", - "def mask_finished_scores_pd(score: paddle.Tensor,\n", - " flag: paddle.Tensor) -> paddle.Tensor:\n", - " \"\"\"\n", - " If a sequence is finished, we only allow one alive branch. 
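The in-place variant defined above leans on `paddle.assign` to write the `paddle.where` result back into the original tensor. A condensed sketch of the same pattern without the debug prints (illustrative, not the repo's final helper):

```python
import paddle

def masked_fill_(xs: paddle.Tensor, mask: paddle.Tensor, value: float) -> paddle.Tensor:
    assert xs.shape == mask.shape
    filled = paddle.where(mask, paddle.ones_like(xs) * value, xs)
    paddle.assign(filled, output=xs)  # copy back so the update is in place
    return xs
```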
This function\n", - " aims to give one branch a zero score and the rest -inf score.\n", - " Args:\n", - " score (torch.Tensor): A real value array with shape\n", - " (batch_size * beam_size, beam_size).\n", - " flag (torch.Tensor): A bool array with shape\n", - " (batch_size * beam_size, 1).\n", - " Returns:\n", - " torch.Tensor: (batch_size * beam_size, beam_size).\n", - " \"\"\"\n", - " beam_size = score.shape[-1]\n", - " zero_mask = paddle.zeros_like(flag, dtype=paddle.bool)\n", - " if beam_size > 1:\n", - " unfinished = paddle.concat((zero_mask, flag.tile([1, beam_size - 1])),\n", - " axis=1)\n", - " finished = paddle.concat((flag, zero_mask.tile([1, beam_size - 1])),\n", - " axis=1)\n", - " else:\n", - " unfinished = zero_mask\n", - " finished = flag\n", - " print(unfinished)\n", - " print(finished)\n", - " \n", - " #score.masked_fill_(unfinished, -float('inf'))\n", - " #score.masked_fill_(finished, 0)\n", - "# infs = paddle.ones_like(score) * -float('inf')\n", - "# score = paddle.where(unfinished, infs, score)\n", - "# score = paddle.where(finished, paddle.zeros_like(score), score)\n", - "\n", - "# score = score.masked_fill(unfinished, -float('inf'))\n", - "# score = score.masked_fill(finished, 0)\n", - " score.masked_fill_(unfinished, -float('inf'))\n", - " score.masked_fill_(finished, 0)\n", - " return score\n", - "\n", - "r = mask_finished_scores_pd(score, flag)\n", - "print(r)" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "id": "vocal-prime", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 57, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "score.value" - ] - }, - { - "cell_type": "code", - "execution_count": 71, - "id": "bacterial-adolescent", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Union, Any" - ] - }, - { - "cell_type": "code", - "execution_count": 72, - "id": "absent-fiber", - "metadata": {}, - "outputs": [], - "source": [ - "def repeat(xs : paddle.Tensor, *size: Any):\n", - " print(size)\n", - " return paddle.tile(xs, size)\n", - "paddle.Tensor.repeat = repeat" - ] - }, - { - "cell_type": "code", - "execution_count": 73, - "id": "material-harbor", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(1, 2)\n", - "Tensor(shape=[2, 2], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[True , True ],\n", - " [False, False]])\n" - ] - } - ], - "source": [ - "flag = paddle.ones((2, 1), dtype='bool')\n", - "flag[1] = False\n", - "print(flag.repeat(1, 2))" - ] - }, - { - "cell_type": "code", - "execution_count": 84, - "id": "acute-brighton", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(Tensor(shape=[1], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [1]), 2)\n", - "Tensor(shape=[2, 2], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[True , True ],\n", - " [False, False]])\n" - ] - } - ], - "source": [ - "flag = paddle.ones((2, 1), dtype='bool')\n", - "flag[1] = False\n", - "print(flag.repeat(paddle.to_tensor(1), 2))" - ] - }, - { - "cell_type": "code", - "execution_count": 85, - "id": "european-rugby", - "metadata": {}, - "outputs": [], - "source": [ - "def size(xs, *args: int):\n", - " nargs = len(args)\n", - " s = paddle.shape(xs)\n", - " assert(nargs <= 1)\n", - " if nargs == 1:\n", - " return s[args[0]]\n", - " else:\n", - " return s\n", - "paddle.Tensor.size = size" - ] - }, - { - "cell_type": "code", - 
"execution_count": 86, - "id": "moral-special", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[2], dtype=int32, place=CPUPlace, stop_gradient=True,\n", - " [2, 1])" - ] - }, - "execution_count": 86, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "flag.size()" - ] - }, - { - "cell_type": "code", - "execution_count": 87, - "id": "ahead-coach", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[1], dtype=int32, place=CPUPlace, stop_gradient=True,\n", - " [1])" - ] - }, - "execution_count": 87, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "flag.size(1)" - ] - }, - { - "cell_type": "code", - "execution_count": 88, - "id": "incomplete-fitness", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Tensor(shape=[1], dtype=int32, place=CPUPlace, stop_gradient=True,\n", - " [2])" - ] - }, - "execution_count": 88, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "flag.size(0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "upset-connectivity", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/position_embeding_check.ipynb b/.notebook/position_embeding_check.ipynb deleted file mode 100644 index d4b9098d9..000000000 --- a/.notebook/position_embeding_check.ipynb +++ /dev/null @@ -1,231 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 2, - "id": "designing-borough", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00\n", - " 0.0000000e+00 0.0000000e+00]\n", - " [ 8.4147096e-01 8.0196178e-01 7.6172036e-01 ... 1.2409373e-04\n", - " 1.1547816e-04 1.0746076e-04]\n", - " [ 9.0929741e-01 9.5814437e-01 9.8704624e-01 ... 2.4818745e-04\n", - " 2.3095631e-04 2.1492151e-04]\n", - " ...\n", - " [ 3.7960774e-01 7.4510968e-01 7.3418564e-01 ... 1.2036801e-02\n", - " 1.1201146e-02 1.0423505e-02]\n", - " [-5.7338190e-01 -8.9752287e-02 -4.1488394e-02 ... 1.2160885e-02\n", - " 1.1316618e-02 1.0530960e-02]\n", - " [-9.9920684e-01 -8.5234123e-01 -7.8794664e-01 ... 
1.2284970e-02\n", - " 1.1432089e-02 1.0638415e-02]]\n", - "True\n", - "True\n" - ] - } - ], - "source": [ - "import torch\n", - "import math\n", - "import numpy as np\n", - "\n", - "max_len=100\n", - "d_model=256\n", - "\n", - "pe = torch.zeros(max_len, d_model)\n", - "position = torch.arange(0, max_len,\n", - " dtype=torch.float32).unsqueeze(1)\n", - "toruch_position = position\n", - "div_term = torch.exp(\n", - " torch.arange(0, d_model, 2, dtype=torch.float32) *\n", - " -(math.log(10000.0) / d_model))\n", - "tourch_div_term = div_term.cpu().detach().numpy()\n", - "\n", - "\n", - "\n", - "torhc_sin = torch.sin(position * div_term)\n", - "torhc_cos = torch.cos(position * div_term)\n", - "print(torhc_sin.cpu().detach().numpy())\n", - "np_sin = np.sin((position * div_term).cpu().detach().numpy())\n", - "np_cos = np.cos((position * div_term).cpu().detach().numpy())\n", - "print(np.allclose(np_sin, torhc_sin.cpu().detach().numpy()))\n", - "print(np.allclose(np_cos, torhc_cos.cpu().detach().numpy()))\n", - "pe[:, 0::2] = torhc_sin\n", - "pe[:, 1::2] = torhc_cos\n", - "tourch_pe = pe.cpu().detach().numpy()" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "swiss-referral", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n", - "True\n", - "False\n", - "False\n", - "False\n", - "False\n", - "[[ 1. 1. 1. ... 1. 1.\n", - " 1. ]\n", - " [ 0.5403023 0.59737533 0.6479059 ... 1. 1.\n", - " 1. ]\n", - " [-0.41614684 -0.28628543 -0.1604359 ... 0.99999994 1.\n", - " 1. ]\n", - " ...\n", - " [-0.92514753 -0.66694194 -0.67894876 ... 0.9999276 0.99993724\n", - " 0.9999457 ]\n", - " [-0.81928825 -0.9959641 -0.999139 ... 0.99992603 0.999936\n", - " 0.99994457]\n", - " [ 0.03982088 -0.52298605 -0.6157435 ... 0.99992454 0.9999347\n", - " 0.99994344]]\n", - "----\n", - "[[ 1. 1. 1. ... 1. 1.\n", - " 1. ]\n", - " [ 0.54030234 0.59737533 0.6479059 ... 1. 1.\n", - " 1. ]\n", - " [-0.41614684 -0.28628543 -0.1604359 ... 1. 1.\n", - " 1. ]\n", - " ...\n", - " [-0.92514753 -0.66694194 -0.67894876 ... 0.9999276 0.9999373\n", - " 0.9999457 ]\n", - " [-0.81928825 -0.9959641 -0.999139 ... 0.99992603 0.999936\n", - " 0.99994457]\n", - " [ 0.03982088 -0.5229861 -0.6157435 ... 0.99992454 0.9999347\n", - " 0.99994344]]\n", - ")))))))\n", - "[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00\n", - " 0.0000000e+00 0.0000000e+00]\n", - " [ 8.4147096e-01 8.0196178e-01 7.6172036e-01 ... 1.2409373e-04\n", - " 1.1547816e-04 1.0746076e-04]\n", - " [ 9.0929741e-01 9.5814437e-01 9.8704624e-01 ... 2.4818745e-04\n", - " 2.3095631e-04 2.1492151e-04]\n", - " ...\n", - " [ 3.7960774e-01 7.4510968e-01 7.3418564e-01 ... 1.2036801e-02\n", - " 1.1201146e-02 1.0423505e-02]\n", - " [-5.7338190e-01 -8.9752287e-02 -4.1488394e-02 ... 1.2160885e-02\n", - " 1.1316618e-02 1.0530960e-02]\n", - " [-9.9920684e-01 -8.5234123e-01 -7.8794664e-01 ... 1.2284970e-02\n", - " 1.1432089e-02 1.0638415e-02]]\n", - "----\n", - "[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00\n", - " 0.0000000e+00 0.0000000e+00]\n", - " [ 8.4147096e-01 8.0196178e-01 7.6172036e-01 ... 1.2409373e-04\n", - " 1.1547816e-04 1.0746076e-04]\n", - " [ 9.0929741e-01 9.5814437e-01 9.8704624e-01 ... 2.4818745e-04\n", - " 2.3095631e-04 2.1492151e-04]\n", - " ...\n", - " [ 3.7960774e-01 7.4510968e-01 7.3418564e-01 ... 1.2036801e-02\n", - " 1.1201146e-02 1.0423505e-02]\n", - " [-5.7338190e-01 -8.9752287e-02 -4.1488394e-02 ... 
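For reference, the table being checked here is the standard sinusoidal positional encoding; in the code's terms, `div_term` is 10000^(-2i/d_model) computed in log space as exp(-(2i/d)·ln 10000):

```latex
PE_{(p,\,2i)}   = \sin\bigl(p \cdot 10000^{-2i/d_{\mathrm{model}}}\bigr), \qquad
PE_{(p,\,2i+1)} = \cos\bigl(p \cdot 10000^{-2i/d_{\mathrm{model}}}\bigr)
```

The `False` results from `np.allclose(..., rtol=1.e-6, atol=0)` are expected: with a zero absolute tolerance, near-zero entries must agree to six significant digits, which float32 kernels in two different frameworks do not guarantee.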
1.2160885e-02\n", - " 1.1316618e-02 1.0530960e-02]\n", - " [-9.9920684e-01 -8.5234123e-01 -7.8794664e-01 ... 1.2284970e-02\n", - " 1.1432089e-02 1.0638415e-02]]\n" - ] - } - ], - "source": [ - "import paddle\n", - "paddle.set_device('cpu')\n", - "ppe = paddle.zeros((max_len, d_model), dtype='float32')\n", - "position = paddle.arange(0, max_len,\n", - " dtype='float32').unsqueeze(1)\n", - "print(np.allclose(position.numpy(), toruch_position))\n", - "div_term = paddle.exp(\n", - " paddle.arange(0, d_model, 2, dtype='float32') *\n", - " -(math.log(10000.0) / d_model))\n", - "print(np.allclose(div_term.numpy(), tourch_div_term))\n", - "\n", - "\n", - "\n", - "p_sin = paddle.sin(position * div_term)\n", - "p_cos = paddle.cos(position * div_term)\n", - "print(np.allclose(np_sin, p_sin.numpy(), rtol=1.e-6, atol=0))\n", - "print(np.allclose(np_cos, p_cos.numpy(), rtol=1.e-6, atol=0))\n", - "ppe[:, 0::2] = p_sin\n", - "ppe[:, 1::2] = p_cos\n", - "print(np.allclose(p_sin.numpy(), torhc_sin.cpu().detach().numpy()))\n", - "print(np.allclose(p_cos.numpy(), torhc_cos.cpu().detach().numpy()))\n", - "print(p_cos.numpy())\n", - "print(\"----\")\n", - "print(torhc_cos.cpu().detach().numpy())\n", - "print(\")))))))\")\n", - "print(p_sin.numpy())\n", - "print(\"----\")\n", - "print(torhc_sin.cpu().detach().numpy())" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "integrated-boards", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "False\n" - ] - } - ], - "source": [ - "print(np.allclose(ppe.numpy(), pe.numpy()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "flying-reserve", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "revised-divide", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/python_test.ipynb b/.notebook/python_test.ipynb deleted file mode 100644 index 819d4c48f..000000000 --- a/.notebook/python_test.ipynb +++ /dev/null @@ -1,1680 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "choice-lender", - "metadata": {}, - "outputs": [], - "source": [ - "eng=\"one minute a voice said and the time buzzer sounded\"\n", - "chn=\"可控是病毒武器最基本的要求\"" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "ruled-kuwait", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "o\n", - "n\n", - "e\n", - " \n", - "m\n", - "i\n", - "n\n", - "u\n", - "t\n", - "e\n", - " \n", - "a\n", - " \n", - "v\n", - "o\n", - "i\n", - "c\n", - "e\n", - " \n", - "s\n", - "a\n", - "i\n", - "d\n", - " \n", - "a\n", - "n\n", - "d\n", - " \n", - "t\n", - "h\n", - "e\n", - " \n", - "t\n", - "i\n", - "m\n", - "e\n", - " \n", - "b\n", - "u\n", - "z\n", - "z\n", - "e\n", - "r\n", - " \n", - "s\n", - "o\n", - "u\n", - "n\n", - "d\n", - "e\n", - "d\n" - ] - } - ], - "source": [ - "for char in eng:\n", - " print(char)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "passive-petite", - "metadata": {}, - "outputs": [ - { - "name": 
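The two cells above verify the standard sinusoidal positional encoding, PE(pos, 2i) = sin(pos / 10000^(2i/d_model)) and PE(pos, 2i+1) = cos(pos / 10000^(2i/d_model)), once with torch and once with paddle. For reference, a framework-free sketch of the same computation using only NumPy and the notebook's max_len=100, d_model=256; the final allclose(ppe, pe) printing False above is consistent with small float32 differences between the paddle and torch sin/cos kernels (visible in the last digits of the printed matrices), not with a formula mismatch:

```python
import math
import numpy as np

# Sinusoidal positional encoding, mirroring the torch/paddle cells above.
max_len, d_model = 100, 256
position = np.arange(max_len, dtype=np.float32)[:, None]      # (max_len, 1)
div_term = np.exp(np.arange(0, d_model, 2, dtype=np.float32)
                  * -(math.log(10000.0) / d_model))           # (d_model / 2,)
pe = np.zeros((max_len, d_model), dtype=np.float32)
pe[:, 0::2] = np.sin(position * div_term)                     # even columns
pe[:, 1::2] = np.cos(position * div_term)                     # odd columns
```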
"stdout", - "output_type": "stream", - "text": [ - "可\n", - "控\n", - "是\n", - "病\n", - "毒\n", - "武\n", - "器\n", - "最\n", - "基\n", - "本\n", - "的\n", - "要\n", - "求\n" - ] - } - ], - "source": [ - "for char in chn:\n", - " print(char)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "olympic-realtor", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "one\n", - "minute\n", - "a\n", - "voice\n", - "said\n", - "and\n", - "the\n", - "time\n", - "buzzer\n", - "sounded\n" - ] - } - ], - "source": [ - "for word in eng.split():\n", - " print(word)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "induced-enhancement", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "可控是病毒武器最基本的要求\n" - ] - } - ], - "source": [ - "for word in chn.split():\n", - " print(word)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "lovely-bottle", - "metadata": {}, - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'StringIO'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mStringIO\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'StringIO'" - ] - } - ], - "source": [ - "import StringIO" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "interested-cardiff", - "metadata": {}, - "outputs": [], - "source": [ - "from io import StringIO" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "portable-ivory", - "metadata": {}, - "outputs": [], - "source": [ - "inputs = StringIO()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "compatible-destination", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "64" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "inputs.write(\"nor is mister quilter's manner less interesting than his matter\" + '\\n')" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "federal-margin", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "nor is mister quilter's manner less interesting than his matternor is mister quilter's manner less interesting than his matter\n", - "\n" - ] - } - ], - "source": [ - "print(inputs.getvalue())" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "consecutive-entity", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "64" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "inputs.write(\"nor is mister quilter's manner less interesting than his matter\" + '\\n')" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "desirable-anxiety", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "nor is mister quilter's manner less interesting than his matternor is mister quilter's manner less interesting than his matter\n", - "nor is mister quilter's manner less interesting than his matter\n", - "\n" - ] - } - ], - "source": [ - "print(inputs.getvalue())" - ] - }, - { - 
"cell_type": "code", - "execution_count": 23, - "id": "employed-schedule", - "metadata": {}, - "outputs": [], - "source": [ - "import tempfile" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "unlikely-honduras", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['__class__', '__del__', '__delattr__', '__dict__', '__dir__', '__doc__', '__enter__', '__eq__', '__exit__', '__format__', '__ge__', '__getattribute__', '__getstate__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__lt__', '__ne__', '__new__', '__next__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '_checkClosed', '_checkReadable', '_checkSeekable', '_checkWritable', '_dealloc_warn', '_finalizing', 'close', 'closed', 'detach', 'fileno', 'flush', 'isatty', 'mode', 'name', 'peek', 'raw', 'read', 'read1', 'readable', 'readinto', 'readinto1', 'readline', 'readlines', 'seek', 'seekable', 'tell', 'truncate', 'writable', 'write', 'writelines']\n", - "57\n" - ] - } - ], - "source": [ - "with tempfile.TemporaryFile() as fp:\n", - " print(dir(fp))\n", - " print(fp.name)" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "id": "needed-trail", - "metadata": {}, - "outputs": [], - "source": [ - "a = tempfile.mkstemp(suffix=None, prefix='test', dir=None, text=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "id": "hazardous-choir", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['__add__', '__class__', '__contains__', '__delattr__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__getitem__', '__getnewargs__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__len__', '__lt__', '__mul__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__rmul__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'count', 'index']\n" - ] - } - ], - "source": [ - "print(dir(a))" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "id": "front-sauce", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(57, '/tmp/test27smzbzc')\n" - ] - } - ], - "source": [ - "print(a)" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "shared-wages", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "print(a.index)" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "id": "charged-carnival", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__enter__', '__eq__', '__exit__', '__format__', '__ge__', '__getattr__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', '_closer', 'close', 'delete', 'file', 'name']\n", - "/tmp/tmpfjn7mygy\n" - ] - } - ], - "source": [ - "fp= tempfile.NamedTemporaryFile(mode='w', delete=False)\n", - "print(dir(fp))\n", - "print(fp.name)\n", - "fp.close()" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "id": "religious-terror", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"/tmp/tmpfjn7mygy\n" - ] - } - ], - "source": [ - "import os\n", - "os.path.exists(fp.name)\n", - "print(fp.name)" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "id": "communist-gospel", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "fp.write" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "id": "simplified-clarity", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'example'" - ] - }, - "execution_count": 39, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "s='/home/ubuntu/python/example.py'\n", - "os.path.splitext(os.path.basename(s))[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "id": "popular-genius", - "metadata": {}, - "outputs": [], - "source": [ - "from collections import Counter" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "id": "studied-burner", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_items([('hello', 1), ('world', 1)])\n" - ] - } - ], - "source": [ - "counter = Counter()\n", - "counter.update([\"hello\"])\n", - "counter.update([\"world\"])\n", - "print(counter.items())" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "id": "mineral-ceremony", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_items([('h', 1), ('e', 1), ('l', 3), ('o', 2), ('w', 1), ('r', 1), ('d', 1)])\n" - ] - } - ], - "source": [ - "counter = Counter()\n", - "counter.update(\"hello\")\n", - "counter.update(\"world\")\n", - "print(counter.items())" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "id": "nonprofit-freedom", - "metadata": {}, - "outputs": [], - "source": [ - "counter.update(list(\"hello\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "id": "extended-methodology", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dict_items([('h', 2), ('e', 2), ('l', 5), ('o', 3), ('w', 1), ('r', 1), ('d', 1)])\n" - ] - } - ], - "source": [ - "print(counter.items())" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "id": "grand-benjamin", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['h', 'e', 'l', 'l', 'o']" - ] - }, - "execution_count": 47, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "list(\"hello\")" - ] - }, - { - "cell_type": "code", - "execution_count": 53, - "id": "marine-fundamentals", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{}\n" - ] - } - ], - "source": [ - "from io import StringIO\n", - "a = StringIO(initial_value='{}', newline='')\n", - "print(a.read())" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "id": "suitable-charlotte", - "metadata": {}, - "outputs": [ - { - "ename": "TypeError", - "evalue": "expected str, bytes or os.PathLike object, not _io.StringIO", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mwith\u001b[0m 
\u001b[0mio\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mTypeError\u001b[0m: expected str, bytes or os.PathLike object, not _io.StringIO" - ] - } - ], - "source": [ - "with io.open(a) as f:\n", - " print(f.read())" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "id": "institutional-configuration", - "metadata": {}, - "outputs": [], - "source": [ - "io.open?" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "id": "pregnant-modem", - "metadata": {}, - "outputs": [], - "source": [ - "def get_default_args(fn):\n", - " if fn is None:\n", - " return {}\n", - "\n", - " signature = inspect.signature(fn)\n", - " return {\n", - " k: v.default\n", - " for k, v in signature.parameters.items()\n", - " if v.default is not inspect.Parameter.empty\n", - " }" - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "id": "first-release", - "metadata": {}, - "outputs": [ - { - "ename": "NameError", - "evalue": "name 'inspect' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_default_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mio\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m\u001b[0m in \u001b[0;36mget_default_args\u001b[0;34m(fn)\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0msignature\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minspect\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msignature\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m return {\n\u001b[1;32m 7\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdefault\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'inspect' is not defined" - ] - } - ], - "source": [ - "get_default_args(io.open)" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "id": "convertible-roulette", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: sox in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (1.4.1)\n", - "Requirement already satisfied: numpy>=1.9.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from sox) (1.20.1)\n", - "Requirement already satisfied: librosa in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (0.8.0)\n", - "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (0.24.1)\n", - "Requirement already satisfied: numba>=0.43.0 in 
/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (0.52.0)\n", - "Requirement already satisfied: pooch>=1.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (1.3.0)\n", - "Requirement already satisfied: scipy>=1.0.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (1.2.1)\n", - "Requirement already satisfied: numpy>=1.15.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (1.20.1)\n", - "Requirement already satisfied: decorator>=3.0.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (4.4.2)\n", - "Requirement already satisfied: resampy>=0.2.2 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (0.2.2)\n", - "Requirement already satisfied: audioread>=2.0.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (2.1.9)\n", - "Requirement already satisfied: soundfile>=0.9.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (0.9.0.post1)\n", - "Requirement already satisfied: joblib>=0.14 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from librosa) (1.0.1)\n", - "Requirement already satisfied: setuptools in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from numba>=0.43.0->librosa) (51.0.0)\n", - "Requirement already satisfied: llvmlite<0.36,>=0.35.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from numba>=0.43.0->librosa) (0.35.0)\n", - "Requirement already satisfied: appdirs in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from pooch>=1.0->librosa) (1.4.4)\n", - "Requirement already satisfied: packaging in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from pooch>=1.0->librosa) (20.9)\n", - "Requirement already satisfied: requests in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from pooch>=1.0->librosa) (2.25.1)\n", - "Requirement already satisfied: six>=1.3 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from resampy>=0.2.2->librosa) (1.15.0)\n", - "Requirement already satisfied: threadpoolctl>=2.0.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from scikit-learn!=0.19.0,>=0.14.0->librosa) (2.1.0)\n", - "Requirement already satisfied: cffi>=0.6 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from soundfile>=0.9.0->librosa) (1.14.4)\n", - "Requirement already satisfied: pycparser in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from cffi>=0.6->soundfile>=0.9.0->librosa) (2.20)\n", - "Requirement already satisfied: pyparsing>=2.0.2 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from packaging->pooch>=1.0->librosa) (2.4.7)\n", - "Requirement already satisfied: idna<3,>=2.5 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from requests->pooch>=1.0->librosa) (2.10)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from requests->pooch>=1.0->librosa) (2020.12.5)\n", - "Requirement already satisfied: chardet<5,>=3.0.2 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from requests->pooch>=1.0->librosa) (4.0.0)\n", - "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from requests->pooch>=1.0->librosa) (1.26.3)\n" - ] - } - ], - 
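The get_default_args cell above fails with NameError simply because inspect was never imported. A self-contained version of the same helper; calling it on the built-in open (what the io.open cell attempts) works on CPython 3 builtins that publish a text signature:

```python
import inspect

def get_default_args(fn):
    """Map parameter name -> default for parameters of fn that declare one."""
    if fn is None:
        return {}
    signature = inspect.signature(fn)
    return {
        k: v.default
        for k, v in signature.parameters.items()
        if v.default is not inspect.Parameter.empty
    }

print(get_default_args(open))  # e.g. {'mode': 'r', 'buffering': -1, ...}
```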
"source": [ - "!pip install sox\n", - "!pip install librosa" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "cutting-fleece", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import sox\n", - "tfm = sox.Transformer()\n", - "sample_rate = 44100\n", - "y = np.sin(2 * np.pi * 440.0 * np.arange(sample_rate * 1.0) / sample_rate)\n", - "print(y.dtype.type)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "historical-diving", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ 0. 0.06264832 0.12505052 ... -0.18696144 -0.12505052\n", - " -0.06264832]\n" - ] - } - ], - "source": [ - "output_array = tfm.build_array(input_array=y, sample_rate_in=sample_rate)\n", - "print(output_array)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "similar-spice", - "metadata": {}, - "outputs": [], - "source": [ - "tfm.build_array?" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "grand-influence", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['8svx', 'aif', 'aifc', 'aiff', 'aiffc', 'al', 'amb', 'amr-nb', 'amr-wb', 'anb', 'au', 'avr', 'awb', 'caf', 'cdda', 'cdr', 'cvs', 'cvsd', 'cvu', 'dat', 'dvms', 'f32', 'f4', 'f64', 'f8', 'fap', 'flac', 'fssd', 'gsm', 'gsrt', 'hcom', 'htk', 'ima', 'ircam', 'la', 'lpc', 'lpc10', 'lu', 'mat', 'mat4', 'mat5', 'maud', 'nist', 'ogg', 'paf', 'prc', 'pvf', 'raw', 's1', 's16', 's2', 's24', 's3', 's32', 's4', 's8', 'sb', 'sd2', 'sds', 'sf', 'sl', 'sln', 'smp', 'snd', 'sndfile', 'sndr', 'sndt', 'sou', 'sox', 'sph', 'sw', 'txw', 'u1', 'u16', 'u2', 'u24', 'u3', 'u32', 'u4', 'u8', 'ub', 'ul', 'uw', 'vms', 'voc', 'vorbis', 'vox', 'w64', 'wav', 'wavpcm', 'wv', 'wve', 'xa', 'xi']\n" - ] - } - ], - "source": [ - "print(sox.core._get_valid_formats())" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "wireless-hypothetical", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "float64\n", - "(59471,)\n", - "16000\n", - "(54065,)\n", - "1.0999907518727459\n" - ] - } - ], - "source": [ - "import soundfile as sf\n", - "wav='/workspace/DeepSpeech-2.x/examples/aishell/s1/../../..//examples/dataset/aishell/data_aishell/wav/dev/S0724/BAC009S0724W0190.wav'\n", - "samples, sr = sf.read(wav)\n", - "print(samples.dtype)\n", - "print(samples.shape)\n", - "print(sr)\n", - "tfm = sox.Transformer()\n", - "tfm.speed(1.1)\n", - "output_array = tfm.build_array(input_array=samples, sample_rate_in=sr)\n", - "output_array.dtype\n", - "print(output_array.shape)\n", - "print(len(samples)/len(output_array))" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "designed-fluid", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import IPython.display as ipd\n", - "ipd.Audio(wav) # load a local WAV file" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "cultural-friendship", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tfm = sox.Transformer()\n", - "tfm.speed(1.0)\n", - 
"output_array = tfm.build_array(input_array=samples, sample_rate_in=sr)\n", - "ipd.Audio(output_array, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "fossil-lotus", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tfm = sox.Transformer()\n", - "tfm.speed(1.1)\n", - "output_array = tfm.build_array(input_array=samples, sample_rate_in=sr)\n", - "ipd.Audio(output_array, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "constitutional-poker", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tfm = sox.Transformer()\n", - "tfm.speed(0.9)\n", - "output_array = tfm.build_array(input_array=samples, sample_rate_in=sr)\n", - "ipd.Audio(output_array, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "id": "threaded-strap", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "66078\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAAA8K0lEQVR4nO2dd3hUZfbHvycdQoAEQpEWmlQVJICoKApqABdcF8u6KlbUXX+77rqrIFZs7Lr2si5WXHXtrigI0myoSFB67xDpoYQEUs/vj7kTJpM7M/fO7XPP53nmye33ZObe97zveU8hZoYgCILgX5KcFkAQBEFwFlEEgiAIPkcUgSAIgs8RRSAIguBzRBEIgiD4HFEEgiAIPscURUBEBUS0log2ENF4lf1/IaJVRLSMiOYSUYeQfWOJaL3yGWuGPIIgCIJ2yGgcARElA1gH4DwAOwAsAvBbZl4Vcsw5ABYycxkR3QJgCDNfRkQ5AAoB5ANgAIsB9GPmA4aEEgRBEDRjxohgAIANzLyJmSsAvANgdOgBzDyfmcuU1R8AtFWWLwAwm5mLlcZ/NoACE2QSBEEQNJJiwjXaANgesr4DwMAox18P4PMo57aJdcPmzZtzXl6ePikFQRB8zuLFi/cxc274djMUgWaI6EoEzEBnx3HuOADjAKB9+/YoLCw0WTpBEITEhoi2qm03wzRUBKBdyHpbZVu4AMMATAQwipnL9ZwLAMw8hZnzmTk/N7eeQhMEQRDixAxFsAhAVyLqSERpAC4HMC30ACLqC+DfCCiBPSG7ZgE4n4iyiSgbwPnKNkEQBMEmDJuGmLmKiG5FoAFPBvAqM68kokkACpl5GoDHADQC8D4RAcA2Zh7FzMVE9CACygQAJjFzsVGZBEEQBO0Ydh91gvz8fJY5AkEQBH0Q0WJmzg/fLpHFgiAIPkcUgSAIgs8RRSAIguBzRBEIgiD4HFEEgif42/tLsXirpKASBCuwNbJYEOLl/cU7kJ6ahH4dsp0WRRASDhkRCIKQcKwoOoSZK3Y5LYZnEEUgCELCMeGj5bj5zcVOi+EZRBEInsGDsY+CQzDkYdGDKALBM8irLWhFOg36EEUgCELCIYpAH6IIBEFIOEQP6EMUgeAZ3l64zWkRBI/gxWSaTiKKQBAEweeIIhAEIaGorK7Bml0lTovhKUQRCIKQUBworXBaBM8hikAQhISiRqYHdCOKQPAUFVU1OPHuz50WQ3AxNTJRrBtTFAERFRDRWiLaQETjVfafRUQ/EVEVEY0J21dNREuUz7TwcwUhlKMV1aioqnFaDMHFiCLQj+Hso0SUDOB5AOcB2AFgERFNY+ZVIYdtA3ANgL+qXOIoM/cxKofgE8hpAQS3UyP9BN2YMSIYAGADM29i5goA7wAYHXoAM29h5mUA5CcSdDFj+U7kjZ/utBiCh5ARgX7MUARtAGwPWd+hbNNKBhEVEtEPRHSRCfIkNHnjp2P/kXKnxbCNtWFugEkyIhBiMPKZb5wWwXO4YbK4AzPnA7gCwFNE1FntICIapyiMwr1799orocvYU+IfRRDetyMSTSBEp7SiunZ5n486TUYwQxEUAWgXst5W2aYJZi5S/m4C8CWAvhGOm8LM+cycn5ubG7+0HmD+mj1Ysv1gve3HKgMPeLWf/ONkmC8YYM9hUQRaMEMRLALQlYg6ElEagMsBaPL+IaJsIkpXlpsDOAPAquhnJT7Xvr4If3jrp3rb/zFzLQB/KYLw/3Tu6t2OyCF4E5kv0IZhRcDMVQBuBTALwGoA7zHzSiKaRESjAICI+hPRDgCXAPg3Ea1UTu8BoJCIlgKYD2BymLeRr8kbP73OROn+0kDv5kCZfyMn//TOEgCSVEzQhjwm2jCleD0zzwAwI2zbvSHLixAwGYWf9x2Ak8yQIdFQM4UHH+oHPl2FId1a2CuQQ0R6kcurapCRmmyvMB6kpoZRw4yUZDdMB9qPjAi04c+nw8UcVHr7as9vsZJDZfO+UjtFcpRIJQeHPfGVzZJ4k7/PWoPu98x0WgzH+GHTfqdF8ASiCFzG8qJDqts37DmCbzfss1ka54nUodtx4Ki9gniUVb8cRpWP5pQ+WVLXT0UUgTZEEXiEV77d7LQIjuCfJsxanGgQDx+rxLIdB2295+qddeNOkpOkidOCfEseoVri5gUDXD7lB9vv+cQX6
[... remainder of base64 PNG data omitted: the display_data output is the matplotlib waveform plot drawn by librosa.display.waveplot in this cell ...]\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "librosa.display.waveplot(samples_out, sr=sr)\n", - "print(len(samples_out))" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "infectious-welcome", - "metadata": {}, - "outputs": [], - "source": [ - "import librosa\n", - "x, sr = librosa.load(wav, sr=16000)" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "id": "musical-anatomy", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "float32\n", - "float64\n" - ] - } - ], - "source": [ - "print(x.dtype)\n", - "print(samples.dtype)" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "lucky-paraguay", - "metadata": {}, - "outputs": [], - "source": [ - "sf.read?" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "annual-christmas", - "metadata": {}, - "outputs": [], - "source": [ - "librosa.load?" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "id": "infectious-seeker", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 47, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "np.allclose(x, samples)" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "pregnant-conditioning", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import random" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "id": "logical-happiness", - "metadata": {}, - "outputs": [], - "source": [ - "np.random.uniform?" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "id": "rocky-plastic", - "metadata": {}, - "outputs": [], - "source": [ - "random.uniform?" - ] - }, - { - "cell_type": "code", - "execution_count": 60, - "id": "focused-compensation", - "metadata": {}, - "outputs": [], - "source": [ - "np.random.RandomState?" - ] - }, - { - "cell_type": "code", - "execution_count": 66, - "id": "centered-repository", - "metadata": {}, - "outputs": [], - "source": [ - "random.sample?" 
- ] - }, - { - "cell_type": "code", - "execution_count": 95, - "id": "inner-invite", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array(['3', '5'], dtype=' 1.0, speed up the audio;\n", - " speed_rate = 1.0, unchanged;\n", - " speed_rate < 1.0, slow down the audio;\n", - " speed_rate <= 0.0, not allowed, raise ValueError.\n", - " :type speed_rate: float\n", - " :raises ValueError: If speed_rate <= 0.0.\n", - " \"\"\"\n", - " if speed_rate <= 0:\n", - " raise ValueError(\"speed_rate should be greater than zero.\")\n", - " old_length = samples.shape[0]\n", - " new_length = int(old_length / speed_rate)\n", - " old_indices = np.arange(old_length)\n", - " new_indices = np.linspace(start=0, stop=old_length, num=new_length)\n", - " samples = np.interp(new_indices, old_indices, samples)\n", - " return samples" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "tracked-purse", - "metadata": {}, - "outputs": [], - "source": [ - "samples, sr = sf.read(wav)\n", - "samples_out = change_speed(samples, 1.0)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "steady-mileage", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ipd.Audio(samples, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "regulated-google", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ipd.Audio(samples_out, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "homeless-forge", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "samples_out = change_speed(samples, 1.1)\n", - "ipd.Audio(samples_out, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "id": "exciting-blocking", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "samples_out = change_speed(samples, 0.9)\n", - "ipd.Audio(samples_out, rate=sr) # load a NumPy array" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "through-botswana", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "66078\n" - ] - } - ], - "source": [ - "print(len(samples_out))" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "cellular-violence", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Collecting matplotlib\n", - " Downloading matplotlib-3.4.1-cp37-cp37m-manylinux1_x86_64.whl (10.3 MB)\n", - "\u001b[K |████████████████████████████████| 10.3 MB 691 kB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: pillow>=6.2.0 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from matplotlib) (8.1.0)\n", - "Requirement already satisfied: numpy>=1.16 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from matplotlib) 
(1.20.1)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from matplotlib) (2.8.1)\n", - "Collecting kiwisolver>=1.0.1\n", - " Downloading kiwisolver-1.3.1-cp37-cp37m-manylinux1_x86_64.whl (1.1 MB)\n", - "\u001b[K |████████████████████████████████| 1.1 MB 45.9 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: pyparsing>=2.2.1 in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from matplotlib) (2.4.7)\n", - "Collecting cycler>=0.10\n", - " Downloading cycler-0.10.0-py2.py3-none-any.whl (6.5 kB)\n", - "Requirement already satisfied: six in /workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages (from cycler>=0.10->matplotlib) (1.15.0)\n", - "Installing collected packages: kiwisolver, cycler, matplotlib\n", - "Successfully installed cycler-0.10.0 kiwisolver-1.3.1 matplotlib-3.4.1\n" - ] - } - ], - "source": [ - "!pip install matplotlib\n", - "%matplotlib inline\n", - "import matplotlib.pyplot as plt\n", - "import librosa.display" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "undefined-parade", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "[... base64 PNG data omitted: matplotlib waveform figure ...]
DLwaNol9MQHSfMQIusdPw4cRiYGW8u3IaUJEJmegrW7CrBDW8U4ut1Ab/viSN6YGCnHMxcsQt/HNoV24vL0LVlFm57Zwn+t6QIn/3fmXh67nrMXlU3973LYssEFyCPhDfpdvfM2uW1u0pQ8NTxxHXDerTE0B4tMOGj5dgyeSTW7S5Bg9RkNG2Yite/24K8ZpnISE3Ggg2B9OgXPvstRvc5AQDwwpcb66Rf+deXG7EmzAX95+0H8fP2gwACOalyMtNQdKAMM1bswrw1e9C0QSp+3bcNGqQlY39pBfI7ZKO0vAr3f7oKyY1btFX7fwzPERDRGAAFzHyDsn4VgIHMfGvIMSuUY3Yo6xsBDERAOfzAzG8q218B8DkzfxDtnrkde3LmZY+p7rtyYHvkZKbhmXkbMKBjDpo0SEWbpg3QPqchpny9ybLJ2yHdcnUXgREEQbCTopduPly5f3uT8O2e8RoionEAxgFAcuNcZEY47s2F22qXf9xcbINkAUQJCILgerhG1V5lxmRxEYB2IettlW2qxxBRCoAmAPZrPBcAwMxTmDmfmfMzsuqHmQfp1jIL/Toc39+0QSo65WaiQKm5ahVm+TILgiBYiOq0lBkjgkUAuhJRRwQa8csBXBF2zDQAYxGw/Y8BMI+ZmYimAXibiJ4AcAKArgB+RAy6t8rCXy8+CYVbinHVoDxc8u/vUVFVg2X3n19bo3Z7cRmaNkxFVkbd3CF7S8rBzMhMT8Huw8ewvOgQRvdpU5t9cM2DBQCAcf9ZjM65mejaIgt3fbwcHZo1xNb9AQ+g24Z1Rc/WjfGvLzfi4V+fhJ2HjuLc7i1w4t2fo7Ka8faNA3HrWz9rLskoCIJ3GT+8OyZ/vqZ2/aQ2TTCgYw5e+XYzFt41FOt3l6BhegraZjfArW//jNM65aCiirF212HMVywJw3q0wJzV9XNytctugO0HItflGN67FU5u2xTbikvx07aDWKvMJ/RonYWiA0eRkZqMgt6tsOPAUSXnl3oRWVPiCIhoBICnACQDeJWZHyaiSQAKmXkaEWUA+A+AvgCKAVweMrk8EcB1AKoA3MbMn8e6X3gcweZ9pchMT0aLrPhT927bX4ZmjdKQGeaju/9IOfo9NAcrHwh4ti7cvB8nt22K5hEyY9bUMJKSCKXlVThWWY1+D7mzMpkgCPGRlpKE78efW/tub5k8El+v24OU5CSc2DILqclJaJyRgq37y5DXPJIRG1hRdAgXPvstXrumP37adgDPztuA35zaBoO75uI2JWPvf64fgD/+92ccKDvuJdU4IwW3DOmMv89cWyee4UBpBf799Uac17NVHasIEHCSWV50CKe0y1aNI5CAMovxe55zQbCCJg1SbXchHd67FT5fESjZuWXySGzaewRLdxzEr/uqOuLEpKq6BvdNW4l7LuyJ3YeP4bZ3l+CZy/uiXU5DMDM6TpiBz/7vTLRukoF+D83B45ecgtvfX4o5fzkbHZtn4lhldb2OaywiBZSJIrCYbnd/XicPvyAIxmnZON3WoLJFE4chMz0ZPe+dBcCeyOKftx1An3ZNQUQoOngUJzTJwNrdJejeqnHc17Q0sliITPdWWU6LIAgJw4Th3QEAb1w30NL7jDurU+3y2zcORG5WOhqmpeCOgm6YdusZlt47SN/22bUp2ts0bQAiMqQEouEZ91Gv0ja7IZbukIyasXjrhoHo1yEbz83bgOfmu6+Yj92c3LYJlslzU4eczDTcdHZn3HR259ptE0f0wJNz1qGswtz62neN6IHTOuWgXXbD2oRzAPD7IV1MvY9bkBGB4AjBkdJ4pYd3RpfmyEhNRo/W1vR4vEbDNH/30aZeN6D22QhSHRaaP/f2s3HtGXmmKYHGGYHvPGj2Obd7yzpKIJERRWA1kkxGlQt6tcKLV56KGwd3wuZHR9RuH3FSK6yaZG/uwbxmDW29Xywu6dcW+R0ix8r4gf552bj57M64elCH2m3h85mdcxvVSyFjhJPaNrHF9u9GRBFYTEsDLq1W8fq1/Z0WAZv3laKgd2skJ1GdUpVEhIZpKdgyeSQuy28X5QrmcXOIqcENPHbJKWjq09q5QVKSAk3TpNG9sWD8ubhxcEc8MLqX6rHpKeY0Y5MvPtmU63gRUQQWc0dBN6dFqEdflQLgdvPpstjlIRumWxetPfvPZ2FAxxwAzlVFa9Ig0NgX9GqFMf3quiCm+DxVbXLI/9+maQNMHNkzopvmSW3qpc6Ji3Y57hoZ2okoAovJcGHqicw0+2T6YcJQFN49LK5zj5o8ARhkzYMF6NoyCy9dnY+v/3YO1u0uiX2SBXz2f2cCAG48q2MdEwgAX08Uz7xtcB1FEItWTdw36vYaogh8RM/WjTH1ugG1dtW/XRB9tHLrOcY9JFo1yUDzRunY+MiI2vQdAHDlwA5RzgrwzqLthu+vRlA5N2mQivbNGjqirBtnpCBJaez2H6nAyW2b4sbBnXDlaYHvZfUuZ5STG9DrIvnQRb0N39OvcwNBRBHYQE+XeMI8evFJOPvEXACBRv6KAe2jHh8tPF4vyUmEJGUu4MNbBuG+X4XXLnKOOwu6xz7IZGoYSFa+j1TFxj1xZI/aRs3fhiF9NG2YJkWfDCKKwAZm/Gmw0yIACPimB/nrBd2QnZkW9fg0kybhggSH+7mNMjR5e1hhJ3/p6npBlbWJCu2kqqam1qTRPLN+3qrg/IUbCc5tuAlSz6UmaEQUgU189bchToug+rJ88oe6UZKvXdsfH94yCACQlhz/y6VWYDvYridpfOomjOgR9/0jofYf1TiQZiXoE7/mwQKc1Lb+ZGf4nIHThKZxtzLHT7wmmvAYAz24wYvOaUQR2ER4Omy3cEq7pnXW++floE+7gFdRitYWWwVWKcIYVERae29m9fEmX3xS7XJFdf28T+0d8BYJfreR5ieMfPeCdkae3BpDurVwWgzHkafNJrxiw8xISaqVVY/nRigf3DwINVHy7GVlaIuaNeM769chG5f1jx6PkKThRlk6szzG4sWr+kXdn5Hqrlczp1F0M6JX6eBjl9FQ3PW0JTBaGhsrGKjT1pySnBTSc4/vngfKKiOaW7ZMHmmrTT40YK1Jg9TayfJwbhvWNep17jV5cjuSHEEau8QOv/GRQNR344xUyz1rnEjQaMCilFCIIrCJZIcms/KaZWJMv7b49NYzox6n1vtPilPm1GQyxe5uxjsa/LfO6NIML12dHzF/+zWn50W9jtkT57FwS/xJ8Ln4cfN+AMCHt5yOId2iK7F4+dwBpwovpuG3An9ntvIBD4zupalR6Z+XjXfGDaqzLTdLvQpbLJiNTd6FXscoQWX21g2nabpXVkYKSo5VWSKL12ib3aB2+adtBwEETG1GOzV/PLcLnplXP8OsE54/PvxZVZERgU3orSRkFlqUwMK7hmJKmFvluoeGx50JtE12Awzp1gJnxTB/2IHWtiU7Mw1f/nUI+uepm9LUJr/18PHvT69dXvtQQZQjnSeYFuXpy/vWbrvnQvNMY38+78R6235zanxVvoxSI7YhAAYVARHlENFsIlqv/FVNYkNEY
5Vj1hPR2JDtXxLRWiJaonwSevq+a4tGtt6vo8aAsJaNM+rZ7Y2YQk5smYVXr+mPN64bEPc1APt7a3nNMyN6KhkdEYTmd4rX5KYFM8wrGSmBzkOLkBFhaOyA0d9FrefvVGxCy8aSngIwPiIYD2AuM3cFMFdZrwMR5QC4D8BAAAMA3BemMH7HzH2Uzx6D8riaKwZGj+Q1m9Ym5mAZ2t1+HW2G/TbVxDTFZmGlIjDSoAbTccea37HCrt7zBGei7687s6Mj93UbRt+S0QCmKstTAVykcswFAGYzczEzHwAwG4C7x8YWYbeducrEYa8Tc92X9DOehvrB0cbz0ADGfrvwdNrxuuVqwYiSOTWkBsIrY/PRpmkD1ePOtaBTEJ591S6s/C28hFFF0JKZdyrLuwC0VDmmDYDQ7GE7lG1BXlPMQvdQgseJN7PZF7tdtrd9pJuYkJO/ReP4JrzNpKD38ajc+y3OsaS3XQt1CHjk14HAO2ZgaI+WEV2erxqUhykhcRBf/nUI/vcHe+r4GsXseJBEIaYiIKI5RLRC5TM69DgOjBf19pt+x8wnARisfK6KIsc4IiokosK9e/fqvI07GHXKCbbda1iPlnjkYnN6w4D2OYNmMfIX2U16ij43zBtDipaHYtbYysyKWmro7UsF8zllpafUOhaET4w/NubkOsoMqDvyaJCW7Jn6CTI1rE7Mp5KZhzFzb5XPJwB2E1FrAFD+qtn4iwCEjo3bKtvAzMG/JQDeRmAOIZIcU5g5n5nzc3Od90aJBzsHPFkZKbobQTWG9QiYAbTK/s9LTzF8Tyc5rVOzetsu6nMCBndtHvc1gw3ruLM64byeaoNm89Br6khR8klVR7F9XZLfrl7uqND7EOm/75+GHg/gs9MsdP8o9Spnfsdo92QagKAX0FgAn6gcMwvA+USUrUwSnw9gFhGlEFFzACCiVAAXAlhhUB5BwaxEai+P7R+IKNV4ucqqKLklPMqdw7sb8i4J/hR3jeih6zrrHx6u+156O+bBmIC3b4weZxFOaL8gLTlJd2nNUBfSf15iX+fBqbkIt2PUYDYZwHtEdD2ArQAuBQAiygdwMzPfwMzFRPQggEXKOZOUbZkIKIRUAMkA5gB4yaA8gkVoVSztXVYI3gyyGxozd8Ufoa2/n9ZAZ/W5oKmqT0jyQS0/dWhSvKYN09BE4/Oh1aVZsBdDioCZ9wMYqrK9EMANIeuvAng17JhSANEzbwlx40QkbGoy6a4uZSXBUpBGmKQxMjvIWzcMxO9eXlhnm13pKf73hzNAOnO2qkUJa/E2C0+OqtV0GBpbkpOZhuLSCk3nCdbiPidrwRTMzrGvJRiusto9U3ErHrgAvU0oaq63V35Gl/pzCXrNJvFC0O/mq3b8f3/cFvM8M2IhFt89DD/eVa8fGRdPX97HlOv4FVEENhNvIXenuW1Y/bQAbkatME48qE3uXnN6Hp67oi/SoiiJX/dtg6X3no8fJw5FrxOMKyQtEOmv4aDWoB/WUHgmXjUQ2kEhIrQwKbI3r5mYnIwgisBmmjeyx6/d7L65U2m0nUbt97ptWFdcePIJUXvfAzvmoEnDVLTIsi+FAYF0e6bFG1AVrwfcsUprnAn0jlAuzZdJ41BEESQodqfXzbbJ/OEGgtXmpv9RPa/P69f2x0V926ju08vP95yn+dh43Djj1e/xWoZObGlNvi3dJjHT6t8lBhJmJ5jCtFvPRKVKGUgv8tq1/XHta4vqbR87qANG9TmhtrHtEmHexMzSh9k6AvSCjWFuVjr2lpRrOsfukZ5VsTRaLts/L5BC47ExJ6umGvczMiJIUOz2GmqX0xCdcu3NrmoVfdo2Vd3+wOje6Nehbprqn3T02K0m2MvV09RmqAQdanl03GYp1NLDD+auuiS/neQYCkNGBAmKGYVh/Ep2Zhomje6FI+Wxe405mWlo3igd+46UW17KMRbBXrGeTvcNgzvizuHddd/LbYV6wt1Z1ZDGPzIyInCAm85Wz2djJmZmHg0Sq9ylW1hsgmfW1YPy8PshXUyQxj6CE6anRBjRqNEgNblOMJnW893kKgzot/kndnpL/YgiSFCssNef1LZJPW+LrIwUZKS66zFqZpNn1nHc0SgGG7cXfneqoeu0y4kdHR4ccd41Qv9owgq0dPZDG3/RA3UR05AT2NBulFuU8+cfY07BwI7NcPv7SwE4U3BcUCfYuBnJcDrztsER6xCEEqxnPO6sznHfy0wSPIO95YgicIAcG1I1V1iY/C3UFt3W4zUPEonwxjArPQUlGuY5QtGaIiSveabjcyKhaNEDoccM6twMp7Zvapk8XsNdY3qfcL0N5fGszA8ffKGeuqyPZffwEu/eNMiUvEZGCW8MNXWSE6QjrTegrEuLLHz0e28U07EDUQQOYFZxkk8iVIV6ZWw+/nWl9fn8RvcxJ2jKTF6/tr/t9+yc28iUvEZGSQ1znfGTl4w2neef70Mvogg8TPMs9UnRri2y6pQgNJsuuVmWXdso3Vq5VzarCRaZAYABHXO0pc92xzy3YcxIgudnRBF4mEiPfmiDYAUntW3iKvuwn/nNqce9uEJjR967aVBc9Qys5M3rB1p2bdEDxnDXkyLoItLQ3yv1YwVtvHjlqXX+hpKvpE0AUG8UGF572Gn0Fs3Rg5/MYFYgisDDROoF+fmlaKCjiIxXKOjdWlmK/rvqKaBjRe98aPfoOZaqLMxFpaXzI6OGyIgicAmTRusvqk0g3H5e/ToBmSbl4vcas247C00NlpX0Gh00BH+p0aSB+dliX7km+kS9FdHutUgjbwhDioCIcohoNhGtV/5mRzhuJhEdJKLPwrZ3JKKFRLSBiN4lIn+9xSEM61G/AEosiIB+efW/cj09w0QhOYl8N1G8ZfLIiBlQo3HN6XnHzzO5Af3tgHYR91npWSWTxcYwOiIYD2AuM3cFMFdZV+MxAFepbP87gCeZuQuAAwCuNyiPZ4mnr0QQlzi/MUOpgbBQKfGo57kZ0i0XAHD/qF6W2ev/rDJCDWLFKCSIXQWfEhWjimA0gKnK8lQAF6kdxMxzAZSEbqNAGOS5AD6Idb4fsLuQTKLhB3VIBPQ8oXHtMqAvC6hatHlLk0pF1uLgYzzqlBOcu7nHMaoIWjLzTmV5FwA99o1mAA4yczAGfgcA90Uo2UQ8ekDyqxzHUvuzSwg1+QVHgjU6HpxrTs+rs75l8kh0TpAaEkBgpCPER0xFQERziGiFymd06HEc6NJa9jYS0TgiKiSiwr1791p1G9toZkK+IYKMJEb3CfQCO+UmdvHytJQkdA2ZD6gdEei4xvm9WmHVpAvMFcxFxOoWSccpMjEVATMPY+beKp9PAOwmotYAoPzdo+Pe+wE0JaKgi0tbAEVR5JjCzPnMnJ+bm6vjNu5kcVhlq3ja8yQi1cliP9KpeWIrgnUPDccJIVlBGymeYXo7Ag3TrPUoc7Jb0ihD/X9b82CBzZJ4D6OmoWkAxirLYwF8ovVEZQQxH8CYeM5PNKIN8SM+yASkq5QaFBKbLZNH1pqJ0lwWPezkADVSJLUfvej0YvQpmgzgPCJa
D2CYsg4iyieil4MHEdE3AN4HMJSIdhBRcHx6J4C/ENEGBOYMXjEoj2eJ9v5EepAb+TReIJTgYN+vFrIWUSZ7/fqdCPox1JIw834AQ1W2FwK4IWRdtXoJM28CMMCIDIlCPLb+YARxw7RklFVUmy2SJ2AAN53VCWd2be60KI6Rk5mG4tIKp8UQPIy7xpU+ZcvkkYZsq3eP7GmaLG7j4lNjO5JNGNEDg7t6f97IbC7NjxzcZRVuy28UikwVR0YUgUvQOyB447rjA6krBrY3WRr30LSBb4PNNXOsUn00eONZnWyWJDK/H+KOkpaCOqIIXMIJTTMwWId5o1+Hut5C5/XUn6LC63TOzcSZXfxrEgry0e9Px/Q/Ol8hDYjcobmjwB1F7gV1RBG4hIZpKfiPjoyQ4YnlXro632yRXM/c24fgEgfMH26je6vG6HWC8xXSAOtrYcTLyJNbY0DHHKfFcC3idiK4GokB8hYtskxOWWGQ684I1Ad//or6tRyE48iIwEHUcudf0Mt/Jh4hsdj4yAjcOLij02IAAE7t0NRpETyBKAIHUZvkvfK0DoaumWg96AT7d3xBchJJOgePIaYhB7mjoBtuOttczw4pUym4gbxmx1N+bHpkhIOSCFqQEYGDpKckm25TTbQCHQn27/iG3w5oVxv5nuRg50TqdWhDFIHLMJoWINHqFYuJwZsQES7vb79Hl9kjbL8gisAjFPRqpek4K6tACYIefFAiImGQOQKXEakDrMU/+9s7z4mYgdGryHjAu+gpmmMWYgqKj8RqNRKINiG55wFtk8BtsxuaX3rQaeS99ixOFE1yc64jNyOKwCMkJ/nzp5Ienndxg2nIiVGJF/Fn6+JBEszioxmZKzaGnvxVZlPtAtPQnNW7bZfBi8gcgUdING8gwV6euPQU2+95y9md0bddU9vvG0ppuT/rdOjFp/1M7zHqlNh5+RMRUX/mcFEf+5+fdjkNbU8K2Kpxep11MQ1pQxSBy+ic20h1+6DOzdDH4d6VE0T0opIRkiaCzgNOBnXZydWD8uqsd22h/j4JdTGkCIgoh4hmE9F65W92hONmEtFBIvosbPvrRLSZiJYonz5G5EkETgjzFgqlfU5DAECrRPMMikKkyeIF48+1WRJv8tBFvbFo4jCnxbCNcIU34qTWDkniLYyOCMYDmMvMXQHMVdbVeAzAVRH2/Y2Z+yifJQblSWiCveMJI/xT5CPSiKBFVrr6DqEOGanJyPXxd5VoKVeswqgiGA1gqrI8FcBFagcx81wAJQbv5TuW338+1j88vHY9RXEh7RCS0Mtv3HpOFwCSekLQhjwm2jCqCFoy805leReAeJLpP0xEy4joSSLyb9clhDsKuuGOgm7IykitEyl8z4U9AADJPnq6w//Tm6X2raADGRFoI6b7KBHNAaCW6GZi6AozMxHpnaKfgIACSQMwBcCdACZFkGMcgHEA0L594hZrB4DfD+miur1pw0Ahd1+5koa9yE5EqwrepWlDyb2lhZiKgJkjzjQR0W4ias3MO4moNYA9em4eMpooJ6LXAPw1yrFTEFAWyM/P93VrkJ3pn4fbRypPsIBozhfCcYyahqYBGKssjwXwiZ6TFeUBChh8LwKwwqA8Cc+mR0agdRP/PNwtwvzCfd0DEDQx67aznBbBcxhVBJMBnEdE6wEMU9ZBRPlE9HLwICL6BsD7AIYS0Q4iukDZ9RYRLQewHEBzAA8ZlCfh8Ys/eJDf9m+PxXcfH5SKZUiIhVotcCE6hlJMMPN+AENVthcCuCFkfXCE88UZXIhKUhKhWaOQUYEoAiEGMj+sH4ksFjxFeqo8skJ0/DZqNgN5qwRPkZGajC2TRzothuBiRA/oRxSBIAgJhZ/ibMxCFIEgCAlFY6nbrRtRBIIgJBQZqcno3irLaTE8hSgCQRAEnyOKQPAMF/U5wWkRBI8gSQn1IYpA8AwN06WyqqANUQP6EEUgCELCIQMCfYgiEDyDvNuCVkQR6EMUgeAZ5OUWtBKpxKmgjhhdBUFIOP5wThes/OWQ02J4BlEEgiAkHAW9W6Ggt1o9LUENMQ0JnqBzbibO7JLrtBiCkJDIiEDwBHNvH+K0CIKQsMiIQBAEweeIIhAEQfA5oggEQRB8jiFFQEQ5RDSbiNYrf7NVjulDRN8T0UoiWkZEl4Xs60hEC4loAxG9S0RpRuQRBEEQ9GN0RDAewFxm7gpgrrIeThmAq5m5F4ACAE8RUVNl398BPMnMXQAcAHC9QXkEQRAEnRhVBKMBTFWWpwK4KPwAZl7HzOuV5V8A7AGQS4H0gOcC+CDa+YIgCIK1GFUELZl5p7K8C0DLaAcT0QAAaQA2AmgG4CAzVym7dwBoY1AeQRAEQScx4wiIaA4AtRC9iaErzMxExFGu0xrAfwCMZeYavfnCiWgcgHEA0L59e13nCoIgCJGJqQiYeVikfUS0m4haM/NOpaHfE+G4xgCmA5jIzD8om/cDaEpEKcqooC2AoihyTAEwRbleCRGtjSW7gzQHsM9pIWLgdhndLh8gMpqB2+UD3C+jHvk6qG00Glk8DcBYAJOVv5+EH6B4An0M4A1mDs4HBEcQ8wGMAfBOpPMjsJaZ8w3KbhlEVOhm+QD3y+h2+QCR0QzcLh/gfhnNkM/oHMFkAOcR0XoAw5R1EFE+Eb2sHHMpgLMAXENES5RPH2XfnQD+QkQbEJgzeMWgPIIgCIJODI0ImHk/gKEq2wsB3KAsvwngzQjnbwIwwIgMgiAIgjG8Glk8xWkBYuB2+QD3y+h2+QCR0QzcLh/gfhkNy0fMER19BEEQBB/g1RGBIAiCYBKeUgREVEBEa5XcRGrpLFwlDxFdQ0R7QybJb3BCzjCZXiWiPUS0wmlZgNjyENEQIjoU8h3ea7eMKjK1I6L5RLRKyaH1JzfL4tLvMIOIfiSipYrcD7hZFje+y0GIKJmIfiaiz+K+CDN74gMgGYGI5E4IRCcvBdDTzfIAuAbAc05/d2EynQXgVAArnJZFizwAhgD4zGk5w2RqDeBUZTkLwDqnnkUtsrj0OyQAjZTlVAALAZzmVlnc+C6HyPYXAG8b+Y29NCIYAGADM29i5goEYg9Gizz6YOavARQ7LUcQt8mjBWbeycw/KcslAFbDofQobpJFDxzgiLKaqnwcmbB0kyx6IaK2AEYCeDnWsdHwkiJoA2B7yLrTuYm0yvMbJf32B0TUzh7REo5ByrD9cyLq5bQwoRBRHoC+CPQiHSWGLK77DhWTxhIEMhLMZmbHvkONsrjxXX4KwB0AaoxcxEuKwIt8CiCPmU8GMBvHM7UK2vkJQAdmPgXAswD+56w4xyGiRgA+BHAbMx92sSyu/A6ZuZqZ+yCQXmYAEfV2sSyue5eJ6EIAe5h5sdFreUkRFAEI1cJRcxPZQEx5mHk/M5crqy8D6GeTbAkDMx8ODtuZeQaAVCJq7rBYIKJUBBret5j5IzfL4tbvMAgzHwQwH4F6JY4SSRaXvstnABhFRFsQME2fS0Sqwbux8JIiWASgKwWqmqUBuByBXEeulUdJxBdkFAL2W0EHRNSKlFS1FEhjnoR
AwkInZSIE0qGsZuYn3C6LS7/DXFIKVBFRAwDnAVjjVlnc+C4z8wRmbsvMeQi0P/OY+cp4rmU06ZxtMHMVEd0KYBYCHjuvMvNKt8lDRJMAFDLzNAB/JKJRAKoQmBC9xil5gxDRfxHwImlORDsA3MfMjuV4UpMHgck6MPOLCCQlvIWIqgAcBXA5K64SDnIGgKsALFfsygBwl9LbdoUsANoDrv4OWwOYSkTJCCim95g5fvdHC2Rx+7tsJhJZLAiC4HO8ZBoSBEEQLEAUgSAIgs8RRSAIguBzRBEIgiD4HFEEgiAIPkcUgSBEgYiahWSc3EVERcryESJ6wWn5BMEMxH1UEDRCRPcDOMLM/3RaFkEwExkRCEIcKDn+P1OW7yeiqUT0DRFtJaKLiegfRLSciGYqKSBARP2I6CsiWkxEs8KiVQXBMUQRCII5dAZwLgLpB94EMJ+ZT0IgknekogyeBTCGmfsBeBXAw04JKwiheCbFhCC4nM+ZuZKIliOQcmSmsn05gDwA3QD0BjBbSfuTDGCnA3IKQj1EEQiCOZQDADPXEFFlSC6fGgTeMwKwkpkHOSWgIERCTEOCYA9rAeQS0SAgkDraLQViBEEUgSDYgFLOdAyAvxPRUgBLAJzuqFCCoCDuo4IgCD5HRgSCIAg+RxSBIAiCzxFFIAiC4HNEEQiCIPgcUQSCIAg+RxSBIAiCzxFFIAiC4HNEEQiCIPic/wcvziJ0eY2VRAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "librosa.display.waveplot(samples_out, sr=sr)" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "id": "special-delicious", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "id": "seasonal-consensus", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['GetPassWarning',\n", - " '__all__',\n", - " '__builtins__',\n", - " '__cached__',\n", - " '__doc__',\n", - " '__file__',\n", - " '__loader__',\n", - " '__name__',\n", - " '__package__',\n", - " '__spec__',\n", - " '_raw_input',\n", - " 'contextlib',\n", - " 'fallback_getpass',\n", - " 'getpass',\n", - " 'getuser',\n", - " 'io',\n", - " 'os',\n", - " 'sys',\n", - " 'termios',\n", - " 'unix_getpass',\n", - " 'warnings',\n", - " 'win_getpass']" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dir(getpass)" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "id": "dress-distinction", - "metadata": {}, - "outputs": [], - "source": [ - "getpass?" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "id": "rental-anthony", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Worker:" - ] - } - ], - "source": [ - "import multiprocessing\n", - "import cProfile\n", - "import time\n", - "\n", - "def worker(num):\n", - " time.sleep(3)\n", - " print('Worker:', num)\n", - "\n", - "def profile_worker(num):\n", - " cProfile.runctx('worker(num)', globals(), locals(), 'profile-%d.out' %num)\n", - "\n", - "\n", - "\n", - "for i in range(5):\n", - " p = multiprocessing.Process(target=profile_worker, args=(i,))\n", - " p.start()" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "id": "separated-restriction", - "metadata": {}, - "outputs": [], - "source": [ - "!ls" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "painted-variable", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(2, 2)\n", - "[ 1 20]\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "l = [(1, 20), (2, 30)]\n", - "scores = np.array(l)\n", - "print(scores.shape)\n", - "print(scores[0])" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "satellite-insider", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0 1]\n" - ] - } - ], - "source": [ - "sort_idx = np.argsort(scores[:, -1])\n", - "print(sort_idx)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "developed-thirty", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 1 20]\n", - " [ 2 30]]\n" - ] - } - ], - "source": [ - "sorted_val_scores = scores[sort_idx][::1]\n", - "print(sorted_val_scores)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "official-bench", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 1 20]\n", - " [ 2 30]]\n" - ] - } - ], - "source": [ - "sorted_val_scores = scores[sort_idx]\n", - "print(sorted_val_scores)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "ranking-camera", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"b'\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\n", - "[ 1 20 2 30]\n", - "[[ 1 20]\n", - " [ 2 30]]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel_launcher.py:1: DeprecationWarning: tostring() is deprecated. Use tobytes() instead.\n", - " \"\"\"Entry point for launching an IPython kernel.\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel_launcher.py:3: DeprecationWarning: The binary mode of fromstring is deprecated, as it behaves surprisingly on unicode inputs. Use frombuffer instead\n", - " This is separate from the ipykernel package so we can avoid doing imports until\n" - ] - } - ], - "source": [ - "a = scores.tostring()\n", - "print(a)\n", - "b = np.fromstring(a, scores.dtype)\n", - "print(b)\n", - "print(scores)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "breeding-proxy", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "numpy.int16" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "np.int16" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "coordinate-hungary", - "metadata": {}, - "outputs": [], - "source": [ - "dtype = np.dtype('int16')" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "specified-jackson", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "int16\n", - "16\n" - ] - } - ], - "source": [ - "print(dtype)\n", - "dtype is np.int16\n", - "print(np.iinfo(dtype).bits)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "activated-insight", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/train_test.ipynb b/.notebook/train_test.ipynb deleted file mode 100644 index 67212e50a..000000000 --- a/.notebook/train_test.ipynb +++ /dev/null @@ -1,1887 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "cloudy-glass", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "os.environ['CUDA_VISISBLE_DEVICES'] = '0'" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "grand-stephen", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:26: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. 
If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " def convert_to_list(value, n, name, dtype=np.int):\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.0.0\n" - ] - } - ], - "source": [ - "import paddle\n", - "print(paddle.__version__)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "isolated-prize", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "romance-samuel", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'num_samples': 5, 'beam_size': 500, 'num_proc_bsearch': 8, 'num_conv_layers': 2, 'num_rnn_layers': 3, 'rnn_layer_size': 2048, 'alpha': 2.5, 'beta': 0.3, 'cutoff_prob': 1.0, 'cutoff_top_n': 40, 'use_gru': False, 'use_gpu': True, 'share_rnn_weights': True, 'infer_manifest': 'examples/aishell/data/manifest.dev', 'mean_std_path': 'examples/aishell/data/mean_std.npz', 'vocab_path': 'examples/aishell/data/vocab.txt', 'lang_model_path': 'models/lm/common_crawl_00.prune01111.trie.klm', 'model_path': 'examples/aishell/checkpoints/step_final', 'decoding_method': 'ctc_beam_search', 'error_rate_type': 'wer', 'specgram_type': 'linear'}\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], - "source": [ - "import sys\n", - "import argparse\n", - "import functools\n", - "from utils.utility import add_arguments, print_arguments\n", - "parser = argparse.ArgumentParser(description=__doc__)\n", - "add_arg = functools.partial(add_arguments, argparser=parser)\n", - "# yapf: disable\n", - "add_arg('num_samples', int, 5, \"# of samples to infer.\")\n", - "add_arg('beam_size', int, 500, \"Beam search width.\")\n", - "add_arg('num_proc_bsearch', int, 8, \"# of CPUs for beam search.\")\n", - "add_arg('num_conv_layers', int, 2, \"# of convolution layers.\")\n", - "add_arg('num_rnn_layers', int, 3, \"# of recurrent layers.\")\n", - "add_arg('rnn_layer_size', int, 2048, \"# of recurrent cells per layer.\")\n", - "add_arg('alpha', float, 2.5, \"Coef of LM for beam search.\")\n", - "add_arg('beta', float, 0.3, \"Coef of WC for beam search.\")\n", - "add_arg('cutoff_prob', float, 1.0, \"Cutoff probability for pruning.\")\n", - "add_arg('cutoff_top_n', int, 40, \"Cutoff number for pruning.\")\n", - "add_arg('use_gru', bool, False, \"Use GRUs instead of simple RNNs.\")\n", - "add_arg('use_gpu', bool, True, \"Use GPU or not.\")\n", - "add_arg('share_rnn_weights',bool, True, \"Share input-hidden weights across \"\n", - " \"bi-directional RNNs. 
Not for GRU.\")\n", - "add_arg('infer_manifest', str,\n", - " 'examples/aishell/data/manifest.dev',\n", - " \"Filepath of manifest to infer.\")\n", - "add_arg('mean_std_path', str,\n", - " 'examples/aishell/data/mean_std.npz',\n", - " \"Filepath of normalizer's mean & std.\")\n", - "add_arg('vocab_path', str,\n", - " 'examples/aishell/data/vocab.txt',\n", - " \"Filepath of vocabulary.\")\n", - "add_arg('lang_model_path', str,\n", - " 'models/lm/common_crawl_00.prune01111.trie.klm',\n", - " \"Filepath for language model.\")\n", - "add_arg('model_path', str,\n", - " 'examples/aishell/checkpoints/step_final',\n", - " \"If None, the training starts from scratch, \"\n", - " \"otherwise, it resumes from the pre-trained model.\")\n", - "add_arg('decoding_method', str,\n", - " 'ctc_beam_search',\n", - " \"Decoding method. Options: ctc_beam_search, ctc_greedy\",\n", - " choices = ['ctc_beam_search', 'ctc_greedy'])\n", - "add_arg('error_rate_type', str,\n", - " 'wer',\n", - " \"Error rate type for evaluation.\",\n", - " choices=['wer', 'cer'])\n", - "add_arg('specgram_type', str,\n", - " 'linear',\n", - " \"Audio feature type. Options: linear, mfcc.\",\n", - " choices=['linear', 'mfcc'])\n", - "# yapf: disable\n", - "args = parser.parse_args([])\n", - "print(vars(args))" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "timely-bikini", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/scipy/fftpack/__init__.py:103: DeprecationWarning: The module numpy.dual is deprecated. Instead of using dual, use the functions directly from numpy or scipy.\n", - " from numpy.dual import register_func\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/scipy/special/orthogonal.py:81: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/numba/core/types/__init__.py:108: DeprecationWarning: `np.long` is a deprecated alias for `np.compat.long`. To silence this warning, use `np.compat.long` by itself. In the likely event your code does not need to work on Python 2 you can use the builtin `int` for which `np.compat.long` is itself an alias. Doing this will not modify any behaviour and is safe. When replacing `np.long`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " long_ = _make_signed(np.long)\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/numba/core/types/__init__.py:109: DeprecationWarning: `np.long` is a deprecated alias for `np.compat.long`. To silence this warning, use `np.compat.long` by itself. 
In the likely event your code does not need to work on Python 2 you can use the builtin `int` for which `np.compat.long` is itself an alias. Doing this will not modify any behaviour and is safe. When replacing `np.long`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " ulong = _make_unsigned(np.long)\n" - ] - } - ], - "source": [ - "from data_utils.dataset import create_dataloader\n", - "batch_reader = create_dataloader(\n", - " manifest_path=args.infer_manifest,\n", - " vocab_filepath=args.vocab_path,\n", - " mean_std_filepath=args.mean_std_path,\n", - " augmentation_config='{}',\n", - " #max_duration=float('inf'),\n", - " max_duration=27.0,\n", - " min_duration=0.0,\n", - " stride_ms=10.0,\n", - " window_ms=20.0,\n", - " max_freq=None,\n", - " specgram_type=args.specgram_type,\n", - " use_dB_normalization=True,\n", - " random_seed=0,\n", - " keep_transcription_text=False,\n", - " is_training=False,\n", - " batch_size=args.num_samples,\n", - " sortagrad=True,\n", - " shuffle_method=None,\n", - " dist=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "organized-warrior", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n", - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py:354: DeprecationWarning: `np.object` is a deprecated alias for the builtin `object`. To silence this warning, use `object` by itself. Doing this will not modify any behavior and is safe. \n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " if arr.dtype == np.object:\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "test Tensor(shape=[5, 6], dtype=int32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[14 , 34 , 322 , 233 , 0 , 0 ],\n", - " [238 , 38 , 122 , 164 , 0 , 0 ],\n", - " [8 , 52 , 49 , 42 , 0 , 0 ],\n", - " [109 , 47 , 146 , 193 , 210 , 479 ],\n", - " [3330, 1751, 208 , 1923, 0 , 0 ]])\n", - "test raw 大时代里的的\n", - "test raw 煲汤受宠的的\n", - "audio len Tensor(shape=[5], dtype=int64, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [163, 167, 180, 186, 186])\n", - "test len Tensor(shape=[5], dtype=int64, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [4, 4, 4, 6, 4])\n", - "audio Tensor(shape=[5, 161, 186], dtype=float32, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [[[ 1.11669052, 0.79015088, 0.93658292, ..., 0. , 0. , 0. ],\n", - " [ 0.83549136, 0.72643483, 0.83578080, ..., 0. , 0. , 0. ],\n", - " [-0.89155018, -0.18894747, -0.53357804, ..., 0. , 0. , 0. ],\n", - " ...,\n", - " [ 0.33386710, -0.81240511, 0.12869737, ..., 0. , 0. , 0. ],\n", - " [-0.17537928, 0.58380985, 0.70696265, ..., 0. , 0. , 0. ],\n", - " [-0.84175998, 1.22041416, 0.07929770, ..., 0. , 0. , 0. 
]],\n", - "\n", - " [[-0.35964420, 0.77392709, 0.71409988, ..., 0. , 0. , 0. ],\n", - " [-0.15990183, 0.42962283, 0.06222462, ..., 0. , 0. , 0. ],\n", - " [-0.31166190, -0.74864638, -0.52836996, ..., 0. , 0. , 0. ],\n", - " ...,\n", - " [-0.27546275, 0.32889456, 0.12410031, ..., 0. , 0. , 0. ],\n", - " [ 0.16264282, 0.49418071, -0.15960945, ..., 0. , 0. , 0. ],\n", - " [ 0.12476666, 0.00516864, 1.16021466, ..., 0. , 0. , 0. ]],\n", - "\n", - " [[ 0.90202141, 1.48541915, 0.92062062, ..., 0. , 0. , 0. ],\n", - " [ 0.82661545, 1.37171340, 0.86746097, ..., 0. , 0. , 0. ],\n", - " [-0.62287915, -0.48645937, 0.35041964, ..., 0. , 0. , 0. ],\n", - " ...,\n", - " [ 0.07376949, 0.07138316, 0.76355994, ..., 0. , 0. , 0. ],\n", - " [-0.32306790, 0.43247896, 1.27311838, ..., 0. , 0. , 0. ],\n", - " [-0.97667056, 0.60747612, 0.79181534, ..., 0. , 0. , 0. ]],\n", - "\n", - " [[ 0.72022128, 0.95428467, 0.92766261, ..., 0.29105374, -0.45564806, -0.62151009],\n", - " [ 0.42083180, 0.49279949, 0.82724041, ..., -0.17333922, -1.45363355, -0.61673522],\n", - " [-0.76116520, -0.84750438, -0.09512503, ..., -1.01497340, -1.42781055, -0.80859023],\n", - " ...,\n", - " [-0.23009977, 1.06155431, 1.09065628, ..., 0.25581080, 0.53794998, -1.22650719],\n", - " [-1.37693381, 0.30778193, 0.17152318, ..., 0.51650339, 0.25580606, 0.83097816],\n", - " [-1.62180591, 1.30567718, 1.09928656, ..., -0.77590007, 1.27712476, 0.53189957]],\n", - "\n", - " [[ 1.03205252, -0.51535392, 0.21077573, ..., 0.76618457, 1.27425683, 1.52250278],\n", - " [ 0.82059991, 0.43990925, 0.13090958, ..., 0.86662549, 1.01687658, 1.48495352],\n", - " [-0.75489789, -0.01997089, -0.65174174, ..., 0.09061214, -0.55211234, -0.01614586],\n", - " ...,\n", - " [ 0.50985396, 1.84555030, 0.79185146, ..., 1.13666189, 1.19898069, 1.98158395],\n", - " [ 1.98721015, 2.52385354, 1.11714780, ..., 0.19416514, 1.11329341, 0.64460152],\n", - " [ 2.69512844, 1.90993905, 0.50245082, ..., -0.50902629, 0.03333465, -1.24584770]]])\n" - ] - } - ], - "source": [ - " for idx, (audio, audio_len, text, text_len) in enumerate(batch_reader()):\n", - " print('test', text)\n", - " print(\"test raw\", ''.join(batch_reader.dataset.vocab_list[i] for i in text[0]))\n", - " print(\"test raw\", ''.join(batch_reader.dataset.vocab_list[i] for i in text[-1]))\n", - " print('audio len', audio_len)\n", - " print('test len', text_len)\n", - " print('audio', audio)\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "confidential-radius", - "metadata": {}, - "outputs": [], - "source": [ - "# reader = batch_reader()\n", - "# audio, test , audio_len, text_len = reader.next()\n", - "# print('test', text)\n", - "# print('t len', text_len) #[B, T]\n", - "# print('audio len', audio_len)\n", - "# print(audio)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "future-vermont", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "煲汤受宠\n" - ] - } - ], - "source": [ - "print(u'\\u7172\\u6c64\\u53d7\\u5ba0')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dental-sweden", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "sunrise-contact", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "hispanic-asthma", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "hearing-leadership", - "metadata": 
{}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "skilled-friday", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "copyrighted-measure", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "employed-lightweight", - "metadata": {}, - "outputs": [], - "source": [ - "from model_utils.network import DeepSpeech2, DeepSpeech2Loss\n", - "\n", - "from data_utils.dataset import create_dataloader\n", - "batch_reader = create_dataloader(\n", - " manifest_path=args.infer_manifest,\n", - " vocab_filepath=args.vocab_path,\n", - " mean_std_filepath=args.mean_std_path,\n", - " augmentation_config='{}',\n", - " #max_duration=float('inf'),\n", - " max_duration=27.0,\n", - " min_duration=0.0,\n", - " stride_ms=10.0,\n", - " window_ms=20.0,\n", - " max_freq=None,\n", - " specgram_type=args.specgram_type,\n", - " use_dB_normalization=True,\n", - " random_seed=0,\n", - " keep_transcription_text=False,\n", - " is_training=False,\n", - " batch_size=args.num_samples,\n", - " sortagrad=True,\n", - " shuffle_method=None,\n", - " dist=False)\n", - "\n", - "\n", - "import paddle\n", - "from paddle import nn\n", - "from paddle.nn import functional as F\n", - "from paddle.nn import initializer as I\n", - "\n", - "import math\n", - "\n", - "def brelu(x, t_min=0.0, t_max=24.0, name=None):\n", - " t_min = paddle.to_tensor(t_min)\n", - " t_max = paddle.to_tensor(t_max)\n", - " return x.maximum(t_min).minimum(t_max)\n", - "\n", - "def sequence_mask(x_len, max_len=None, dtype='float32'):\n", - " max_len = max_len or x_len.max()\n", - " x_len = paddle.unsqueeze(x_len, -1)\n", - " row_vector = paddle.arange(max_len)\n", - " mask = row_vector > x_len # maybe a bug\n", - " mask = paddle.cast(mask, dtype)\n", - " print(f'seq mask: {mask}')\n", - " return mask\n", - "\n", - "\n", - "class ConvBn(nn.Layer):\n", - " def __init__(self, num_channels_in, num_channels_out, kernel_size, stride,\n", - " padding, act):\n", - "\n", - " super().__init__()\n", - " self.kernel_size = kernel_size\n", - " self.stride = stride\n", - " self.padding = padding\n", - "\n", - " self.conv = nn.Conv2D(\n", - " num_channels_in,\n", - " num_channels_out,\n", - " kernel_size=kernel_size,\n", - " stride=stride,\n", - " padding=padding,\n", - " weight_attr=None,\n", - " bias_attr=None,\n", - " data_format='NCHW')\n", - "\n", - " self.bn = nn.BatchNorm2D(\n", - " num_channels_out,\n", - " weight_attr=None,\n", - " bias_attr=None,\n", - " data_format='NCHW')\n", - " self.act = F.relu if act == 'relu' else brelu\n", - "\n", - " def forward(self, x, x_len):\n", - " \"\"\"\n", - " x(Tensor): audio, shape [B, C, D, T]\n", - " \"\"\"\n", - " x = self.conv(x)\n", - " x = self.bn(x)\n", - " x = self.act(x)\n", - "\n", - " x_len = (x_len - self.kernel_size[1] + 2 * self.padding[1]\n", - " ) // self.stride[1] + 1\n", - "\n", - " # reset padding part to 0\n", - " masks = sequence_mask(x_len) #[B, T]\n", - " masks = masks.unsqueeze(1).unsqueeze(1) # [B, 1, 1, T]\n", - " x = x.multiply(masks)\n", - "\n", - " return x, x_len\n", - "\n", - "\n", - "class ConvStack(nn.Layer):\n", - " def __init__(self, feat_size, num_stacks):\n", - " super().__init__()\n", - " self.feat_size = feat_size # D\n", - " self.num_stacks = num_stacks\n", - "\n", - " self.conv_in = ConvBn(\n", - " num_channels_in=1,\n", - " num_channels_out=32,\n", - " kernel_size=(41, 11), #[D, T]\n", - " stride=(2, 3),\n", - " 
padding=(20, 5),\n", - " act='brelu')\n", - "\n", - " out_channel = 32\n", - " self.conv_stack = nn.Sequential([\n", - " ConvBn(\n", - " num_channels_in=32,\n", - " num_channels_out=out_channel,\n", - " kernel_size=(21, 11),\n", - " stride=(2, 1),\n", - " padding=(10, 5),\n", - " act='brelu') for i in range(num_stacks - 1)\n", - " ])\n", - "\n", - " # conv output feat_dim\n", - " output_height = (feat_size - 1) // 2 + 1\n", - " for i in range(self.num_stacks - 1):\n", - " output_height = (output_height - 1) // 2 + 1\n", - " self.output_height = out_channel * output_height\n", - "\n", - " def forward(self, x, x_len):\n", - " \"\"\"\n", - " x: shape [B, C, D, T]\n", - " x_len : shape [B]\n", - " \"\"\"\n", - " print(f\"conv in: {x_len}\")\n", - " x, x_len = self.conv_in(x, x_len)\n", - " for i, conv in enumerate(self.conv_stack):\n", - " print(f\"conv in: {x_len}\")\n", - " x, x_len = conv(x, x_len)\n", - " print(f\"conv out: {x_len}\")\n", - " return x, x_len\n", - " \n", - " \n", - "\n", - "class RNNCell(nn.RNNCellBase):\n", - " r\"\"\"\n", - " Elman RNN (SimpleRNN) cell. Given the inputs and previous states, it \n", - " computes the outputs and updates states.\n", - " The formula used is as follows:\n", - " .. math::\n", - " h_{t} & = act(x_{t} + b_{ih} + W_{hh}h_{t-1} + b_{hh})\n", - " y_{t} & = h_{t}\n", - " \n", - " where :math:`act` is for :attr:`activation`.\n", - " \"\"\"\n", - "\n", - " def __init__(self,\n", - " hidden_size,\n", - " activation=\"tanh\",\n", - " weight_ih_attr=None,\n", - " weight_hh_attr=None,\n", - " bias_ih_attr=None,\n", - " bias_hh_attr=None,\n", - " name=None):\n", - " super().__init__()\n", - " std = 1.0 / math.sqrt(hidden_size)\n", - " self.weight_hh = self.create_parameter(\n", - " (hidden_size, hidden_size),\n", - " weight_hh_attr,\n", - " default_initializer=I.Uniform(-std, std))\n", - " # self.bias_ih = self.create_parameter(\n", - " # (hidden_size, ),\n", - " # bias_ih_attr,\n", - " # is_bias=True,\n", - " # default_initializer=I.Uniform(-std, std))\n", - " self.bias_ih = None\n", - " self.bias_hh = self.create_parameter(\n", - " (hidden_size, ),\n", - " bias_hh_attr,\n", - " is_bias=True,\n", - " default_initializer=I.Uniform(-std, std))\n", - "\n", - " self.hidden_size = hidden_size\n", - " if activation not in [\"tanh\", \"relu\", \"brelu\"]:\n", - " raise ValueError(\n", - " \"activation for SimpleRNNCell should be tanh or relu, \"\n", - " \"but get {}\".format(activation))\n", - " self.activation = activation\n", - " self._activation_fn = paddle.tanh \\\n", - " if activation == \"tanh\" \\\n", - " else F.relu\n", - " if activation == 'brelu':\n", - " self._activation_fn = brelu\n", - "\n", - " def forward(self, inputs, states=None):\n", - " if states is None:\n", - " states = self.get_initial_states(inputs, self.state_shape)\n", - " pre_h = states\n", - " i2h = inputs\n", - " if self.bias_ih is not None:\n", - " i2h += self.bias_ih\n", - " h2h = paddle.matmul(pre_h, self.weight_hh, transpose_y=True)\n", - " if self.bias_hh is not None:\n", - " h2h += self.bias_hh\n", - " h = self._activation_fn(i2h + h2h)\n", - " return h, h\n", - "\n", - " @property\n", - " def state_shape(self):\n", - " return (self.hidden_size, )\n", - "\n", - "\n", - "class GRUCellShare(nn.RNNCellBase):\n", - " r\"\"\"\n", - " Gated Recurrent Unit (GRU) RNN cell. Given the inputs and previous states, \n", - " it computes the outputs and updates states.\n", - " The formula for GRU used is as follows:\n", - " .. 
math::\n", - " r_{t} & = \\sigma(W_{ir}x_{t} + b_{ir} + W_{hr}h_{t-1} + b_{hr})\n", - " z_{t} & = \\sigma(W_{iz}x_{t} + b_{iz} + W_{hz}h_{t-1} + b_{hz})\n", - " \\widetilde{h}_{t} & = \\tanh(W_{ic}x_{t} + b_{ic} + r_{t} * (W_{hc}h_{t-1} + b_{hc}))\n", - " h_{t} & = z_{t} * h_{t-1} + (1 - z_{t}) * \\widetilde{h}_{t}\n", - " y_{t} & = h_{t}\n", - " \n", - " where :math:`\\sigma` is the sigmoid fucntion, and * is the elemetwise \n", - " multiplication operator.\n", - " \"\"\"\n", - "\n", - " def __init__(self,\n", - " input_size,\n", - " hidden_size,\n", - " weight_ih_attr=None,\n", - " weight_hh_attr=None,\n", - " bias_ih_attr=None,\n", - " bias_hh_attr=None,\n", - " name=None):\n", - " super().__init__()\n", - " std = 1.0 / math.sqrt(hidden_size)\n", - " self.weight_hh = self.create_parameter(\n", - " (3 * hidden_size, hidden_size),\n", - " weight_hh_attr,\n", - " default_initializer=I.Uniform(-std, std))\n", - " # self.bias_ih = self.create_parameter(\n", - " # (3 * hidden_size, ),\n", - " # bias_ih_attr,\n", - " # is_bias=True,\n", - " # default_initializer=I.Uniform(-std, std))\n", - " self.bias_ih = None\n", - " self.bias_hh = self.create_parameter(\n", - " (3 * hidden_size, ),\n", - " bias_hh_attr,\n", - " is_bias=True,\n", - " default_initializer=I.Uniform(-std, std))\n", - "\n", - " self.hidden_size = hidden_size\n", - " self.input_size = input_size\n", - " self._gate_activation = F.sigmoid\n", - " #self._activation = paddle.tanh\n", - " self._activation = F.relu\n", - "\n", - " def forward(self, inputs, states=None):\n", - " if states is None:\n", - " states = self.get_initial_states(inputs, self.state_shape)\n", - "\n", - " pre_hidden = states\n", - " x_gates = inputs\n", - " if self.bias_ih is not None:\n", - " x_gates = x_gates + self.bias_ih\n", - " h_gates = paddle.matmul(pre_hidden, self.weight_hh, transpose_y=True)\n", - " if self.bias_hh is not None:\n", - " h_gates = h_gates + self.bias_hh\n", - "\n", - " x_r, x_z, x_c = paddle.split(x_gates, num_or_sections=3, axis=1)\n", - " h_r, h_z, h_c = paddle.split(h_gates, num_or_sections=3, axis=1)\n", - "\n", - " r = self._gate_activation(x_r + h_r)\n", - " z = self._gate_activation(x_z + h_z)\n", - " c = self._activation(x_c + r * h_c) # apply reset gate after mm\n", - " h = (pre_hidden - c) * z + c\n", - " # https://www.paddlepaddle.org.cn/documentation/docs/zh/api/paddle/fluid/layers/dynamic_gru_cn.html#dynamic-gru\n", - " #h = (1-z) * pre_hidden + z * c\n", - "\n", - " return h, h\n", - "\n", - " @property\n", - " def state_shape(self):\n", - " r\"\"\"\n", - " The `state_shape` of GRUCell is a shape `[hidden_size]` (-1 for batch\n", - " size would be automatically inserted into shape). 
The shape corresponds\n", - " to the shape of :math:`h_{t-1}`.\n", - " \"\"\"\n", - " return (self.hidden_size, )\n", - "\n", - "\n", - "class BiRNNWithBN(nn.Layer):\n", - " \"\"\"Bidirectonal simple rnn layer with sequence-wise batch normalization.\n", - " The batch normalization is only performed on input-state weights.\n", - "\n", - " :param name: Name of the layer parameters.\n", - " :type name: string\n", - " :param size: Dimension of RNN cells.\n", - " :type size: int\n", - " :param share_weights: Whether to share input-hidden weights between\n", - " forward and backward directional RNNs.\n", - " :type share_weights: bool\n", - " :return: Bidirectional simple rnn layer.\n", - " :rtype: Variable\n", - " \"\"\"\n", - "\n", - " def __init__(self, i_size, h_size, share_weights):\n", - " super().__init__()\n", - " self.share_weights = share_weights\n", - " if self.share_weights:\n", - " #input-hidden weights shared between bi-directional rnn.\n", - " self.fw_fc = nn.Linear(i_size, h_size, bias_attr=False)\n", - " # batch norm is only performed on input-state projection\n", - " self.fw_bn = nn.BatchNorm1D(\n", - " h_size, bias_attr=None, data_format='NLC')\n", - " self.bw_fc = self.fw_fc\n", - " self.bw_bn = self.fw_bn\n", - " else:\n", - " self.fw_fc = nn.Linear(i_size, h_size, bias_attr=False)\n", - " self.fw_bn = nn.BatchNorm1D(\n", - " h_size, bias_attr=None, data_format='NLC')\n", - " self.bw_fc = nn.Linear(i_size, h_size, bias_attr=False)\n", - " self.bw_bn = nn.BatchNorm1D(\n", - " h_size, bias_attr=None, data_format='NLC')\n", - "\n", - " self.fw_cell = RNNCell(hidden_size=h_size, activation='brelu')\n", - " self.bw_cell = RNNCell(hidden_size=h_size, activation='brelu')\n", - " self.fw_rnn = nn.RNN(\n", - " self.fw_cell, is_reverse=False, time_major=False) #[B, T, D]\n", - " self.bw_rnn = nn.RNN(\n", - " self.fw_cell, is_reverse=True, time_major=False) #[B, T, D]\n", - "\n", - " def forward(self, x, x_len):\n", - " # x, shape [B, T, D]\n", - " fw_x = self.fw_bn(self.fw_fc(x))\n", - " bw_x = self.bw_bn(self.bw_fc(x))\n", - " fw_x, _ = self.fw_rnn(inputs=fw_x, sequence_length=x_len)\n", - " bw_x, _ = self.bw_rnn(inputs=bw_x, sequence_length=x_len)\n", - " x = paddle.concat([fw_x, bw_x], axis=-1)\n", - " return x, x_len\n", - "\n", - "\n", - "class BiGRUWithBN(nn.Layer):\n", - " \"\"\"Bidirectonal gru layer with sequence-wise batch normalization.\n", - " The batch normalization is only performed on input-state weights.\n", - "\n", - " :param name: Name of the layer.\n", - " :type name: string\n", - " :param input: Input layer.\n", - " :type input: Variable\n", - " :param size: Dimension of GRU cells.\n", - " :type size: int\n", - " :param act: Activation type.\n", - " :type act: string\n", - " :return: Bidirectional GRU layer.\n", - " :rtype: Variable\n", - " \"\"\"\n", - "\n", - " def __init__(self, i_size, h_size, act):\n", - " super().__init__()\n", - " hidden_size = h_size * 3\n", - " self.fw_fc = nn.Linear(i_size, hidden_size, bias_attr=False)\n", - " self.fw_bn = nn.BatchNorm1D(\n", - " hidden_size, bias_attr=None, data_format='NLC')\n", - " self.bw_fc = nn.Linear(i_size, hidden_size, bias_attr=False)\n", - " self.bw_bn = nn.BatchNorm1D(\n", - " hidden_size, bias_attr=None, data_format='NLC')\n", - "\n", - " self.fw_cell = GRUCellShare(input_size=hidden_size, hidden_size=h_size)\n", - " self.bw_cell = GRUCellShare(input_size=hidden_size, hidden_size=h_size)\n", - " self.fw_rnn = nn.RNN(\n", - " self.fw_cell, is_reverse=False, time_major=False) #[B, T, D]\n", - " self.bw_rnn = 
nn.RNN(\n", - " self.fw_cell, is_reverse=True, time_major=False) #[B, T, D]\n", - "\n", - " def forward(self, x, x_len):\n", - " # x, shape [B, T, D]\n", - " fw_x = self.fw_bn(self.fw_fc(x))\n", - " bw_x = self.bw_bn(self.bw_fc(x))\n", - " fw_x, _ = self.fw_rnn(inputs=fw_x, sequence_length=x_len)\n", - " bw_x, _ = self.bw_rnn(inputs=bw_x, sequence_length=x_len)\n", - " x = paddle.concat([fw_x, bw_x], axis=-1)\n", - " return x, x_len\n", - "\n", - "\n", - "class RNNStack(nn.Layer):\n", - " \"\"\"RNN group with stacked bidirectional simple RNN or GRU layers.\n", - "\n", - " :param input: Input layer.\n", - " :type input: Variable\n", - " :param size: Dimension of RNN cells in each layer.\n", - " :type size: int\n", - " :param num_stacks: Number of stacked rnn layers.\n", - " :type num_stacks: int\n", - " :param use_gru: Use gru if set True. Use simple rnn if set False.\n", - " :type use_gru: bool\n", - " :param share_rnn_weights: Whether to share input-hidden weights between\n", - " forward and backward directional RNNs.\n", - " It is only available when use_gru=False.\n", - " :type share_weights: bool\n", - " :return: Output layer of the RNN group.\n", - " :rtype: Variable\n", - " \"\"\"\n", - "\n", - " def __init__(self, i_size, h_size, num_stacks, use_gru, share_rnn_weights):\n", - " super().__init__()\n", - " self.rnn_stacks = nn.LayerList()\n", - " for i in range(num_stacks):\n", - " if use_gru:\n", - " #default:GRU using tanh\n", - " self.rnn_stacks.append(\n", - " BiGRUWithBN(i_size=i_size, h_size=h_size, act=\"relu\"))\n", - " else:\n", - " self.rnn_stacks.append(\n", - " BiRNNWithBN(\n", - " i_size=i_size,\n", - " h_size=h_size,\n", - " share_weights=share_rnn_weights))\n", - " i_size = h_size * 2\n", - "\n", - " def forward(self, x, x_len):\n", - " \"\"\"\n", - " x: shape [B, T, D]\n", - " x_len: shpae [B]\n", - " \"\"\"\n", - " for i, rnn in enumerate(self.rnn_stacks):\n", - " x, x_len = rnn(x, x_len)\n", - " masks = sequence_mask(x_len) #[B, T]\n", - " masks = masks.unsqueeze(-1) # [B, T, 1]\n", - " x = x.multiply(masks)\n", - " return x, x_len\n", - "\n", - " \n", - "class DeepSpeech2Test(DeepSpeech2):\n", - " def __init__(self,\n", - " feat_size,\n", - " dict_size,\n", - " num_conv_layers=2,\n", - " num_rnn_layers=3,\n", - " rnn_size=256,\n", - " use_gru=False,\n", - " share_rnn_weights=True):\n", - " super().__init__(feat_size,\n", - " dict_size,\n", - " num_conv_layers=2,\n", - " num_rnn_layers=3,\n", - " rnn_size=256,\n", - " use_gru=False,\n", - " share_rnn_weights=True)\n", - " self.feat_size = feat_size # 161 for linear\n", - " self.dict_size = dict_size\n", - "\n", - " self.conv = ConvStack(feat_size, num_conv_layers)\n", - " \n", - "# self.fc = nn.Linear(1312, dict_size + 1)\n", - "\n", - " i_size = self.conv.output_height # H after conv stack\n", - " self.rnn = RNNStack(\n", - " i_size=i_size,\n", - " h_size=rnn_size,\n", - " num_stacks=num_rnn_layers,\n", - " use_gru=use_gru,\n", - " share_rnn_weights=share_rnn_weights)\n", - " \n", - " self.fc = nn.Linear(rnn_size * 2, dict_size + 1)\n", - " \n", - " def infer(self, audio, audio_len):\n", - " # [B, D, T] -> [B, C=1, D, T]\n", - " audio = audio.unsqueeze(1)\n", - "\n", - " # convolution group\n", - " x, audio_len = self.conv(audio, audio_len)\n", - " print('conv out', x.shape)\n", - "\n", - " # convert data from convolution feature map to sequence of vectors\n", - " B, C, D, T = paddle.shape(x)\n", - " x = x.transpose([0, 3, 1, 2]) #[B, T, C, D]\n", - " x = x.reshape([B, T, C * D]) #[B, T, C*D]\n", - " print('rnn 
input', x.shape)\n", - "\n", - " # remove padding part\n", - " x, audio_len = self.rnn(x, audio_len) #[B, T, D]\n", - " print('rnn output', x.shape)\n", - "\n", - " logits = self.fc(x) #[B, T, V + 1]\n", - "\n", - " #ctcdecoder need probs, not log_probs\n", - " probs = F.softmax(logits)\n", - "\n", - " return logits, probs, audio_len\n", - "\n", - " def forward(self, audio, audio_len, text, text_len):\n", - " \"\"\"\n", - " audio: shape [B, D, T]\n", - " text: shape [B, T]\n", - " audio_len: shape [B]\n", - " text_len: shape [B]\n", - " \"\"\"\n", - " return self.infer(audio, audio_len)\n", - " \n", - "\n", - "feat_dim=161\n", - "\n", - "model = DeepSpeech2Test(\n", - " feat_size=feat_dim,\n", - " dict_size=batch_reader.dataset.vocab_size,\n", - " num_conv_layers=args.num_conv_layers,\n", - " num_rnn_layers=args.num_rnn_layers,\n", - " rnn_size=1024,\n", - " use_gru=args.use_gru,\n", - " share_rnn_weights=args.share_rnn_weights,\n", - " )\n", - "dp_model = model\n", - "#dp_model = paddle.DataParallel(model)\n", - "\n", - "loss_fn = DeepSpeech2Loss(batch_reader.dataset.vocab_size)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "divided-incentive", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "discrete-conjunction", - "metadata": {}, - "outputs": [], - "source": [ - "audio, audio_len, text, text_len = None, None, None, None\n", - "\n", - "for idx, inputs in enumerate(batch_reader):\n", - " audio, audio_len, text, text_len = inputs\n", - "# print(idx)\n", - "# print('a', audio.shape, audio.place)\n", - "# print('t', text)\n", - "# print('al', audio_len)\n", - "# print('tl', text_len)\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "protected-announcement", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "conv in: Tensor(shape=[5], dtype=int64, place=CUDAPinnedPlace, stop_gradient=True,\n", - " [163, 167, 180, 186, 186])\n", - "seq mask: Tensor(shape=[5, 62], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0.],\n", - " [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0.],\n", - " [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0.],\n", - " [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],\n", - " [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]])\n", - "conv in: Tensor(shape=[5], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [55, 56, 60, 62, 62])\n", - "seq mask: 
Tensor(shape=[5, 62], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n",
- "       [[1., 1., 1., ..., 0., 0., 0.],\n",
- "        [1., 1., 1., ..., 0., 0., 0.],\n",
- "        ...,\n",
- "        [1., 1., 1., ..., 1., 1., 1.]]) (each row holds ones up to its utterance length [55, 56, 60, 62, 62] and zeros after)\n",
- "conv out: Tensor(shape=[5], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n",
- "       [55, 56, 60, 62, 62])\n",
- "conv out [5, 32, 41, 62]\n",
- "rnn input [5, 62, 1312]\n",
- "seq mask: Tensor(shape=[5, 62], dtype=float32, place=CUDAPlace(0), stop_gradient=True, [same [5, 62] mask as above, re-printed by the first RNN stack])\n",
- "seq mask: Tensor(shape=[5, 62], dtype=float32, place=CUDAPlace(0), stop_gradient=True, [same [5, 62] mask as above])\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:77: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working\n",
- "  return (isinstance(seq, collections.Sequence) and\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "seq mask: Tensor(shape=[5, 62], dtype=float32, place=CUDAPlace(0), stop_gradient=True, [same [5, 62] mask as above])\n",
- "rnn output [5, 62, 2048]\n",
- "logits len Tensor(shape=[5], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n",
- "       [55, 56, 60, 62, 62])\n",
- "loss Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n",
- "       [2316.82153320])\n"
- ]
- }
- ],
- "source": [
- "outputs = dp_model(audio, audio_len, text, text_len)\n",
- "logits, _, logits_len = outputs\n",
- "print('logits len', logits_len)\n",
- "loss = loss_fn.forward(logits, text, logits_len, text_len)\n",
- "print('loss', loss)"
- ]
- },
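The `seq mask` tensors printed above are plain length masks: row i is 1.0 for the first `lengths[i]` frames of utterance i and 0.0 for the padding, and the same mask is re-printed by each RNN stack. A minimal sketch of how such a mask can be built from the length tensor, assuming only Paddle's public tensor API (illustrative code, not part of the patch):

```python
import paddle

# Frame counts per utterance after conv subsampling, as printed above.
lengths = paddle.to_tensor([55, 56, 60, 62, 62])
max_len = 62

# Broadcast a [1, T] position ramp against [B, 1] lengths -> boolean [B, T].
positions = paddle.arange(max_len).unsqueeze(0)              # shape [1, 62]
mask = (positions < lengths.unsqueeze(1)).astype('float32')  # shape [5, 62]
# Row i now holds ones for the first lengths[i] positions and zeros after,
# matching the seq mask tensors in the cell output.
```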
- {
- "cell_type": "code",
- "execution_count": 24,
- "id": "universal-myrtle",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "param grad: conv.conv_in.conv.weight: shape: [32, 1, 41, 11] stop_grad: False grad: None\n",
- "param grad: conv.conv_in.conv.bias: shape: [32] stop_grad: False grad: None\n",
- "param grad: conv.conv_in.bn.weight: shape: [32] stop_grad: False grad: None\n",
- "param grad: conv.conv_in.bn.bias: shape: [32] stop_grad: False grad: None\n",
- "param grad: conv.conv_in.bn._mean: shape: [32] stop_grad: True grad: None\n",
- "param grad: conv.conv_in.bn._variance: shape: [32] stop_grad: True grad: None\n",
- "param grad: conv.conv_stack.0.conv.weight: shape: [32, 32, 21, 11] stop_grad: False grad: None\n",
- "param grad: conv.conv_stack.0.conv.bias: shape: [32] stop_grad: False grad: None\n",
- "param grad: conv.conv_stack.0.bn.weight: shape: [32] stop_grad: False grad: None\n",
- "param grad: conv.conv_stack.0.bn.bias: shape: [32] stop_grad: False grad: None\n",
- "param grad: conv.conv_stack.0.bn._mean: shape: [32] stop_grad: True grad: None\n",
- "param grad: conv.conv_stack.0.bn._variance: shape: [32] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_fc.weight: shape: [1312, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_bn.weight: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_bn.bias: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_bn._mean: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_bn._variance: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.bw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.bw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_fc.weight: shape: [2048, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_bn.weight: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_bn.bias: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_bn._mean: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_bn._variance: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.bw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.bw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_fc.weight: shape: [2048, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_bn.weight: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_bn.bias: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_bn._mean: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_bn._variance: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.bw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.bw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: fc.weight: shape: [2048, 4299] stop_grad: False grad: None\n",
- "param grad: fc.bias: shape: [4299] stop_grad: False grad: None\n"
- ]
- }
- ],
- "source": [
- "for n, p in dp_model.named_parameters():\n",
- "    print(\n",
- "        f\"param grad: {n}: shape: {p.shape} stop_grad: {p.stop_gradient} grad: {p.grad}\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 25,
- "id": "referenced-double",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "param grad: conv.conv_in.conv.weight: shape: [32, 1, 41, 11] stop_grad: False grad: [[[[ 2.1243238 1.696022 3.770659 ... 5.234652 5.4865217 4.757795 ] ... [ 1.1361816 1.3232857 0.687318 ... -0.23925456 -0.43679112 -0.79297894]]]] (interior values elided)\n",
- "param grad: conv.conv_in.conv.bias: shape: [32] stop_grad: False grad: [ 5.9604645e-07 -3.9339066e-06 -1.0728836e-06 ... 4.7683716e-07 -2.7418137e-06] (32 values, abridged)\n",
- "param grad: conv.conv_in.bn.weight: shape: [32] stop_grad: False grad: [ 2.363316 3.286464 1.9607866 ... -5.628145 -1.0894046 ] (32 values, abridged)\n",
- "param grad: conv.conv_in.bn.bias: shape: [32] stop_grad: False grad: [ 1.5193373 1.8838218 3.7722278 ... -4.116946 -0.9909375 ] (32 values, abridged)\n",
- "param grad: conv.conv_in.bn._mean: shape: [32] stop_grad: True grad: None\n",
- "param grad: conv.conv_in.bn._variance: shape: [32] stop_grad: True grad: None\n",
- "param grad: conv.conv_stack.0.conv.weight: shape: [32, 32, 21, 11] stop_grad: False grad: [[[[ 6.20494843e-01 5.95983505e-01 -1.48909020e+00 ... -1.95339048e+00] ... [-5.73050356e+00 -2.74379373e+00 -3.70248461e+00 ... -1.80156028e+00]]]] (interior values elided)\n",
- "param grad: conv.conv_stack.0.conv.bias: shape: [32] stop_grad: False grad: [-1.4305115e-06 0.0000000e+00 -4.0531158e-06 ... 2.6524067e-06 -1.1920929e-06] (32 values, abridged)\n",
- "param grad: conv.conv_stack.0.bn.weight: shape: [32] stop_grad: False grad: [-3.7669735 1.5226867 1.759756 ... -0.24313992 0.31392363] (32 values, abridged)\n",
- "param grad: conv.conv_stack.0.bn.bias: shape: [32] stop_grad: False grad: [-0.6251638 2.833331 0.6993131 ... -0.40964937 -1.4454535 ] (32 values, abridged)\n",
- "param grad: conv.conv_stack.0.bn._mean: shape: [32] stop_grad: True grad: None\n",
- "param grad: conv.conv_stack.0.bn._variance: shape: [32] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_fc.weight: shape: [1312, 1024] stop_grad: False grad: [[-0.46178514 0.1095643 0.06441769 ... 0.42020613 -0.34181893 -0.0658682 ] ... [-0.04543798 0.09902512 -0.10745425 ... -0.06685166 -0.3055201 0.0752247 ]] (interior rows elided)\n",
- "param grad: rnn.rnn_stacks.0.fw_bn.weight: shape: [1024] stop_grad: False grad: [-0.07338604 0.64991236 0.5465856 ... 0.507725 0.14061031 0.3020359 ]\n",
- "param grad: rnn.rnn_stacks.0.fw_bn.bias: shape: [1024] stop_grad: False grad: [-0.41395143 -0.28493872 0.36796764 ... 0.2387953 0.06732331 0.16263628]\n",
- "param grad: rnn.rnn_stacks.0.fw_bn._mean: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_bn._variance: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.0.fw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: [[-0.09370177 -0.12264141 -0.08237482 ... -0.50241685 -0.149155 -0.25661892] ... [ 0.19052255 0.03642382 -0.14313167 ... 0.2611448 0.20763844 0.26846847]] (interior rows elided)\n",
- "param grad: rnn.rnn_stacks.0.fw_cell.bias_hh: shape: [1024] stop_grad: False grad: [-0.4139514 -0.28493875 0.36796758 ... 0.23879525 0.06732336 0.16263627]\n",
- "param grad: rnn.rnn_stacks.0.bw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.0.bw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_fc.weight: shape: [2048, 1024] stop_grad: False grad: [[ 0.04214853 -0.1710323 0.17557406 ... 0.11926915 0.21577051 -0.30598596] ... [-0.05800886 0.13750821 0.02450509 ... 0.245736 0.07425706 -0.17761081]] (interior rows elided)\n",
- "param grad: rnn.rnn_stacks.1.fw_bn.weight: shape: [1024] stop_grad: False grad: [-0.45080703 0.19005743 0.077441 ... -0.24504453 0.19666554 -0.10503208]\n",
- "param grad: rnn.rnn_stacks.1.fw_bn.bias: shape: [1024] stop_grad: False grad: [-0.55867654 0.04237206 0.03389215 ... -0.35602498 0.25528812 0.11344345]\n",
- "param grad: rnn.rnn_stacks.1.fw_bn._mean: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_bn._variance: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.1.fw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: [[-0.48457903 0.04466334 -0.19785863 ... -0.0254025 -0.10338341 -0.29202533] ... [ 0.37554163 -0.19249167 0.14591683 ... 0.25602737 0.40088275 0.41056633]] (interior rows elided)\n",
- "param grad: rnn.rnn_stacks.1.fw_cell.bias_hh: shape: [1024] stop_grad: False grad: [-0.55867654 0.04237211 0.0338921 ... -0.35602498 0.2552881 0.11344352]\n",
- "param grad: rnn.rnn_stacks.1.bw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.1.bw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_fc.weight: shape: [2048, 1024] stop_grad: False grad: [[-0.28007814 -0.09206 -0.01297755 ... -0.2557205 -0.2693453 0.05862035] ... [ 0.1051331 -0.4762463 0.11194798 ... -0.26766616 -0.30734932 0.17856634]] (interior rows elided)\n",
- "param grad: rnn.rnn_stacks.2.fw_bn.weight: shape: [1024] stop_grad: False grad: [-0.02791309 -0.992517 0.63012564 ... -1.1830902 1.4646478 1.6333911 ]\n",
- "param grad: rnn.rnn_stacks.2.fw_bn.bias: shape: [1024] stop_grad: False grad: [-0.10834587 -1.7079136 0.81259465 ... -1.4478713 1.455745 2.069446 ]\n",
- "param grad: rnn.rnn_stacks.2.fw_bn._mean: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_bn._variance: shape: [1024] stop_grad: True grad: None\n",
- "param grad: rnn.rnn_stacks.2.fw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: [[-0.14363798 -0.06933184 0.02901152 ... -0.19233373 -0.03206367 -0.00845779] ... [ 0.3763076 0.54033077 0.9940485 ... 1.087821 0.72288674 1.2852117 ]] (interior rows elided)\n",
- "param grad: rnn.rnn_stacks.2.fw_cell.bias_hh: shape: [1024] stop_grad: False grad: [-0.10834593 -1.7079139 0.8125948 ... -1.4478711 1.4557447 2.0694466 ]\n",
- "param grad: rnn.rnn_stacks.2.bw_cell.weight_hh: shape: [1024, 1024] stop_grad: False grad: None\n",
- "param grad: rnn.rnn_stacks.2.bw_cell.bias_hh: shape: [1024] stop_grad: False grad: None\n",
- "param grad: fc.weight: shape: [2048, 4299] stop_grad: False grad: [[ 1.4382483e-02 2.0160766e-02 1.2322801e-02 ... 1.0075266e-02 7.4421698e-03 -2.3925617e+01] ... [ 4.7438074e-02 5.8287360e-02 3.4546286e-02 ... 3.0827176e-02 2.2168703e-02 -6.7901680e+01]] (interior rows elided)\n",
- "param grad: fc.bias: shape: [4299] stop_grad: False grad: [ 8.8967547e-02 1.0697905e-01 6.5251388e-02 ... 6.1503030e-02 4.3404289e-02 -1.3512518e+02]\n"
- ]
- }
- ],
- "source": [
- "loss.backward(retain_graph=False)\n",
- "for n, p in dp_model.named_parameters():\n",
- "    print(\n",
- "        f\"param grad: {n}: shape: {p.shape} stop_grad: {p.stop_gradient} grad: {p.grad}\")"
- ]
- },
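Note that even after `backward()` the `_mean`/`_variance` entries still print `grad: None`: in Paddle these running statistics are created as non-trainable parameters (`stop_gradient=True`) that are updated inside `forward()`, so autograd never produces a gradient for them. A small check, assuming Paddle's BatchNorm layer keeps the private `_mean` attribute seen in the printout (illustrative, not from the patch):

```python
import paddle

bn = paddle.nn.BatchNorm1D(4)
print(bn.weight.stop_gradient)  # False: gamma is trained via gradients
print(bn._mean.stop_gradient)   # True: running mean is updated in forward(), never by autograd
```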
- {
- "cell_type": "code",
- "execution_count": 26,
- "id": "selected-crazy",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "[1.]\n"
- ]
- }
- ],
- "source": [
- "print(loss.grad)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "bottom-engineer",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "stuffed-yeast",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.0"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
\ No newline at end of file
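`print(loss.grad)` in the last executed cell returns `[1.]` because `backward()` seeds the root of the autograd graph with d(loss)/d(loss) = 1 and propagates it from there. A tiny standalone illustration, assuming Paddle dygraph (not code from the patch):

```python
import paddle

x = paddle.to_tensor([3.0], stop_gradient=False)
loss = (x * x).sum()
loss.backward()

print(loss.grad)  # [1.]: the seed gradient d(loss)/d(loss)
print(x.grad)     # [6.]: d(x^2)/dx = 2x at x = 3
```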
diff --git a/.notebook/u2_confermer_model_wenet.ipynb b/.notebook/u2_confermer_model_wenet.ipynb
deleted file mode 100644
index 4f2c9632f..000000000
--- a/.notebook/u2_confermer_model_wenet.ipynb
+++ /dev/null
@@ -1,4608 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "id": "choice-grade",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "/workspace/DeepSpeech-2.x\n"
- ]
- },
- {
- "data": {
- "text/plain": [
- "'/workspace/DeepSpeech-2.x'"
- ]
- },
- "execution_count": 1,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "%cd ..\n",
- "%pwd"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "id": "broke-broad",
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:26: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n",
- "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n",
- "  def convert_to_list(value, n, name, dtype=np.int):\n",
- "register user softmax to paddle, remove this when fixed!\n",
- "register user log_softmax to paddle, remove this when fixed!\n",
- "register user sigmoid to paddle, remove this when fixed!\n",
- "register user log_sigmoid to paddle, remove this when fixed!\n",
- "register user relu to paddle, remove this when fixed!\n",
- "override cat of paddle if exists or register, remove this when fixed!\n",
- "override item of paddle.Tensor if exists or register, remove this when fixed!\n",
- "override long of paddle.Tensor if exists or register, remove this when fixed!\n",
- "override new_full of paddle.Tensor if exists or register, remove this when fixed!\n",
- "override eq of paddle.Tensor if exists or register, remove this when fixed!\n",
- "override eq of paddle if exists or register, remove this when fixed!\n",
- "override contiguous of paddle.Tensor if exists or register, remove this when fixed!\n",
- "override size of paddle.Tensor (`to_static` do not process `size` property, maybe some `paddle` api dependent on it), remove this when fixed!\n",
- "register user view to paddle.Tensor, remove this when fixed!\n",
- "register user view_as to paddle.Tensor, remove this when fixed!\n",
- "register user masked_fill to paddle.Tensor, remove this when fixed!\n",
- "register user masked_fill_ to paddle.Tensor, remove this when fixed!\n",
- "register user fill_ to paddle.Tensor, remove this when fixed!\n",
- "register user repeat to paddle.Tensor, remove this when fixed!\n",
- "register user softmax to paddle.Tensor, remove this when fixed!\n",
- "register user sigmoid to paddle.Tensor, remove this when fixed!\n",
- "register user relu to paddle.Tensor, remove this when fixed!\n",
- "register user type_as to paddle.Tensor, remove this when fixed!\n",
- "register user to to paddle.Tensor, remove this when fixed!\n",
- "register user float to paddle.Tensor, remove this when fixed!\n",
- "register user tolist to paddle.Tensor, remove this when fixed!\n",
- "register user glu to paddle.nn.functional, remove this when fixed!\n",
- "override ctc_loss of paddle.nn.functional if exists, remove this when fixed!\n",
- "register user Module to paddle.nn, remove this when fixed!\n",
- "register user ModuleList to paddle.nn, remove this when fixed!\n",
- "register user GLU to paddle.nn, remove this when fixed!\n",
- "register user ConstantPad2d to paddle.nn, remove this when fixed!\n",
- "register user export to paddle.jit, remove this when fixed!\n"
- ]
- }
- ],
- "source": [
- "import numpy as np\n",
- "import paddle\n",
- "from yacs.config import CfgNode as CN\n",
- "\n",
- "from deepspeech.models.u2 import U2Model\n",
- "from deepspeech.utils.layer_tools import print_params\n",
- "from deepspeech.utils.layer_tools import summary"
- ]
- },
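`print_params` (imported above from `deepspeech.utils.layer_tools`) walks the model's state dict and prints one `name | shape | numel | trainable` row per entry, which is exactly the table the next cell produces. A rough re-sketch of that behaviour, built only on Paddle's public API (the real utility lives in the repository; this version is an assumption, not the actual implementation):

```python
import numpy as np
import paddle

def print_params(model: paddle.nn.Layer):
    # One row per state-dict entry: name | shape | element count | trainable.
    total = 0
    for name, tensor in model.state_dict().items():
        numel = int(np.prod(tensor.shape))
        total += numel
        print(f"{name} | {tensor.shape} | {numel} | {not tensor.stop_gradient}")
    print(f"Total parameters: {total}")
```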
- {
- "cell_type": "code",
- "execution_count": 3,
- "id": "permanent-summary",
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n",
- "  and should_run_async(code)\n",
- "[INFO 2021/04/20 03:32:21 u2.py:834] U2 Encoder type: conformer\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "encoder.embed.conv.0.weight | [256, 1, 3, 3] | 2304 | True\n",
- "encoder.embed.conv.0.bias | [256] | 256 | True\n",
- "encoder.embed.conv.2.weight | [256, 256, 3, 3] | 589824 | True\n",
- "encoder.embed.conv.2.bias | [256] | 256 | True\n",
- "encoder.embed.out.0.weight | [4864, 256] | 1245184 | True\n",
- "encoder.embed.out.0.bias | [256] | 256 | True\n",
- "encoder.after_norm.weight | [256] | 256 | True\n",
- "encoder.after_norm.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.pos_bias_u | [4, 64] | 256 | True\n",
- "encoder.encoders.0.self_attn.pos_bias_v | [4, 64] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_q.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_q.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_k.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_k.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_v.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_v.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_out.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_out.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n",
- "encoder.encoders.0.feed_forward.w_1.bias | [2048] | 2048 | True\n",
- "encoder.encoders.0.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n",
- "encoder.encoders.0.feed_forward.w_2.bias | [256] | 256 | True\n",
- "encoder.encoders.0.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n",
- "encoder.encoders.0.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n",
- "encoder.encoders.0.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n",
- "encoder.encoders.0.feed_forward_macaron.w_2.bias | [256] | 256 | True\n",
- "encoder.encoders.0.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n",
- "encoder.encoders.0.conv_module.pointwise_conv1.bias | [512] | 512 | True\n",
- "encoder.encoders.0.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n",
- "encoder.encoders.0.conv_module.depthwise_conv.bias | [256] | 256 | True\n",
- "encoder.encoders.0.conv_module.norm.weight | [256] | 256 | True\n",
- "encoder.encoders.0.conv_module.norm.bias | [256] | 256 | True\n",
- "encoder.encoders.0.conv_module.norm._mean | [256] | 256 | False\n",
- "encoder.encoders.0.conv_module.norm._variance | [256] | 256 | False\n",
- "encoder.encoders.0.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n",
- "encoder.encoders.0.conv_module.pointwise_conv2.bias | [256] | 256 | True\n",
- "encoder.encoders.0.norm_ff.weight | [256] | 256 | True\n",
- "encoder.encoders.0.norm_ff.bias | [256] | 256 | True\n",
- "encoder.encoders.0.norm_mha.weight | [256] | 256 | True\n",
- "encoder.encoders.0.norm_mha.bias | [256] | 256 | True\n",
- "encoder.encoders.0.norm_ff_macaron.weight | [256] | 256 | True\n",
- "encoder.encoders.0.norm_ff_macaron.bias | [256] | 256 | True\n",
- "encoder.encoders.0.norm_conv.weight | [256] | 256 | True\n",
- "encoder.encoders.0.norm_conv.bias 
| [256] | 256 | True\n", - "encoder.encoders.0.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.0.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.0.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.0.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.1.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.1.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.1.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.1.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.1.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.1.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.1.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.1.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.1.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.1.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.1.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.1.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.1.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.1.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.1.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.1.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.1.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.1.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.1.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.1.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.1.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.1.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.1.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.1.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.1.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.1.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.1.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.1.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.1.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.1.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.1.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.1.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.1.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.1.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.1.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.1.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.1.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.1.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.1.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.1.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.1.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.2.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.2.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.2.self_attn.linear_q.weight | [256, 256] | 65536 | 
True\n", - "encoder.encoders.2.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.2.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.2.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.2.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.2.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.2.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.2.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.2.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.2.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.2.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.2.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.2.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.2.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.2.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.2.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.2.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.2.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.2.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.2.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.2.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.2.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.2.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.2.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.2.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.2.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.2.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.2.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.2.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.2.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.2.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.2.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.2.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.2.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.2.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.2.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.2.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.2.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.2.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.3.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.3.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.3.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.3.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.3.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.3.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.3.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.3.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.3.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.3.self_attn.linear_out.bias | 
[256] | 256 | True\n", - "encoder.encoders.3.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.3.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.3.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.3.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.3.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.3.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.3.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.3.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.3.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.3.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.3.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.3.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.3.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.3.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.3.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.3.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.3.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.3.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.3.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.3.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.3.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.3.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.3.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.3.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.3.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.3.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.3.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.3.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.3.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.3.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.3.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.4.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.4.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.4.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.4.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.4.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.4.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.4.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.4.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.4.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.4.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.4.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - 
"encoder.encoders.4.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.4.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.4.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.4.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.4.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.4.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.4.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.4.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.4.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.4.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.4.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.4.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.4.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.4.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.4.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.4.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.4.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.4.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.4.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.4.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.5.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.5.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.5.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.5.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.5.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.5.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.5.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.5.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.5.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.5.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.5.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.5.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - 
"encoder.encoders.5.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.5.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.5.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.5.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.5.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.5.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.5.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.5.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.5.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.5.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.5.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.5.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.5.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.5.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.5.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.5.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.5.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.5.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.6.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.6.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.6.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.6.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.6.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.6.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.6.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.6.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.6.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.6.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.6.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.6.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.6.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.6.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.6.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.6.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.6.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.6.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv2.bias | 
[256] | 256 | True\n", - "encoder.encoders.6.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.6.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.6.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.6.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.6.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.6.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.6.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.6.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.6.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.6.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.6.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.6.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.7.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.7.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.7.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.7.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.7.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.7.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.7.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.7.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.7.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.7.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.7.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.7.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.7.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.7.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.7.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.7.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.7.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.7.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.7.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.7.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.7.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.7.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.7.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.7.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.7.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.7.norm_conv.bias | 
[256] | 256 | True\n", - "encoder.encoders.7.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.7.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.7.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.7.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.8.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.8.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.8.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.8.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.8.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.8.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.8.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.8.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.8.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.8.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.8.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.8.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.8.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.8.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.8.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.8.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.8.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.8.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.8.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.8.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.8.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.8.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.8.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.8.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.8.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.8.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.8.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.8.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.8.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.8.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.9.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.9.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.9.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", 
- "encoder.encoders.9.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.9.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.9.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.9.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.9.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.9.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.9.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.9.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.9.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.9.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.9.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.9.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.9.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.9.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.9.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.9.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.9.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.9.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.9.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.9.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.9.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.9.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.9.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.9.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.9.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.9.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.9.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.9.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.9.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.9.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.9.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.10.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.10.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.10.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.10.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.10.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.10.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_out.bias 
| [256] | 256 | True\n", - "encoder.encoders.10.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.10.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.10.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.10.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.10.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.10.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.10.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.10.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.10.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.10.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.10.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.10.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.10.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.10.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.10.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.10.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.10.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.10.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.10.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.10.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.10.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.10.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.10.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.10.concat_linear.bias | [256] | 256 | True\n", - "encoder.encoders.11.self_attn.pos_bias_u | [4, 64] | 256 | True\n", - "encoder.encoders.11.self_attn.pos_bias_v | [4, 64] | 256 | True\n", - "encoder.encoders.11.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_q.bias | [256] | 256 | True\n", - "encoder.encoders.11.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_k.bias | [256] | 256 | True\n", - "encoder.encoders.11.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_v.bias | [256] | 256 | True\n", - "encoder.encoders.11.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_out.bias | [256] | 256 | True\n", - "encoder.encoders.11.self_attn.linear_pos.weight | [256, 256] | 65536 | True\n", - "encoder.encoders.11.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "encoder.encoders.11.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.11.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.11.feed_forward.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_1.weight | 
[256, 2048] | 524288 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_1.bias | [2048] | 2048 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_2.weight | [2048, 256] | 524288 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_2.bias | [256] | 256 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv1.bias | [512] | 512 | True\n", - "encoder.encoders.11.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840 | True\n", - "encoder.encoders.11.conv_module.depthwise_conv.bias | [256] | 256 | True\n", - "encoder.encoders.11.conv_module.norm.weight | [256] | 256 | True\n", - "encoder.encoders.11.conv_module.norm.bias | [256] | 256 | True\n", - "encoder.encoders.11.conv_module.norm._mean | [256] | 256 | False\n", - "encoder.encoders.11.conv_module.norm._variance | [256] | 256 | False\n", - "encoder.encoders.11.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv2.bias | [256] | 256 | True\n", - "encoder.encoders.11.norm_ff.weight | [256] | 256 | True\n", - "encoder.encoders.11.norm_ff.bias | [256] | 256 | True\n", - "encoder.encoders.11.norm_mha.weight | [256] | 256 | True\n", - "encoder.encoders.11.norm_mha.bias | [256] | 256 | True\n", - "encoder.encoders.11.norm_ff_macaron.weight | [256] | 256 | True\n", - "encoder.encoders.11.norm_ff_macaron.bias | [256] | 256 | True\n", - "encoder.encoders.11.norm_conv.weight | [256] | 256 | True\n", - "encoder.encoders.11.norm_conv.bias | [256] | 256 | True\n", - "encoder.encoders.11.norm_final.weight | [256] | 256 | True\n", - "encoder.encoders.11.norm_final.bias | [256] | 256 | True\n", - "encoder.encoders.11.concat_linear.weight | [512, 256] | 131072 | True\n", - "encoder.encoders.11.concat_linear.bias | [256] | 256 | True\n", - "decoder.embed.0.weight | [4233, 256] | 1083648 | True\n", - "decoder.after_norm.weight | [256] | 256 | True\n", - "decoder.after_norm.bias | [256] | 256 | True\n", - "decoder.output_layer.weight | [256, 4233] | 1083648 | True\n", - "decoder.output_layer.bias | [4233] | 4233 | True\n", - "decoder.decoders.0.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.0.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.0.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.0.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.0.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.0.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.0.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.0.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.0.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.0.feed_forward.w_1.bias | [2048] | 2048 | True\n", - 
"decoder.decoders.0.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.0.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.0.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.0.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.0.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.0.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.0.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.0.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.0.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.0.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.0.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.0.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.1.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.1.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.1.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.1.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.1.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.1.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.1.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.1.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.1.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.1.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.1.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.1.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.1.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.1.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_out.bias | [256] | 256 | True\n", - 
"decoder.decoders.2.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.2.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.2.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.2.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.2.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.2.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.2.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.2.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.2.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.2.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.2.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.2.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.2.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.2.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.2.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.3.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.3.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.3.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.3.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.3.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.3.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.3.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.3.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.3.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.3.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.3.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.3.concat_linear1.bias | [256] | 256 | True\n", - 
"decoder.decoders.3.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.3.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.4.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.4.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.4.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.4.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.4.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.4.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.4.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.4.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.4.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.4.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.4.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.4.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.4.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.4.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.5.feed_forward.w_1.weight | [256, 
- "ctc.ctc_lo.weight | [256, 4233] | 1083648 | True\n",
- "ctc.ctc_lo.bias | [4233] | 4233 | True\n",
- "Total parameters: 687.0, 49355282.0 elements.\n"
- ]
- }
- ],
- "source": [
- "conf_str='examples/aishell/s1/conf/conformer.yaml'\n",
- "cfg = CN().load_cfg(open(conf_str))\n",
- "cfg.model.input_dim = 80\n",
- "cfg.model.output_dim = 4233\n",
- "cfg.model.cmvn_file = \"/workspace/wenet/examples/aishell/s0/raw_wav/train/global_cmvn\"\n",
- "cfg.model.cmvn_file_type = 'json'\n",
- "cfg.freeze()\n",
- "\n",
- "model = U2Model(cfg.model)\n",
- "print_params(model)\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "id": "sapphire-agent",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "encoder.global_cmvn.mean | [80] | 80\n",
- "encoder.global_cmvn.istd | [80] | 80\n",
- "encoder.embed.conv.0.weight | [256, 1, 3, 3] | 2304\n",
- "encoder.embed.conv.0.bias | [256] | 256\n",
- "encoder.embed.conv.2.weight | [256, 256, 3, 3] | 589824\n",
- "encoder.embed.conv.2.bias | [256] | 256\n",
- "encoder.embed.out.0.weight | [4864, 256] | 1245184\n",
- "encoder.embed.out.0.bias | [256] | 256\n",
- "encoder.after_norm.weight | [256] | 256\n",
- "encoder.after_norm.bias | [256] | 256\n",
[… encoder.encoders.0 through encoder.encoders.6 omitted: same parameter names and shapes as in the previous cell's output, here without the trainable column …]
"encoder.encoders.6.concat_linear.weight | [512, 256] | 131072\n", - "encoder.encoders.6.concat_linear.bias | [256] | 256\n", - "encoder.encoders.7.self_attn.pos_bias_u | [4, 64] | 256\n", - "encoder.encoders.7.self_attn.pos_bias_v | [4, 64] | 256\n", - "encoder.encoders.7.self_attn.linear_q.weight | [256, 256] | 65536\n", - "encoder.encoders.7.self_attn.linear_q.bias | [256] | 256\n", - "encoder.encoders.7.self_attn.linear_k.weight | [256, 256] | 65536\n", - "encoder.encoders.7.self_attn.linear_k.bias | [256] | 256\n", - "encoder.encoders.7.self_attn.linear_v.weight | [256, 256] | 65536\n", - "encoder.encoders.7.self_attn.linear_v.bias | [256] | 256\n", - "encoder.encoders.7.self_attn.linear_out.weight | [256, 256] | 65536\n", - "encoder.encoders.7.self_attn.linear_out.bias | [256] | 256\n", - "encoder.encoders.7.self_attn.linear_pos.weight | [256, 256] | 65536\n", - "encoder.encoders.7.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.7.feed_forward.w_1.bias | [2048] | 2048\n", - "encoder.encoders.7.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.7.feed_forward.w_2.bias | [256] | 256\n", - "encoder.encoders.7.feed_forward_macaron.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.7.feed_forward_macaron.w_1.bias | [2048] | 2048\n", - "encoder.encoders.7.feed_forward_macaron.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.7.feed_forward_macaron.w_2.bias | [256] | 256\n", - "encoder.encoders.7.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072\n", - "encoder.encoders.7.conv_module.pointwise_conv1.bias | [512] | 512\n", - "encoder.encoders.7.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840\n", - "encoder.encoders.7.conv_module.depthwise_conv.bias | [256] | 256\n", - "encoder.encoders.7.conv_module.norm.weight | [256] | 256\n", - "encoder.encoders.7.conv_module.norm.bias | [256] | 256\n", - "encoder.encoders.7.conv_module.norm._mean | [256] | 256\n", - "encoder.encoders.7.conv_module.norm._variance | [256] | 256\n", - "encoder.encoders.7.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536\n", - "encoder.encoders.7.conv_module.pointwise_conv2.bias | [256] | 256\n", - "encoder.encoders.7.norm_ff.weight | [256] | 256\n", - "encoder.encoders.7.norm_ff.bias | [256] | 256\n", - "encoder.encoders.7.norm_mha.weight | [256] | 256\n", - "encoder.encoders.7.norm_mha.bias | [256] | 256\n", - "encoder.encoders.7.norm_ff_macaron.weight | [256] | 256\n", - "encoder.encoders.7.norm_ff_macaron.bias | [256] | 256\n", - "encoder.encoders.7.norm_conv.weight | [256] | 256\n", - "encoder.encoders.7.norm_conv.bias | [256] | 256\n", - "encoder.encoders.7.norm_final.weight | [256] | 256\n", - "encoder.encoders.7.norm_final.bias | [256] | 256\n", - "encoder.encoders.7.concat_linear.weight | [512, 256] | 131072\n", - "encoder.encoders.7.concat_linear.bias | [256] | 256\n", - "encoder.encoders.8.self_attn.pos_bias_u | [4, 64] | 256\n", - "encoder.encoders.8.self_attn.pos_bias_v | [4, 64] | 256\n", - "encoder.encoders.8.self_attn.linear_q.weight | [256, 256] | 65536\n", - "encoder.encoders.8.self_attn.linear_q.bias | [256] | 256\n", - "encoder.encoders.8.self_attn.linear_k.weight | [256, 256] | 65536\n", - "encoder.encoders.8.self_attn.linear_k.bias | [256] | 256\n", - "encoder.encoders.8.self_attn.linear_v.weight | [256, 256] | 65536\n", - "encoder.encoders.8.self_attn.linear_v.bias | [256] | 256\n", - "encoder.encoders.8.self_attn.linear_out.weight | [256, 256] | 65536\n", - "encoder.encoders.8.self_attn.linear_out.bias | [256] | 
256\n", - "encoder.encoders.8.self_attn.linear_pos.weight | [256, 256] | 65536\n", - "encoder.encoders.8.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.8.feed_forward.w_1.bias | [2048] | 2048\n", - "encoder.encoders.8.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.8.feed_forward.w_2.bias | [256] | 256\n", - "encoder.encoders.8.feed_forward_macaron.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.8.feed_forward_macaron.w_1.bias | [2048] | 2048\n", - "encoder.encoders.8.feed_forward_macaron.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.8.feed_forward_macaron.w_2.bias | [256] | 256\n", - "encoder.encoders.8.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072\n", - "encoder.encoders.8.conv_module.pointwise_conv1.bias | [512] | 512\n", - "encoder.encoders.8.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840\n", - "encoder.encoders.8.conv_module.depthwise_conv.bias | [256] | 256\n", - "encoder.encoders.8.conv_module.norm.weight | [256] | 256\n", - "encoder.encoders.8.conv_module.norm.bias | [256] | 256\n", - "encoder.encoders.8.conv_module.norm._mean | [256] | 256\n", - "encoder.encoders.8.conv_module.norm._variance | [256] | 256\n", - "encoder.encoders.8.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536\n", - "encoder.encoders.8.conv_module.pointwise_conv2.bias | [256] | 256\n", - "encoder.encoders.8.norm_ff.weight | [256] | 256\n", - "encoder.encoders.8.norm_ff.bias | [256] | 256\n", - "encoder.encoders.8.norm_mha.weight | [256] | 256\n", - "encoder.encoders.8.norm_mha.bias | [256] | 256\n", - "encoder.encoders.8.norm_ff_macaron.weight | [256] | 256\n", - "encoder.encoders.8.norm_ff_macaron.bias | [256] | 256\n", - "encoder.encoders.8.norm_conv.weight | [256] | 256\n", - "encoder.encoders.8.norm_conv.bias | [256] | 256\n", - "encoder.encoders.8.norm_final.weight | [256] | 256\n", - "encoder.encoders.8.norm_final.bias | [256] | 256\n", - "encoder.encoders.8.concat_linear.weight | [512, 256] | 131072\n", - "encoder.encoders.8.concat_linear.bias | [256] | 256\n", - "encoder.encoders.9.self_attn.pos_bias_u | [4, 64] | 256\n", - "encoder.encoders.9.self_attn.pos_bias_v | [4, 64] | 256\n", - "encoder.encoders.9.self_attn.linear_q.weight | [256, 256] | 65536\n", - "encoder.encoders.9.self_attn.linear_q.bias | [256] | 256\n", - "encoder.encoders.9.self_attn.linear_k.weight | [256, 256] | 65536\n", - "encoder.encoders.9.self_attn.linear_k.bias | [256] | 256\n", - "encoder.encoders.9.self_attn.linear_v.weight | [256, 256] | 65536\n", - "encoder.encoders.9.self_attn.linear_v.bias | [256] | 256\n", - "encoder.encoders.9.self_attn.linear_out.weight | [256, 256] | 65536\n", - "encoder.encoders.9.self_attn.linear_out.bias | [256] | 256\n", - "encoder.encoders.9.self_attn.linear_pos.weight | [256, 256] | 65536\n", - "encoder.encoders.9.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.9.feed_forward.w_1.bias | [2048] | 2048\n", - "encoder.encoders.9.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.9.feed_forward.w_2.bias | [256] | 256\n", - "encoder.encoders.9.feed_forward_macaron.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.9.feed_forward_macaron.w_1.bias | [2048] | 2048\n", - "encoder.encoders.9.feed_forward_macaron.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.9.feed_forward_macaron.w_2.bias | [256] | 256\n", - "encoder.encoders.9.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072\n", - "encoder.encoders.9.conv_module.pointwise_conv1.bias | 
[512] | 512\n", - "encoder.encoders.9.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840\n", - "encoder.encoders.9.conv_module.depthwise_conv.bias | [256] | 256\n", - "encoder.encoders.9.conv_module.norm.weight | [256] | 256\n", - "encoder.encoders.9.conv_module.norm.bias | [256] | 256\n", - "encoder.encoders.9.conv_module.norm._mean | [256] | 256\n", - "encoder.encoders.9.conv_module.norm._variance | [256] | 256\n", - "encoder.encoders.9.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536\n", - "encoder.encoders.9.conv_module.pointwise_conv2.bias | [256] | 256\n", - "encoder.encoders.9.norm_ff.weight | [256] | 256\n", - "encoder.encoders.9.norm_ff.bias | [256] | 256\n", - "encoder.encoders.9.norm_mha.weight | [256] | 256\n", - "encoder.encoders.9.norm_mha.bias | [256] | 256\n", - "encoder.encoders.9.norm_ff_macaron.weight | [256] | 256\n", - "encoder.encoders.9.norm_ff_macaron.bias | [256] | 256\n", - "encoder.encoders.9.norm_conv.weight | [256] | 256\n", - "encoder.encoders.9.norm_conv.bias | [256] | 256\n", - "encoder.encoders.9.norm_final.weight | [256] | 256\n", - "encoder.encoders.9.norm_final.bias | [256] | 256\n", - "encoder.encoders.9.concat_linear.weight | [512, 256] | 131072\n", - "encoder.encoders.9.concat_linear.bias | [256] | 256\n", - "encoder.encoders.10.self_attn.pos_bias_u | [4, 64] | 256\n", - "encoder.encoders.10.self_attn.pos_bias_v | [4, 64] | 256\n", - "encoder.encoders.10.self_attn.linear_q.weight | [256, 256] | 65536\n", - "encoder.encoders.10.self_attn.linear_q.bias | [256] | 256\n", - "encoder.encoders.10.self_attn.linear_k.weight | [256, 256] | 65536\n", - "encoder.encoders.10.self_attn.linear_k.bias | [256] | 256\n", - "encoder.encoders.10.self_attn.linear_v.weight | [256, 256] | 65536\n", - "encoder.encoders.10.self_attn.linear_v.bias | [256] | 256\n", - "encoder.encoders.10.self_attn.linear_out.weight | [256, 256] | 65536\n", - "encoder.encoders.10.self_attn.linear_out.bias | [256] | 256\n", - "encoder.encoders.10.self_attn.linear_pos.weight | [256, 256] | 65536\n", - "encoder.encoders.10.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.10.feed_forward.w_1.bias | [2048] | 2048\n", - "encoder.encoders.10.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.10.feed_forward.w_2.bias | [256] | 256\n", - "encoder.encoders.10.feed_forward_macaron.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.10.feed_forward_macaron.w_1.bias | [2048] | 2048\n", - "encoder.encoders.10.feed_forward_macaron.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.10.feed_forward_macaron.w_2.bias | [256] | 256\n", - "encoder.encoders.10.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072\n", - "encoder.encoders.10.conv_module.pointwise_conv1.bias | [512] | 512\n", - "encoder.encoders.10.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840\n", - "encoder.encoders.10.conv_module.depthwise_conv.bias | [256] | 256\n", - "encoder.encoders.10.conv_module.norm.weight | [256] | 256\n", - "encoder.encoders.10.conv_module.norm.bias | [256] | 256\n", - "encoder.encoders.10.conv_module.norm._mean | [256] | 256\n", - "encoder.encoders.10.conv_module.norm._variance | [256] | 256\n", - "encoder.encoders.10.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536\n", - "encoder.encoders.10.conv_module.pointwise_conv2.bias | [256] | 256\n", - "encoder.encoders.10.norm_ff.weight | [256] | 256\n", - "encoder.encoders.10.norm_ff.bias | [256] | 256\n", - "encoder.encoders.10.norm_mha.weight | [256] | 256\n", - 
"encoder.encoders.10.norm_mha.bias | [256] | 256\n", - "encoder.encoders.10.norm_ff_macaron.weight | [256] | 256\n", - "encoder.encoders.10.norm_ff_macaron.bias | [256] | 256\n", - "encoder.encoders.10.norm_conv.weight | [256] | 256\n", - "encoder.encoders.10.norm_conv.bias | [256] | 256\n", - "encoder.encoders.10.norm_final.weight | [256] | 256\n", - "encoder.encoders.10.norm_final.bias | [256] | 256\n", - "encoder.encoders.10.concat_linear.weight | [512, 256] | 131072\n", - "encoder.encoders.10.concat_linear.bias | [256] | 256\n", - "encoder.encoders.11.self_attn.pos_bias_u | [4, 64] | 256\n", - "encoder.encoders.11.self_attn.pos_bias_v | [4, 64] | 256\n", - "encoder.encoders.11.self_attn.linear_q.weight | [256, 256] | 65536\n", - "encoder.encoders.11.self_attn.linear_q.bias | [256] | 256\n", - "encoder.encoders.11.self_attn.linear_k.weight | [256, 256] | 65536\n", - "encoder.encoders.11.self_attn.linear_k.bias | [256] | 256\n", - "encoder.encoders.11.self_attn.linear_v.weight | [256, 256] | 65536\n", - "encoder.encoders.11.self_attn.linear_v.bias | [256] | 256\n", - "encoder.encoders.11.self_attn.linear_out.weight | [256, 256] | 65536\n", - "encoder.encoders.11.self_attn.linear_out.bias | [256] | 256\n", - "encoder.encoders.11.self_attn.linear_pos.weight | [256, 256] | 65536\n", - "encoder.encoders.11.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.11.feed_forward.w_1.bias | [2048] | 2048\n", - "encoder.encoders.11.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.11.feed_forward.w_2.bias | [256] | 256\n", - "encoder.encoders.11.feed_forward_macaron.w_1.weight | [256, 2048] | 524288\n", - "encoder.encoders.11.feed_forward_macaron.w_1.bias | [2048] | 2048\n", - "encoder.encoders.11.feed_forward_macaron.w_2.weight | [2048, 256] | 524288\n", - "encoder.encoders.11.feed_forward_macaron.w_2.bias | [256] | 256\n", - "encoder.encoders.11.conv_module.pointwise_conv1.weight | [512, 256, 1] | 131072\n", - "encoder.encoders.11.conv_module.pointwise_conv1.bias | [512] | 512\n", - "encoder.encoders.11.conv_module.depthwise_conv.weight | [256, 1, 15] | 3840\n", - "encoder.encoders.11.conv_module.depthwise_conv.bias | [256] | 256\n", - "encoder.encoders.11.conv_module.norm.weight | [256] | 256\n", - "encoder.encoders.11.conv_module.norm.bias | [256] | 256\n", - "encoder.encoders.11.conv_module.norm._mean | [256] | 256\n", - "encoder.encoders.11.conv_module.norm._variance | [256] | 256\n", - "encoder.encoders.11.conv_module.pointwise_conv2.weight | [256, 256, 1] | 65536\n", - "encoder.encoders.11.conv_module.pointwise_conv2.bias | [256] | 256\n", - "encoder.encoders.11.norm_ff.weight | [256] | 256\n", - "encoder.encoders.11.norm_ff.bias | [256] | 256\n", - "encoder.encoders.11.norm_mha.weight | [256] | 256\n", - "encoder.encoders.11.norm_mha.bias | [256] | 256\n", - "encoder.encoders.11.norm_ff_macaron.weight | [256] | 256\n", - "encoder.encoders.11.norm_ff_macaron.bias | [256] | 256\n", - "encoder.encoders.11.norm_conv.weight | [256] | 256\n", - "encoder.encoders.11.norm_conv.bias | [256] | 256\n", - "encoder.encoders.11.norm_final.weight | [256] | 256\n", - "encoder.encoders.11.norm_final.bias | [256] | 256\n", - "encoder.encoders.11.concat_linear.weight | [512, 256] | 131072\n", - "encoder.encoders.11.concat_linear.bias | [256] | 256\n", - "decoder.embed.0.weight | [4233, 256] | 1083648\n", - "decoder.after_norm.weight | [256] | 256\n", - "decoder.after_norm.bias | [256] | 256\n", - "decoder.output_layer.weight | [256, 4233] | 1083648\n", - 
"decoder.output_layer.bias | [4233] | 4233\n", - "decoder.decoders.0.self_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.0.self_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.0.self_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.0.self_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.0.self_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.0.self_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.0.self_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.0.self_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.0.src_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.0.src_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.0.src_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.0.src_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.0.src_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.0.src_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.0.src_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.0.src_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.0.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "decoder.decoders.0.feed_forward.w_1.bias | [2048] | 2048\n", - "decoder.decoders.0.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "decoder.decoders.0.feed_forward.w_2.bias | [256] | 256\n", - "decoder.decoders.0.norm1.weight | [256] | 256\n", - "decoder.decoders.0.norm1.bias | [256] | 256\n", - "decoder.decoders.0.norm2.weight | [256] | 256\n", - "decoder.decoders.0.norm2.bias | [256] | 256\n", - "decoder.decoders.0.norm3.weight | [256] | 256\n", - "decoder.decoders.0.norm3.bias | [256] | 256\n", - "decoder.decoders.0.concat_linear1.weight | [512, 256] | 131072\n", - "decoder.decoders.0.concat_linear1.bias | [256] | 256\n", - "decoder.decoders.0.concat_linear2.weight | [512, 256] | 131072\n", - "decoder.decoders.0.concat_linear2.bias | [256] | 256\n", - "decoder.decoders.1.self_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.1.self_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.1.self_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.1.self_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.1.self_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.1.self_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.1.self_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.1.self_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.1.src_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.1.src_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.1.src_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.1.src_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.1.src_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.1.src_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.1.src_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.1.src_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.1.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "decoder.decoders.1.feed_forward.w_1.bias | [2048] | 2048\n", - "decoder.decoders.1.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "decoder.decoders.1.feed_forward.w_2.bias | [256] | 256\n", - "decoder.decoders.1.norm1.weight | [256] | 256\n", - "decoder.decoders.1.norm1.bias | [256] | 256\n", - "decoder.decoders.1.norm2.weight | [256] | 256\n", - "decoder.decoders.1.norm2.bias | [256] | 
256\n", - "decoder.decoders.1.norm3.weight | [256] | 256\n", - "decoder.decoders.1.norm3.bias | [256] | 256\n", - "decoder.decoders.1.concat_linear1.weight | [512, 256] | 131072\n", - "decoder.decoders.1.concat_linear1.bias | [256] | 256\n", - "decoder.decoders.1.concat_linear2.weight | [512, 256] | 131072\n", - "decoder.decoders.1.concat_linear2.bias | [256] | 256\n", - "decoder.decoders.2.self_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.2.self_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.2.self_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.2.self_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.2.self_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.2.self_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.2.self_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.2.self_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.2.src_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.2.src_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.2.src_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.2.src_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.2.src_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.2.src_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.2.src_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.2.src_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.2.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "decoder.decoders.2.feed_forward.w_1.bias | [2048] | 2048\n", - "decoder.decoders.2.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "decoder.decoders.2.feed_forward.w_2.bias | [256] | 256\n", - "decoder.decoders.2.norm1.weight | [256] | 256\n", - "decoder.decoders.2.norm1.bias | [256] | 256\n", - "decoder.decoders.2.norm2.weight | [256] | 256\n", - "decoder.decoders.2.norm2.bias | [256] | 256\n", - "decoder.decoders.2.norm3.weight | [256] | 256\n", - "decoder.decoders.2.norm3.bias | [256] | 256\n", - "decoder.decoders.2.concat_linear1.weight | [512, 256] | 131072\n", - "decoder.decoders.2.concat_linear1.bias | [256] | 256\n", - "decoder.decoders.2.concat_linear2.weight | [512, 256] | 131072\n", - "decoder.decoders.2.concat_linear2.bias | [256] | 256\n", - "decoder.decoders.3.self_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.3.self_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.3.self_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.3.self_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.3.self_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.3.self_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.3.self_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.3.self_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.3.src_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.3.src_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.3.src_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.3.src_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.3.src_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.3.src_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.3.src_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.3.src_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.3.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "decoder.decoders.3.feed_forward.w_1.bias | [2048] | 2048\n", - 
"decoder.decoders.3.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "decoder.decoders.3.feed_forward.w_2.bias | [256] | 256\n", - "decoder.decoders.3.norm1.weight | [256] | 256\n", - "decoder.decoders.3.norm1.bias | [256] | 256\n", - "decoder.decoders.3.norm2.weight | [256] | 256\n", - "decoder.decoders.3.norm2.bias | [256] | 256\n", - "decoder.decoders.3.norm3.weight | [256] | 256\n", - "decoder.decoders.3.norm3.bias | [256] | 256\n", - "decoder.decoders.3.concat_linear1.weight | [512, 256] | 131072\n", - "decoder.decoders.3.concat_linear1.bias | [256] | 256\n", - "decoder.decoders.3.concat_linear2.weight | [512, 256] | 131072\n", - "decoder.decoders.3.concat_linear2.bias | [256] | 256\n", - "decoder.decoders.4.self_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.4.self_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.4.self_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.4.self_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.4.self_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.4.self_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.4.self_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.4.self_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.4.src_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.4.src_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.4.src_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.4.src_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.4.src_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.4.src_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.4.src_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.4.src_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.4.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "decoder.decoders.4.feed_forward.w_1.bias | [2048] | 2048\n", - "decoder.decoders.4.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "decoder.decoders.4.feed_forward.w_2.bias | [256] | 256\n", - "decoder.decoders.4.norm1.weight | [256] | 256\n", - "decoder.decoders.4.norm1.bias | [256] | 256\n", - "decoder.decoders.4.norm2.weight | [256] | 256\n", - "decoder.decoders.4.norm2.bias | [256] | 256\n", - "decoder.decoders.4.norm3.weight | [256] | 256\n", - "decoder.decoders.4.norm3.bias | [256] | 256\n", - "decoder.decoders.4.concat_linear1.weight | [512, 256] | 131072\n", - "decoder.decoders.4.concat_linear1.bias | [256] | 256\n", - "decoder.decoders.4.concat_linear2.weight | [512, 256] | 131072\n", - "decoder.decoders.4.concat_linear2.bias | [256] | 256\n", - "decoder.decoders.5.self_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.5.self_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.5.self_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.5.self_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.5.self_attn.linear_v.weight | [256, 256] | 65536\n", - "decoder.decoders.5.self_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.5.self_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.5.self_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.5.src_attn.linear_q.weight | [256, 256] | 65536\n", - "decoder.decoders.5.src_attn.linear_q.bias | [256] | 256\n", - "decoder.decoders.5.src_attn.linear_k.weight | [256, 256] | 65536\n", - "decoder.decoders.5.src_attn.linear_k.bias | [256] | 256\n", - "decoder.decoders.5.src_attn.linear_v.weight | [256, 256] | 65536\n", - 
"decoder.decoders.5.src_attn.linear_v.bias | [256] | 256\n", - "decoder.decoders.5.src_attn.linear_out.weight | [256, 256] | 65536\n", - "decoder.decoders.5.src_attn.linear_out.bias | [256] | 256\n", - "decoder.decoders.5.feed_forward.w_1.weight | [256, 2048] | 524288\n", - "decoder.decoders.5.feed_forward.w_1.bias | [2048] | 2048\n", - "decoder.decoders.5.feed_forward.w_2.weight | [2048, 256] | 524288\n", - "decoder.decoders.5.feed_forward.w_2.bias | [256] | 256\n", - "decoder.decoders.5.norm1.weight | [256] | 256\n", - "decoder.decoders.5.norm1.bias | [256] | 256\n", - "decoder.decoders.5.norm2.weight | [256] | 256\n", - "decoder.decoders.5.norm2.bias | [256] | 256\n", - "decoder.decoders.5.norm3.weight | [256] | 256\n", - "decoder.decoders.5.norm3.bias | [256] | 256\n", - "decoder.decoders.5.concat_linear1.weight | [512, 256] | 131072\n", - "decoder.decoders.5.concat_linear1.bias | [256] | 256\n", - "decoder.decoders.5.concat_linear2.weight | [512, 256] | 131072\n", - "decoder.decoders.5.concat_linear2.bias | [256] | 256\n", - "ctc.ctc_lo.weight | [256, 4233] | 1083648\n", - "ctc.ctc_lo.bias | [4233] | 4233\n", - "Total parameters: 689, 49355442 elements.\n" - ] - } - ], - "source": [ - "summary(model)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "ruled-invitation", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "U2Model(\n", - " (encoder): ConformerEncoder(\n", - " (global_cmvn): GlobalCMVN()\n", - " (embed): Conv2dSubsampling4(\n", - " (pos_enc): RelPositionalEncoding(\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " )\n", - " (conv): Sequential(\n", - " (0): Conv2D(1, 256, kernel_size=[3, 3], stride=[2, 2], data_format=NCHW)\n", - " (1): ReLU()\n", - " (2): Conv2D(256, 256, kernel_size=[3, 3], stride=[2, 2], data_format=NCHW)\n", - " (3): ReLU()\n", - " )\n", - " (out): Sequential(\n", - " (0): Linear(in_features=4864, out_features=256, dtype=float32)\n", - " )\n", - " )\n", - " (after_norm): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (encoders): LayerList(\n", - " (0): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, 
momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (1): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (2): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): 
Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (3): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (4): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, 
out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (5): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", 
- " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (6): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (7): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): 
Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (8): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (9): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, 
out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (10): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " 
(norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (11): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " (linear_pos): Linear(in_features=256, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1D(256, 512, kernel_size=[1], data_format=NCL)\n", - " (depthwise_conv): Conv1D(256, 256, kernel_size=[15], padding=7, groups=256, data_format=NCL)\n", - " (norm): BatchNorm1D(num_features=256, momentum=0.9, epsilon=1e-05)\n", - " (pointwise_conv2): Conv1D(256, 256, kernel_size=[1], data_format=NCL)\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_mha): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_ff_macaron): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_conv): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm_final): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " )\n", - " )\n", - " (decoder): TransformerDecoder(\n", - " (embed): Sequential(\n", - " (0): Embedding(4233, 256, sparse=False)\n", - " (1): PositionalEncoding(\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " )\n", - " )\n", - " (after_norm): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (output_layer): Linear(in_features=256, out_features=4233, dtype=float32)\n", - " (decoders): LayerList(\n", - " (0): DecoderLayer(\n", - " (self_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (src_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, 
dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): ReLU()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (norm1): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm2): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm3): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear1): Linear(in_features=512, out_features=256, dtype=float32)\n", - " (concat_linear2): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (1): DecoderLayer(\n", - " (self_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (src_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): ReLU()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (norm1): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm2): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm3): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear1): Linear(in_features=512, out_features=256, dtype=float32)\n", - " (concat_linear2): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (2): DecoderLayer(\n", - " (self_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (src_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (feed_forward): 
PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): ReLU()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (norm1): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm2): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm3): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear1): Linear(in_features=512, out_features=256, dtype=float32)\n", - " (concat_linear2): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (3): DecoderLayer(\n", - " (self_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (src_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): ReLU()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (norm1): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm2): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm3): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear1): Linear(in_features=512, out_features=256, dtype=float32)\n", - " (concat_linear2): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (4): DecoderLayer(\n", - " (self_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (src_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): ReLU()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " 
(norm1): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm2): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm3): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear1): Linear(in_features=512, out_features=256, dtype=float32)\n", - " (concat_linear2): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " (5): DecoderLayer(\n", - " (self_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (src_attn): MultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_k): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_v): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (linear_out): Linear(in_features=256, out_features=256, dtype=float32)\n", - " (dropout): Dropout(p=0.0, axis=None, mode=upscale_in_train)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, dtype=float32)\n", - " (activation): ReLU()\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (w_2): Linear(in_features=2048, out_features=256, dtype=float32)\n", - " )\n", - " (norm1): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm2): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (norm3): LayerNorm(normalized_shape=[256], epsilon=1e-12)\n", - " (dropout): Dropout(p=0.1, axis=None, mode=upscale_in_train)\n", - " (concat_linear1): Linear(in_features=512, out_features=256, dtype=float32)\n", - " (concat_linear2): Linear(in_features=512, out_features=256, dtype=float32)\n", - " )\n", - " )\n", - " )\n", - " (ctc): CTCDecoder(\n", - " (ctc_lo): Linear(in_features=256, out_features=4233, dtype=float32)\n", - " (criterion): CTCLoss(\n", - " (loss): CTCLoss()\n", - " )\n", - " )\n", - " (criterion_att): LabelSmoothingLoss(\n", - " (criterion): KLDivLoss()\n", - " )\n", - ")\n" - ] - } - ], - "source": [ - "print(model)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "fossil-means", - "metadata": {}, - "outputs": [], - "source": [ - "# load feat" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "fleet-despite", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "compute_cmvn_loader_test.ipynb encoder.npz\r\n", - "dataloader.ipynb hack_api_test.ipynb\r\n", - "dataloader_with_tokens_tokenids.ipynb jit_infer.ipynb\r\n", - "data.npz layer_norm_test.ipynb\r\n", - "decoder.npz Linear_test.ipynb\r\n", - "enc_0_ff_out.npz mask_and_masked_fill_test.ipynb\r\n", - "enc_0_norm_ff.npz model.npz\r\n", - "enc_0.npz position_embeding_check.ipynb\r\n", - "enc_0_selattn_out.npz python_test.ipynb\r\n", - "enc_2.npz train_test.ipynb\r\n", - "enc_all.npz u2_model.ipynb\r\n", - "enc_embed.npz\r\n" - ] - } - ], - "source": [ - "%ls .notebook" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "abroad-oracle", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['BAC009S0739W0246' 'BAC009S0727W0424' 
'BAC009S0753W0412'\n", - " 'BAC009S0756W0206' 'BAC009S0740W0414' 'BAC009S0728W0426'\n", - " 'BAC009S0739W0214' 'BAC009S0753W0423' 'BAC009S0734W0201'\n", - " 'BAC009S0740W0427' 'BAC009S0730W0423' 'BAC009S0728W0367'\n", - " 'BAC009S0730W0418' 'BAC009S0727W0157' 'BAC009S0749W0409'\n", - " 'BAC009S0727W0418']\n", - "(16, 207, 80)\n", - "[[[ 8.994624 9.538309 9.191589 ... 10.507416 9.563305 8.256403 ]\n", - " [ 9.798841 10.405224 9.26511 ... 10.251211 9.543982 8.873768 ]\n", - " [10.6890745 10.395469 8.053548 ... 9.906749 10.064903 8.050915 ]\n", - " ...\n", - " [ 9.217986 9.65069 8.505259 ... 9.687183 8.742463 7.9865475]\n", - " [10.129122 9.935194 9.37982 ... 9.563894 9.825992 8.979543 ]\n", - " [ 9.095531 7.1338377 9.468001 ... 9.472748 9.021235 7.447914 ]]\n", - "\n", - " [[11.430976 10.671858 6.0841026 ... 9.382682 8.729745 7.5315614]\n", - " [ 9.731717 7.8104815 7.5714607 ... 10.043035 9.243595 7.3540792]\n", - " [10.65017 10.600604 8.467784 ... 9.281448 9.186885 8.070343 ]\n", - " ...\n", - " [ 9.096987 9.2637 8.075275 ... 8.431845 8.370505 8.002926 ]\n", - " [10.461651 10.147784 6.7693496 ... 9.779426 9.577453 8.080652 ]\n", - " [ 7.794432 5.621059 7.9750648 ... 9.997245 9.849678 8.031287 ]]\n", - "\n", - " [[ 7.3455667 7.896357 7.5795946 ... 11.631024 10.451254 9.123633 ]\n", - " [ 8.628678 8.4630575 7.499242 ... 12.415986 10.975749 8.9425745]\n", - " [ 9.831394 10.2812805 8.97241 ... 12.1386795 10.40175 9.005517 ]\n", - " ...\n", - " [ 7.089641 7.405548 6.8142557 ... 9.325196 9.273162 8.353427 ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]]\n", - "\n", - " ...\n", - "\n", - " [[10.933237 10.464394 7.7202725 ... 10.348816 9.302338 7.1553144]\n", - " [10.449866 9.907033 9.029272 ... 9.952465 9.414051 7.559279 ]\n", - " [10.487655 9.81259 9.895244 ... 9.58662 9.341254 7.7849016]\n", - " ...\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]]\n", - "\n", - " [[ 9.944384 9.585867 8.220328 ... 11.588647 11.045029 8.817075 ]\n", - " [ 7.678356 8.322397 7.533047 ... 11.055085 10.535685 9.27465 ]\n", - " [ 8.626197 9.675917 9.841045 ... 11.378827 10.922112 8.991444 ]\n", - " ...\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]]\n", - "\n", - " [[ 8.107938 7.759043 6.710301 ... 12.650573 11.466156 11.061517 ]\n", - " [11.380332 11.222007 8.658889 ... 12.810616 12.222216 11.689288 ]\n", - " [10.677676 9.920579 8.046089 ... 13.572894 12.5624075 11.155033 ]\n", - " ...\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. ]\n", - " [ 0. 0. 0. ... 0. 0. 0. 
]]]\n", - "[207 207 205 205 203 203 198 197 195 188 186 186 185 180 166 163]\n", - "[[2995 3116 1209 565 -1 -1]\n", - " [ 236 1176 331 66 3925 4077]\n", - " [2693 524 234 1145 366 -1]\n", - " [3875 4211 3062 700 -1 -1]\n", - " [ 272 987 1134 494 2959 -1]\n", - " [1936 3715 120 2553 2695 2710]\n", - " [ 25 1149 3930 -1 -1 -1]\n", - " [1753 1778 1237 482 3925 110]\n", - " [3703 2 565 3827 -1 -1]\n", - " [1150 2734 10 2478 3490 -1]\n", - " [ 426 811 95 489 144 -1]\n", - " [2313 2006 489 975 -1 -1]\n", - " [3702 3414 205 1488 2966 1347]\n", - " [ 70 1741 702 1666 -1 -1]\n", - " [ 703 1778 1030 849 -1 -1]\n", - " [ 814 1674 115 3827 -1 -1]]\n", - "[4 6 5 4 5 6 3 6 4 5 5 4 6 4 4 4]\n" - ] - } - ], - "source": [ - "data = np.load('.notebook/data.npz', allow_pickle=True)\n", - "keys=data['keys']\n", - "feat=data['feat']\n", - "feat_len=data['feat_len']\n", - "text=data['text']\n", - "text_len=data['text_len']\n", - "print(keys)\n", - "print(feat.shape)\n", - "print(feat)\n", - "print(feat_len)\n", - "print(text)\n", - "print(text_len)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "false-instrument", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "arctic-proxy", - "metadata": {}, - "outputs": [], - "source": [ - "# ['BAC009S0739W0246', 'BAC009S0727W0424', 'BAC009S0753W0412', 'BAC009S0756W0206', 'BAC009S0740W0414', 'BAC009S0728W0426', 'BAC009S0739W0214', 'BAC009S0753W0423', 'BAC009S0734W0201', 'BAC009S0740W0427', 'BAC009S0730W0423', 'BAC009S0728W0367', 'BAC009S0730W0418', 'BAC009S0727W0157', 'BAC009S0749W0409', 'BAC009S0727W0418']\n", - "# torch.Size([16, 207, 80])\n", - "# tensor([[[ 8.9946, 9.5383, 9.1916, ..., 10.5074, 9.5633, 8.2564],\n", - "# [ 9.7988, 10.4052, 9.2651, ..., 10.2512, 9.5440, 8.8738],\n", - "# [10.6891, 10.3955, 8.0535, ..., 9.9067, 10.0649, 8.0509],\n", - "# ...,\n", - "# [ 9.2180, 9.6507, 8.5053, ..., 9.6872, 8.7425, 7.9865],\n", - "# [10.1291, 9.9352, 9.3798, ..., 9.5639, 9.8260, 8.9795],\n", - "# [ 9.0955, 7.1338, 9.4680, ..., 9.4727, 9.0212, 7.4479]],\n", - "\n", - "# [[11.4310, 10.6719, 6.0841, ..., 9.3827, 8.7297, 7.5316],\n", - "# [ 9.7317, 7.8105, 7.5715, ..., 10.0430, 9.2436, 7.3541],\n", - "# [10.6502, 10.6006, 8.4678, ..., 9.2814, 9.1869, 8.0703],\n", - "# ...,\n", - "# [ 9.0970, 9.2637, 8.0753, ..., 8.4318, 8.3705, 8.0029],\n", - "# [10.4617, 10.1478, 6.7693, ..., 9.7794, 9.5775, 8.0807],\n", - "# [ 7.7944, 5.6211, 7.9751, ..., 9.9972, 9.8497, 8.0313]],\n", - "\n", - "# [[ 7.3456, 7.8964, 7.5796, ..., 11.6310, 10.4513, 9.1236],\n", - "# [ 8.6287, 8.4631, 7.4992, ..., 12.4160, 10.9757, 8.9426],\n", - "# [ 9.8314, 10.2813, 8.9724, ..., 12.1387, 10.4017, 9.0055],\n", - "# ...,\n", - "# [ 7.0896, 7.4055, 6.8143, ..., 9.3252, 9.2732, 8.3534],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]],\n", - "\n", - "# ...,\n", - "\n", - "# [[10.9332, 10.4644, 7.7203, ..., 10.3488, 9.3023, 7.1553],\n", - "# [10.4499, 9.9070, 9.0293, ..., 9.9525, 9.4141, 7.5593],\n", - "# [10.4877, 9.8126, 9.8952, ..., 9.5866, 9.3413, 7.7849],\n", - "# ...,\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]],\n", - "\n", - "# [[ 9.9444, 9.5859, 8.2203, ..., 11.5886, 11.0450, 8.8171],\n", - "# [ 7.6784, 8.3224, 7.5330, ..., 11.0551, 10.5357, 9.2746],\n", - "# [ 8.6262, 9.6759, 9.8410, ..., 
11.3788, 10.9221, 8.9914],\n", - "# ...,\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]],\n", - "\n", - "# [[ 8.1079, 7.7590, 6.7103, ..., 12.6506, 11.4662, 11.0615],\n", - "# [11.3803, 11.2220, 8.6589, ..., 12.8106, 12.2222, 11.6893],\n", - "# [10.6777, 9.9206, 8.0461, ..., 13.5729, 12.5624, 11.1550],\n", - "# ...,\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - "# [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]]])\n", - "# tensor([207, 207, 205, 205, 203, 203, 198, 197, 195, 188, 186, 186, 185, 180,\n", - "# 166, 163], dtype=torch.int32)\n", - "# tensor([[2995, 3116, 1209, 565, -1, -1],\n", - "# [ 236, 1176, 331, 66, 3925, 4077],\n", - "# [2693, 524, 234, 1145, 366, -1],\n", - "# [3875, 4211, 3062, 700, -1, -1],\n", - "# [ 272, 987, 1134, 494, 2959, -1],\n", - "# [1936, 3715, 120, 2553, 2695, 2710],\n", - "# [ 25, 1149, 3930, -1, -1, -1],\n", - "# [1753, 1778, 1237, 482, 3925, 110],\n", - "# [3703, 2, 565, 3827, -1, -1],\n", - "# [1150, 2734, 10, 2478, 3490, -1],\n", - "# [ 426, 811, 95, 489, 144, -1],\n", - "# [2313, 2006, 489, 975, -1, -1],\n", - "# [3702, 3414, 205, 1488, 2966, 1347],\n", - "# [ 70, 1741, 702, 1666, -1, -1],\n", - "# [ 703, 1778, 1030, 849, -1, -1],\n", - "# [ 814, 1674, 115, 3827, -1, -1]], dtype=torch.int32)\n", - "# tensor([4, 6, 5, 4, 5, 6, 3, 6, 4, 5, 5, 4, 6, 4, 4, 4], dtype=torch.int32)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "seasonal-switch", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "defined-brooks", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "compute_cmvn_loader_test.ipynb\t encoder.npz\r\n", - "dataloader.ipynb\t\t hack_api_test.ipynb\r\n", - "dataloader_with_tokens_tokenids.ipynb jit_infer.ipynb\r\n", - "data.npz\t\t\t layer_norm_test.ipynb\r\n", - "decoder.npz\t\t\t Linear_test.ipynb\r\n", - "enc_0_ff_out.npz\t\t mask_and_masked_fill_test.ipynb\r\n", - "enc_0_norm_ff.npz\t\t model.npz\r\n", - "enc_0.npz\t\t\t position_embeding_check.ipynb\r\n", - "enc_0_selattn_out.npz\t\t python_test.ipynb\r\n", - "enc_2.npz\t\t\t train_test.ipynb\r\n", - "enc_all.npz\t\t\t u2_model.ipynb\r\n", - "enc_embed.npz\r\n" - ] - } - ], - "source": [ - "# load model param\n", - "!ls .notebook\n", - "data = np.load('.notebook/model.npz', allow_pickle=True)\n", - "state_dict = data['state'].item()\n", - "\n", - "for key, _ in model.state_dict().items():\n", - " if key not in state_dict:\n", - " print(f\"{key} not find.\")\n", - "\n", - "model.set_state_dict(state_dict)\n", - "\n", - "now_state_dict = model.state_dict()\n", - "for key, value in now_state_dict.items():\n", - " if not np.allclose(value.numpy(), state_dict[key]):\n", - " print(key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "exempt-viewer", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "confident-piano", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/framework.py:687: DeprecationWarning: `np.bool` is a deprecated alias for the builtin `bool`. To silence this warning, use `bool` by itself. 
Doing this will not modify any behavior and is safe. If you specifically wanted the numpy scalar type, use `np.bool_` here.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " elif dtype == np.bool:\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [142.48880005]) Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [41.84146118]) Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [377.33258057])\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dygraph/math_op_patch.py:238: UserWarning: The dtype of left and right variables are not the same, left dtype is VarType.FP32, but right dtype is VarType.INT32, the right dtype will convert to VarType.FP32\n", - " format(lhs_dtype, rhs_dtype, lhs_dtype))\n" - ] - } - ], - "source": [ - "# compute loss\n", - "import paddle\n", - "feat=paddle.to_tensor(feat)\n", - "feat_len=paddle.to_tensor(feat_len, dtype='int64')\n", - "text=paddle.to_tensor(text, dtype='int64')\n", - "text_len=paddle.to_tensor(text_len, dtype='int64')\n", - "\n", - "model.eval()\n", - "total_loss, attention_loss, ctc_loss = model(feat, feat_len,\n", - " text, text_len)\n", - "print(total_loss, attention_loss, ctc_loss )" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "better-senator", - "metadata": {}, - "outputs": [], - "source": [ - "# tensor(142.4888, device='cuda:0', grad_fn=) \n", - "# tensor(41.8415, device='cuda:0', grad_fn=) \n", - "# tensor(377.3326, device='cuda:0', grad_fn=)\n", - "# 142.4888 41.84146 377.33258" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "related-banking", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "olympic-problem", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[16, 51, 256]\n", - "[16, 1, 51]\n", - "Tensor(shape=[51, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [[-0.70194179, 0.56254166, 0.68803459, ..., 1.12373221, 0.78039235, 1.13693869],\n", - " [-0.77877808, 0.39126658, 0.71887815, ..., 1.25188220, 0.88616788, 1.31734526],\n", - " [-0.95908946, 0.63460249, 0.87671334, ..., 0.98183727, 0.74401081, 1.29032660],\n", - " ...,\n", - " [-1.07322502, 0.67236906, 0.92303109, ..., 0.90754563, 0.81767166, 1.32396567],\n", - " [-1.16541159, 0.68199694, 0.69394493, ..., 1.22383487, 0.80282891, 1.45065081],\n", - " [-1.27320945, 0.71458030, 0.75819558, ..., 0.94154912, 0.87748396, 1.26230514]])\n" - ] - } - ], - "source": [ - "# encoder\n", - "encoder_out, encoder_mask = model.encoder(feat, feat_len)\n", - "print(encoder_out.shape)\n", - "print(encoder_mask.shape)\n", - "print(encoder_out[0])" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "shaped-alaska", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "deepspeech examples README_cn.md\tsetup.sh tools\r\n", - "docs\t LICENSE README.md\t\ttests\t utils\r\n", - "env.sh\t log requirements.txt\tthird_party\r\n" - ] - } - ], - "source": [ - "!ls\n", - "data = np.load('.notebook/encoder.npz', allow_pickle=True)\n", - "torch_mask = data['mask']\n", - "torch_encoder_out = data['out']" - ] - }, -
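The cells that follow compare the Paddle encoder against the saved torch reference with `np.testing.assert_equal` and `np.allclose` at a few hand-picked tolerances (the float32 outputs agree at `atol=1e-5` but not at `1e-6`). As a side note, a small helper along the lines of the hedged sketch below makes that tolerance sweep repeatable; the `report_match` name and the tolerance ladder are illustrative assumptions, not part of this notebook.

```python
# Hedged sketch (not from the original notebook): sweep absolute tolerances
# when comparing reference tensors exported from another framework.
import numpy as np

def report_match(ref: np.ndarray, out: np.ndarray, rtol: float = 1e-6) -> None:
    """Print the max absolute error, then test a ladder of atol values."""
    print(f"max abs err: {np.abs(ref - out).max():.3e}")
    for atol in (1e-4, 1e-5, 1e-6, 1e-7):
        ok = np.allclose(ref, out, atol=atol, rtol=rtol)
        print(f"atol={atol:.0e}: {'match' if ok else 'mismatch'}")

# usage, e.g.: report_match(torch_encoder_out, encoder_out.numpy())
```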
{ - "cell_type": "code", - "execution_count": 15, - "id": "federal-rover", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "None\n" - ] - } - ], - "source": [ - "print(np.testing.assert_equal(torch_mask, encoder_mask.numpy()))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "regulated-interstate", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "False\n", - "[[-0.7019424 0.56254166 0.6880345 ... 1.1237322 0.78039217\n", - " 1.1369387 ]\n", - " [-0.778778 0.39126638 0.7188779 ... 1.2518823 0.8861681\n", - " 1.3173454 ]\n", - " [-0.9590891 0.6346026 0.87671363 ... 0.9818373 0.74401116\n", - " 1.2903274 ]\n", - " ...\n", - " [-1.0732253 0.6723689 0.9230311 ... 0.9075457 0.8176713\n", - " 1.3239657 ]\n", - " [-1.165412 0.6819976 0.69394535 ... 1.2238353 0.80282927\n", - " 1.4506509 ]\n", - " [-1.2732087 0.71458083 0.7581961 ... 0.9415482 0.877484\n", - " 1.2623053 ]]\n", - "----\n", - "[[-0.7019418 0.56254166 0.6880346 ... 1.1237322 0.78039235\n", - " 1.1369387 ]\n", - " [-0.7787781 0.39126658 0.71887815 ... 1.2518822 0.8861679\n", - " 1.3173453 ]\n", - " [-0.95908946 0.6346025 0.87671334 ... 0.9818373 0.7440108\n", - " 1.2903266 ]\n", - " ...\n", - " [-1.073225 0.67236906 0.9230311 ... 0.9075456 0.81767166\n", - " 1.3239657 ]\n", - " [-1.1654116 0.68199694 0.69394493 ... 1.2238349 0.8028289\n", - " 1.4506508 ]\n", - " [-1.2732095 0.7145803 0.7581956 ... 0.9415491 0.87748396\n", - " 1.2623051 ]]\n", - "True\n", - "False\n" - ] - } - ], - "source": [ - "print(np.allclose(torch_encoder_out, encoder_out.numpy()))\n", - "print(torch_encoder_out[0])\n", - "print(\"----\")\n", - "print(encoder_out.numpy()[0])\n", - "print(np.allclose(torch_encoder_out, encoder_out.numpy(), atol=1e-5, rtol=1e-6))\n", - "print(np.allclose(torch_encoder_out, encoder_out.numpy(), atol=1e-6, rtol=1e-6))" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "proof-scheduling", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [377.33258057])\n", - "[1.]\n", - "[[ 3.16902876e+00 -1.51763987e-02 4.91095744e-02 ... -2.47971853e-03\n", - " -5.93360700e-03 -7.26609165e-03]\n", - " [-1.74184477e+00 7.75874173e-03 -4.49434854e-02 ... 9.92412097e-04\n", - " 2.46337592e-03 2.31892057e-03]\n", - " [-2.33343339e+00 1.30475955e-02 -2.66557075e-02 ... 2.27532350e-03\n", - " 5.76924905e-03 7.48788286e-03]\n", - " ...\n", - " [-4.30358458e+00 2.46054661e-02 -9.00950655e-02 ... 4.43156436e-03\n", - " 1.16122244e-02 1.44715561e-02]\n", - " [-3.36921120e+00 1.73153952e-02 -6.36872873e-02 ... 3.28363618e-03\n", - " 8.58010259e-03 1.07794888e-02]\n", - " [-6.62045336e+00 3.49955931e-02 -1.23962618e-01 ... 6.36671018e-03\n", - " 1.60814095e-02 2.03891303e-02]]\n", - "[-4.3777819e+00 2.3245810e-02 -9.3339294e-02 ... 
4.2569344e-03\n", - " 1.0919910e-02 1.3787797e-02]\n" - ] - } - ], - "source": [ - "from paddle.nn import functional as F\n", - "def ctc_loss(logits,\n", - " labels,\n", - " input_lengths,\n", - " label_lengths,\n", - " blank=0,\n", - " reduction='mean',\n", - " norm_by_times=False):\n", - " loss_out = paddle.fluid.layers.warpctc(logits, labels, blank, norm_by_times,\n", - " input_lengths, label_lengths)\n", - " loss_out = paddle.fluid.layers.squeeze(loss_out, [-1])\n", - " assert reduction in ['mean', 'sum', 'none']\n", - " if reduction == 'mean':\n", - " loss_out = paddle.mean(loss_out / label_lengths)\n", - " elif reduction == 'sum':\n", - " loss_out = paddle.sum(loss_out)\n", - " return loss_out\n", - "\n", - "F.ctc_loss = ctc_loss\n", - "\n", - "torch_mask_t = paddle.to_tensor(torch_mask, dtype='int64')\n", - "encoder_out_lens = torch_mask_t.squeeze(1).sum(1)\n", - "loss_ctc = model.ctc(paddle.to_tensor(torch_encoder_out), encoder_out_lens, text, text_len)\n", - "print(loss_ctc)\n", - "loss_ctc.backward()\n", - "print(loss_ctc.grad)\n", - "print(model.ctc.ctc_lo.weight.grad)\n", - "print(model.ctc.ctc_lo.bias.grad)\n", - "\n", - "\n", - "# tensor(377.3326, device='cuda:0', grad_fn=)\n", - "# None\n", - "# [[ 3.16902351e+00 -1.51765049e-02 4.91097234e-02 ... -2.47973716e-03\n", - "# -5.93366381e-03 -7.26613170e-03]\n", - "# [-1.74185038e+00 7.75875803e-03 -4.49435972e-02 ... 9.92415240e-04\n", - "# 2.46338220e-03 2.31891591e-03]\n", - "# [-2.33343077e+00 1.30476682e-02 -2.66557615e-02 ... 2.27533933e-03\n", - "# 5.76929189e-03 7.48792710e-03]\n", - "# ...\n", - "# [-4.30356789e+00 2.46056803e-02 -9.00955945e-02 ... 4.43160534e-03\n", - "# 1.16123557e-02 1.44716976e-02]\n", - "# [-3.36919212e+00 1.73155665e-02 -6.36875406e-02 ... 3.28367390e-03\n", - "# 8.58021621e-03 1.07796099e-02]\n", - "# [-6.62039661e+00 3.49958315e-02 -1.23963736e-01 ... 6.36674836e-03\n", - "# 1.60815325e-02 2.03892551e-02]]\n", - "# [-4.3777566e+00 2.3245990e-02 -9.3339972e-02 ... 4.2569702e-03\n", - "# 1.0920014e-02 1.3787906e-02]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "enclosed-consolidation", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "synthetic-hungarian", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [41.84146118]) 0.0\n" - ] - } - ], - "source": [ - "loss_att, acc_att = model._calc_att_loss(paddle.to_tensor(torch_encoder_out), paddle.to_tensor(torch_mask),\n", - " text, text_len)\n", - "print(loss_att, acc_att)\n", - "#tensor(41.8416, device='cuda:0', grad_fn=) 0.0" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "indian-sweden", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 202, - "id": "marine-cuisine", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[-3.7638968e-01 -8.2272053e-01 7.4276292e-01 ... 3.4200522e-01\n", - " 1.5034772e-02 4.0337229e-01]\n", - " [-8.7386459e-01 -3.1389427e-01 4.1987866e-01 ... 3.7723729e-01\n", - " -1.4352810e-01 -1.0023664e+00]\n", - " [-4.3505096e-01 3.4504786e-02 -2.8710306e-01 ... 7.7274129e-02\n", - " -1.1672243e+00 -2.6848501e-01]\n", - " ...\n", - " [ 4.2471480e-01 5.8885634e-01 2.0203922e-02 ... 3.7405500e-01\n", - " 4.5470044e-02 -3.7139410e-01]\n", - " [-3.7978446e-01 -8.1084180e-01 7.5725085e-01 ... 
2.6038891e-01\n", - " -7.9347193e-04 4.2537671e-01]\n", - " [-3.8279903e-01 -8.1206715e-01 7.4943429e-01 ... 2.6173013e-01\n", - " -1.0499060e-03 4.2678756e-01]]\n" - ] - } - ], - "source": [ - "data = np.load(\".notebook/decoder.npz\", allow_pickle=True)\n", - "torch_decoder_out = data['decoder_out']\n", - "print(torch_decoder_out[0])" - ] - }, - { - "cell_type": "code", - "execution_count": 180, - "id": "several-result", - "metadata": {}, - "outputs": [], - "source": [ - "def add_sos_eos(ys_pad: paddle.Tensor, sos: int, eos: int,\n", - " ignore_id: int):\n", - " \"\"\"Add <sos> and <eos> labels.\n", - " Args:\n", - " ys_pad (paddle.Tensor): batch of padded target sequences (B, Lmax)\n", - " sos (int): index of <sos>\n", - " eos (int): index of <eos>\n", - " ignore_id (int): index of padding\n", - " Returns:\n", - " ys_in (paddle.Tensor) : (B, Lmax + 1)\n", - " ys_out (paddle.Tensor) : (B, Lmax + 1)\n", - " Examples:\n", - " >>> sos_id = 10\n", - " >>> eos_id = 11\n", - " >>> ignore_id = -1\n", - " >>> ys_pad\n", - " tensor([[ 1, 2, 3, 4, 5],\n", - " [ 4, 5, 6, -1, -1],\n", - " [ 7, 8, 9, -1, -1]], dtype=paddle.int32)\n", - " >>> ys_in,ys_out=add_sos_eos(ys_pad, sos_id, eos_id, ignore_id)\n", - " >>> ys_in\n", - " tensor([[10, 1, 2, 3, 4, 5],\n", - " [10, 4, 5, 6, 11, 11],\n", - " [10, 7, 8, 9, 11, 11]])\n", - " >>> ys_out\n", - " tensor([[ 1, 2, 3, 4, 5, 11],\n", - " [ 4, 5, 6, 11, -1, -1],\n", - " [ 7, 8, 9, 11, -1, -1]])\n", - " \"\"\"\n", - " # TODO(Hui Zhang): use the commented code, \n", - " #_sos = paddle.to_tensor(\n", - " # [sos], dtype=paddle.long, stop_gradient=True, place=ys_pad.place)\n", - " #_eos = paddle.to_tensor(\n", - " # [eos], dtype=paddle.long, stop_gradient=True, place=ys_pad.place)\n", - " #ys = [y[y != ignore_id] for y in ys_pad] # parse padded ys\n", - " #ys_in = [paddle.cat([_sos, y], dim=0) for y in ys]\n", - " #ys_out = [paddle.cat([y, _eos], dim=0) for y in ys]\n", - " #return pad_sequence(ys_in, padding_value=eos), pad_sequence(ys_out, padding_value=ignore_id)\n", - " B = ys_pad.size(0)\n", - " _sos = paddle.ones([B, 1], dtype=ys_pad.dtype) * sos\n", - " _eos = paddle.ones([B, 1], dtype=ys_pad.dtype) * eos\n", - " ys_in = paddle.cat([_sos, ys_pad], dim=1)\n", - " mask_pad = (ys_in == ignore_id)\n", - " ys_in = ys_in.masked_fill(mask_pad, eos)\n", - " \n", - "\n", - " ys_out = paddle.cat([ys_pad, _eos], dim=1)\n", - " ys_out = ys_out.masked_fill(mask_pad, eos)\n", - " mask_eos = (ys_out == ignore_id)\n", - " ys_out = ys_out.masked_fill(mask_eos, eos)\n", - " ys_out = ys_out.masked_fill(mask_pad, ignore_id)\n", - " return ys_in, ys_out" - ] - }, - { - "cell_type": "code", - "execution_count": 181, - "id": "possible-bulgaria", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[16, 7], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [[4232, 2995, 3116, 1209, 565 , 4232, 4232],\n", - " [4232, 236 , 1176, 331 , 66 , 3925, 4077],\n", - " [4232, 2693, 524 , 234 , 1145, 366 , 4232],\n", - " [4232, 3875, 4211, 3062, 700 , 4232, 4232],\n", - " [4232, 272 , 987 , 1134, 494 , 2959, 4232],\n", - " [4232, 1936, 3715, 120 , 2553, 2695, 2710],\n", - " [4232, 25 , 1149, 3930, 4232, 4232, 4232],\n", - " [4232, 1753, 1778, 1237, 482 , 3925, 110 ],\n", - " [4232, 3703, 2 , 565 , 3827, 4232, 4232],\n", - " [4232, 1150, 2734, 10 , 2478, 3490, 4232],\n", - " [4232, 426 , 811 , 95 , 489 , 144 , 4232],\n", - " [4232, 2313, 2006, 489 , 975 , 4232, 4232],\n", - " [4232, 3702, 3414, 205 , 1488, 2966, 1347],\n", - " [4232, 70 , 1741, 702
, 1666, 4232, 4232],\n", - " [4232, 703 , 1778, 1030, 849 , 4232, 4232],\n", - " [4232, 814 , 1674, 115 , 3827, 4232, 4232]])\n", - "Tensor(shape=[16, 7], dtype=int64, place=CUDAPlace(0), stop_gradient=True,\n", - " [[2995, 3116, 1209, 565, 4232, -1 , -1 ],\n", - " [ 236, 1176, 331, 66 , 3925, 4077, 4232],\n", - " [2693, 524, 234, 1145, 366, 4232, -1 ],\n", - " [3875, 4211, 3062, 700, 4232, -1 , -1 ],\n", - " [ 272, 987, 1134, 494, 2959, 4232, -1 ],\n", - " [1936, 3715, 120, 2553, 2695, 2710, 4232],\n", - " [ 25 , 1149, 3930, 4232, -1 , -1 , -1 ],\n", - " [1753, 1778, 1237, 482, 3925, 110, 4232],\n", - " [3703, 2 , 565, 3827, 4232, -1 , -1 ],\n", - " [1150, 2734, 10 , 2478, 3490, 4232, -1 ],\n", - " [ 426, 811, 95 , 489, 144, 4232, -1 ],\n", - " [2313, 2006, 489, 975, 4232, -1 , -1 ],\n", - " [3702, 3414, 205, 1488, 2966, 1347, 4232],\n", - " [ 70 , 1741, 702, 1666, 4232, -1 , -1 ],\n", - " [ 703, 1778, 1030, 849, 4232, -1 , -1 ],\n", - " [ 814, 1674, 115, 3827, 4232, -1 , -1 ]])\n" - ] - } - ], - "source": [ - "ys_pad = text\n", - "ys_pad_lens = text_len\n", - "ys_in_pad, ys_out_pad = add_sos_eos(ys_pad, model.sos, model.eos,\n", - " model.ignore_id)\n", - "ys_in_lens = ys_pad_lens + 1\n", - "print(ys_in_pad)\n", - "print(ys_out_pad)" - ] - }, - { - "cell_type": "code", - "execution_count": 285, - "id": "north-walter", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "False\n", - "True\n", - "False\n", - "[[-3.76389682e-01 -8.22720408e-01 7.42762923e-01 ... 3.42005253e-01\n", - " 1.50350705e-02 4.03372347e-01]\n", - " [-8.73864174e-01 -3.13894272e-01 4.19878662e-01 ... 3.77237231e-01\n", - " -1.43528014e-01 -1.00236630e+00]\n", - " [-4.35050905e-01 3.45046446e-02 -2.87102997e-01 ... 7.72742853e-02\n", - " -1.16722476e+00 -2.68485069e-01]\n", - " ...\n", - " [ 4.24714804e-01 5.88856399e-01 2.02039629e-02 ... 3.74054879e-01\n", - " 4.54700664e-02 -3.71394157e-01]\n", - " [-3.79784584e-01 -8.10841978e-01 7.57250786e-01 ... 2.60389000e-01\n", - " -7.93404877e-04 4.25376773e-01]\n", - " [-3.82798851e-01 -8.12067091e-01 7.49434292e-01 ... 2.61730075e-01\n", - " -1.04988366e-03 4.26787734e-01]]\n", - "---\n", - "[[-3.7638968e-01 -8.2272053e-01 7.4276292e-01 ... 3.4200522e-01\n", - " 1.5034772e-02 4.0337229e-01]\n", - " [-8.7386459e-01 -3.1389427e-01 4.1987866e-01 ... 3.7723729e-01\n", - " -1.4352810e-01 -1.0023664e+00]\n", - " [-4.3505096e-01 3.4504786e-02 -2.8710306e-01 ... 7.7274129e-02\n", - " -1.1672243e+00 -2.6848501e-01]\n", - " ...\n", - " [ 4.2471480e-01 5.8885634e-01 2.0203922e-02 ... 3.7405500e-01\n", - " 4.5470044e-02 -3.7139410e-01]\n", - " [-3.7978446e-01 -8.1084180e-01 7.5725085e-01 ... 2.6038891e-01\n", - " -7.9347193e-04 4.2537671e-01]\n", - " [-3.8279903e-01 -8.1206715e-01 7.4943429e-01 ... 
2.6173013e-01\n", - " -1.0499060e-03 4.2678756e-01]]\n" - ] - } - ], - "source": [ - "decoder_out, _ = model.decoder(encoder_out, encoder_mask, ys_in_pad,\n", - " ys_in_lens)\n", - "\n", - "print(np.allclose(decoder_out.numpy(), torch_decoder_out))\n", - "print(np.allclose(decoder_out.numpy(), torch_decoder_out, atol=1e-6))\n", - "print(np.allclose(decoder_out.numpy(), torch_decoder_out, atol=1e-7))\n", - "print(decoder_out.numpy()[0])\n", - "print('---')\n", - "print(torch_decoder_out[0])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "armed-cowboy", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fifty-earth", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "proud-commonwealth", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 183, - "id": "assisted-fortune", - "metadata": {}, - "outputs": [], - "source": [ - "from paddle import nn\n", - "import paddle\n", - "from paddle.nn import functional as F\n", - "\n", - "class LabelSmoothingLoss(nn.Layer):\n", - "\n", - " def __init__(self,\n", - " size: int,\n", - " padding_idx: int,\n", - " smoothing: float,\n", - " normalize_length: bool=False):\n", - " super().__init__()\n", - " self.size = size\n", - " self.padding_idx = padding_idx\n", - " self.smoothing = smoothing\n", - " self.confidence = 1.0 - smoothing\n", - " self.normalize_length = normalize_length\n", - " self.criterion = nn.KLDivLoss(reduction=\"none\")\n", - "\n", - " def forward(self, x: paddle.Tensor, target: paddle.Tensor) -> paddle.Tensor:\n", - " \"\"\"Compute loss between x and target.\n", - " The model output and data label tensors are flattened to\n", - " (batch*seqlen, class) shape and a mask is applied to the\n", - " padding part, which should not contribute to the loss.\n", - " \n", - " Args:\n", - " x (paddle.Tensor): prediction (batch, seqlen, class)\n", - " target (paddle.Tensor):\n", - " target signal masked with self.padding_id (batch, seqlen)\n", - " Returns:\n", - " loss (paddle.Tensor) : The KL loss, scalar float value\n", - " \"\"\"\n", - " B, T, D = paddle.shape(x)\n", - " assert D == self.size\n", - " x = x.reshape((-1, self.size))\n", - " target = target.reshape([-1])\n", - "\n", - " # use full_like instead of torch.no_grad() for true_dist,\n", - " # since no_grad() can not be exported by JIT\n", - " true_dist = paddle.full_like(x, self.smoothing / (self.size - 1))\n", - " ignore = target == self.padding_idx # (B*T,)\n", - " print(self.smoothing / (self.size - 1))\n", - " print(true_dist)\n", - "\n", - " #target = target * (1 - ignore) # avoid -1 index\n", - " target = target.masked_fill(ignore, 0) # avoid -1 index\n", - " \n", - " \n", - " #true_dist += F.one_hot(target, self.size) * self.confidence\n", - " target_mask = F.one_hot(target, self.size)\n", - " true_dist *= (1 - target_mask)\n", - " true_dist += target_mask * self.confidence\n", - " \n", - "\n", - " kl = self.criterion(F.log_softmax(x, axis=1), true_dist)\n", - " \n", - " # TODO(Hui Zhang): sum does not support bool type\n", - " #total = len(target) - int(ignore.sum())\n", - " total = len(target) - int(ignore.type_as(target).sum())\n", - " denom = total if self.normalize_length else B\n", - "\n", - " #numer = (kl * (1 - ignore)).sum()\n", - " numer = kl.masked_fill(ignore.unsqueeze(1), 0).sum()\n", - " return numer / denom\n" - ] - }, - { - "cell_type": "code", - "execution_count": 184, -
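A quick arithmetic check on the constants LabelSmoothingLoss prints: with `size=4233` and `smoothing=0.1`, every non-target class receives `smoothing / (size - 1) ≈ 2.36295e-05` (the value printed by the next cell) and the target keeps `confidence = 1 - smoothing = 0.9`, so each smoothed row still sums to 1. A minimal numpy sketch of that bookkeeping, added here for illustration (the target id `123` is arbitrary, not from this notebook):

```python
# Hedged sanity check of the label-smoothing arithmetic (not from the notebook).
import numpy as np

size, smoothing = 4233, 0.1
off_target = smoothing / (size - 1)
print(off_target)  # ~2.36295e-05, matching the constant printed below

row = np.full(size, off_target)
row[123] = 1.0 - smoothing  # an arbitrary target id keeps the confidence mass
print(np.isclose(row.sum(), 1.0))  # True: the smoothed row is a distribution
```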
"id": "weighted-delight", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.3629489603024576e-05\n", - "Tensor(shape=[112, 4233], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[0.00002363, 0.00002363, 0.00002363, ..., 0.00002363, 0.00002363, 0.00002363],\n", - " [0.00002363, 0.00002363, 0.00002363, ..., 0.00002363, 0.00002363, 0.00002363],\n", - " [0.00002363, 0.00002363, 0.00002363, ..., 0.00002363, 0.00002363, 0.00002363],\n", - " ...,\n", - " [0.00002363, 0.00002363, 0.00002363, ..., 0.00002363, 0.00002363, 0.00002363],\n", - " [0.00002363, 0.00002363, 0.00002363, ..., 0.00002363, 0.00002363, 0.00002363],\n", - " [0.00002363, 0.00002363, 0.00002363, ..., 0.00002363, 0.00002363, 0.00002363]])\n", - "Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [41.84146118])\n", - "VarType.INT64\n" - ] - } - ], - "source": [ - "criteron = LabelSmoothingLoss(4233, -1, 0.1, False)\n", - "loss_att = criteron(paddle.to_tensor(torch_decoder_out), ys_out_pad.astype('int64'))\n", - "print(loss_att)\n", - "print(ys_out_pad.dtype)\n", - "# tensor(41.8416, device='cuda:0', grad_fn=)" - ] - }, - { - "cell_type": "code", - "execution_count": 286, - "id": "dress-shelter", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [41.84146118])\n", - "Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [41.84146118])\n", - "4233\n", - "-1\n", - "0.1\n", - "False\n" - ] - } - ], - "source": [ - "decoder_out, _ = model.decoder(encoder_out, encoder_mask, ys_in_pad,\n", - " ys_in_lens)\n", - "\n", - "loss_att = model.criterion_att(paddle.to_tensor(torch_decoder_out), ys_out_pad)\n", - "print(loss_att)\n", - "\n", - "loss_att = model.criterion_att(decoder_out, ys_out_pad)\n", - "print(loss_att)\n", - "\n", - "print(model.criterion_att.size)\n", - "print(model.criterion_att.padding_idx)\n", - "print(model.criterion_att.smoothing)\n", - "print(model.criterion_att.normalize_length)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "growing-tooth", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "going-hungary", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "naughty-citizenship", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "experimental-emerald", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "adverse-saskatchewan", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 27, - "id": "speaking-shelf", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import List\n", - "from typing import Optional\n", - "from typing import Tuple\n", - "\n", - "import paddle\n", - "from paddle import nn\n", - "from typeguard import check_argument_types\n", - "\n", - "from deepspeech.modules.activation import get_activation\n", - "from deepspeech.modules.attention import MultiHeadedAttention\n", - "from deepspeech.modules.attention import RelPositionMultiHeadedAttention\n", - "from deepspeech.modules.conformer_convolution import ConvolutionModule\n", - "from deepspeech.modules.embedding import PositionalEncoding\n", - "from 
deepspeech.modules.embedding import RelPositionalEncoding\n", - "from deepspeech.modules.encoder_layer import ConformerEncoderLayer\n", - "from deepspeech.modules.encoder_layer import TransformerEncoderLayer\n", - "from deepspeech.modules.mask import add_optional_chunk_mask\n", - "from deepspeech.modules.mask import make_non_pad_mask\n", - "from deepspeech.modules.positionwise_feed_forward import PositionwiseFeedForward\n", - "from deepspeech.modules.subsampling import Conv2dSubsampling4\n", - "from deepspeech.modules.subsampling import Conv2dSubsampling6\n", - "from deepspeech.modules.subsampling import Conv2dSubsampling8\n", - "from deepspeech.modules.subsampling import LinearNoSubsampling\n", - "\n", - "class BaseEncoder(nn.Layer):\n", - " def __init__(\n", - " self,\n", - " input_size: int,\n", - " output_size: int=256,\n", - " attention_heads: int=4,\n", - " linear_units: int=2048,\n", - " num_blocks: int=6,\n", - " dropout_rate: float=0.1,\n", - " positional_dropout_rate: float=0.1,\n", - " attention_dropout_rate: float=0.0,\n", - " input_layer: str=\"conv2d\",\n", - " pos_enc_layer_type: str=\"abs_pos\",\n", - " normalize_before: bool=True,\n", - " concat_after: bool=False,\n", - " static_chunk_size: int=0,\n", - " use_dynamic_chunk: bool=False,\n", - " global_cmvn: paddle.nn.Layer=None,\n", - " use_dynamic_left_chunk: bool=False, ):\n", - " \"\"\"\n", - " Args:\n", - " input_size (int): input dim, d_feature\n", - " output_size (int): dimension of attention, d_model\n", - " attention_heads (int): the number of heads of multi head attention\n", - " linear_units (int): the hidden units number of position-wise feed\n", - " forward\n", - " num_blocks (int): the number of encoder blocks\n", - " dropout_rate (float): dropout rate\n", - " attention_dropout_rate (float): dropout rate in attention\n", - " positional_dropout_rate (float): dropout rate after adding\n", - " positional encoding\n", - " input_layer (str): input layer type.\n", - " optional [linear, conv2d, conv2d6, conv2d8]\n", - " pos_enc_layer_type (str): Encoder positional encoding layer type.\n", - " optional [abs_pos, scaled_abs_pos, rel_pos]\n", - " normalize_before (bool):\n", - " True: use layer_norm before each sub-block of a layer.\n", - " False: use layer_norm after each sub-block of a layer.\n", - " concat_after (bool): whether to concat attention layer's input\n", - " and output.\n", - " True: x -> x + linear(concat(x, att(x)))\n", - " False: x -> x + att(x)\n", - " static_chunk_size (int): chunk size for static chunk training and\n", - " decoding\n", - " use_dynamic_chunk (bool): whether to use dynamic chunk size for\n", - " training or not. You can only use a fixed chunk (chunk_size > 0)\n", - " or a dynamic chunk size (use_dynamic_chunk = True)\n", - " global_cmvn (Optional[paddle.nn.Layer]): Optional GlobalCMVN layer\n", - " use_dynamic_left_chunk (bool): whether to use dynamic left chunk in\n", - " dynamic chunk training\n", - " \"\"\"\n", - " assert check_argument_types()\n", - " super().__init__()\n", - " self._output_size = output_size\n", - "\n", - " if pos_enc_layer_type == \"abs_pos\":\n", - " pos_enc_class = PositionalEncoding\n", - " elif pos_enc_layer_type == \"rel_pos\":\n", - " pos_enc_class = RelPositionalEncoding\n", - " else:\n", - " raise ValueError(\"unknown pos_enc_layer: \" + pos_enc_layer_type)\n", - "\n", - " if input_layer == \"linear\":\n", - " subsampling_class = LinearNoSubsampling\n", - " elif input_layer == \"conv2d\":\n", - " subsampling_class = Conv2dSubsampling4\n", - " elif input_layer == 
\"conv2d6\":\n", - " subsampling_class = Conv2dSubsampling6\n", - " elif input_layer == \"conv2d8\":\n", - " subsampling_class = Conv2dSubsampling8\n", - " else:\n", - " raise ValueError(\"unknown input_layer: \" + input_layer)\n", - "\n", - " self.global_cmvn = global_cmvn\n", - " self.embed = subsampling_class(\n", - " idim=input_size,\n", - " odim=output_size,\n", - " dropout_rate=dropout_rate,\n", - " pos_enc_class=pos_enc_class(\n", - " d_model=output_size, dropout_rate=positional_dropout_rate), )\n", - "\n", - " self.normalize_before = normalize_before\n", - " self.after_norm = nn.LayerNorm(output_size, epsilon=1e-12)\n", - " self.static_chunk_size = static_chunk_size\n", - " self.use_dynamic_chunk = use_dynamic_chunk\n", - " self.use_dynamic_left_chunk = use_dynamic_left_chunk\n", - "\n", - " def output_size(self) -> int:\n", - " return self._output_size\n", - "\n", - " def forward(\n", - " self,\n", - " xs: paddle.Tensor,\n", - " xs_lens: paddle.Tensor,\n", - " decoding_chunk_size: int=0,\n", - " num_decoding_left_chunks: int=-1,\n", - " ) -> Tuple[paddle.Tensor, paddle.Tensor]:\n", - " \"\"\"Embed positions in tensor.\n", - " Args:\n", - " xs: padded input tensor (B, L, D)\n", - " xs_lens: input length (B)\n", - " decoding_chunk_size: decoding chunk size for dynamic chunk\n", - " 0: default for training, use random dynamic chunk.\n", - " <0: for decoding, use full chunk.\n", - " >0: for decoding, use fixed chunk size as set.\n", - " num_decoding_left_chunks: number of left chunks, this is for decoding,\n", - " the chunk size is decoding_chunk_size.\n", - " >=0: use num_decoding_left_chunks\n", - " <0: use all left chunks\n", - " Returns:\n", - " encoder output tensor, lens and mask\n", - " \"\"\"\n", - " masks = make_non_pad_mask(xs_lens).unsqueeze(1) # (B, 1, L)\n", - "\n", - " if self.global_cmvn is not None:\n", - " xs = self.global_cmvn(xs)\n", - " #TODO(Hui Zhang): self.embed(xs, masks, offset=0), stride_slice not support bool tensor\n", - " xs, pos_emb, masks = self.embed(xs, masks.type_as(xs), offset=0)\n", - " #TODO(Hui Zhang): remove mask.astype, stride_slice not support bool tensor\n", - " masks = masks.astype(paddle.bool)\n", - " #TODO(Hui Zhang): mask_pad = ~masks\n", - " mask_pad = masks.logical_not()\n", - " chunk_masks = add_optional_chunk_mask(\n", - " xs, masks, self.use_dynamic_chunk, self.use_dynamic_left_chunk,\n", - " decoding_chunk_size, self.static_chunk_size,\n", - " num_decoding_left_chunks)\n", - " for layer in self.encoders:\n", - " xs, chunk_masks, _ = layer(xs, chunk_masks, pos_emb, mask_pad)\n", - " if self.normalize_before:\n", - " xs = self.after_norm(xs)\n", - " # Here we assume the mask is not changed in encoder layers, so just\n", - " # return the masks before encoder layers, and the masks will be used\n", - " # for cross attention with decoder later\n", - " return xs, masks" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "id": "sharp-municipality", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "class ConformerEncoder(BaseEncoder):\n", - " \"\"\"Conformer encoder module.\"\"\"\n", - "\n", - " def __init__(\n", - " self,\n", - " input_size: int,\n", - " output_size: int=256,\n", - " attention_heads: int=4,\n", - " linear_units: int=2048,\n", - " num_blocks: int=6,\n", - " dropout_rate: float=0.1,\n", - " positional_dropout_rate: float=0.1,\n", - " attention_dropout_rate: float=0.0,\n", - " input_layer: str=\"conv2d\",\n", - " pos_enc_layer_type: str=\"rel_pos\",\n", - " normalize_before: bool=True,\n", - " 
concat_after: bool=False,\n", - " static_chunk_size: int=0,\n", - " use_dynamic_chunk: bool=False,\n", - " global_cmvn: nn.Layer=None,\n", - " use_dynamic_left_chunk: bool=False,\n", - " positionwise_conv_kernel_size: int=1,\n", - " macaron_style: bool=True,\n", - " selfattention_layer_type: str=\"rel_selfattn\",\n", - " activation_type: str=\"swish\",\n", - " use_cnn_module: bool=True,\n", - " cnn_module_kernel: int=15,\n", - " causal: bool=False,\n", - " cnn_module_norm: str=\"batch_norm\", ):\n", - " \"\"\"Construct ConformerEncoder\n", - " Args:\n", - " input_size to use_dynamic_chunk: see BaseEncoder\n", - " positionwise_conv_kernel_size (int): Kernel size of positionwise\n", - " conv1d layer.\n", - " macaron_style (bool): Whether to use macaron style for\n", - " positionwise layer.\n", - " selfattention_layer_type (str): Encoder attention layer type;\n", - " the parameter has no effect now, it is only kept for\n", - " configuration compatibility.\n", - " activation_type (str): Encoder activation function type.\n", - " use_cnn_module (bool): Whether to use convolution module.\n", - " cnn_module_kernel (int): Kernel size of convolution module.\n", - " causal (bool): whether to use causal convolution or not.\n", - " cnn_module_norm (str): cnn conv norm type, Optional['batch_norm','layer_norm']\n", - " \"\"\"\n", - " assert check_argument_types()\n", - " super().__init__(input_size, output_size, attention_heads, linear_units,\n", - " num_blocks, dropout_rate, positional_dropout_rate,\n", - " attention_dropout_rate, input_layer,\n", - " pos_enc_layer_type, normalize_before, concat_after,\n", - " static_chunk_size, use_dynamic_chunk, global_cmvn,\n", - " use_dynamic_left_chunk)\n", - " activation = get_activation(activation_type)\n", - "\n", - " # self-attention module definition\n", - " encoder_selfattn_layer = RelPositionMultiHeadedAttention\n", - " encoder_selfattn_layer_args = (attention_heads, output_size,\n", - " attention_dropout_rate)\n", - " # feed-forward module definition\n", - " positionwise_layer = PositionwiseFeedForward\n", - " positionwise_layer_args = (output_size, linear_units, dropout_rate,\n", - " activation)\n", - " # convolution module definition\n", - " convolution_layer = ConvolutionModule\n", - " convolution_layer_args = (output_size, cnn_module_kernel, activation,\n", - " cnn_module_norm, causal)\n", - "\n", - " self.encoders = nn.ModuleList([\n", - " ConformerEncoderLayer(\n", - " size=output_size,\n", - " self_attn=encoder_selfattn_layer(*encoder_selfattn_layer_args),\n", - " feed_forward=positionwise_layer(*positionwise_layer_args),\n", - " feed_forward_macaron=positionwise_layer(\n", - " *positionwise_layer_args) if macaron_style else None,\n", - " conv_module=convolution_layer(*convolution_layer_args)\n", - " if use_cnn_module else None,\n", - " dropout_rate=dropout_rate,\n", - " normalize_before=normalize_before,\n", - " concat_after=concat_after) for _ in range(num_blocks)\n", - " ])\n" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "id": "tutorial-syndication", - "metadata": {}, - "outputs": [], - "source": [ - "from deepspeech.frontend.utility import load_cmvn\n", - "from deepspeech.modules.cmvn import GlobalCMVN\n", - "\n", - "configs=cfg.model\n", - "mean, istd = load_cmvn(configs['cmvn_file'],\n", - " configs['cmvn_file_type'])\n", - "global_cmvn = GlobalCMVN(\n", - " paddle.to_tensor(mean, dtype=paddle.float),\n", - " paddle.to_tensor(istd, dtype=paddle.float))\n", - "\n", - "\n", - "input_dim = configs['input_dim']\n", - "vocab_size = 
configs['output_dim']\n", - "encoder_type = configs.get('encoder', 'transformer')\n", - " \n", - "encoder = ConformerEncoder(\n", - " input_dim, global_cmvn=global_cmvn, **configs['encoder_conf'])" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "fuzzy-register", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n" - ] - } - ], - "source": [ - "o = global_cmvn(feat)\n", - "o2 = model.encoder.global_cmvn(feat)\n", - "print(np.allclose(o.numpy(), o2.numpy()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "explicit-triumph", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "humanitarian-belgium", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dying-proposal", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "honest-quick", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bound-cholesterol", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "viral-packaging", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 203, - "id": "balanced-locator", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[16, 1, 207], dtype=bool, place=CUDAPlace(0), stop_gradient=True,\n", - " [[[True , True , True , ..., True , True , True ]],\n", - "\n", - " [[True , True , True , ..., True , True , True ]],\n", - "\n", - " [[True , True , True , ..., True , False, False]],\n", - "\n", - " ...,\n", - "\n", - " [[True , True , True , ..., False, False, False]],\n", - "\n", - " [[True , True , True , ..., False, False, False]],\n", - "\n", - " [[True , True , True , ..., False, False, False]]])\n" - ] - } - ], - "source": [ - "from deepspeech.modules.mask import make_non_pad_mask\n", - "from deepspeech.modules.mask import make_pad_mask\n", - "masks = make_non_pad_mask(feat_len).unsqueeze(1)\n", - "print(masks)" - ] - }, - { - "cell_type": "code", - "execution_count": 204, - "id": "induced-proposition", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tensor(shape=[16, 207, 80], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n", - " [[[-0.53697914, -0.19910523, -0.34997201, ..., -0.82427669, -1.02650309, -0.96300691],\n", - " [-0.04464225, 0.23176001, -0.32538742, ..., -0.90158713, -1.03248465, -0.75986791],\n", - " [ 0.50035292, 0.22691160, -0.73052198, ..., -1.00552964, -0.87123060, -1.03062117],\n", - " ...,\n", - " [-0.40023831, -0.14325078, -0.57947433, ..., -1.07178426, -1.28059900, -1.05180073],\n", - " [ 0.15755332, -0.00184949, -0.28702953, ..., -1.10898709, -0.94518697, -0.72506356],\n", - " [-0.47520429, -1.39415145, -0.25754252, ..., -1.13649082, -1.19430351, -1.22903371]],\n", - "\n", - " [[ 0.95454037, 0.36427975, -1.38908529, ..., -1.16366839, -1.28453600, -1.20151031],\n", - " [-0.08573537, -1.05785275, -0.89172721, ..., -0.96440506, -1.12547100, -1.25990939],\n", - " [ 0.47653601, 0.32886592, -0.59200549, ..., -1.19421589, -1.14302588, -1.02422845],\n", - " ...,\n", - " [-0.47431335, -0.33558893, -0.72325647, ..., -1.45058632, -1.39574063, -1.04641151],\n", - " [ 0.36112556, 0.10380996, -1.15994537, ..., 
- {
- "cell_type": "code",
- "execution_count": 205,
- "id": "cutting-julian",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Tensor(shape=[16, 256, 51, 19], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n",
- "       [[[[0.        , 0.        , 0.        , ..., 0.        , 0.        , 0.        ],\n",
- "        ...,\n",
- "        [1.44883108, 1.02119160, 0.94472742, ..., 1.23630035, 1.21888959, 1.23804700]]]])\n"
- ]
- }
- ],
- "source": [
- "xs = model.encoder.global_cmvn(feat)\n",
- "masks = make_non_pad_mask(feat_len).unsqueeze(1)\n",
- "\n",
- "#xs, pos_emb, masks = model.encoder.embed(xs, masks.type_as(xs), offset=0)\n",
- "# print(xs)\n",
- "\n",
- "x = xs.unsqueeze(1)\n",
- "x = model.encoder.embed.conv(x)\n",
- "print(x)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 206,
- "id": "friendly-nightlife",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Tensor(shape=[16, 51, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n",
- "       [[[-0.03426375, 0.14291267, -0.06718873, ..., 0.09064753, 0.01809387, -0.04340880],\n",
- "        ...,\n",
- "        [-0.31763062, 0.53700209, -0.26335421, ..., 0.39182857, 0.00337184, -0.18293698]]])\n",
- "Tensor(shape=[16, 51, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n",
- "       [[[-0.54821998, 2.28660274, -1.07501972, ..., 1.45036042, 0.28950194, -0.69454080],\n",
- "        ...,\n",
- "        [-5.08208990, 8.59203339, -4.21366739, ..., 6.26925707, 0.05394945, -2.92699170]]])\n",
- "Tensor(shape=[1, 51, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n",
- "       [[[ 0.        ,  1.        ,  0.        , ...,  1.        ,  0.        ,  1.        ],\n",
- "        [ 0.84147102,  0.54030228,  0.80196184, ...,  1.        ,  0.00010746,  1.        ],\n",
- "        ...,\n",
- "        [-0.26237485,  0.96496606,  0.56074661, ...,  0.99998331,  0.00537301,  0.99998558]]])\n"
- ]
- }
- ],
- "source": [
- "b, c, t, f = paddle.shape(x)\n",
- "x = model.encoder.embed.out(x.transpose([0, 2, 1, 3]).reshape([b, t, c * f]))\n",
- "print(x)\n",
- "x, pos_emb = model.encoder.embed.pos_enc(x, 0)\n",
- "print(x)\n",
- "print(pos_emb)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 207,
- "id": "guilty-cache",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Tensor(shape=[1, 51, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=True,\n",
- "       [[[ 0.        ,  1.        ,  0.        , ...,  1.        ,  0.        ,  1.        ],\n",
- "        ...,\n",
- "        [-0.26237485,  0.96496606,  0.56074661, ...,  0.99998331,  0.00537301,  0.99998558]]])\n"
- ]
- }
- ],
- "source": [
- "print(pos_emb)"
- ]
- },
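- {
- "cell_type": "markdown",
- "id": "pos-enc-formula-note",
- "metadata": {},
- "source": [
- "For reference, the `pos_enc.pe` table checked next is the standard sinusoidal encoding with $d_{model} = 256$:\n",
- "\n",
- "$$PE(pos, 2i) = \\sin\\big(pos / 10000^{2i/d_{model}}\\big), \\qquad PE(pos, 2i+1) = \\cos\\big(pos / 10000^{2i/d_{model}}\\big)$$\n",
- "\n",
- "The cell below rebuilds the same table in PyTorch and swaps it into the Paddle model, to rule the positional table out as a source of divergence."
- ]
- },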
- {
- "cell_type": "code",
- "execution_count": 208,
- "id": "iraqi-payday",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "[[[ 0.0000000e+00  1.0000000e+00  0.0000000e+00 ...  1.0000000e+00\n",
- "    0.0000000e+00  1.0000000e+00]\n",
- "  ...\n",
- "  [-6.6394955e-01 -7.4777740e-01  6.9544029e-01 ...  8.3795273e-01\n",
- "    5.1172924e-01  8.5914677e-01]]]\n",
- "[1, 5000, 256]\n"
- ]
- }
- ],
- "source": [
- "import torch\n",
- "import math\n",
- "import numpy as np\n",
- "\n",
- "max_len = 5000\n",
- "d_model = 256\n",
- "\n",
- "pe = torch.zeros(max_len, d_model)\n",
- "position = torch.arange(0, max_len,\n",
- "                        dtype=torch.float32).unsqueeze(1)\n",
- "torch_position = position\n",
- "div_term = torch.exp(\n",
- "    torch.arange(0, d_model, 2, dtype=torch.float32) *\n",
- "    -(math.log(10000.0) / d_model))\n",
- "torch_div_term = div_term.cpu().detach().numpy()\n",
- "\n",
- "torch_sin = torch.sin(position * div_term)\n",
- "torch_cos = torch.cos(position * div_term)\n",
- "\n",
- "np_sin = np.sin((position * div_term).cpu().detach().numpy())\n",
- "np_cos = np.cos((position * div_term).cpu().detach().numpy())\n",
- "pe[:, 0::2] = torch_sin\n",
- "pe[:, 1::2] = torch_cos\n",
- "pe = pe.unsqueeze(0)\n",
- "torch_pe = pe.cpu().detach().numpy()\n",
- "print(torch_pe)\n",
- "bak_pe = model.encoder.embed.pos_enc.pe\n",
- "print(bak_pe.shape)\n",
- "model.encoder.embed.pos_enc.pe = paddle.to_tensor(torch_pe)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 210,
- "id": "exempt-cloud",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "True\n",
- "True\n"
- ]
- }
- ],
- "source": [
- "xs = model.encoder.global_cmvn(feat)\n",
- "masks = make_non_pad_mask(feat_len).unsqueeze(1)\n",
- "\n",
- "xs, pos_emb, masks = model.encoder.embed(xs, masks.type_as(xs), offset=0)\n",
- "#print(xs)\n",
- "data = np.load(\".notebook/enc_embed.npz\")\n",
- "torch_pos_emb = data['pos_emb']\n",
- "torch_xs = data['embed_out']\n",
- "print(np.allclose(xs.numpy(), torch_xs))\n",
- "print(np.allclose(pos_emb.numpy(), torch_pos_emb))"
- ]
- },
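- {
- "cell_type": "code",
- "execution_count": null,
- "id": "max-abs-diff-sketch",
- "metadata": {},
- "outputs": [],
- "source": [
- "# Illustrative sketch (the helper name is ours, not part of the saved npz dumps):\n",
- "# np.allclose defaults to rtol=1e-05, atol=1e-08, so a False below can still be\n",
- "# float32 rounding noise; reporting the largest element-wise gap makes the\n",
- "# retries with atol=1e-6 / 1e-5 in the following cells easier to interpret.\n",
- "import numpy as np\n",
- "\n",
- "def max_abs_diff(a, b):\n",
- "    return float(np.max(np.abs(np.asarray(a) - np.asarray(b))))"
- ]
- },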
"np.testing.assert_equal(mask_pad.numpy(), ~torch_mask_pad)\n", - "\n", - "for layer in model.encoder.encoders:\n", - " #xs, chunk_masks, _ = layer(xs, chunk_masks, pos_emb, mask_pad)\n", - " print(layer.feed_forward_macaron is not None)\n", - " print(layer.normalize_before)\n", - " \n", - " data = np.load('.notebook/enc_0_norm_ff.npz')\n", - " t_norm_ff = data['norm_ff']\n", - " t_xs = data['xs']\n", - " \n", - " \n", - " x = xs\n", - " print(np.allclose(t_xs, x.numpy()))\n", - " residual = x\n", - " print(np.allclose(t_xs, residual.numpy()))\n", - " x_nrom = layer.norm_ff_macaron(x)\n", - " print(np.allclose(t.numpy(), x_nrom.numpy()))\n", - " print(np.allclose(t_norm_ff, x_nrom.numpy()))\n", - "# for n, p in layer.norm_ff_macaron.state_dict().items():\n", - "# print(n, p)\n", - "# pass\n", - "\n", - " layer.eval()\n", - " x_nrom = paddle.to_tensor(t_norm_ff)\n", - " print(np.allclose(t_norm_ff, x_nrom.numpy()))\n", - " x = residual + layer.ff_scale * layer.feed_forward_macaron(x_nrom)\n", - " \n", - " ps=[]\n", - " for n, p in layer.feed_forward_macaron.state_dict().items():\n", - " #print(n, p)\n", - " ps.append(p)\n", - " print(p.shape)\n", - " pass\n", - "\n", - " x_nrom = paddle.to_tensor(t_norm_ff)\n", - " ff_l_x = layer.feed_forward_macaron.w_1(x_nrom)\n", - " ff_l_a_x = layer.feed_forward_macaron.activation(ff_l_x)\n", - " ff_l_a_l_x = layer.feed_forward_macaron.w_2(ff_l_a_x)\n", - " data = np.load('.notebook/enc_0_ff_out.npz', allow_pickle=True)\n", - " t_norm_ff = data['norm_ff']\n", - " t_ff_out = data['ff_out']\n", - " t_ff_l_x = data['ff_l_x']\n", - " t_ff_l_a_x = data['ff_l_a_x']\n", - " t_ff_l_a_l_x = data['ff_l_a_l_x']\n", - " t_ps = data['ps']\n", - " \n", - " print(\"--------ff-------\")\n", - " print(np.allclose(x_nrom.numpy(), t_norm_ff))\n", - " print(np.allclose(x.numpy(), t_ff_out))\n", - " print(np.allclose(ff_l_x.numpy(), t_ff_l_x))\n", - " print(np.allclose(ff_l_a_x.numpy(), t_ff_l_a_x))\n", - " print(np.allclose(ff_l_a_l_x.numpy(), t_ff_l_a_l_x))\n", - " \n", - " print(np.allclose(ff_l_x.numpy(), t_ff_l_x, atol=1e-6))\n", - " for p, t_p in zip(ps, t_ps):\n", - " print(p.name, np.allclose(p.numpy(), t_p.T))\n", - " \n", - " \n", - "# residual = x\n", - "# x = layer.norm_mha(x)\n", - "# x_q = x\n", - " \n", - " data = np.load('.notebook/enc_0_selattn_out.npz', allow_pickle=True)\n", - " tx_q = data['x_q']\n", - " tx = data['x']\n", - " tpos_emb=data['pos_emb']\n", - " tmask=data['mask']\n", - " tt_x_att=data['x_att']\n", - " x_q = paddle.to_tensor(tx_q)\n", - " x = paddle.to_tensor(tx)\n", - " pos_emb = paddle.to_tensor(tpos_emb)\n", - " mask = paddle.to_tensor(tmask)\n", - " \n", - " x_att = layer.self_attn(x_q, x, x, pos_emb, mask)\n", - " print(np.allclose(x_att.numpy(), t_x_att))\n", - " print(np.allclose(x_att.numpy(), t_x_att, atol=1e-6))\n", - " \n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": 270, - "id": "sonic-thumb", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n", - "True\n", - "False\n", - "True\n" - ] - } - ], - "source": [ - "xs = model.encoder.global_cmvn(feat)\n", - "masks = make_non_pad_mask(feat_len).unsqueeze(1)\n", - "\n", - "xs, pos_emb, masks = model.encoder.embed(xs, masks.type_as(xs), offset=0)\n", - "masks = masks.astype(paddle.bool)\n", - "mask_pad = masks.logical_not()\n", - "decoding_chunk_size=0\n", - "num_decoding_left_chunks=-1\n", - "chunk_masks = add_optional_chunk_mask(\n", - " xs, masks, model.encoder.use_dynamic_chunk, 
- {
- "cell_type": "code",
- "execution_count": 270,
- "id": "sonic-thumb",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "True\n",
- "True\n",
- "False\n",
- "True\n"
- ]
- }
- ],
- "source": [
- "xs = model.encoder.global_cmvn(feat)\n",
- "masks = make_non_pad_mask(feat_len).unsqueeze(1)\n",
- "\n",
- "xs, pos_emb, masks = model.encoder.embed(xs, masks.type_as(xs), offset=0)\n",
- "masks = masks.astype(paddle.bool)\n",
- "mask_pad = masks.logical_not()\n",
- "decoding_chunk_size = 0\n",
- "num_decoding_left_chunks = -1\n",
- "chunk_masks = add_optional_chunk_mask(\n",
- "    xs, masks, model.encoder.use_dynamic_chunk, model.encoder.use_dynamic_left_chunk,\n",
- "    decoding_chunk_size, model.encoder.static_chunk_size,\n",
- "    num_decoding_left_chunks)\n",
- "\n",
- "#print(chunk_masks)\n",
- "data = np.load(\".notebook/enc_embed.npz\")\n",
- "torch_pos_emb = data['pos_emb']\n",
- "torch_xs = data['embed_out']\n",
- "torch_chunk_masks = data['chunk_masks']\n",
- "torch_mask_pad = data['mask_pad']\n",
- "print(np.allclose(xs.numpy(), torch_xs))\n",
- "print(np.allclose(pos_emb.numpy(), torch_pos_emb))\n",
- "np.testing.assert_equal(chunk_masks.numpy(), torch_chunk_masks)\n",
- "np.testing.assert_equal(mask_pad.numpy(), ~torch_mask_pad)\n",
- "\n",
- "\n",
- "for layer in model.encoder.encoders:\n",
- "    xs, chunk_masks, _ = layer(xs, chunk_masks, pos_emb, mask_pad)\n",
- "    break\n",
- "data = np.load('.notebook/enc_0.npz')\n",
- "torch_xs = data['enc_0']\n",
- "print(np.allclose(xs.numpy(), torch_xs))\n",
- "print(np.allclose(xs.numpy(), torch_xs, atol=1e-6))"
- ]
- },
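- {
- "cell_type": "markdown",
- "id": "chunk-mask-note",
- "metadata": {},
- "source": [
- "On `add_optional_chunk_mask`: in this WeNet-style setup it combines the non-pad mask with the (dynamic or static) chunked self-attention mask used for streaming, with the policy chosen by the model's `use_dynamic_chunk` / `static_chunk_size` settings. The `assert_equal` passing above means the Paddle and torch runs drew a bit-identical mask, so any remaining divergence is in the layers themselves."
- ]
- },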
- {
- "cell_type": "code",
- "execution_count": 273,
- "id": "brave-latino",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "True\n",
- "True\n",
- "--------layers_______\n",
- "False\n",
- "True\n",
- "[[-0.70194244  0.56254214  0.6880346  ...  1.1237319   0.7803924   1.1369387 ]\n",
- " ...\n",
- " [-1.2732092   0.7145806   0.75819594 ...  0.94154835  0.8774845   1.2623049 ]]\n",
- "xxxxxx\n",
- "[[-0.7019424   0.56254166  0.6880345  ...  1.1237322   0.78039217  1.1369387 ]\n",
- " ...\n",
- " [-1.273209    0.71458095  0.75819623 ...  0.9415484   0.8774842   1.2623055 ]]\n"
- ]
- }
- ],
- "source": [
- "xs = model.encoder.global_cmvn(feat)\n",
- "masks = make_non_pad_mask(feat_len).unsqueeze(1)\n",
- "\n",
- "xs, pos_emb, masks = model.encoder.embed(xs, masks.type_as(xs), offset=0)\n",
- "masks = masks.astype(paddle.bool)\n",
- "mask_pad = masks.logical_not()\n",
- "decoding_chunk_size = 0\n",
- "num_decoding_left_chunks = -1\n",
- "chunk_masks = add_optional_chunk_mask(\n",
- "    xs, masks, model.encoder.use_dynamic_chunk, model.encoder.use_dynamic_left_chunk,\n",
- "    decoding_chunk_size, model.encoder.static_chunk_size,\n",
- "    num_decoding_left_chunks)\n",
- "\n",
- "#print(chunk_masks)\n",
- "data = np.load(\".notebook/enc_embed.npz\")\n",
- "torch_pos_emb = data['pos_emb']\n",
- "torch_xs = data['embed_out']\n",
- "torch_chunk_masks = data['chunk_masks']\n",
- "torch_mask_pad = data['mask_pad']\n",
- "print(np.allclose(xs.numpy(), torch_xs))\n",
- "print(np.allclose(pos_emb.numpy(), torch_pos_emb))\n",
- "np.testing.assert_equal(chunk_masks.numpy(), torch_chunk_masks)\n",
- "np.testing.assert_equal(mask_pad.numpy(), ~torch_mask_pad)\n",
- "\n",
- "print(\"--------layers_______\")\n",
- "i = 0\n",
- "for layer in model.encoder.encoders:\n",
- "    xs, chunk_masks, _ = layer(xs, chunk_masks, pos_emb, mask_pad)\n",
- "    i += 1\n",
- "# if i == 2:\n",
- "#     data = np.load('.notebook/enc_2.npz')\n",
- "#     torch_xs = data['enc_2']\n",
- "#     print(np.allclose(xs.numpy(), torch_xs))\n",
- "#     print(np.allclose(xs.numpy(), torch_xs, atol=1e-5))\n",
- "#     print(xs[0].numpy())\n",
- "#     print('xxxxxx')\n",
- "#     print(torch_xs[0])\n",
- "#     print('----i==2')\n",
- "data = np.load('.notebook/enc_all.npz')\n",
- "torch_xs = data['enc_all']\n",
- "print(np.allclose(xs.numpy(), torch_xs))\n",
- "print(np.allclose(xs.numpy(), torch_xs, atol=1e-5))\n",
- "print(xs[0].numpy())\n",
- "print('xxxxxx')\n",
- "print(torch_xs[0])"
- ]
- },
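- {
- "cell_type": "markdown",
- "id": "tolerance-note",
- "metadata": {},
- "source": [
- "Reading the checks above and below: after all 12 encoder blocks the Paddle and torch outputs still agree to `atol=1e-5` but no longer to `1e-6` or to the `np.allclose` defaults. Since each per-module check (feed-forward, self-attention, single layer) already passed at `atol=1e-6`, this pattern points to float32 rounding differences compounding layer by layer rather than an implementation mismatch."
- ]
- },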
- {
- "cell_type": "code",
- "execution_count": 278,
- "id": "macro-season",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "[[-0.7019424   0.5625421   0.68803453 ...  1.1237317   0.7803923   1.1369386 ]\n",
- " ...\n",
- " [-1.2732091   0.71458054  0.7581958  ...  0.9415482   0.8774844   1.2623048 ]]\n",
- "---\n",
- "[[-0.7019424   0.56254166  0.6880345  ...  1.1237322   0.78039217  1.1369387 ]\n",
- " ...\n",
- " [-1.2732087   0.71458083  0.7581961  ...  0.9415482   0.877484    1.2623053 ]]\n",
- "False\n",
- "True\n",
- "False\n"
- ]
- }
- ],
- "source": [
- "encoder_out, mask = model.encoder(feat, feat_len)\n",
- "print(encoder_out.numpy()[0])\n",
- "print(\"---\")\n",
- "print(torch_encoder_out[0])\n",
- "print(np.allclose(torch_encoder_out, encoder_out.numpy()))\n",
- "print(np.allclose(torch_encoder_out, encoder_out.numpy(), atol=1e-5))\n",
- "print(np.allclose(torch_encoder_out, encoder_out.numpy(), atol=1e-6))"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.0"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/.notebook/u2_tansformer_model_espnet.ipynb b/.notebook/u2_tansformer_model_espnet.ipynb
deleted file mode 100644
index 75c2ea5c6..000000000
--- a/.notebook/u2_tansformer_model_espnet.ipynb
+++ /dev/null
@@ -1,1672 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "id": "choice-grade",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "/workspace/DeepSpeech-2.x\n"
- ]
- },
- {
- "data": {
- "text/plain": [
- "'/workspace/DeepSpeech-2.x'"
- ]
- },
- "execution_count": 1,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "%cd ..\n",
- "%pwd"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "id": "broke-broad",
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:26: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. ...\n",
- "  def convert_to_list(value, n, name, dtype=np.int):\n",
- "register user softmax to paddle, remove this when fixed!\n",
- "register user log_softmax to paddle, remove this when fixed!\n",
- "...\n",
- "register user export to paddle.jit, remove this when fixed!\n"
- ]
- }
- ],
- "source": [
- "import numpy as np\n",
- "import paddle\n",
- "from yacs.config import CfgNode as CN\n",
- "\n",
- "from deepspeech.models.u2 import U2Model\n",
- "from deepspeech.utils.layer_tools import print_params\n",
- "from deepspeech.utils.layer_tools import summary"
- ]
- },
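- {
- "cell_type": "markdown",
- "id": "param-count-note",
- "metadata": {},
- "source": [
- "Sanity arithmetic for the parameter table below: each of the 12 transformer blocks holds 4 x (256 x 256 + 256) = 263,168 attention weights, (256 x 2048 + 2048) + (2048 x 256 + 256) = 1,050,880 feed-forward weights, 2 x (256 + 256) = 1,024 LayerNorm weights, and 512 x 256 + 256 = 131,328 for `concat_linear`, i.e. 1,446,400 per block and roughly 17.4M for the encoder stack."
- ]
- },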
- {
- "cell_type": "code",
- "execution_count": 3,
- "id": "permanent-summary",
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "[INFO 2021/05/31 03:23:22 u2.py:839] U2 Encoder type: transformer\n",
- "[INFO 2021/05/31 03:23:22 u2.py:840] attention_dropout_rate: 0.0\n",
- "attention_heads: 4\n",
- "dropout_rate: 0.1\n",
- "input_layer: conv2d\n",
- "linear_units: 2048\n",
- "normalize_before: True\n",
- "num_blocks: 12\n",
- "output_size: 256\n",
- "positional_dropout_rate: 0.1\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "encoder.embed.conv.0.weight | [256, 1, 3, 3] | 2304 | True\n",
- "encoder.embed.conv.0.bias | [256] | 256 | True\n",
- "encoder.embed.conv.2.weight | [256, 256, 3, 3] | 589824 | True\n",
- "encoder.embed.conv.2.bias | [256] | 256 | True\n",
- "encoder.embed.out.0.weight | [5120, 256] | 1310720 | True\n",
- "encoder.embed.out.0.bias | [256] | 256 | True\n",
- "encoder.after_norm.weight | [256] | 256 | True\n",
- "encoder.after_norm.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_q.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_q.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_k.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_k.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_v.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_v.bias | [256] | 256 | True\n",
- "encoder.encoders.0.self_attn.linear_out.weight | [256, 256] | 65536 | True\n",
- "encoder.encoders.0.self_attn.linear_out.bias | [256] | 256 | True\n",
- "encoder.encoders.0.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n",
- "encoder.encoders.0.feed_forward.w_1.bias | [2048] | 2048 | True\n",
- "encoder.encoders.0.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n",
- "encoder.encoders.0.feed_forward.w_2.bias | [256] | 256 | True\n",
- "encoder.encoders.0.norm1.weight | [256] | 256 | True\n",
- "encoder.encoders.0.norm1.bias | [256] | 256 | True\n",
- "encoder.encoders.0.norm2.weight | [256] | 256 | True\n",
- "encoder.encoders.0.norm2.bias | [256] | 256 | True\n",
- "encoder.encoders.0.concat_linear.weight | [512, 256] | 131072 | True\n",
- "encoder.encoders.0.concat_linear.bias | [256] | 256 | True\n",
- "... (encoder.encoders.1 through encoder.encoders.11 repeat the same rows and shapes) ...\n",
- "decoder.embed.0.weight | [4233, 256] | 1083648 | True\n",
- "decoder.after_norm.weight | [256] | 256 | True\n",
- "decoder.after_norm.bias | [256] | 256 | True\n",
- "decoder.output_layer.weight | [256, 4233] | 1083648 | True\n",
- "decoder.output_layer.bias | [4233] | 4233 | True\n",
- "decoder.decoders.0.self_attn.linear_q.weight | [256, 256] | 65536 | True\n",
- "decoder.decoders.0.self_attn.linear_q.bias | [256] | 256 | True\n",
- "decoder.decoders.0.self_attn.linear_k.weight | [256, 256] | 65536 | True\n",
- "decoder.decoders.0.self_attn.linear_k.bias | [256] | 256 | True\n",
- "decoder.decoders.0.self_attn.linear_v.weight | [256, 256] | 65536 | True\n",
- "decoder.decoders.0.self_attn.linear_v.bias | [256] | 256 | True\n",
- "decoder.decoders.0.self_attn.linear_out.weight | [256, 256] | 65536 | True\n",
- "decoder.decoders.0.self_attn.linear_out.bias | [256] | 256 | True\n",
- "decoder.decoders.0.src_attn.linear_q.weight | [256, 256] | 65536 | True\n",
- "decoder.decoders.0.src_attn.linear_q.bias | [256] | 256 | True\n",
- "decoder.decoders.0.src_attn.linear_k.weight | [256, 256] | 65536 | True\n",
- "decoder.decoders.0.src_attn.linear_k.bias | [256] | 
256 | True\n", - "decoder.decoders.0.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.0.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.0.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.0.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.0.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.0.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.0.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.0.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.0.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.0.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.0.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.0.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.0.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.0.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.0.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.0.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.1.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.1.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.1.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.1.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.1.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.1.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.1.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.1.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.1.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.1.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.1.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.1.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.1.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.1.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.1.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.1.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_q.bias | [256] | 256 | 
True\n", - "decoder.decoders.2.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.2.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.2.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.2.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.2.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.2.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.2.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.2.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.2.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.2.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.2.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.2.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.2.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.2.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.2.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.2.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.2.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.3.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.3.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.3.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.3.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.3.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.3.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.3.norm1.weight | 
[256] | 256 | True\n", - "decoder.decoders.3.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.3.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.3.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.3.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.3.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.3.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.3.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.3.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.3.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.4.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.4.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.4.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.4.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.4.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.4.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.4.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.4.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.4.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.4.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.4.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.4.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.4.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.4.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.4.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.4.concat_linear2.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_q.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.5.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_q.bias | [256] | 256 | True\n", - 
"decoder.decoders.5.src_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_k.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_v.bias | [256] | 256 | True\n", - "decoder.decoders.5.src_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_out.bias | [256] | 256 | True\n", - "decoder.decoders.5.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "decoder.decoders.5.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "decoder.decoders.5.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "decoder.decoders.5.feed_forward.w_2.bias | [256] | 256 | True\n", - "decoder.decoders.5.norm1.weight | [256] | 256 | True\n", - "decoder.decoders.5.norm1.bias | [256] | 256 | True\n", - "decoder.decoders.5.norm2.weight | [256] | 256 | True\n", - "decoder.decoders.5.norm2.bias | [256] | 256 | True\n", - "decoder.decoders.5.norm3.weight | [256] | 256 | True\n", - "decoder.decoders.5.norm3.bias | [256] | 256 | True\n", - "decoder.decoders.5.concat_linear1.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.5.concat_linear1.bias | [256] | 256 | True\n", - "decoder.decoders.5.concat_linear2.weight | [512, 256] | 131072 | True\n", - "decoder.decoders.5.concat_linear2.bias | [256] | 256 | True\n", - "ctc.ctc_lo.weight | [256, 4233] | 1083648 | True\n", - "ctc.ctc_lo.bias | [4233] | 4233 | True\n", - "Total parameters: 411.0, 32.01M elements.\n" - ] - } - ], - "source": [ - "conf_str='examples/tiny/s1/conf/transformer.yaml'\n", - "cfg = CN().load_cfg(open(conf_str))\n", - "cfg.model.input_dim = 83\n", - "cfg.model.output_dim = 4233\n", - "cfg.model.cmvn_file = None\n", - "cfg.model.cmvn_file_type = 'json'\n", - "#cfg.model.encoder_conf.concat_after=True\n", - "cfg.freeze()\n", - "model = U2Model(cfg.model)\n", - "\n", - "print_params(model)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "sapphire-agent", - "metadata": {}, - "outputs": [], - "source": [ - "#summary(model)\n", - "#print(model)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ruled-invitation", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "fossil-means", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "embed.npz feat.npz l1.npz l11.npz l3.npz l5.npz l7.npz l9.npz\r\n", - "encoder.npz l0.npz l10.npz l2.npz l4.npz l6.npz l8.npz model.npz\r\n" - ] - } - ], - "source": [ - "%ls .notebook/espnet" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "45c2b75f", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "state\n", - "odict_keys(['mask_feature', 'encoder.embed.conv.0.weight', 'encoder.embed.conv.0.bias', 'encoder.embed.conv.2.weight', 'encoder.embed.conv.2.bias', 'encoder.embed.out.0.weight', 'encoder.embed.out.0.bias', 'encoder.encoders.0.self_attn.linear_q.weight', 'encoder.encoders.0.self_attn.linear_q.bias', 'encoder.encoders.0.self_attn.linear_k.weight', 'encoder.encoders.0.self_attn.linear_k.bias', 'encoder.encoders.0.self_attn.linear_v.weight', 'encoder.encoders.0.self_attn.linear_v.bias', 'encoder.encoders.0.self_attn.linear_out.weight', 'encoder.encoders.0.self_attn.linear_out.bias', 'encoder.encoders.0.feed_forward.w_1.weight', 'encoder.encoders.0.feed_forward.w_1.bias', 'encoder.encoders.0.feed_forward.w_2.weight', 
'encoder.encoders.0.feed_forward.w_2.bias', 'encoder.encoders.0.norm1.weight', 'encoder.encoders.0.norm1.bias', 'encoder.encoders.0.norm2.weight', 'encoder.encoders.0.norm2.bias', 'encoder.encoders.1.self_attn.linear_q.weight', 'encoder.encoders.1.self_attn.linear_q.bias', 'encoder.encoders.1.self_attn.linear_k.weight', 'encoder.encoders.1.self_attn.linear_k.bias', 'encoder.encoders.1.self_attn.linear_v.weight', 'encoder.encoders.1.self_attn.linear_v.bias', 'encoder.encoders.1.self_attn.linear_out.weight', 'encoder.encoders.1.self_attn.linear_out.bias', 'encoder.encoders.1.feed_forward.w_1.weight', 'encoder.encoders.1.feed_forward.w_1.bias', 'encoder.encoders.1.feed_forward.w_2.weight', 'encoder.encoders.1.feed_forward.w_2.bias', 'encoder.encoders.1.norm1.weight', 'encoder.encoders.1.norm1.bias', 'encoder.encoders.1.norm2.weight', 'encoder.encoders.1.norm2.bias', 'encoder.encoders.2.self_attn.linear_q.weight', 'encoder.encoders.2.self_attn.linear_q.bias', 'encoder.encoders.2.self_attn.linear_k.weight', 'encoder.encoders.2.self_attn.linear_k.bias', 'encoder.encoders.2.self_attn.linear_v.weight', 'encoder.encoders.2.self_attn.linear_v.bias', 'encoder.encoders.2.self_attn.linear_out.weight', 'encoder.encoders.2.self_attn.linear_out.bias', 'encoder.encoders.2.feed_forward.w_1.weight', 'encoder.encoders.2.feed_forward.w_1.bias', 'encoder.encoders.2.feed_forward.w_2.weight', 'encoder.encoders.2.feed_forward.w_2.bias', 'encoder.encoders.2.norm1.weight', 'encoder.encoders.2.norm1.bias', 'encoder.encoders.2.norm2.weight', 'encoder.encoders.2.norm2.bias', 'encoder.encoders.3.self_attn.linear_q.weight', 'encoder.encoders.3.self_attn.linear_q.bias', 'encoder.encoders.3.self_attn.linear_k.weight', 'encoder.encoders.3.self_attn.linear_k.bias', 'encoder.encoders.3.self_attn.linear_v.weight', 'encoder.encoders.3.self_attn.linear_v.bias', 'encoder.encoders.3.self_attn.linear_out.weight', 'encoder.encoders.3.self_attn.linear_out.bias', 'encoder.encoders.3.feed_forward.w_1.weight', 'encoder.encoders.3.feed_forward.w_1.bias', 'encoder.encoders.3.feed_forward.w_2.weight', 'encoder.encoders.3.feed_forward.w_2.bias', 'encoder.encoders.3.norm1.weight', 'encoder.encoders.3.norm1.bias', 'encoder.encoders.3.norm2.weight', 'encoder.encoders.3.norm2.bias', 'encoder.encoders.4.self_attn.linear_q.weight', 'encoder.encoders.4.self_attn.linear_q.bias', 'encoder.encoders.4.self_attn.linear_k.weight', 'encoder.encoders.4.self_attn.linear_k.bias', 'encoder.encoders.4.self_attn.linear_v.weight', 'encoder.encoders.4.self_attn.linear_v.bias', 'encoder.encoders.4.self_attn.linear_out.weight', 'encoder.encoders.4.self_attn.linear_out.bias', 'encoder.encoders.4.feed_forward.w_1.weight', 'encoder.encoders.4.feed_forward.w_1.bias', 'encoder.encoders.4.feed_forward.w_2.weight', 'encoder.encoders.4.feed_forward.w_2.bias', 'encoder.encoders.4.norm1.weight', 'encoder.encoders.4.norm1.bias', 'encoder.encoders.4.norm2.weight', 'encoder.encoders.4.norm2.bias', 'encoder.encoders.5.self_attn.linear_q.weight', 'encoder.encoders.5.self_attn.linear_q.bias', 'encoder.encoders.5.self_attn.linear_k.weight', 'encoder.encoders.5.self_attn.linear_k.bias', 'encoder.encoders.5.self_attn.linear_v.weight', 'encoder.encoders.5.self_attn.linear_v.bias', 'encoder.encoders.5.self_attn.linear_out.weight', 'encoder.encoders.5.self_attn.linear_out.bias', 'encoder.encoders.5.feed_forward.w_1.weight', 'encoder.encoders.5.feed_forward.w_1.bias', 'encoder.encoders.5.feed_forward.w_2.weight', 'encoder.encoders.5.feed_forward.w_2.bias', 
'encoder.encoders.5.norm1.weight', 'encoder.encoders.5.norm1.bias', 'encoder.encoders.5.norm2.weight', 'encoder.encoders.5.norm2.bias', 'encoder.encoders.6.self_attn.linear_q.weight', 'encoder.encoders.6.self_attn.linear_q.bias', 'encoder.encoders.6.self_attn.linear_k.weight', 'encoder.encoders.6.self_attn.linear_k.bias', 'encoder.encoders.6.self_attn.linear_v.weight', 'encoder.encoders.6.self_attn.linear_v.bias', 'encoder.encoders.6.self_attn.linear_out.weight', 'encoder.encoders.6.self_attn.linear_out.bias', 'encoder.encoders.6.feed_forward.w_1.weight', 'encoder.encoders.6.feed_forward.w_1.bias', 'encoder.encoders.6.feed_forward.w_2.weight', 'encoder.encoders.6.feed_forward.w_2.bias', 'encoder.encoders.6.norm1.weight', 'encoder.encoders.6.norm1.bias', 'encoder.encoders.6.norm2.weight', 'encoder.encoders.6.norm2.bias', 'encoder.encoders.7.self_attn.linear_q.weight', 'encoder.encoders.7.self_attn.linear_q.bias', 'encoder.encoders.7.self_attn.linear_k.weight', 'encoder.encoders.7.self_attn.linear_k.bias', 'encoder.encoders.7.self_attn.linear_v.weight', 'encoder.encoders.7.self_attn.linear_v.bias', 'encoder.encoders.7.self_attn.linear_out.weight', 'encoder.encoders.7.self_attn.linear_out.bias', 'encoder.encoders.7.feed_forward.w_1.weight', 'encoder.encoders.7.feed_forward.w_1.bias', 'encoder.encoders.7.feed_forward.w_2.weight', 'encoder.encoders.7.feed_forward.w_2.bias', 'encoder.encoders.7.norm1.weight', 'encoder.encoders.7.norm1.bias', 'encoder.encoders.7.norm2.weight', 'encoder.encoders.7.norm2.bias', 'encoder.encoders.8.self_attn.linear_q.weight', 'encoder.encoders.8.self_attn.linear_q.bias', 'encoder.encoders.8.self_attn.linear_k.weight', 'encoder.encoders.8.self_attn.linear_k.bias', 'encoder.encoders.8.self_attn.linear_v.weight', 'encoder.encoders.8.self_attn.linear_v.bias', 'encoder.encoders.8.self_attn.linear_out.weight', 'encoder.encoders.8.self_attn.linear_out.bias', 'encoder.encoders.8.feed_forward.w_1.weight', 'encoder.encoders.8.feed_forward.w_1.bias', 'encoder.encoders.8.feed_forward.w_2.weight', 'encoder.encoders.8.feed_forward.w_2.bias', 'encoder.encoders.8.norm1.weight', 'encoder.encoders.8.norm1.bias', 'encoder.encoders.8.norm2.weight', 'encoder.encoders.8.norm2.bias', 'encoder.encoders.9.self_attn.linear_q.weight', 'encoder.encoders.9.self_attn.linear_q.bias', 'encoder.encoders.9.self_attn.linear_k.weight', 'encoder.encoders.9.self_attn.linear_k.bias', 'encoder.encoders.9.self_attn.linear_v.weight', 'encoder.encoders.9.self_attn.linear_v.bias', 'encoder.encoders.9.self_attn.linear_out.weight', 'encoder.encoders.9.self_attn.linear_out.bias', 'encoder.encoders.9.feed_forward.w_1.weight', 'encoder.encoders.9.feed_forward.w_1.bias', 'encoder.encoders.9.feed_forward.w_2.weight', 'encoder.encoders.9.feed_forward.w_2.bias', 'encoder.encoders.9.norm1.weight', 'encoder.encoders.9.norm1.bias', 'encoder.encoders.9.norm2.weight', 'encoder.encoders.9.norm2.bias', 'encoder.encoders.10.self_attn.linear_q.weight', 'encoder.encoders.10.self_attn.linear_q.bias', 'encoder.encoders.10.self_attn.linear_k.weight', 'encoder.encoders.10.self_attn.linear_k.bias', 'encoder.encoders.10.self_attn.linear_v.weight', 'encoder.encoders.10.self_attn.linear_v.bias', 'encoder.encoders.10.self_attn.linear_out.weight', 'encoder.encoders.10.self_attn.linear_out.bias', 'encoder.encoders.10.feed_forward.w_1.weight', 'encoder.encoders.10.feed_forward.w_1.bias', 'encoder.encoders.10.feed_forward.w_2.weight', 'encoder.encoders.10.feed_forward.w_2.bias', 'encoder.encoders.10.norm1.weight', 
'encoder.encoders.10.norm1.bias', 'encoder.encoders.10.norm2.weight', 'encoder.encoders.10.norm2.bias', 'encoder.encoders.11.self_attn.linear_q.weight', 'encoder.encoders.11.self_attn.linear_q.bias', 'encoder.encoders.11.self_attn.linear_k.weight', 'encoder.encoders.11.self_attn.linear_k.bias', 'encoder.encoders.11.self_attn.linear_v.weight', 'encoder.encoders.11.self_attn.linear_v.bias', 'encoder.encoders.11.self_attn.linear_out.weight', 'encoder.encoders.11.self_attn.linear_out.bias', 'encoder.encoders.11.feed_forward.w_1.weight', 'encoder.encoders.11.feed_forward.w_1.bias', 'encoder.encoders.11.feed_forward.w_2.weight', 'encoder.encoders.11.feed_forward.w_2.bias', 'encoder.encoders.11.norm1.weight', 'encoder.encoders.11.norm1.bias', 'encoder.encoders.11.norm2.weight', 'encoder.encoders.11.norm2.bias', 'encoder.after_norm.weight', 'encoder.after_norm.bias', 'decoder.embed.0.weight', 'decoder.decoders.0.self_attn.linear_q.weight', 'decoder.decoders.0.self_attn.linear_q.bias', 'decoder.decoders.0.self_attn.linear_k.weight', 'decoder.decoders.0.self_attn.linear_k.bias', 'decoder.decoders.0.self_attn.linear_v.weight', 'decoder.decoders.0.self_attn.linear_v.bias', 'decoder.decoders.0.self_attn.linear_out.weight', 'decoder.decoders.0.self_attn.linear_out.bias', 'decoder.decoders.0.src_attn.linear_q.weight', 'decoder.decoders.0.src_attn.linear_q.bias', 'decoder.decoders.0.src_attn.linear_k.weight', 'decoder.decoders.0.src_attn.linear_k.bias', 'decoder.decoders.0.src_attn.linear_v.weight', 'decoder.decoders.0.src_attn.linear_v.bias', 'decoder.decoders.0.src_attn.linear_out.weight', 'decoder.decoders.0.src_attn.linear_out.bias', 'decoder.decoders.0.feed_forward.w_1.weight', 'decoder.decoders.0.feed_forward.w_1.bias', 'decoder.decoders.0.feed_forward.w_2.weight', 'decoder.decoders.0.feed_forward.w_2.bias', 'decoder.decoders.0.norm1.weight', 'decoder.decoders.0.norm1.bias', 'decoder.decoders.0.norm2.weight', 'decoder.decoders.0.norm2.bias', 'decoder.decoders.0.norm3.weight', 'decoder.decoders.0.norm3.bias', 'decoder.after_norm.weight', 'decoder.after_norm.bias', 'decoder.output_layer.weight', 'decoder.output_layer.bias', 'sfc.weight', 'sfc.bias', 'deconv.0.weight', 'deconv.0.bias', 'deconv.1.weight', 'deconv.1.bias', 'xlm_embed.0.weight', 'xlm_pred.weight', 'xlm_pred.bias'])\n" - ] - } - ], - "source": [ - "#!pip install torch\n", - "import torch\n", - "\n", - "e_model = np.load('.notebook/espnet/model.npz',allow_pickle=True)\n", - "for k in e_model.files:\n", - " print(k)\n", - "state_dict = e_model['state']\n", - "state_dict = state_dict.tolist()\n", - "print(state_dict.keys())" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "f187bb55", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. 
Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], - "source": [ - "# embed.conv.0.weight None torch.Size([256, 1, 3, 3]) \tencoder.embed.conv.0.weight | [256, 1, 3, 3] | 2304 | True\n", - "# embed.conv.0.bias None torch.Size([256]) \tencoder.embed.conv.0.bias | [256] | 256 | True\n", - "# embed.conv.2.weight None torch.Size([256, 256, 3, 3]) \tencoder.embed.conv.2.weight | [256, 256, 3, 3] | 589824 | True\n", - "# embed.conv.2.bias None torch.Size([256]) \tencoder.embed.conv.2.bias | [256] | 256 | True\n", - "# embed.out.0.weight None torch.Size([256, 5120]) 83 feature\tencoder.embed.out.0.weight | [4864, 256] | 1245184 | True 80 feature\n", - "# embed.out.0.bias None torch.Size([256]) \tencoder.embed.out.0.bias | [256] | 256 | True\n", - "# after_norm.weight None torch.Size([256]) \tencoder.after_norm.weight | [256] | 256 | True\n", - "# after_norm.bias None torch.Size([256]) \tencoder.after_norm.bias | [256] | 256 | True\n", - "# encoders.9.self_attn.linear_q.weight None torch.Size([256, 256]) \tencoder.encoders.0.self_attn.linear_q.weight | [256, 256] | 65536 | True\n", - "# encoders.9.self_attn.linear_q.bias None torch.Size([256]) \tencoder.encoders.0.self_attn.linear_q.bias | [256] | 256 | True\n", - "# encoders.9.self_attn.linear_k.weight None torch.Size([256, 256]) \tencoder.encoders.0.self_attn.linear_k.weight | [256, 256] | 65536 | True\n", - "# encoders.9.self_attn.linear_k.bias None torch.Size([256]) \tencoder.encoders.0.self_attn.linear_k.bias | [256] | 256 | True\n", - "# encoders.9.self_attn.linear_v.weight None torch.Size([256, 256]) \tencoder.encoders.0.self_attn.linear_v.weight | [256, 256] | 65536 | True\n", - "# encoders.9.self_attn.linear_v.bias None torch.Size([256]) \tencoder.encoders.0.self_attn.linear_v.bias | [256] | 256 | True\n", - "# encoders.9.self_attn.linear_out.weight None torch.Size([256, 256]) \tencoder.encoders.0.self_attn.linear_out.weight | [256, 256] | 65536 | True\n", - "# encoders.9.self_attn.linear_out.bias None torch.Size([256]) \tencoder.encoders.0.self_attn.linear_out.bias | [256] | 256 | True\n", - "# encoders.9.feed_forward.w_1.weight None torch.Size([2048, 256]) \tencoder.encoders.0.feed_forward.w_1.weight | [256, 2048] | 524288 | True\n", - "# encoders.9.feed_forward.w_1.bias None torch.Size([2048]) \tencoder.encoders.0.feed_forward.w_1.bias | [2048] | 2048 | True\n", - "# encoders.9.feed_forward.w_2.weight None torch.Size([256, 2048]) \tencoder.encoders.0.feed_forward.w_2.weight | [2048, 256] | 524288 | True\n", - "# encoders.9.feed_forward.w_2.bias None torch.Size([256]) \tencoder.encoders.0.feed_forward.w_2.bias | [256] | 256 | True\n", - "# encoders.9.norm1.weight None torch.Size([256]) \tencoder.encoders.0.norm1.weight | [256] | 256 | True\n", - "# encoders.9.norm1.bias None torch.Size([256]) \tencoder.encoders.0.norm1.bias | [256] | 256 | True\n", - "# encoders.9.norm2.weight None torch.Size([256]) \tencoder.encoders.0.norm2.weight | [256] | 256 | True\n", - "# encoders.9.norm2.bias None torch.Size([256]) \tencoder.encoders.0.norm2.bias | [256] | 256 | True\n", - "# \tencoder.encoders.0.concat_linear.weight | [512, 256] | 131072 | True\n", - "# \tencoder.encoders.0.concat_linear.bias | [256] | 256 | True\n", - "# espnet transformer\tconcat_linear is only saved in the checkpoint, but never used\n", - "\t\n", - "# \tpaddle transformer" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "2a0428ae", - 
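The commented comparison above encodes the two porting rules the rest of this notebook relies on: espnet/torch `Linear` weights are laid out as `[out_features, in_features]` while paddle uses `[in_features, out_features]` (hence `w_1`: `[2048, 256]` vs `[256, 2048]`), and paddle's `concat_linear.*` parameters have no espnet counterpart. A minimal sketch of the layout difference, not part of the original notebook and assuming both `torch` and `paddle` are importable:

```python
# Sketch (not in the notebook): torch.nn.Linear stores weight as
# [out_features, in_features]; paddle.nn.Linear stores [in_features, out_features],
# so every 2-D weight must be transposed when porting between the two.
import numpy as np
import paddle
import torch

t = torch.nn.Linear(256, 2048)   # t.weight: [2048, 256]
p = paddle.nn.Linear(256, 2048)  # p.weight: [256, 2048]
print(tuple(t.weight.shape), tuple(p.weight.shape))

# Port one layer: transpose the 2-D weight, copy the bias as-is.
p.weight.set_value(t.weight.detach().numpy().T)
p.bias.set_value(t.bias.detach().numpy())

# The same input now yields the same output, up to float32 tolerance.
x = np.random.rand(4, 256).astype('float32')
y_t = t(torch.from_numpy(x)).detach().numpy()
y_p = p(paddle.to_tensor(x)).numpy()
np.testing.assert_allclose(y_t, y_p, rtol=1e-5, atol=1e-5)
```

The conversion cell that follows applies exactly this rule: any `*.weight` tensor with `ndim == 2` is transposed, everything else is copied unchanged.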
"metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-> encoder.embed.conv.0.weight\n", - "-> encoder.embed.conv.0.bias\n", - "-> encoder.embed.conv.2.weight\n", - "-> encoder.embed.conv.2.bias\n", - "-> encoder.embed.out.0.weight\n", - "encoder.embed.out.0.weight: (256, 5120) -> (5120, 256)\n", - "-> encoder.embed.out.0.bias\n", - "-> encoder.encoders.0.self_attn.linear_q.weight\n", - "encoder.encoders.0.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.0.self_attn.linear_q.bias\n", - "-> encoder.encoders.0.self_attn.linear_k.weight\n", - "encoder.encoders.0.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.0.self_attn.linear_k.bias\n", - "-> encoder.encoders.0.self_attn.linear_v.weight\n", - "encoder.encoders.0.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.0.self_attn.linear_v.bias\n", - "-> encoder.encoders.0.self_attn.linear_out.weight\n", - "encoder.encoders.0.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.0.self_attn.linear_out.bias\n", - "-> encoder.encoders.0.feed_forward.w_1.weight\n", - "encoder.encoders.0.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.0.feed_forward.w_1.bias\n", - "-> encoder.encoders.0.feed_forward.w_2.weight\n", - "encoder.encoders.0.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.0.feed_forward.w_2.bias\n", - "-> encoder.encoders.0.norm1.weight\n", - "-> encoder.encoders.0.norm1.bias\n", - "-> encoder.encoders.0.norm2.weight\n", - "-> encoder.encoders.0.norm2.bias\n", - "-> encoder.encoders.1.self_attn.linear_q.weight\n", - "encoder.encoders.1.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.1.self_attn.linear_q.bias\n", - "-> encoder.encoders.1.self_attn.linear_k.weight\n", - "encoder.encoders.1.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.1.self_attn.linear_k.bias\n", - "-> encoder.encoders.1.self_attn.linear_v.weight\n", - "encoder.encoders.1.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.1.self_attn.linear_v.bias\n", - "-> encoder.encoders.1.self_attn.linear_out.weight\n", - "encoder.encoders.1.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.1.self_attn.linear_out.bias\n", - "-> encoder.encoders.1.feed_forward.w_1.weight\n", - "encoder.encoders.1.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.1.feed_forward.w_1.bias\n", - "-> encoder.encoders.1.feed_forward.w_2.weight\n", - "encoder.encoders.1.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.1.feed_forward.w_2.bias\n", - "-> encoder.encoders.1.norm1.weight\n", - "-> encoder.encoders.1.norm1.bias\n", - "-> encoder.encoders.1.norm2.weight\n", - "-> encoder.encoders.1.norm2.bias\n", - "-> encoder.encoders.2.self_attn.linear_q.weight\n", - "encoder.encoders.2.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.2.self_attn.linear_q.bias\n", - "-> encoder.encoders.2.self_attn.linear_k.weight\n", - "encoder.encoders.2.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.2.self_attn.linear_k.bias\n", - "-> encoder.encoders.2.self_attn.linear_v.weight\n", - "encoder.encoders.2.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.2.self_attn.linear_v.bias\n", - "-> encoder.encoders.2.self_attn.linear_out.weight\n", - 
"encoder.encoders.2.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.2.self_attn.linear_out.bias\n", - "-> encoder.encoders.2.feed_forward.w_1.weight\n", - "encoder.encoders.2.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.2.feed_forward.w_1.bias\n", - "-> encoder.encoders.2.feed_forward.w_2.weight\n", - "encoder.encoders.2.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.2.feed_forward.w_2.bias\n", - "-> encoder.encoders.2.norm1.weight\n", - "-> encoder.encoders.2.norm1.bias\n", - "-> encoder.encoders.2.norm2.weight\n", - "-> encoder.encoders.2.norm2.bias\n", - "-> encoder.encoders.3.self_attn.linear_q.weight\n", - "encoder.encoders.3.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.3.self_attn.linear_q.bias\n", - "-> encoder.encoders.3.self_attn.linear_k.weight\n", - "encoder.encoders.3.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.3.self_attn.linear_k.bias\n", - "-> encoder.encoders.3.self_attn.linear_v.weight\n", - "encoder.encoders.3.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.3.self_attn.linear_v.bias\n", - "-> encoder.encoders.3.self_attn.linear_out.weight\n", - "encoder.encoders.3.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.3.self_attn.linear_out.bias\n", - "-> encoder.encoders.3.feed_forward.w_1.weight\n", - "encoder.encoders.3.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.3.feed_forward.w_1.bias\n", - "-> encoder.encoders.3.feed_forward.w_2.weight\n", - "encoder.encoders.3.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.3.feed_forward.w_2.bias\n", - "-> encoder.encoders.3.norm1.weight\n", - "-> encoder.encoders.3.norm1.bias\n", - "-> encoder.encoders.3.norm2.weight\n", - "-> encoder.encoders.3.norm2.bias\n", - "-> encoder.encoders.4.self_attn.linear_q.weight\n", - "encoder.encoders.4.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.4.self_attn.linear_q.bias\n", - "-> encoder.encoders.4.self_attn.linear_k.weight\n", - "encoder.encoders.4.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.4.self_attn.linear_k.bias\n", - "-> encoder.encoders.4.self_attn.linear_v.weight\n", - "encoder.encoders.4.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.4.self_attn.linear_v.bias\n", - "-> encoder.encoders.4.self_attn.linear_out.weight\n", - "encoder.encoders.4.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.4.self_attn.linear_out.bias\n", - "-> encoder.encoders.4.feed_forward.w_1.weight\n", - "encoder.encoders.4.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.4.feed_forward.w_1.bias\n", - "-> encoder.encoders.4.feed_forward.w_2.weight\n", - "encoder.encoders.4.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.4.feed_forward.w_2.bias\n", - "-> encoder.encoders.4.norm1.weight\n", - "-> encoder.encoders.4.norm1.bias\n", - "-> encoder.encoders.4.norm2.weight\n", - "-> encoder.encoders.4.norm2.bias\n", - "-> encoder.encoders.5.self_attn.linear_q.weight\n", - "encoder.encoders.5.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.5.self_attn.linear_q.bias\n", - "-> encoder.encoders.5.self_attn.linear_k.weight\n", - "encoder.encoders.5.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> 
encoder.encoders.5.self_attn.linear_k.bias\n", - "-> encoder.encoders.5.self_attn.linear_v.weight\n", - "encoder.encoders.5.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.5.self_attn.linear_v.bias\n", - "-> encoder.encoders.5.self_attn.linear_out.weight\n", - "encoder.encoders.5.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.5.self_attn.linear_out.bias\n", - "-> encoder.encoders.5.feed_forward.w_1.weight\n", - "encoder.encoders.5.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.5.feed_forward.w_1.bias\n", - "-> encoder.encoders.5.feed_forward.w_2.weight\n", - "encoder.encoders.5.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.5.feed_forward.w_2.bias\n", - "-> encoder.encoders.5.norm1.weight\n", - "-> encoder.encoders.5.norm1.bias\n", - "-> encoder.encoders.5.norm2.weight\n", - "-> encoder.encoders.5.norm2.bias\n", - "-> encoder.encoders.6.self_attn.linear_q.weight\n", - "encoder.encoders.6.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.6.self_attn.linear_q.bias\n", - "-> encoder.encoders.6.self_attn.linear_k.weight\n", - "encoder.encoders.6.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.6.self_attn.linear_k.bias\n", - "-> encoder.encoders.6.self_attn.linear_v.weight\n", - "encoder.encoders.6.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.6.self_attn.linear_v.bias\n", - "-> encoder.encoders.6.self_attn.linear_out.weight\n", - "encoder.encoders.6.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.6.self_attn.linear_out.bias\n", - "-> encoder.encoders.6.feed_forward.w_1.weight\n", - "encoder.encoders.6.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.6.feed_forward.w_1.bias\n", - "-> encoder.encoders.6.feed_forward.w_2.weight\n", - "encoder.encoders.6.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.6.feed_forward.w_2.bias\n", - "-> encoder.encoders.6.norm1.weight\n", - "-> encoder.encoders.6.norm1.bias\n", - "-> encoder.encoders.6.norm2.weight\n", - "-> encoder.encoders.6.norm2.bias\n", - "-> encoder.encoders.7.self_attn.linear_q.weight\n", - "encoder.encoders.7.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.7.self_attn.linear_q.bias\n", - "-> encoder.encoders.7.self_attn.linear_k.weight\n", - "encoder.encoders.7.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.7.self_attn.linear_k.bias\n", - "-> encoder.encoders.7.self_attn.linear_v.weight\n", - "encoder.encoders.7.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.7.self_attn.linear_v.bias\n", - "-> encoder.encoders.7.self_attn.linear_out.weight\n", - "encoder.encoders.7.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.7.self_attn.linear_out.bias\n", - "-> encoder.encoders.7.feed_forward.w_1.weight\n", - "encoder.encoders.7.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.7.feed_forward.w_1.bias\n", - "-> encoder.encoders.7.feed_forward.w_2.weight\n", - "encoder.encoders.7.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.7.feed_forward.w_2.bias\n", - "-> encoder.encoders.7.norm1.weight\n", - "-> encoder.encoders.7.norm1.bias\n", - "-> encoder.encoders.7.norm2.weight\n", - "-> encoder.encoders.7.norm2.bias\n", - "-> 
encoder.encoders.8.self_attn.linear_q.weight\n", - "encoder.encoders.8.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.8.self_attn.linear_q.bias\n", - "-> encoder.encoders.8.self_attn.linear_k.weight\n", - "encoder.encoders.8.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.8.self_attn.linear_k.bias\n", - "-> encoder.encoders.8.self_attn.linear_v.weight\n", - "encoder.encoders.8.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.8.self_attn.linear_v.bias\n", - "-> encoder.encoders.8.self_attn.linear_out.weight\n", - "encoder.encoders.8.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.8.self_attn.linear_out.bias\n", - "-> encoder.encoders.8.feed_forward.w_1.weight\n", - "encoder.encoders.8.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.8.feed_forward.w_1.bias\n", - "-> encoder.encoders.8.feed_forward.w_2.weight\n", - "encoder.encoders.8.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.8.feed_forward.w_2.bias\n", - "-> encoder.encoders.8.norm1.weight\n", - "-> encoder.encoders.8.norm1.bias\n", - "-> encoder.encoders.8.norm2.weight\n", - "-> encoder.encoders.8.norm2.bias\n", - "-> encoder.encoders.9.self_attn.linear_q.weight\n", - "encoder.encoders.9.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.9.self_attn.linear_q.bias\n", - "-> encoder.encoders.9.self_attn.linear_k.weight\n", - "encoder.encoders.9.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.9.self_attn.linear_k.bias\n", - "-> encoder.encoders.9.self_attn.linear_v.weight\n", - "encoder.encoders.9.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.9.self_attn.linear_v.bias\n", - "-> encoder.encoders.9.self_attn.linear_out.weight\n", - "encoder.encoders.9.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.9.self_attn.linear_out.bias\n", - "-> encoder.encoders.9.feed_forward.w_1.weight\n", - "encoder.encoders.9.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.9.feed_forward.w_1.bias\n", - "-> encoder.encoders.9.feed_forward.w_2.weight\n", - "encoder.encoders.9.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.9.feed_forward.w_2.bias\n", - "-> encoder.encoders.9.norm1.weight\n", - "-> encoder.encoders.9.norm1.bias\n", - "-> encoder.encoders.9.norm2.weight\n", - "-> encoder.encoders.9.norm2.bias\n", - "-> encoder.encoders.10.self_attn.linear_q.weight\n", - "encoder.encoders.10.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.10.self_attn.linear_q.bias\n", - "-> encoder.encoders.10.self_attn.linear_k.weight\n", - "encoder.encoders.10.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.10.self_attn.linear_k.bias\n", - "-> encoder.encoders.10.self_attn.linear_v.weight\n", - "encoder.encoders.10.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.10.self_attn.linear_v.bias\n", - "-> encoder.encoders.10.self_attn.linear_out.weight\n", - "encoder.encoders.10.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.10.self_attn.linear_out.bias\n", - "-> encoder.encoders.10.feed_forward.w_1.weight\n", - "encoder.encoders.10.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.10.feed_forward.w_1.bias\n", - "-> encoder.encoders.10.feed_forward.w_2.weight\n", - 
"encoder.encoders.10.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.10.feed_forward.w_2.bias\n", - "-> encoder.encoders.10.norm1.weight\n", - "-> encoder.encoders.10.norm1.bias\n", - "-> encoder.encoders.10.norm2.weight\n", - "-> encoder.encoders.10.norm2.bias\n", - "-> encoder.encoders.11.self_attn.linear_q.weight\n", - "encoder.encoders.11.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.11.self_attn.linear_q.bias\n", - "-> encoder.encoders.11.self_attn.linear_k.weight\n", - "encoder.encoders.11.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.11.self_attn.linear_k.bias\n", - "-> encoder.encoders.11.self_attn.linear_v.weight\n", - "encoder.encoders.11.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.11.self_attn.linear_v.bias\n", - "-> encoder.encoders.11.self_attn.linear_out.weight\n", - "encoder.encoders.11.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "-> encoder.encoders.11.self_attn.linear_out.bias\n", - "-> encoder.encoders.11.feed_forward.w_1.weight\n", - "encoder.encoders.11.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "-> encoder.encoders.11.feed_forward.w_1.bias\n", - "-> encoder.encoders.11.feed_forward.w_2.weight\n", - "encoder.encoders.11.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "-> encoder.encoders.11.feed_forward.w_2.bias\n", - "-> encoder.encoders.11.norm1.weight\n", - "-> encoder.encoders.11.norm1.bias\n", - "-> encoder.encoders.11.norm2.weight\n", - "-> encoder.encoders.11.norm2.bias\n", - "-> encoder.after_norm.weight\n", - "-> encoder.after_norm.bias\n" - ] - } - ], - "source": [ - "# dump torch model to paddle\n", - "#state_dict = model.state_dict()\n", - "paddle_state_dict = {}\n", - "\n", - "for n, p in state_dict.items():\n", - " if 'encoder' not in n:\n", - " continue \n", - " print(f'-> {n}')\n", - " \n", - " \n", - " name_change=True\n", - " if 'norm.running_mean' in n:\n", - " new_n = n.replace('norm.running_', 'norm._')\n", - " elif 'norm.running_var' in n:\n", - " new_n = n.replace('norm.running_var', 'norm._variance')\n", - " else:\n", - " name_change=False\n", - " new_n = n\n", - " if name_change:\n", - " print(f\"{n} -> {new_n}\")\n", - " \n", - " \n", - " p = p.cpu().detach().numpy()\n", - " if n.endswith('weight') and p.ndim == 2:\n", - " new_p = p.T\n", - " print(f\"{n}: {p.shape} -> {new_p.shape}\")\n", - " else:\n", - " new_p = p\n", - " \n", - " if 'global_cmvn.mean' in n:\n", - " print(p, p.dtype)\n", - " \n", - " paddle_state_dict[new_n] = new_p\n", - " \n", - "# np.savez('/workspace/DeepSpeech-2.x/.notebook/model',\n", - "# state=paddle_state_dict)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "a1d97e9f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.0.concat_linear.weight. encoder.encoders.0.concat_linear.weight is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.0.concat_linear.bias. encoder.encoders.0.concat_linear.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. 
\".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.1.concat_linear.weight. encoder.encoders.1.concat_linear.weight is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.1.concat_linear.bias. encoder.encoders.1.concat_linear.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.2.concat_linear.weight. encoder.encoders.2.concat_linear.weight is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.2.concat_linear.bias. encoder.encoders.2.concat_linear.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.3.concat_linear.weight. encoder.encoders.3.concat_linear.weight is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.3.concat_linear.bias. encoder.encoders.3.concat_linear.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.4.concat_linear.weight. encoder.encoders.4.concat_linear.weight is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.4.concat_linear.bias. encoder.encoders.4.concat_linear.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.5.concat_linear.weight. encoder.encoders.5.concat_linear.weight is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.5.concat_linear.bias. encoder.encoders.5.concat_linear.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for encoder.encoders.6.concat_linear.weight. 
encoder.encoders.6.concat_linear.weight is not found in the provided dict.\n",
-       "  warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n",
-       "[... the same UserWarning pair repeats for encoder.encoders.{6..11}.concat_linear.{weight,bias}, decoder.embed.0.weight, decoder.after_norm.{weight,bias}, decoder.output_layer.{weight,bias}, every decoder.decoders.{0..5} self_attn/src_attn/feed_forward/norm/concat_linear parameter, and ctc.ctc_lo.{weight,bias}; none of these keys exist in the provided dict ...]\n",
-       "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for ctc.ctc_lo.bias. ctc.ctc_lo.bias is not found in the provided dict.\n",
-       "  warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n"
-      ]
-     }
-    ],
-    "source": [
-     "model.set_state_dict(paddle_state_dict)"
-    ]
-   },
\".format(key) + str(err)))\n", - "/workspace/DeepSpeech-2.x/tools/venv-2p1/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py:1303: UserWarning: Skip loading for ctc.ctc_lo.bias. ctc.ctc_lo.bias is not found in the provided dict.\n", - " warnings.warn((\"Skip loading for {}. \".format(key) + str(err)))\n" - ] - } - ], - "source": [ - "model.set_state_dict(paddle_state_dict)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "fc7edf1e", - "metadata": {}, - "outputs": [], - "source": [ - "e_state = model.encoder.state_dict()\n", - "for key, value in e_state.items():\n", - " if 'concat_linear' in key:\n", - " continue\n", - " if not np.allclose(value.numpy(), paddle_state_dict['encoder.' + key]):\n", - " print(key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "572097d0", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "748250b7", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "91e5deee", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "fleet-despite", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "embed.npz feat.npz l1.npz l11.npz l3.npz l5.npz l7.npz l9.npz\r\n", - "encoder.npz l0.npz l10.npz l2.npz l4.npz l6.npz l8.npz model.npz\r\n" - ] - } - ], - "source": [ - "%ls .notebook/espnet" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "abroad-oracle", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(8, 57, 83)\n", - "(8, 1, 57)\n", - "[57 50 48 38 32 31 28 25]\n" - ] - } - ], - "source": [ - "data = np.load('.notebook/espnet/feat.npz', allow_pickle=True)\n", - "xs=data['xs']\n", - "masks=data['masks']\n", - "print(xs.shape)\n", - "print(masks.shape)\n", - "xs_lens = masks.sum(axis=-1).squeeze()\n", - "print(xs_lens)" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "id": "false-instrument", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[8, 13, 256]\n", - "[8, 1, 13]\n" - ] - } - ], - "source": [ - "# ecnoder\n", - "xs = paddle.to_tensor(xs, dtype='float32')\n", - "x_lens = paddle.to_tensor(xs_lens, dtype='int32')\n", - "model.eval()\n", - "encoder_out, encoder_mask = model.encoder(xs, x_lens)\n", - "print(encoder_out.shape)\n", - "print(encoder_mask.shape)" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "arctic-proxy", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(8, 13, 256)\n", - "(8, 1, 13)\n", - "False\n", - "False\n", - "True\n", - "True\n" - ] - } - ], - "source": [ - "data = np.load('.notebook/espnet/encoder.npz', allow_pickle=True)\n", - "xs = data['xs']\n", - "masks = data['masks']\n", - "print(xs.shape)\n", - "print(masks.shape)\n", - "print(np.allclose(xs, encoder_out.numpy()))\n", - "print(np.allclose(xs, encoder_out.numpy(), atol=1e-6))\n", - "print(np.allclose(xs, encoder_out.numpy(), atol=1e-5))\n", - "print(np.allclose(masks, encoder_mask.numpy()))" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "id": "seasonal-switch", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 2.1380312 1.8675405 -1.1873871 ... -0.30456656 0.56382173\n", - " -0.6526459 ]\n", - " [ 2.1926146 2.1373641 -0.6548196 ... 
-0.897318 0.6044322\n", - " -0.63332295]\n", - " [ 1.6367635 2.3320658 -0.8848577 ... -0.9640939 1.2420733\n", - " -0.05243584]\n", - " ...\n", - " [ 1.8533031 1.8421621 -0.6728406 ... 0.04810616 0.6459763\n", - " -0.18188554]\n", - " [ 2.0894065 1.7813934 -1.1591585 ... -0.09513803 0.8321831\n", - " -0.72916794]\n", - " [ 1.6488649 2.0984242 -1.3490562 ... 0.42678255 0.5903866\n", - " -0.32597935]]\n", - "Tensor(shape=[13, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [[ 2.13803196, 1.86753929, -1.18738675, ..., -0.30456796, 0.56382364, -0.65264463],\n", - " [ 2.19261336, 2.13736486, -0.65482187, ..., -0.89731705, 0.60443199, -0.63332343],\n", - " [ 1.63676369, 2.33206534, -0.88485885, ..., -0.96409231, 1.24207270, -0.05243752],\n", - " ...,\n", - " [ 1.85330284, 1.84216177, -0.67284071, ..., 0.04810715, 0.64597648, -0.18188696],\n", - " [ 2.08940673, 1.78139246, -1.15916038, ..., -0.09513779, 0.83218288, -0.72916913],\n", - " [ 1.64886570, 2.09842515, -1.34905660, ..., 0.42678308, 0.59038705, -0.32598034]])\n" - ] - } - ], - "source": [ - "print(xs[0])\n", - "print(encoder_out[0])" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "id": "defined-brooks", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 2.209824 1.5208759 0.1417884 ... -0.73617566 1.6538682\n", - " -0.16355833]\n", - " [ 2.1441019 1.4377339 0.3629197 ... -0.91226125 1.3739952\n", - " 0.11874156]\n", - " [ 1.8725398 1.5417286 0.38919652 ... -0.89621615 1.1841662\n", - " 0.27621832]\n", - " ...\n", - " [ 2.4591084 0.7238764 -1.1456345 ... -0.24188249 0.8232168\n", - " -0.9794884 ]\n", - " [ 2.5156236 1.1919155 -0.97032744 ... -0.7360675 1.0647209\n", - " -1.3076135 ]\n", - " [ 2.160009 0.98425585 -1.2231126 ... 
-0.03393313 1.9141548\n", - " -1.0099151 ]]\n", - "Tensor(shape=[13, 256], dtype=float32, place=CUDAPlace(0), stop_gradient=False,\n", - " [[ 2.20982409, 1.52087593, 0.14178854, ..., -0.73617446, 1.65386844, -0.16355731],\n", - " [ 2.14410043, 1.43773460, 0.36291891, ..., -0.91226172, 1.37399518, 0.11874183],\n", - " [ 1.87254059, 1.54172909, 0.38919681, ..., -0.89621687, 1.18416822, 0.27621880],\n", - " ...,\n", - " [ 2.45910931, 0.72387671, -1.14563596, ..., -0.24188218, 0.82321703, -0.97948682],\n", - " [ 2.51562238, 1.19191694, -0.97032893, ..., -0.73606837, 1.06472087, -1.30761123],\n", - " [ 2.16000915, 0.98425680, -1.22311163, ..., -0.03393326, 1.91415381, -1.00991392]])\n" - ] - } - ], - "source": [ - "print(xs[1])\n", - "print(encoder_out[1])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0504e3f8", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/.notebook/wenet_model.ipynb b/.notebook/wenet_model.ipynb deleted file mode 100644 index 8e10b6c4b..000000000 --- a/.notebook/wenet_model.ipynb +++ /dev/null @@ -1,5015 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "cfb832c0", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/workspace/wenet\n" - ] - }, - { - "data": { - "text/plain": [ - "'/workspace/wenet'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%cd /workspace/wenet/\n", - "%pwd" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "62277538", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "import argparse\n", - "import copy\n", - "import logging\n", - "import os\n", - "\n", - "import torch\n", - "import torch.distributed as dist\n", - "import torch.optim as optim\n", - "import yaml\n", - "from tensorboardX import SummaryWriter\n", - "from torch.utils.data import DataLoader\n", - "\n", - "from wenet.dataset.dataset import AudioDataset, CollateFunc\n", - "from wenet.transformer.asr_model import init_asr_model\n", - "from wenet.utils.checkpoint import load_checkpoint, save_checkpoint\n", - "from wenet.utils.executor import Executor\n", - "from wenet.utils.scheduler import WarmupLR\n", - "\n", - "os.environ['CUDA_VISIBLE_DEVICES'] = \"0\"" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "2f6ea33a", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'config': 'examples/aishell/s0/conf/train_conformer.yaml', 'train_data': 'examples/aishell/s0/raw_wav/train/format.data', 'cv_data': 'examples/aishell/s0/raw_wav/dev/format.data', 'gpu': -1, 'model_dir': None, 'checkpoint': None, 'tensorboard_dir': 'tensorboard', 'rank': 0, 'world_size': -1, 'dist_backend': 'nccl', 'init_method': None, 'num_workers': 0, 'pin_memory': False, 'cmvn': 'examples/aishell/s0/raw_wav/train/global_cmvn'}\n" - ] - } - ], - "source": [ - "parser = argparse.ArgumentParser(description='training your network')\n", - "parser.add_argument('--config', default=\"examples/aishell/s0/conf/train_conformer.yaml\", help='config 
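Looking back at the encoder comparison that closes the previous notebook: spot-printing `xs[0]`/`encoder_out[0]` and `xs[1]`/`encoder_out[1]` only eyeballs two of the eight utterances. An illustrative one-loop alternative (using the `xs` and `encoder_out` defined in those cells) covers the whole batch:

```python
import numpy as np

# xs: (8, 13, 256) numpy reference from encoder.npz;
# encoder_out: the corresponding paddle tensor.
for i in range(xs.shape[0]):
    diff = np.abs(xs[i] - encoder_out[i].numpy()).max()
    print(f"utt {i}: max |espnet - paddle| = {diff:.3e}")
```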
file')\n", - "parser.add_argument('--train_data', default=\"examples/aishell/s0/raw_wav/train/format.data\", help='train data file')\n", - "parser.add_argument('--cv_data', default=\"examples/aishell/s0/raw_wav/dev/format.data\", help='cv data file')\n", - "parser.add_argument('--gpu',\n", - " type=int,\n", - " default=-1,\n", - " help='gpu id for this local rank, -1 for cpu')\n", - "parser.add_argument('--model_dir' , help='save model dir')\n", - "parser.add_argument('--checkpoint', help='checkpoint model')\n", - "parser.add_argument('--tensorboard_dir',\n", - " default='tensorboard',\n", - " help='tensorboard log dir')\n", - "parser.add_argument('--ddp.rank',\n", - " dest='rank',\n", - " default=0,\n", - " type=int,\n", - " help='global rank for distributed training')\n", - "parser.add_argument('--ddp.world_size',\n", - " dest='world_size',\n", - " default=-1,\n", - " type=int,\n", - " help='''number of total processes/gpus for\n", - " distributed training''')\n", - "parser.add_argument('--ddp.dist_backend',\n", - " dest='dist_backend',\n", - " default='nccl',\n", - " choices=['nccl', 'gloo'],\n", - " help='distributed backend')\n", - "parser.add_argument('--ddp.init_method',\n", - " dest='init_method',\n", - " default=None,\n", - " help='ddp init method')\n", - "parser.add_argument('--num_workers',\n", - " default=0,\n", - " type=int,\n", - " help='num of subprocess workers for reading')\n", - "parser.add_argument('--pin_memory',\n", - " action='store_true',\n", - " default=False,\n", - " help='Use pinned memory buffers used for reading')\n", - "parser.add_argument('--cmvn', default=\"examples/aishell/s0/raw_wav/train/global_cmvn\", help='global cmvn file')\n", - "\n", - "args = parser.parse_args([])\n", - "print(vars(args))" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "f5d6af9b", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Namespace(checkpoint=None, cmvn='examples/aishell/s0/raw_wav/train/global_cmvn', config='examples/aishell/s0/conf/train_conformer.yaml', cv_data='examples/aishell/s0/raw_wav/dev/format.data', dist_backend='nccl', gpu=-1, init_method=None, model_dir=None, num_workers=0, pin_memory=False, rank=0, tensorboard_dir='tensorboard', train_data='examples/aishell/s0/raw_wav/train/format.data', world_size=-1)\n" - ] - } - ], - "source": [ - "# Set random seed\n", - "torch.manual_seed(777)\n", - "print(args)\n", - "with open(args.config, 'r') as fin:\n", - " configs = yaml.load(fin, Loader=yaml.FullLoader)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "264bd353", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "7507 batches\n", - "896\n" - ] - } - ], - "source": [ - "raw_wav = configs['raw_wav']\n", - "\n", - "train_collate_func = CollateFunc(**configs['collate_conf'],\n", - " raw_wav=raw_wav)\n", - "\n", - "cv_collate_conf = copy.deepcopy(configs['collate_conf'])\n", - "# no augmenation on cv set\n", - "cv_collate_conf['spec_aug'] = False\n", - "cv_collate_conf['spec_sub'] = False\n", - "if raw_wav:\n", - " cv_collate_conf['feature_dither'] = 0.0\n", - " cv_collate_conf['speed_perturb'] = False\n", - " cv_collate_conf['wav_distortion_conf']['wav_distortion_rate'] = 0\n", - "cv_collate_func = CollateFunc(**cv_collate_conf, raw_wav=raw_wav)\n", - "\n", - "dataset_conf = configs.get('dataset_conf', {})\n", - "train_dataset = AudioDataset(args.train_data,\n", - " **dataset_conf,\n", - " raw_wav=raw_wav)\n", - "cv_dataset = 
AudioDataset(args.cv_data, **dataset_conf, raw_wav=raw_wav)\n", - "# 120098 data/train/wav.scp\n", - "print(len(train_dataset), 'batches')\n", - "# 14326 data/dev/wav.scp\n", - "print(len(cv_dataset))" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "88863d3c", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "896\n" - ] - } - ], - "source": [ - "train_sampler = None\n", - "cv_sampler = None\n", - "train_data_loader = DataLoader(train_dataset,\n", - " collate_fn=train_collate_func,\n", - " sampler=train_sampler,\n", - " #shuffle=(train_sampler is None),\n", - " shuffle=False,\n", - " pin_memory=args.pin_memory,\n", - " batch_size=1,\n", - " num_workers=args.num_workers)\n", - "cv_data_loader = DataLoader(cv_dataset,\n", - " collate_fn=cv_collate_func,\n", - " sampler=cv_sampler,\n", - " shuffle=False,\n", - " batch_size=1,\n", - " pin_memory=args.pin_memory,\n", - " num_workers=args.num_workers)\n", - "print(len(cv_data_loader))" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "10d5acd4", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "4233 vocab\n", - "80 feat dim\n" - ] - } - ], - "source": [ - "if raw_wav:\n", - " input_dim = configs['collate_conf']['feature_extraction_conf'][\n", - " 'mel_bins']\n", - "else:\n", - " input_dim = train_dataset.input_dim\n", - "vocab_size = train_dataset.output_dim\n", - "print(vocab_size, 'vocab')\n", - "print(input_dim , 'feat dim')" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "0380ef5a", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "examples/aishell/s0/raw_wav/train/global_cmvn\n" - ] - } - ], - "source": [ - "# Save configs to model_dir/train.yaml for inference and export\n", - "configs['input_dim'] = input_dim\n", - "configs['output_dim'] = vocab_size\n", - "configs['cmvn_file'] = args.cmvn\n", - "configs['is_json_cmvn'] = raw_wav\n", - "print(args.cmvn)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "15ebf2bf", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(80,)\n", - "(80,)\n", - "[ 9.87176362 9.93891555 10.23818678 10.85971412 11.68652649 12.2548801\n", - " 12.65768161 12.86138996 12.80733912 12.56625574 12.32007066 12.13879205\n", - " 12.31318868 12.55255216 12.61223855 12.56974526 12.38972728 12.14383338\n", - " 12.09285066 11.79395822 11.62259065 11.9263303 11.8154422 11.95122567\n", - " 11.83180553 11.88788759 11.79014437 11.88072035 11.90005711 11.97348142\n", - " 12.00982189 12.00881339 12.02619706 12.10479646 12.21555081 12.34399304\n", - " 12.45014401 12.4966879 12.48653775 12.3550783 12.39291732 12.2553737\n", - " 12.26496277 12.25314244 12.32545763 12.43359839 12.54867439 12.6763342\n", - " 12.80920698 12.92934681 12.96115138 12.96883353 12.99593057 13.04728142\n", - " 13.0588804 13.05737948 12.99921175 12.93402238 12.87429219 12.71652995\n", - " 12.48942004 12.27478385 12.26163069 12.28631891 12.31956049 12.4229073\n", - " 12.51480191 12.5785164 12.64719411 12.73762568 12.80017069 12.86872766\n", - " 12.96666856 13.06478583 13.15915908 13.27284306 13.31081821 13.23904279\n", - " 12.87936075 11.18310185]\n", - "[0.61219383 0.49700994 0.33439025 0.31503119 0.29640823 0.28411759\n", - " 0.26972922 0.25610475 0.24632936 0.24610228 0.24733299 0.24426536\n", - " 0.23751781 0.22987273 0.22659963 0.2268427 0.23059031 0.23420722\n", - " 0.23771761 0.2411352 
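The collate function is where variable-length utterances become a dense batch. wenet's `CollateFunc` also handles augmentation and feature extraction; its padding core reduces to something like the following sketch (hypothetical `pad_batch`, zero-padding only, not wenet's actual implementation):

```python
import numpy as np

def pad_batch(feats):
    """Schematic zero-padding collate: feats is a list of (T_i, D) arrays.
    Returns a (B, T_max, D) padded batch plus the original lengths."""
    lengths = np.array([f.shape[0] for f in feats], dtype=np.int32)
    t_max, dim = int(lengths.max()), feats[0].shape[1]
    padded = np.zeros((len(feats), t_max, dim), dtype=feats[0].dtype)
    for i, f in enumerate(feats):
        padded[i, :f.shape[0]] = f  # real frames first, zeros after
    return padded, lengths
```

This is how batch shapes like the earlier `(8, 57, 83)` with per-utterance lengths `[57 50 48 ...]` arise: 8 utterances padded to the longest (57 frames) with 83-dim features.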
0.24404673 0.24557175 0.24724932 0.25055198\n",
 - " 0.25482755 0.2602407  0.26363878 0.26503898 0.2648467  0.26435072\n",
 - " 0.26353625 0.26364794 0.26411054 0.26339948 0.26212082 0.26146597\n",
 - " 0.26196556 0.26365859 0.26592959 0.26963884 0.27392766 0.27818809\n",
 - " 0.28313664 0.2863325  0.28713431 0.28649323 0.28636648 0.2867843\n",
 - " 0.28635904 0.28562022 0.28492711 0.28429201 0.28402977 0.28401045\n",
 - " 0.28560797 0.28728033 0.28969549 0.29351627 0.29826453 0.30572631\n",
 - " 0.31811682 0.32887739 0.33288219 0.33326245 0.33014147 0.32403202\n",
 - " 0.31903576 0.31316258 0.30741037 0.30370692 0.30204833 0.30049064\n",
 - " 0.29901079 0.29824511 0.29812308 0.29753329 0.29779342 0.30175296\n",
 - " 0.30955538 0.32904205]\n"
 - ]
 - }
 - ],
 - "source": [
 - "import json\n",
 - "import logging\n",
 - "import math\n",
 - "import sys\n",
 - "\n",
 - "import numpy as np\n",
 - "\n",
 - "# logging and sys are needed by the error path of _load_kaldi_cmvn below\n",
 - "logger = logging.getLogger(__name__)\n",
 - "\n",
 - "def _load_json_cmvn(json_cmvn_file):\n",
 - "    \"\"\" Load the json format cmvn stats file and calculate cmvn\n",
 - "\n",
 - "    Args:\n",
 - "        json_cmvn_file: cmvn stats file in json format\n",
 - "\n",
 - "    Returns:\n",
 - "        a numpy array of [means, vars]\n",
 - "    \"\"\"\n",
 - "    with open(json_cmvn_file) as f:\n",
 - "        cmvn_stats = json.load(f)\n",
 - "\n",
 - "    means = cmvn_stats['mean_stat']\n",
 - "    variance = cmvn_stats['var_stat']\n",
 - "    count = cmvn_stats['frame_num']\n",
 - "    for i in range(len(means)):\n",
 - "        means[i] /= count\n",
 - "        variance[i] = variance[i] / count - means[i] * means[i]\n",
 - "        if variance[i] < 1.0e-20:\n",
 - "            variance[i] = 1.0e-20\n",
 - "        variance[i] = 1.0 / math.sqrt(variance[i])\n",
 - "    cmvn = np.array([means, variance])\n",
 - "    return cmvn\n",
 - "\n",
 - "\n",
 - "def _load_kaldi_cmvn(kaldi_cmvn_file):\n",
 - "    \"\"\" Load the kaldi format cmvn stats file and calculate cmvn\n",
 - "\n",
 - "    Args:\n",
 - "        kaldi_cmvn_file: kaldi text style global cmvn file, which\n",
 - "            is generated by:\n",
 - "            compute-cmvn-stats --binary=false scp:feats.scp global_cmvn\n",
 - "\n",
 - "    Returns:\n",
 - "        a numpy array of [means, vars]\n",
 - "    \"\"\"\n",
 - "    means = []\n",
 - "    variance = []\n",
 - "    with open(kaldi_cmvn_file, 'r') as fid:\n",
 - "        # kaldi binary file start with '\\0B'\n",
 - "        if fid.read(2) == '\\0B':\n",
 - "            logger.error('kaldi cmvn binary file is not supported, please '\n",
 - "                         'recompute it by: compute-cmvn-stats --binary=false '\n",
 - "                         ' scp:feats.scp global_cmvn')\n",
 - "            sys.exit(1)\n",
 - "        fid.seek(0)\n",
 - "        arr = fid.read().split()\n",
 - "        assert (arr[0] == '[')\n",
 - "        assert (arr[-2] == '0')\n",
 - "        assert (arr[-1] == ']')\n",
 - "        feat_dim = int((len(arr) - 2 - 2) / 2)\n",
 - "        for i in range(1, feat_dim + 1):\n",
 - "            means.append(float(arr[i]))\n",
 - "        count = float(arr[feat_dim + 1])\n",
 - "        for i in range(feat_dim + 2, 2 * feat_dim + 2):\n",
 - "            variance.append(float(arr[i]))\n",
 - "\n",
 - "    for i in range(len(means)):\n",
 - "        means[i] /= count\n",
 - "        variance[i] = variance[i] / count - means[i] * means[i]\n",
 - "        if variance[i] < 1.0e-20:\n",
 - "            variance[i] = 1.0e-20\n",
 - "        variance[i] = 1.0 / math.sqrt(variance[i])\n",
 - "    cmvn = np.array([means, variance])\n",
 - "    return cmvn\n",
 - "\n",
 - "\n",
 - "def _load_npz_cmvn(npz_cmvn_file, eps=1e-20):\n",
 - "    npzfile = np.load(npz_cmvn_file)\n",
 - "    means = npzfile[\"mean\"]  #(1, D)\n",
 - "    std = npzfile[\"std\"]  #(1, D)\n",
 - "    std = np.clip(std, eps, None)\n",
 - "    variance = 1.0 / std\n",
 - "    cmvn = np.array([means, variance])\n",
 - "    return cmvn\n",
 - "\n",
 - "\n",
 - "def load_cmvn(cmvn_file: str, filetype: str):\n",
 - "    \"\"\"load cmvn from file.\n",
 - "\n",
 - "    Args:\n",
 - "        cmvn_file (str): 
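Note that all three loaders return `[means, 1.0/std]` rather than `[means, std]`, so applying CMVN is just a subtract and a multiply; this matches the `GlobalCMVN` module that appears in the model printout below. A minimal sketch of the application step (hypothetical `apply_cmvn` helper, not part of the notebook):

```python
import numpy as np

def apply_cmvn(feat, mean, istd):
    """feat: (T, D) feature frames; mean, istd: (D,) arrays from load_cmvn.
    Returns (feat - mean) * istd, i.e. per-dimension zero mean, unit variance."""
    return (feat - mean) * istd
```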
cmvn path.\n", - " filetype (str): file type, optional[npz, json, kaldi].\n", - "\n", - " Raises:\n", - " ValueError: file type not support.\n", - "\n", - " Returns:\n", - " Tuple[np.ndarray, np.ndarray]: mean, istd\n", - " \"\"\"\n", - " assert filetype in ['npz', 'json', 'kaldi'], filetype\n", - " filetype = filetype.lower()\n", - " if filetype == \"json\":\n", - " cmvn = _load_json_cmvn(cmvn_file)\n", - " elif filetype == \"kaldi\":\n", - " cmvn = _load_kaldi_cmvn(cmvn_file)\n", - " elif filetype == \"npz\":\n", - " cmvn = _load_npz_cmvn(cmvn_file)\n", - " else:\n", - " raise ValueError(f\"cmvn file type no support: {filetype}\")\n", - " return cmvn[0], cmvn[1]\n", - "\n", - "mean, istd = load_cmvn(args.cmvn, 'json')\n", - "print(mean.shape)\n", - "print(istd.shape)\n", - "print(mean)\n", - "print(istd)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "3cfa5e23", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ASRModel(\n", - " (encoder): ConformerEncoder(\n", - " (global_cmvn): GlobalCMVN()\n", - " (embed): Conv2dSubsampling4(\n", - " (conv): Sequential(\n", - " (0): Conv2d(1, 256, kernel_size=(3, 3), stride=(2, 2))\n", - " (1): ReLU()\n", - " (2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2))\n", - " (3): ReLU()\n", - " )\n", - " (out): Sequential(\n", - " (0): Linear(in_features=4864, out_features=256, bias=True)\n", - " )\n", - " (pos_enc): RelPositionalEncoding(\n", - " (dropout): Dropout(p=0.1, inplace=False)\n", - " )\n", - " )\n", - " (after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n", - " (encoders): ModuleList(\n", - " (0): ConformerEncoderLayer(\n", - " (self_attn): RelPositionMultiHeadedAttention(\n", - " (linear_q): Linear(in_features=256, out_features=256, bias=True)\n", - " (linear_k): Linear(in_features=256, out_features=256, bias=True)\n", - " (linear_v): Linear(in_features=256, out_features=256, bias=True)\n", - " (linear_out): Linear(in_features=256, out_features=256, bias=True)\n", - " (dropout): Dropout(p=0.0, inplace=False)\n", - " (linear_pos): Linear(in_features=256, out_features=256, bias=False)\n", - " )\n", - " (feed_forward): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, bias=True)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, inplace=False)\n", - " (w_2): Linear(in_features=2048, out_features=256, bias=True)\n", - " )\n", - " (feed_forward_macaron): PositionwiseFeedForward(\n", - " (w_1): Linear(in_features=256, out_features=2048, bias=True)\n", - " (activation): Swish()\n", - " (dropout): Dropout(p=0.1, inplace=False)\n", - " (w_2): Linear(in_features=2048, out_features=256, bias=True)\n", - " )\n", - " (conv_module): ConvolutionModule(\n", - " (pointwise_conv1): Conv1d(256, 512, kernel_size=(1,), stride=(1,))\n", - " (depthwise_conv): Conv1d(256, 256, kernel_size=(15,), stride=(1,), padding=(7,), groups=256)\n", - " (norm): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (pointwise_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))\n", - " (activation): Swish()\n", - " )\n", - " (norm_ff): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n", - " (norm_mha): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n", - " (norm_ff_macaron): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n", - " (norm_conv): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n", - " (norm_final): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n", - " (dropout): 
Dropout(p=0.1, inplace=False)\n",
 - "        (concat_linear): Linear(in_features=512, out_features=256, bias=True)\n",
 - "      )\n",
 - "      (1)-(11): [11 more ConformerEncoderLayer blocks, identical in structure to (0) above]\n",
 - "    )\n",
 - "  )\n",
 - "  (decoder): TransformerDecoder(\n",
 - "    (embed): Sequential(\n",
 - "      (0): Embedding(4233, 256)\n",
 - "      (1): PositionalEncoding(\n",
 - "        (dropout): Dropout(p=0.1, inplace=False)\n",
 - "      )\n",
 - "    )\n",
 - "    (after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n",
 - "    (output_layer): Linear(in_features=256, out_features=4233, bias=True)\n",
 - "    (decoders): ModuleList(\n",
 - "      (0): DecoderLayer(\n",
 - "        (self_attn): MultiHeadedAttention(\n",
 - "          (linear_q): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (linear_k): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (linear_v): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (linear_out): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (dropout): Dropout(p=0.0, inplace=False)\n",
 - "        )\n",
 - "        (src_attn): MultiHeadedAttention(\n",
 - "          (linear_q): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (linear_k): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (linear_v): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (linear_out): Linear(in_features=256, out_features=256, bias=True)\n",
 - "          (dropout): Dropout(p=0.0, inplace=False)\n",
 - "        )\n",
 - "        (feed_forward): PositionwiseFeedForward(\n",
 - "          (w_1): Linear(in_features=256, out_features=2048, bias=True)\n",
 - "          (activation): ReLU()\n",
 - "          (dropout): Dropout(p=0.1, inplace=False)\n",
 - "          (w_2): Linear(in_features=2048, out_features=256, bias=True)\n",
 - "        )\n",
 - "        (norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n",
 - "        (norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n",
 - "        (norm3): LayerNorm((256,), eps=1e-12, elementwise_affine=True)\n",
 - "        (dropout): Dropout(p=0.1, inplace=False)\n",
 - "        (concat_linear1): Linear(in_features=512, out_features=256, bias=True)\n",
 - "        (concat_linear2): Linear(in_features=512, out_features=256, bias=True)\n",
 - "      )\n",
 - "      (1)-(5): [5 more DecoderLayer blocks, identical in structure to (0) above]\n",
 - "    )\n",
 - "  )\n",
 - "  (ctc): CTC(\n",
 - "    (ctc_lo): Linear(in_features=256, out_features=4233, bias=True)\n",
 - "    (ctc_loss): CTCLoss()\n",
 - "  )\n",
 - "  (criterion_att): LabelSmoothingLoss(\n",
 - "    (criterion): KLDivLoss()\n",
 - "  )\n",
 - ")\n"
 - ]
 - }
 - ],
 - "source": [
 - "# Init asr model from configs\n",
 - "model = init_asr_model(configs)\n",
 - "print(model)"
 - ]
 - },
 - {
 - "cell_type": "code",
 - "execution_count": 11,
 - "id": "3c780af5",
 - "metadata": {},
 - "outputs": [],
 - "source": [
 - "\n",
 - "def summary(layer, print_func=print):\n",
 - "    num_params = num_elements = 0\n",
 - "    for name, param in layer.state_dict().items():\n",
 - "        if print_func:\n",
 - "            print_func(\n",
 - "                \"{} | {} | {}\".format(name, param.shape, np.prod(param.shape)))\n",
 - "        num_elements += np.prod(param.shape)\n",
 - "        num_params += 1\n",
 - "    if print_func:\n",
 - "        print_func(\n",
 - "            f\"Total parameters: {num_params}, {num_elements} elements.\"\n",
 - "        )\n",
 - "    \n",
 - "def print_params(model, print_func=print):\n",
 - "    if print_func is None:\n",
 - "        return\n",
 - "    total = 0.0\n",
 - "    num_params = 0.0\n",
 - "    for n, p in model.named_parameters():\n",
 - "        msg = f\"{n} | {p.shape} | {np.prod(p.shape)} | {p.requires_grad}\"\n",
 - "        total += np.prod(p.shape)\n",
 - "        num_params += 1\n",
 - "        if print_func:\n",
 - "            print_func(msg)\n",
 - "    if print_func:\n",
 - "        print_func(f\"Total parameters: {num_params}, {total} elements.\")"
 - ]
 - },
 - {
 - "cell_type": "code",
 - "execution_count": 12,
 - "id": "e159a200",
 - "metadata": {},
 - "outputs": [
 - {
 - "name": "stdout",
 - "output_type": "stream",
 - "text": [
 - "encoder.global_cmvn.mean | torch.Size([80]) | 80\n",
 - "encoder.global_cmvn.istd | torch.Size([80]) | 80\n",
 - "encoder.embed.conv.0.weight | torch.Size([256, 1, 3, 3]) | 2304\n",
 - "encoder.embed.conv.0.bias | torch.Size([256]) | 256\n",
 - "encoder.embed.conv.2.weight | torch.Size([256, 256, 3, 3]) | 589824\n",
 - "encoder.embed.conv.2.bias | torch.Size([256]) | 256\n",
 - "encoder.embed.out.0.weight | torch.Size([256, 4864]) | 1245184\n",
 - "encoder.embed.out.0.bias | torch.Size([256]) | 256\n",
 - "encoder.after_norm.weight | torch.Size([256]) | 256\n",
 - "encoder.after_norm.bias | torch.Size([256]) | 256\n",
 - "encoder.encoders.0.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n",
 - "encoder.encoders.0.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n",
 - "encoder.encoders.0.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n",
 - "encoder.encoders.0.self_attn.linear_q.bias | torch.Size([256]) | 256\n",
 - "encoder.encoders.0.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n",
 - "encoder.encoders.0.self_attn.linear_k.bias | torch.Size([256]) | 256\n",
 - "encoder.encoders.0.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n",
 - "encoder.encoders.0.self_attn.linear_v.bias | torch.Size([256]) | 256\n",
 - "encoder.encoders.0.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n",
 - "encoder.encoders.0.self_attn.linear_out.bias | torch.Size([256]) | 256\n",
 - "encoder.encoders.0.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n",
 - "encoder.encoders.0.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n",
 - "encoder.encoders.0.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n",
 - "encoder.encoders.0.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n",
 - "encoder.encoders.0.feed_forward.w_2.bias | torch.Size([256]) | 256\n",
 - "encoder.encoders.0.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 
524288\n", - "encoder.encoders.0.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.0.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.0.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.0.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.0.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.0.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.0.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.0.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.0.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.0.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.0.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.0.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.0.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.0.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.1.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.1.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.1.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.1.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.1.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.1.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.1.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.1.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.1.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.1.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.1.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.1.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - 
"encoder.encoders.1.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.1.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.1.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.1.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.1.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.1.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.1.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.1.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.1.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.1.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.1.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.1.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.2.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.2.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.2.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.2.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.2.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.2.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.2.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.2.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.2.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.2.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.2.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.2.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.2.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - 
"encoder.encoders.2.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.2.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.2.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.2.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.2.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.2.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.2.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.2.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.2.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.2.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.2.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.3.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.3.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.3.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.3.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.3.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.3.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.3.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.3.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.3.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.3.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.3.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.3.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.3.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.3.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.3.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - 
"encoder.encoders.3.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.3.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.3.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.3.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.3.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.3.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.3.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.3.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.3.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.4.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.4.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.4.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.4.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.4.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.4.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.4.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.4.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.4.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.4.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.4.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.4.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.4.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.4.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.4.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.4.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.4.conv_module.norm.bias | 
torch.Size([256]) | 256\n", - "encoder.encoders.4.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.4.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.4.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.4.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.4.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.4.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.4.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.4.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.5.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.5.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.5.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.5.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.5.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.5.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.5.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.5.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.5.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.5.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.5.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.5.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.5.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.5.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.5.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.5.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.5.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.5.conv_module.norm.running_var | torch.Size([256]) | 256\n", - 
"encoder.encoders.5.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.5.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.5.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.5.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.5.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.5.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.6.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.6.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.6.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.6.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.6.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.6.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.6.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.6.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.6.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.6.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.6.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.6.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.6.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.6.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.6.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.6.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.6.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.6.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.6.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.6.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - 
"encoder.encoders.6.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.6.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.6.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.6.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.7.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.7.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.7.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.7.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.7.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.7.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.7.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.7.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.7.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.7.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.7.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.7.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.7.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.7.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.7.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.7.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.7.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.7.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.7.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.7.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.7.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_ff.bias | torch.Size([256]) | 256\n", 
- "encoder.encoders.7.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.7.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.7.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.7.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.8.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.8.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.8.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.8.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.8.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.8.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.8.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.8.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.8.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.8.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.8.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.8.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.8.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.8.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.8.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.8.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.8.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.8.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.8.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.8.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.8.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - 
"encoder.encoders.8.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.8.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.8.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.8.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.9.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.9.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.9.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.9.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.9.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.9.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.9.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.9.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.9.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.9.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.9.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.9.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.9.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.9.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.9.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.9.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.9.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.9.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.9.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.9.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.9.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_conv.bias | torch.Size([256]) | 256\n", - 
"encoder.encoders.9.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.9.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.9.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.9.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.10.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.10.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.10.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.10.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.10.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.10.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.10.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.10.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.10.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.10.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.10.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.10.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.10.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.10.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.10.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.10.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.10.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.10.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.10.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.10.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.10.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.10.norm_final.bias | torch.Size([256]) | 256\n", - 
"encoder.encoders.10.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.10.concat_linear.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.self_attn.pos_bias_u | torch.Size([4, 64]) | 256\n", - "encoder.encoders.11.self_attn.pos_bias_v | torch.Size([4, 64]) | 256\n", - "encoder.encoders.11.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.11.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.11.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.11.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.11.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536\n", - "encoder.encoders.11.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.11.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.11.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.11.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "encoder.encoders.11.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048\n", - "encoder.encoders.11.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "encoder.encoders.11.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072\n", - "encoder.encoders.11.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512\n", - "encoder.encoders.11.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840\n", - "encoder.encoders.11.conv_module.depthwise_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.conv_module.norm.weight | torch.Size([256]) | 256\n", - "encoder.encoders.11.conv_module.norm.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.conv_module.norm.running_mean | torch.Size([256]) | 256\n", - "encoder.encoders.11.conv_module.norm.running_var | torch.Size([256]) | 256\n", - "encoder.encoders.11.conv_module.norm.num_batches_tracked | torch.Size([]) | 1.0\n", - "encoder.encoders.11.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536\n", - "encoder.encoders.11.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_ff.weight | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_ff.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_mha.weight | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_mha.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_ff_macaron.weight | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_ff_macaron.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_conv.weight | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_conv.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_final.weight | torch.Size([256]) | 256\n", - "encoder.encoders.11.norm_final.bias | torch.Size([256]) | 256\n", - "encoder.encoders.11.concat_linear.weight | torch.Size([256, 512]) | 131072\n", - "encoder.encoders.11.concat_linear.bias | torch.Size([256]) | 256\n", - 
"decoder.embed.0.weight | torch.Size([4233, 256]) | 1083648\n", - "decoder.after_norm.weight | torch.Size([256]) | 256\n", - "decoder.after_norm.bias | torch.Size([256]) | 256\n", - "decoder.output_layer.weight | torch.Size([4233, 256]) | 1083648\n", - "decoder.output_layer.bias | torch.Size([4233]) | 4233\n", - "decoder.decoders.0.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.src_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.src_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.src_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.0.src_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "decoder.decoders.0.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "decoder.decoders.0.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "decoder.decoders.0.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.norm1.weight | torch.Size([256]) | 256\n", - "decoder.decoders.0.norm1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.norm2.weight | torch.Size([256]) | 256\n", - "decoder.decoders.0.norm2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.norm3.weight | torch.Size([256]) | 256\n", - "decoder.decoders.0.norm3.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.concat_linear1.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.0.concat_linear1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.0.concat_linear2.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.0.concat_linear2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.src_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.src_attn.linear_k.bias | 
torch.Size([256]) | 256\n", - "decoder.decoders.1.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.src_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.1.src_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "decoder.decoders.1.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "decoder.decoders.1.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "decoder.decoders.1.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.norm1.weight | torch.Size([256]) | 256\n", - "decoder.decoders.1.norm1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.norm2.weight | torch.Size([256]) | 256\n", - "decoder.decoders.1.norm2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.norm3.weight | torch.Size([256]) | 256\n", - "decoder.decoders.1.norm3.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.concat_linear1.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.1.concat_linear1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.1.concat_linear2.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.1.concat_linear2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.src_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.src_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.src_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.2.src_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "decoder.decoders.2.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "decoder.decoders.2.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "decoder.decoders.2.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.norm1.weight | torch.Size([256]) | 256\n", - "decoder.decoders.2.norm1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.norm2.weight | torch.Size([256]) | 256\n", - "decoder.decoders.2.norm2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.norm3.weight | torch.Size([256]) | 256\n", - "decoder.decoders.2.norm3.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.concat_linear1.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.2.concat_linear1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.2.concat_linear2.weight | 
torch.Size([256, 512]) | 131072\n", - "decoder.decoders.2.concat_linear2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.src_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.src_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.src_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.3.src_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "decoder.decoders.3.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "decoder.decoders.3.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "decoder.decoders.3.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.norm1.weight | torch.Size([256]) | 256\n", - "decoder.decoders.3.norm1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.norm2.weight | torch.Size([256]) | 256\n", - "decoder.decoders.3.norm2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.norm3.weight | torch.Size([256]) | 256\n", - "decoder.decoders.3.norm3.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.concat_linear1.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.3.concat_linear1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.3.concat_linear2.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.3.concat_linear2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.src_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.src_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.src_attn.linear_v.bias | torch.Size([256]) | 256\n", - 
"decoder.decoders.4.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.4.src_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "decoder.decoders.4.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "decoder.decoders.4.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "decoder.decoders.4.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.norm1.weight | torch.Size([256]) | 256\n", - "decoder.decoders.4.norm1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.norm2.weight | torch.Size([256]) | 256\n", - "decoder.decoders.4.norm2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.norm3.weight | torch.Size([256]) | 256\n", - "decoder.decoders.4.norm3.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.concat_linear1.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.4.concat_linear1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.4.concat_linear2.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.4.concat_linear2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.self_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.self_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.self_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.self_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.src_attn.linear_q.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.src_attn.linear_k.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.src_attn.linear_v.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536\n", - "decoder.decoders.5.src_attn.linear_out.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288\n", - "decoder.decoders.5.feed_forward.w_1.bias | torch.Size([2048]) | 2048\n", - "decoder.decoders.5.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288\n", - "decoder.decoders.5.feed_forward.w_2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.norm1.weight | torch.Size([256]) | 256\n", - "decoder.decoders.5.norm1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.norm2.weight | torch.Size([256]) | 256\n", - "decoder.decoders.5.norm2.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.norm3.weight | torch.Size([256]) | 256\n", - "decoder.decoders.5.norm3.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.concat_linear1.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.5.concat_linear1.bias | torch.Size([256]) | 256\n", - "decoder.decoders.5.concat_linear2.weight | torch.Size([256, 512]) | 131072\n", - "decoder.decoders.5.concat_linear2.bias | torch.Size([256]) | 256\n", - "ctc.ctc_lo.weight | torch.Size([4233, 256]) | 1083648\n", - "ctc.ctc_lo.bias | 
- "Total parameters: 701, 49355454.0 elements.\n"
- ]
- }
- ],
- "source": [
- "summary(model)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "8494c6ab",
- "metadata": {},
- "outputs": [],
- "source": []
- },
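The two dumps are easy to misread against each other: `summary()` walks `state_dict()` (701 tensors, buffers included), while `print_params()` below walks `named_parameters()` only, and both print float element counts because `np.prod` over an empty `torch.Size([])` returns `1.0`. A small self-contained check of both behaviors (a standalone sketch, not part of the notebook; `count_trainable` is a hypothetical helper):

```python
import numpy as np
import torch
import torch.nn as nn

# np.prod over an empty shape returns the float 1.0, which is why the
# scalar num_batches_tracked buffers print as "| torch.Size([]) | 1.0"
# and the grand total prints as 49355454.0 rather than an int.
print(np.prod(torch.Size([])))       # 1.0
print(np.prod(torch.Size([4, 64])))  # 256

# state_dict() includes buffers; named_parameters() does not, which is
# why BatchNorm running stats appear in summary() but not below.
bn = nn.BatchNorm1d(4)
print(len(bn.state_dict()))              # 5: weight, bias, running_mean,
                                         #    running_var, num_batches_tracked
print(len(list(bn.named_parameters())))  # 2: weight, bias

# An integer-only count of trainable parameters, for comparison:
def count_trainable(model: nn.Module) -> int:
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
```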
- {
- "cell_type": "code",
- "execution_count": 13,
- "id": "0648a969",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "encoder.embed.conv.0.weight | torch.Size([256, 1, 3, 3]) | 2304 | True\n",
- "encoder.embed.conv.0.bias | torch.Size([256]) | 256 | True\n",
- "encoder.embed.conv.2.weight | torch.Size([256, 256, 3, 3]) | 589824 | True\n",
- "encoder.embed.conv.2.bias | torch.Size([256]) | 256 | True\n",
- "encoder.embed.out.0.weight | torch.Size([256, 4864]) | 1245184 | True\n",
- "encoder.embed.out.0.bias | torch.Size([256]) | 256 | True\n",
- "encoder.after_norm.weight | torch.Size([256]) | 256 | True\n",
- "encoder.after_norm.bias | torch.Size([256]) | 256 | True\n",
- " ... (encoder.encoders.0 through encoder.encoders.3: the same per-layer rows as in the summary() dump above, minus the BatchNorm buffers, each tensor flagged requires_grad=True; listing continues) ...\n",
- "encoder.encoders.3.norm_final.weight | 
torch.Size([256]) | 256 | True\n", - "encoder.encoders.3.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.3.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.3.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.4.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.4.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.4.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.4.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.4.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.4.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.4.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.4.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.4.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.4.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.4.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.4.concat_linear.weight | torch.Size([256, 512]) | 131072 | 
True\n", - "encoder.encoders.4.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.5.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.5.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.5.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.5.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.5.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.5.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.5.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.5.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.5.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.5.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.5.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.5.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.5.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - 
"encoder.encoders.6.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.6.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.6.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.6.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.6.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.6.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.6.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.6.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.6.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.6.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.6.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.6.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.6.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.7.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.7.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - 
"encoder.encoders.7.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.7.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.7.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.7.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.7.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.7.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.7.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.7.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.7.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.7.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.7.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.7.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.8.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.8.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - 
"encoder.encoders.8.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.8.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.8.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.8.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.8.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.8.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.8.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.8.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.8.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.8.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.8.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.8.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.9.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.9.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.9.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.9.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - 
"encoder.encoders.9.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.9.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.9.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.9.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.9.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.9.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.9.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.9.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.9.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.9.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.9.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.9.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.10.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.10.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.10.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - 
"encoder.encoders.10.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.self_attn.linear_pos.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.10.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.10.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.10.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.10.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.10.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.10.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.10.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.10.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.10.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.10.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.self_attn.pos_bias_u | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.11.self_attn.pos_bias_v | torch.Size([4, 64]) | 256 | True\n", - "encoder.encoders.11.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.11.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.self_attn.linear_pos.weight | 
torch.Size([256, 256]) | 65536 | True\n", - "encoder.encoders.11.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.11.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.11.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.11.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "encoder.encoders.11.feed_forward_macaron.w_2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv1.weight | torch.Size([512, 256, 1]) | 131072 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv1.bias | torch.Size([512]) | 512 | True\n", - "encoder.encoders.11.conv_module.depthwise_conv.weight | torch.Size([256, 1, 15]) | 3840 | True\n", - "encoder.encoders.11.conv_module.depthwise_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.conv_module.norm.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.conv_module.norm.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv2.weight | torch.Size([256, 256, 1]) | 65536 | True\n", - "encoder.encoders.11.conv_module.pointwise_conv2.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_ff.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_ff.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_mha.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_mha.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_ff_macaron.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_ff_macaron.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_conv.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_conv.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_final.weight | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.norm_final.bias | torch.Size([256]) | 256 | True\n", - "encoder.encoders.11.concat_linear.weight | torch.Size([256, 512]) | 131072 | True\n", - "encoder.encoders.11.concat_linear.bias | torch.Size([256]) | 256 | True\n", - "decoder.embed.0.weight | torch.Size([4233, 256]) | 1083648 | True\n", - "decoder.after_norm.weight | torch.Size([256]) | 256 | True\n", - "decoder.after_norm.bias | torch.Size([256]) | 256 | True\n", - "decoder.output_layer.weight | torch.Size([4233, 256]) | 1083648 | True\n", - "decoder.output_layer.bias | torch.Size([4233]) | 4233 | True\n", - "decoder.decoders.0.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - 
"decoder.decoders.0.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.0.src_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "decoder.decoders.0.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "decoder.decoders.0.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "decoder.decoders.0.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.norm1.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.norm1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.norm2.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.norm2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.norm3.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.norm3.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.concat_linear1.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.0.concat_linear1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.0.concat_linear2.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.0.concat_linear2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.1.src_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "decoder.decoders.1.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "decoder.decoders.1.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "decoder.decoders.1.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - 
"decoder.decoders.1.norm1.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.norm1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.norm2.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.norm2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.norm3.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.norm3.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.concat_linear1.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.1.concat_linear1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.1.concat_linear2.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.1.concat_linear2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.2.src_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "decoder.decoders.2.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "decoder.decoders.2.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "decoder.decoders.2.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.norm1.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.norm1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.norm2.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.norm2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.norm3.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.norm3.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.concat_linear1.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.2.concat_linear1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.2.concat_linear2.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.2.concat_linear2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - 
"decoder.decoders.3.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.3.src_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "decoder.decoders.3.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "decoder.decoders.3.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "decoder.decoders.3.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.norm1.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.norm1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.norm2.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.norm2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.norm3.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.norm3.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.concat_linear1.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.3.concat_linear1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.3.concat_linear2.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.3.concat_linear2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.4.src_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | 
True\n", - "decoder.decoders.4.src_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "decoder.decoders.4.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "decoder.decoders.4.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "decoder.decoders.4.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.norm1.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.norm1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.norm2.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.norm2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.norm3.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.norm3.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.concat_linear1.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.4.concat_linear1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.4.concat_linear2.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.4.concat_linear2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.self_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.self_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.self_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.self_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.self_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.src_attn.linear_q.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_q.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.src_attn.linear_k.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_k.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.src_attn.linear_v.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_v.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.src_attn.linear_out.weight | torch.Size([256, 256]) | 65536 | True\n", - "decoder.decoders.5.src_attn.linear_out.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.feed_forward.w_1.weight | torch.Size([2048, 256]) | 524288 | True\n", - "decoder.decoders.5.feed_forward.w_1.bias | torch.Size([2048]) | 2048 | True\n", - "decoder.decoders.5.feed_forward.w_2.weight | torch.Size([256, 2048]) | 524288 | True\n", - "decoder.decoders.5.feed_forward.w_2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.norm1.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.norm1.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.norm2.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.norm2.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.norm3.weight | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.norm3.bias | torch.Size([256]) | 256 | True\n", - "decoder.decoders.5.concat_linear1.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.5.concat_linear1.bias | torch.Size([256]) | 256 | True\n", - 
"decoder.decoders.5.concat_linear2.weight | torch.Size([256, 512]) | 131072 | True\n", - "decoder.decoders.5.concat_linear2.bias | torch.Size([256]) | 256 | True\n", - "ctc.ctc_lo.weight | torch.Size([4233, 256]) | 1083648 | True\n", - "ctc.ctc_lo.bias | torch.Size([4233]) | 4233 | True\n", - "Total parameters: 663.0, 49349138.0 elements.\n" - ] - } - ], - "source": [ - "print_params(model)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "5ad6de2a", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['BAC009S0739W0246', 'BAC009S0727W0424', 'BAC009S0753W0412', 'BAC009S0756W0206', 'BAC009S0740W0414', 'BAC009S0728W0426', 'BAC009S0739W0214', 'BAC009S0753W0423', 'BAC009S0734W0201', 'BAC009S0740W0427', 'BAC009S0730W0423', 'BAC009S0728W0367', 'BAC009S0730W0418', 'BAC009S0727W0157', 'BAC009S0749W0409', 'BAC009S0727W0418']\n", - "torch.Size([16, 207, 80])\n", - "tensor([[[ 8.9946, 9.5383, 9.1916, ..., 10.5074, 9.5633, 8.2564],\n", - " [ 9.7988, 10.4052, 9.2651, ..., 10.2512, 9.5440, 8.8738],\n", - " [10.6891, 10.3955, 8.0535, ..., 9.9067, 10.0649, 8.0509],\n", - " ...,\n", - " [ 9.2180, 9.6507, 8.5053, ..., 9.6872, 8.7425, 7.9865],\n", - " [10.1291, 9.9352, 9.3798, ..., 9.5639, 9.8260, 8.9795],\n", - " [ 9.0955, 7.1338, 9.4680, ..., 9.4727, 9.0212, 7.4479]],\n", - "\n", - " [[11.4310, 10.6719, 6.0841, ..., 9.3827, 8.7297, 7.5316],\n", - " [ 9.7317, 7.8105, 7.5715, ..., 10.0430, 9.2436, 7.3541],\n", - " [10.6502, 10.6006, 8.4678, ..., 9.2814, 9.1869, 8.0703],\n", - " ...,\n", - " [ 9.0970, 9.2637, 8.0753, ..., 8.4318, 8.3705, 8.0029],\n", - " [10.4617, 10.1478, 6.7693, ..., 9.7794, 9.5775, 8.0807],\n", - " [ 7.7944, 5.6211, 7.9751, ..., 9.9972, 9.8497, 8.0313]],\n", - "\n", - " [[ 7.3456, 7.8964, 7.5796, ..., 11.6310, 10.4513, 9.1236],\n", - " [ 8.6287, 8.4631, 7.4992, ..., 12.4160, 10.9757, 8.9426],\n", - " [ 9.8314, 10.2813, 8.9724, ..., 12.1387, 10.4017, 9.0055],\n", - " ...,\n", - " [ 7.0896, 7.4055, 6.8143, ..., 9.3252, 9.2732, 8.3534],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]],\n", - "\n", - " ...,\n", - "\n", - " [[10.9332, 10.4644, 7.7203, ..., 10.3488, 9.3023, 7.1553],\n", - " [10.4499, 9.9070, 9.0293, ..., 9.9525, 9.4141, 7.5593],\n", - " [10.4877, 9.8126, 9.8952, ..., 9.5866, 9.3413, 7.7849],\n", - " ...,\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]],\n", - "\n", - " [[ 9.9444, 9.5859, 8.2203, ..., 11.5886, 11.0450, 8.8171],\n", - " [ 7.6784, 8.3224, 7.5330, ..., 11.0551, 10.5357, 9.2746],\n", - " [ 8.6262, 9.6759, 9.8410, ..., 11.3788, 10.9221, 8.9914],\n", - " ...,\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]],\n", - "\n", - " [[ 8.1079, 7.7590, 6.7103, ..., 12.6506, 11.4662, 11.0615],\n", - " [11.3803, 11.2220, 8.6589, ..., 12.8106, 12.2222, 11.6893],\n", - " [10.6777, 9.9206, 8.0461, ..., 13.5729, 12.5624, 11.1550],\n", - " ...,\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000],\n", - " [ 0.0000, 0.0000, 0.0000, ..., 0.0000, 0.0000, 0.0000]]])\n", - "tensor([207, 207, 205, 205, 203, 203, 198, 197, 195, 188, 186, 186, 185, 180,\n", - " 166, 163], dtype=torch.int32)\n", - 
"tensor([[2995, 3116, 1209, 565, -1, -1],\n", - " [ 236, 1176, 331, 66, 3925, 4077],\n", - " [2693, 524, 234, 1145, 366, -1],\n", - " [3875, 4211, 3062, 700, -1, -1],\n", - " [ 272, 987, 1134, 494, 2959, -1],\n", - " [1936, 3715, 120, 2553, 2695, 2710],\n", - " [ 25, 1149, 3930, -1, -1, -1],\n", - " [1753, 1778, 1237, 482, 3925, 110],\n", - " [3703, 2, 565, 3827, -1, -1],\n", - " [1150, 2734, 10, 2478, 3490, -1],\n", - " [ 426, 811, 95, 489, 144, -1],\n", - " [2313, 2006, 489, 975, -1, -1],\n", - " [3702, 3414, 205, 1488, 2966, 1347],\n", - " [ 70, 1741, 702, 1666, -1, -1],\n", - " [ 703, 1778, 1030, 849, -1, -1],\n", - " [ 814, 1674, 115, 3827, -1, -1]], dtype=torch.int32)\n", - "tensor([4, 6, 5, 4, 5, 6, 3, 6, 4, 5, 5, 4, 6, 4, 4, 4], dtype=torch.int32)\n" - ] - } - ], - "source": [ - "for batch in cv_data_loader:\n", - " keys, feat, text, feat_len, text_len = batch\n", - " print(keys)\n", - " print(feat.shape)\n", - " print(feat)\n", - " print(feat_len)\n", - " print(text)\n", - " print(text_len)\n", - " np.savez('data.npz', keys=keys, feat=feat.numpy(), feat_len=feat_len.numpy(), text=text.numpy(), text_len=text_len.numpy())\n", - " break" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "852a9c95", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CODE_OF_CONDUCT.md data.npz install.sh README.md\t tools\r\n", - "CONTRIBUTING.md docs LICENSE\t requirements.txt venv\r\n", - "CPPLINT.cfg\t examples Makefile\t runtime\t wenet\r\n" - ] - } - ], - "source": [ - "!ls\n", - "!cp data.npz /workspace/DeepSpeech-2.x/.notebook" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "cde24c4e", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(111.9988)\n", - "tensor(830.9634, grad_fn=)\n", - "tensor([False, False, False, False, False, True, True, False, False, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " True, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, False, True, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, True, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " False, True, True, False, False, False, False, False, False, True,\n", - " False, False, False, False, False, False, True, False, False, False,\n", - " False, False, True, True, False, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, True, True, False, False, False, False, False,\n", - " True, True])\n", - "tensor(669.4633, grad_fn=)\n", - "tensor(142.4888, grad_fn=) tensor(41.8415, grad_fn=) tensor(377.3326, grad_fn=)\n" - ] - } - ], - "source": [ - "model.cpu().eval()\n", - "total_loss, attention_loss, ctc_loss = model(feat, feat_len,\n", - " text, text_len)\n", - "print(total_loss, attention_loss, ctc_loss )" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "be5b2a2c", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "cpu\n" - ] - } - ], - "source": [ - "print(total_loss.device)" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "5b791771", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(112., device='cuda:0')\n", - "tensor(830.9634, device='cuda:0', grad_fn=)\n", - 
"tensor([False, False, False, False, False, True, True, False, False, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " True, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, False, True, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, True, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " False, True, True, False, False, False, False, False, False, True,\n", - " False, False, False, False, False, False, True, False, False, False,\n", - " False, False, True, True, False, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, True, True, False, False, False, False, False,\n", - " True, True], device='cuda:0')\n", - "tensor(669.4634, device='cuda:0', grad_fn=)\n", - "cuda:0\n", - "142.4888 41.84146 377.33258\n" - ] - } - ], - "source": [ - "model.cuda().eval()\n", - "feat=feat.cuda()\n", - "feat_len=feat_len.cuda()\n", - "text=text.cuda()\n", - "text_len=text_len.cuda()\n", - "\n", - "total_loss, attention_loss, ctc_loss = model(feat, feat_len,\n", - " text, text_len)\n", - "print(total_loss.device)\n", - "print(total_loss.cpu().data.numpy(), attention_loss.cpu().data.numpy(), ctc_loss.cpu().data.numpy() )" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "1baef537", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([16, 51, 256])\n", - "torch.Size([16, 1, 51])\n", - "tensor([[-0.7019, 0.5625, 0.6880, ..., 1.1237, 0.7804, 1.1369],\n", - " [-0.7788, 0.3913, 0.7189, ..., 1.2519, 0.8862, 1.3173],\n", - " [-0.9591, 0.6346, 0.8767, ..., 0.9818, 0.7440, 1.2903],\n", - " ...,\n", - " [-1.0732, 0.6724, 0.9230, ..., 0.9075, 0.8177, 1.3240],\n", - " [-1.1654, 0.6820, 0.6939, ..., 1.2238, 0.8028, 1.4507],\n", - " [-1.2732, 0.7146, 0.7582, ..., 0.9415, 0.8775, 1.2623]],\n", - " device='cuda:0', grad_fn=)\n" - ] - } - ], - "source": [ - "encoder_out, encoder_mask = model.encoder(feat, feat_len)\n", - "print(encoder_out.shape)\n", - "print(encoder_mask.shape)\n", - "print(encoder_out[0])\n", - "\n", - "np.savez('/workspace/DeepSpeech-2.x/.notebook/encoder.npz',\n", - " mask=encoder_mask.cpu().detach().numpy(), \n", - " out=encoder_out.cpu().detach().numpy())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3e22c782", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "30b6b946", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ 9.871763 9.938915 10.238187 10.8597145 11.686526 12.25488\n", - " 12.657681 12.86139 12.807339 12.566256 12.32007 12.138792\n", - " 12.313189 12.552552 12.612239 12.569745 12.389728 12.143833\n", - " 12.092851 11.793959 11.622591 11.926331 11.815442 11.951225\n", - " 11.831805 11.887888 11.790144 11.88072 11.900057 11.973481\n", - " 12.009822 12.008814 12.026197 12.104796 12.21555 12.343993\n", - " 12.450144 12.496688 12.486538 12.355079 12.392918 12.255374\n", - " 12.264963 12.253142 12.325458 12.4335985 12.548675 12.676334\n", - " 12.809207 12.929347 12.961151 12.968834 12.995931 13.047281\n", - " 13.058881 13.05738 12.999211 12.934022 12.874292 12.71653\n", - " 12.48942 12.274784 12.261631 12.286319 12.31956 12.422907\n", - " 12.514802 12.578516 12.647194 12.737626 12.800171 12.868728\n", - " 12.966668 
13.064786 13.159159 13.272843 13.310819 13.239043\n", - " 12.879361 11.183102 ] float32\n", - "encoder.embed.out.0.weight: (256, 4864) -> (4864, 256)\n", - "encoder.encoders.0.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.0.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.0.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.0.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.0.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.0.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.0.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.0.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.0.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.0.conv_module.norm.running_mean -> encoder.encoders.0.conv_module.norm._mean\n", - "encoder.encoders.0.conv_module.norm.running_var -> encoder.encoders.0.conv_module.norm._variance\n", - "encoder.encoders.0.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.1.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.1.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.1.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.1.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.1.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.1.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.1.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.1.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.1.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.1.conv_module.norm.running_mean -> encoder.encoders.1.conv_module.norm._mean\n", - "encoder.encoders.1.conv_module.norm.running_var -> encoder.encoders.1.conv_module.norm._variance\n", - "encoder.encoders.1.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.2.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.2.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.2.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.2.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.2.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.2.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.2.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.2.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.2.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.2.conv_module.norm.running_mean -> encoder.encoders.2.conv_module.norm._mean\n", - "encoder.encoders.2.conv_module.norm.running_var -> encoder.encoders.2.conv_module.norm._variance\n", - "encoder.encoders.2.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.3.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.3.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.3.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.3.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.3.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - 
"encoder.encoders.3.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.3.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.3.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.3.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.3.conv_module.norm.running_mean -> encoder.encoders.3.conv_module.norm._mean\n", - "encoder.encoders.3.conv_module.norm.running_var -> encoder.encoders.3.conv_module.norm._variance\n", - "encoder.encoders.3.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.4.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.4.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.4.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.4.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.4.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.4.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.4.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.4.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.4.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.4.conv_module.norm.running_mean -> encoder.encoders.4.conv_module.norm._mean\n", - "encoder.encoders.4.conv_module.norm.running_var -> encoder.encoders.4.conv_module.norm._variance\n", - "encoder.encoders.4.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.5.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.5.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.5.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.5.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.5.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.5.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.5.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.5.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.5.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.5.conv_module.norm.running_mean -> encoder.encoders.5.conv_module.norm._mean\n", - "encoder.encoders.5.conv_module.norm.running_var -> encoder.encoders.5.conv_module.norm._variance\n", - "encoder.encoders.5.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.6.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.6.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.6.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.6.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.6.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.6.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.6.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.6.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.6.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.6.conv_module.norm.running_mean -> encoder.encoders.6.conv_module.norm._mean\n", - "encoder.encoders.6.conv_module.norm.running_var -> encoder.encoders.6.conv_module.norm._variance\n", - 
"encoder.encoders.6.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.7.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.7.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.7.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.7.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.7.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.7.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.7.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.7.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.7.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.7.conv_module.norm.running_mean -> encoder.encoders.7.conv_module.norm._mean\n", - "encoder.encoders.7.conv_module.norm.running_var -> encoder.encoders.7.conv_module.norm._variance\n", - "encoder.encoders.7.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.8.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.8.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.8.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.8.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.8.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.8.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.8.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.8.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.8.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.8.conv_module.norm.running_mean -> encoder.encoders.8.conv_module.norm._mean\n", - "encoder.encoders.8.conv_module.norm.running_var -> encoder.encoders.8.conv_module.norm._variance\n", - "encoder.encoders.8.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.9.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.9.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.9.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.9.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.9.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.9.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.9.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.9.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.9.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.9.conv_module.norm.running_mean -> encoder.encoders.9.conv_module.norm._mean\n", - "encoder.encoders.9.conv_module.norm.running_var -> encoder.encoders.9.conv_module.norm._variance\n", - "encoder.encoders.9.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.10.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.10.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.10.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.10.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.10.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.10.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - 
"encoder.encoders.10.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.10.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.10.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.10.conv_module.norm.running_mean -> encoder.encoders.10.conv_module.norm._mean\n", - "encoder.encoders.10.conv_module.norm.running_var -> encoder.encoders.10.conv_module.norm._variance\n", - "encoder.encoders.10.concat_linear.weight: (256, 512) -> (512, 256)\n", - "encoder.encoders.11.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.11.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.11.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.11.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.11.self_attn.linear_pos.weight: (256, 256) -> (256, 256)\n", - "encoder.encoders.11.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.11.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.11.feed_forward_macaron.w_1.weight: (2048, 256) -> (256, 2048)\n", - "encoder.encoders.11.feed_forward_macaron.w_2.weight: (256, 2048) -> (2048, 256)\n", - "encoder.encoders.11.conv_module.norm.running_mean -> encoder.encoders.11.conv_module.norm._mean\n", - "encoder.encoders.11.conv_module.norm.running_var -> encoder.encoders.11.conv_module.norm._variance\n", - "encoder.encoders.11.concat_linear.weight: (256, 512) -> (512, 256)\n", - "decoder.output_layer.weight: (4233, 256) -> (256, 4233)\n", - "decoder.decoders.0.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.src_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.src_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.src_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.src_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.0.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "decoder.decoders.0.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "decoder.decoders.0.concat_linear1.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.0.concat_linear2.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.1.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.src_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.src_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.src_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.src_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.1.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "decoder.decoders.1.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "decoder.decoders.1.concat_linear1.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.1.concat_linear2.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.2.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - 
"decoder.decoders.2.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.src_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.src_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.src_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.src_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.2.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "decoder.decoders.2.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "decoder.decoders.2.concat_linear1.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.2.concat_linear2.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.3.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.src_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.src_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.src_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.src_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.3.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "decoder.decoders.3.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "decoder.decoders.3.concat_linear1.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.3.concat_linear2.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.4.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.src_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.src_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.src_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.src_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.4.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "decoder.decoders.4.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "decoder.decoders.4.concat_linear1.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.4.concat_linear2.weight: (256, 512) -> (512, 256)\n", - "decoder.decoders.5.self_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.self_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.self_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.self_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.src_attn.linear_q.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.src_attn.linear_k.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.src_attn.linear_v.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.src_attn.linear_out.weight: (256, 256) -> (256, 256)\n", - "decoder.decoders.5.feed_forward.w_1.weight: (2048, 256) -> (256, 2048)\n", - "decoder.decoders.5.feed_forward.w_2.weight: (256, 2048) -> (2048, 256)\n", - "decoder.decoders.5.concat_linear1.weight: (256, 512) -> (512, 
256)\n", - "decoder.decoders.5.concat_linear2.weight: (256, 512) -> (512, 256)\n", - "ctc.ctc_lo.weight: (4233, 256) -> (256, 4233)\n" - ] - } - ], - "source": [ - "# dump torch model to paddle\n", - "import numpy as np\n", - "state_dict = model.state_dict()\n", - "paddle_state_dict = {}\n", - "\n", - "for n, p in state_dict.items():\n", - " name_change=True\n", - "\n", - " if 'norm.running_mean' in n:\n", - " new_n = n.replace('norm.running_', 'norm._')\n", - " elif 'norm.running_var' in n:\n", - " new_n = n.replace('norm.running_var', 'norm._variance')\n", - " else:\n", - " name_change=False\n", - " new_n = n\n", - " \n", - " if name_change:\n", - " print(f\"{n} -> {new_n}\")\n", - " \n", - " p = p.cpu().detach().numpy()\n", - " if n.endswith('weight') and p.ndim == 2 and 'embed.0.weight' not in n:\n", - " new_p = p.T\n", - " print(f\"{n}: {p.shape} -> {new_p.shape}\")\n", - " else:\n", - " new_p = p\n", - " \n", - " if 'global_cmvn.mean' in n:\n", - " print(p, p.dtype)\n", - " \n", - " paddle_state_dict[new_n] = new_p\n", - " \n", - "np.savez('/workspace/DeepSpeech-2.x/.notebook/model',\n", - " state=paddle_state_dict)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7307dc5b", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "d99b29bc", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(377.3326, device='cuda:0', grad_fn=)\n", - "None\n", - "[[ 3.16902351e+00 -1.51765049e-02 4.91097234e-02 ... -2.47973716e-03\n", - " -5.93366381e-03 -7.26613170e-03]\n", - " [-1.74185038e+00 7.75875803e-03 -4.49435972e-02 ... 9.92415240e-04\n", - " 2.46338220e-03 2.31891591e-03]\n", - " [-2.33343077e+00 1.30476682e-02 -2.66557615e-02 ... 2.27533933e-03\n", - " 5.76929189e-03 7.48792710e-03]\n", - " ...\n", - " [-4.30356789e+00 2.46056803e-02 -9.00955945e-02 ... 4.43160534e-03\n", - " 1.16123557e-02 1.44716976e-02]\n", - " [-3.36919212e+00 1.73155665e-02 -6.36875406e-02 ... 3.28367390e-03\n", - " 8.58021621e-03 1.07796099e-02]\n", - " [-6.62039661e+00 3.49958315e-02 -1.23963736e-01 ... 6.36674836e-03\n", - " 1.60815325e-02 2.03892551e-02]]\n", - "[-4.3777566e+00 2.3245990e-02 -9.3339972e-02 ... 4.2569702e-03\n", - " 1.0920014e-02 1.3787906e-02]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - ":6: UserWarning: The .grad attribute of a Tensor that is not a leaf Tensor is being accessed. Its .grad attribute won't be populated during autograd.backward(). If you indeed want the gradient for a non-leaf Tensor, use .retain_grad() on the non-leaf Tensor. If you access the non-leaf Tensor by mistake, make sure you access the leaf Tensor instead. 
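The conversion loop above encodes the two relevant framework differences: paddle.nn.Linear stores weights as (in_features, out_features), so every 2-D `weight` except the embedding is transposed, and BatchNorm statistics are named `_mean`/`_variance` instead of `running_mean`/`running_var`. The consuming side would look roughly like this sketch (`paddle_model` is a hypothetical instance of the ported network with matching parameter names):

```python
# Sketch of the Paddle-side load for the model.npz written above.
# np.savez pickles the dict into a 0-d object array, hence .item().
import numpy as np
import paddle

npz = np.load('/workspace/DeepSpeech-2.x/.notebook/model.npz', allow_pickle=True)
state = npz['state'].item()
# paddle_model: hypothetical paddle.nn.Layer whose parameter names match
# the converted keys (encoder.*, decoder.*, ctc.*).
paddle_model.set_state_dict({k: paddle.to_tensor(v) for k, v in state.items()})
```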
See github.com/pytorch/pytorch/pull/30531 for more informations.\n", - " print(loss_ctc.grad)\n" - ] - } - ], - "source": [ - "encoder_out_lens = encoder_mask.squeeze(1).sum(1)\n", - "loss_ctc = model.ctc(encoder_out, encoder_out_lens, text, text_len)\n", - "print(loss_ctc)\n", - "dir(loss_ctc)\n", - "loss_ctc.backward()\n", - "print(loss_ctc.grad)\n", - "#print(model.ctc.ctc_lo.weight.grad)\n", - "print(model.ctc.ctc_lo.weight.grad.T.cpu().data.numpy())\n", - "print(model.ctc.ctc_lo.bias.grad.cpu().data.numpy())" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "id": "49b05d6d", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(112., device='cuda:0')\n", - "tensor(830.9634, device='cuda:0', grad_fn=)\n", - "tensor([False, False, False, False, False, True, True, False, False, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " True, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, False, True, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, True, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " False, True, True, False, False, False, False, False, False, True,\n", - " False, False, False, False, False, False, True, False, False, False,\n", - " False, False, True, True, False, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, True, True, False, False, False, False, False,\n", - " True, True], device='cuda:0')\n", - "tensor(669.4634, device='cuda:0', grad_fn=)\n", - "tensor(41.8415, device='cuda:0', grad_fn=) 0.0\n" - ] - } - ], - "source": [ - "loss_att, acc_att = model._calc_att_loss(encoder_out, encoder_mask,\n", - " text, text_len)\n", - "print(loss_att, acc_att)" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "413b413f", - "metadata": {}, - "outputs": [], - "source": [ - "def pad_list(xs, pad_value: int):\n", - " n_batch = len(xs)\n", - " max_len = max([x.size(0) for x in xs])\n", - " pad = torch.zeros(n_batch, max_len, dtype=xs[0].dtype, device=xs[0].device)\n", - " pad = pad.fill_(pad_value)\n", - " for i in range(n_batch):\n", - " pad[i, :xs[i].size(0)] = xs[i]\n", - "\n", - " return pad\n", - "\n", - "def add_sos_eos(ys_pad: torch.Tensor, sos: int, eos: int,\n", - " ignore_id: int):\n", - "\n", - " _sos = torch.tensor([sos],\n", - " dtype=torch.long,\n", - " requires_grad=False,\n", - " device=ys_pad.device)\n", - " _eos = torch.tensor([eos],\n", - " dtype=torch.long,\n", - " requires_grad=False,\n", - " device=ys_pad.device)\n", - " ys = [y[y != ignore_id] for y in ys_pad] # parse padded ys\n", - " ys_in = [torch.cat([_sos, y], dim=0) for y in ys]\n", - " ys_out = [torch.cat([y, _eos], dim=0) for y in ys]\n", - " return pad_list(ys_in, eos), pad_list(ys_out, ignore_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "id": "ff0c2400", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[4232, 2995, 3116, 1209, 565, 4232, 4232],\n", - " [4232, 236, 1176, 331, 66, 3925, 4077],\n", - " [4232, 2693, 524, 234, 1145, 366, 4232],\n", - " [4232, 3875, 4211, 3062, 700, 4232, 4232],\n", - " [4232, 272, 987, 1134, 494, 2959, 4232],\n", - " [4232, 1936, 3715, 120, 2553, 2695, 2710],\n", - " [4232, 25, 1149, 3930, 4232, 4232, 4232],\n", - " [4232, 1753, 1778, 1237, 482, 3925, 
110],\n", - " [4232, 3703, 2, 565, 3827, 4232, 4232],\n", - " [4232, 1150, 2734, 10, 2478, 3490, 4232],\n", - " [4232, 426, 811, 95, 489, 144, 4232],\n", - " [4232, 2313, 2006, 489, 975, 4232, 4232],\n", - " [4232, 3702, 3414, 205, 1488, 2966, 1347],\n", - " [4232, 70, 1741, 702, 1666, 4232, 4232],\n", - " [4232, 703, 1778, 1030, 849, 4232, 4232],\n", - " [4232, 814, 1674, 115, 3827, 4232, 4232]], device='cuda:0')\n", - "tensor([[2995, 3116, 1209, 565, 4232, -1, -1],\n", - " [ 236, 1176, 331, 66, 3925, 4077, 4232],\n", - " [2693, 524, 234, 1145, 366, 4232, -1],\n", - " [3875, 4211, 3062, 700, 4232, -1, -1],\n", - " [ 272, 987, 1134, 494, 2959, 4232, -1],\n", - " [1936, 3715, 120, 2553, 2695, 2710, 4232],\n", - " [ 25, 1149, 3930, 4232, -1, -1, -1],\n", - " [1753, 1778, 1237, 482, 3925, 110, 4232],\n", - " [3703, 2, 565, 3827, 4232, -1, -1],\n", - " [1150, 2734, 10, 2478, 3490, 4232, -1],\n", - " [ 426, 811, 95, 489, 144, 4232, -1],\n", - " [2313, 2006, 489, 975, 4232, -1, -1],\n", - " [3702, 3414, 205, 1488, 2966, 1347, 4232],\n", - " [ 70, 1741, 702, 1666, 4232, -1, -1],\n", - " [ 703, 1778, 1030, 849, 4232, -1, -1],\n", - " [ 814, 1674, 115, 3827, 4232, -1, -1]], device='cuda:0')\n" - ] - } - ], - "source": [ - "ys_pad = text\n", - "ys_pad_lens = text_len\n", - "ys_in_pad, ys_out_pad = add_sos_eos(ys_pad, model.sos, model.eos,\n", - " model.ignore_id)\n", - "ys_in_lens = ys_pad_lens + 1\n", - "print(ys_in_pad)\n", - "print(ys_out_pad)" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "id": "3e84da38", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([16, 7, 4233])\n", - "tensor([[-3.7639e-01, -8.2272e-01, 7.4276e-01, ..., 3.4201e-01,\n", - " 1.5035e-02, 4.0337e-01],\n", - " [-8.7386e-01, -3.1389e-01, 4.1988e-01, ..., 3.7724e-01,\n", - " -1.4353e-01, -1.0024e+00],\n", - " [-4.3505e-01, 3.4505e-02, -2.8710e-01, ..., 7.7274e-02,\n", - " -1.1672e+00, -2.6849e-01],\n", - " ...,\n", - " [ 4.2471e-01, 5.8886e-01, 2.0204e-02, ..., 3.7405e-01,\n", - " 4.5470e-02, -3.7139e-01],\n", - " [-3.7978e-01, -8.1084e-01, 7.5725e-01, ..., 2.6039e-01,\n", - " -7.9347e-04, 4.2538e-01],\n", - " [-3.8280e-01, -8.1207e-01, 7.4943e-01, ..., 2.6173e-01,\n", - " -1.0499e-03, 4.2679e-01]], device='cuda:0', grad_fn=)\n" - ] - } - ], - "source": [ - "decoder_out, _ = model.decoder(encoder_out, encoder_mask, ys_in_pad,\n", - " ys_in_lens)\n", - "print(decoder_out.shape)\n", - "print(decoder_out[0])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "aac441ea", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 28, - "id": "5ddbca73", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.float32\n", - "torch.int64\n", - "tensor(112., device='cuda:0')\n", - "tensor(830.9634, device='cuda:0', grad_fn=)\n", - "tensor([False, False, False, False, False, True, True, False, False, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " True, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, False, True, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, True, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " False, True, True, False, False, False, False, False, False, True,\n", - " False, False, False, False, False, False, True, False, False, False,\n", - " False, 
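A tiny worked example makes the add_sos_eos contract defined above concrete (sos = eos = 4232 and ignore_id = -1 mirror the notebook's values; the two-row toy batch is made up):

```python
import torch

toy = torch.tensor([[5, 7, -1],
                    [9, 3,  8]])
toy_in, toy_out = add_sos_eos(toy, sos=4232, eos=4232, ignore_id=-1)
# toy_in prepends <sos> and pads with eos: [[4232, 5, 7, 4232], [4232, 9, 3, 8]]
# toy_out appends <eos> and pads with -1:  [[5, 7, 4232, -1], [9, 3, 8, 4232]]
print(toy_in)
print(toy_out)
```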
False, True, True, False, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, True, True, False, False, False, False, False,\n", - " True, True], device='cuda:0')\n", - "tensor(669.4634, device='cuda:0', grad_fn=)\n", - "tensor(41.8415, device='cuda:0', grad_fn=)\n", - "tensor([[2995, 3116, 1209, 565, 4232, -1, -1],\n", - " [ 236, 1176, 331, 66, 3925, 4077, 4232],\n", - " [2693, 524, 234, 1145, 366, 4232, -1],\n", - " [3875, 4211, 3062, 700, 4232, -1, -1],\n", - " [ 272, 987, 1134, 494, 2959, 4232, -1],\n", - " [1936, 3715, 120, 2553, 2695, 2710, 4232],\n", - " [ 25, 1149, 3930, 4232, -1, -1, -1],\n", - " [1753, 1778, 1237, 482, 3925, 110, 4232],\n", - " [3703, 2, 565, 3827, 4232, -1, -1],\n", - " [1150, 2734, 10, 2478, 3490, 4232, -1],\n", - " [ 426, 811, 95, 489, 144, 4232, -1],\n", - " [2313, 2006, 489, 975, 4232, -1, -1],\n", - " [3702, 3414, 205, 1488, 2966, 1347, 4232],\n", - " [ 70, 1741, 702, 1666, 4232, -1, -1],\n", - " [ 703, 1778, 1030, 849, 4232, -1, -1],\n", - " [ 814, 1674, 115, 3827, 4232, -1, -1]], device='cuda:0')\n", - "tensor([[-3.7639e-01, -8.2272e-01, 7.4276e-01, ..., 3.4201e-01,\n", - " 1.5035e-02, 4.0337e-01],\n", - " [-8.7386e-01, -3.1389e-01, 4.1988e-01, ..., 3.7724e-01,\n", - " -1.4353e-01, -1.0024e+00],\n", - " [-4.3505e-01, 3.4505e-02, -2.8710e-01, ..., 7.7274e-02,\n", - " -1.1672e+00, -2.6849e-01],\n", - " ...,\n", - " [ 4.2471e-01, 5.8886e-01, 2.0204e-02, ..., 3.7405e-01,\n", - " 4.5470e-02, -3.7139e-01],\n", - " [-3.7978e-01, -8.1084e-01, 7.5725e-01, ..., 2.6039e-01,\n", - " -7.9347e-04, 4.2538e-01],\n", - " [-3.8280e-01, -8.1207e-01, 7.4943e-01, ..., 2.6173e-01,\n", - " -1.0499e-03, 4.2679e-01]], device='cuda:0', grad_fn=)\n" - ] - } - ], - "source": [ - "print(decoder_out.dtype)\n", - "print(ys_out_pad.dtype)\n", - "loss_att = model.criterion_att(decoder_out, ys_out_pad)\n", - "print(loss_att)\n", - "print(ys_out_pad)\n", - "print(decoder_out[0])\n", - "np.savez('/workspace/DeepSpeech-2.x/.notebook/decoder',\n", - " decoder_out=decoder_out.cpu().detach().numpy())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "78f98c0b", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 29, - "id": "8d968cd3", - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "from torch import nn\n", - "\n", - "\n", - "class LabelSmoothingLoss(nn.Module):\n", - " def __init__(self,\n", - " size: int,\n", - " padding_idx: int,\n", - " smoothing: float,\n", - " normalize_length: bool = False):\n", - " \"\"\"Construct an LabelSmoothingLoss object.\"\"\"\n", - " super(LabelSmoothingLoss, self).__init__()\n", - " self.criterion = nn.KLDivLoss(reduction=\"none\")\n", - " self.padding_idx = padding_idx\n", - " self.confidence = 1.0 - smoothing\n", - " self.smoothing = smoothing\n", - " self.size = size\n", - " self.normalize_length = normalize_length\n", - "\n", - " def forward(self, x: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n", - " \"\"\"Compute loss between x and target.\n", - "\n", - " The model outputs and data labels tensors are flatten to\n", - " (batch*seqlen, class) shape and a mask is applied to the\n", - " padding part which should not be calculated for loss.\n", - "\n", - " Args:\n", - " x (torch.Tensor): prediction (batch, seqlen, class)\n", - " target (torch.Tensor):\n", - " target signal masked with self.padding_id (batch, seqlen)\n", - " Returns:\n", - " loss (torch.Tensor) : The KL 
loss, scalar float value\n", - " \"\"\"\n", - " assert x.size(2) == self.size\n", - " batch_size = x.size(0)\n", - " x = x.view(-1, self.size)\n", - " target = target.view(-1)\n", - " # use zeros_like instead of torch.no_grad() for true_dist,\n", - " # since no_grad() can not be exported by JIT\n", - " true_dist = torch.zeros_like(x)\n", - " true_dist.fill_(self.smoothing / (self.size - 1))\n", - " ignore = target == self.padding_idx # (B,)\n", - " print(self.smoothing / (self.size - 1))\n", - " print(true_dist)\n", - " total = len(target) - ignore.sum().item()\n", - " target = target.masked_fill(ignore, 0) # avoid -1 index\n", - " true_dist.scatter_(1, target.unsqueeze(1), self.confidence)\n", - " print(true_dist.dtype)\n", - " print(true_dist.square().sum())\n", - " kl = self.criterion(torch.log_softmax(x, dim=1), true_dist)\n", - " print(kl.sum())\n", - " denom = total if self.normalize_length else batch_size\n", - " print(ignore)\n", - " numer= kl.masked_fill(ignore.unsqueeze(1), 0).sum()\n", - " print(numer)\n", - " return numer /denom" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "3df340ec", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.3629489603024576e-05\n", - "tensor([[2.3629e-05, 2.3629e-05, 2.3629e-05, ..., 2.3629e-05, 2.3629e-05,\n", - " 2.3629e-05],\n", - " [2.3629e-05, 2.3629e-05, 2.3629e-05, ..., 2.3629e-05, 2.3629e-05,\n", - " 2.3629e-05],\n", - " [2.3629e-05, 2.3629e-05, 2.3629e-05, ..., 2.3629e-05, 2.3629e-05,\n", - " 2.3629e-05],\n", - " ...,\n", - " [2.3629e-05, 2.3629e-05, 2.3629e-05, ..., 2.3629e-05, 2.3629e-05,\n", - " 2.3629e-05],\n", - " [2.3629e-05, 2.3629e-05, 2.3629e-05, ..., 2.3629e-05, 2.3629e-05,\n", - " 2.3629e-05],\n", - " [2.3629e-05, 2.3629e-05, 2.3629e-05, ..., 2.3629e-05, 2.3629e-05,\n", - " 2.3629e-05]], device='cuda:0')\n", - "torch.float32\n", - "tensor(90.7203, device='cuda:0')\n", - "tensor(830.9634, device='cuda:0', grad_fn=)\n", - "tensor([False, False, False, False, False, True, True, False, False, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " True, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, False, True, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, True, False,\n", - " False, False, False, False, False, False, False, False, False, False,\n", - " False, True, True, False, False, False, False, False, False, True,\n", - " False, False, False, False, False, False, True, False, False, False,\n", - " False, False, True, True, False, False, False, False, False, False,\n", - " False, False, False, False, False, False, True, True, False, False,\n", - " False, False, False, True, True, False, False, False, False, False,\n", - " True, True], device='cuda:0')\n", - "tensor(669.4634, device='cuda:0', grad_fn=)\n", - "tensor(41.8415, device='cuda:0', grad_fn=)\n", - "torch.int64\n" - ] - } - ], - "source": [ - "criteron = LabelSmoothingLoss(4233, -1, 0.1, False)\n", - "loss_att = criteron(decoder_out, ys_out_pad)\n", - "print(loss_att)\n", - "print(ys_out_pad.dtype)" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "badc410d", - "metadata": {}, - "outputs": [ - { - "ename": "RuntimeError", - "evalue": "Trying to backward through the graph a second time, but the saved intermediate results have already been freed. 
Specify retain_graph=True when calling backward the first time.", - "output_type": "error", - "traceback": [ - "---------------------------------------------------------------------------", - "RuntimeError                              Traceback (most recent call last)", - " in ", - "----> 1 loss_att.backward()", - "      2 print(loss_att.grad)", - "      3 print(decoder_out.grad)", - "/workspace/wenet/venv/lib/python3.8/site-packages/torch/tensor.py in backward(self, gradient, retain_graph, create_graph)", - "    183                 products. Defaults to ``False``.", - "    184         \"\"\"", - "--> 185         torch.autograd.backward(self, gradient, retain_graph, create_graph)", - "    186 ", - "    187     def register_hook(self, hook):", - "/workspace/wenet/venv/lib/python3.8/site-packages/torch/autograd/__init__.py in backward(tensors, grad_tensors, retain_graph, create_graph, grad_variables)", - "    123         retain_graph = create_graph", - "    124 ", - "--> 125     Variable._execution_engine.run_backward(", - "    126         tensors, grad_tensors, retain_graph, create_graph,", - "    127         allow_unreachable=True)  # allow_unreachable flag", - "RuntimeError: Trying to backward through the graph a second time, but the saved intermediate results have already been freed. Specify retain_graph=True when calling backward the first time."
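One sanity check on the LabelSmoothingLoss run above: the first value it prints is exactly the off-target probability mass,

$$
\frac{\text{smoothing}}{\text{size}-1} = \frac{0.1}{4233-1} \approx 2.36295\times 10^{-5},
$$

which matches the printed 2.3629489603024576e-05, while each target position keeps confidence $1 - 0.1 = 0.9$.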
- ] - } - ], - "source": [ - "loss_att.backward()\n", - "print(loss_att.grad)\n", - "print(decoder_out.grad)" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "id": "219eb41f", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([ 0.0024, 0.0019, -0.1098, ..., 0.0028, 0.0020, -1.7978],\n", - " device='cuda:0')\n", - "tensor([[ 6.5052e-04, 6.4419e-05, -6.1955e-06, ..., 9.8220e-04,\n", - " -2.5918e-05, 3.3754e-04],\n", - " [ 3.9305e-04, 4.5799e-04, 1.4362e-04, ..., 4.6800e-04,\n", - " 1.6911e-04, 2.7067e-04],\n", - " [-1.3593e-01, 5.2201e-02, 3.2895e-02, ..., 2.4580e-02,\n", - " 1.4590e-01, -4.6850e-02],\n", - " ...,\n", - " [ 1.0434e-03, 4.2251e-04, 6.5688e-04, ..., 1.2144e-03,\n", - " 2.1159e-04, 6.6838e-04],\n", - " [ 6.4997e-04, 4.4301e-04, 4.1550e-04, ..., 1.0420e-03,\n", - " 2.4114e-04, 1.5338e-04],\n", - " [-9.9337e-01, 5.4573e-01, -1.1371e-02, ..., -4.3175e-01,\n", - " -2.7850e-01, -4.4679e-01]], device='cuda:0')\n" - ] - } - ], - "source": [ - "print(model.decoder.output_layer.bias.grad)\n", - "print(model.decoder.output_layer.weight.grad)" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "id": "40d00a54", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[[-5.3698e-01, -1.9911e-01, -3.4997e-01, ..., -8.2428e-01,\n", - " -1.0265e+00, -9.6301e-01],\n", - " [-4.4642e-02, 2.3176e-01, -3.2539e-01, ..., -9.0159e-01,\n", - " -1.0325e+00, -7.5987e-01],\n", - " [ 5.0035e-01, 2.2691e-01, -7.3052e-01, ..., -1.0055e+00,\n", - " -8.7123e-01, -1.0306e+00],\n", - " ...,\n", - " [-4.0024e-01, -1.4325e-01, -5.7947e-01, ..., -1.0718e+00,\n", - " -1.2806e+00, -1.0518e+00],\n", - " [ 1.5755e-01, -1.8495e-03, -2.8703e-01, ..., -1.1090e+00,\n", - " -9.4519e-01, -7.2506e-01],\n", - " [-4.7520e-01, -1.3942e+00, -2.5754e-01, ..., -1.1365e+00,\n", - " -1.1943e+00, -1.2290e+00]],\n", - "\n", - " [[ 9.5454e-01, 3.6428e-01, -1.3891e+00, ..., -1.1637e+00,\n", - " -1.2845e+00, -1.2015e+00],\n", - " [-8.5735e-02, -1.0579e+00, -8.9173e-01, ..., -9.6441e-01,\n", - " -1.1255e+00, -1.2599e+00],\n", - " [ 4.7654e-01, 3.2887e-01, -5.9201e-01, ..., -1.1942e+00,\n", - " -1.1430e+00, -1.0242e+00],\n", - " ...,\n", - " [-4.7431e-01, -3.3559e-01, -7.2326e-01, ..., -1.4506e+00,\n", - " -1.3957e+00, -1.0464e+00],\n", - " [ 3.6113e-01, 1.0381e-01, -1.1599e+00, ..., -1.0439e+00,\n", - " -1.0221e+00, -1.0208e+00],\n", - " [-1.2717e+00, -2.1460e+00, -7.5677e-01, ..., -9.7822e-01,\n", - " -9.3785e-01, -1.0371e+00]],\n", - "\n", - " [[-1.5465e+00, -1.0152e+00, -8.8901e-01, ..., -4.8522e-01,\n", - " -7.5163e-01, -6.7765e-01],\n", - " [-7.6101e-01, -7.3352e-01, -9.1588e-01, ..., -2.4836e-01,\n", - " -5.8927e-01, -7.3723e-01],\n", - " [-2.4714e-02, 1.7016e-01, -4.2326e-01, ..., -3.3204e-01,\n", - " -7.6696e-01, -7.1652e-01],\n", - " ...,\n", - " [-1.7032e+00, -1.2591e+00, -1.1449e+00, ..., -1.1810e+00,\n", - " -1.1163e+00, -9.3108e-01],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00]],\n", - "\n", - " ...,\n", - "\n", - " [[ 6.4983e-01, 2.6117e-01, -8.4197e-01, ..., -8.7213e-01,\n", - " -1.1073e+00, -1.3253e+00],\n", - " [ 3.5391e-01, -1.5846e-02, -4.0425e-01, ..., -9.9173e-01,\n", - " -1.0727e+00, -1.1924e+00],\n", - " [ 3.7704e-01, -6.2785e-02, -1.1468e-01, ..., -1.1021e+00,\n", - " -1.0952e+00, -1.1182e+00],\n", - " ...,\n", - " [-6.0434e+00, 
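The RuntimeError above is expected rather than a porting bug: loss_ctc.backward() earlier already freed the intermediate buffers of the encoder graph that decoder_out also depends on, so a second backward through that shared graph fails. A sketch of the clean alternative, reusing the notebook's variables; the 0.7/0.3 mixing weights are inferred from the printed totals (0.7 * 41.8415 + 0.3 * 377.3326 = 142.4888):

```python
# Re-run the forwards so the autograd graph is fresh, then backward once
# through a single combined objective (or pass retain_graph=True to the
# first backward if two separate passes are really needed).
encoder_out, encoder_mask = model.encoder(feat, feat_len)
encoder_out_lens = encoder_mask.squeeze(1).sum(1)
loss_ctc = model.ctc(encoder_out, encoder_out_lens, text, text_len)
decoder_out, _ = model.decoder(encoder_out, encoder_mask, ys_in_pad, ys_in_lens)
loss_att = model.criterion_att(decoder_out, ys_out_pad)
total = 0.7 * loss_att + 0.3 * loss_ctc  # ctc_weight = 0.3 (inferred)
total.backward()
```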
-4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00]],\n", - "\n", - " [[ 4.4458e-02, -1.7547e-01, -6.7475e-01, ..., -4.9801e-01,\n", - " -5.6783e-01, -7.7852e-01],\n", - " [-1.3428e+00, -8.0343e-01, -9.0457e-01, ..., -6.5902e-01,\n", - " -7.2550e-01, -6.2796e-01],\n", - " [-7.6253e-01, -1.3071e-01, -1.3280e-01, ..., -5.6133e-01,\n", - " -6.0588e-01, -7.2115e-01],\n", - " ...,\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00]],\n", - "\n", - " [[-1.0798e+00, -1.0834e+00, -1.1797e+00, ..., -1.7757e-01,\n", - " -4.3747e-01, -4.0007e-02],\n", - " [ 9.2354e-01, 6.3771e-01, -5.2810e-01, ..., -1.2928e-01,\n", - " -2.0342e-01, 1.6656e-01],\n", - " [ 4.9337e-01, -9.1133e-03, -7.3302e-01, ..., 1.0074e-01,\n", - " -9.8115e-02, -9.2357e-03],\n", - " ...,\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00],\n", - " [-6.0434e+00, -4.9397e+00, -3.4235e+00, ..., -3.9949e+00,\n", - " -3.9869e+00, -3.6797e+00]]], device='cuda:0')\n" - ] - } - ], - "source": [ - "xs = model.encoder.global_cmvn(feat)\n", - "print(xs)" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "id": "505ca294", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[[ True, True, True, ..., True, True, True]],\n", - "\n", - " [[ True, True, True, ..., True, True, True]],\n", - "\n", - " [[ True, True, True, ..., True, False, False]],\n", - "\n", - " ...,\n", - "\n", - " [[ True, True, True, ..., False, False, False]],\n", - "\n", - " [[ True, True, True, ..., False, False, False]],\n", - "\n", - " [[ True, True, True, ..., False, False, False]]], device='cuda:0')\n" - ] - } - ], - "source": [ - "from wenet.utils.mask import make_pad_mask\n", - "masks = ~make_pad_mask(feat_len).unsqueeze(1) # (B, 1, L)\n", - "print(masks)" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "id": "aa03c2b9", - "metadata": {}, - "outputs": [], - "source": [ - "xs, pos_emb, masks = model.encoder.embed(xs, masks)" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "id": "ebc0ea12", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[[-0.5482, 2.2866, -1.0750, ..., 1.4504, 0.2895, -0.6945],\n", - " [-0.8013, 1.7688, -1.6639, ..., 1.8332, 0.6791, -0.2000],\n", - " [-1.7112, 2.7057, -1.3363, ..., 1.2336, 0.1870, -0.5735],\n", - " ...,\n", - " [-0.9697, 2.3129, -0.8752, ..., 0.8584, 0.4853, -0.4177],\n", - " [-1.3609, 2.1779, -1.7813, ..., 2.0928, 0.2528, -0.3650],\n", - " [-1.6967, 2.3544, -1.7417, ..., 1.3670, 0.5951, -0.7415]],\n", - "\n", - " [[-1.9828, 2.3178, -0.9079, ..., 0.4117, 0.5006, 0.0872],\n", - " [-0.7640, 1.3558, -1.3613, ..., 0.7317, 0.6784, 0.1685],\n", - " [-0.9504, 1.6038, -1.3030, ..., 0.5754, 0.2677, 0.3343],\n", - " ...,\n", - " [-1.4757, 2.5317, -1.2321, ..., 1.2997, 0.5019, -0.1034],\n", - " [-1.1731, 2.3172, -1.2542, ..., 1.7391, 0.2171, -0.4445],\n", - " [-1.2700, 3.2229, 
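For reference, the global_cmvn module used above is plain per-dimension normalization with statistics precomputed over the training set. A functionally equivalent sketch (buffer names follow wenet's GlobalCMVN as we understand it; treat the exact API as an assumption):

```python
import torch
from torch import nn

class GlobalCMVN(nn.Module):
    """Sketch: y = (x - mean) * istd, applied per feature dimension."""
    def __init__(self, mean: torch.Tensor, istd: torch.Tensor,
                 norm_var: bool = True):
        super().__init__()
        self.norm_var = norm_var
        self.register_buffer("mean", mean)  # (feat_dim,)
        self.register_buffer("istd", istd)  # (feat_dim,), inverse stddev
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = x - self.mean
        if self.norm_var:
            x = x * self.istd
        return x
```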
-0.8872, ..., 1.6461, 0.0973, -0.7679]],\n", - "\n", - " [[-0.5873, 1.4291, -1.3950, ..., 0.2102, 0.1027, 0.0918],\n", - " [ 0.1743, 1.7834, -1.6422, ..., 0.8113, 0.3137, 0.5634],\n", - " [-0.3492, 1.8310, -1.0685, ..., 0.6924, 0.1378, 0.4594],\n", - " ...,\n", - " [-1.0869, 2.3002, -1.2638, ..., 1.7998, 0.5134, -0.5223],\n", - " [-1.2614, 2.7240, -1.3734, ..., 1.4445, 0.5742, -0.3320],\n", - " [-2.2068, 4.3462, -3.8289, ..., 2.1426, 1.2034, -1.3795]],\n", - "\n", - " ...,\n", - "\n", - " [[-0.3914, 1.8553, -0.5747, ..., 1.0062, 0.4632, -1.0452],\n", - " [-0.8605, 2.0172, -1.4437, ..., 1.4526, 0.1657, 0.5923],\n", - " [-0.7307, 2.2841, -1.0699, ..., 1.5825, -0.0980, 0.5503],\n", - " ...,\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270],\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270],\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270]],\n", - "\n", - " [[-0.1619, 0.6255, -1.1323, ..., 0.0724, -0.2204, 0.4636],\n", - " [-0.0831, 0.5750, -1.0930, ..., 0.9110, -0.0650, 0.7299],\n", - " [-0.2820, 0.0801, -0.9418, ..., 0.3379, -0.1166, 0.4451],\n", - " ...,\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270],\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270],\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270]],\n", - "\n", - " [[-0.5458, -0.6909, -1.3597, ..., -0.7818, 0.6875, 0.9843],\n", - " [ 0.0421, -1.1062, -1.4389, ..., -0.0239, 0.9115, 0.5287],\n", - " [-0.2909, -0.1886, -1.5487, ..., -0.1392, 0.0580, 0.3066],\n", - " ...,\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270],\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270],\n", - " [-5.0821, 8.5920, -4.2137, ..., 6.2693, 0.0539, -2.9270]]],\n", - " device='cuda:0', grad_fn=)\n", - "tensor([[[ 0.0000e+00, 1.0000e+00, 0.0000e+00, ..., 1.0000e+00,\n", - " 0.0000e+00, 1.0000e+00],\n", - " [ 8.4147e-01, 5.4030e-01, 8.0196e-01, ..., 1.0000e+00,\n", - " 1.0746e-04, 1.0000e+00],\n", - " [ 9.0930e-01, -4.1615e-01, 9.5814e-01, ..., 1.0000e+00,\n", - " 2.1492e-04, 1.0000e+00],\n", - " ...,\n", - " [-7.6825e-01, -6.4014e-01, 6.3280e-01, ..., 9.9998e-01,\n", - " 5.1581e-03, 9.9999e-01],\n", - " [-9.5375e-01, 3.0059e-01, 9.9899e-01, ..., 9.9998e-01,\n", - " 5.2656e-03, 9.9999e-01],\n", - " [-2.6237e-01, 9.6497e-01, 5.6075e-01, ..., 9.9998e-01,\n", - " 5.3730e-03, 9.9999e-01]]], device='cuda:0')\n", - "tensor([[[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True]],\n", - "\n", - " [[ True, True, True, True, True, True, True, 
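make_pad_mask, imported above from wenet.utils.mask, marks padded frames True so `~mask` keeps the valid ones. A self-contained sketch consistent with the printed masks (standard espnet/wenet-style implementation, written from memory):

```python
import torch

def make_pad_mask(lengths: torch.Tensor) -> torch.Tensor:
    """mask[b, t] is True where t >= lengths[b], i.e. at padding."""
    batch_size = lengths.size(0)
    max_len = int(lengths.max().item())
    seq_range = torch.arange(max_len, device=lengths.device)
    return seq_range.unsqueeze(0).expand(batch_size, max_len) >= lengths.unsqueeze(1)

# make_pad_mask(torch.tensor([2, 4])) ->
# [[False, False,  True,  True],
#  [False, False, False, False]]
```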
True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, False, False, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, False, False, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, False, False, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, 
True, True, True, True, True, True, False, False, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, False, False, False, False, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, False, False, False, False, False, False, False, False,\n", - " False]],\n", - "\n", - " [[ True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, True, True, True, True, True, True, True, True, True,\n", - " True, False, False, False, False, False, False, False, False, False,\n", - " False]]], device='cuda:0')\n", - "torch.Size([16, 1, 51])\n" - ] - } - ], - "source": [ - "print(xs)\n", - "print(pos_emb)\n", - "print(masks)\n", - "print(masks.shape)" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "id": "4289461b", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[[-0.54822 2.2866027 -1.0750197 ... 1.4503604 0.28950194\n", - " -0.6945408 ]\n", - " [-0.8012542 1.7687558 -1.6638877 ... 1.833158 0.6791494\n", - " -0.1999542 ]\n", - " [-1.7112465 2.7057455 -1.3363413 ... 1.2336441 0.18697014\n", - " -0.5735198 ]\n", - " ...\n", - " [-0.96968573 2.312949 -0.87524825 ... 0.85838526 0.4853347\n", - " -0.41773027]\n", - " [-1.3609431 2.1778803 -1.7812773 ... 2.0927877 0.25282228\n", - " -0.36496443]\n", - " [-1.6967483 2.3543842 -1.7416853 ... 1.366951 0.59511113\n", - " -0.74147725]]\n", - "\n", - " [[-1.9828408 2.31777 -0.9078527 ... 0.41170627 0.5006162\n", - " 0.08721463]\n", - " [-0.76404583 1.3557773 -1.3612567 ... 0.7317046 0.678426\n", - " 0.16851945]\n", - " [-0.95044655 1.6037656 -1.3029968 ... 0.57544005 0.26769355\n", - " 0.33433008]\n", - " ...\n", - " [-1.475677 2.531713 -1.2320715 ... 1.2996731 0.50191855\n", - " -0.10343577]\n", - " [-1.1730809 2.3172235 -1.2542105 ... 1.7391105 0.21709818\n", - " -0.44447583]\n", - " [-1.2699623 3.2228963 -0.8871915 ... 1.6460502 0.09731755\n", - " -0.7678688 ]]\n", - "\n", - " [[-0.5872559 1.4290544 -1.3950099 ... 0.21024795 0.10272825\n", - " 0.09179455]\n", - " [ 0.1742807 1.783423 -1.6421788 ... 0.8112701 0.31371105\n", - " 0.56344515]\n", - " [-0.34916472 1.8310343 -1.0685117 ... 0.69243336 0.13782299\n", - " 0.45937473]\n", - " ...\n", - " [-1.0868638 2.300204 -1.2638408 ... 1.7998282 0.5133892\n", - " -0.52227837]\n", - " [-1.2614481 2.7239661 -1.3733778 ... 1.444533 0.57420933\n", - " -0.33201432]\n", - " [-2.2067683 4.346218 -3.828867 ... 2.1426017 1.2033664\n", - " -1.3795122 ]]\n", - "\n", - " ...\n", - "\n", - " [[-0.39141566 1.8553346 -0.5747178 ... 1.0062351 0.46320182\n", - " -1.045236 ]\n", - " [-0.86054784 2.0171793 -1.4436853 ... 1.452623 0.16571884\n", - " 0.5923172 ]\n", - " [-0.73066384 2.2840502 -1.0698992 ... 1.5824941 -0.0979555\n", - " 0.55030036]\n", - " ...\n", - " [-5.08209 8.592033 -4.2136674 ... 
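The shapes above summarize what model.encoder.embed does: time is subsampled by roughly 4x (the mask shrinks to 51 positions), features go from 80 fbank bins to d_model = 256, and a sinusoidal positional encoding is returned alongside. A sketch of the two stride-2 convolutions behind that (wenet-style Conv2dSubsampling4, written from memory as an assumption; note that 256 * 19 = 4864 matches the (256, 4864) encoder.embed.out.0.weight in the conversion listing earlier):

```python
import torch
from torch import nn

odim, idim = 256, 80  # d_model and fbank dim used in this notebook
conv = nn.Sequential(
    nn.Conv2d(1, odim, 3, 2), nn.ReLU(),
    nn.Conv2d(odim, odim, 3, 2), nn.ReLU(),
)
# after two 3x3/stride-2 convs the 80 feature bins shrink to 19, so the
# projection input is 256 * 19 = 4864
proj = nn.Linear(odim * (((idim - 1) // 2 - 1) // 2), odim)

x = torch.randn(16, 207, idim)   # (B, T, D); T = 207 subsamples to T' = 51
h = conv(x.unsqueeze(1))         # (B, odim, T', D')
b, c, t, f = h.size()
out = proj(h.transpose(1, 2).contiguous().view(b, t, c * f))
print(out.shape)                 # torch.Size([16, 51, 256])
```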
-     " ... <remaining rows of the (B, T', 256) embed output elided; the trailing rows repeat one constant row per batch entry because they are padded frames> ...]]]\n"
-     ]
-    }
-   ],
-   "source": [
-    "xs = model.encoder.global_cmvn(feat)\n",
-    "masks = ~make_pad_mask(feat_len).unsqueeze(1)  # (B, 1, L)\n",
-    "xs, pos_emb, masks = model.encoder.embed(xs, masks, offset=0)\n",
-    "print(xs.cpu().detach().numpy())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 47,
-   "id": "67e10d73",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "tensor([[[[ ... <large (B, C, T', F') dump of the post-ReLU conv feature maps elided; most entries are 0.0000e+00 because of the ReLU> ...]]]], device='cuda:0',\n",
-      "       grad_fn=<...>)\n"
-     ]
-    }
-   ],
-   "source": [
-    "xs = model.encoder.global_cmvn(feat)\n",
-    "masks = ~make_pad_mask(feat_len).unsqueeze(1)  # (B, 1, L)\n",
-    "\n",
-    "x = xs.unsqueeze(1)\n",
-    "x = model.encoder.embed.conv(x)\n",
-    "print(x)"
-   ]
-  },
- {
-  "cell_type": "code",
-  "execution_count": 48,
-  "id": "9a9478ad",
-  "metadata": {},
-  "outputs": [
-   {
-    "name": "stdout",
-    "output_type": "stream",
-    "text": [
-     "[[[-0.03426375  0.14291267 -0.06718873 ...  0.09064753  0.01809387\n",
-     "   -0.0434088 ]\n",
-     "  ... <remaining rows of the projected (16, 51, 256) output elided; padded frames repeat one constant row> ...]]]\n",
-     "torch.Size([16, 51, 256])\n"
-    ]
-   }
-  ],
-  "source": [
-   "b, c, t, f = x.size()\n",
-   "x = model.encoder.embed.out(x.transpose(1, 2).contiguous().view(b, t, c * f))\n",
-   "print(x.cpu().detach().numpy())\n",
-   "print(x.shape)"
-  ]
- },
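Taken together, the conv cell (execution 47) and the flatten-and-project cell (execution 48) implement the encoder's 4x subsampling front-end, which is why 80-dim fbank frames come out as the `torch.Size([16, 51, 256])` printed above. Below is a minimal self-contained sketch of a wenet-style `Conv2dSubsampling4`; the kernel/stride choices and the 207-frame input length are assumptions chosen to reproduce the printed shape:

```python
import torch
import torch.nn as nn

class Conv2dSubsampling4(nn.Module):
    """Two stride-2 convs (~1/4 time rate) followed by a linear projection."""
    def __init__(self, idim: int = 80, odim: int = 256):
        super().__init__()
        self.conv = nn.Sequential(
            nn.Conv2d(1, odim, 3, 2), nn.ReLU(),
            nn.Conv2d(odim, odim, 3, 2), nn.ReLU(),
        )
        f_out = ((idim - 1) // 2 - 1) // 2   # freq axis after the two convs
        self.out = nn.Linear(odim * f_out, odim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.conv(x.unsqueeze(1))        # (B, T, F) -> (B, C, T', F')
        b, c, t, f = x.size()                # same flatten as the cell above
        return self.out(x.transpose(1, 2).contiguous().view(b, t, c * f))

x = Conv2dSubsampling4()(torch.randn(16, 207, 80))
print(x.shape)  # torch.Size([16, 51, 256]): 207 -> 103 -> 51 time steps
```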
- {
-  "cell_type": "code",
-  "execution_count": 49,
-  "id": "fd69003f",
-  "metadata": {},
-  "outputs": [
-   {
-    "name": "stdout",
-    "output_type": "stream",
-    "text": [
-     "[[[-0.54822    2.2866027 -1.0750197 ...  1.4503604  0.28950194\n",
-     "   -0.6945408 ]\n",
-     "  ... <remaining rows elided; this matches the embed output printed earlier, i.e. the projected features scaled by sqrt(256) = 16, with padded frames again repeating one row> ...]]]\n"
-    ]
-   }
-  ],
-  "source": [
-   "x, pos_emb = model.encoder.embed.pos_enc(x, 0)\n",
-   "print(x.cpu().detach().numpy())"
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": 50,
-  "id": "8ed88489",
-  "metadata": {},
-  "outputs": [
-   {
-    "name": "stdout",
-    "output_type": "stream",
-    "text": [
-     "torch.float32\n",
-     "[[[ 0.0000000e+00  1.0000000e+00  0.0000000e+00 ...  1.0000000e+00\n",
-     "    0.0000000e+00  1.0000000e+00]\n",
-     "  [ 8.4147096e-01  5.4030234e-01  8.0196178e-01 ...  1.0000000e+00\n",
-     "    1.0746076e-04  1.0000000e+00]\n",
-     "  ... <remaining rows of the (1, T', 256) sinusoidal table elided> ...]]]\n"
-    ]
-   }
-  ],
-  "source": [
-   "print(pos_emb.dtype)\n",
-   "print(pos_emb.cpu().detach().numpy())"
-  ]
- },
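The `pos_emb` table printed above is a standard sinusoidal positional encoding: row 1 starts with sin(1) ≈ 0.84147 and cos(1) ≈ 0.54030, exactly the values in the dump. Note also that `pos_enc` returned `x` scaled by sqrt(256) = 16 rather than adding the table to it, which is the relative-position convention used by conformer encoders. A hedged sketch of the table itself:

```python
import math
import torch

def sinusoid_pe(length: int, d_model: int) -> torch.Tensor:
    """PE[p, 2i] = sin(p / 10000^(2i/d)), PE[p, 2i+1] = cos(...)."""
    pos = torch.arange(length, dtype=torch.float32).unsqueeze(1)
    div = torch.exp(torch.arange(0, d_model, 2, dtype=torch.float32)
                    * -(math.log(10000.0) / d_model))
    pe = torch.zeros(length, d_model)
    pe[:, 0::2] = torch.sin(pos * div)
    pe[:, 1::2] = torch.cos(pos * div)
    return pe.unsqueeze(0)  # (1, L, D), matching pos_emb above

pe = sinusoid_pe(51, 256)
print(pe[0, 1, 0].item(), pe[0, 1, 1].item())  # ~0.84147, ~0.54030
```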
- {
-  "cell_type": "code",
-  "execution_count": 54,
-  "id": "5e277881",
-  "metadata": {},
-  "outputs": [
-   {
-    "name": "stdout",
-    "output_type": "stream",
-    "text": [
-     "torch.Size([16, 51, 256])\n"
-    ]
-   },
-   {
-    "ename": "NameError",
-    "evalue": "name 'mask' is not defined",
-    "output_type": "error",
-    "traceback": [ ... <ANSI-colored traceback elided: the enc_0_selattn_out savez originally passed `mask=mask.cpu()...` while the variable in scope is `masks`; corrected in the source below> ... ]
-   }
-  ],
-  "source": [
-   "def add_optional_chunk_mask(xs: torch.Tensor, masks: torch.Tensor,\n",
-   "                            use_dynamic_chunk: bool,\n",
-   "                            use_dynamic_left_chunk: bool,\n",
-   "                            decoding_chunk_size: int, static_chunk_size: int,\n",
-   "                            num_decoding_left_chunks: int):\n",
-   "    \"\"\"Apply an optional chunk mask for the encoder.\n",
-   "    Args:\n",
-   "        xs (torch.Tensor): padded input, (B, L, D), L for max length\n",
-   "        masks (torch.Tensor): mask for xs, (B, 1, L)\n",
-   "        use_dynamic_chunk (bool): whether to use dynamic chunk or not\n",
-   "        use_dynamic_left_chunk (bool): whether to use dynamic left chunk\n",
-   "            for training.\n",
-   "        decoding_chunk_size (int): decoding chunk size for dynamic chunk:\n",
-   "            0: default for training, use random dynamic chunk;\n",
-   "            <0: for decoding, use full chunk;\n",
-   "            >0: for decoding, use the fixed chunk size as set.\n",
-   "        static_chunk_size (int): chunk size for static chunk\n",
-   "            training/decoding, used when it is greater than 0; ignored\n",
-   "            when use_dynamic_chunk is true.\n",
-   "        num_decoding_left_chunks: number of left chunks for decoding,\n",
-   "            where the chunk size is decoding_chunk_size:\n",
-   "            >=0: use num_decoding_left_chunks;\n",
-   "            <0: use all left chunks.\n",
-   "    Returns:\n",
-   "        torch.Tensor: chunk mask of the input xs.\n",
-   "    \"\"\"\n",
-   "    # Whether to use chunk mask or not\n",
-   "    if use_dynamic_chunk:\n",
-   "        max_len = xs.size(1)\n",
-   "        if decoding_chunk_size < 0:\n",
-   "            chunk_size = max_len\n",
-   "            num_left_chunks = -1\n",
-   "        elif decoding_chunk_size > 0:\n",
-   "            chunk_size = decoding_chunk_size\n",
-   "            num_left_chunks = num_decoding_left_chunks\n",
-   "        else:\n",
-   "            # chunk size is either [1, 25] or full context (max_len).\n",
-   "            # Since we use 4x subsampling and allow up to 1s (100 frames)\n",
-   "            # delay, the maximum frame count is 100 / 4 = 25.\n",
-   "            chunk_size = torch.randint(1, max_len, (1, )).item()\n",
-   "            num_left_chunks = -1\n",
-   "            if chunk_size > max_len // 2:\n",
-   "                chunk_size = max_len\n",
-   "            else:\n",
-   "                chunk_size = chunk_size % 25 + 1\n",
-   "                if use_dynamic_left_chunk:\n",
-   "                    max_left_chunks = (max_len - 1) // chunk_size\n",
-   "                    num_left_chunks = torch.randint(0, max_left_chunks,\n",
-   "                                                    (1, )).item()\n",
-   "        chunk_masks = subsequent_chunk_mask(xs.size(1), chunk_size,\n",
-   "                                            num_left_chunks,\n",
-   "                                            xs.device)  # (L, L)\n",
-   "        chunk_masks = chunk_masks.unsqueeze(0)  # (1, L, L)\n",
-   "        chunk_masks = masks & chunk_masks  # (B, L, L)\n",
-   "    elif static_chunk_size > 0:\n",
-   "        num_left_chunks = num_decoding_left_chunks\n",
-   "        chunk_masks = subsequent_chunk_mask(xs.size(1), static_chunk_size,\n",
-   "                                            num_left_chunks,\n",
-   "                                            xs.device)  # (L, L)\n",
-   "        chunk_masks = chunk_masks.unsqueeze(0)  # (1, L, L)\n",
-   "        chunk_masks = masks & chunk_masks  # (B, L, L)\n",
-   "    else:\n",
-   "        chunk_masks = masks\n",
-   "    return chunk_masks\n",
-   "\n",
-   "from wenet.utils.mask import make_pad_mask\n",
-   "\n",
-   "masks = ~make_pad_mask(feat_len).unsqueeze(1)\n",
-   "xs = model.encoder.global_cmvn(feat)\n",
-   "xs, pos_emb, masks = model.encoder.embed(xs, masks, offset=0)\n",
-   "\n",
-   "mask_pad = masks\n",
-   "decoding_chunk_size = 0\n",
-   "num_decoding_left_chunks = -1\n",
-   "use_dynamic_left_chunk = False  # bool flag; the original cell set it to -1\n",
-   "use_dynamic_chunk = False\n",
-   "static_chunk_size = -1\n",
-   "chunk_masks = add_optional_chunk_mask(\n",
-   "    xs,\n",
-   "    masks,\n",
-   "    use_dynamic_chunk,\n",
-   "    use_dynamic_left_chunk,\n",
-   "    decoding_chunk_size,\n",
-   "    static_chunk_size,\n",
-   "    num_decoding_left_chunks)\n",
-   "\n",
-   "np.savez('/workspace/DeepSpeech-2.x/.notebook/enc_embed',\n",
-   "         embed_out=xs.cpu().detach().numpy(),\n",
-   "         pos_emb=pos_emb.cpu().detach().numpy(),\n",
-   "         chunk_masks=chunk_masks.cpu().detach().numpy(),\n",
-   "         mask_pad=mask_pad.cpu().detach().numpy())\n",
-   "\n",
-   "model.eval()\n",
-   "print(xs.shape)\n",
-   "# Walk the first encoder layer by hand and dump each intermediate.\n",
-   "for layer in model.encoder.encoders:\n",
-   "    x = xs\n",
-   "    residual = x\n",
-   "    x_norm = layer.norm_ff_macaron(x)\n",
-   "    !rm /workspace/DeepSpeech-2.x/.notebook/enc_0_norm_ff.npz\n",
-   "    np.savez('/workspace/DeepSpeech-2.x/.notebook/enc_0_norm_ff',\n",
-   "             norm_ff=x_norm.cpu().detach().numpy(),\n",
-   "             xs=xs.cpu().detach().numpy())\n",
-   "\n",
-   "    # Macaron feed-forward branch.\n",
-   "    x = residual + layer.ff_scale * layer.feed_forward_macaron(x_norm)\n",
-   "\n",
-   "    ps = [p.cpu().data.numpy()\n",
-   "          for n, p in layer.feed_forward_macaron.state_dict().items()]\n",
-   "\n",
-   "    ff_l_x = layer.feed_forward_macaron.w_1(x_norm)\n",
-   "    ff_l_a_x = layer.feed_forward_macaron.activation(ff_l_x)\n",
-   "    ff_l_a_l_x = layer.feed_forward_macaron.w_2(ff_l_a_x)\n",
-   "    np.savez('/workspace/DeepSpeech-2.x/.notebook/enc_0_ff_out',\n",
-   "             norm_ff=x_norm.cpu().detach().numpy(),\n",
-   "             ff_out=x.cpu().detach().numpy(),\n",
-   "             ff_l_x=ff_l_x.cpu().detach().numpy(),\n",
-   "             ff_l_a_x=ff_l_a_x.cpu().detach().numpy(),\n",
-   "             ff_l_a_l_x=ff_l_a_l_x.cpu().detach().numpy(),\n",
-   "             ps=ps)\n",
-   "\n",
-   "    # Self-attention branch.\n",
-   "    residual = x\n",
-   "    x = layer.norm_mha(x)\n",
-   "    x_q = x\n",
-   "\n",
-   "    x_att = layer.self_attn(x_q, x, x, pos_emb, masks)\n",
-   "    np.savez('/workspace/DeepSpeech-2.x/.notebook/enc_0_selattn_out',\n",
-   "             x_q=x_q.cpu().detach().numpy(),\n",
-   "             x=x.cpu().detach().numpy(),\n",
-   "             pos_emb=pos_emb.cpu().detach().numpy(),\n",
-   "             mask=masks.cpu().detach().numpy(),  # was `mask` (undefined), which raised the NameError above\n",
-   "             x_att=x_att.cpu().detach().numpy())\n",
-   "\n",
-   "    break\n",
-   "\n",
-   "i = 0\n",
-   "for layer in model.encoder.encoders:\n",
-   "    xs, chunk_masks, _ = layer(xs, chunk_masks, pos_emb, mask_pad)\n",
-   "    i += 1\n",
-   "    if i == 2:\n",
-   "        np.savez('/workspace/DeepSpeech-2.x/.notebook/enc_2', enc_2=xs.cpu().detach().numpy())\n",
-   "\n",
-   "np.savez('/workspace/DeepSpeech-2.x/.notebook/enc_all', enc_all=xs.cpu().detach().numpy())"
-  ]
- },
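`add_optional_chunk_mask` above does its real work through `subsequent_chunk_mask`, which builds the (L, L) visibility pattern: every position may attend to all frames in its own chunk and in `num_left_chunks` chunks of history (all history when negative). A hedged sketch of that helper's semantics (the wenet version also takes a `device` argument):

```python
import torch

def subsequent_chunk_mask(size: int, chunk_size: int,
                          num_left_chunks: int = -1) -> torch.Tensor:
    """True where position i (row) may attend to position j (column)."""
    ret = torch.zeros(size, size, dtype=torch.bool)
    for i in range(size):
        start = 0 if num_left_chunks < 0 else max(
            (i // chunk_size - num_left_chunks) * chunk_size, 0)
        end = min((i // chunk_size + 1) * chunk_size, size)
        ret[i, start:end] = True
    return ret

print(subsequent_chunk_mask(4, 2))
# tensor([[ True,  True, False, False],
#         [ True,  True, False, False],
#         [ True,  True,  True,  True],
#         [ True,  True,  True,  True]])
```

With `use_dynamic_chunk=False` and `static_chunk_size=-1`, as set in the cell above, the function takes the final `else` branch and simply returns the padding mask unchanged.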
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "id": "c43fd4f1",
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "out, mask = model.encoder(feat, feat_len)\n",
-   "#print(out.cpu().detach().numpy())"
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "id": "0e73db22",
-  "metadata": {},
-  "outputs": [],
-  "source": []
- },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "id": "8f506114",
-  "metadata": {},
-  "outputs": [],
-  "source": []
- }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.0"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/README.md b/README.md
index 424dc485e..7d4a2e2d3 100644
--- a/README.md
+++ b/README.md
@@ -1,39 +1,37 @@
-[中文版](README_cn.md)
-
-# PaddlePaddle ASR toolkit
+# PaddlePaddle Speech to Any toolkit

 ![License](https://img.shields.io/badge/license-Apache%202-red.svg)
 ![python version](https://img.shields.io/badge/python-3.7+-orange.svg)
 ![support os](https://img.shields.io/badge/os-linux-yellow.svg)

-*PaddleASR* is an open-source implementation of end-to-end Automatic Speech Recognition (ASR) engine, with [PaddlePaddle](https://github.com/PaddlePaddle/Paddle) platform. Our vision is to empower both industrial application and academic research on speech recognition, via an easy-to-use, efficient, samller and scalable implementation, including training, inference & testing module, and deployment.
+*DeepSpeech* is an open-source implementation of an end-to-end Automatic Speech Recognition engine built on the [PaddlePaddle](https://github.com/PaddlePaddle/Paddle) platform. Our vision is to empower both industrial application and academic research on speech recognition, via an easy-to-use, efficient, smaller and scalable implementation, including training, inference & testing modules, and deployment.

 ## Features

- See [feature list](doc/src/feature_list.md) for more information.
+ See [feature list](docs/src/feature_list.md) for more information.

 ## Setup

+All tested under:
+* Ubuntu 16.04
 * python>=3.7
-* paddlepaddle>=2.1.0
+* paddlepaddle>=2.1.2

-Please see [install](doc/src/install.md).
+Please see [install](docs/src/install.md).

 ## Getting Started

-Please see [Getting Started](doc/src/getting_started.md) and [tiny egs](examples/tiny/s0/README.md).
+Please see [Getting Started](docs/src/getting_started.md) and [tiny egs](examples/tiny/s0/README.md).

 ## More Information

-* [Data Prepration](doc/src/data_preparation.md)
-* [Data Augmentation](doc/src/augmentation.md)
-* [Ngram LM](doc/src/ngram_lm.md)
-* [Server Demo](doc/src/server.md)
-* [Benchmark](doc/src/benchmark.md)
-* [Relased Model](doc/src/released_model.md)
-* [FAQ](doc/src/faq.md)
+* [Data Preparation](docs/src/data_preparation.md)
+* [Data Augmentation](docs/src/augmentation.md)
+* [Ngram LM](docs/src/ngram_lm.md)
+* [Benchmark](docs/src/benchmark.md)
+* [Released Model](docs/src/released_model.md)

 ## Questions and Help

@@ -43,8 +41,8 @@ You are welcome to submit questions in [Github Discussions](https://github.com/P

 ## License

-DeepASR is provided under the [Apache-2.0 License](./LICENSE).
+DeepSpeech is provided under the [Apache-2.0 License](./LICENSE).

 ## Acknowledgement

-We depends on many open source repos. See [References](doc/src/reference.md) for more information.
+We depend on many open-source repos. See [References](docs/src/reference.md) for more information.
diff --git a/README_cn.md b/README_cn.md
deleted file mode 100644
index d762ec2ba..000000000
--- a/README_cn.md
+++ /dev/null
@@ -1,48 +0,0 @@
-[English](README.md)
-
-# PaddlePaddle ASR toolkit
-
-![License](https://img.shields.io/badge/license-Apache%202-red.svg)
-![python version](https://img.shields.io/badge/python-3.7+-orange.svg)
-![support os](https://img.shields.io/badge/os-linux-yellow.svg)
-
-*PaddleASR* is an open-source project for an end-to-end Automatic Speech Recognition (ASR) engine built on the [PaddlePaddle](https://github.com/PaddlePaddle/Paddle) platform.
-Our vision is to provide easy-to-use, efficient, compact, and scalable tools for speech recognition in both industrial application and academic research, covering training, inference, and deployment.
-
-## Features
-
- See the [feature list](doc/src/feature_list.md).
-
-
-## Setup
-
-* python>=3.7
-* paddlepaddle>=2.1.0
-
-See [install](doc/src/install.md).
-
-## Getting Started
-
-Please see [Getting Started](doc/src/getting_started.md) and [tiny egs](examples/tiny/s0/README.md).
-
-## More Information
-
-* [Data Preparation](doc/src/data_preparation.md)
-* [Data Augmentation](doc/src/augmentation.md)
-* [Ngram LM](doc/src/ngram_lm.md)
-* [Server Demo](doc/src/server.md)
-* [Benchmark](doc/src/benchmark.md)
-* [Relased Model](doc/src/released_model.md)
-* [FAQ](doc/src/faq.md)
-
-## Questions and Help
-
-You are welcome to submit questions in [Github Discussions](https://github.com/PaddlePaddle/DeepSpeech/discussions) and to report bugs in [Github Issues](https://github.com/PaddlePaddle/models/issues). You are also welcome to contribute to this project.
-
-## License
-
-DeepASR is provided under the [Apache-2.0 License](./LICENSE).
-
-## Acknowledgement
-
-Development referenced several excellent open-source repos; see [References](doc/src/reference.md) for details.
diff --git a/doc/images/multi_gpu_speedup.png b/doc/images/multi_gpu_speedup.png
deleted file mode 100755
index 286de51519203bb070ce9a539a21627808b7403c..0000000000000000000000000000000000000000
GIT binary patch
[211490 bytes of base85-encoded binary data elided: the deleted multi-GPU speedup figure]
z@7|hK(PD!LiQ!h`(BUJVE>l02n}6_Z&mX$Tc7gS^(Bg^SnR3dpq<+~M5 zKPb8cb*XsyPZWS?X-&>rPB7dvK=#-wQT=x=@b2xJA?s5iM18hcstTsKHXe?+v`jdG z&0;&zU002p63{mM0$CIjRq2Ce!HQ5FQO$C=l(n4fG+G{OaKYmM)=njQS=hyXKO+-M zyE>1^Z9(HQTGGd9*f|s5nj}to246)$nMYzvNM^&rgV2YKi~*;pzT3ToF!K;n%iXSNMVz(E zP(U5#I7-|KLv1)$5`V9%EV3-%MTd?k9@fVh8;8o@94cdtDTW zvZF_vD5`99Z+%#s>kDK72a4m>4T}Rta^Jc^d8`h zmdmC-Qn%PuGy_=8RyGa4{`t0I;ggYnd~`TpV!ZrqNzCpbwLGwp?H}HA%%xw&ZE~!V z_bXrHRHC|#*C4Zlbw@}z+R{CJTQg)Sx!_})Fhp?fMVaR0uR6z`Mf*9JNn!MJxlu$yXi1@E?r&P+66Td?z9*Y`$H<)Gn)-VKtbD#^#uKFrAvYR?RLhHD~? z{cv#3-+d%8))aYZKid_Ojt$rQQwKU*m`{CJdBjr#Fk9|iIW#wN$EQIAT66F36rHi@ z+o$h?w--In3V0S{n}7@v3xIRLHz$S%9QfO_JksQ_C)~Jh$uf4{cJ}k~rF#)88#|sk z10O(lstGk-;x_z4ar@joQ0eZRW~W{^d2QQ+VDfj8;Suyc1=08n+;-tI5lUnU~Ti5EhHRj-#ZO$50 zEnmAAqwean{ayY~J;o20&Z3g+fBNH#irT5$?e`De57>L)pQT5>Ddqmk*X3K8f{Vqh z!9|Q-sB-l?6g|y+wS3XgXS9RGTSAPx^0GH4V4U6hkO>r5bl;2k_VbA4{?ac{8#H!d zo_2rOtCFLSA-@huV_s8mpqb%0L@ZlkOn_|nw#i(Y#WGBJYZ9{}eYYcGY27cAu67#J z#HFN3OK%MIM?!7JG%+jyRkpk^!H39)#nY3}Ta0aa@Lw)0w^n*(F%16(8M0AMA$gni z%2rVpdP;&$w(qmin`q>f15Xi~#&)b_#y-gvtc=8H0X z1BiqOPXyzh(*ne$5rioE?}O{P^tAQIF49#Y^&wvtKa@NLE{0Qd1V(c(%059^&l3lj zTT(;yT?b3!j#LibPrG|1*1s`Z)Jn~OT58(!4oq-DXM;dVi?a*Yw0D#sbn$rbayq|M>eR)Ak`wjbF8jcJ_t}n>|<=iHjBK_#*9+k_u*`dBF~ z1lOE~&NeZqbTDN{qAmpEp^y#D+6`rWvPwD1-&h%OGrj~f!BPJGh8aKC688}N9Bm^CpprjafvidvRLa5Cp3=%^;JzkH=bIjCh5kvC}2YO&igUD>)SWt8pVix@^n@sy_SK z)RDJuHbQTqLlcVT9zrEA-ok1@@|4f22Oa7BmpcdB-(7+kKd){hv^BYD(_Jz2b*g#9 zj=JPDZnxouNPRBI8!>y$Yx0H26;qJC)j!#?Ie~eiu2+cZYkz@0OcM_e&8PQ;U^i2m zz~?#_ZjP)j7S)1H-M-+MY1#a`^e8o811QglvL zT=E~Nsrf(Ei>QyXf(im}s#2LxdIvrdME((qoO9e?ZcA#Gw0uTLEu!X?BTWNu>p7>c zLYa9$;1BKmBB`3h&M$T4XX?EXq_6h%rUw^Csi=_c_1V_goX{7zSB2o^ z$&xnN^&!60Bfmses96p8lcYz578w^-MEbx@Kw_IUY5Akk-NA>yV)(nsze@k2I@rp0 z{`Kkn>$0mkILN0EP%1Y!;58lt0xsTTjLz-(q6ZM+BH)nbiKRE#1 zk=aliExnK4vdh^iXb;-i+BaCYGYqJCjoIC z5OK7$B~#H)0-Glo|4$fw2jr@3R#UD&L1{$%mbOCa8xp#YS6Z`*wM^#*w_tbh&){;) z(E}~aPY@aOfHn3#Mtth#tIBkYUNz$9Vq%n|7rKM}CCLKP2rhYI=6XFQH8ADvFANvr zjlndDR6Z(ZnW zSh>LMh{MC&~IF7_P`A2VCSb|)6E?}y|?pg>20sJc+2ys>cM_y8@_B5M5S5hl>%ne z&eHreR=IXs8I&hFqHWTO9QSXXwkIFoW|d6bEUNcw=^rQ_3)B?yOn16RQ*p?&%o^Tk zJ1u{CFu;SCYEMqZ;BOK!&h0zh3%s(7P3lf>CX&EAZ}xWgn?LUFeWjeA24Yb$e2xkI zg-?InELC-K|N44-v*C>Manp&y9BTTDw|e3(c5_$*4>#_*hUPk?`_@WkC&Lg+R0Dci zMKkAk;d8f;0lQ3sU~}_yoA<%#Y&vn+cDYbHuIaej3aZ`Zp)JbPXY=*xX&e+ci)76y z(c5Tih9u>NO43oeHphU!vh$b2b`2jjquw`-nxRM!k_p;b+0I{*Zsm?Xp8@MICv0C` zmL|SP58+5#FAy-TqU)?u7gSsV9#%4mySz-pW!LSOdlG0Rmz#*vM){FRA8x1_N$&I*2L{= zzm_c%M$yyl1E0J}pB%fpjV&#*LQ1*&+%*qKf_s7AvuSfR7Y7H(?_VF?5d|1>+i}w*f zX?m2Axld99#cp41P`*aNMpq*>Z-^V~IxDM`T68fJA#cMY@HH zgUaL?>aRGXCJPO29s@hbEGpckmMl==HJa{CY->5v+hp+LXN%Wq4C%kZMZHgB$bUJ- zCn`${*t;Qxn6otHPW?^=g2m-0?5JU|ZPD)LD}XA0&AX(ei_?JOR^KJaFy5p6&Pq<% z!|~uHaz1f`KSCTxq&U|XQK#aTN|kQ;RSNOP%~EL-ze-U|!A_-#3e`#+D2*p;&`zRR zMdPYC6^hW`yw6q^*PFkb538h)a^udosRrUfi2=i1tWe% zQ?N8Do7ASP5J#3w-YgOip`uHUzvx}VbKg_vntK+Thoipu~K2ZnGu6HVA zkv4(bTVACZ#6LCt29VOfQ#N9T;|#`h^Aa95#jtA2otFI5bbB+d0=>DOv$QbWS;HCV z#52^IN(iH*>_?t(dyOKyE^UIkffLsfq`T)QMPj6h_tvI3w+!FIh;&>#$7nn^n~-YT z5mzxg+(@;TUgAf*R-9Fru=&Bu((10gJ7d;PpoJps9d*7{yyPT+;H$3ul9E=xvL`zi5eCiB^3xbNfBL-DcD_N-N>L;Tyc+?-zBbx3+%) z2z7AH>bs|~F!rM7OOLMYE~FjR&c70q&oVGthqgxQ>C!AT!;Gz3Ei{uc+%VJMXozbR zCPiW^*|0;LZi>IK(ZT{A?4uURwA*mm4Bs7}PF|6e;1MkN>fyb5EZOW1I4>|m6*+6fvT}4w-A~W+9?UujL1c4b`e_gAahY%w;wX5Rhq>tbHw)zY^X`+ z6jzrcgAAC-H`DXx((p26^I#mvPctQkWgr9xM-g*wAk=bm>Kaz1fohy*f?iG%2co?R z8W0GflDiGmtEqGZbHe3b2^}|~9Hrzj*0fuPcjemMWi!Hfg7-~0dV*3)H)&&B_g0s8 z_9yWF3QfhX^}&}3>te?C08U92r*YY#|H{1k(!O(}T#Vhmc`J{&`pc7lEL|vIoLKpt z`!q5g{JjGdQsg1ANzUcyig4oT}l-}6H_XYGl)CjltG1qMWcj4>-SnsL*!m+ 
zrJ3_Vzz|g}94JIjwoxWzC{5oQb`3?;SEqVVbaB(^70qAa6ghaF29;*EXiO=sfo$nk#Og>q>Po!Avc>JWz<+zPWZIHlSow zKIn?IBu+#wd2d^37ekV`4;e{Rm1L2QsMB-5HB0?m z+8>Nh_$5y#zmms>Q2CPY-NdF^!>{+;`gkic6n68R+fB~f50#E3WbS$cT{gVfOo8Bw zuIjF|f^kk}!Er7_^4-Ew)4*{Y@F~Dx76qb=%#nexrY*uSGP-(yH ztl-!iLu%#f5Sy5)<<-Bg^N5oTIM&*i*exq--crcX--7BV%g`%bMk`Y{<%_W6w7=75 zD932M?X(M%4<|78BG?jF_FHgE*14RLj_Yre^+>xl;@m~$V5pc$o-StC&(1$t3tcKy ze~Q~V+XyPw+c>YGXV&jG%dT+?S$2~Ze*=}6NqOdDjVse;qcTUm8oBEt)}zG*B#;#n z(5Y*K^G|+to$C9B17=d2JdpGI7Sq+Vx9q{T!1_`qOdBg2PTl8HZdN0+(OQ++%{R=t z)DvT-j6c`W>%vS86uqUj91I#QYa<7wNYW%l8N^fhA^Pnkg%lR6 zb1Tuht&t6M;0jrlnk|dDLT&v{GoO_+-JdZOqsf_rUI!i(*~S=Amnvq59r^NMx!P}) zh#^Qe{!R|9reALD=;$!x^+cPJe=>;j6o=pPLq{7*!uCC;y|MYLlJ!Co1;IhNG}<=SN5&g%oIk#O zwWX3%>cvyDXq8imAVeCU(5s1+Y2Q6Hfnm+vPwh=}8W2!R#hYiUN%2o&m{Th*F3FEt zF+Rj=5R^Dp!Y-{A(^4%rS0!#EiI$Jsibjg2y-ap*!((^-GGo(-evppZt$~BdO|3gV z^>P*MhIZTy#Dgx=d{nOWn4)hm53Ml+XCwi&Nm2E49Yu`m4BG$o9u!AVS2gtF?Ljh` z5AVq1qnRNiXHQhO8I~+#o|>{jzg=urrs+-{dGp(drp6^=PWTsXphpKPK*5&CEC^j(z@#Quhtf&x)OA-x5RWD+;%28|MvDIENM}g!>(%>mOdp!xp{TS_Ol;CI}f~ zoA7AbUXHWb;WR=!3hm`VhcXjdGt2JJdRuWyYL#)js+>h;aOrw`Vnb?) z{O{QCYyWvyvpBc?X^n({P@0TvFLZHsIx zk4;vg*>>8U)tymA7rR~v;Uw)Nx`wb1bZjN7e_%{5N>+x4m?i5CCHAx}ju#a6*Xh8q zG{HolOHxAuPd_)bV{xMFQE^+KJ37_UroD@;|0Cy8c4h{{e(_iKA3zPUoA}(mzEmd1 zhAqbpnF6clP(b?ymyh(YsXv99hd9YM&!O@}&MuTh!-P$eI+YVrmvl7Ih61pE6rI}BT(ba9TnMC9T z2W;`SP;2#QzUKczs71D~hG5xK%Wvdg!E69KPR>v)15^wZ{z^)EyPyL_E7JUe#Y>Kk zKscs(Cn3nYQqfpty#v&G%50Ut)!-=s@-~?VW`iAIIYkz-Bm@Hr;HVSrK$$n$7eUIl zv~Ejy$924Xrx;W_y?1r75I9d>#dJoV`H-a}|KP?W@>aknj&2|;H8-{%`#UWzb^cch zKnWe5pEeW~%^9AsevCZlIF^JQ28j!)@}VuaMr^;NRcvpTm%Ye8TVAd%rX!wcJ$69# zFwyp+S{#esR!mR`e2!k9jz%>;Q^JHeKiiFnIRi`d8g4qe8z@@hm~jS zxsW!@K2&OmSOfW2Q4U=01FZ^@qC|gXLQ*yOJ(UeZW@7tfZG3q#7{A7A7fsZ9Yac3a zNastP!ERRVE{_jxEmnzmnLx#_I#2)I8MnE*=!~bjg=mK~G~yO7dAj9h+V&*cA;zhG zGoBy9(0(^s$ZJpT?V`rQMsZB(!ui6rdj>!c>Y@p=eC}ec7~{1EFnZ`|-Y7+65mb}$ z*9CpYvl+#Ay+aya+*CHZ+-X^c9 zX0LZ`w#McicNY&9?Q_RG;}mP+a`6Jvp#!x#a~D_t5>F?sRr?Hh@$Z)a((1%hDeBA#v}e1GFF4+d~bHN&lKw^I~X6_9G}5gBtyS;0Km>SzE@OBki< zkuFfRC2A!yZQ)CiTYHD%=K8s_lWCd2k0(|v&hIT=o?MJ%gx-Ujl+ey}KwRN=L=o^| zO^xhQG{B0B)iZxwXRy?&rnWCMSLM?iEptpDXWO zPITqdR~lvX=oJyhYul8pN%NPn++w;d&q0cLBLt{k#y+|4{(%)%V#}bz?0UGC)&jW) zeU448TGuQsVLw`k?^?_?BWx^{*)j5e#bW1Xowae9mYJvR3P<@Db&L3WXHQrPQ<_mf z;dGBsPZn2Sja$p^2qM;@!_0e0dTMeFvokp5BUDs>Oru!#=fWd?Q;W2f2DqcbdC)59hjqQ_|9@zqG5@? zVO0rh9zx6wLE3`;pD{E@s4>wvWM{$2CGZ>GZ43DH|@eflqGTDM`O2A2516iyVkIZoTO&6+3K94CCXeaACdVp>`} zNsJE@ntxDum@<;DP&jaa$3MghBPMOU2*8>sIxar87*~EEB4zHqUT0xkIC@k;gyuZ% z;+5ptp#QH3c9`J8d_cjJtJFHAG$w!#Zq;_}D0(hG_hIbv_L{f_CcGhqFSor+L{xNq zDg`mYh*RE7*>U*EG^}J2`(T^GGyR~oDE%;N)qOZ;;i4?2vkPZ!*%9lwt&HH_&3f#Y z9St@tv!t&r1GeK}zt3`A$xGL0n_}!&`t;&`%2S@HhE zQ!^c3%GP-y1zh&@7Twv+Lb3%)yk6chbc8QxhwQDe83&YuqnOk}J(UGYNyM)1^)vP= zMGza%++MZ%WKR6+4F4cDI*@M=i~QyH@73BZ=3ge3wW?h?o-SszYgs-<#m@lx{26IM zu?e#Wo8TMFyc*Vv6kFjAE$0vhtA<$P;zg_P7hnBza*u^*0SUt=CRUe!Wqw5uZye7- zPLIWySJs~2qvGS6k>=;02I%jmd)aAmRqHnTXQ`gCJ0L&3xNp@gh1Fr&#rfi4yLo)N zD94y?`;rXCp=~&FcSl37)wm&F&tC&=gXNqxR= z5i{Ef!%`Iytzf(>ytX%;q;~xKAxa`wy}J%mMo;+z%NkJ5_;}=s$DUenk?q!s?@|(3 zLf0p^p@+SeN?cv_P}@6oik1dp5kYLL>jIkGlu#52J|dv##*nAuFtiIVbnh?|7)_q? 
zjDUoh%RCJqOGa834M33)Erfon=~kHps^*qk%4WNc0e`+-1e&epy85^74}CFKBcb7$ zA8IgqypEo1?eMFFbq8J(20yq%E~lLcBmO+6rv@b0{n5ts;G!&Z$GwL7R4SCih7=WC_Y3(%Z<>Q_icG0!ExIuq*i(1E4R2P}w%2rg6 zK9{juYqg#PR*D+sKh)`i#k(`0GFC3Gdv!ONaOkwH%?d}v9A^>*LID<+EQ!6oPMUsn zuJ{$%UyA<5R#E_3KLV6yO#I(H@xq!mMVKO*z6HTs6x}_L zfmA5mebJcjko!t?^Y`HNY76?mY>|cyt>~Ad{=2|dv%dCO-pJ+5y?X(NZY*VOO}88Z z0<~21w=IRecmQQ7pJiCgqal5K!7R4^)GN}1kztD~(1oy8Yhf`f;5JB8SY?S< zhUD1T7$YVxi2+Tiw&+&>Yj*qkLxk~9< zz?+pYq3<6qMFs4Pw#I`5Ai}Y>Y05mhCV>ck@{6U-C4iij9*0pQQ#>R23;K6FqB&=U zUlhlIbqSh-Srqh`M&}H4)GWCnibn>-TDj+mF1HG@p04J5<7u0XAya1dmJBt6)6$V(WwVx7v7?p7Xs9zS8X_(5#Uh^-&C`Mou8Z2NP-2cA(a;IbaeP#_mrk0$ImW@UV zdKk$?;jp(Avyx|6uKgq=QCFU7o%MGbKrh&rXt8|wpqA;@#WU<<&9kLooyq01{?nT0 zz!jx?4cWX4!dWc(thKy3_`=)b@S(*k0@=^%HgJXXMs3T?W8BmMVpf}(qv6{*p;CDQ z=Qhub05c`U8Qa*BoY<`z$E^(O^?wz!6lU^)pijqKz#n;eub6(4%(MXH-BR?kBQV#9 zov5%gqXNs^N*3AOh)|vTI0g7ai>{9YYh&c=3W|wkXmj*RykBK{w2IdSTfAN>4-OOR z9~K{%dM{-ntTIj*k0qj)D(_7x%h~*V#~BPYM)D~2B>v3C3X&RU#%{O=?&;S=cGK5+E4<&T)ILvP@*Xi^#WwXaZ zkbKFNJuIfOCM$B%u@ih`V9;(EGSo~L!HRJ@>B^i{qA5r7|Lqt^ZJT{FR)j0yGs2Wk zfJ7KmqUYyt3Gv+aMhJh+^s6bx)gtSog415M^A>OL%GmR3GWwX!b)L+>OY|t&fggmJ z-w*p6nu9m}=I4=ipIF=sG<>g?f^q;KF{`>%phz#GFz*s2PmdBn4Dy29NZm5b>V_uZ z%b}U3lGyLXJC29J1L049`_6MVE$qvhg6m&D{z^zX9dWpaBWh@he$g043)63B?pda0 zYj$>!cT+xNkb$LgG_ssKGNyg_b@}zKWd6kD&xT~zm;3!XYq(L=(ChlSClz+nSuad1 z+fdauE^Mh(F*^(Pj3QWl6s1X7M!17 za9aO@IXk^P;q*xP~a4WL&8Vkx(R@hOORke@!jePvFEQE@TU zLEH|X4qNQv>9BLv0CG_2@shtBW$oUc3y=s(4n8D!o(ys2_LJjs(r6@>Q}%S3TcUY% zE}7}W$CF07VA)D{P9J*OSrV=2k_l-uS#3*+4YXGtl-@GVUm+W-o*xXmW;oYvrWB%g1WSo80*Gx3*a=_sA8(xL;d+vylF(SxDT zG|l9s3Rkj_3i2z1w#Zdxu3!CED!KYG$p|RB3slnBHbne|)hxlIdAe){Mc^;c75oln z{0c77S~&8hHQtstgR!g%{-xW_k=_o_k8}#!&lJfY2Sp2eN}BicD5yW2mCcPYo6UlA zE@oFoDwGBG#Ya*=O7)G=nsFl`Q>en6@PV_PDEHDG8AL-3W(hHY%Es}PG2Ja!(a`~2OUqOpEJUG} z`#eW_=A7NRsuZ>J2Oe8JNL*{Xw=X}W<_F;!XgQ3P1cOFB_*&^}^Q%XYxr^K`^r<zc?|voO>3 zt>hhPjA#Odcs{)2hy8TG+ZPD3t z!(RKB{^fGxv3G-Xy73Md^!vF=U)FJs$t-XW7u%A|y+@@hDFK32#GI)}u+h$N9cVU^vowO- z-EL(MWHyFBSriS(w&xUaZ@+~kEhkIB*%tXTp#fc@vC4HK|Jv9~9wfMy7k>TVX8jj^ zlDnn+e{6k~B|EzPyy}s1R5nLP_@TN#TV=3B;oRxa2Sq^BCBx0XnBZL*WH>RtZ1h)4 zlfz@0HJfNoKr7Bzu*;t2^GdKrKo*yZAX21;H*eW6OSjtxiIxO0?PfH*0x-iQsKtx~sI``REOH9`13_3&^iSR5I~q9tl)k-Sv3HuT`#hQQ<5I|i;ab(k zhcE`%Jk_#oq=A)X@l3EoIUzFB@b(pMV%vg(a{IlS?g<#&pBMh#`=VVwve(^y54QRM zYyNb%KO?t0TL(1KXjbn&*|jRehWUBkzT7=>-p9B+YBG!?8jqDxdh%Sec>EqIoumMG zKQoF|IY~K}oE0hfZdAGuI?S%jow*70{*0WYpl{E8)fPEC&bpHu8t|HXIOtyEeom6$872f7rT3K?$WHty8;&HR3kTV?JkkOl;8XKwm=1 zE_p>aK}S3B{}f)8X~2aWe?>#V&P092W~p6y_Vr%slStugoOdR`9smVDMYvHu)GWjb z4(yyA7$0;E+Bet|Mu1um|D*`?D#9UujjY1NG=KGjm^!f09)*!CXpVkxXpeZ)JER*6 z$HMZ9ZNuyRm8}tiLx?lg|BuRbT&-eizNE;sEw+0D9}i5tiV0z}LMI2Xp3-t-x+VfV zu{OW_Vc<8}?U!YJYvMMo9qXF@ahHFvjwQA7k3_rL_Yt6vVU057jY`Y6daw!!)m&z7 zpX`?&2UIXK65!I3nr5L9u4x0~hVzzN_p>B}DOxSNV5@JMaCJJ~JTr*7pC+O(MjZpcKH&1T(TM&lnaNiFDOKiCu0&Ck(XrzT5>vN~eBO?* z@|&38wFJ=H#-2_RLx#&wdT^}LTOj^s#55BwajY70);%utt!ms83tRz6J3rYuxlL<1N34h?|peS)}Pvq63_fk zx_E9hTqcE6HX6PwU3>&g~P-RNtzX4Ye-pFOvOeYBom5;;NL0`WK zwnrE~SXWUNYJ=)V4Hzma73Sfig6ToeamKsWLCiFx%Pi-W^Ow?t z1x*;R6B~R4ll{?D*yJo%N$u|jZ-HjeL(C?kXPD7#5Zl5jgOaJ^@u#8YTL-;k4`543 zq#b15{YYRBMMcM-j7yEf``h;krrCn^J8J;wa~{_Wzj(1m_xJGIwOI4CsrqCt%w;~3 z0UH#})8iW6?+w|pzs8j*n^)Ds4GbTO@09xH#NS1o4_t7Y$5+-~{=TTtez9v58}dQ1 zVnBbK(!ZR)3;qh@K|olVYa&*>JpxH836yqN>2e}&2iM6;=#E@}!-p-TM_u>)ZC~Df zoIJZSfkAnAGx=}ijRB>osDqvdM#PKgyk#mYaO*NW!qKwfe2AHkaL`B!;HH?ye+xl5 zfa~)}q`^5b*llOA_Va3_o{sc%tYmQ&7Zl+4b8w%tLiyl{#Jc38oC@}e&P_XZ8~THW zU%IA0&mEIJ|5j1WH;k$7X~U#Q8L-qrB@sAo9W7v1pG>?aFbVsFtch5sDcbVa##~Z< z^7m=h=wgNWmk`^PWR_TVG9ra9v2eYGpXf^{)t_+yb55$a1c0TA3k>#6=Sp=ZDTCsP 
z{CFt*4leP(gS&j@t2~kyJLbnA3fsF|jcM^+J}o@zL}Prk}}D z>$IvvN)306C-tp19t-7DEt#Yx_O<}bryd1{Vr8s6vO`fu`}#nN+1ktoXWc*#Pk#l{ zG>vSWY~qoP5Ky41jH@e_?0bGR^82Z|5t+Ce8eFLZXmfV8jg;D7y)wz~Y#h_L3turi zWh^rD4IRy?dYc@$%ffX%^-{l(rN$SX44m$#wj+SvN3Q}^0E9*oz?SU`j4UmaVYoRC z(#)*_Hxd0e-KFD)gozVcOi~n`yQ*EkXSg=j6AKGe$sZ3$pi|YQo?#8RgO>@Mjm+ul zX9(B;s7r&z8Bep+E&j1F1KF}62Safdm&Gc~p!`mo1n0|qV`-*x>lb~~EBTpugkJLD z@G`CFevzPO9hu^<`tBz3FfmItN__*Wtz4c@^^ZiS(#S?aGh_<-BqCY5==sC%+r5CF zFC88KN5|jr(YOFrY?LBO61O7fJH7ToIQ?wGDj+}z`p@$@|kyuk& zB%GU>$Amu%Q39N|WQUr3fWHbAo7S+;LzE3yfyfA+GR{0@FFT!}J(s8nY}Yi)I~qT= zP1!I4FbFCJrbS^Yjt<__oQMg5`qf_;n8B+wf0+w^cC&Qni}tS+qZdm894lRVwY=dogr( zrqbUDX(@uddUBV2`SW3V_$s%XOLKDkp2Flpz9Z z=v7Nkbt%~~mDb%AR?g)y8pL^RW?@(wg=+$n80~_Vdo>G)k;p11%uH4gxO0Fsm34SF zb@p^<(4};7o-i+{tVEkh`Ow-%xENZzAXlp>Xl@83h!QnvxrZ2c{%D9e_qt54uT22BPz76CRP5ry3%;9xQHL6ZK<$d{ zAmiBKKInB2#U?|L)(>fzp!9AcP9sJZjl5$>iJu#Bg?7Q0@FhiybzV{65UJi@t0kRK zfa2h<1~xEsH!W!KrDFn7i<(MCAb(x^&X4REO?ZMC$Oe9zK8)~My$E&WDhyUEaMT24 zvN~cs5>cJtp1)`ula06FSXIrMlxF5$6vvTWGs)Qa>U@0Z>I)5tUf$RQ;ZHnNu<}>i z2pBXmblc$M`X-iPF@2FinuLwl{1O+>-02b>K+us18`Esd*->XjZ$xjhBhA0witw$R z3yrO)C;8m{AyZF;5GO~b`O(m{rGOn8+={7(9Pf;HQO@$#S)*%y8|KgjJdF+HGRvAB zBWq6n+ej4C`Nn=sI`DVae&`kxDgPDmsNqaeulsQPoRW{!S-*+NPTq&}yl-5VQ zt{o&kPBhwe^@sB3<&%uc#aiD`rxM9KL*kNk4M}X6aaHP@SX|x2E1_)23c|K^TH7FL zu%2?N_0O`H2>+EQ!{Y@)Oznh$O|JYZbq8xF?m}5Ji%!~IbGYlN`%V+Q*tNk`%*s)9 zam{&&Ld+@iBwxI}QTbLzlitYe!clQmLtt*lhV&O(@rHJmd!!vD|H7Q=^#jB)N zFi#`>F++nxBDQ@$^(rw(zszppV#Lvi{pVxqd*g|*IWEF!qUQi#a_=X+M|*O6ET*u^ z5q*E{FYiMOFqKk@kMwKEX;IZFeT39pz33*K5lVIP$3jGo3HuT6AXhBBfT9|TrIZ&10#otkPFFpM4d%|PL{7Sq zc_y|a*V4wo07OY*FzuyJR$o~xq8GzMB{j{~=8^^b9>INv+U2)lPm_X?ibepk+!mU< zHnF(jPQMPkb#Q(|G+_sU+_|e0;^yS43c&Y-_HQfbykUJ{ zARBi|4dnI9EOIV$e~aclmFVfX=Emm9{*Vj683Mt0ssj1JxwAAx;>>$}_b^{4ScXdR z(lnp>;JEZNi^>XHg{n;43bN(2>2p~UC7C7UAC}7a`(1nWfCchqR=F$b@Ui**+Dr4K zzd53=uSk}#3G52xa9^6(BndH711FHeorhr?so^NEl3inj0CYd%wd>SinfV=qeOF+S zm4e?80T)r9F6SQRTadn6`NxIDruPPCM+f%{NkAN5#~&Z-Ck7CI54O8e(n;$%Be!pg z2@|~e9RedD_SL%0n#znAX=Hs+UbNqTg(ihq*YX%B2P!N0Ex!Tq@$4(MdhE45dZRaZ zTt;9dnx@_1-@! 
z^fAmo$CN{*xBtQ(l|7HiNT-ZyvI@a9sH4)?>J84kYtaA5cv|8N6tuEJqz(F7gDIyb zE`z%lDb1%C3Hg0F#F1MiXbEp+OzNGw)}E{P*^|FHzW)>R&_(4?wy&I}rjqah`PfY% z@KG{c1mh@l_==nDdH1m3d|+1=r@9#yf67ydP!W=SzoC?wA1~#I9n~DqIr1gV%2#kD z;$*zJ^wN6OEak_(K_^?d^tzULC2p>AahYK$BVw5ISJ(rMk>=LMcdL`wJvrC`#;p{L ztRjQ7N7{fWa2UiTBe@-pciN z)uX+EJcCZ!0M7dwhRVft)$4Y9K^O=_=%6*Owd-9OT@pA>5QQ8hTVpz{C|qZmQh|GY zu*Fo0B817b<8$~I%q6C;8YiU&8|M2K=3sc3^jN%g;|eI)YQoPrZk>q3&EjUU7Fl%G z^-K+4uA~o)f2zombuFbLT<(=XWq&ip$GKK=?DanM|Eiv5ap$bWq+-N>yLh)7wlF1z zfmSkF0&7(>!Y(7k55De(wVN#Xnx%%$u;Rx^-QsaUq6f!b%`<4$%?DjER7uxPE8J1n#wWSx7p3M6k4|}ev%&uLGzK^WF3BcK=B%wEKZAUfXT%dp*_umV{ z?+Ocp=B#9k+BDZk4z5#;j_Auaa9q@tNJZYAM-Df z-`{L@{g@#wh|3tEv&eRj<&fQerW`=?CeFSuglxs0I1od6sOTZR)Q;6muQ}=Y&5coc zq4Ww1L*^KhRr_k-ChH?6?#FDFXSYo$a}?|m^q?0mPE1HrY9gdRo8Yj4KDL86c)+(8 zgI=$!=k`o2sZW)68|@c5mXfjmiQMSKS$Fn;yN9Y*Wqqn)a9J6U-@9|(hD+`OmQIuy z3d5kOf7$+h2&5tQl%m7^8`ibcx;qbn@r<43uf0FyWxPCJVJRD?-XcOMVM4}ALET-y z`LCwvXAED*mwjm+7y+qt;i>IuX$A?JRxU>7xjwgz(LJl;wU_wl&YL?bIecS!9q=$Z zA$=xiQg9>k>89V7$5+^HZvp7%9*{#WFgB=fHi`Bpr`Q1PMg!=T_3u}nFw*U18}f}g zo`m19vT<%xvUEhY1`x%Gx`zs8r~5;S1@D*1vd}|zuEgIJ`0DVJLkpI`;E+LK8k95N zLMlGmf?+l0-99gnMX^rJz_vH-oXw)W3 zO32&?`@yPCafq&;s3aa*wx2d-%JQZR#dUr7t0n8&lP#gFv&jPM3O<(u>tB9-0lEKe zfFU$A=iY`O0CYqq`xOu*A(6O&F1K0cZ3pTii$MkuLs4^swj^oDQtD9+OwIeK#U^La zntO}9n`%V$8Jiy@?|ubSUzvDDlO@JhOOw-*o~2pK!;*i(RPL9q+}+MN|4B@Gk-o;b z(Q-_c3)^{Z`ra7duiM1N62D69WygO$8u#|=#)(|~&YbGPX31~*7>%Hl0dbx8%K)_~ z_W-QUGGjwFki3lqBHEMxxr-0$YTYvX)4-zR-aEk3yOCGi8Xg%VxzZW`y6LTK+Q~{1 zq!v#0iSAEWPo4Z^%fd}@SOM9?xppZ3<6YN@+ey^STvCdS3LpVLO?4Y`%yKSfmO0#a z#*|Uv&1fDIzP^`!nR@{(g_zsP@aJ*C8P}Z>kNZQ_I|!5YRMvHaG)KpeN2>rAxsYQi z%P+%!*Azb>kg12557nPdr!nhLB^Kkz>N=>^HFNfaK7p;ip>Nj+=TdF7V>Pc0cI|se z*$4h=tL2Hu?KthtW`$=Iar!f@(s^kK8`JD9#*<+m8oIW4excvY})f3H9wi& zPbRT*_(&obK@2hxnxDK*Ivy-tA^==t5x0~@RnDv<8Vfimr*yDg2KkY*`lll2;yjMB zKt{h=m&3Sz^P!lep7Ga!&rjPf|M%`5+nN?1OahwLqA=z#@SsthJec zXJJ#qv{Pm8y59ZY3*s>q+re%|_`yPw!IiU3W{oG{Ns}3H{7&)ZgageO@A$o1#zciF z6UYM=Xs@tpxH8f`#4e!#LLY?CxftI{@J~YUb=Y2qF97gS(L2uP6s;b9rOuK)qc!r7 zMTSz#Z-r+tY%KEm0vXg;H+^jkVCSZE6aBI`Fk`3A)RWXhU_`1xKKta0i;dD6XMRM1 zXku6oC-IY`u0y`xaXz*}|MHUS9S|y}7g13U#m~+!&ea0tjd%5DQU5|iY+c+BY9CIG zcj1F_7e8JpON7Pd#{_1^`8DO*vN>(-;Krc}fF^sER42(DNFKd9p0lm*lwj{7+u&`h zPl9>ZUq|x#reJ4nV*PhRApp#BrTR#ZzM#azRGph}F;%g3*m z$ed(@tj-E92Og@oxgic81CCrz-%lp)v4$GnoUyGx!G3qvqp&lipEoSW z@?K44#FK0<$>^ZOJPY5h7tL5nMZ=_42(Fc^K6eQb)hrq}@H~qQ4?$>3_U;Z8_la(` z^^P-KFV3>|7mXKmng$j`NFkahdkkStr3JHztP!0pgk`mHzA2 zfDSnAs}JzS8h<0gXUI`Iuru0~@)uYZ7b7sTk<)K+L4%tWJ1m(`>XFiv<6O$Yb!K9 zc64s4bYe@gNp#0oyWu?NT-p8hBtE0t-X#s`3q(=8@-dOVVU^{S#^QB-!XoK+*LL#f zrhp!qAB;2^J|&@l`tvc$(FMu4@?-tE^|A`a=o^h}fb>bPXe0 z6JzATcn=FERMqk+7O)vOOo4)%ddd|e4y(TT7Af^`wKe~^D^q#FIcys@f$fb&#YNm! zgIlZ>>&+wuKxqiB*md1;JJnl>E-Jo4|L~?J9@7A_B}LWjM(UL)>(o9n&ZDl-%b1T{ z%4+es5hyBP`9(plC?cM)t7w5api$5i+1+rYE>7ti2q~g8JawqNcZ-7*#?S(@I=<%ZL z#%8o_zBSfGuQG2F;>m$oZyO18iif6H1_-S}E8CCm^omBM#|J)R#gOH3VK2sERNYhH zWCvD(kcWt3&ESN(sUei#Y)-L3jzy&tyBe(5IxPCXqTx@tii)yjG}<}^qiVUW1^*JA zpjT8$ONO5Mq&tnduBjWkT>f&kVd9~m%iQU|LaCY%c)}Vvy zHQtp~9QNW7^nt3XbtDu1il3Rz5-{Yb=#zoFe%dhxJ@bUET00nmYmN22K^70cwe>L= z81D3-X?Oci6@{;&uP$6v-ZO256nmzN6EiG-_J7sBD>fQ_>a5Yu*sy6Q8n+lC2^+En zEQ8xMEAwIe%176MG+lQjeqEzNP*GMXl(oRfIPsC1ueW)w-$ z&qhvu9KM~$xefUn#iAH=<1#eavRnt<)ERu?bJ|0V7U?YWv3JdtYrLBh0Z5+itWhN! 
zCf%3@lr}l6rJ~T=7Yp~xGY&(~Qx1bYgF~fNRiiQcqhdle`6r|!Jpf+*)#PUH1IiL> zcCOL7sw4kc5Sgv?Ju=)SYk0kpvCB2{-~F`au|Ls?bcFmp#-n}#`*ye}BxIpT`p1wk z!u%r68qT7|@h0Z(axqC9jZ~ABBYQDeVp&dEh+14~Bq%oj-x( z!H-@X+^Y!+0}f8`ebEq0r>O}@4&sz<6g()ZbkPSEF5)GeYBK3p<^|Oehy)V&O(WIX zgQ>6d+F}9;Z^X~8zz;Yg84)C~B~vy%Do=e>k{9qRtx{}IYDHN4aEmYG-j=l%DOOq+ z6bk)5pA&+pW(GN${!!9~GtfznU2w&wG_pzRFI|GCR`1yp!NBmjKBk7J%d$d+`ye4x z>*QZK=N9!PvCl&C{5Hy)9|wyA`W^(g>SxGEO1wx2ZrG!9P-TFAP4p6NzBD zV@l19GvRoD*&ZTuKdBJMRw$X8ya#`d2j2Vr9Q{H*=SHu;gJm-m%mKDSY6o!o_++;6 zeQykD_I8^lU6)ICh_UqF^k7p<>49$$EA3*pn z=G|ivbo7_DO}LeVj?vX($9M7jR@#`n>wu}sGbyz#8j2M`7`xqgsw<+!6irDWHvGL> z3!Lil8{+Yt{S?pAX5q&On-1!oCvoI?{(vZcROKV1Pce6Cn&AFZjC;2=fY}OEc+Fpx z1CzB`F`m=)-i38#(^#<-iC{IL0xLGgjX1v?F9Y1?-a4c+3hnqXn1%x45_pixt)w!chS+?ORBInQWqL z5=QAQ1E$0bhI$0qX{{dGsfp2SsU~rm0Sm)uIB2U6Jm_HuP!^5%1&cR%r#(hp(m%oq z^lUHq;N`2!yohM9MW%7>6;BU4uE}e-b)$*K^alXECAxqpsK%W(SvJ^YHA%p@KLD{V zvT$?RfPe*Kk^gR@JOrW;DDSJ{VFQj`5%zkIB$w3NEF|@h*b(c4!1!46e9|>tOV{gL z)D*V0Eh>IbYeNg{3`Tam(`wv%T#`gt5Dk9Zb+WcLJ4_Mb`19=i$`Ui)CFDxgyXSXQ z|71#Qv2Jh@Q2Yjq{XK!%kCfE>U55nf3@AqyajI0%NWhA}MCw{6AZM)80eMDI>CiOR zwYS)XE{;~iUg`Jh#%~vm0C3{E*cdJr>wn5zl`pXUpCrZ29qI#i1Q!*P?2}6&QQb}S zHWnVf;aZmtI0?ORs&=vm6R;OVx-V9KqdKM%G+t)+Xn`Ug3HM#5uC2Tj5p1y-%XQ6a zn)6?(@2lSr)gN%(9lEYfF0NwZE=2ZPvi1b$ekBAF-u}*l#Z9ERYEo&3tS$l7@8;MI z4uRCyaqDUkmZ(&Bn_&gPUsKRZgCyWAS!{aPXXfCas=toOK-zL#a6r3mDL_a^)?}M# z$IJj^ecHtXXb|y5Lp!N*gGZfk-c-;}hFu2oobKO-(X2$%0`pAe*r&NMkdjwK@+lL9 zZdLUgye(GsXbjA?+m=h9&?*;*4M3ozwl*@RQ)BSC7w&z%Aw6xK>N&o1 zZljdn=wBc#iNe=PPT9~tqZ=27W5G1`^cvSW!Fr&TrPxUwAf?^dTsggn=HLpF>F|g_ zI;!%oBN#tF&4?fh^u43+c){jhv+H^ZbC)}grOT>;_;~c}O4EYC?e0uC1hqb+?(K2| zbWG0Zm)ZiSNwOtC8utbrJT22bve;Sh$K$xG|nrMgA2Oo0K5jX`7Xd*#Uj z(S#(i)J@#j=tNK81zb`#4D&24nN-uH@#y4FT0=EdwX98X(hV9U=E-&|d=}*&=DuXW zgK~poXT;9*Re4cyxpHl;R5quDn}qQTf3?lHV;nZ%z$I;rEio6o?9fTL?9WOJdJUOw z$>vnxjp{Cnx*1u|Z$(S)(yBGd*9qmeXmZ(rwCkK^;`|KrOX;A_`V}%=)3?HGDUmv_ zl`VTZZ&&mnf|tweivuBjD!})6le#jW2zG6FXF8cdiP!ol%jNy6G>0)$pVcv;tbEY) z-dy>i|IyenKNgrZ137tyuNa>>r<-WG#?het>}Gv!_a~d!k3)$cP_}$A-kdVi5g(Z* zv$4_RiJ6jTUy10`zwc^rn?HqB03M=-yz@Nra5c1zAfBu%FWZCgm#6W>!(N){hRvjc!)%HNNZVvHXIw_=CQc0 zm0a5s(XTU#HB-!Si^Vg0X>5jZBM+qUQtn0XhnbIZHcS68n0HRN{Z0FkJr z3jLPLI3VA(>|lgWCRE0f%TQ$Ih{3965CIq{_=1%M;{dzg^t^4Ky-y4)QDmV=K@txVX1+Rk5nL|21!--QNq^<@y$eBpI+0SX~|JnOJDn$XoE&f zX^lsuBn}S4#*sq2#Z&L1>Tc!a5!k>^(CM^20uCb1psEO|=orYRJOd~;#39>zh~l$o z4>LqRFxlEPtm)a;T(>&XB;oF&7`z(kBu>u9De%s!znpD&gXDC|fA{;k4Bu9r9k2_V0m~9At^CE|0z4O?5R-$2&y8_}XGrLvz2*ui zEhZy4Xaf>Rc&ZeNId1McAAb>K7F~0Cg*#)X`i4JP@sM14PV|Q>y*{Z?Q?zkX5zn(^ z*$;1=$8a}VfWBikyZGzOMiQnwFO`&kZ!R@_s52Y)B_Pa;&xTbs{}F>;9g=z)S=SaF zqE1o>j@GiRQq?SGYnbxNch-l1D;guqGSRySADAi1@lI9Z!Y(h+DomY)WC9ErlW-c(0=iADU&FxvujQ(*fSA7~u*v8~NxjS#Y@ zmvoqosbr9)U(_MQ$T)V7hRPB;=~<(+F!)Ddd(wrCz;ph>Ho19tZO^`9n)Fr902u*F?3TYtZB;I)M$&(wy?i@vW--S|4-aMa;G^fBg3$=MsKw z8DqPOVsZ|Q#&#GyJR{cnMQ?R@`H%s?Csk!hQL*{@7|PKfz;)YQKg)5;w}kxleJ^1S z(V;D=?8|G1Ab~+$D`CUh-1B3@AuSO1IUsARMO@VI9t#;X z*R=!u&Vn;f%I+KR6HIxH{)+Vr)7p9n2f8*fIlyk(4rlGZ`hF_VdZvbW`K0C=-~brp z#Ofhk071e>cA1#BL;Pr=WQB0{bT0L+R>sATUPXNLn~$$P{9xZ_e>+?7_iN+>BhTb< z5s3D@=8ZA=#$UF#Bpyul{p0h49lg_&lM#Qs^t<6>ZxO;zZ$=MqwFxk2>l)aC2~MXl zaej_Tz-sE5R>ylabJ7dUQNqqqx3E)BrN%mQi`sTk%8jCx1qEj0SaKrc-1{Oef2EVm z@RGQFnyn*>3M;*$oCNfB5DWNE?*ENuVj~8_uYoFx96-~uRdlJx01SQGNL!K$cDk&y zIo{aADgGR{`QnVM2X-U|f2BTYxjZT(h7#kG&l@nBaCTw%gcxWRO2bEGu$uP}w6?#6 z7^ZNy$rUTlADPq&uX#p^4NJ#RhtxBP4JyyZ{l;za)|2GN{LCE<1jQP#2t+5?{_Z0& z(E|ovQrLRCW2kE7^{6ZaAaj9F8JEOcnixA2#(|F_hjE=6Scy@?3yLzVWTPQ(O-sL( zxA|JD$zWi{x^vj((Qw-L1oQrN0&!p> z*NsYzU-@58SF=agM{wQQ5rczv624IN$By$U(&V2ia+Fgp13}TPm0$13ndSbk(Z 
z4?()K`zA9=ielh|3`?_&9zJLZxP5yO;zA+SpZ#jUII>?e;QNzlj^_vk_uD1K%)HdI z*3UUN^n0Ykdz1OxgM+>+RF(796j|y7* zX<$AjMp2f?0F<%f>a;XFIrA_t_xYwE3t zec~{F@*7l$6b99AEO`V&9XX8slw(EYKjAza9B7t$kmL%54}E`i>4A$n@}-nDGd*0< zyA}RzU84dx-mA;I6SjMq9mQ}aQ>@-9iOPmDz|V%qOfbD%YnWejkOEbGzUtD4q&oqe zg~jWzEJ7d`w{OaY$RC7_iCFQwP=Ks+`8ri;$K}rNQC4G`S zU^#bL3CT<*smIt>(j(?od>Cq=gEfHeQ(>{6nF-BZaj#OKB~z|PMOn^=E_zw-jg06A zz-xX#HO&MU|ANu~skW)Uk08iAWz`o9b_l!Vj2SNoiMU{#^d2vERlhtQw)WEXpE#Fz zD{{xmwN6s+FXM)}HZxsr0Wu#DV5o+D5dAow3$##Jd&rtA4E`UQ-aM-5J8%2H=jp<< zDw!Fl;=)qL8Rn*HxM^7!r6#bG?iD{95{! z@9%lPDQ&6Asf*=c`egWI0+6B2Nbhz(KfLEE!Q{xdf71VyoXYT|^o%HN)uc-9TmZ$lRWD66nAnla;-IX}H-AEo=?hc<~{x$?`3~ z%!;YhS=TmX=g2{^3r&PMOMZ%|xM3;$)gc+?5H8gr9d}{TtlkIU<-+JPk5-(w!QNcp z3~T9=_0<*$=_?>{Oj`fiizWsgPNf%53RWIUXLjQ<$G6|PsPU}fPjK%kqdDC^(b~T? z7v_Nqqa3I`rf`rqa>iU$lorRcP07kam0p!)Z^G}9D^2`nr%;sV;s3)!KL9!~`cDL< zAEr{NTk>$B)C32YS&h`T9k=3o+gn>yXUAM8xHM$^($2q0+@~ zy8&gla@Lsw4oZwG>{Bo$+v-?O3%?A{;NYSOFZq=<5M&fDkC@F$Ww26H8g{8Z^miQI zx6i}ZuAQ|kXHokcRlKEmglWaw4=<9sb5jYsq0|}<#+T4INnP-A0ntt`Sc?@9_*G22 zL~3XfgWR_6iE|FyA%Nu|)5$rf!F>r;zP|0I+vq!Y^OTA!JCE|P1 z#eeRby4YqKO0B2QPJHkGpR{X@Igtre#~ca-Pj|oOHScXQ$I1%V{*4igW%$P5Fp}lT z7ebZXPDdd_fZfH0_|R)DQ0;m=)~yGzw8|Rbzp*>q^zo}uLCvLIG#)bFPDdMn3?&`X zU;Tt8EdB-}?oGz*6c@Vm-s|;sJe-9BfDYbP?>M56_D$JGjm5-9n#fiOk62*kPEp%g_$n z)%OS7^Co;+yvD|6CkhRwFIjTj7XVP?k+VeF1rR>o9BCKIwDz(0!qQdg5BY0{qdYId zbl3Kt`hM{JbZk_LuCjD}ESwMg4QAoK`oNH$3GrEA?o*~R*ZiJjlA0^k})(;N^to-JtW`P45t^@7;tva8^|)*bmP>=3sS2 zQ1pFVH5AN6=99ccOA0Ar}xVx~lOoP)g{>KKn zTpS4!M@xnP5VFC5Y-9}rUu08zVNNeRTJ!lH2?srF4m>Q$= zboYmf*Rx^XS^HLt3c&7E@KG+u$iC4Yn1^lzCUp)n9Gg(c7EElcZQN_>yxCEw;X2?{ zV-96;+5n(0cz*L@amb<-HO`bkJ+Zv9Pj&_gBm|YW?DoZqFikR6De8 zR^G}p);Mc+rzczfT&S)?FLlb)($zq|^gSl!FOAMOg_BPV_>(oecbdb=mg~)1i}1*+ z3HMPQtrn3e2bz9M99`^8xB+P4SO2c;0qtnCKmKfQIiF z4kG@?kS`zVOSswrH?`#H*9;Mxr;u5w-V5h#V-pkGEU%_I_g>Ar*8KLser$t({YUb8 zr-=FLB$5}yE7*~NzyVp>MC() z%Qk^;_A|Nf%lcZ=gSL-TA?wM5PZk~wH|34$F6_>Q5X=>fae27(2P19dSJCuG1^NV5 zUKDPuAH-colxVNNbN*1?M~i8s7X|9#x51bdWFy-&@@nZe@g>DwJ-GRsC zzsrC5kA3U^S@(35tCmlk<~NVb?-d>!C^Bb$b?MkH!}$bcuLh6))mxuxc|kmfr=pQZ+QJ4$Dxn2KEf-mHCwB6R$rV;+5SF|4XWq#}&7MBPh z-Gf7?;lu`n>dTR(mFR(LNyUN%+D3~Y05Wxh`wTKv%A*=nQJW5Xz&LH5VXi%6JEt`H zf_q*jRK3(@h)7Ew)a&1=Fbud1S7RaGeTu?YDKrMF1@{+aj&6rE?8<(YBnQqXud2pE z76en;_oLR%wHz7NeS_Y}=FY(W9$ajz>(rwgB$yRViiN6sVg#cc2}k6_nzj-u!TDW6 zb{HRSbk?LMTlzPTfUzb7UL@A}#nDkvvr2vV<(=CnIqS%)J7lOG4R3>pM*#ZaTW4EC z7S!{t)0^{>rqV}byuQ@*yI6XxnGx(|5HE0Tm+Lz3?xu6vi+i0~-c}Cuc zn(dSKN(&;Y00>-<;iUBS_tpn(7{U(9QO^9gwF)IVm$drxK52wdTMQiGdP^b1fg9xl z$b5&sl}5S-9Kp|IWqRnv`n>bm7=^!7|HGu}J!&i$YgzC>a5wrB5hbk)o`9_Dqj1EF zchpfWq6ImD@+@jwY$A0vJNg9MSh;yt$n3ua+u6+op6ZllkwYO_5v}`a9CK4TCTCCn z=|~Cr_Gq^E?AqFX4An8NKT zyXW8AL@NcrYbAJEF8H10;14sztr?sabToqc7a)85C5wiySxm`)J@Na^!y~Wlv5V7p z^v%i>9u>MlE`OV->w%6BX&)DRMWYy$%pOa86rByb9+wO{K(SQFK7|YGjm#a{%a*pXP55MTTMG=@ zori&f_1mWIl`Ou~64IJ)&!?}6ytasq{>2RcQIC$A#Ke+D}Vm;W$A}>x(>{S0m|b#DqJA_Y2jn{OatvUx_q$v zu-aPb>vT_7vpipWInwh8=Xgk~_S^0)iW8hl+aVTB35CY`Mc37z3#HNQeojSs>8@_! 
zZ!S9`1y|qWFO+|8y`0YG=#Qyew6MLRli2xR-^}%h0H~6URtFbHD+;218mlckkm;3l zD^DR`nIK7)MCDA>uMh^z@rk- zv5CRgVgmCDsq*Tk5X)f3(tH5XWuzKJ^Ef6G%gP7o6PdxgtDEFw-E;3QcOmtajg#oO zDuu06&k6^AUXX{GYvQAHt}`75WiN^$^OivNg=b1<*p-vUP@N&$S<$k#vWppwp$`)w z@fbp6#f_Qp%w!c)d!jj2AM5C4Qe(V>w&gesm-x0)V0>vCqtpH<`4F2$NCn>-^2*=F z)WnW-A|T)fiaH}UFY^x83;wk)FIDP@EZ($EfN;Fsgt zQGY20XI)EwS#KYWXYVIrN;5xCvZxF z*-3sfQt#K@cV=eA9gc#R(iji!xy4Hiz%tvx@=vcqk|H54I}pVeq&Bh0`ZKe<-BGp) zQQFP`9gK}%5_b`?){c=zG#W;(O5uP|@YQUTXJKTxIW3h!*0D>6MhX0rgEN|sjg-m8 zMz$Sb_zm#g)1Ac33MtH*Ln8);I{m;XUsFQ7c>u?{0%T`%CR&T0s_(tu7MFx;5zgdgd%HCZA`cX{5YVhSghXW(d8cE%AR zG1x)qZ-X+7v1GLh*57%~v-VYpY3sdcG_`FjlP^pOthAM}x`0&?IFM;4Q@lxC8J=(f zswHKEkhS|ijUik;yA9h5q;3G(*FtTr45vJSyY@vLYu=VUS&RP7k@od(Q~u;zGyr=G z%@(d?{YKfe-~DFGuZIC8g)9;}#}0dDt0D^1f#DoBsk^E5bjGUc><*wwFSyO-{IJ53 za*N{$c$wjE;e1b2_+I{Gikg~YZYdI5R*}_9(nFCn#tiP}dko^MoR>p7QniMNS`B}8 z7;-(d7o?L@`u*Ju`e^nCwOLUjVCZBj1vSLp+FtbZ9w-B1w6U{@Z8IU(2P4MW4yQRP zv!QGDQnM10t^agV0WD9aV6`^XEWVpz2e#cs>@p}xT4>@rQ>7O2W3Yd)SmfzhW$NdC zdD6W)TNAcfTRMCM;?6pS`V8VXcza}z<#Zq^4spCWL+jW}KCNGW<$6Co75gIXR%4!( z$0<8eKs=*4vCyCL6ZYMv>p+Tq?7cepe$(FtsM41>^NCOf*U5tnS&*tmo+gw%CX=M4NL+dtCqLG)M?ckP?=_K+(Fld(Y5s;u$Y z%U$v34;6V@PoiG_j;P3dD42$-5UN_-yydS2={jVxU-;>WuJ8iRS=UKiN8L<4QJ|o) zEgz<;PO>^o>BFwG{~+ntW=IbozM;UzPmKua8*8T0xCr&4dZajdycMsoZ8xF$tP7R#xr`FR1jyy_d`C zfN};Pz8omjVZo;w4+c~>=;4d2Zqqho7SS+jI~Z0-bX8XOA-0vsaST4L+fym$Jh><} zh#ZpC;n^&-r^YpHG7y$Q`e$FLu^!3SAw+!_I}W1~=?sj*jPbC4)^UO9_;1F39%{Ba zF%Go9R}V(03D}Q{DE;`+!pYLt+s1oC=dY)tbib{l@iFs`42a)0MoaX|)`gBtsxccN zR8<0XX0sO+!r2P<#+c*DLS1ZlxvgfA$?lh#Oq28?D6Y#V{tO2GF%bk>OM!_##P+#g zosWr=PIoypXC|9vpO@*^HpUTuFdtw&EmDR3Vdpsz*)`jFs;cj!NH$h8rTuPWapwBp zxXzc~$_1Ca=#%{3IX%P67yh*x3ia->z}>I>6aMFck2)CLe)hwq#&*u5iOFM6r!yNr z44AX^kN53aB1*p|ZM?-s_fb_byXR;V#fiIv?^OS7elioBUtJ)Fk({7GPO9go^#)de2KkcHvR`y?15i;dIwzDNmvbM1u(h2RbA4=`dtHDYu z9iK(NF_i(fbDFoWF$%43l7E3mM3mompSEr`GZWSQ^vXiCiQTkK8G}(xrzp9x9Mv3F z>blL4i4MO3hIfOW0)~DAq*-xpd19-y6CIrX?6eAN6fGYdV3!4I+?td?@$$x4h6WdW zRoV*~+(#}0PnI-wjhwX(@$&_??Ds^kjSUph}y_?b@|R+?y( zCp27zf_L~phD9Q;tAnaLmyevzRV2T3mvUT>?ph8j7VlNC2uq^pX;Gv8m{^Emc%W@) zS^;Iqu@{hW5K_9?L}}6{A>6`=JU6&CP*a>6&`pn}E9_E8fkHi!4eYnC7ReFRJ~@!` z_)7P#aQWHg@wt0q^bd1?gY+x!TQcj*M3hL}L>epng#G%cF3fwHvnA~D<$ggT&CVFs zFMN_hTe!Go$ptsDYfZT$WeJ)bT0+_%;sn=^wEJd7g~|CQtV?Dk6T~Rc-;gp=y>bd(R!d^c~F(EsGrm<9S^j!iS5T?@EW@ zi#W_=Stz%vcjLE5L||)LfG4B>jxnOnY})b-y2_(rMb%~5b)Y5t!*$PlSy$71i%{b5 z@q-nF#q9%(7bk&ijf1Svh}U;c(K>je()#P^XBA+BA40 zRZ&3X`B6tJRQ+16MhenyK{p>I!Yuy?qug!v+=%5%zTizwv=pPi62nsFz_pQgYB_EnAH6%+U#n#}UE)dgwh4jD9@IDcsds!FVW2E)35 zi~b2qx@JEd1ZIO2#1ou*3ku>d4u zvg4fjK^q4Ng>JUwE~sB}dWh23-Xi3njd|d|H#0)qU`r}Ab2YnemI7p??m92Gqd06# zudQMJiX!cE`C{g(>SK&}?aSWwLg3=gu@vma%XHiMD4KC^Au?sOW_Z$|wd&JT%-coh z;TSp}OWR-rk`p2;4j5SBb@_BZF?Bc1Q!{PlBV8*l@uU8zsMkkLeNo&;9Sie+L`$bN zrsf;FVy*O9M7*&#TfrkBjZQ}|R~b}IUV6B+Mc<_A>R%OcL$7+L43Mx4&PCJZr_hDQ zrm-fz(MBG(rT8%If`1lL7~#G2HB8bxzOEa9j{6?YmqnXTd5CrM6kIJc7kCy*Qfv7r zQ+auU?T~$X@;RMtoSpm`+)8#y?lz6I^tbyvv{o8NasfpAj%=$!a>hegf4Wp~xg>s4 zhAVe}UAb|$Uz0P`7jrHsx*1m0d;@ud7_0DVX-<|EgF(m{ls8o6Ke_oT2XSx8y0R*w@Pu zf5*T)huJyQyIyah4b$lj47#t>B zYPy2w>VBl9T#ZoTl(^Lv^uH`$W@TXvLT4!5$JG3if-QE}HsldVgSrvjmP^gF^!}oo zs)8L`sjDxlcQn%YiRNp0RY(9~YZ4W@TkGLw+jCY;4>iW}wcD$zzLY(JfWaEL{Z-Ix z)IlDtpS42`NN;ICN1j*tH4JOeGin}5eX~;&D{C`+$v20K+&&_t#_W%2JNEWf-hZ(> z{=qbBu^)e~csZnz!+Y!rWNr`J(N2z6?g50;$Gl5P0BVfqDB=zpE7r-i8_^3)nu3%@ zx*9)UrYcxn@01h2;l`4sz#4sG0UNK}vMC!=!M86iX~D)8ZVKciFw|_mF)ZByv$87^ zEsR2x+UAVN!Fsu?XLP3{`$D4ku+lj<2B3xGa^jSF#z4d(E$Vw(6Y;ZXbnk%vl-ESV zjltIZ{@ArOT#8fvX1>DLWxC~CV^`cz7bqr5sYv&$>5n|qk{wah^zQOTPr(|!M~hQT 
z)0V+bfW(T-(Xl1AXFtS&M;^(Sifcqt)|5}{A@;ab~>71oxtDBc4wo)_!!6* zbvPVT#n=<7o_EOM3{)r_3?4>PZAlvom~5(Azv!=a_<5|#e5n+rx3QKMEIp3RtkKIE z?fOcoO`snBKT=@G(#P{tiwm=+g0BA%>*emR8Nt$OTP7eRZ;d0Dr@@{L=XP- zYB*rP+APC7jHlt=RahOF<FY=p&-EQsfRb_9nDuK2WZBHM445pu#aQdMWnue}rIZi!)1%nz z1;|71*Jf#jS1Aj;N~w!G5m|uJrgDaQN-7UwksZqIppE#P&mo+pWl#mfz-|q!yk1|y zuNxLvf&G;9ViwdR!mM`?sjIDPXaveKV@`1DvpV<1yu|i7xQ%Z&Q&!vepD6h1K$|{U zdT0+l{J@=W%}+8EK;u+&NETymcGSH#>I8@g#g=ct|L(;%|DJIF=K|(ss3mPzb}uZE zJ@U@;OXS|y8uX9;7X9j;XCpSssXuFYdY!nZy$iBSF33@%Oczb#pYfK6u^R_di8({G zL^V+q&mtj#7}~eZKgbafs&1l`zn$xvy`z$sn;PRd%sQid$oy`8vnRX=fOJNN5J3!b zg4kkc(|#*UiW|XHWIH2MnihtGqgx_ikKsI3_~xNB@{7w)fsdrLK|s|rjgQg7C{JYS zKSQ#W&Uc$tTagGOA@~}_+B&phfVAm-nhHYAsSB^$E7HlV*^6ujiHH=x0akCqDUSI4 zFkH3t4ShfdJ{kvP`}Nj04d<2GRDg7uS{?Em|98#(cjsaXV@-16yse6(Picnba&LIQ z+!$dw|5R|^%J+0U!G&ht*H^c&EqE&>HUShV1ZYI_BC z?HVY5A%z##SO$#T@vEx4muODql>S-yR!eL`^02M5$vu5X`fUfW?TR~2C1eeT;}bsO zOcRHTFD$-y5clU%-;;1hmr=yt?@uEG*wwz|olq)9gHjhrBkN$e^J@y>;>RozgP&9Z z$W150Un`Q3H~qlM%bEa;BhiYKG%z<28PkJ$=qXxVX{So&;Ui0;A4`}7<_aN0m;9^^ zEjpIIHSDT9E%ftemt`0oYiK1_Px$?1bmBj zIEk3nJ@V^{xaNEUP;vr9XKbq3d#BPGiBxqyc{N~$V%a`j<+7R()n_^`r1T~~18)A% zYm#F-)`m{(f1OxI<06kH;Wpm6{NbXtQin`~T3RT`b##e)#TG+OcK%HK|B4kV zUJkB`<>YJabyO2E_(H?8)Mz!A>!}C5OZA9kpP1!&rJ)A$1z%ivi@E}*iZy#w&gI;} zZXGHhvqVvB=w3_>le9D$8q_%j!S#T1bYp!D3=bFd4hf@3o0z(&fKgEGx(g!FtF{X^ zK!WAyxGpnVIjhabKNa5JM>(K7{#dt-+1k<`pdQ)uS zyveR1$;!F;pSz|NDxp~>ziY6FW zaFl8$Do2g0dW^{}#Z$rP8B|@@Wm^;6QgQ84^ari&s6uB!*5CxZ2ka=N-S4kHC(a>x zvnP|A8?!1#S1&_b=alMdsv4!!WibFHw4ZBq7nqvr+P0x*^Hygs%5f1?J+t+H>(!FW z4|9F5rzRO3p8i}^aM62riDfBw_$l%E^T>-*9I?TzTR6TV>~(3z*sh=YL4lj^R4;b`H*d3=Val zt+NChoZ*3sXsT4x5pD4gh=m%1P}7YjR9lDD{%!>JS=^E&#eH|81MvrnB1vFfHw9lW zME!?oY-K@xw|fcN3B(`Lt<&4b5nuI~eK(WRF(|?NHYVn4OU)A+UN8Hv{&0^^wtmYh zl-$+DoZru06T@N=d2W zhk8ms^#X^5Gw^7IdeK^qFr~UX`NVD}ZBA9Z65TQ;DJj<>I~ebw4xYbTcwUZEkgL-h zIE2~v6gVSZks!7^w?v7CBp(v^(<#0V)WO(WhauotHVCQBEzU&(IdaFL`JcfkWd<-% z0q=6Gx_kvY?y|Q)`Qeo+5;u6YyRX)tD)s_u-1cpXG@S(Jyf)+zaw_ zpG~_bpYy!9d)V>_RPyS}1u*u0zX5`g#7j#uJu8j|H;;NYVd?wi9fmzLVcw!Dd!ETQ z7jCYt=e#l%#O;R!6Jmq^G%L}>o^sTTKJc%PPB{e_<4V$~c|yqYZ%6PV znG+NXmq&Dl-A6qwII8oiU;fS?;POLkeMk--_pIdo}Bno2byl5uBIq>F}qR*PqJuV9F==IOpHRnH=dvsRpPj8hF8`@}tQ$2%*e0yhMOvSQtsYi<_$t|#)fEs^T z*i1kxZbpF?_B2GP>3RDKujNEsnKcae>+*{cQY|Y3DQ#&@A+yfN?$ve!&@8rd?!nS8 ztkJDt$cl}IgOWWwKN3hmtFA+;;M=PN6XFQkDE};(Z}1~1E+=JWPbmDuK;2r8#c{n+aDR{c4YgY(+=HMlFYr29LSM`; zYT^bwIcj;Mh^IdS+cE@Fc8r+Z;@8aQceIn{X%~OX)w!aB<-RKMgo!qPZVM!uX#*O{#5d{7uPzSlQ_*SX|B7=lJ58n!`{$-tZxg4isGft`l5v8Yp*_nKvjcD(rLMcmMg_SZyEw)ys-&ab(*y=pqR?)4OxHEe0U49 z!yS7gSFnkFdXwM((B0Fn5;S&cd;c*1XCd$>j(>9~%{6=P?E3fPvV^ILcEHD_3e@F| z)IyNW^I)66r2q40W{(Bu)3fPMLzp zYgqq>r&xE3ED&gf`dv0+M>><2##yPs+Y-o89NkiW1O?2TXw@1X)fp+J^45%_z2>2o z{bb;@U}=ITpWw~C*5Y7^nJ6}M^js?B#L?6H_W>`BY~tKGvdav81MFya) zFW$BtqBxe`yO^R#dak|kp131bm7NjGAe;K@4sw{>B&^Y9dpOm3n|>sf)@>Sw7NXr+ zAYkp=*wn6kM_$)VZ%9rL==DQ}zzsED)!V=?3;e8SQAnq~HMI}V*7Rf(td8-8jQu?p zaKEhT<}g0xD<(9}b;MhIW@T^s&d{g9Uj^~=w=qM$bU>%CHBdKcb(02O z_rkr#_WC%VNAeDLa|8s}rUYiyXMzAF&?}J*TV8|9vbMQT+Q`2E75CkSmUMbVhx_Cp z-BtzM&I;MM`RzT}LU|JVlKBlG1TfU?%_Iz;Ot(kV3lihO?|l2Q{@0cH!y$&L$;WTu z4Jde7Bz@7==jbucRCqOU2;W32HK=F7e9f@U`Z|G29az&eQ?#dXkuw`tAELsO3YudKrCGE$|JnxzMrG3i~o6R zvcHpg@D}{xDT}7OL2+(rx;mfe=q>$JX0 zKCVVN0FR9>o7-!2+?fq&D>I3{P)Cx9!$of3pu@JLlJ!;P)WJMBLT3_>!Yym5yZI*- z)koS6q7kcf=3NVvV=k2G0A#XZOA_2WJq9elzCO?8{E?07$6*z5jqH9CND3;cC7#~WGLPa*%E<31~P+2R=V!Xo>?^uxre7(I7a&*Kt=*ADseHl87lrz!;U5h9s%n_YzO`c{n+wygWJWq)>p)(!Nzhuzl ziJ_a%9)&1T9%1VS7vSrEW30_*y8fRYB&1ygwQL;rO(&ptfDnW9mCbk5kp&4zU;KLC zSLW$*x4z*9te|0QKV+nyL2hP3w+af)KaOZTOdf=kRYiJ1j(e{eLEN#wCEVB5D6EgG 
z$_H$6U4Xom0H{FT5hftAlRWFdjeF)0%Cj-Xrz+A8Me$dPYg1(Z%|~5oeHlqJJ%IeD zu>s_uoESk$oA9O9p5?d18ry>W^F0;F$LMB2m@;?uu(Prfd^QiieiuCe%p%{neCk=3 zt-4RTW^SuDt&5M*xcGTk+3?TaR}~zXind+SkY}uoLAdBf4>Wg_h}C6PnSa&t4xM;O z=+7he5QAsdM~)8W)nVd|kjE_PCVbZ)AD_&7aqOdtyLU^@6)I$+bK!lHB*xy_7koLo*eXLF9I`=S5{D*bx3cX`XMysbi zdZ=N(=l$5-SjW9u^X*G%I)k*MZ2>~`YB{i5a)`Zqf{tAJ_|@a)njcXJ)I#(pl(I(} zgX0Ci_;nR;+|KMFMp3^~V!V5}59gb^!uiv{H8BOB9{^gNP)`c8%<8)Kk9Rw_nUyho z`i)@npzA_&fQD6g^&AlyYMHmz-G#DlUkp!lGAaNCWsjYHoz8v_P%doPhcTU~+TqGvCxD{E=+NY}o$D@TG z{`V#&l_KiB8aW0Y!M8wE#Lpc2f{Na#BG+PV@UNRctbCl<$$Cd!$Df*o zlv8aRy6qH+Xiqje0;0@_?2D$fYGEB?TnFz1|%T|-144zxn=G~(SfoH zQ1xb#I#5IY5xqZ~8b%?WK}J=WTm4v|6MVjZ8GV3^@oB>I2)G&6BSRwqd>Sq)snK=0 z&)lgiv|iLc;1IF4l!RubL($PxtBBr zE@Glov<=Ql6lOF7QupTG#`MU*#DrERf9rhr*jqw<)9zkNc;eMHhrG+D(Oplp17xC^ zS84*2QqfY`VJ%=P&e^--e=)#o2$cEv8{P(5k4sanC(||O^CNvtLa4`ZxbcNk!Yw9fr?16~X7IbWUNHbqOSGYdpf6m+b;^s(V%6PVy=UT$ZaY zDxEa5fS|f;PXw!OLs(H-+Q#GNLfR(KiWm#MJeYCc$;U z6W6=Cp{a|bsMYmxP!}RK_tP0)J#~{;r$G29pXn9puFFETzfM|)kgX`4e-nx=!q@X` z-P{y19mPumt{eBjXu`^e9hN$_K_$xiQM2VsLmAx#j>f%chTc?lM>-b`2cnT(qcSOgee51 z_aDdJ5T0d%Q^?-NM;EIBb}dIR33$^Xownz+~_k`L1MNH32qc zT;=Pm<``;1J^^e&N@3s=lW}o_xHEQ+roHiLsDJvd1#(ya4$PW=G z?T@yk{(EOe*{%dj5$_^+GSD%ebVLYJVm(ox!B}elqI*B?S)4gXZ$gtD>lWW6@B+`{ zcpJ}MWooS3`auNI=s}P7roS#?RAJ@*LFCIs9cU)SKW7QS+&*+M2JBb?>}&u z9hlPjPFqHQ^a}zEPQo0e)=3S)Gtr8R$B+neAI%ur$5iS})eLkK=`e%DPKNkwea zLfB?s-ng3WY}KrNw^2UBNGTfv1cmNF)r_@A+q~tx?;}2DRiYo8`xY0JJlRMXfcd7Y zvlI+jKWbZazV10}$T9h=>=BbKxy@TM3)Da%_HV(6qtu6I-!=g~tP+DiKX7z;rZ*eN zs#HXzW9Ip7yUqa_o02`iTX;B&s?=NjzOmyKF1kaM>SfIjfB)4>&;oNZu^Dvww)@i) zgExjY`v;u=-Lnk)vI=G?yNJWGK-AY4g;Z-U#l0RCTnJ9zc!n$T@&4^@)DTsH#rM($ zmA<3F0E~RG6@b3!bE7uh6D(q2T@kRThc}%&j(EcYi?H+qxgIt!BOlx+c;5#P7+EPk zM;#1vHgLbS+llUFb=dOl)6ZYewdjeAH`rP5ePNdn%6CGRS85vwc1$x3Z|hL@8z<}{ z=2IGNZkED1sR!t02`Nw@PN#3xD$k;+Wrjy{TTG0>We`yKve0%ZrjeNW`W**vY9wh5 zum805bb48!t$3xVjJy+;R1;RS= zSG(H^#T$KwnX4TDmvPq5%t^;UemM^#fCrb{C)z5GL4W%tesU6N^S2mJJCi-sW0;SF zfJut{0#wJI1yo4S1L&%Vaa>86lO@6YvTj|KiPaxrY7UFdpq`86zi4^u{M8t0a!YnV zq`!VyJp#+rw%MeGLm7jl#XU>AvmrIRNH0dAqjGTn#LLZzTah$f^_*5_991q43x2;I5d58>r2ERc{lu_dqu?sV=#Q2=e-^}vrMxQAn1og2 zk!Z?$u)50C^^dHs5j4EU{UVLS)SfafA8xsfQMtj518zzFMjE?Hm+u8jyY$H_6+DbS zd_3;-MX->$j=8A{pU;5dA+4ZhicX9FEXT5DFz%WoMyl%UK9>mT#)XI8fb2p(bGAV9FSs^&p}3Hh8}}r zh$?q?{nK~UcyVW~T#AXM_GDF9%=+K6m=!vp1zTzz^qIF;HRFQ^C`T1}g>YUGrBCw1 z3qmX{ z$>Go5tBonwr>)If2kS>Pn!-gb&obzN|E|gXpdR*Cg8oSuIuIgw9ahVtZag_H$rRWG zJp}ocR+r9eCx4!Ubbl|DA0Aef@A#+Fc^nGP-8I;e*%OW}TT5)JRj-~!Mjn6`Mc!!& zaog_WX|y3oY$~f#rvY%vAR?&ZV%5Vl8D-Q`-y$qg9t|T&*5c+m5hW+vc`!0E8FMR- zx?dV{_O4WOyv~wm1!`x`y#5CmT2L~Oyz45!csS7O<>HRUfR2o-7XgY`s31}@x*pR& z`Bz^IG<4H&%GHxQ%!ibJMEXNUU9AS_*^=FrjNR3t(+gO0sM!Jqw5-S!YVdAX@g4yT zKlh;OBB&?JXMJ@hIw?c_Ux_c)5go@0pSxhcnsJJ(Ij~w(@|5+vSO@{`Vq&Rr_kDx2 zcbP*nO>Ji_yG@pKukCzrTB9EW8iHjNZlS&}`1zlcSWO30Da<3OEw=P3q~~d?3;$!A z*_*^7m>Z)DzRHXSUBXqS9l|EWv-!l`#k0xI7YCzwetA8+k;U&9e|8#f?S@Bg>pQ)A zrpi3sF4!dg{`3#0n0gj95D7b6aAvtR#h}%H5q!KIUf|<%nj4b!oyREi2I|6ss#-+c zxGDJW7lA@n7>l&d8s$}Oq^AenA;G`@#}d$5Qb+YmVdS9xiTYz#eiYvWe?#rN#0CYQ z0?OgH#%1Tb`;C^M&svv+@M*vxN_5--_ZG9-eIu88$lVc^POS;-1J=%3L``7*+j=^} z)!$g;WB%GHfZq?}d&{MTZt@Q_|I>?c!Jjr26z^##vxEX72Yuo0yj#O6Y-=hkKPu=N z2SvG`WmIg(03{O>7)6@U=Qirwj8C8-sd|1|1yA6B^tU#oz7+k!Bgf>qa;cDo39g*6 zW9~K}kX5BSdqC;8z%0x3D5v5Z^+Bqq*Bbzmmu}0{NeM}yS=?(+Lsp`hbEKe+YoDZj zkaP>|WVJ{~jSj9Drhq3|>OeY?-eRF5F_Cc885J9)VXIZ5Z@g|hW){F-{J5;o4GoTF zc!$&V6mvMsv*=Ux--DaQX}6V1QN2)1q;&NK>Z#nG6Zm^)N}kW^ zh3Q9`he2Lu2M^{ZRpq-kd+OPndXi4+9#zk#rO22BYCgq+QlT9AfsuK^4GRTBG5cL! 
zL2xloCmQ~O^|*uTQ{USRB>hD&<$g3t{TDJpp`;ere$OY_AuNc` zNYynKELtwifq`$c<_9lxBUJA9oWUM}ZzdEL?4(w@P{9=l$RcnN-xhcq1^V@TfpaxdXv+#sl`_^+y;b?ciOFm`5|?rfh@`)cXOZ?JrW;?fd%%| z2|P}7FrqeSpgK9ZI?hDg=fP?2VzB8L%sLka3d*QO5$05hoH>HnX8uFae@9tpO4!M&c$NLQM<6M zheFINV(#*i8{zrHpB)^nX9NRA)aNFQXvDSoYiRAaW*J_Ud*=d4y7G9<4PfxbNN4`$ zeTGOIwYabU=U~wC$E zdUOrmpkIMZNYbJ?0ad}nX2kN{Y%YSLyKq7O8c6?oF4L^EAuiNi)XNW6Gd>xrT#b4O zkkn@WmwaJ<4>{e;aL+HZQ5N*>$$jA)_zQ3am-A z?$VZSR*)#A2R|S638)Vjq5ib$u@q-o{4c`Hau4%v5tM2v<&BC=8@y0mO|z<2ANu^s z#Db8>tY>s`u!3T?g0WXDdId)RbMltd!g#w;zZ~=%QI1%GrIcNGwfVMF@ch1%hg41f zZSUf$H?Ht)I*9;IcD~Nb4f?~N<$6&n4~zrUpR~Uv45ofO>NlZb4JVAB?5fI?=jG4+ zwrfi0!)B9~Y#!3b1`gW|!lruaZUw*Y_y~M6_`u1tuiFtAd)yTkkwx}Q2o+>IS)3Hm zaDf5!&#ckGpkE#0V&Xn9_XdeDuSN)SJ!y<&5=ytab@%Sw3f zR*ScY`gOs$)Or77sl7ev|8jNiQB9utzCU|9m2p()YoHcA$6!mE=jZ~sYbvWAV6+|D2U`DLTtcaR2Rp)OtuFArMw$vF>TX zLoTO*)9Lbt?~5`%ba^V|T7vSU~i2y!^T;N_|qZW;KlTxUU z8JrJ$-@JY$*pa3a3cB|*gslcb3vp0V2ftmW&DqaSh>9kpJbN295YV2@KoPo;6#jTw zu{vIFn0%>SRRx8gcE=mOPe=W1WWCFxk$X7-$z0FQ)nr~$f=I!4#w{NvEx!f~cmQ@W zbuI4=IPWQj{Fl}SbLWO%!XDr(q_j6^YaZ+P1nEJ3BPx-Ds#tOhoGxu7W^aY!L(Cl} zPrmOr(7!+V;6yXfTyVu?!W4ipBbR!il9gTY?c#tc1{1)3w~BsCi9oq{Foe%VcWl5j z0w411;>}Lw`Hg*~q(jv2QJp=H*ooD~D|+LQERqjWrrxzi$^Wnbk{ksavPP;%@e&`L zZ#|B}>CeeYcEqvN+)Ji&je~VZxX6Dxu1J46n?!Q&Y)HVN2_yPB@bs`q>G4qt%STh| zu|&azgu>`fbY;&GH_o#ZYquaeBV#X-Dn*#hxxV#FXR@J|Kv2P8js+0V3?3=smTN5{T>P-!34L3o5 zzQuMyNU6Qng~p{G;o2}rAaHs61yZwO5&q(zDyp|%^)&=sGy&%cY2Zr_d|E=g+dH)Bj~z={qN1h@ukKM14);_qfMcXVd9JNNqdVj-MQcT^46ieRWLw0U5(5*B$a% z+AHVRDXXVEK5f76{L04U#xfz4!$FCbd%PPj6@Ssq7?B%S=CGxVVYqia62(PqL9IOC zNzeEv68yORJ!e0orFx8i))X3)N=tsf09zB{0r}#EUuR&}om?f5MN&Can+3^w_%V%0 zB$HK9>cxdi$z0d7Juui>zOKlr6i!aM?a{XW;by$WdsWRfRO!=M>wC%ELRFUQG7(}Ox~;=w135&>>sn8^l~%W zG-M)|J!9EqOCf)+S^h;`+Pq z_$qqYJ99;R`s)CRL2xQ@Qxux}fKq7WR&{8%%P`wWXGHGXL1UNl0m|-|l)`Yo0O?2> z0eR_0_IShurL6?!dlSUSy1{!jbaeV!a#L4A-SpIRegcz!Wkn3A#VDVH;}Q4&!cyou z$EbmfW|x3?7p&4$cRP979E>Na_|@RGy1MSm5$nY3{aE(K;+9Z)*kTpx03{P4{00%Y z`*CQy<#s*x_c<<+tv)hXiShkgLQKxgTpg!J!#PgYXpBo67ET4ES5{67gE@Z7LQDs@ zT^dO!*U)m^%A)|6H)JE*{yCNsoZAwtb;|a+vH<9u`m;qzs0j5RKQz%>l7%y8lcZS^ z^D2+h8hctz5aBLL$NU+XClx?(%)sdGn{Sps2em-|-=EL72fh^3iL2D{euMC!bju7G zsT589OwOpgrevK5lUMx99;szap~WMiLAr*5A#Rk}Gsr~8zQ>&W_MG!q&A58#fdv?|6HMMaFRrR6RbU0 z4OmFN3&nca?6ZF_%zf75RrXxF;8of6Ggr!;QQ>k?A?_#ZF~GY>S`3bznv2dQe}lJV zg6Wdh1|adIy~JHoggs}y=bD1GxNMIOndXQIINftv0~;th^<;BTY+_Z`&cWqtF|B|E zyjAc8?$++K{aGl0J5DNHU3*75I{oQ(G?Y1ZDQv8~+;*xclmY*veGN>jW4)tEyGd`1kWqw>l62NDFYw?~U(kRJ1eihYs00fVn^cKBB0sNzhv zW{a9Uv*LPZpy_DzwkwIkGz?eLWnZ4263p2 zVOLvUeGSA#NrXh@e{Bi1z-8AhKSzC{bKIzOAH!WKVC}9f%yM0N{qGf^Rb0FK zW%gC^@zItO8md=l6OFg~Dj$o>XLG)#5gT2unVO~jXnRNBxuC}8EAb}46LOz4cJssa zx#5bTqK+s#>jQNoJIjx!>jLAm8mS!w z>o2Lex2gsY+ZV?=>(#ZowzaGT>y9pv>a7V1ZSux(uXMi) zh-&3W&#J$xyTNikm3_{koGxTO8kl#mMg;=1Y?68`M|Ai}XbodT? 
z^Ugdm6bBHD$(K>^#WvQrgwC-M02i71|Q{H4h)*BMy2YNjkd4p zsa>&Ck!o)v`y_&1dMT2bNg#XHg2T5;iGuN?aBoRc9a`tL-l<~RtnICJiAlvN+3SYI zJN1%}`HGL{zFOaZfh8)D+u8lC@lmL7nT;fSN^B+S*1!d!b)iLV2)*1gG{YXBH5n~8M}-{YS9(OKJZeFK-RfE{w(rY!@!dIJ54-*a zb}ub-vv({7l^)VReE%Xt(%gVnDyQEoGF$W8$y-SCFAG6HGL zLs{rmfln?C^J}x&oe)eb{EJ5U07{CkU=9X*>sk+1)iJto1v^c1o6F_Z+(C03be2xw zk(cJ*fJpeV`_+dhbrDUhlMLgCAR6OZs&8e!8(th0s%(bB>*|J8rxQYIke~1qw!lsx z3urlrrp!|La8sQ4(YpW-bV(yWt_x@xx=}j-#omO4Hql%ij8nSF0(Rcm8ep}_9})an zF6Op{kvSYpo1J%A1vpF(o9|XVadaHsy)D{TkcvwUjZnKEkd{HA?4KJ&%WJ&K-5X-; zJEu3jy*W6xb!q4B8h%3=&A57duu=J!BU_XsuaLnOubk|tjTj7&BF~U-usZ=ZoovX=#H?cmQm1Y3hJ>($PG{ah+Ee z(3K}41l^E`u)P8)S!&u8*yq}75gbs_R6n_1AI{pTEc+R9ZSaXfaPqmK8$Xrp62Eh& z=GLfaL%8`>`t-H04s^&o|DDR)zOqq2^!FS4*w;QGBqqk8epkQLlhA7WR1BB(3)@*9 zi)AVWjQ-Kpn^K#DA5zldHN+FTqz7oO{bVGUL8{>6R?!WT=I}_(x7W0Rw&wdGT?1S3 zsZj7^QrQ8th_n_NQvBHtDarMqUBRa%cay}*>s`1*VH07yiq)|bCsk89* z^%$H-grJmKT&{dK2CH2vPmctB4j$M_qcotJQJhgvzKc@WTxW2<=M&-x%JBp|jzG;d>qs*xVv`AvxeTsEqCSkZLn^ngdB^t%^jE0+$2ZhR8SOqcj;;!}ulw6__{ zcr;$k7r0M~dlhJZUNoPopPk+=zuD%lKnE1fcvdA1$lmHe$;zA8CC1n=N+b+J2h>Y> zko1ZaIu|qJw@p@zMyoxQW*Omfhg?7_lt%gXNV7o^KxHX{(sC=j)wPINT9lnkQ>rR+ z)1i=1jgkKTf9(7E&c|h(iuKDo;?o$aX?~@FbyU~OOFm6}<7Iyg>9a4}v@@>C4cJ1l zp9S`F#6n0Vz2;{<;_~wr;=6)j!LaKnG(p%7-h%79D71`071!RF;lgMr{U-UG_@R{P zw+TSS8fK3!ZukB*@eX-i{S+o#>uS!vzBfpkuyO6D(pIceQNihR=8RpE0W%BE_GfQ~ zTb$MmxB9aP4>^vJ!nk?Oq||oKWVsRwjw)OVx3k#+OL@HOL#bUGDxUB9+z?f>4`2KdAaqw)^Y=|R>)ix7?nYeKU@BNj#R&pOzxfx9CAl;4Y3k#SPAj9OWYp0V_WB`v~ zs?skUvCGJ{)w~dTXEn~q(vkG}jm+&SD=DpJveC5gN6#;xwsN-^{y1e_iM)437#YF8 zlk`JtRIXC@Hfu}W7111bNiefsmLPF z*eY(kQ}Q#vy<>!~9Q1nkYL9T-8VXGbIT70Zx=v{;++)-5K7eh7TGeWI3n5`{HLe}nRW|A3(|i5WfvF);tKqq#@*sI2uleO~Y9O62+khAG?7c3mdSHAu9D<8JL^8=eBr zR%8mppOLa(lN+t;>4;S9qsUJ}Kf{!R+e`Cy#TD*HS$djMGY?4=mP|XZCOC*BpVDC} z{Kq~+ReVSO1Ak9QdlD#Ou7~n%nNn}xx$o?F1n&O?^Btu|pQpZ#c?{K1a#2?7 z0)5!)&F$fn{an|bqT^SgxB6M>URww6*r`Kl+gAFvyJT&7Q}macBe&32pm!v{e;u+( z|M>Tujpcrauk_ytJ(rdnzpUlfwc<65zY3+UWKB|7FU)~JLynA#?yHhHinP!>mLWX? zPq%~mt%^~jiQ-b6Ebd0KCX+5@2_0l&8<9KS=wNHm+kY6r7+Ho^O#>~iAwwYOsnEbe zNNWcw)uq=c?Xt#Ze00wf%{ANCBd=Dvq=<^!lb(Ku$ZHr|fgkz?rH^Zf#A9B=|b8_{z+UsMOl@UjPw!spANa!RB-$&loIJpq?%7twY+)I-GlSLSnrqect z7$Y$a#J$nqNLNSxtPV?{Q!9(pr)K7f$~szyL2OcW&O z{!?S?L;Zm4q)*Gc)!MJGJG?O5oW;-ty%4gbX_ zOimfls!v2W6LX2K1B(o#GkzBLqj(>WWJ;Fu*6xOTG%|)2bM^G3aNp> zD5-FD^1X(ce}s-6VaqNa9?YS623i;5W4Jx9)0G!%KmyV5tp06^HbMSvoc9}S^vpgu zndb24rN38;r_<|6r&a9NwsG2wXC=~l>=hRV?>_=}MBe)u)nHiDdo<@(4u?r>7eyI) zbY-LYI7P1j$+db`Iuwg-;gh=E)h#(~;xRO>=gA+7==01C)P) z^2VBHsZXBh;i(14PU!pOFh*txW8ZDJ#t-KHD+{pS+}Qb>cYaH`)f<9pXO0LCCQz5g z*=xKSZU~)#I&BTNN3%;LLB0{KnbOk^1$lGy9GQrP}i?+HuOq9KKwoJ?Q zisZoyNmN4w=AA(uadTQ>ZQ=DuM9tPPf=DcH7j(8Bv!=(x@$4@9#a57&>xi_y)PZ## zGogK*LGz?`USC@Yk~f&^lknf4Vo8!{_B-Adg^5O7vwYznVO1WAh~nAJF^2}tSW!ow6I9uLD%+E`e}54ANITB5}UK{ZF3%R zJwlObIqxBj@gc;i-zl!g+~$+TNxDzH|Nd9h=|&H1hw3-#xHor6&6x)z!n5hS=B2Ck z?P=pTp)@`9Ey$!@UxPQhuc)#A0w@-#8Mi@O=YJcC!JWT)%)0!i%Cors~JhH>YCDX#Vt$s81}u{ z#tvrHUyq`ej=VCttBBfhD!xp5($=E%uBRVb7XziZgi{IEDT!8!S36wRp0i2Zb$2MGBp_0o|AfDX6!EWsP{T*O5j;= zBAZ0AlEcK`>Z;kNVB2g8Sau5dEiA$p_TeRD(vt@1@OKLRP3r8$$9{cs)uz;G*AhFV zct>EK2|HnK$cWk7j`qointw(u-rfArG}}M)51;2vfq*&8Gv3Eah^67-G0!?}KkU+c zIxT54)!h$%8T^n50krC{zDi!iLJo%K2-TdGVGNS0Gry#->TNzM(POX>25rsw{q(cu zvBSs-b{nZ2CEr6Y6{90IHYeI|&>gXj8Ii+%eaqRZ{V(ra0Y2!(st7_iycfa**pYjD z5tHEDX}hu?o6l8m{0=Rp+aU4hE@8vlbe4erRiN?P zX`&61(vM8LPD$i7dy(L%#%^oucWHsaZr?;%rSpDya!}Ah`K_H;=;b3~7b=eZntPU! 
z>0M|T@-%qQ$MjaSD53Us+2w5904F)pZwj=lVm5Y^Kz$Od&(@h~yKd}v`GqZJi}SW1 zE&LvJKp9%JgqnuZ#W7_H!35;DP%?ef#Z6EnGC}#Ec5h@@HR?s(WVkLMA32ib8Ib3< zhNVuEsfPHz)gpjdq~L>@<&pTb4*UQ^8Lq zOb~DEoS#e<*Ii${%=Lh!VDs(E!kO5K(+Q08+})>>mS66vOV4#ZFv+a$8zs6v%G=@? z-wm)3bQjVEl|~yuS|UL`}S6ddlp;@hqSYeHQ0N*PO=j{+sAX~l+Y z^LfkhT0nDBH{o4rLU@VoX=C7Fm+`kyr5M$_Mt!OutDjr^GB-QTh#4~|&*1*76ZYcS zk+-?H2NW%Tex(!?o&tDAQymNj*|KbVXhgHiOeQYm3?um1z4TW}ZcP+`f}N12Ngj=t z`d@G`nZmh;c>fpSc-@ik>(zL(EZ<@r3vvt6>7&)eHjz9nDsrL|UOnAQeGq3nA>|Uw z!^5$U3*pV5H40+N(v_nw_DQkF3dP34O&1RnZ`DQ6SM9l7#-)Iq<3liecEdnXirxitD)t5>D;=2=29t+??8%aGf|K>7wSeLm2?sO^C+Dq~raAQKht)C*g-7@p0g%%M+FDyoKu&3<3* z_naM9p@!(&XSp*OP6yW_JNKxzh5CyiK3QLXI=Uv4%y<-q*4-z}3X`A2LcL!3cZxRW zo7shnE?av_Bpr93W|%n2s}`bqWep)*W0hY1<|Zly8%tPMZ)bz>KB!_D#}c>Y9@6yikOG@_8e%=Gxv)Ol<`XSr9}#JXKv&xm0}BY~Ox3ZX(}uPl_p zQ-mBXOQ_U{qt@CUJ01w}%JI@Z*;HX}+V{DI)DkLS@0|z^bb9?bFDXwCEvX9-2%{U^ z80X`{;jp;SSEnlr$8sa7-HWFhX(6@_-30{dv`%+ebK8|9-e5S{;I8;ZkPx53c-UqhxB`eJd zOV2xV*@N&+jd4|_ws&Qqy$Ma3T||8+_Wo_|4%M~;$$%f!?Uwd5IYIlgEQ+4ef%;LZ zlj{1t(S~qBWtzPt!=V_JE=;_ud|(rMy>oW~$@<(?taQ?2S^+&|&bSFA2>r}G8>&eR z*~X9^l|=ne*Yjw*Iw1773Lk1d1`NWw0oFOaidYo!!vkApfshxzPChnDEA6S&T4>!+bdcFC!t!Dfm> zBQjp{oO@}d%%;I6 zOxq9dZlglMP`)H?3a8G}aYFWDiwU;J7&2M=emEfHB3g$pP&^ANS75AFTlsU@>aDpx z)fIegaD%C8#c;bXbTtu)v8Tk>@PzhJt%kD~-d4a@Bx!#3Q;l#`2!^BGjhvN#0IPh~ zctmn1IWFNIHon?G{-#<&F!=p)5V>dvU-B7BuMKRRBdNx=a>p;&PTaG)v%%O&9m)Uh z01g{pF7gSkvp0o5n`!!A-Jx4z1b$xKWLW+3{KwaH5t*pBoL=`q`2)RjSwUZbuMhN8 z;lsA-_^-Kq(e=D(9o6mwKoTU?`%A!RvOFj}%)i;4Z!O3PM}hyMT5SG~G4RO#d18Al z8*KNBHqB8=0Ys~Es3^LVMjU;*nD@`;K_*MjXuPfr2l$>@wce(b#_7|*eBb$KA(l&; zXV?${1(l)G=!C#YGlkc>p6wsqC-pB?&W+tqEM!b=otH<1`h3zZHE0psPvByi} z*5eJ{7o$Pceg~eux||l3rTrA$AG)6k+!dwzi?J5H2{~mLqpvI92|KVQM8~&gai&Bu zB2XA%U-BL$ky{RWcRtn)#kBXlCa=edT(0F{Nzw8W#562PXpvTYdN{p+J}1YQD_;i= zo^Gs<^s)95G9RFgrHSm5<*^Y~&?TE?Q%>ky_Gr*a`ia24*5lRErI}t4<&bYlqe|(9 zkH-jZNPC2y{ho1~=1H@${2=o*-3Z~2#9%iNUa=5v)_|g!Kq>4n{LGWrhs)?^y=3-O zI5C2~@dK`WO5K!I>w7N2gTa>QRebj3cwlF|wf)dLGM*SZXLhwOi zcWyCSsUz@tkJylA{?)JZ8~Ug5<(+(Yp2=<1L`VT562UquO=?Xvdsq7E*dc6RF8$_) zEr2k45qAISx$uSaH{_tz*%Yyr(INZdhWgR)QL@86PqSy{jQ#H{3q; z{%+87u>Y+4+{-JiUl4C4zo#4nHC!1U*i8r;hw6Kk8bN`k-NV4XS%M z|LVq>1v>?N2b?Fj2vQz~(DS_(B+rD#@0c<8GB~#m-%j&0*5z0!siF*on1zcLYD{@? 
zEL@HFC-sZzLPZFrE!dfo6Bz%R&>I}O!MWIWi73?!>)i4wRatEQeE%Yl{2d%CkqIvy zgv&LEX*JYxb+S3w@N-rXTh@m5k`5B%63w3#uW|Rd;`__W)%EQgVwYe`ade=Vo-0Hp zb)UPi-xWMJ1a|0X*H&Cic;!4qT9PNp>ok=fm6$!I(V%uxJ+z)Gcv|z-ZYMq*)Qk4x zPf-4p`8~(^U1D`c@3MqYxPC_275GV4yM5~I=1=`H#durwMgccTXTxJV42Ri|B=;fVC_Y+&nD2~iSdQ7)`KPxIZ~gwz?d`PA_Bu5Ofik7{bW5)7j)SiqNCCJ| zTpvv>s)Jrb)~t81?qgy+>q2` zg=Ykd8uT9^M6>cQLFM7;#c&mt)L?{zzbwJA6k91A_#L#{1Rn))B<4C4CcYK;qLN8V z1DZM{E)F{xL&D76jDZtmyT%N*(426~)b+>-TpZ;VSTTs4WH^CL_pm!7`Avpi8Y_U* zWx5`CS;AW?;mo^gppB;{a1a%F5u#+?&37%m{*-!GUv*sn+Y5TIedLiY7x2#zK-lDR zfkdoUn8M($opkfqDtkq^{f8R?g)Z5f-0bI#+54QTrbFaz*L(tWJ#VEX$#-p8#h@|s ztaHtU9dQ#0@=LAFn@9Ce?APekkAog~KVRCW&%|W=Na-nfWfqQTevN{Yi4~6QhK)(- z5JlTA<&X(ay|v%(1tAXo(#os!WRjuAx%|flYa=s+vNu%S?UA)Pm#rc@JSL^ce(7=~ zNQn|1V}XhpeMh9-urLu~>V=Z+2)GW>${61bOkDQm#uK|PprBQvXMN#Fzy{08C?KanMi7qc3tV#7mPKt7sbsqAla9t_3^M4KVzx#`|tHc{9 zCAt{8-Q~H;6=Ghb$PP)Y=l5-NI)`tBSg%x&MacJ}llgbJeuHRwZr8NVH(uKKEjJNM z^G30np0ATGNA!4{BXeu53ygtE$F#tFI#1cRX>xjLBczMIVO6^0dXd`w7-d67f8I&skUnm(v5Y>u9!EH5 zy+G+*Cy$H5QX8l8b@>B>+o?-7H%M{4L~oz3$|X;nZis3#W)pV=p4IPXcSIIbJ#{;R zGNvol#9mq6mjuY||1h#TL%Sst)MvoVaCQ!*fv&%D^?&FVcj_%<} zJRQP!$+YZwLk4|Dc(c6mn(Z&wj9c0SI7R`faYWS1Z@89-K9uqU4|V?4+HnnEaPCzX z*6{auQpM#Zfd@EMBAnrQPwS5)BQ98nl*H;Mk~FEXM;d`7)03f=>PuB4^Fy-Tt3V_r zEjb(Oho7s0yclcGY`e($r)0#lAdVQNG^IzTAJqLQTy7aW%y$6k73br9%BX4-QXGuZ z)%URnv?;lV%Z#&A!;I#ytXxva%d8(dDT}wb`0IN^(0`>GvD>h72B+46yIK{wp?yF> zuH)9d;h#@)SpKzro$&jLkwJ(}3}mQb^e!w&k-w$G6hMzffKwPM|I*0rUeAw(xHw^? zKXhZ~)35*go&R|G6SOn>rC&8GBj7o!E*o7T1Sx+KBmy`6ng zi1P3(?Cg9?k`*B@r^h$8GEEVzP{k2Wl5)KDh6J&D}{S7y05xEPSBaseetpADVeQS+?roZoCu%EbPY}KhBpi*I7M;&EeRQSXo^G zKpDHwFtT1RSQJWB9mDi@oapPZS1)teB$D z{@BT-5+_sQ%@A@5iI@ZRW2I`tk1BjiW(7Piu6fXu!C;1#p1!tairE0WN|@5wasHvZ z)Oi76C6jI>4y5^DfW{|0NwDSK(yky6Q4NOzsc}h~=_2RWms-E)xuS!yHx5%USwqD( zoJpr(>+MQyr1x3zny?io1^Io zKZ+Y#ontG59H;-{>PrhIS~8M7`!`%$*IDl}udpV0aiVaC7IW%Q!d50fP(eRO*P8aY zj5_Fv??H&syIMUwDS^7}vQyHAHHu{b52Gbf$VPsrXyCOK4Ncsyw*8uk#si($c{|8{ zC>yof6$!tT9O}B|kIi{%7|b2UBX4QcehKH@Cu^71Doe$xeN%0uFDd_2xt1D`6W--Z z>EcXXGr!RoGF4S~b|8pptjY+mi0qvfcx!rKRn*5=Ul@j4P0(LJ%~S8m^c366G<9X^ z`fJ6|5w$Qw7ncAlnj2$BF@%wGf7!!1*b83OBk!DOQM~G(xV=-k3ee7!W!4QD(ka;M zSemNqb-=xu9)Cc)6PiYNPN9Zf6$Zp=O(nNZn`oXKb8XRO7KrxI2ovyIs^M3Ib*Ec$ zc8I~Ldm)jj4JMiL<;lmy!Y<|rr9wXY^IW~ZGIpU)e8et#bzJ8q-eN2ERrJy;rru~+ zKUxX*i)t)}&xI&t@@bc4C%W*oYll>0MN2MFLQQH=xMo?ZY^S*#5uWEfuWCi5SsCew z*4Dv@FTdL;ZCf8}vlV1iju`GvBE$GR&phKGS3Hs=pL;+2#5Ie&AS2^)Gm1EmbGeW9GxJ~IO1KDj z)-nE!a>5&ZSofUb$;MDm_DZ)qiBrkCzC<(rlFR?k8>&pvll81)Z^=*dY6mc^dKRef zbLua1_hg)+j2*V0yNA43zj6;5?v<^rPPZei$rp#_Z_uCDy8`1^p?$r1Km`XWkn5^! z^ltQHw659R=B3H;Rz(F;lx-Rt8GRsMHZyAXexeRIZ`+r-M(*Z=T*GF=a>K@0K%FJF zLW7iU3K?p@I&2h&2ktbq-{2<1YOlq*;aS_7i^STQ?EdP09!bTsE?9H+r|CYbPAz}j zQ;{bORwtLm7ra-D#^t8&vLy4z%QeZ4-215Tq{_aMzP7odR?iRL);(ZUqr)$-SSxP; z!HpE}>pQ9GVKv*D+TsL-W=C<%AY35Tacb!+J2$dZz2~6-eu2o;Ksk}~L;E`C6wBY? 
z(dxB6#b1xG*Or{CxE-;DVYwmmU28xjqX}uH4FqIUjA6ss1CHM1|zXB z*<_}49L7897oOJ5p&1RXWwtl7GiA&$Z^{-x50MM0hCymG~VhZPr4~4lF#_X@|D@i;~ zHM$l<@P2^2-S3(b2LiAAoO*Q?ebY4Mi%72PVb#iV5i99LY%Ct%y&GJgii>1L#nAkw z#%r~wSV?W2lTxYp&# z_}+Aq&Ij$m*uRDsl~3`2#Qo&rVYcFFb@M`YC68{OLG(PaIq6#fppuB~MrOFUN$!h> zq1{xq@B2uq+HCSJcX7prNOX6e{r24Y<=hNMJ}kXF7P;}(-7kM7=-TT19FubS%hGi7 zQ`4BZF0xx>0fp&V>N1>PRwlKN`&pH5H3e{Kka>F1u6@vN{H#%>a;F=2i{xT?2!T?-7)NzNB95keVF9KLdR>*xYhcLux%4wD1r_p0Qs$ znt|~DEgN=KS;xN09@w||?IvAW_wM@Kn0YSD;L(0ti19mHt&iH8N9;#@1r3SUs$($e zrKG3!pR$grW+?tx%ME|4Qz}}=qn@a87IS#VbVRh0@P!J;=KG^48+=3-M6)j!s`$IvNWl*COeI=uo*qw zYq*718d4iof!8AoZePgPzkd4u#Xmi@=G$%@)N1*ex$?Pl{n}}O@N43TovhXUmCbjO zA$&^!98FDr58Uco${MliGAn>tFDV1#J%{}=_R&37-?Zn<-(Zm<;BUSizK*WV28n4R#w$H#&939-bPQ`` zq+5yAhD%S4n?vARLS^o$fU0e~>jrpG@I7>|C4{+$>*jgJRstMaqF7+o9^0h|zi2g0 zKROtPIZHW*vw?>#V**LEio5TCa{H{JT;P~!d(|cbZ$_&WES9(e;lYzo`V$crL=UQB z$8w*XYveLJUr?@;4_){{_tD0GF>WbXztkNN$jP^pw$-AK0nfPt;39_mqng#U~Uk9 z!;DW^0Tt9guKkio_5Zy!ADPTw*6=bm{?pSKn843ZyLXd&7aa5;i*y%Q4!Br_aQ!t#n&kFfRrtDB@S|R^)A`<+SHKdizW1{bmGL7kXfw|JD%m2Y@?sm zJ#ga+X&#&KATgAGXNqE)%M>@|EnYlKVtrX#_bKc`vBR_drA$V>CA5!BwO2>t3A|}` zxqHoaZ2P?`1bhrU9Zjw!OIv;xj!}>N1h?aF6pmp^l~e%GRQwo%2r)eLkF{2Frg5VG zEmNrcE{Pms`?|8VjANKB69{)F=eXDF5@(hIzoqN+9UG;@h)go(Ijf` zq`TN2oJ)v3DCGmNe0b+uGHVWF80o9=|)q2I^S?gt@}f-ZOr;v4ernkfpT0 zY3|P^Eq5FaIA6*ik*LPy#89+i$&uoGMI0En4#~vlMzQBLZqmXM3827w4Vs~F^4{(2 z_zc=bbzVX1YdFtoM>9pxg6Txx^5@kGR!W*tR?{U8> zEw2oKIsQL>_s;VlJo~Fr5ut@rk$gaKOJ|wSJmKA_?Swec6I~SWkwEK3BKopz8si7D zG2q7QT=fvFs5FTYYrR8({2H|ON8d76SkG||s@%keJQz!06TsgC`o)QIZh7y3IA~#- zb6s&tr`aDyBNIPmG@HW_DC>V086Qe_*3VqQ9^g4Ix1^+?sPP4SLsvSq z7O_(-O)7&vNq1$d(o|(0)!og#8H5VB?4M(fKL`_it=F)d;-SoLahCb>G-=pQy%j=a z4`*H59uTd9wU_gsIT26k{5>;m3{ z^RLdDwr?obxI3YnhMmA@{^NaSBy7}FWri}9V}?Qc`?9y2f`ip@VU^g4YaO|TG2~T2 z7op$miqk z(YhY<%m*~g0mku8(pt7XC_JL4X5=<+nj74Egup#5$<(2@HJg(CwxJY^v1>3z>uOSb zCw1ZW>H-6c^07Walh8kO0}ti4Zxl!K5LuKBL*2L1VU)=t>fJC{BLa)BA19S9FFj+v zg$Ct=q>slK>>}#IV2142WZ<3B44>-_QE(^O^%uX)ubyMIt+OCDk7auu$W!AZ;(zHk zv)5a{jULPmC``7v`%og5Y&QFE7h^@Ti&G^1yfpw-VFLViBfb znmD4H?{xWJ+a_I4bd>#MqG(@=#eJ##5pz%+meHt9L6-eh-ppi#7~%m-J$L#1AvSx3 zQ`!wid@V^Ab89ksqa@-7S;Q~exn5^VzpA6XkdU{DiPpvGV1f$J>Sf)+jKCQIK)1hN zrPwZT$481rb8)q#=BXL~Q(jeeyYWL2ZB4YOyq^@mh%75NtOeE(g}^T*FY_n+sS>{J zqzg_g8W+xVCVB7p{*NC14YTY|Q}XYa(i_q51(t3Pg}m(R6#P#ON+Qu1yC{&Z&SgnE zv->2+N_~C2evaF<1PNVKmm>F7<{ZFC$PmSHjm)1>Z){zx?wQ5VdYrD7(we zu;HcNm(}sMS0eaPekGakiJGb|*XV8eMNedRNx6V8CJ_2XEAbp{zBa;rU~2q6l<7fc zfK6I4q{M*lMGRfo6sL4SIT|Umq_tVELqyW-|NMZ7jkfHj#eKurAs~68E&#D!)%f-p zZ#vxYeaNhuxI{V=pAlFPFn{*t$J7RCt;Vl}X#=ZkH6y8m+t)gz#uZRhf_n%LB|eq^ zTu9OyG*yv{cU5*y38*&Y9z?UiuG0G<@S?m;QRpTJNV`*@NU3yx@>Qr- z;HjH7X3{C1VC!Ja>cMs-!1j?Yvz_fZFzeDe#IR~*Xn>noSm}bbjxixZdR?9s*bZr8G~uKioU*xh3~sz_v>JG{TXQ^@Eb^12Z(9vy8PH zu~pp@KkNw3c~5DwU~)U^>*rKFiZ0iVm^Gan3Whf0B5GTDc!*VEo36(qv!ZKseqJVw zy56)_lO^4X^50SQF9?Xr#kjctlZ{j4*P@z8-qXXo=1;c6V6tx|N)Pdt1OW_UZ&rb6 zt*zTxN;T>xof*Q#J5+MTDlRu-O;}iD?E}r{}C>-w+rGv$FV7-80sE$ zGp3i8nE9Bx#8{v6e}f{f$N4Dj+Bxw>F{%lF1(>tSN4$a7lY#-ZY`Ii^K9$#UOj(#K zm8>l2KKT77pQtY{=T?XpuI&I_Zu)Z#ra10oriWd2!n=jO@hb@fU--Z2dh@WR?|l9H z%(Rs{Dzt|)T4ig;8JYRHU-o#e>*C_#3Liep{l4$leQwHkX2tWe z#nz%V{)gdF_!g*)M97xS&h_VQn3a1AU7d#K@g>CX_QX#JrjifSNbh4sxEny?M9Y|IO+@?;TgXq98oFtw+jua z47bIA)G&u@FF^dmRn}phqK)LY?F>3wd-h5Ivr952e2fXIsu$Y`U#o|aAa4+idaq;G zi;svj`CW;#sH$sB$7GIWf8d)y2oM!_qHD<1(0L%2fAr_i?;p;HoIg6kgNc?~0q2Bv{_}CP>+<$(vUN)K;%O~|)9&7W3$6$oG{88PCmp0pJ ztovw9{vkNL-8kNtX*$F;Hwky8<%Nzq#@xhugh2?&s-iXcQRtO= zk+|8p|Fdn&Y@E>b{4GUWUk8S+DzC|)2gdB6vPKb2M>z@Iu}b!u`}jP5AwS~*pX^GrT~RAH+}U!dp*?w 
[GIT binary patch payload (base85-encoded PNG image data) omitted]
zH3qva^y%=BX><6R=Oe}!g$}-um4PL7iEE%#y_VNW%x+J+t{OFehy50$R-KTA!8@HK zqDG}P&)zF746x5HtH@*a&il1H`WG!_i0fWMb`JjI!M^H-?@oGTdp?L^PLZdwQoXvP zsje{1KoOT+c-EIMMEakw?U~I51x}rHqht#h$04f?) zpvh4uVYNs1_7knPn&xXWbfU%c)giR7@EmPzvIKCJxZRuv>W#~`-n*OTcqXQIy_|AE z6dbZc?m1cAL5=Gg=$V~{2CzBCUdSyREmw0p^V{9$+uX8|Yt9FoQj&IcfzA%b^1U`4 z1rWrjAGjvrSw6lIeL!R*ALw#A9ylr;r-pZra7Zzo8R1@by~9a;Sf9Tn0bo2JRY&zg zqcic*DZ5o629wUR5THjs4cq%5G$y0DYM~h8y3vT>w6s6Qhtl-!p4v@{>585ueRGKA zlS;0oR}HI^UVS$=Suc2O)7u{ZYYD$vO+2^vLjI4oNFcFfKk z0$)Cp%-S0qN+^m0`|9w(0&=olV zCEI-6%Z*=4^PRE~A+>~e?*Vw?j+lHTFa!N|sdZ-&#QI(xdi!6Ng=?JP>tJ&U==In} zCvb_7`(*F=ol52rM{1aW#HDKJ7J2^EJ?SFgiYA&gy*YEW2UcMkwme=3{#<^juRFJ+ zeH$A$7fqU7-LMJBHTvFNxm!Ae`Ihmkb!Q4J&3(5*B@YWX)=>4DqC47x9L>Cu`ak@>{W=gaEdBh(Lwym$-)cqk(6_yIWYbBLDzq ztou;}l0SP-{>2NknuY&0#$UIQAKe+KehVIsJVXSy?=~Zj+xmHNFiA$4-0I{lq^v~$ z{Pjpr%-9$;`}$#Alrrv>XaUH;z*mLTqIXMw+A>B-K`r z?*f2!OfZG2KEE{OYu&Bl6xuy;vVh()6 zF!zvRr!?LfBY(jlU&_8MXWnH7;7vaA)b`Y2&|%Q_g_76CyvzXaS^FXLS-?k)Eb%*M zPxJQ-j%j99NO1!`1b2;gxa!%gacuu$9XcwERVwzKStz5)6WL3WKNw9Ql@@5qm$l+2 z15qD77sj^#?JnowwdR9Fg2!RaH>$`&$|K#D41qY#NR4Vb(B(|lSA5gukPWcu1DoXw z|1t~GbZD43X*%Jc>=KYPPC6|SI)|$?@ZkgO{+JC?e~GTyZIUYA26z{dw_<(3kdzgT zxT}5AdyUMs{luBwe>;<|4V_HbtISs$N$i$Is{!kv$?f4?M;FN9Iwj}spUr_ooHc+Oh3kZh=3ja zg3~Ljb-jQvnY-c)37P#D$^yq$jSk5X`u#3dUxJfJ;A8joeoqI&O6L+Sj(6zKNfa~L z+2WUlrPMH%abdKs{4nnn@u29DUee3apSneJ&dnm6>}u}`&S;h44YLIsaF^YtGQtJ$ z!fYr0_+`XcvywV{jGmw7@h+NN@J^1;mn-BP?WDN}Fm$Oy@J@XnmYyuB|D*5o# zdcdcsXLT>ecDImn%gDqDjQh%%w30&4u`@HLNM5@`XfqKZ$t#h^ywknV1u?q)Shv*v2y5tVVOC|lnXLdN3)otQKLdVC>6Nh+G;8;;=uUee4BtO2 zxa)@AYMB8{_-BY0=rv-BELASrzvxe@(U{qD7SXPyDA7W&Ylt%OlHYFkbQ1T9va#Z5 zD4<#KAGAS?M)^)XIGkMYaoV4Nzt&_T7dKIAPNA1l%3nS*H`52B2aUSP!j6 zk%0JVunztRk*eQm&23*8;j;_f{*JmO^<~itmI_EkcZ&hT(D9OTX6g(uCIqd|HA0&p z7TDyNHf#LXUFSdjpXiitb0e|h_4X^ywey~p=ZRjlYtLJ57PVjP=koXmlvJQTc?*mG z)sn6#n5pa*wo^)S>_!0>u@(;4n5kp($fn7uq@C(F-{jIW;~}P=b*fnJaM9A7=;}Y3 zWp9=ht^%21CefSx`8Q$tDL@=&8EEh#MQ7U zfVt2U5AokxmAT!#@h?C)&K3!relD-$1{XD0EYF%Lw6z39koLE{m^(c`$C$;jb>CfPjgXe+FFs^VaN z31IWdTOO0EWzgS=XugByGt`enRv-=Ph;y(L-O@Mi_kYBKal(m@!5ZpMh^-_v!bm4{Tb^IX(H`4=~aD=yHvbR%oZCF+9sQ z-xl0*YkkukJpcInKkxkiyb!R=ROmhNzgI-+XsLI+hXEzw2kBvV{sUaI|Ihcr@7v`y z)A16-2Lcj}*sHz!q~yW7e|!yaDF0tR*?&K3HqB}aR3jJYY|eFe4~;ODgi6p- zK5ZV%`hqTM+qtHw4RiA>S$*#F*?^MJLJ&yae{dFp*QAQr+U##)(FZ+@ROgw zTQ^<%J972Ez!R=ZeGlW_1H7G^A%}Ce9u-%n)H0VNW@Nzn2+xhjRP()11_iSnL(T9% z(fzz)w|Nv~c2T<9{rg21vcmGdyQAi_Spsv)qXNNHzkFm*OxgU6xY#`oW(YhhAR2yV z4u9*ABkEA^3|*p_pXTpSUi&WAtnWPpK~?m?fG~J(6Eice(Lebr+h-q~*2q$ZQj6|cS7oaT$vGlL= z8oDPQ#laG%B*UpQ8-9RL<#xfEAkl+Y*?(r1mMgh7xk8;RC4Vq* zVf^s%k-B5=`rN<#N>FjQwwK7SR&OZx+!m6i>{^23ap0T9WK*HycaOlk&9}GU0cIhP>r81<1+a zIp#3oLfE`?qX!g8F;tEE8aI%SME|w`FGz?VT=?{OrrA$XZ!7jA<1csen-oKJ$Qy%< z;m=9C{SiIc96n)dDrGX9lPQso`Spb=;!elEW(jUAp_hIZH5}+**b-iEchV$fe7{O| zB0a*S{)ttCxF*ur9{~}M1Toz*F<23^Zz2Z_xLZY9)GjpB6uuN{wcH@AhaxPT_}z_6 z?|u?;B>-U&8#jy0Q8Yqb9WMB0#+6oJ4!jhDyyh@zSr0w)K@zlf1MnJ#_$&xJl?J~G zT|JYa=D2HEm%ur0Uh)}0XA5CsWTFb@rz9eNO>-oWTc$V3gRJQhiB(0pX^GoA6L}ez zfz8EvZ22|O)G)uV2RYR4Vk{4oJ!gz3uMzF5$C$KCG0$4|tA2cG9E=7f;dlu%P`R?^ z7!Gm!e|rH?`qVX3fxO&1ekZGu#vrGd9df@H;rAY6*9By}quX3&Ry(^`IITQG<-eZtykG-?zPi|g(_QVaozMoY*i2r#iS5wNK73TlRRGVrNgXvq z_dKGTj@74pH~R0nS|7J+*mc^SHfKocxDAid;>0#p&4={}FZ*r_D>e7xt|#p#EI{Pv zk?*bflgfKv*4jel34vFImO{WURf?h$RK{R;X6@Kk_p|jLVR(8RrfY%UyV7v0i_z|& zPNv8vX5A@wry8=Jx{c;C9Ptc0^ya#uCAf!x;x0@V>m{Olmh>Ko#mg{(_fcOCzxHT{ zOQs@mP9gNEN4u|*mbL{00gc%f&4VPZU8>ndF?BmLFC2%ad5{-D z#ZllBI2nrF$k`e14r=Q}roFnvudHJJGcyi5u|=@CK)x%L@2;|XFEao7PpI0bN_8rH 
zyA+_3rQiWS{5-I^ezLj~KYU(ZDY0?5XY5DJhU+){=I%%!Exsha6xj9#f|E_&*{*o9|Z(r^)kQxcF`sTs`477jfohpD=6V8hq-e>{~Wr zHjt*jl7(z5rQ!kC+jPILDDm4_Z`TywEoSDnOMat^3_Hf=BnP6GdL@>V#5$)~@ z${~u0iPC0aC}WAek|YeA&62DwrA1SO3{Y;Uq(ruJPQ~tHt z*G+ER5Sh}*$DB>IYSpC^c?2`Q7f4s&S65)3x*cQ{2`6((R%TcaM+d4f;=tVRI_j)8 z0=L6`Xh_YA1F${cpT53rcg~jKY0Lv35%(v`p+Pyrh`bwh?P*=V-v2CTPPJH5#h!pAaAVA+r{$YzbqVhT>wPH_MyvDO(Oe|b zKMsbRHD2wodxCOza$cV^PdIJK4kH}eQ3Oobdb)8iAD`f9-KtTmmW=sC_WzhVC{>X~ zkqTzAUo#-N*m)nxO^WT zItnX%sP&W3bIny_n2>eTNX@tg&32%&-9)u#a5;>w0zRqyflx7fiMHfekmEJ}b zy5u~;+Je8!o91OJ<&NYfSM4ooZ%Y28;W`fFv73IfLy~Ga{=30y`E7;|~N;3Gtn;?9j0uk32{fo*2V(#>sH$ zv-7NZwc_h)LVl^0a{1O79q3*bkjkcfP~p32XL3*5)VquFSPrH|5vlC$*hih7e?Gbh z?X6~IGFBO?PAHacq+|~}oA`aJ-`>k}dl&B#h==?jZuT9cPrF*ZoIi`2>*c_BN=*xT zk5Cr`-D~u9EFJ9+l>Sl5%?ZuGWCe0fCx9_~UI{|3;J*aqmt=x(m?xa;FH&0AZqCE( z`k6$R^~EAlh5M8}z`}A7Ty=LX!F+89Xho&Qb`90>i2T6rd5lc)YM|MdAHV}~=~U1~ zBo)l2mt|Vv;r$?;J}IqKL@%I&A~oy>yS|O0lD&D#Q=>N1fq1b^8hkd?p!>Zk6gVnD zCUx!OV3b7EO7Yfr%KApfYcz?>+U6Y1-7`A$=!_A4yFw;vN6gd=%>F58(8qr5%_Zk~ zgWm#`%>RrCV3PqKJox-_aJL{QzjuXGs;k{p-&89RvCY}uPkV#Sr0&C$}_bu1< zJCQ2>AeA**wph?ZGj7B_@*wKA`U@@ixb}$99L3VrI1y)RA$#oxPxWnyIl~Q!aq+C~ zQ%*hP?RfCf?Fk3OFacS5kMZ%-O|C}y_b5QKwuwU7J#v$uP?AA*&f zW(~#bnhb5y5;ML#J@mW&HH1kzg(m?4$H8S34y;~pX8F0yRSR7k;UkMP{glpJ?3tSb zRo^&F+5o?+nG4p< z%<)ug9Ke+YD`pB@@-00)B}(tUVD1dY;7oWVFUx_BzqKJ<2Ul;&_}yLc)dHg`0bsBzGw)qayEUG${R-R{;flpRY{WPFfflS z2$<{#!BNZn1vjb-*3`T>#<7B7J>?K&a1{NJryQLCA;!uTm`^o;*paTGfX@Q{My`!>Phv;po7Y zXxi`u)D^9W?2HrV%o19{slmY60x|PI5(h3IXQ8-%w2Wo|sN~vZx%g$derlASZuqr`MDqd_$xMJ6$dB{v)5VpcIykl!qw+v$3G);b;*Kn_-?@2}*+ZC@hRy)Rf_P|BpO0j0vpllm z6x3AiRXZsEtX-`=_9QY419ynTXw#ZKYczixw`FBr8HC)*Sis!!+Z?et?9kFH#4i}1^aTH zF*}Jx++VI=l zvf5w=S0p{i%Nh&!BD?MJcV9P7uCI(v`hf84r-R-*r%TyB?V+OOUuQCPvyHK_vc((p z$djRw^>1AF^XM?H5|a_ASKEg0}WE$;?e^2&gQ77Wj6nd9&ig!Z}Qnu}8JEou}RD!qRQ!LFNeFN$!>&2g*6 z#Ld=%ZNasy2r;AJ0>7sl6Bm%8iBx>$=C!LtUSM^R=4-}&->%gH$nJ6!hyo9vObcRR z*-9(YBmEVr~` zHgH)3mA3|-Y~EQvP37CEo*2nJnHf%O%#Cwr9b~r_`1K1| zf-n-;omv8Z@fN1zakvwuTfL;GR@Z`Q%?lGW1G-0q$2AaplrU%G>Vhk! 
zn^$B>x|;##_szdVFoD&wN5B4RF!+;$#^-7j*IQ~yyw^9VN!CdhmP=<$^N8Jgd@w>oP=$-V#WXBH9 zj#q~n-$Td26k_d0bX=syA{R+558$Au?jl_JgSCXQ^0({nySwW^tEBriy!S=mNpd~O z{4Oafb}Fu%)!#$yrDa6)LkXTiZV@?|!p545@?p9dt*xK@#l0#s;!pINH=^ik1jyq**NS; z*IfNp)fBu%^hf=HoGFwG-pjunR&I(&CiYNg;yxe(rGU0CtxAGk4`XFYmLZxKOC`{h z!xQEX2a6ruYTu_Sy`}W)v+^J(Gd|{WKC_VFLRE@qZ*o0b&HVGBCFU||#fPhi)GI8N z1!kRvxl=;`P%h_(0z^PuwmbG_WM_$)KikiuI+IfF<}OG$9hA>joT%8WPP$h0 z2{(dGjY&n9j>P5gK5cP-^KmY(B>4IV9K($Bd*WkLO+(7tK3*#CeD<%$gXO#7$ByyJ z__@x|tNNNGym&n$-UsDgUxOMln=#aNKi)v+Z8Pk)UpHk;Om0>{6q^ZABE>l8>)pN# z5f~HZYDKbK;Jc~*h%0wyQlQ2i~Op@okml)_Pe?bI%L}CzIm+ALL`hBSAO`?Hw zk)C4`_Q0@?!jnfczcYRbjNKva4iH&_JQM#Pmiv76PmCEhPWY9t(9aKoJrk*jxD6z|1->m##)e0+`vkp#be}Wo zb1Yk)1$8zytF-huTK(eG5a%nt9{p+2y)7Bk>Y?OFi%9wn7xjnb^2U&0a=m_|nuIo` zVMpi3QxRe?u;^Aqk-d(Pe+Px|V2Z7{x($o8#79xCcysCRol#7GQa&4hMfrL?d*_jzI3)-O^NLCCYjTHPP<8}JG?W0fdS_C@WP5O5aO#y-@Hht z7#)+k2DmiW1yvA=JKMWzmS&ONL8p5=-V(u{h{Q{c=rb5*TPE`ajoKm3*24&6 zkvBYvp%%sTDZ@YeGy#W;iRE;WI9k6lIsX07RwM~h1_3$a&v=5cdo?%w=1emRPjQr*@!KqDS?gwH-5BKk~S;u;LU3QR%0F9E(d?Ud=pU0>0hC6GhO?p2hmbI4*B zUKw7-Ul@V_DspJsO_`@rxSdr80O3GV4hTGV+h($U!v||2yA0u9c{zjes<_iEK`Ywo z8d4*@G@Hk9VgJm7@{!&d^{4Wk`9GJI&G!X$F(lO<)yTj;!+=(#b4nR^Bq2ss7=&ruS^hEUUn417hWxp`R zoXX^XKCkRIbNIn2CJ`Nd#+!=*ye@kp^ONwH&O{o4;-x?m=*C>&E;CK6FA|G^(6q6n z%x6~N7T+6MOOB1jGdGsfwRn>rivxSEE8e?nzMM;&B`i{AtTxX)$D$pr==zk_Dr&UDyhhBl6@@QsPMosrF=GJxaI-^4pS?^bvoAVUsp0qz~E-EgxKOSz+Y0EyDIn*&HvC6k0MC0g@=7t(AP#JF| z@}%>>aebRlKYjD+o2sG0Lu=V^?y7N`%j~e|UQVfRLpXsU!XHh2O*cgMKT0Cs!du!A zP%Kouzf-BQMDgNVvrrM|4PT4`j^Ku9po=Mr$v8qN)t=JY8I=HhhFQU%#Qe_7awDL8 z=ODsj!N*RtIi=$J%YxH!2842=~uNFq=0 z95d4coChH0Sp#cVS$)9H)!EB1O%EObyl%cRmUheUSv5s*HRgi7=4}913kp%B*L4Ze4rZCZ)Y~Bc-Q?WJE zLrupuiuuw)UM1i>n6K%p=kHRLjaK(F*lMwzhcsp>_}-fAwHD zCQ=-8WtO@~gxMvhho>?)`Rw-ItvjqVH1FV_axVzf0|IH}&yy0oZA z5v!fs$WN%LgP+Xh;vVTvJ36d~Ak~4zdy0#uzPt&00k9h-g<3%lZS)X`)a(H;r$W%W=|dXS5@y5RP>dDq(H>8+r>>M$&B<5u9Ub8dF%D{e_X7HKT?#k za=bN?0Ady%Dc>n4x7LMAU$i|DK$ld#9k9v*&X=qHu7y!U8!71u9 z;Qq?Get`I~c?CoO4Q{!FhBE@6k5NBpHgzxkaRzzEG$Sq92_e+YY!87&_P2WkgqvhW zUybjZlCl;KJA}NOXf{k!vR?ddQ2Q0>eX># z*9>_VVp$#Cg|pj65Fy_rs9=`T$!F`d8dG)6K1!mcrcs5Ocb*V4seR%bK{NyfK=_>0 z@xN-00nm_-wgFw9oGv=%j@|Rv&|B@hn!i8tU$a*K;kgv$f`uV z_pzvnyY(L?Mp*0XPX*lg`W z?gQA4nq>M7>TrPb<>6_QlCeiDlmF3Y)%5Kq6$xqT(aHOs^tQSlZ|enz@xstigpq@* z>L@v8h#mqTMX-Ug{6d6s14OvHH23j9l@d5lI%HM2d;<5gie0L(1lViuOP_v+)VbqL zDpK+!BWYhM#(Rtl!I~PqbYlZa=OZi0wNZ?gKlDlyRe(5$G=gV$QF1Xov(u0%Zl2;r z_p#;hhbP{FL+?xo_OX?pN)qD-hrn5emHpjUH@JLD)d=U2%@U_gD@Atoct@FU?nEAV zEq04GzIvEL5jitv5`D^R)m=klcbWF#Px&~PFSc~3=`zb)Wek5vxH%hbZ4#q|#4>Jm z1WPqcmEnF?`-Wicbhh;FL8g0yN|@O)2gf)gnLHH1ZMYU`EsUYWk*ZQGt%MOcnCn!M zY06>Bk7|0=|8fz0`}alHc0@xX&PR9Wv}$!bD40n-h)ENaKEQq7{1G~ORx?eXbgw6R z!x?QkhO`nl3mM~XITc?#E&tR z9qD%XXiU~kMo2{5Q93>i)8_l2|79w}(=K3K2@0V+&?Y8oB}~TN!UbOy2N+m zQpJ|Jh*T3c2=}xJh83*>gde}tC>>b+Q@DDA9}mTu1ds$0Q@M=?I?0Jwo5Fxp0rt#z zi>}W-tVBeV=#MeosU%fAn>YLgL~B4mwV*RL09IfzZXB#6ay?5ie#(a@0{DM3P^*yN z=i!_6GdC$3EZl2*$%$>AqhZN~{TMdFNl?zLFqprqsrDZORMd1Cn+q-Xv>T}XTGmSC zi9SR9(b4LkOapdc+UjF({U}$p%<2spo37Z;bNei;6e2p_zn} zeKt;yw~dw%?rQ}#U#s1O(kicNLVv;3N^?%ka(8f1YrqDr*uY8ev-MLx-R&U%mzIba z0H5l!qY*OPu!+4YZ=4!i$Ch<2zJy@v8bPE3sG<}tr$pM0LCj!!7P2H*ReFiWabr8G z4z-1U9Dv-Mgo*+K<0n1=dtel(ECeTrMJKw$kczg`MN<#paC!Ln_g$};3!DYGX}Kwe zT$T+uUd24;VMem}6Y2qsuf6lhilvBuRXX-)@-OaZ@totGM*L-XpON&6tnd?iq*l=V zN^p6VJqhULE(Py-Pq2TBD|R0VW1#y4NU1zTEcglTeYp0pg_)LCT2{3~cVUP+lbOR& zg@fDECHm^Dfk(KTJ7bQb0|}!iqN*eB<>HqK+e@(WeQauL1B+!4Fyi^MGuftwU5^8} z4~H42Dg(q<;1_Mr#tL|LPKRAxy-}|xAD@GwKruB|U8A1|PMe z8?xxDSCsxzvlumoNy-wNK)0)50KI{XPCZjZe2NU@oLlI(Or+908Mv9s-3ERhH5F{75PfR?fie+RCD58TlrX5uE%g3 
zx2fdDOus;^K;yn7e+hL?8%u4L*DvftXj@7Qa&b(S>M7#2X7F*bmp<^juD&gBHuxI^ z`9?@|e<<(eVwDnG-ZhU|%*NVH&75jxzz76$0U&SOQ$n?T^Y~|o*BTqKuBIr81D_;2 zf0XlNl;eqR{!h+WoY@9?2Rp_UG4O3{TB2^Xy?7$^~@ z77mAo$2j9T!GoOZ zxFl??##L#&F~UY)yH>uy7K0960-iT&yXZr|HQ~ku-S_2DaTBg`cud*2>1%pOZ4gq4 z)f3>1gD<-ArAr{~=gfX!aFRfRJ3#<3QH29_2gy6$gtMO`C`IQ!txx(a2zA6HIM1++ zoxet(PioUkJ6EJFqO@$;dJB#PR$uPWZv=M2fJUGVGf;bwdlg(Sp|-l$4knQ3bJ%3v zxm)YQfI?;qBZ=IRkuTIPT;nKjGoH?Rq|vx<+Z(&(QC(=!u=d4U zg4@i>&{}-2tur=K#+XWg8bkcQk8qcw&|uGe1Q4!!H-@B}kMG0_=T*QD33`5*op3W$9bRpbb!+Ro}Pxac*l*GNvR1;17gl$fE#ZCe^U5WMtrXTVtb2=3>tHPR* z{MkubN5qc+(4qK6-|0LNMTlo+Qy9)BO#aIsCj{$O@Tfn=Z;q*Xl@$x~p`%@q$@80Q zSa7dy)X#1A%uG6sQ*MPibg|C%4VR|=E=|>mDX8L^#Z4Q@NMO-!t%@IgJ#64RaQc}v zO7u9D3PSk5RI0=)mVz|JX91)@-N3;ha~n|TcyI=({cW&k815OKXZm!^z`DxEdjd^v zo{3Otu4fV*t76~kusXu;C?X9KP{tJ~Rd@#GG@_75;!x;Lh@ft`s<{XuYXhfMJW+Js z$vLM?rsFa*(IZrOgx!l1e9uIX^Yh#LE2Dac|7{I@kG(H)r#8(t z_kOzc@c{Rz5)gQZqRJb#ndJs?UP-ez5xTXrWGkFs;u4gFUv=jd_xlG1Z2y37_N+(v z*N63aepd^+&D5u546_;5`)bN(ZXe@~kvohJ4iM9f3(r<|*^%Yi{Crh-sTE5mR2rv2 z49l4_2oOq#=^=ZbBg#28mhamL$G5`~o}q|$BGw6fJ0Zxl9&Wm@t3mUvtG{3G-L8C3 zb6%;U6T>&kN$A;WhDJ2K{-PD#1hPMTZKU}U9V4mghC4|on=8c2@_-)uH&JyZw)w{N z0Mp3z$fF&oZ>ep&#UHMQt$6;jmO@@k38YvvDBT4V!I^MOkHj=xM#4vUW&nyWeQ%T?YwFOjP6j$=Zfg&!47u;BEfrlQnGd0?hXkpdbQOLMv5CdQ zO#js5?hr<%rB-$3XE1Y6$0*_{(6DxEmTX|1(A&*-4*bElxL{etqSa9=20^LnT*hbg z3pkGH{qkfFCWvPh-Zg4}NMr6W2yUPPBbj7T{)zm$nGFq2$#l!LN?ZMqC+%f(gg5`@ z9^b1J?)e(>a!sI<{urMG>ek+sppNKst4?~SDiFbSRhoHJ2|;k8lI1(Tw*;vcKf!ni zH0}%-mpBTKSKzqa=)!(el}mU^L-tJC?&}5CL-GE_W!=VxE=(y!-h{8mQexj>i0bsE zU`LzLW4B?WhQisu&RUk$w-E|rp-VdXC*v-Ih1Ck=H8J-pn3^QaN^DU_w^r(zwEJ}E z9HdSs>16Elv`YHSG&Qp+d=^289#QR3L^fks(H6~f`$xc(rObyak0v34VIR~gfKv=| zKL~;l&5;!I{2tiCo?5;;T)a!=7#X%O{hqn#(mOq*$0 z_(t^`GN1Hbf$ovI3T(d^|JRax*~v6^=1`pMCCPdcwiZK+%1^-_3{n#tXd`?F=bFv5oMXXREv)LJpDyAgKh+gNM+M_Xjrs?nT{V30Uw@E`tk zxHUb9>Fxuf82$|M{Hcy;!%w&6Vm;fWGOiJul1092eLn_2whsS&_u0|Lq!_GbpC zt9srAizhwXr;LJo1=nvlZ|1=a|9q|Cs#NHHA)$%14YK}By%ShaUj3vxiUz_9a{PBt zBJov-`nu_(yEu6Q6VZmyX_yq=}#2g^k@WEE4;jukY@p!xE8xNv7(uM*YeCw|E9SaqQ|hUB zcVoJk1Y!oNAYXTS(jm|+On$b1vYr#}7q=3hGIMKbd-M6!oc5XD0YQJPp(gfZ zNQp@Is58xj%#j>q1!@4c_~ekK!!m@+?_&5#?2jk-UX>{j!9<3cpk+h%Q0f8tlUhuQ~W@n-gj4( zrWcVCu5guVFXDB3u2Jp24NoiPgcG1y0c7={H?|rKzz1zrPjc=md}{O!c%Nuopsi0r zi6RrU=m|3Jq9yaW-P=4dA^V0j&J3+dDLE4dYQv5Fb#zqH-vKmS2|thR_qR{PjLX~4 z1U1>Ws_c5a@lP$R%XO;P)*jp3m^t5c97yaiZRXET6OhFS%RI7!-abtKnDHxZluXia zRnTRDPCNwgOoL@hPdN!h!mF1$be*?-v&@|Z*SG3JarA6I$w9<)HA(MKR?kH2N@RGZ zH=|bMs^m(Ry587gug|@%HYAa{gm-VY zUII9QIp56XALicOI`UI(g9o#mr)!$Vb-`dQehl$0cW!*Rztw+czcsgRPxk8(Qt+*q^P5c-rB--Th61&#)+2ZZ!4WH8+y88=1>4)-L#Qr`lM`mHs?ETGr$MT_1jn#3-i^6AXY*h z&K!5h`h|?3J9}<>^k~NstQf`B)=6i^tM0iJyQ+9cu(0Z>$xYMq89wQ!^~1xeP*OAg zfp`N2%-Wk2N#rNIAh-ooOA$x1m``654j>e-{4|}=w$SW7L2>&3KW%laOz;c)h)^yJz%=S|6;(~Cs@QCtc`h* zEsN{@l_rL+ziEnoxU&FtA~|7wqJ)BK)16Z7X@aP$jn(QmI(mi7Lw~(8`P&qj&Hs02 z<-_dH2qClMcbdmXd}K*p>W>`pz9l-xwrr>8_lJU=VNBsW!gK1sq; zS&Yb}6;y>QOK2lD3sUL186FdnmK!1HY|=1qmlCzZ zI>l^6UYF*lh=X+i|6~kK%@tZEx6u&Iem;fL?sDK>-xufC)$SynSP` z2k!ll-<0xU()VD{kOr#0xG$;}P|XS%(fO!Ca5|z|HNME~GqA32D;5bBZzsq&d~ko; zlw13pHm430s`FQyb|ha*PP8xFIG6GhN`wUT+27NryNU9RMmuch3&`Tjmy4*9a?I$D zZ1x{G0Tjw+$Jf_SpkSsWA$M{vK=?3Hy#} z3`gN8V(HzX_lqX{D2feLlQ1~{#`W+z|1*(SvoE{g!T}zy$R(hqAEup3vlQl&-&R)X zxzJ(D5MVbtN)M(>>TK3ltfBcm6FKZB_b1$zb5si!iLC4fN1P7} zoPB+`4XPbi4ZvPCxsKH5WG#-YeaHP<2zS{*|AgnD~lr!X=79~B?o->gzEKK9n82?q@R0`Zc?_R^J zcC(GsU1RzDWBoHVV)!k?V%_}WE@ASQql%wRBH_$j(?HF{mAZ=eV!|lpS>%(ncT1MU zgF(f;O4Uxb2t?_1k(7*d6X;QbhzjVj+$3Z=^Zr+QY~QP^6J2Y_bsBR4V38XeLavU8E<=m>18V^nViycvjS#Cz zEyCL>(vY@;@S+`B^W@BE8X(gFY>6d_cQ)r|Ns4iro 
z;nsU8VPs1J#q!Q|%BTwfFx(w#vm%A*+h9eQ-=LQ*G~l96zRg8_#1sA3xphXrUvOio z*0nr^yk-*)+EV>r5H?44{$ZNLYSS2$yJP!-u12EbsS|GOKDhkfr60drOI|44jehn% zDlkGISDf%s*=DOkz(O$lc8VYsjLa_5;>+juD`H~MISvJzfN^mJj3)`u`uaE22t}e^ z4H!RB7J*wYtnL-XqeYAn=}d&E2nkq?hLwQO*>04w(gJHyNIhT@U6=04dr)M73@JYh z0HjgH(ia%9o+{XhqWhZa>EMbx410cbcQ9QhRU{P|KMUmV^x4pU+iw2h<;7jeOD4+^ z70V6i!wcvDG3cQeZx4hs`2;e|J|EP>BRCTQVYIMC3J{x(Lr3kFp&FVBRkq@TnU;y< zpAu-(7R#n)tRoVwJShkFn&-_BgA<3vm~j^@lp~l}7WnU0q-I9L6;{W^S|1kt)7PT_ z4G^e?oaqYdeK2liP@pKj=s|+!22BIcd(dq+r$5+YZZXLKE>V5iHkDaGYZN<98SMKT zj3%u?`(mMB=YY{W_EbPbYWTL6ZLML3a}u7oS>sj=ep_5zwiCz&FCL$ z{`1}9Dj#G1*BwxQVabuX+-%k4ASv+$dv$8APS3zyG}P6=X9cCD$Tb8bB7%Z3R|Oc} z&*u-m+rbt>2D)3d*MpVBvMOh+F4Aat(Kz;pnEo7v41YWs_dhT53?`2fZra^!FApa_ zX&;=FAdMHdEC8;ccAa+v=)!YcQNMt!E&*#q@pFOHIX{aS4Oj>5@dw_c?1`cQBgPQP z#7B|DM^@C|MZkh!lf0ghEgmQ-)`~*eQqEpW>*ehMa}jCG*)1HiU7@1+@EIhPu6W%C znK-IQ%VR`dlXeug=D{{bk&BA>qf1aRhf<@3Ov39nPfxPFUC+E9(&oquCQ zC1&glASiQZOWQ9*=7*mznXZ9gA5L6P0KCJzQ%FFsl8uxgVop!yFEr12 zVUmtHsqLsgN*VJlL)1-#Pe69Na+>U_EJ)X`8uEmWYVX6>jOYBL6wm0o>f!$YIO;I7 zZ8-(*>KDOpZxpL+#Xu&dLdDg*ZDVm%tD%e9i^j064X>f1i^a}g<(0Z60pmz+bJ9+# zr>ZY_u4sFSs}aCGegfBxe?&|f`3tx&(!qF1T2%BUtX$-{=- z7z6YT#p=9qramY`Oa=H=WF<}gq@7HVw^q1gk_ci^-&}77c z&L+VWpz<$^t$%{Pg!VUmdM1*h=M;jebCRa}n&sK%K=U1=nk>YYSXRIdAK@YEZ4mQb z!b&WtVJVRC0qQ77%iR+WDCDxb!^U1VVtt2Q3g8@Ner-JEn>dQ>^OmgxP6eO_YAHz- zpP0dPa1bW&b8%)bLlURL0Fp$dB}L}zP-7lxpBA4-$OeZnp7ODqG3roCGVUQCS7FFK zlpJ?dl`U)koG!uOcr>|PNprtwD?}gchCzsf6}GL22ERpD*TGe}xvF^!%w*{>2@zWw zO-YuS8aC)wg9O`KiCJ6wEZ4EUbKvQXc9rClf#>sTy1j^-GlU6@F$^~1Et8i_1Obd@ z>yqwIL=aYMH>t3ghE*m`2YLFv@mHoBpWy}#i|Oo!GeL`{{U=}GwsOXOGKyEGrVQuW z`(=Ty|3f!E5> zFL3prZpjJ;gnM;0wYV6NDL`ZbsVP(Y2<4G=@fWJ@VPa>RK-vV zNE>g$am!f^v!vU7GaDC2t@Huo)z*ZgBZZN?>!Yfol&hdEF?Gm`vD#zdJZxEk zfW~AmKI7_A0er^~w5>z@1V^;V99{EjjvtJiMWiWrb$AsLE3cHTWx+3c_T`8*Bwbc; z2NqhyNO(cpXrH+6KF&2ykP@Bl84FRg;=FKR7UL}gU|X_u@_m03sJr5E%y7;?Lzq|W z_lWaUhI*K;9&kuZw{>vKeLH^K0N`87g{S_@LeqPikhAjLFWl?vx2!q&Al!+jnZA59 zJf`HvY3YChRArr-XCb@KT&bVI$XnfWhzP;H(PKx%Rphtp67YkdE#f0+oK`Zc%4~1g z_RHuTL)xk{{1py`>ie!9!MAzKP@7=NbgffN@eLt1S?ENjcNG=kx4&(YFl=4JDWoZ2 zdv?@N91i~QS??pDr=M?zyPAU#m5+i;2p#P?1iNXV50hv;0SxWbIoDmM0Aec+kSt$0 zaspY{d*eDj*Dz`g>~5Mtz;i65&M5x|mhTLHM1C4*yhwU0NIuWLDVbS$`OjuBxsTLv zgU?Og_scHteNOM<3;y_droi@c`8&Ljy^w$QUzwUA%~Y&Yy+Q#JMFm3C9gJd6~iy>;oJ3GhYyfcC$i z-DNpul)C}y#aPf%V=N2^AAgGb7tx4e5}tCNi1DL1;^^?Jnkd{1+1d5*RC!@y-x>+Y zqz(xCNexn6@54?1@gMt*pMP@b8zXU?b9cCt(FW3$RsjG@>aIJ(y4~W#3Z~YwpAo+d zZk$uIjC)DO$x+lUfPPG$gBI@zigpnXZ;4dhYvpGK^VR_zF`&*cz?~ZiqkOwJ)BGJc zxNh+~F;K_>1k@o_tNmW}%IY_V?aQIKi(T#WkO06O2S~!tmvmOZ-zjEB!FJ+SP$vyR zoQ3+`5tk+=hxgfR_JA^eD$CYLfV~4GTl4UQy_(4H07mR+-Nq}@Ygk57$ie^^{X01S znfX2ns$Xl;0z=+%y8K#-PrLT2ajFqJu8Moxn0cmTyRUtMWGRzJT(X0oH+_sJK72R4 z!<Pl5sINwnbeNEH{-x(Yd6* zIh)z9$=o1>$%h%)6PXxKir+gpShykhMum?Vqvp z1RDqN$%l3>*VJ^#d}awT^L={0&yn*f z{<|DIyMh5xY&e}6I8LNJQQOs_#|*{+_?LyJ|2;^?=*VzU?KX|rq7+J)_7ilHw-3i% z8seNt!%QyS@SmxYoXPOCLil|OiLSitq(p>_RyVX%eyPL8;z%tP~4}I0g3jQ3h-C@ zRxsj;dPPqrhF2{VBV@6syTGnfqKyst)#w)(A|?kxK^njZIm+4U#(q(u@I)AiIb#?2 zG%aLXJAh!gGp8(giQ6^V^vHI8dTh=4`T&E`nu>T05LY(|FsH}~@A+#mrp3lYXA3BA zeOulI5B&Zsl(#dgqW|zS5hG`pX#e7;yyGZj%7dbC9Cdc1U6;fT#mBy!F8Al;e#0un zxc7=uHS^Q4Kn^>VlpIKTpcet!XP;C+i*!hpD%|G=Um?I|co@|k-d6Y1tqrayG=9SB zxP~UGy|AJx$PX6Tp1r8;eJe(swIUx_~CJC%x`Q)PX^w%ZWn z((}Iz^z6HLb5}Oqf!W)`6nAWB4zcu_g?e~=Y-CSIQQw!wRduHDtQ9DzslkwpuqGXexEn*TYL;ZI#bb$#<2wD^YE>-7MPKX9>cl-^X+?{7l!$FvlkOr_12ZHV!%Rw28q zGu|_KkLj9XwLrr|;V}O{E;8TLn0C0L?`Vj+fm~~{_E*AREM-$OcLQhJORmbY11Woi zpM+Cg&^vEvmhKteK+IWxte+?AIEAxO*#>c%ot?*@mCyuP?Cuasf*sf5db`6m3St&K 
zv0^F|5J;pofb=XNr_Q2ym}maEA(Vf#LL0e{U+;K@p2%hi#y)Mo@}H~Y*@|~~roQ7T z+OE9f?Z-BSSCkY1nv@!NJVhqFsDiW~A9hTYni!ME5H8@pOi^>ufU7{`sr&lBXyoId zO&1S=ynQ~$YQyb@Z&i<^EU9E-GqcK8S(S)+lrS1Fmw>x9Bse!DpfwD!FFK>Yim4ph z(qZ{+BJVNIjCyRz`K_tW^RzB`5#DcnLpWfX4MpC3Ewa-d`fx*2>HQlixOtYCBi2Dw)h;S}IfAoMe6#(@%q?3H!S@Okkj;XvNBrD#`bT4f%E zbOv=%1~|~DH3vQ85gqXozRa$$mt!>H4r7M3j|oJwZ#4as7Bp;vX6euVDUgS-64XI_ zPNYxz`}~KU-e5J3CHixc6Wzr9DI>_qIgm7P_LD2>qMStMPZ?s|-yj)Bl5NAK53@K6 zzX^4#`!qbeK&d4uwQF}Gp}IAiY*3Tvn2hjFs#QUrTXoJ`!>?$V!@LW#`|_OQ!&h=Q zemm?lo)XnFJ&VIN#HTf(`avPE7zr}$+M-p|-G&a6G41e8=qE&H|M-Cm-!4T zpTVIWQam9otpN6&2;i%S`#0i-z^b_maZ3H?#jLBMk;!;~l9%35CSmHXjuCEN-Wbb3 z2&E;Amtb*BUasn`$hPZZ?g@5jd8_+HHAc39Ox#hQV1jfVsrYHr_|*S$%wlLK$xvj9 z^uFqR+!s=lZGY6gayc`1cSDZBa0&Ginx>_P1bYo&!;wE7iJ;H!Z<^iu%H>Ckv~;k@ zHound@62|4hzCo}-b~uXbju;4J9Z0T^gQ`O{sjcasZnmsjQD*c6Zy}sq`I3 zOT)W&JdO-#NFs_$C%+eeHLUp>k<*K*C2WvieGmeCkP8O*VW3&rb&b5GSx>}TdX?WE z1#DC0rA0VG!Q%K7N{JT&^>kmaZFV}TDVsWff&T54T4{7&>}kRj^ooBMZ+=&x%9^9X zdQz)BUPlPu@nke4=M4GhTl^R%`Zvu}Y$Qq3s(6dZkiNP~+Afq7L_pd+|1_-70~0XA z>aSBwC2ay}8d|Sgv92LEqo~|-ko-*)$C4U^3 zV88X0Dd(cjtL{s*f`N-{%8gjFO?L4QR8Yr=B|WPNTtH0ywJ8uiH;t$Y##vV#NE zI7ef;NfAW@e19{CSeFQp{O?GWw4?OARPM&E+{GBRR^G0~ah&*d5!O68%%w4A=dllMxM*5(8=oH%&c1PyjGjw_Ic)Z@x1Nnxb1024n)#I zG5-7t$5DR*ervxyHGDN}evOpkV@QOVj!I`1>!4o2HfCEtOf_df9E7w4)VzXXYz$Wd zNzq~kOk>j*tO7wV%X@9%I}Q;sl@P(tt(qdBzR+IIJMsYYTRiaiQ#2JZOiKKOitBxa z#|?)NheATlTW+=0^Fgn4T|1DxKxBD^w-`Dr!~G_ z&&&`_b(5vB4r@BArlO0-{HTA%lJ-lyRe=lhLgcQnmaIza%hF9~YuG`36v85@_ktZG zHu~?*lQmm3_ZiqH*dvADm3Q-0a8E0UJ*%P`iT^~SOG2)mC@~e1W9mwjcysJ39U`nY zPRetmK(yo6=H_OcK~p{;G;t%>s!+z@jHb>^q%Dny0(xICt6al_={PH`^nQVX^uS~) zNqFMiPoi|6Klu)z^du`>#EIEUoKDVQty%3wVzAsG#c6@0aNLWSXT%aa&25RJE zvO3C*z9-$lcqxo))UXW$ky$zlc3JAqYE5s#G#}N0c>+0t@?nQZNdFxq&Q$o{2EGu3 zzr>5k!?0E$eH0s(=heQelB<~2JNI(*7~A;*lpdAgSfZlZmH$VvXhc-r#v_ng;n@E05MEw z*&OC~hL4vbE8D9Lf7K*n5PJIx@SqG1PvqJjwskJF-}wn#Jh)v6f&8sPc@mC!vE~|y zqQqIq^F0rdXscbYcMLfAc(NtVl5K#Rl(70W6^$DtM6K&`7Ay(lO?`%W0@zdJ zkHP%MV!iLpp!GL{IF*(W&J(xYn>O7bXC9=k92p3|Cl+@EU?>%u%9c~L1Ggf^t|iQd zt9J?w&sBd}8fm+BU@voN zT1bFlv4OL6tv2AuoM+<*z;wQlc?8A9D~v`1knRI%GhL(<892D#2*tZe8b(0C`sB;6 zffT_m;VAow{X7zFeHhF6|Gdj#IrAl!KbIR{+4XQ`NfV^fS6=KRm}S`H;xy5{{Bp|R zR2O(x@sUhrrqfMcX(fE%hNd%4!!qny%se+K5QvI88z)xf2QE#%T%yjM4s-$%q6X>1 z)vXmJ8XaIXyo;NS=x{r@dn4LCzk*k11gU&nOTx)MiHxY)16bwLQGvqSkh5gn{KfSN z#dvH@dF);^@8j@ygnho&`j4E`xuHQ#?ndqPpp2cfFAKw?$?RVX>9kI5zItD~{oS8q)_twyNS4kK5%uAhrxd zuqCq|*xODIw$i!YxU3h{_JZ7E?FzN@0oztMlP($Dd8Bc6hTuDD+QY`R_fGqyt%jQU z0Y;LSz5tlG>rOG9=O3QxO0TZ}5D_$tgJNBfC%m@%n7(C1f$(WUqP$l8 z`$z27+knIV;y&TXBs2gC{_0G69XLx(`nt*UMY?3NZ_0}J+!N-XKw zVsX2n7#c{^?E*E5KMQgfaYw1er%m0#qxC$5{?Wh|`2uo85U-l``{xFR!!m34Wc>Nx zD*tU$*%9U^lhkP_=wZh{Am0`qIq=z$6Lia&kW2q?+ENg6@9=2?nH{rVs5JEs)J-S%=ZnZDW*$v_A9BX3Xdm z3o<4q*6sIXc>OC&AAh&`?`f}Pv0zcYAM?`AppYDICG;vs4|p62hCu>k7DcAThvc}D z`|2QvzzKKs0#N~M#|E84o4bm$>MR?%Co|{N=+9zsSk1g5GBmscO-R~7#fgk8Q z9PT4g!0k)m3)FbA*pmJWZuT^?CVZiKT|0I^f-P09aI2vcl=m$?Tl`(>IQ149%VO@-Ih3XrlhNF<=~WbTYjUMEf-9g+&a1CV4-oLdunEcmzsj1#5IjFjeuKrP?7hce+HsOg`nmlR9XqyVT(XT-Hpj zCPC}XxVoFLx%TPSOD0+%!!w_3a-8cs(7F0j^RhBT=q5oBVR=OdS#I0QhNCJ(xem=j zAxtXVFTnaR$5>lme-;(F%RD(Ou$TAXRZ&gK!M?@#mj7xz>pLC5dEci(FNV|k$?@e(J8#Zfa26X2R?I0c?u8g#w zJ17P+K$uOUUnJSa`IilP*vbAW^U1%EqI`sFltphb>Nqd$@}h zWe!lx1u8e7>f3}6=gxFH{p>|k&0b9S$!t3-i3b%LYBSt(b6857Qnr(rSamDYLAI5& z-)+DjcJE4`fSr3@p}9PfomNOY#h_zVt|>xYDVW}BTD8u2Pj^IsW3Rf>21`+_93T;q zkdcUQCEToH!~f#9vro=Ebo>hTZ*IM0fPo&BDXUKxOm0uZP7z-0*7 ztmrEX3yvRNw}(cuyikG}m#aM=6;p*soJp*-#4vL!Rvh-#K#bdp$ggo@TbdZfC){ZE z+St?Eh~kFXaKievR(E+W;Kj3ve3kOc(xI8BaZh$%qAeNDK4c)g5AP_~;Y_R}e-L+Rl!O 
zR#i|Evk?hZSU7ft*SM^tzm-;DnxnOF5pG?*#BFW#Pgw#CK&no7t1#WJbH7CJtSnPb z)sCU`3bAwUiWlHfc|<%qxos}f&Q-Izv)zywkS9GXNKwyByQy%zESAnLr$yotorqcZk8IN~_PAzy zZYM9oyWsa|{Q=P`ox((M+1zygb2q`U4bepe40ZA6Q&#{9h~DutV5x^l_|3OT7nPfz z<2o<3e>Kd@l4$@J{g_*KA@TzRIz_cqK+&Ito;F?I1?A~AYeO)9mp9Mr^G4-PbIyB& z6xRp~-Y6}muVxS``~{cKQ6g+t4)Rb)`rQ%Bis6T7xo&M^wWavgyPa}4)dTprsp@pd`uEyt32idL z{IVa-cm8{WkR~rI@z!%t#vbsbynNpI((mE>{}cExTP5~pXS9u(7h5tAJLj$thsXt$ zbV0rY-;6(QNg40~Jyp#W79uVW90?WGzH9u2{=bQTOL6GEfRL3-SZy}tBG-N?U}VbL z-%x?#yExaGdw;kRC_Y1Zb~yMO2|f1g>&+AMTNYrOEPE1`3p)EjrNP$8=aYZA(4HW^ zudnYRIRXvyhy^8cw230 ztQiE@UKTQtrdoN-W#=Kga`X^$1EpqvmwmZjBks${yHBSkLikvqkx z34|nsoFgbR!>txuB#@*w*3<&Q5imeVGKh*CNCE~3Atc)fNeH>@1d{CBb6#i7hqJ%F zYws`n-D|(k^Zft6|8PJZA0CpBfA$v?1^!hte4vZR`sao%V1s|+=aA(NV}KH8z~$PnyhIUYZ})WRihbG z(6$8^%`N~eUgmVsNPDZOg(+v2yY3NdAJ%Wxy<_ANTi6Fz53Fn~CAKllWopju9x9gg znPTJ+-IFI`J7pbB23#wiybO!zHa8gwulu!Hk(U=((rkMC!rlh+d_)Sby2;=vZzxd5 zB~4kl_c-j{R=i&-Zf21dX9QN5;)%?&auubFESmoK|A8srhzNSaxT^;f{1^ z6QOq)rOwbs`g`0Xs`EcL4m~u>O-6!$tc}g9 z)Gj4;#>nB&j|ca)lr^lZzWt@9q?+KJCh%f7+5{1$mJD7B=yaOW_f@CD9%y>gvVj%x zD!Wwhi;Y+=_gWXv&|p=k(>by?2B1EB%@OLEWiJn)(@aIE&kMiKt~#2CyJ&U>3TECg zKASmFXkI9BI@XFba|K1i+1iq8X>09KonzQM0cr)?FFz5b9*~{bpG>c9@3WVH(VX?= zd;}L@3LG00SF06HkHzA46JFOeU&o11@ddziiym3%n&^P~uz-h1Zwx~6a$fG56k_sx zsF1s4%8jSteUfelbQ|WRGi0)*A^Xkh?TldXhSD#Tv0Y|vo+Ni}?_%QPWJ*bEKW4FJ zS_OiDt;hSLR_-0%HDJrFsH-`OdhI#FqAmphJHQ+i9Xr`g-(>47)dNl*)m3*1>%{xZ zEjNFe04@Q^*9+MXW>+M_^+q^7 zA*el;BroBMX`>H{b(r2#lr~f=uxaD#|7?zcvtzC)x(7#ce0(Vg2iDLt>OM|zZ3^+y zuqXN3Gc2f(l_;UcgWv#Dtc7=Ecw-I`@#TLL?JvAV#QxXg4S|x zoJrEm(zE|)aO?2#!F?7M>)!y8Hz6FkvbbVcK&_*#xXaG76AG;~n-kqX7+YsAUmD|U zxT6awG1|4WI2eNn%)*UO008{<;CpiCObAl?8rS4oYJz5MDdfXZkf-ggE8H6%PrlDM ztTw)nlILF8S zC5v`_23?LrNrGELhpVCYD(L3=&(S$)Rog-+VR_mytlW*Nqy%>=6*T>pit)1is==vC z7aY1rI40BnCXo}vM_#P+ke+d1L0V{_1jsi} z6vCxf&a`cNw>n`kIwS`%iWh#898-cu)j2~baJpMut!YYIQyvpAw7M%l+zuCIGW6Bg z(R{iTge~Y+-*qL7p*4b)pZxc|c`9|I84smG#@H})4Jn(4Ju#$`qD(B;w z;m?2lvhl=_so?#e+B!?jHg&ro3luke?&YGpWE8Tg@@bH#sCLP4#j}BSLVMS^`=_7GlsU9IaBxuFiNN-ClhKD@_oPo zpH6hKzD;j33}V7=_6O7)D*48nPT+9;`fTFtjZ(%4m`kK(5(j0cN-yG)HvW>Pw zRqqMRZjy$%fM(=K5GoQP*}vu3w)X|ytVc#2O!%|BFGx*DXMT^O^eYrAs1$m(Tp7OS z0Ty&v#eh#MX+PKsMP%;eG3BTUJI}=Gr~jJSbodZs6AN@aG}kmR;8mzhSe<9mw3zs5 zxijr%aT1S`|8ZSHew~^#vZVe@iDd-=WdTQH*lN`65)ng^W2_Zis>q$lsEsk8qE3F@ zc9xy%$sp+jQ#<_jXR;GjB~S@dz#Cj7U74I&%|$BE_q_|#8evr;0fzb(%@m7CbMUYb z+x0{dV)#pc6_Y;qyOdw1>T)__=;nvoKK8cYX~Ni1-}fk=o8y$;D;}1hz4N@9RH12%*xQP+FB7}b)mVtAP6mU^~b zrU;eQ#_Fg1vXEwiQ#ZIkw^Z8xv@IyqkSBLwy97tY6yJi7YlJ#$h!t>R@-9kFmd^2yhD?5z?Y+TyQ8v}< z77);6Vfi=1v0j~YwR9WPSrYV>bsSS_`^!&50K(a=h#eG1owY-u{oDia`M;ZU;>4qT zN?Z#3(IKhKfwB}-)FW2|B%dZRGv8lSQnQu(w#NF^k`1CiBj{-r5;sU6MW_T;x&aZYw5v!BoL&t^V*+CEF=N z4Q5FDjnj$!gE1Z%+{a&&X6@>jAA7aX@!ixo(Xtac#un!o*|yG6%-uEtJe;7KnJJg_ zxIZ<%P@o7YUQ-IZ{iY<~nn~?c)nypMsS?Cd9}GdkP+hC|JUm#5&P*6vb)24gHXigy z&xmOlCT0=d2w?vzF*~gcUx;J{k;0Su@~Rqr&xm{)X6Qrgpfs6>G|o)+UZ4Jhb=Bi3 zo_pk8D|{FKN9iE#pn&t%(+je2ivz3A|J<^**-CRO}ld{y_T$;kQ4xaInFa&t1W?~lan!;t3{bGOP z-t6ckBJ=xPB8heTZso~?MTgi!S5^jW@$td^GeJ|Vx*@95_^>%*YXTs5-pnQ>!2Owe z@k?qG(kU6q1%eCPzFkZO+oNB(KnCc&>EDaWE=qa~;ZuG*hSnK7rg*7U1wLe8sqCVn zf+9xoJ2Da6BC^MLyg&SWBF3*t&$x**>xYGE)#U#t_m9pw^^(VBr&cDVej9z;ZN0 z?r6n^pM)@SS+RD=)4?Asl;(AV5~L0HlWmllydk1J1ZcnYPwMXD<$xsg3b`k)#9Tgt zOiX{NjZbb4H9G9Bc;!!R*?VUd7~$PYc{!d~u$?t%JB7&j(wCL;40AxPj zZ}A4YT{nBF^_~zYD@Xq$_+Ro<>bRTgcW_Zv#O?v(&Y!*OYqv*RqWPy!-#5GYQ+?iA z;-v|_=S)-He#Rf)xB55+GaklzUkrfa;&;vYyR}K@BXgRXu(-wf(TJ9$@fQ20v`idi z{cXG@4(ghJehupCv(I;BZO3gDU&L?P4>HqsX(JQY;n@zPCTvi4p74JCRIHbF14{Mw z8{y+1YGfSe45`=R==d|s8GL%hm~(#qFyXCyFwcU}s&^w+c6CopXpg)Nc7%cBoOk@Q 
z<&Q)2K8m;a*DNOh)8@M|O6VwYIV6jcA5$!cHZr7;QA$)B{C`6qQvd%jK!_P?O`UN#18*v~$(1yd!ss!;T1L<(tFU2) zg^!9z{qR=^PNa-h#^7C5;T#Dkp1jF=z&y2G&0!i@h>?D+W@1CQL}(MHGG5LWKZ@f} z+CKVyR42U3=XOrViTOBoCJ{xU&wj`Ax)|^7 zKGHL`M=D_!v!fa8TCa}x5|N1HjX1h-E>k~Uc5{C80gxuQ={Y5Ij>kGNd8&j+qBmWd zF`qxUV6|Enuv}bup(bqjc9HgIe&)vpgNZV!dpEepRP?D5E#!!|m-cZF3C?IsQU`1; zhL_d&jCD{iynz(Xr%e$UKFb5baIN@UN71T$4TjyZHpolnA_VLwGl`j)^+AYn|?4lp|mGDE93`6)Tj?ctIc))0K< zU_-Zhkh6qGR9y*f+v)*)N$}HcTQCoGAy^}L2smPgRuvRYS3 zGc(qnvaMS?7OhjmX|Cg0>J)KwX-yWT*(sRSd%aiZ*AMJ^^+7BOID*n%W!NYGbwgvKEH}cvcfBMH|}~bvB%%p)AF#gFmWP-*?}9+9kO;*f07U?yW}i6!mEt8fVRU^ zMX=azBHUV)HAhY<0AuYI?mW~WKvcy|F_r=_tF95q>b>*O1MYza5kLWHF-{9BSmW5? zwL}>wuvU$#A1c7B+(JSn*?p({EVM5MAiHfaW-L)uph-3+>HHvu$bXS3`%{+R1-~F? zu;L_`HH_k;_=LByl9=qbtUeQ=J`)}2g|s1w5TXIrNMZuw#xY z_6z8ikEN>TdNRy#CR}{B0zIzt4L$VcxTfEhW!{Pxo`*W8TExMTGI!6&;znLCAoE=m z;p%Xy)B3-?#)IQmE9|UcOrcf2YVx7{7Q^#X6H+5gSv| z3{o9l-w5FrF1>i#wd?WrFTi#Dv+Pg#pPak;e`aTf;Q#;t diff --git a/doc/images/tuning_error_surface.png b/doc/images/tuning_error_surface.png deleted file mode 100644 index 2204cee2f5204d1d2d2e53fab8cdd0a1cb9ac47d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 110461 zcmeFZ^;ebM7d?7JN6BTMM8xZ5C}QsgSd#Ad-_4USG>-qN9DD* z7qsQ9U7PV-eUXvU`S}NAc+uDw?$q3_5pnArugN|td_34puZ|1#iA~ljd*18%{)0FF zE8}32^jBy&=~sj);iNd$1BBmS&XRW-II)oje7~V~DTU6o9O&Mr36;1Nx3nDT{@wf7 zKK7}-q}6h!@v>s?=Av@x(#x-M@{<+32qyUc7S_$Zj6{z7<>|YOfdYc?-^=u9SpNUL zgp3!2Ed0NBqKLs~|My1Gz&CJ8PZz`I^k6=t{qI^O!w?AWf43}R5r?4scdZ)X|9`vx z56<1bH<>><@}`JY_K3T-|M+S`-X%eP=T)zHDht-^^}$ur>pbReml z25%!7(4stTTaH7>xs0R7jPoM^i_L@KJ=9F8Pk8^ZYzMX7Rc>(0VV~Vs2C*of{$!u7ibxh}8VrR3XcNFDriQ9~)lk zEIOerwFA5MKbsIAaKMjT51rgFmQ&y`li@2o(gvYQOA1D07`Y;`%D;M9n{^*iTG0|# zQc@BjTKs3C;ZOBxD0Kl&Xr{e9e&wKr6;yDF-ANK#f)d0rZq(DKZMuJpc;v2L1;ubn2EZ%E?Le3wd#zyN~284K)(QcOiizAVghSJ|%Bx~XF5*DvI1 z%?JY)X%@oXZ5Sg_9_DYlA7PQyo^;zf2n=)d8C0pX$fEgAo9ZK0v4vi=^BSdU43aQ6 zNkM04*29CZ+*EH314Rt-ggLIiF!^`b0Q*S9fP06g!ZX>L?+T=1D4OshMeJbP_0W_5 zZ5@+^iJDf&d3H{x4i0|s^zwT3`nBo(&H2GndsnpMl>R`(ct9QkYXnJvUFoF0)d|J)G!OcYt+a4@S54w&^+%c2 z%iELN`#*pGGJ0Bn{`@VDMzO~4(U)Z4!#ivmYHI9Gsl16^_r8MkF`7YWupuGO)S5~c z1#-_P5s>4`F!kb}CUJGi=aW_cz`*YQKG+3Y+uL)kZp{1B94utUXXn;hQyc4i&UH%`sYYWA~f%Dnk9$$|zVw5X&cHz()Kx_dF* zwmr8TEv}KH<9i4Ff_SGAbDbeNv+r>DBA6c$LAq8#RB68jQDWC|O-;;Y%kb-qiLY;F zM~5IHA|kkp(NuwaU1OthDz({YI^73n#mGoQfn&(g)=015alpZRqKNQkSst1=x=nsT z!A#7^2Aj&=c(!zXJ`y?utZ%=cCMG__ddVlSY-s8e8z-lvogH&tUS3^8Lyg_CAQ3Tf zlDKc;TgcxP@(w#c;(aM0xPk7)rkGi~d;AT|qU|BXg75K8Eo%vX;%mpYhv=|p9Dl!Z zl)_Pc%xcrF+{_ZkL0IjP03X5-rv~lV;Ls^1*t1?6bRa=QLV8+&2Ah_amL?D@Y#$Hs z-$DNJL04L#f@N6`xUrdbXlMqn1!N8cpo4b{>b?_-3Or^MJe%olOsuNEc?i+4BPDrU z_HbQXTx7-9EaiE`RwsJ@z$I2!vjznR&ow(0;#MK|AUqX1+E}?IpA@;Y_U#|<`S@h@ z^~t_|{rYEmy2b-$`fq+7CWA*%_ZnJ1bL_LPojfLxtbxZ7JEM+^Pou0O8v1?i>n|5%ndp482#^Qq z%!X>r?d1y!7R|k{n^HU~;8^mXVw!zQ?gbiXGoH{@S1lwq5fB$)Q zYinA&*X8L(oT6!a6G4~?NMW>LIDX3qmFaar^S^~JKq}m^9L6s z&~zTBp#FYI*Q13Gva&1W|p)1X$VQ8qu- zv<<3hof#8z^AM0NKvv$_*;!fX{%Ub3AwvN%Gh;<3>u-DX<(%ad&r|XrbbfQy0V7Q( zwuA5Vo47CKp`hFdR-wn5D=9A@Y0?D?Vr*<|{jZJ=8t>HL5*kRDTgjgCE%%Z);Ax=v zrI%B*$R_0a&<-6VM(oh>GkOK1^b`Pw;OQk%I*?)^78Vp8bLf}rlrZ&?S_j}XsM14=1#~q zRv@Z`8+@EZmqO9}=J*y)scD6-zg3a0{?}0-v6-c1*wT_7sD|dYwx%-`x@G#Ef_Cqc zzaV6<8{DlRC0{P3H8lO4nGd(M6qT39`Bm|WWVpL5iCME_F{d+F1f&NJ1=B-pK z6s8?%8hW<1>#Id43`+xYm}}JY{b`XD`0R=K@RK`O6A}iTqjE934Alipp2zDW_gqQ( z+Zbd_#Sv3kE)YUG=)@$^MiRLMABI16tx4GCKM()@63f2_L5i;JHeLl3@00a`SA>LJ z+pvknMR_wbvk8KPHVXewd6^!Fuzh{04DJ{oe(1#pChA}cSGL(tzgqK*Pe~$vIXjSs4 zvzFyuxjH;!Po`!NXAI}&UdUuJFB6d{V4;Aq;DiZX9O3p?>#rZps{fNQHI%RV2jAtW 
zG75kT$gkhbok?hn|I6>;eN{HZGcFo732gC|&jBtpwzZ9N*`NNvegY>BpIue(p}`(( zLDm?&y+s0mPke6J!S?$0)6mk=(%iygHSApl?Oe4fw&LEELqzLQkNZ)xC=&nWelPq{ zgAL(8^!B|oDO#=()XMF{S8;s!Y$cvD^=kdFc-UO6S+z~tjJ_W! z`Gik?I^DLTiX1yDcIff%ca`=d95d3-h7f-3-P_@PL;K-**?vB)4fcP7^_l6x=uQ6lGx_mMk?`>WS|x|GEg`kTJ^rWe2f27-BbJp#&o~yC zD>ZXM(Lm_X<$bVHR*Qw!z&m8C7h}{V4n(3LD3U^1`=s|`I0Dgx_bS`(&tBj_U zWQT*`+TTtu_otd~5yuK0k^)P(_u=9QR${J76X@NWy3UP;jy^dvBgIaXUs@WT!RJw1iXPbFtYF6BSYwNAZkHuxLN4;Ah1PcF=MB3WiYX2u>#hjknWHz%koUQlN@~{Mb z1N4gz@Ks-$#^3rk|GK9p9nEr3|F1X{YN0{H%R+`;bSHN=;M9}|p}@&d;bqw|<-gVWYj4F8M5#K*mXgL31pR)p zAOo!awL4INNc@ueY^DDz|TK!qHoux>MTiQuRL^PhpHQ@8$sH>xr z=QV{>q)BKRj2t9N81zLr6;)!LR}<5pO2Vqe*8}%{HCip6rD&thjjDZY@W-fFY3K$( znjPz}2kEvyS97znRu?6){ZQnQdLp||m{^!x=^`HH_(At4DSGe}(+SoZKx3oC`KL#! zGFXw4*XNafBfC?D#igaj0PX{Xi+_Juzk*w}Za~8G#bMc}$Zct`DFqQZPzl;WOb{R@ zhKb@q6B}}b#qx$fJT*fHdr_NRuRx51I8rFtomAEdOI3LOL_;~xEGNI5(io*QNd~{8 zR12~-NKnP^RC5N8L1!>S(tCCg58mR`}?_wUR3DNI^;uQ>BLrmUEbmWtsp6 z1~LUNAKzT3pU}&fFNXyC+f=)poCqH;>9_Xu7vdqnNAxX}q-|FV`tN*K3#> zopOlPbZ+JkWlJrN%Z1#L6D{VZrFXPo1gu(!>3X(jFLzS4tE)shATbVPbT+e5`>o+u zOr3X**7FJgJTh&jTj{RJsPo9ev+#k%#C!9%mHB$!B?m`@oyf$%z#pIn2&fH3#l?n~ zM~hT^OX+kgfj#K!tHFl89YsDnx_n9(lz&o-aLmH_CmF6#WWBcgzTh9<-yx6sHDM8@ z%5-9YDBgvuQW+x~#S5U#Kfb-cx_>C6+jd^m04c2Y+(}R2_nZR>$-p4deVH@ZekN2r zBg9~-uE+dFbHswd1qg~|Q&Ur*$~uCF2Xthv+8BwEkr53IjV1Q&%}3f4m9D+R#wRS} zz82auUqOfPDX*1NR29O(Mbhclp!sw6pfZbB0J#9~5{ZGs6?Yj&sO#YuS-h{c;a_sB zuHX3nf%pNMiGe0sfByXWyg;q{n}-A)tR4WF=4%~b-abzx1Mbv$+^0j~p8~K|d?3gW zOGtdnAO5=N0-}MF!2>7+kNfdTu3gUN@qu!7g{cMy;+!k!?|jE%pJ0zvwe$@PsKKL&neJ z>=K*`;$Z+cPMF67p2&c&9qafo5$*fUX1+|jF$%y)z+^4V&40}nK*=6Y$gWS5I)}%{ zF_Uy%U%4%L(A-{{HMDr{^t)W&G2Emn)WCCnM zbVRC5f&%O2_I4Kv9biENY<;fLj!ZDiZm@ys{6JfnE-*@x^|cSX-mJq~uOni3q1V9e z@kpd7c^{HefUSjChG%0)w^O>?i#)%7r&#kXuS}6x&Iv6Q4AT#P+RUrf_#l^t%P_&6 zY!%B8_;0V@CB-?uCMDhctD*k{Y{9l^uv?B&32SKJZ03fa93%&G>X7<^Y_1+O9TRUF;>3#K+ zY%8hrj;hu55bnS0X@ss{EZM+5JS>v{CKx30BlGD?FR8~d<>YtjLy0g0r zVEaognj9dBOwG(_@Yas)T5NUR>ZW@^^P9)n?NC|@QHi{ZnqP>*-k?oMTYbpmy2w_@ zWH3Ll=Z=`>z2%b3A9tzWX`Wk4^QV38Hs438TrsINxoXNelrR9qGakJBY zh_R936|A4w?lZt^WB_=F-lCImcP+JhHSbrnLT2l&2)LHq-)24DZR5-z%r{0aKO8P^ z*@0|_Gv={;yEL$TH?kZLqxu!PVf!ujL_DjpYUZ{fxjdI^u9n#&9XvtQYk}X)z9*<) z!pC#ObX@+b*VWZ+Y(CeFg|Y%D<#C&Ia$os~`ghp}*@*`+7fg{)Qtt4pKMUfcBY$V5 z@fDNM#s;OvEh24g8O;}*#t__w@D~;Z2&XB3M>2S#b@88AIvAr`1<6GEtDoLTMJMdQ zt1yY?C>fO-C@NwEqm!-)Jq$elG84Mh0uv$0PPCbyq_+lS0!4nX2`V}tJi!yDx()-j z%=GW|Q`|k~9%7L4_P&kg?^#(P|77KRGnM6dcje%_-{_0#Uu!mYdIuYunw;zbX%8^# z&6fR&^=+X?k5_tu2hu2-v>}_GmsFKAsKGx&*(=Ak^8j5P9UYCZ;&>mUptdE?5Oi}r z#`1XHT9oAHVmsW-b8z#i$~Z7zIYAV<6p9j^ORdQK(PHkC})$ zo#c%T4X4xEcKJS|F=jwlloFs=BX?I(857d>N54_rqwv2n;;XsM{PVir?X64}M8;qm*Kl2wuK)Qzhi6=sdSW$#$>zl{-58i&i;X?>(T!WukI3T)4Cl zm{?>wRYS)sCwe!^NgJ!+22V6ExC5pBL-uXU?;|*sYdR-FNm~Yi>Jn7p*|7Qt8;;d; zV!iNmW}F>9Xj|NNrdO2IYmaZ4nXfYe*cQCsRh*ib=sus(&qDrqH$irfm37ri18D*T zoX2qk`EtpNS^s7->EnUs%og9fGQ#2sLE6z)X8dpmP`Rw8cv&8g0{Usby1adt#?6MW0Xhs}m*cX>oH)O6b>5D)l=- z;(qm7j|4*a`gK7$qfb&^LqkK^e6Z*n+tx$Q4zTkbe!k>_RGE#v*7LnKvF*HfeF|St z(w;SK#LD7F);evEerr6Gspd3p71cV)^~uzTvzeEWx&z87kLv-UqTn?pP-}a(Vb$Os zt_QQq@4t?wV+jw4kb1!FPd)NYKVAeA3f&kgoD?d(u*p|w``YR>Fd2#V+-EL-oJc42 zNM6YNB3LS@3p&=JjB<@A7#Xu#_epu?@2Z~bi;~42`+k^?W)orUBe%GWwc*<`6nZUq z_0wjrvP};EReNqL4EN6EF`zMUJ8np?3C_4YEd~uSghAyYFOxOwrnIp0mM* z*<(a*N)Dcm)8?NzNa3IqXxBY4>fPPl!AE#me{o14{NR#LW%~t@c7t^ekS14_+WsdN zA0CJ9n2V6t{h9IXAdX+~a>gy!M^ZU>lX8 zhfO8y1N6wH{`GAS3~(-AACmanuS$%x8x%C4&kYsG zr=eh5MXggZXaQ+jS63I545x`;do2XwSJIJ?60YTtLJz+i9Qy+bB;BTS{8l=pb{Ypa zYcch-qCqxHdeL87VYB%sJj3oGfk#Vq-FkTToVINzlHlvAhoGYiARMpDqs<8^GI?B2Kx7$tAH{;v9j=ZI1uvV&9Oi8r 
zt6la#zV^LZ{f0!|b$_`$R^GVu8ww~bo1mZqNa6Z-YXk(ZL>{lR9(jQ7Fy?zv8xbAt zG-r}@arFJ44J&kOc3@;5SB9b!{=a- zCQHY0SxV?eS@32e+LWXc5JEfebz+`NFHm!ToD=DM_}oyz(ng!L&K7t%*U5knhgvu@p)TyjLTr~d8x-w-~>)a3OQ%gBos zQbLq`vbA*LxtLpDw?$+FC&n<>TihBE9qD2zkw+xPjk~RuyafK zmTEKqKk^^E&JEIJwbe{Tn~=H0+0~ZsF=^{ z`WD5?z+&r5W8dyfD*W(98$JQ0D@~MN+keth*JHZ8rJIH48|2xEz~8$2(u$W_UJoI! zS`q}3m3H78bMCfJ_6BCVIxIRhI<%4@mFw2ED&4n}BTit3inGc)DdSFfZzrIBf>iuP zU3Dz_jHNcXeWd&Xt4l@_eg$C|a9aXEp~ik_?Y7;Y5`V zp7GL@dUMui&ZYXlL`|2^}2Pxb9Mfa%{szX_=1Z= zxpTOqeQ<|#{&)n#K-@I3VjvuUo`R_r%h{+j<_!yOQ*%RvacyAn~^X4dtO{kz+j>E zUVRw(Kb{9c zu8?O9*EuF(LMtR({A)ps>L1c3%0o^tqxRhCnBSHThll0XE;Si~EK;=)j6nBB+ki4Y zSO!ld7EaF;e%$3m;M68!>b}<%`tC4bQ-l&br=tUH=nE6MIHbZMw*59XoHq>1_lfA0WKZnykFrDA{l*rA(x(f432C^p&)T|ME}fXA1Ytn^wqs@2@&yX4Jr3D z<=+X+@AG_0`FHtgsp0tkb1Xp$Jqv1W=Q>!Li4rz8Hfw&oc+hj{NQ%*s5vs|N7-{%& z4#6zuau&Gi!ogqHcH(Go zb}34Z90NDKV=?aKntRsUY!vGqf)2y^|IjmI^A@rhQr03BuIv}`++k(&`VVfjU-p%a+lmlL(3A6&z~vZV82;H zr`Il-gI;QsK~9VQz$yV!R5phn_uLz}BD;}X$aof_d;PlVl&g`Mss@pTivltO0mWW! ziorb=P}SVV#-#s|)~9CeT$SO^Sk|xxtCLK>JI1ok8`NY%AvZJg*RAa@A5&H%#@^YQ zp*F_QdfrL^QEe3Jkr>$FZN|TuX1F2;CmR{vOVjX$H*$js&cT=|#=PyZW^wX^7<~|ImJQa-m_#HJ-Rf=FjBr6MNg8&OQi*J zuVU%+)KnwTk&vboWhatH9yMqG-3slH1tEkTY%40d$NbD``Yo?>pqPXD6T0P8TqxfaR6AD$kleAQuyzgDH<+5 zERz%Idg|44P$@oR{&#uYbJI>jz_j=WS^HB>NN3=RW`RRpxUC&?k)2%w1?GT5h-5A| zD^k{&6lhLL!g$S>{cT}6&a{tb4w*YGh_s&|ri5&oF9Qh83I6QKXT1s%vG1;+R#k7^ zZ+T&O_0OtOSyvbMb&QRSDBcjY_d|ClhXyGe%0F^y)F79f(HlUC;$m19mZMnh zEhp}6y$Fs9!iU_y&BY)aJVoYvl#`<&s>z;Jmiznrv&&0kE&RpBMbI-`8_HX@R)$lu zia^3JetkILn7*@?7!QNRpsm%f!ak@GjV`FtecZvLSs%RU<= zPl+wj$|TsC)lxzqpOf8!cp3XKBIV6^N=amro5TAa)xN@j^R?y026X2E-r+sW0+c+_ z^_{)*A>IyHq#>c2y1KHudZ0xrkkuKn!mBl{Kj4C%3PZ8=1-u#4EwSOb(%>4wsDxkb z3t`w_f|EyBM$z~}>_GbSnS{gHQxSLr+t2-4RJiMlNeVweOA4qeAV5JOo37L!J6dej z>h$#yiPP$a8*gBJywzaIymFWxDNC`S7}+mEA=uAP%9OZZuC3h-ly{VERMA9~+(Pw> z5^R)YB#NAGx_a*GE%?~nvtGgUCqI&~Uu}W!9nqkmK}^vv{-Ybth-y*Q(-XCZ3#18w z#j4TXV~GH#eTqoXVdG$ez`EJxOG-;?KN3@GaMFbL78e)yF6)^%HPEhI7g<;Q2O-cY zw;t;%1NPT$AMUSw@7m9_uuU*qZ|+dKlxQjGR!Bl1{Lu_Et;||K`ixkCT}lizUqR9Y zE9MCLO`q!*ez_f_%gG!1oG>d_w#UwFxvD--rz?lRkCsRB@kJc%iM*uDH+%l<%tTwc zSXZDXJN|d-hmSaKS72Y;+5cu-96QH4)LLMpeO!exJ|OSCyUgYk7w#MQb1q*rf!4>C z-^8h|A(k09cJlm-y${ax#p^N`qI{zts#E=U$kkgJRNP(lih>Cvy9aiF0utn>wg2AI zp{V|oP?|vhg}xnSHtjK~4Y8KZAv}qTZS1mImPw3@Jpp!HlUq-X9UAgd4qY6Qi6pc6 zgg&Cy?GI*IK?@!k9?sd#4e;yg1Ls&SR|UkIst)^id&9eJ{p$X&n8Xll_J&N;Tg4jd zgf(xY^T9hlhkd?OIaDli#8adj(LfEE6RiOG+n@T(iH>6~~h-8^FRJizBp4 znpwNZHR6m8VR@}y2JO)X&Lr>5zy2nPe7}Xdzojw=l<8~lEANi7gb~0Iu|k0;3dB$> z^k4!l1QS_~+xv&Mz)0KxwSud_GdhFyKYXb8%z0W_`y}pmJQ$43lnCj>SpM(6(VX$4 zCK+khc^F3mDsj5;p>BG3WTe^+syAJ(tx@yhT^4e-=XH{%-)$00QqG~~KN`Bb^}O+_ z*NMje(s>yyl^sI&4khenAtR9v%6N1V12OEk<-Zxmt>>wULb08U9a>{b=yNB2VV`X0 zdnZ&olgih8*R8Sp#rHhj0@p;!LHi%;XaSTrH*Gqv^^>xTGTJ5_7BhDW-dnYZS^W1g z)_!p>>?}k|q=AEuMuT%3JM;wEeKRauh8gZ;blK}=S(ZT1W4GOgv*5ToM=w*WB^5gB zyI8oq#P*ZFRF7RK;tC&pz3~VC}rPR1$yGeYq2L zvFUR7i@i$9=B4M@$B5O&Hy;luGo_$Q@h<+`5n*}${ZI_0!%9gyyy&6=w4%#Hk@|Gm zYo_jzf(999htop2=Rp!>g-G>pLRL|8)KZ;&q6j1<3c6o>rNTLu`EMYVH9Yn9CLk%E zwA##vR_9L8g5)NrsL1?5K>ybT7dhREI6@U^-{YZIRn!A}<#u0%x1zVjum^}hX?Pak zHyQf2y~qWGrW7`_*)|U+qxW>qlj&Jmg&>UpeGxdcp7h?s!$aUl)Yo4=z2PwJVu?80 zC7bDMyR?mf3M&0+3Uy-sCB&Q+Kw!(Lzc4?R+HWrvUTS|HG@Q;O&^Xdddv)Km3e0^A ze4{LKU%DZJLrGeB>Kb_%N5Tk`;`gxw7~9x`Ly8lx*z(M$zWnoCsBn#Fp)J^jGfaH@ zK60GacCp`tEK7+u_FWp=Dx((g)ZN|8bW&p?8_aWp2z@cJ!>!QsK&|Zp`&40Fv9Uwb zTWQ!mNB2el0X51a7u|~#Dp@#P;N^L;?VCpPTL$$PVL`Zcrzk19J!{Gn$z)n z|9%*ABUN@4O0iB!+)fJ7ZR7XfX<_+Wec$+P3d*FmHZd?1gg2brsF=xcc?p!R_bH^H ze*DtCQgS$_vx_CXzq#FX5aZYVs|JkG#Phch=P~4s2_bYA1GThCy)~UZ({*!RK%wE!4 
zayK96Yc$ek`-8Yep19_qsr{VFmLF~jfklR4OIVw&zP`TsrRv16ZcSi|?%x#Ej@_c- zug-AaC*8%Q8f^z`D*Cw+q2gJ~t}4hi7lUf>Mq2oZCMM3WjysSG!vJ)Sm*$e9i1~Ie z-;g=dZE)O%Tpbn(2@<;^YaxKmR2qzpi6jO+D=rJ#A?rmIhG;!cevaioME)N*frX8a zjE;i-_yACl!E@+%pY;9LmcDB9Nq7gBd>r6;OE~>_O~CRoL|+;)mPXCVBd&Dm&r9Nc z!lE1s^eOa~Ym0w(-P>q8~qhFT8o?a?D^7U>Wa?O7UW*ycz^5&K;= zaxeEa=Y!uvaMRwZg3 zldQCph{V%ND8c`2P4J5AUEnfVR~Ex>YRTGy+VlQMdkY+D!(VUrTjS5LmQ3_ham8I5 zzOH7|8OWPv@_o#SQD4SZ_@%&OV*qwo?cDN@{W*Tn_xtrrT9gtl^p_!LjAlr7X%TWZ zDAXsH$M?5idwIQByp|)0#qjKJs!3ZXX?T>7~sZ!J`E5`#^Uk5 z+J-M?%SAT{!2gSbxwv{yW}ItDLexH`u*59Efb-!vp4&bAp4^r&bNI!@NM0BE%I&>= z$=UiBCeb+uClbo-WiV&s=eTbmaqTm5Q%`d4&s^8Y`RS+$FU#GxgX$gC6wh5<3!>f* zwQ@^<$SePJ>=_X_E|YWWT?w0$>B}wJVdwq+NI+!h;?No8mkFZs5o?dze5}hJ&l~elYcF-Pk1hP_fB3DYBgj#;`3(WUg+-fx zs@HU2v)#hvKh=^#m!G^fJynN^;F*lz6o=AAUU^VkGI7rG5`Bg^>f&3m) z4R16DQ8)eBk_|1#3xxt)%f#44dqYF6fl#c-OzmDhRqgN;D<6Fpi8c-O_16mlF`o|( zBydqBBuu-vpY!R)S8zKVaaR1aok{m%)inzERAz5+j~#i9WdtY+d+9yJ-nrIj!_rVG ziacJZUJ2;lGM8AR!@~iaG7Sm7iWg_)#SEqXu)bjU$me)b zF3udFv~#Ez?U7fv-)d|m!$kN&;{Xg7_jL8L!RMrpASE-iATRIhb0qZ7cbwX^RM8Oh z3*(2WnHLWSbic0&QWU&HV}=3%pD-n2@V*z}sp-A_v_s19%BqO5tFd81nwEzGg;27)?Q z&MG8YR$PVRZg_5=;|w=A^FXadp2_PHrpb^f%@O=_wKu6pC#F~3&JPED^>LI7d&#$@ z0Do&sneIa8AG<@^jF^wWANcV^(N~<3*64wfSkEZ9wbA=#URGfDtX3g+@oR2A=35br z3DFB4`n_VscaC&`5yQ%eJW6s*f2$-G6ldMN<4|Ev1 zCmBuk;&0?BOs}UBCVtQ?qf<)MFXMbfO^8Jdyh_>yG*jFJ2Bck3c3=GTA-5UZvgd9q znh(>MOD{EpUDtH7dMRNFW z&sZ*^|EI;~)kWnWm6LHbj6%f$M%@ORfR94IQ%2Nozgw&vU<}|BRJ-p7<%0TNT2_Lx z{Jx`p-(`Qiwb7ur=0zs^b|&-mB<*LFMq|f;&9zA{i@i${+gH=FvQ)?a3RReMcY7;= z<6wH)wA@;O277yZ8>B($yF+zOvv3MTxPVD~)+VNDoX4O?Bn(Pk?q-|Aop4?jGo)uY z$Ps^UiI!aXExcOqbMF5vf2Rig>8*;BDunZ@t8$FuXxW2t)@FX!YRa1RxA7 z@^Md@^>99Ttntbysi9OdltsR_s#+?DwbP4my)gP~+NMYS-+YHx3U64|J@y|MP?>h& z%n&(*l<%5&wty;}a)$6?cm$j%C}{`AeRTn~Mx@$k*U z@$maQv~}@D8`-p*^YLnrI=BRw5AzCPT~y<@Qk${%@+x{jwJqp=)bQw*%-!CmGyYYH zN%khbJC}t=Uye-8iY)YEUixB$&BhyKYu=BXuTPwjbfFP8+wfQQZWRk9@W)G?*PwcQ z0c#dTCImXFm;eDC{HqT?-=8r{XZUUM7Y;!jA|-|YZP^%eh)E4H9P>t}7L7;5g~?Jv zitj)(cv5wz|KpCf!bI=n%u=mGC^46q`n{*+-X2Q&A5~M4AyNGr(!V4>mBNQUt9?%N zrj;3J=4`2W#*y}ic$WBWG-mdE6M8G-wu1dVM{>+0US-!8vTWoojt}`#kg=pI`3h@W zb93od`GVi3%zv%NM#o{p(H4HU>LmFg&aP{A>?gqrd1!$-Xht6)eu=PQmxt;oo1K3J zCRujHU}k)uD&u?uIJlwXG{W&p`wf-qD6)aVGj7hX>-JmLD1H=$2RcC$sTcKi<1Abn zryrEwNmws4mEB>s=6TH)C6@RO2%@?ME7$A(9HTLC~T${=W6GdMc+Sx_< zxM^+#TEXmu$FYV8yXyVfd;Nibp3?aK*Re%6Bli!j0sj`@s=B6Fwxm$BN9oFl=!=ml z6aM-h&C#c+Ltnr}F9j35?%kaew-#dz*rBXwn5dxM>b0-$7o1o86Y8%*D3gQVX47t! zsB*GrJ_h4V_?!HIP#rKxfaNbSzObP4J#xq+6!Za&e4!!{Z-wX;jv<;R$#krg^(?^RC<9gGz8T@EAtK)KCt1dDe#R8y*@34XQE{KEKPNsYRVCw?O21a-Cf+R zSC%Cd1x=wD4Toe5**3Yzw`!i@n`BgkeHgo((@kp#Ai#QOTy@T3PriME7hVB&9N zk-Y`UvvaC>;S4XZOK}4V(+%L`9bG z{rM3}m}?299V@Bmzg`VfKQ0uUi_f1vo+>xUte8~j(i?YO zjv3TM141^!2?lkGj;42kGb8jaqiDbZaUQk}zt%i|k{Hgz_FnSy3VVz+rJ-M(jeyZ0 zAWJKAUC47-&zj)kOZAy4B?xF}Sjaq(*9V0B&0pMH&?|ky*+1ywb};fVMtHt+pOig3 zI6QA2xilWeEAV~hJ5eNQJweH)4%$y~I=1a#J^3hq3lm+WVy?RE#>REG0sDISH)L^g z6gCrAeU@+d)I+ujdyh+e^AI#|Ht<&hU$;1l(t4pED%xedDYKmpL#F5GQ+rm){xmK! 
z{*aTccRPt6tI!rQbdx7e$_7zvw2D#=5)lUBxOFc}U1q#P`Y#wmZQ2`B2KTW*Ul+JO zwN>WQ2QT{f=h_x#bXB@Qrng6+ZBl83l#P=_O1x+ZeFK6WKK7sCdS%(Eb$+*Qnz`7P9ctW+%#Ycn4V=C8hjy zjuXbO9&s*Pb1zTb_IvAdz>Yen*X3M8yE%g*?&$FFQyab7VyXZ*^&hXIAAbRZnF%oQ zHXYOqHNi%K?NbQmA#gS>=KTQ6+Wu3lYQ5N!@Z>LB|4Mdeuy!&xr)Ar`i4WS@hJYe9 z?c|1EO$ptrK7d$Z2h*e*2xyx~*ryW;>NBlx@q>mb^AEwhhpU#} z0$LfyNaz0gPpep%x>F&k^s1|3>Xx!i>^OBvH}V!zNp^cY_(k2n=pd+Gx)I-<;kCl0 z;q6_Jh1WMu+DfWk)LC zQPbDU&d2*Fn;JMJM0M+ZF=uw&zI-dtahA_~l7rzB@-r9%wu<4Gtp9MD1PUtZf7fT- zZP0k{YcD??o#O}Ayt~1yyBDOSq<2Tq2V0{uMc>o7yL0_Vmo@0#-emnX11=Evgn20Z zJ;ydzV0jc1r}C3}>w1+OI)(=j1U9=ceRaKj>)8`p_DFO z(C-c=N@&s>lAd-;GRZ@JnS_Ea6ReK+6#^gjN1CpB!#~_p$F*WN9f#-Eebe@GhyAw1 z5_vabiRs@TYJ3v1oK^{0X>bvC5|DI8Q@6{eM%BN_Bj(DcR@`3tex~|xeeu919|ugj zR6Q2A7l(JU|oQwSkj$1rGs%LLhXv ziqQ<5OU;{!+KC4+vWHc&t1cwLt`V+fw5}%?-Z5}P!0Ib~7_r*Do@V!IKxp(S;2?tM z#fm4uQJq1c=WZASDXXXe?xI4v{V?*U^c4P5t_tO{=H1F%p)8**HVU3T1}9tO(UIi& zrP)$b1ODaja8|XWBK=srGYX)Rexe>8&c(gDf95Ty?^_;)l3cjh-l~*w&Z46Et;S#A zv$uGR$laEJb6AzE1+$Lx9@h(9;e6)~!klXN?N`Pwv*NfiR8l>ojmW(E>0=a_>ls|@ zJ<#%*wTX^$?QSz;@xnLQk{W-FaMMiePo3GvcU8MaFaFM4zfAy^T3`l&f)OS1<3~z* z?B*vmffF1!m#>1Jj9z0Cc~VnRZomd>+p=3AaoPTO58Yl3CR4cc*%^QH+G7Lj;TYrb zJLp?xdY^uH!fwmpnAnS*7h3U&Q^jDiJuf?0NgddE zy~|IoT+6HN>W5ib{d(#&$1qH1Ox<}ot=+?CHGYSao=wf*y8T? zJ0Gm73tJfe!yWIxxL)eEvxp=pKJ%P`aCj3j;H3Ykk4Ov@tRgm%_h%AbVfx{3Tum5b zjyup$jX!<)SN&QEA-t2jxkBS6Y!P+Wf;_d=M!VVyqVA3;N7kE2(>sYRJB4k#3Wz12 z{qv#aLf35=z)+aRzTc7o$`zw3^w&6B5Q8N%3x=Qfs1iOe9ADIJN;)$THzSYr4Glvf zn7~3_a~cqTK+r4RCY$&6=n=B#z(Ehz;4g9A&Tu;gM#qz1Z==VjrhZN-3XK#B-CF?i z)%0+)AK?CJ))=_w!4X|pTIY3II=90YM(u{rL@n0;C1_Ll#o2G|Cd4<9GcrP)bjAOG z8Z`Bl_r&&YNt|r?1BwEdC1|zvyXf9M;I3~SGqg0IbMUFoR&O~n&NUMMwP!ldIq*PM zGf6i{3a_x>*05GNt=^A?H@NFv1OQ7rDd|R_@hjm!{aS0(Y_*8%+TNxgKa5c}ff(n( zvgCmZOUW`yKa$`vG%pcx`J1~iQTCC^c0H&q&s`RofTgnF`gH$5GoyT2FU^rA8ehZGyOZ9F1{>q~i&!hI3aiH<=1w$WFE6hnoOjOwQhN4#4rO%7jbqy9 zGSS4k5}8uO9R1Qq!+=}%@YDNMrewc=6STisKnqOV22kGjiWLDW_u7_c2}wD7Y(JE1 zk`w#6XdPdY`^!8I%OS8Jyv%s-y0KRNo$B0b-qq-(r6cp$#ynMVCPPA3ACqys%u4+S zDk+GpI=m5y7wLZS0INQ4m@QPeaoR|-zl+X(vCc5(w95WNa79UweWIh4Wta#p?r%#frzv@^lapw_^rxdoz#il@K?Q81dgAz9`SaZCRBnb;Iedlw2@ zX)~(CBegZ9n6Xdew`$9$}cwaz}KPf!ZH2DFUw~BvpS~h>UOS5 znk((N#!+Y*pPAtA17lHTD@Xvl&3r79F7;@0^;?efE8a<;?=GMb68n8U<1A?GSPTOquMmUY%V6uMO{82^Yft7mF@h5$CSl7d7G+sv-JTD-SKA%G zzf_p2>7u>W(Lt!3UkGA#C^)7eLV4Twf6;W6QBiemn`Y<%5s;FQRJywx>F#bsx}{<0 zknZkIm2RZFL%O^BJ3QZdf6ZF_7}o4__FY%p=Bke{dwpibWx=$trs4|00bS5$NSx@! 
zj{0#KI~ig`m-m3K%AC!=4d5+(`0!yJMgI;4ke9x(r6oRkvnuXQ%*{1PK7HhN`md<@ zJ?0(tcIPUzn=a0}F{pTWkKGu(go^uu;mGBPzec3SBiMm#^ ztVKLP?UT`6OA>9A2~Q`d&E3#kkwzxXhIF^Zy!M4Fi+jAu)X8 zw@+K_h{!oy%3j3yLC_6iNY~$SpZpBxSe81$R`H=p+cvO!J>7d<0ANs_rHpw|9vcIU z<*m5KHwNVeprm2lbOSg*0Eu#K;Lc55eJ+lkWEX&Q#d6~Z9|;_|lsgJ7ng6&G0I@{8 znPX#3F$?Td&H4tE4VpdELXekz7E!G+GaXugen{3_)4fI+t34Z z3z_hJu_*MOx8d01auFq?k2Ch=h`wP&)@aM@Xi@fnNhJELk?hXNS~IA-( z6iR0&8Rgf=d8s}hE<>d~A(d_PR_+A*qIzVl2;fkY5GKJ2&9Re<9ZJiChGd`YWxbS| zJc~MOXHcAnkle@lh+r;m3jlz*2&bL!r~T5{1Z4@*^3c0FuN>3S*BfJulp&1rEq=9i z+J^h3_z@`^c|PB|xrb{ypV-xqo9Yl}K*1k`a1SO*HCe@YMy!iG*0gUN__fO`-v1X< zbYLy;5VUBF9?=bm?er*sJnJ;|lN=5_VOh5I6pZD{;BTChB_z@DlF3^!;soVNbd zQ|l&YWiX@exY*^ifzJ{DecqHHiAW3x~D+U?^zCCWsYuimKvPlWOrDS7D=Th;>AU4D-Qgb?#FbVj;Iyo_INBqw3&sq+UbRWEW#tR# zNfj@{@gDcJs-6AM9z2s^inpg24@s<_M^s&)OFY5_qH^h9-hotHFf&p<*wl`Zmd==r zq>4g48Vf_oL-p&zg23$;GkKVTG>TP!-fI^7?xp8JHj+DT7s9_tYyBMBX5zjnJVu)+ zoZqV+>4ch)rBSGANl7Q(*w-Qo&~zlq(4vq0&M@;WD&w+fzHSI^TUFZE@YCRd{Z9cX zQ&)!Elj~!eZnNhr_sqG(ySrM%E|Y?y`)JA0!tZxuwpXol zQ}z9Od6nY*D8uQej`ly#zk|vBZB6xzBK7=H7G!`g3;Gv>w?*l=3>4|>Uz?10= zllOw)e>o%3M1N6$sprOiKRuK0SR`-E@9mCScj`wiI5tYo{uslZlJ9L=i24|#@bhXd z^33zjDAEWP7IHvusXcuy;{Jkt=olt>WPp-p#3Uo0SX?f^bTPk@9lVF6lgZUgqp-2U*=!o0v2eMDaUHwNLF-Ta_e8L;7)@H0SmHM zVIAdbpBOOkUxF>D9yYoIp)#RRyEz+I%@;+`Rotijxg_%Wa&|Ca|#Cs$_69 z9L+ZvW^l)8^y-O)kZ{sQ+n+Uck}2sTvF=_f`%r$Q(Dhp{ZD&B%S>1MCwi?6t4(0xIB??YFSBJOB6 zHJfP=%n{n`N-?yh%MoopigNK33SJ*SWOUy!A%lqXd8yXt2mCQ%p5Y6V0U4B3G!ir~ zH0xx&J#Qw=Gv%~ux-`vI*@@YV!-U`!Ynw*7BQZszgc{E>^;d+ps z0O8;!4eMequyezQ%>;Se4=M`&8%`?7K#P2jF+$_>=th?F_gWlHAj^M{IOl)BI~~)y zg;3662!UL1MlbK;f|w_nwB=91hfpFBO-03)sau!s7x!NMw+TvjZEJ~Et7~g^PnGdq ztRw77#qqg1^qYU^T|!Pbf*`n_$n$*joli-RhVJ&PWz^k*D#3OrC1yCWJY^w{Lpu}Z zYB4v^2(3~D?2)mP?dQuQA}3GFIjaaKz276F!rp%HG&yp=CSq~CaO;Ij1oo=q!wvy> z2)}221GC?rJkl(c^0!VU<}(lSdeYB$QRkLcqa}so$+Km~iK;fFefHS)hhF}Osx`24 zM2Wo(NLtw7Fs)HFNoBnd^$Nf!Rlv<=6|3DM1;374N>eXN+VI5Ee5#| zhQTz80@;(-$9DI9S7LGe^#T4It8?sH?(}n(aSfLG&D_z<&hMWg{5=>@mLvh|#5V6P z7$9bLcvi2wSkJQARGdA5Q0SpTK-Fm6;IAkxR0C;S$LVSJF1rj)`SPI3AjTOIE`cWo z*CtQcKQ9rH<~gFCU>4Jj)e+y|+{GifivkHK>>XtsulkL&UN8P4u9dq<4`)Tl(Jr^O zKhR0#9q0W-V&b}wdO;zE+?a5%Nj?SREei)0I>L!Ttmx0q|I6a|`AARwVDX~znjd3PotT&(t9SBf-9s23y znVd%UFN@d1;I zO1TnMkTYA|t~*+dG?l}=PS%}{_G(MAQb!-h$G(S;uW0Vq$OnI9sEO9U@uK}6-G;Nw zsk_f;zNu&@)f1yzl6*a>Q8MRzg%(2=( zpC5JDzWOjLjnw&}p;xzqKc~_t6j!b0hQuggN7CrG&Ns|5D88xTYw8Ut!ZDN&7oie0 zFORRG=X>qY*`xX)AoObVMx?$oQ1K29!qv;W;@=d>f5}Zg zY^>PMx%HuEl7cSk2f{Tts+mACK!^TI&X{h9+XmBB$@^dSI0d&L-YiB`rV!R|{K$$c z1Z~kLP_|c~R{YS`9K?HTRNgzCgYR|U zEc60%_IQ(n8JtUMT2bYB7N~SdzLQASeX4OqhD0PkhTIbaDPt%vLZHVP!I51 zhboS~hLDJ}=Hg@rMT_4?%%h`6W@($`C7Z6HFSXU%D+JW)74?KiL*0n9l1n#H2haW) zxhSkO)6>`QzibH(YgboKU`^y;C)^a28`C;_^$j7|8ssFPpQ#9aq4;BxTy` za!Wt{r)33|uyIgu8K3#$S)YWR=URSB0IOn;%moK6`xV+2@i{EqVoB)ehRT=x{zmM0 zqIW0%g3+gyI_%6lE1)*_6UDuk9}$)O_T(2iOI{+$IOCfM7bHw4#jhBFfp-;2goA3R zaPzr>jDqLxS}N1rT6#Rf0lP!ILE!)V6KstVxZ{;Nzkxs zW!}LKZ>5H=52O8GudtG)*G08HCqR5XJu?V_(S1%&Hpk9s@Y^a_3 zTXTdk_^-&I2nL^%?X~&pTJO_NH{O~X-e#Hbe} z_AGl#R$Ca^Nu>5H;x;RVms5}x^sv@=HeY90+X`p0>KfZXR#4)YLgpCOUuKp5Dg2WG zxX`nZmgMGl?c8L#QcA-O?VX&P2Bxi1bii5XPuRFCf5GATR@czb>k9aU0UF^4_MtSP z2bL^R8Ty*$$n(e$tjXKc*V)3oDiMYZVg6^g&D*#W!a0f2*+p}jJ#jc)BJwCPtW^eZF?CgLyAD~kPq|70O86Gc318~(J13AjU z`TaLQc5b``9iE-52YMQ4fN$l}DEG&qizZP$&vMPbM&7YCe5m_^V#|ee{+T8olRN`8 zaC_na+jE#SYRKYq2xw|i?277#b5GzrnS_QNqd=dEg_tcP9Ycg|3sN5<4TX}lnYlSn zTh=_@{au@UgO;r*`k~d%AC2u_4=mvok@)+bpv@YIKXGk+UUZL)54a)4hhxUrw?cj{ z4w6+|{;OJdq!7&bpC_Vn$NEwogV?F=tmxG5{P9XjJ^nxjv12?w2Jla`u&F`}>B`NT+|gx+U-CPOHPJxYZc-5PJo5YdpZGGZ;>Ky~n>>)j(CFO$=^ 
zOe!{7Ii&;n=UWK;>}390p9bI&VbV;Z97w!QPT$L~RbU_)FlPEN$(eU--909}s^JyX zhu64S>t}5KKi0y*xk4KV$73xuH4`M!0axZY1nNv}FQ&tGyb)d-6Mj}dW-crvihlUc zFF@gLbvcKeZcF2B&&rA{Dl37>K<^y({8Q7qA&nu9`L#OvYYS1dGcOA%ev|EkG+?uE zxG1R^&b^)~VXk#hRH@B;F5D~;by z_}XJxMDC*{3aR+gNXx-G(9g9jpt?H8lIqE6f~a2xV#izi7N>dzY<{Qj;GlyovzC4U`Cq!8cQcJ+`G3=2vbv(rLfX~awCx6J`yXioX z&KA127p*u2gt$?svKB5|@&z0lkge4R%JoQ3kL&KH$+6jI6_mhIp=rcI zMD(`>v^;#_#5ow96Lt)=PA_VpF&I!X%E%JC>;51CYj7p^O3NdWOo#akC-Vaq0cj|R zqQgnf*jzu=rl@iSyZZDI&>HZ~P5q(--Uq5cLo(kPtAwE>hRdrdd1Y$sUw|T9Lr2F( z`-mG!_UdTg#E*dDIDN;=4ucd9GdBosUk6jqrS$VXtl)JOtSni0fP7Jnj{?Gw-x7q`^Q*aeXfs-1ba4?M1qG$$K()%q%BuA6hUk4?a8RbEkPR>2Rl4#* z_Fm^l=d?VlKMMXIwS@4z4<5U`8EqjC*X;N~$snh1$o4%i*?L3MM+JBz^mXkDJxzx_ zRj-jsA)da(j%e8y_cSij$!suqJGj5KoT#-iwIs-KKc;qZeeyLH6{}(yDyWI(>D~a|L2spa}uMmR2U3TD!~D)~lIkod>z-&MM!2 z{`OXj()Xtv$v)Ft`~Xs^mUX3G>)`fK(!$cx=GFexGGOOXl4+73k2VyX$h12aEee%J zImd=iUD_<-Iyl}-^UJh^;ojOtKUv7;_M#B~*{r~p09w>O@RTY{kCfuxhMn}sWzj;G zD^dlNq{I`x@#l{e4%vOjC9VTUWSF^WkhSsO#pntT<6w{UPmQ@86I!G_4es6UIY#-Z zsuh@r)**1Gp+iXf%@@Ohnn=zMs19|mpV>%7$sgw2Z0%5Lnp3vX$agfq7qagk$TcQT3LP2Y)yTbk0ONc3Alco=@K-R5l!KZ zwf<*HBy60ul`8MELRGSsR3BAw&Gr?FiY&(It>5vFkE6C~r_Q6^STN7pzSth>u^;~J zxa-#YS&u2k+20V)P0#Z|aV#t76kT9fDdZlkaiNd4+^eeZOxFFHEc@)(pzQ*U`iTr9r9z z`W=)q9eLAi_Nst7NCDaV9DAb=#Yszuc}f}L5U7M-@pI2`1&#+*I9|;nLB~`$jGSgC zA6+pSz3Zc%cUf5YK(Xr*$2K;^QH-pg@~vU;8~4Q!PgFpkNZ6M=Xgb%FzCOmf^y>Wt zlMGbXoG=fiCTRwE;(330K2h!el;{7eEYBxK`Ez};K;{b_9oqHu%!rBBSQdd>x!j&7 zId!4|7)B#iD3jR`e1Vwlp-oQs-X=eU5+nk%M%6`boubhgz-T-BHR<3t7IWTylDHJC zh&rS%9d0&RMY?F=J1-czv3mT#?V!mfw$pfdOR1W^Qh}+m_OokA7;P~7xhal3tsYhe zKRe3-l85m>M@gFW*NgtVp8|rqE$F{fm;*3_1}apG)wJ}RP_Jw-pG(uYPYS=HXf7a- zGNM8^T*`h966b&f&SpDXhiO`i5mJfTM}N7`7!k*=5yFa|CU?6W^wMT?Y0ocD#xA0K~t1RQnEE>=I&Q3C}ksIsJwoXU;pC*$b+u&JD#7t z>Gj)&6F#daXJl+QE`2iw%DDVAXLT6P*7QLEI}_Aug}r^vUzX@)EhE{^>}0%9JU_2U zPg=wdYDh}@Py(RVFu*W2{iJr9)LbC~EF!=0Fja3D>Dbs|$RLnuHyou=5E$7iedGFqa=AnYv51ik#P!I68IWZ?B_&kkDUk5Ea`s$DFzO)2!7}b>0kS zmBx+|@2k(Cun)EbQ)8l)hUBSL7$rQsZXU5JNfhtDSLvf{d|y+Jh@zDI z4<$engZTL{{-I)1s|q^3-d`ruRjE?>RxN$ zcX6}cf+{b}m76@d6206X#>g-F6=Il?TNiuvx^!W9 zHUIf1$JW&vQ?nb^)XvGV6{U7(okO_qO0dc3KyHdqkY+o0ES;fkv~>hH21Xdh8I!QB zaFlsig0W-{@#3Idb6hZoTmWwb30miCod3zUOW#aKRjnD`Ct=hnlAnL4Td?65|2kB+ z1e&XppvSmi5~*5+u=l=UK*&DoQu>zXCER_3L%oyw6LK@x6}oinA1V z6K)H$!=|+LEe13m!WPS(7lQYu`@l^JJcWKIFeL|Vet*)aCPFhL`l+z(G{BIRi^8u_(H-LWhQP$$b6FN@0sVk@25RhRqSXj0!x3LON- z$(~!P+bOmC32UREw|hW;Gm><=n`k)oQK$rO4Fl8-HB~>bU5h9FJUtg!_xVUJATi`x zgmzH`Pt%C+gc}c9%TPXSnKCkL>GmWNl`Hlu_i(_3Jl|Qse+YABEFElVjV5ZcoNSx zEF+mLg za!C+<5S9~(uZQ7>^EhGc*vs+DP+AMBfHR(azvz0~aBDSsMQnEWJE9l1dP`+JF0q;8 zbfsB)_)pZari3stCQ(ox*&e*MN%OQ@)M-RJ zD6CiVYNt>(mh3qq*O7VBk924(D@ehjrH3yDA9sCTh z2)cuYN4&wAzx~|Kzx2svNg7s-4JLpQbt}Ce1on!%cx!7z8|!+4XjJ;dG%>Y2*t)M@ zFv@BaU@7q62;)S$JBEjih<~@G%?vx&;z?j4(vJBdX|JvI$LL#GH_a^vGem7#GsA%1 zy+0!X3IfcW1~X63G~n3k+S*1hE|{y_N35xF!s(VGCy^^ZdSD7lqhvH|M-UJNl6cs| zpU3GO@85#{T+M$CT4n_f@EZF&w%DDYJYnyAqZq%wh4ygnBuuj-`ruy0mqar0P0}MS zCr0kr(lsTOUq3ltZy6_NCkvHy45n_l3uULWOI@qQf)&(5F076~IV9h+MG*dixp+T! zCobr%xng657It`4p)<0T|0SYl2AY7-wu4v3p8LEtiONE-b-u5b77S)CFehvr$?vb! 
[GIT binary patch data for the image files changed by this patch omitted]

Fig.1 The Architecture of the deepspeech2 online model

### Data Preparation
#### Vocabulary
For English data, the vocabulary dictionary is composed of the 26 English characters plus " ' ", space, and the special tokens \<blank\>, \<unk\> and \<eos\>. The \<blank\> represents the blank label in CTC, the \<unk\> represents unknown characters and the \<eos\> represents the start and the end of a sentence. For Mandarin, the vocabulary dictionary is composed of the Chinese characters collected from the training set, with the same three special characters added. For both English and Mandarin data, the default indices are \<blank\>=0, \<unk\>=1 and \<eos\>=last index.
```
# The code to build vocabulary
cd examples/aishell/s0
python3 ../../../utils/build_vocab.py \
    --unit_type="char" \
    --count_threshold=0 \
    --vocab_path="data/vocab.txt" \
    --manifest_paths "data/manifest.train.raw" "data/manifest.dev.raw"

# vocabulary for aishell dataset (Mandarin)
vi examples/aishell/s0/data/vocab.txt

# vocabulary for librispeech dataset (English)
vi examples/librispeech/s0/data/vocab.txt
```

#### CMVN
For CMVN, a subset of the training set (or the full set) is chosen and used to compute the feature mean and standard deviation.
```
# The code to compute the feature mean and std
cd examples/aishell/s0
python3 ../../../utils/compute_mean_std.py \
    --manifest_path="data/manifest.train.raw" \
    --spectrum_type="linear" \
    --delta_delta=false \
    --stride_ms=10.0 \
    --window_ms=20.0 \
    --sample_rate=16000 \
    --use_dB_normalization=True \
    --num_samples=2000 \
    --num_workers=10 \
    --output_path="data/mean_std.json"
```

#### Feature Extraction
For feature extraction, three methods are implemented: linear (FFT without a filter bank), fbank and mfcc. Currently, the released deepspeech2 online model uses the linear feature extraction method.
```
# The code for feature extraction
vi deepspeech/frontend/featurizer/audio_featurizer.py
```

### Encoder
The encoder is composed of two 2D convolution subsampling layers followed by a stack of single-direction RNN layers. The convolution subsampling layers extract feature representations from the raw audio features and reduce the length of the audio sequence at the same time. The resulting representations are then fed into the stacked RNN layers, for which both LSTM and GRU cells are available. Adding one fully connected (fc) layer after the stacked RNN layers is optional; if the number of stacked RNN layers is less than 5, adding one fc layer after them is recommended.

The code of the encoder is in:
```
vi deepspeech/models/ds2_online/deepspeech2.py
```

### Decoder
To get the character probabilities of each frame, the per-frame feature representations output by the encoder are fed into a projection layer, implemented as a dense layer whose output dimension equals the vocabulary size. After the projection layer, the softmax function turns the frame-level representations into character probabilities. At inference time, the per-frame character probabilities are passed to the CTC decoder to get the final speech recognition results.

The code of the decoder is in:
```
# The code of constructing the decoder in model
vi deepspeech/models/ds2_online/deepspeech2.py
# The code of CTC Decoder
vi deepspeech/modules/ctc.py
```
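To make the encoder and decoder descriptions above concrete, here is a self-contained toy sketch of the same data flow in Paddle: two stride-2 convolutions subsample the input, a stack of forward-only GRU layers encodes it, a dense projection plus softmax yields per-frame character probabilities, and a final greedy pass collapses them CTC-style. All layer sizes, the 4-token vocabulary and the blank id 0 are illustrative assumptions, not the repo's actual configuration or code.
```python
import paddle
import paddle.nn as nn
import paddle.nn.functional as F

class TinyDS2Online(nn.Layer):
    def __init__(self, feat_dim=161, hidden=64, vocab_size=4):
        super().__init__()
        # two 2D convolutions subsample the time (and feature) axis, stride 2 each
        self.conv1 = nn.Conv2D(1, 8, kernel_size=3, stride=2, padding=1)
        self.conv2 = nn.Conv2D(8, 8, kernel_size=3, stride=2, padding=1)
        subsampled_feat = (feat_dim + 3) // 4        # feature dim after two stride-2 convs
        self.rnn = nn.GRU(8 * subsampled_feat, hidden, num_layers=3, direction="forward")
        self.proj = nn.Linear(hidden, vocab_size)    # vocab_size includes <blank>

    def forward(self, x):                            # x: (batch, time, feat)
        x = F.relu(self.conv1(x.unsqueeze(1)))       # -> (batch, 8, time/2, feat/2)
        x = F.relu(self.conv2(x))                    # -> (batch, 8, time/4, feat/4)
        b, c, t, f = x.shape
        x = x.transpose([0, 2, 1, 3]).reshape([b, t, c * f])
        x, _ = self.rnn(x)                           # -> (batch, time/4, hidden)
        return F.softmax(self.proj(x), axis=-1)      # per-frame token probabilities

probs = TinyDS2Online()(paddle.randn([1, 100, 161]))
best = probs.argmax(axis=-1).numpy()[0]              # frame-wise argmax token ids
# greedy CTC: merge repeated ids, then drop the blank (id 0 here)
decoded, prev = [], -1
for tok in best:
    if tok != prev and tok != 0:
        decoded.append(int(tok))
    prev = tok
print(decoded)
```
Greedy collapsing is only the simplest CTC decoding strategy; the released recipes use the beam-search CTC decoder in `deepspeech/modules/ctc.py`, typically together with an n-gram language model.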
## Training Process
Using the command below, you can train the deepspeech2 online model.
```
cd examples/aishell/s0
bash run.sh --stage 0 --stop_stage 2 --model_type online --conf_path conf/deepspeech2_online.yaml
```
The detailed commands are:
```
# The code for training in run.sh
set -e
source path.sh

gpus=2,3,5,7
stage=0
stop_stage=5
conf_path=conf/deepspeech2_online.yaml # conf/deepspeech2.yaml | conf/deepspeech2_online.yaml
avg_num=1
model_type=online # online | offline

source ${MAIN_ROOT}/utils/parse_options.sh || exit 1;

avg_ckpt=avg_${avg_num}
ckpt=$(basename ${conf_path} | awk -F'.' '{print $1}')
echo "checkpoint name ${ckpt}"

if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
    # prepare data
    bash ./local/data.sh || exit -1
fi

if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
    # train model, all `ckpt` under `exp` dir
    CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${ckpt} ${model_type}
fi

if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
    # avg n best model
    avg.sh exp/${ckpt}/checkpoints ${avg_num}
fi
```

With the commands above, the training process is started. There are 5 stages in "run.sh", and the first 3 are used for training. Stage 0 prepares the data: the dataset is downloaded, and the manifest files, the vocabulary dictionary and the CMVN file are generated in "./data/". Stage 1 trains the model; the log files and model checkpoints are saved in "exp/deepspeech2_online/". Stage 2 generates the final model for prediction by averaging the top-k checkpoints ranked by validation loss.

## Testing Process
Using the command below, you can test the deepspeech2 online model.
```
bash run.sh --stage 3 --stop_stage 5 --model_type online --conf_path conf/deepspeech2_online.yaml
```
The detailed commands are:
```
conf_path=conf/deepspeech2_online.yaml
avg_num=1
model_type=online
avg_ckpt=avg_${avg_num}

if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
    # test ckpt avg_n
    CUDA_VISIBLE_DEVICES=2 ./local/test.sh ${conf_path} exp/${ckpt}/checkpoints/${avg_ckpt} ${model_type} || exit -1
fi

if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
    # export ckpt avg_n
    CUDA_VISIBLE_DEVICES=5 ./local/export.sh ${conf_path} exp/${ckpt}/checkpoints/${avg_ckpt} exp/${ckpt}/checkpoints/${avg_ckpt}.jit ${model_type}
fi

if [ ${stage} -le 5 ] && [ ${stop_stage} -ge 5 ]; then
    # test export ckpt avg_n
    CUDA_VISIBLE_DEVICES=0 ./local/test_export.sh ${conf_path} exp/${ckpt}/checkpoints/${avg_ckpt}.jit ${model_type} || exit -1
fi
```
After training, stages 3, 4 and 5 are used for testing. Stage 3 tests the model generated in stage 2 and reports the CER on the test set. Stage 4 transforms the model from a dynamic graph to a static graph with the "paddle.jit" library. Stage 5 tests the exported static-graph model.
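Conceptually, the stage-4 export can be sketched in a few lines with paddle.jit: a dynamic-graph `Layer` is traced against an input specification and saved as a static inference graph. The tiny stand-in model, the feature dimension and the save path below are illustrative assumptions, not the repo's real exporter.
```python
# A minimal sketch of a dynamic-to-static export with paddle.jit;
# the layer, the feature dim 161 and the path are toy assumptions.
import paddle
from paddle.static import InputSpec

class TinyAcousticModel(paddle.nn.Layer):
    def __init__(self, feat_dim=161, vocab_size=29):
        super().__init__()
        self.proj = paddle.nn.Linear(feat_dim, vocab_size)

    def forward(self, feats):  # feats: (batch, time, feat_dim)
        return paddle.nn.functional.softmax(self.proj(feats), axis=-1)

model = TinyAcousticModel()
model.eval()
# Leave batch and time dynamic so the static graph accepts any utterance length.
input_spec = [InputSpec(shape=[None, None, 161], dtype="float32")]
paddle.jit.save(model, "exp/tiny/checkpoints/avg_1.jit", input_spec=input_spec)
```
Leaving the batch and time axes as `None` keeps the exported graph usable for utterances of any length; stage 5 then runs the test set against exactly this kind of exported artifact.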
## Non-Streaming
The deepspeech2 offline model is similar to the deepspeech2 online model. The main differences are that the offline model uses stacked bidirectional RNN layers while the online model uses single-direction RNN layers, and that the offline model does not use the fc layer. For the stacked bidirectional RNN layers in the offline model, both the simple RNN cell and the GRU cell are provided (a toy shape comparison is given below Fig.2).

The architecture of the model is shown in Fig.2.
Fig.2 The Architecture of the deepspeech2 offline model
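The difference is easy to see from tensor shapes alone. In the toy comparison below (sizes are illustrative, not the released configuration), the bidirectional GRU used by the offline model emits per-frame outputs twice as wide as the forward-only GRU of the online model, because forward and backward hidden states are concatenated.
```python
import paddle

x = paddle.randn([4, 50, 161])                                # (batch, time, feat), toy input
online_rnn = paddle.nn.GRU(161, 1024, direction="forward")    # online: forward only
offline_rnn = paddle.nn.GRU(161, 1024, direction="bidirect")  # offline: bidirectional
y_online, _ = online_rnn(x)
y_offline, _ = offline_rnn(x)
print(y_online.shape)    # [4, 50, 1024]
print(y_offline.shape)   # [4, 50, 2048]  forward and backward states concatenated
```
This wider, future-aware representation is also why the offline model cannot stream: the backward pass needs the whole utterance before any frame can be emitted.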

For data preparation and the decoder, the deepspeech2 offline model is the same as the deepspeech2 online model.

The code of the encoder and decoder for the deepspeech2 offline model is in:
```
vi deepspeech/models/ds2/deepspeech2.py
```

The training process and testing process of the deepspeech2 offline model are very similar to those of the deepspeech2 online model; only a few changes need attention.

For training and testing, the "model_type" and the "conf_path" must be set accordingly.
```
# Training offline
cd examples/aishell/s0
bash run.sh --stage 0 --stop_stage 2 --model_type offline --conf_path conf/deepspeech2.yaml
```
```
# Testing offline
cd examples/aishell/s0
bash run.sh --stage 3 --stop_stage 5 --model_type offline --conf_path conf/deepspeech2.yaml
```
diff --git a/doc/src/feature_list.md b/docs/src/feature_list.md
similarity index 78%
rename from doc/src/feature_list.md
rename to docs/src/feature_list.md
index 573669fa2..4639ddd6f 100644
--- a/doc/src/feature_list.md
+++ b/docs/src/feature_list.md
@@ -1,13 +1,20 @@
-# Featrues
+# Features
+
+### Dataset
+* Aishell
+* Librispeech
+* THCHS30
+* TIMIT
 
 ### Speech Recognition
 
-* Offline
+* Non-Streaming
   * [Baidu's DeepSpeech2](http://proceedings.mlr.press/v48/amodei16.pdf)
   * [Transformer](https://arxiv.org/abs/1706.03762)
   * [Conformer](https://arxiv.org/abs/2005.08100)
 
-* Online
+* Streaming
+  * [Baidu's DeepSpeech2](http://proceedings.mlr.press/v48/amodei16.pdf)
   * [U2](https://arxiv.org/pdf/2012.05481.pdf)
 
 ### Language Model
@@ -22,6 +29,15 @@
 * beam search
 * attention rescore
 
+### Deployment
+
+* Paddle Inference
+
+### Alignment
+
+* MFA
+* CTC Alignment
+
 ### Speech Frontend
 
 * Audio
diff --git a/doc/src/getting_started.md b/docs/src/getting_started.md
similarity index 100%
rename from doc/src/getting_started.md
rename to docs/src/getting_started.md
diff --git a/doc/src/install.md b/docs/src/install.md
similarity index 95%
rename from doc/src/install.md
rename to docs/src/install.md
index 01049a2fc..8cecba125 100644
--- a/doc/src/install.md
+++ b/docs/src/install.md
@@ -4,15 +4,16 @@ To avoid the trouble of environment setup, [running in Docker container](#runnin
 ## Prerequisites
 - Python >= 3.7
-- PaddlePaddle 2.0.0 or later (please refer to the [Installation Guide](https://www.paddlepaddle.org.cn/documentation/docs/en/beginners_guide/index_en.html))
+- PaddlePaddle latest version (please refer to the [Installation Guide](https://www.paddlepaddle.org.cn/documentation/docs/en/beginners_guide/index_en.html))
 
-## Setup
+## Setup (Important)
 
 - Make sure these libraries or tools are installed: `pkg-config`, `flac`, `ogg`, `vorbis`, `boost`, `sox`, and `swig`, e.g. installing them via `apt-get`:
 
 ```bash
 sudo apt-get install -y sox pkg-config libflac-dev libogg-dev libvorbis-dev libboost-dev swig python3-dev
 ```
+The version of `swig` should be >= 3.0.
 
 or, installing them via `yum`:
diff --git a/doc/src/ngram_lm.md b/docs/src/ngram_lm.md
similarity index 64%
rename from doc/src/ngram_lm.md
rename to docs/src/ngram_lm.md
index 119a3b21c..7872df22d 100644
--- a/doc/src/ngram_lm.md
+++ b/docs/src/ngram_lm.md
@@ -35,52 +35,3 @@ Different from the English language model, Mandarin language model is character-
 * A whitespace character between two tokens is inserted.
 
 Please notice that the released language models only contain simplified Chinese characters. After the preprocessing is done, we can begin to train the language model. The key training arguments for the small LM are '-o 5 --prune 0 1 2 4 4' and '-o 5' for the large LM.
 Please refer to the section above for the meaning of each argument. We also convert the arpa file to a binary file using the default settings.
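As a quick sanity check of a trained binary, it can be loaded and queried from Python with the `kenlm` package; both the package and the model path below are assumptions (any released .klm file can be substituted).
```python
import kenlm  # assumed installed, e.g. from https://github.com/kpu/kenlm

# Illustrative path: the released small Mandarin LM.
model = kenlm.Model("data/lm/zh_giga.no_cna_cmn.prune01244.klm")

# Mandarin input is scored as whitespace-separated characters,
# matching the preprocessing rules described above.
print(model.score("今 天 天 气 很 好", bos=True, eos=True))
```
The returned score is a log10 probability, so values closer to zero indicate sentences the model considers more likely.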
Please refer above section for the meaning of each argument. We also convert the arpa file to binary file using default settings. - - - -## [KenLM](http://kheafield.com/code/kenlm/) - -统计语言模型工具有比较多的选择,目前使用比较好的有srilm及kenlm,其中kenlm比srilm晚出来,训练速度也更快,而且支持单机大数据的训练。现在介绍一下kenlm的使用方法。 - -1. 工具包的下载地址:http://kheafield.com/code/kenlm.tar.gz - -2. 使用。该工具在linux环境下使用方便。 先确保linux环境已经按照1.36.0的Boost和zlib - - ``` - boost: - yum install boost - yum install boost-devel - - zlib: - yum install zlib - yum install zlib-devel - ``` - - 然后gcc版本需要是4.8.2及以上。 - - ``` - wget -O - https://kheafield.com/code/kenlm.tar.gz |tar xz - mkdir kenlm/build - cd kenlm/build - cmake .. - make -j2 - ``` - -3. 训练。使用如下命令进行训练: - - ``` - build/bin/lmplz -o 3 --verbose_header --text people2014corpus_words.txt --arpa result/people2014corpus_words.arps - ``` - - 其中, - 1)people2014corpus_words.txt文件必须是分词以后的文件。 - - 训练语料<人民日报2014版熟语料>,包括: 1)标准人工切词及词性数据people2014.tar.gz, 2)未切词文本数据people2014_words.txt, 3)kenlm训练字粒度语言模型文件及其二进制文件people2014corpus_chars.arps/klm, 4)kenlm词粒度语言模型文件及其二进制文件people2014corpus_words.arps/klm。 - - 2)-o后面的5表示的是5-gram,一般取到3即可,但可以结合自己实际情况判断。 - -4. 压缩。压缩模型为二进制,方便模型快速加载: - - ``` - build/bin/build_binary ./result/people2014corpus_words.arps ./result/people2014corpus_words.klm - ``` diff --git a/docs/src/reference.md b/docs/src/reference.md new file mode 100644 index 000000000..d3676fff2 --- /dev/null +++ b/docs/src/reference.md @@ -0,0 +1,8 @@ +# Reference + +We refer these repos to build `model` and `engine`: + +* [delta](https://github.com/Delta-ML/delta.git) +* [espnet](https://github.com/espnet/espnet.git) +* [kaldi](https://github.com/kaldi-asr/kaldi.git) +* [wenet](https://github.com/mobvoi/wenet) diff --git a/docs/src/released_model.md b/docs/src/released_model.md new file mode 100644 index 000000000..50670aafa --- /dev/null +++ b/docs/src/released_model.md @@ -0,0 +1,28 @@ +# Released Models + +## Acoustic Model Released in paddle 2.X +Acoustic Model | Training Data | Token-based | Size | Descriptions | CER | WER | Hours of speech +:-------------:| :------------:| :-----: | -----: | :----------------- |:--------- | :---------- | :--------- +[Ds2 Online Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s0/aishell.s0.ds_online.5rnn.debug.tar.gz) | Aishell Dataset | Char-based | 345 MB | 2 Conv + 5 LSTM layers with only forward direction | 0.0824 |-| 151 h +[Ds2 Offline Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s0/aishell.s0.ds2.offline.cer6p65.release.tar.gz)| Aishell Dataset | Char-based | 306 MB | 2 Conv + 3 bidirectional GRU layers| 0.065 |-| 151 h +[Conformer Online Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s1/aishell.chunk.release.tar.gz) | Aishell Dataset | Char-based | 283 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention + CTC | 0.0594 |-| 151 h +[Conformer Offline Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s1/aishell.release.tar.gz) | Aishell Dataset | Char-based | 284 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention | 0.0547 |-| 151 h +[Conformer Librispeech Model](https://deepspeech.bj.bcebos.com/release2.1/librispeech/s1/conformer.release.tar.gz) | Librispeech Dataset | Word-based | 287 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention |-| 0.0325 | 960 h +[Transformer Librispeech Model](https://deepspeech.bj.bcebos.com/release2.1/librispeech/s1/transformer.release.tar.gz) | Librispeech Dataset | Word-based | 195 MB | Encoder:Conformer, 
diff --git a/docs/src/reference.md b/docs/src/reference.md
new file mode 100644
index 000000000..d3676fff2
--- /dev/null
+++ b/docs/src/reference.md
@@ -0,0 +1,8 @@
+# Reference
+
+We refer to these repos to build `model` and `engine`:
+
+* [delta](https://github.com/Delta-ML/delta.git)
+* [espnet](https://github.com/espnet/espnet.git)
+* [kaldi](https://github.com/kaldi-asr/kaldi.git)
+* [wenet](https://github.com/mobvoi/wenet)
diff --git a/docs/src/released_model.md b/docs/src/released_model.md
new file mode 100644
index 000000000..50670aafa
--- /dev/null
+++ b/docs/src/released_model.md
@@ -0,0 +1,28 @@
+# Released Models
+
+## Acoustic Model Released in paddle 2.X
+Acoustic Model | Training Data | Token-based | Size | Descriptions | CER | WER | Hours of speech
+:-------------:| :------------:| :-----: | -----: | :----------------- |:--------- | :---------- | :---------
+[Ds2 Online Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s0/aishell.s0.ds_online.5rnn.debug.tar.gz) | Aishell Dataset | Char-based | 345 MB | 2 Conv + 5 LSTM layers with only forward direction | 0.0824 |-| 151 h
+[Ds2 Offline Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s0/aishell.s0.ds2.offline.cer6p65.release.tar.gz)| Aishell Dataset | Char-based | 306 MB | 2 Conv + 3 bidirectional GRU layers| 0.065 |-| 151 h
+[Conformer Online Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s1/aishell.chunk.release.tar.gz) | Aishell Dataset | Char-based | 283 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention + CTC | 0.0594 |-| 151 h
+[Conformer Offline Aishell Model](https://deepspeech.bj.bcebos.com/release2.1/aishell/s1/aishell.release.tar.gz) | Aishell Dataset | Char-based | 284 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention | 0.0547 |-| 151 h
+[Conformer Librispeech Model](https://deepspeech.bj.bcebos.com/release2.1/librispeech/s1/conformer.release.tar.gz) | Librispeech Dataset | Word-based | 287 MB | Encoder:Conformer, Decoder:Transformer, Decoding method: Attention |-| 0.0325 | 960 h
+[Transformer Librispeech Model](https://deepspeech.bj.bcebos.com/release2.1/librispeech/s1/transformer.release.tar.gz) | Librispeech Dataset | Word-based | 195 MB | Encoder:Transformer, Decoder:Transformer, Decoding method: Attention |-| 0.0544 | 960 h
+
+## Acoustic Model Transformed from paddle 1.8
+Acoustic Model | Training Data | Token-based | Size | Descriptions | CER | WER | Hours of speech
+:-------------:| :------------:| :-----: | -----: | :----------------- | :---------- | :---------- | :---------
+[Ds2 Offline Aishell model](https://deepspeech.bj.bcebos.com/mandarin_models/aishell_model_v1.8_to_v2.x.tar.gz)|Aishell Dataset| Char-based| 234 MB| 2 Conv + 3 bidirectional GRU layers| 0.0804 |-| 151 h|
+[Ds2 Offline Librispeech model](https://deepspeech.bj.bcebos.com/eng_models/librispeech_v1.8_to_v2.x.tar.gz)|Librispeech Dataset| Word-based| 307 MB| 2 Conv + 3 bidirectional weight-sharing RNN layers |-| 0.0685| 960 h|
+[Ds2 Offline Baidu en8k model](https://deepspeech.bj.bcebos.com/eng_models/baidu_en8k_v1.8_to_v2.x.tar.gz)|Baidu Internal English Dataset| Word-based| 273 MB| 2 Conv + 3 bidirectional GRU layers |-| 0.0541 | 8628 h|
+
+
+
+## Language Model Released
+
+Language Model | Training Data | Token-based | Size | Descriptions
+:-------------:| :------------:| :-----: | -----: | :-----------------
+[English LM](https://deepspeech.bj.bcebos.com/en_lm/common_crawl_00.prune01111.trie.klm) | [CommonCrawl(en.00)](http://web-language-models.s3-website-us-east-1.amazonaws.com/ngrams/en/deduped/en.00.deduped.xz) | Word-based | 8.3 GB | Pruned with 0 1 1 1 1; About 1.85 billion n-grams; 'trie' binary with '-a 22 -q 8 -b 8'
+[Mandarin LM Small](https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm) | Baidu Internal Corpus | Char-based | 2.8 GB | Pruned with 0 1 2 4 4; About 0.13 billion n-grams; 'probing' binary with default settings
+[Mandarin LM Large](https://deepspeech.bj.bcebos.com/zh_lm/zhidao_giga.klm) | Baidu Internal Corpus | Char-based | 70.4 GB | No Pruning; About 3.7 billion n-grams; 'probing' binary with default settings
diff --git a/env.sh b/env.sh
index c5acd0112..461586e7d 100644
--- a/env.sh
+++ b/env.sh
@@ -1,10 +1,10 @@
export MAIN_ROOT=${PWD}
-export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
+export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:/usr/local/bin:${PATH}
export LC_ALL=C

# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
-export PYTHONIOENCODING=UTF-8
+export PYTHONIOENCODING=UTF-8
export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH}

export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/
diff --git a/examples/1xt2x/.gitignore b/examples/1xt2x/.gitignore
new file mode 100644
index 000000000..a9a5aecf4
--- /dev/null
+++ b/examples/1xt2x/.gitignore
@@ -0,0 +1 @@
+tmp
diff --git a/examples/1xt2x/README.md b/examples/1xt2x/README.md
new file mode 100644
index 000000000..1f5fe8e3b
--- /dev/null
+++ b/examples/1xt2x/README.md
@@ -0,0 +1,11 @@
+# 1xt2x
+
+Convert Deepspeech 1.8 released model to 2.x.
+
+## Model
+* Deepspeech2x
+
+## Exp
+* baidu_en8k
+* aishell
+* librispeech
diff --git a/examples/1xt2x/aishell/.gitignore b/examples/1xt2x/aishell/.gitignore
new file mode 100644
index 000000000..3631e544a
--- /dev/null
+++ b/examples/1xt2x/aishell/.gitignore
@@ -0,0 +1,5 @@
+exp
+data
+*log
+tmp
+nohup*
diff --git a/examples/1xt2x/aishell/conf/augmentation.json b/examples/1xt2x/aishell/conf/augmentation.json
new file mode 100644
index 000000000..fe51488c7
--- /dev/null
+++ b/examples/1xt2x/aishell/conf/augmentation.json
@@ -0,0 +1 @@
+[]
diff --git a/examples/1xt2x/aishell/conf/deepspeech2.yaml b/examples/1xt2x/aishell/conf/deepspeech2.yaml
new file mode 100644
index 000000000..c2d692263
--- /dev/null
+++ b/examples/1xt2x/aishell/conf/deepspeech2.yaml
@@ -0,0 +1,67 @@
+# https://yaml.org/type/float.html
+data:
+  train_manifest: data/manifest.train
+  dev_manifest: data/manifest.dev
+  test_manifest: data/manifest.test
+  min_input_len: 0.0
+  max_input_len: 27.0 # second
+  min_output_len: 0.0
+  max_output_len: .inf
+  min_output_input_ratio: 0.00
+  max_output_input_ratio: .inf
+
+collator:
+  batch_size: 64 # one gpu
+  mean_std_filepath: data/mean_std.npz
+  unit_type: char
+  vocab_filepath: data/vocab.txt
+  augmentation_config: conf/augmentation.json
+  random_seed: 0
+  spm_model_prefix:
+  spectrum_type: linear
+  feat_dim:
+  delta_delta: False
+  stride_ms: 10.0
+  window_ms: 20.0
+  n_fft: None
+  max_freq: None
+  target_sample_rate: 16000
+  use_dB_normalization: True
+  target_dB: -20
+  dither: 1.0
+  keep_transcription_text: False
+  sortagrad: True
+  shuffle_method: batch_shuffle
+  num_workers: 2
+
+model:
+  num_conv_layers: 2
+  num_rnn_layers: 3
+  rnn_layer_size: 1024
+  use_gru: True
+  share_rnn_weights: False
+  blank_id: 4333
+
+training:
+  n_epoch: 80
+  accum_grad: 1
+  lr: 2e-3
+  lr_decay: 0.83
+  weight_decay: 1e-06
+  global_grad_clip: 3.0
+  log_interval: 100
+  checkpoint:
+    kbest_n: 50
+    latest_n: 5
+
+decoding:
+  batch_size: 32
+  error_rate_type: cer
+  decoding_method: ctc_beam_search
+  lang_model_path: data/lm/zh_giga.no_cna_cmn.prune01244.klm
+  alpha: 2.6
+  beta: 5.0
+  beam_size: 300
+  cutoff_prob: 0.99
+  cutoff_top_n: 40
+  num_proc_bsearch: 8
diff --git a/examples/1xt2x/aishell/local/data.sh b/examples/1xt2x/aishell/local/data.sh
new file mode 100755
index 000000000..0bf35e1f5
--- /dev/null
+++ b/examples/1xt2x/aishell/local/data.sh
@@ -0,0 +1,70 @@
+#!/bin/bash
+if [ $# != 1 ];then
+    echo "usage: ${0} ckpt_dir"
+    exit -1
+fi
+
+ckpt_dir=$1
+
+stage=-1
+stop_stage=100
+
+source ${MAIN_ROOT}/utils/parse_options.sh
+
+mkdir -p data
+TARGET_DIR=${MAIN_ROOT}/examples/dataset
+mkdir -p ${TARGET_DIR}
+
+bash local/download_model.sh ${ckpt_dir}
+if [ $? -ne 0 ]; then
+    exit 1
+fi
+
+cd ${ckpt_dir}
+tar xzvf aishell_model_v1.8_to_v2.x.tar.gz
+cd -
+mv ${ckpt_dir}/mean_std.npz data/
+mv ${ckpt_dir}/vocab.txt data/
+
+
+if [ ${stage} -le -1 ] && [ ${stop_stage} -ge -1 ]; then
+    # download data, generate manifests
+    python3 ${TARGET_DIR}/aishell/aishell.py \
+    --manifest_prefix="data/manifest" \
+    --target_dir="${TARGET_DIR}/aishell"
+
+    if [ $? -ne 0 ]; then
+        echo "Prepare Aishell failed. Terminated."
+        exit 1
+    fi
+
+    for dataset in train dev test; do
+        mv data/manifest.${dataset} data/manifest.${dataset}.raw
+    done
+fi
+
+
+
+if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
+    # format manifest with tokenids, vocab size
+    for dataset in train dev test; do
+    {
+        python3 ${MAIN_ROOT}/utils/format_data.py \
+        --feat_type "raw" \
+        --cmvn_path "data/mean_std.npz" \
+        --unit_type "char" \
+        --vocab_path="data/vocab.txt" \
+        --manifest_path="data/manifest.${dataset}.raw" \
+        --output_path="data/manifest.${dataset}"
+
+        if [ $? -ne 0 ]; then
+            echo "Format manifest failed. Terminated."
+            exit 1
+        fi
+    } &
+    done
+    wait
+fi
+
+echo "Aishell data preparation done."
+exit 0
diff --git a/examples/1xt2x/aishell/local/download_lm_ch.sh b/examples/1xt2x/aishell/local/download_lm_ch.sh
new file mode 100755
index 000000000..ac27a9076
--- /dev/null
+++ b/examples/1xt2x/aishell/local/download_lm_ch.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+. ${MAIN_ROOT}/utils/utility.sh
+
+DIR=data/lm
+mkdir -p ${DIR}
+
+URL='https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm'
+MD5="29e02312deb2e59b3c8686c7966d4fe3"
+TARGET=${DIR}/zh_giga.no_cna_cmn.prune01244.klm
+
+
+echo "Download language model ..."
+download $URL $MD5 $TARGET
+if [ $? -ne 0 ]; then
+    echo "Fail to download the language model!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/aishell/local/download_model.sh b/examples/1xt2x/aishell/local/download_model.sh
new file mode 100644
index 000000000..ffa2f8101
--- /dev/null
+++ b/examples/1xt2x/aishell/local/download_model.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+
+if [ $# != 1 ];then
+    echo "usage: ${0} ckpt_dir"
+    exit -1
+fi
+
+ckpt_dir=$1
+
+. ${MAIN_ROOT}/utils/utility.sh
+
+URL='https://deepspeech.bj.bcebos.com/mandarin_models/aishell_model_v1.8_to_v2.x.tar.gz'
+MD5=87e7577d4bea737dbf3e8daab37aa808
+TARGET=${ckpt_dir}/aishell_model_v1.8_to_v2.x.tar.gz
+
+
+echo "Download Aishell model ..."
+download $URL $MD5 $TARGET
+if [ $? -ne 0 ]; then
+    echo "Fail to download Aishell model!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/aishell/local/test.sh b/examples/1xt2x/aishell/local/test.sh
new file mode 100755
index 000000000..2ae0740b3
--- /dev/null
+++ b/examples/1xt2x/aishell/local/test.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+if [ $# != 3 ];then
+    echo "usage: ${0} config_path ckpt_path_prefix model_type"
+    exit -1
+fi
+
+ngpu=$(echo $CUDA_VISIBLE_DEVICES | awk -F "," '{print NF}')
+echo "using $ngpu gpus..."
+
+config_path=$1
+ckpt_prefix=$2
+model_type=$3
+
+# download language model
+bash local/download_lm_ch.sh
+if [ $? -ne 0 ]; then
+    exit 1
+fi
+
+python3 -u ${BIN_DIR}/test.py \
+--nproc ${ngpu} \
+--config ${config_path} \
+--result_file ${ckpt_prefix}.rsl \
+--checkpoint_path ${ckpt_prefix} \
+--model_type ${model_type}
+
+if [ $? -ne 0 ]; then
+    echo "Failed in evaluation!"
+    exit 1
+fi
+
+
+exit 0
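`local/test.sh` reports the metric selected by `error_rate_type` in the config: `cer` for the Aishell recipe, `wer` for the English ones. Both are Levenshtein distances normalized by the reference length, computed over characters or words respectively. The repo's own implementation is `deepspeech.utils.error_rate`; the following is a minimal illustrative sketch only (assumes non-empty references):

```
# classic dynamic-programming edit distance, then length-normalize
def _edit_distance(ref, hyp):
    dp = list(range(len(hyp) + 1))  # distances against the empty ref prefix
    for i, r in enumerate(ref, 1):
        prev, dp[0] = dp[0], i
        for j, h in enumerate(hyp, 1):
            prev, dp[j] = dp[j], min(dp[j] + 1,         # deletion
                                     dp[j - 1] + 1,     # insertion
                                     prev + (r != h))   # substitution
    return dp[-1]

def wer(ref, hyp):  # word error rate: compare word tokens
    return _edit_distance(ref.split(), hyp.split()) / len(ref.split())

def cer(ref, hyp):  # character error rate: compare characters
    return _edit_distance(ref, hyp) / len(ref)
```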
diff --git a/examples/1xt2x/aishell/path.sh b/examples/1xt2x/aishell/path.sh
new file mode 100644
index 000000000..16a0ad63e
--- /dev/null
+++ b/examples/1xt2x/aishell/path.sh
@@ -0,0 +1,16 @@
+export MAIN_ROOT=`realpath ${PWD}/../../../`
+export LOCAL_DEEPSPEECH2=`realpath ${PWD}/../`
+
+export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
+export LC_ALL=C
+
+# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
+export PYTHONIOENCODING=UTF-8
+export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH}
+export PYTHONPATH=${LOCAL_DEEPSPEECH2}:${PYTHONPATH}
+
+export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/
+
+MODEL=deepspeech2
+export BIN_DIR=${LOCAL_DEEPSPEECH2}/src_deepspeech2x/bin
+echo "BIN_DIR "${BIN_DIR}
diff --git a/examples/1xt2x/aishell/run.sh b/examples/1xt2x/aishell/run.sh
new file mode 100755
index 000000000..1ccac1c35
--- /dev/null
+++ b/examples/1xt2x/aishell/run.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+set -e
+source path.sh
+
+stage=0
+stop_stage=100
+conf_path=conf/deepspeech2.yaml
+avg_num=1
+model_type=offline
+gpus=2
+
+source ${MAIN_ROOT}/utils/parse_options.sh || exit 1;
+
+v18_ckpt=aishell_v1.8
+ckpt=$(basename ${conf_path} | awk -F'.' '{print $1}')
+echo "checkpoint name ${ckpt}"
+
+if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
+    # prepare data
+    mkdir -p exp/${ckpt}/checkpoints
+    bash ./local/data.sh exp/${ckpt}/checkpoints || exit -1
+fi
+
+if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
+    # test ckpt avg_n
+    CUDA_VISIBLE_DEVICES=${gpus} ./local/test.sh ${conf_path} exp/${ckpt}/checkpoints/${v18_ckpt} ${model_type}|| exit -1
+fi
+
diff --git a/examples/1xt2x/baidu_en8k/.gitignore b/examples/1xt2x/baidu_en8k/.gitignore
new file mode 100644
index 000000000..3631e544a
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/.gitignore
@@ -0,0 +1,5 @@
+exp
+data
+*log
+tmp
+nohup*
diff --git a/examples/1xt2x/baidu_en8k/conf/augmentation.json b/examples/1xt2x/baidu_en8k/conf/augmentation.json
new file mode 100644
index 000000000..fe51488c7
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/conf/augmentation.json
@@ -0,0 +1 @@
+[]
diff --git a/examples/1xt2x/baidu_en8k/conf/deepspeech2.yaml b/examples/1xt2x/baidu_en8k/conf/deepspeech2.yaml
new file mode 100644
index 000000000..be51a9b90
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/conf/deepspeech2.yaml
@@ -0,0 +1,67 @@
+# https://yaml.org/type/float.html
+data:
+  train_manifest: data/manifest.train
+  dev_manifest: data/manifest.dev
+  test_manifest: data/manifest.test-clean
+  min_input_len: 0.0
+  max_input_len: .inf # second
+  min_output_len: 0.0
+  max_output_len: .inf
+  min_output_input_ratio: 0.00
+  max_output_input_ratio: .inf
+
+collator:
+  batch_size: 64 # one gpu
+  mean_std_filepath: data/mean_std.npz
+  unit_type: char
+  vocab_filepath: data/vocab.txt
+  augmentation_config: conf/augmentation.json
+  random_seed: 0
+  spm_model_prefix:
+  spectrum_type: linear
+  feat_dim:
+  delta_delta: False
+  stride_ms: 10.0
+  window_ms: 20.0
+  n_fft: None
+  max_freq: None
+  target_sample_rate: 16000
+  use_dB_normalization: True
+  target_dB: -20
+  dither: 1.0
+  keep_transcription_text: False
+  sortagrad: True
+  shuffle_method: batch_shuffle
+  num_workers: 2
+
+model:
+  num_conv_layers: 2
+  num_rnn_layers: 3
+  rnn_layer_size: 1024
+  use_gru: True
+  share_rnn_weights: False
+  blank_id: 28
+
+training:
+  n_epoch: 80
+  accum_grad: 1
+  lr: 2e-3
+  lr_decay: 0.83
+  weight_decay: 1e-06
+  global_grad_clip: 3.0
+  log_interval: 100
+  checkpoint:
+    kbest_n: 50
+    latest_n: 5
+
+decoding:
+  batch_size: 32
+  error_rate_type: wer
+  decoding_method: ctc_beam_search
+  lang_model_path: data/lm/common_crawl_00.prune01111.trie.klm
+  alpha: 1.4
+  beta: 0.35
+  beam_size: 500
+  cutoff_prob: 1.0
+  cutoff_top_n: 40
+  num_proc_bsearch: 8
diff --git a/examples/1xt2x/baidu_en8k/local/data.sh b/examples/1xt2x/baidu_en8k/local/data.sh
new file mode 100755
index 000000000..f0bde77fe
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/local/data.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+if [ $# != 1 ];then
+    echo "usage: ${0} ckpt_dir"
+    exit -1
+fi
+
+ckpt_dir=$1
+
+stage=-1
+stop_stage=100
+unit_type=char
+
+source ${MAIN_ROOT}/utils/parse_options.sh
+
+mkdir -p data
+TARGET_DIR=${MAIN_ROOT}/examples/dataset
+mkdir -p ${TARGET_DIR}
+
+
+bash local/download_model.sh ${ckpt_dir}
+if [ $? -ne 0 ]; then
+    exit 1
+fi
+
+cd ${ckpt_dir}
+tar xzvf baidu_en8k_v1.8_to_v2.x.tar.gz
+cd -
+mv ${ckpt_dir}/mean_std.npz data/
+mv ${ckpt_dir}/vocab.txt data/
+
+
+if [ ${stage} -le -1 ] && [ ${stop_stage} -ge -1 ]; then
+    # download data, generate manifests
+    python3 ${TARGET_DIR}/librispeech/librispeech.py \
+    --manifest_prefix="data/manifest" \
+    --target_dir="${TARGET_DIR}/librispeech" \
+    --full_download="True"
+
+    if [ $? -ne 0 ]; then
+        echo "Prepare LibriSpeech failed. Terminated."
+        exit 1
+    fi
+
+    for set in train-clean-100 train-clean-360 train-other-500 dev-clean dev-other test-clean test-other; do
+        mv data/manifest.${set} data/manifest.${set}.raw
+    done
+
+    rm -rf data/manifest.train.raw data/manifest.dev.raw data/manifest.test.raw
+    for set in train-clean-100 train-clean-360 train-other-500; do
+        cat data/manifest.${set}.raw >> data/manifest.train.raw
+    done
+
+    for set in dev-clean dev-other; do
+        cat data/manifest.${set}.raw >> data/manifest.dev.raw
+    done
+
+    for set in test-clean test-other; do
+        cat data/manifest.${set}.raw >> data/manifest.test.raw
+    done
+fi
+
+
+if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
+    # format manifest with tokenids, vocab size
+    for set in train dev test dev-clean dev-other test-clean test-other; do
+    {
+        python3 ${MAIN_ROOT}/utils/format_data.py \
+        --feat_type "raw" \
+        --cmvn_path "data/mean_std.npz" \
+        --unit_type ${unit_type} \
+        --vocab_path="data/vocab.txt" \
+        --manifest_path="data/manifest.${set}.raw" \
+        --output_path="data/manifest.${set}"
+
+        if [ $? -ne 0 ]; then
+            echo "Format manifest.${set} failed. Terminated."
+            exit 1
+        fi
+    }&
+    done
+    wait
+fi
+
+echo "LibriSpeech Data preparation done."
+exit 0
+
diff --git a/examples/1xt2x/baidu_en8k/local/download_lm_en.sh b/examples/1xt2x/baidu_en8k/local/download_lm_en.sh
new file mode 100755
index 000000000..dc1bdf665
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/local/download_lm_en.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+. ${MAIN_ROOT}/utils/utility.sh
+
+DIR=data/lm
+mkdir -p ${DIR}
+
+URL=https://deepspeech.bj.bcebos.com/en_lm/common_crawl_00.prune01111.trie.klm
+MD5="099a601759d467cd0a8523ff939819c5"
+TARGET=${DIR}/common_crawl_00.prune01111.trie.klm
+
+echo "Download language model ..."
+download $URL $MD5 $TARGET
+if [ $? -ne 0 ]; then
+    echo "Fail to download the language model!"
+    exit 1
+fi
+
+
+exit 0
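All of these `local/download_*.sh` helpers delegate to the `download` function sourced from `utils/utility.sh`, which fetches a URL and verifies an MD5 checksum before the caller unpacks the archive. Its contract, sketched in Python for illustration only (this is not the repo's implementation):

```
import hashlib
import urllib.request

def download(url: str, md5: str, target: str) -> None:
    # fetch the archive, then refuse to proceed on a checksum mismatch
    urllib.request.urlretrieve(url, target)
    digest = hashlib.md5(open(target, 'rb').read()).hexdigest()
    if digest != md5:
        raise RuntimeError(f'MD5 mismatch for {target}')
```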
diff --git a/examples/1xt2x/baidu_en8k/local/download_model.sh b/examples/1xt2x/baidu_en8k/local/download_model.sh
new file mode 100644
index 000000000..a8fbc31e8
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/local/download_model.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+if [ $# != 1 ];then
+    echo "usage: ${0} ckpt_dir"
+    exit -1
+fi
+
+ckpt_dir=$1
+
+
+. ${MAIN_ROOT}/utils/utility.sh
+
+URL='https://deepspeech.bj.bcebos.com/eng_models/baidu_en8k_v1.8_to_v2.x.tar.gz'
+MD5=c1676be8505cee436e6f312823e9008c
+TARGET=${ckpt_dir}/baidu_en8k_v1.8_to_v2.x.tar.gz
+
+
+echo "Download BaiduEn8k model ..."
+download $URL $MD5 $TARGET
+if [ $? -ne 0 ]; then
+    echo "Fail to download BaiduEn8k model!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/baidu_en8k/local/test.sh b/examples/1xt2x/baidu_en8k/local/test.sh
new file mode 100755
index 000000000..4d00f30b8
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/local/test.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+if [ $# != 3 ];then
+    echo "usage: ${0} config_path ckpt_path_prefix model_type"
+    exit -1
+fi
+
+ngpu=$(echo $CUDA_VISIBLE_DEVICES | awk -F "," '{print NF}')
+echo "using $ngpu gpus..."
+
+config_path=$1
+ckpt_prefix=$2
+model_type=$3
+
+# download language model
+bash local/download_lm_en.sh
+if [ $? -ne 0 ]; then
+    exit 1
+fi
+
+python3 -u ${BIN_DIR}/test.py \
+--nproc ${ngpu} \
+--config ${config_path} \
+--result_file ${ckpt_prefix}.rsl \
+--checkpoint_path ${ckpt_prefix} \
+--model_type ${model_type}
+
+if [ $? -ne 0 ]; then
+    echo "Failed in evaluation!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/baidu_en8k/path.sh b/examples/1xt2x/baidu_en8k/path.sh
new file mode 100644
index 000000000..16a0ad63e
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/path.sh
@@ -0,0 +1,16 @@
+export MAIN_ROOT=`realpath ${PWD}/../../../`
+export LOCAL_DEEPSPEECH2=`realpath ${PWD}/../`
+
+export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
+export LC_ALL=C
+
+# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
+export PYTHONIOENCODING=UTF-8
+export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH}
+export PYTHONPATH=${LOCAL_DEEPSPEECH2}:${PYTHONPATH}
+
+export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/
+
+MODEL=deepspeech2
+export BIN_DIR=${LOCAL_DEEPSPEECH2}/src_deepspeech2x/bin
+echo "BIN_DIR "${BIN_DIR}
diff --git a/examples/1xt2x/baidu_en8k/run.sh b/examples/1xt2x/baidu_en8k/run.sh
new file mode 100755
index 000000000..b7f69f6b5
--- /dev/null
+++ b/examples/1xt2x/baidu_en8k/run.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+set -e
+source path.sh
+
+stage=0
+stop_stage=100
+conf_path=conf/deepspeech2.yaml
+avg_num=1
+model_type=offline
+gpus=0
+
+source ${MAIN_ROOT}/utils/parse_options.sh || exit 1;
+
+v18_ckpt=baidu_en8k_v1.8
+ckpt=$(basename ${conf_path} | awk -F'.' '{print $1}')
+echo "checkpoint name ${ckpt}"
+
+if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
+    # prepare data
+    mkdir -p exp/${ckpt}/checkpoints
+    bash ./local/data.sh exp/${ckpt}/checkpoints || exit -1
+fi
+
+if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
+    # test ckpt avg_n
+    CUDA_VISIBLE_DEVICES=${gpus} ./local/test.sh ${conf_path} exp/${ckpt}/checkpoints/${v18_ckpt} ${model_type}|| exit -1
+fi
+
diff --git a/examples/1xt2x/librispeech/.gitignore b/examples/1xt2x/librispeech/.gitignore
new file mode 100644
index 000000000..3631e544a
--- /dev/null
+++ b/examples/1xt2x/librispeech/.gitignore
@@ -0,0 +1,5 @@
+exp
+data
+*log
+tmp
+nohup*
diff --git a/examples/1xt2x/librispeech/conf/augmentation.json b/examples/1xt2x/librispeech/conf/augmentation.json
new file mode 100644
index 000000000..fe51488c7
--- /dev/null
+++ b/examples/1xt2x/librispeech/conf/augmentation.json
@@ -0,0 +1 @@
+[]
diff --git a/examples/1xt2x/librispeech/conf/deepspeech2.yaml b/examples/1xt2x/librispeech/conf/deepspeech2.yaml
new file mode 100644
index 000000000..ad7fb2c19
--- /dev/null
+++ b/examples/1xt2x/librispeech/conf/deepspeech2.yaml
@@ -0,0 +1,67 @@
+# https://yaml.org/type/float.html
+data:
+  train_manifest: data/manifest.train
+  dev_manifest: data/manifest.dev
+  test_manifest: data/manifest.test-clean
+  min_input_len: 0.0
+  max_input_len: 1000.0 # second
+  min_output_len: 0.0
+  max_output_len: .inf
+  min_output_input_ratio: 0.00
+  max_output_input_ratio: .inf
+
+collator:
+  batch_size: 64 # one gpu
+  mean_std_filepath: data/mean_std.npz
+  unit_type: char
+  vocab_filepath: data/vocab.txt
+  augmentation_config: conf/augmentation.json
+  random_seed: 0
+  spm_model_prefix:
+  spectrum_type: linear
+  feat_dim:
+  delta_delta: False
+  stride_ms: 10.0
+  window_ms: 20.0
+  n_fft: None
+  max_freq: None
+  target_sample_rate: 16000
+  use_dB_normalization: True
+  target_dB: -20
+  dither: 1.0
+  keep_transcription_text: False
+  sortagrad: True
+  shuffle_method: batch_shuffle
+  num_workers: 2
+
+model:
+  num_conv_layers: 2
+  num_rnn_layers: 3
+  rnn_layer_size: 2048
+  use_gru: False
+  share_rnn_weights: True
+  blank_id: 28
+
+training:
+  n_epoch: 80
+  accum_grad: 1
+  lr: 2e-3
+  lr_decay: 0.83
+  weight_decay: 1e-06
+  global_grad_clip: 3.0
+  log_interval: 100
+  checkpoint:
+    kbest_n: 50
+    latest_n: 5
+
+decoding:
+  batch_size: 32
+  error_rate_type: wer
+  decoding_method: ctc_beam_search
+  lang_model_path: data/lm/common_crawl_00.prune01111.trie.klm
+  alpha: 2.5
+  beta: 0.3
+  beam_size: 500
+  cutoff_prob: 1.0
+  cutoff_top_n: 40
+  num_proc_bsearch: 8
diff --git a/examples/1xt2x/librispeech/local/data.sh b/examples/1xt2x/librispeech/local/data.sh
new file mode 100755
index 000000000..6f9bc5566
--- /dev/null
+++ b/examples/1xt2x/librispeech/local/data.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+if [ $# != 1 ];then
+    echo "usage: ${0} ckpt_dir"
+    exit -1
+fi
+
+ckpt_dir=$1
+
+stage=-1
+stop_stage=100
+unit_type=char
+
+source ${MAIN_ROOT}/utils/parse_options.sh
+
+mkdir -p data
+TARGET_DIR=${MAIN_ROOT}/examples/dataset
+mkdir -p ${TARGET_DIR}
+
+bash local/download_model.sh ${ckpt_dir}
+if [ $? -ne 0 ]; then
+    exit 1
+fi
+
+cd ${ckpt_dir}
+tar xzvf librispeech_v1.8_to_v2.x.tar.gz
+cd -
+mv ${ckpt_dir}/mean_std.npz data/
+mv ${ckpt_dir}/vocab.txt data/
+
+if [ ${stage} -le -1 ] && [ ${stop_stage} -ge -1 ]; then
+    # download data, generate manifests
+    python3 ${TARGET_DIR}/librispeech/librispeech.py \
+    --manifest_prefix="data/manifest" \
+    --target_dir="${TARGET_DIR}/librispeech" \
+    --full_download="True"
+
+    if [ $? -ne 0 ]; then
+        echo "Prepare LibriSpeech failed. Terminated."
+        exit 1
+    fi
+
+    for set in train-clean-100 train-clean-360 train-other-500 dev-clean dev-other test-clean test-other; do
+        mv data/manifest.${set} data/manifest.${set}.raw
+    done
+
+    rm -rf data/manifest.train.raw data/manifest.dev.raw data/manifest.test.raw
+    for set in train-clean-100 train-clean-360 train-other-500; do
+        cat data/manifest.${set}.raw >> data/manifest.train.raw
+    done
+
+    for set in dev-clean dev-other; do
+        cat data/manifest.${set}.raw >> data/manifest.dev.raw
+    done
+
+    for set in test-clean test-other; do
+        cat data/manifest.${set}.raw >> data/manifest.test.raw
+    done
+fi
+
+if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
+    # format manifest with tokenids, vocab size
+    for set in train dev test dev-clean dev-other test-clean test-other; do
+    {
+        python3 ${MAIN_ROOT}/utils/format_data.py \
+        --feat_type "raw" \
+        --cmvn_path "data/mean_std.npz" \
+        --unit_type ${unit_type} \
+        --vocab_path="data/vocab.txt" \
+        --manifest_path="data/manifest.${set}.raw" \
+        --output_path="data/manifest.${set}"
+
+        if [ $? -ne 0 ]; then
+            echo "Format manifest.${set} failed. Terminated."
+            exit 1
+        fi
+    }&
+    done
+    wait
+fi
+
+echo "LibriSpeech Data preparation done."
+exit 0
+
diff --git a/examples/1xt2x/librispeech/local/download_lm_en.sh b/examples/1xt2x/librispeech/local/download_lm_en.sh
new file mode 100755
index 000000000..dc1bdf665
--- /dev/null
+++ b/examples/1xt2x/librispeech/local/download_lm_en.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+. ${MAIN_ROOT}/utils/utility.sh
+
+DIR=data/lm
+mkdir -p ${DIR}
+
+URL=https://deepspeech.bj.bcebos.com/en_lm/common_crawl_00.prune01111.trie.klm
+MD5="099a601759d467cd0a8523ff939819c5"
+TARGET=${DIR}/common_crawl_00.prune01111.trie.klm
+
+echo "Download language model ..."
+download $URL $MD5 $TARGET
+if [ $? -ne 0 ]; then
+    echo "Fail to download the language model!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/librispeech/local/download_model.sh b/examples/1xt2x/librispeech/local/download_model.sh
new file mode 100644
index 000000000..375d66404
--- /dev/null
+++ b/examples/1xt2x/librispeech/local/download_model.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+
+if [ $# != 1 ];then
+    echo "usage: ${0} ckpt_dir"
+    exit -1
+fi
+
+ckpt_dir=$1
+
+. ${MAIN_ROOT}/utils/utility.sh
+
+URL='https://deepspeech.bj.bcebos.com/eng_models/librispeech_v1.8_to_v2.x.tar.gz'
+MD5=a06d9aadb560ea113984dc98d67232c8
+TARGET=${ckpt_dir}/librispeech_v1.8_to_v2.x.tar.gz
+
+
+echo "Download LibriSpeech model ..."
+download $URL $MD5 $TARGET
+if [ $? -ne 0 ]; then
+    echo "Fail to download LibriSpeech model!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/librispeech/local/test.sh b/examples/1xt2x/librispeech/local/test.sh
new file mode 100755
index 000000000..4d00f30b8
--- /dev/null
+++ b/examples/1xt2x/librispeech/local/test.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+if [ $# != 3 ];then
+    echo "usage: ${0} config_path ckpt_path_prefix model_type"
+    exit -1
+fi
+
+ngpu=$(echo $CUDA_VISIBLE_DEVICES | awk -F "," '{print NF}')
+echo "using $ngpu gpus..."
+
+config_path=$1
+ckpt_prefix=$2
+model_type=$3
+
+# download language model
+bash local/download_lm_en.sh
+if [ $? -ne 0 ]; then
+    exit 1
+fi
+
+python3 -u ${BIN_DIR}/test.py \
+--nproc ${ngpu} \
+--config ${config_path} \
+--result_file ${ckpt_prefix}.rsl \
+--checkpoint_path ${ckpt_prefix} \
+--model_type ${model_type}
+
+if [ $? -ne 0 ]; then
+    echo "Failed in evaluation!"
+    exit 1
+fi
+
+
+exit 0
diff --git a/examples/1xt2x/librispeech/path.sh b/examples/1xt2x/librispeech/path.sh
new file mode 100644
index 000000000..e95de15b0
--- /dev/null
+++ b/examples/1xt2x/librispeech/path.sh
@@ -0,0 +1,15 @@
+export MAIN_ROOT=`realpath ${PWD}/../../../`
+export LOCAL_DEEPSPEECH2=`realpath ${PWD}/../`
+
+export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
+export LC_ALL=C
+
+# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
+export PYTHONIOENCODING=UTF-8
+export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH}
+export PYTHONPATH=${LOCAL_DEEPSPEECH2}:${PYTHONPATH}
+
+export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/
+
+MODEL=deepspeech2
+export BIN_DIR=${LOCAL_DEEPSPEECH2}/src_deepspeech2x/bin
diff --git a/examples/1xt2x/librispeech/run.sh b/examples/1xt2x/librispeech/run.sh
new file mode 100755
index 000000000..8c667de2e
--- /dev/null
+++ b/examples/1xt2x/librispeech/run.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+set -e
+source path.sh
+
+stage=0
+stop_stage=100
+conf_path=conf/deepspeech2.yaml
+avg_num=1
+model_type=offline
+gpus=1
+
+source ${MAIN_ROOT}/utils/parse_options.sh || exit 1;
+
+v18_ckpt=librispeech_v1.8
+ckpt=$(basename ${conf_path} | awk -F'.' '{print $1}')
+echo "checkpoint name ${ckpt}"
+
+if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
+    # prepare data
+    mkdir -p exp/${ckpt}/checkpoints
+    bash ./local/data.sh exp/${ckpt}/checkpoints || exit -1
+fi
+
+if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
+    # test ckpt avg_n
+    CUDA_VISIBLE_DEVICES=${gpus} ./local/test.sh ${conf_path} exp/${ckpt}/checkpoints/${v18_ckpt} ${model_type}|| exit -1
+fi
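The `src_deepspeech2x` package added next back-fills torch-style APIs onto paddle so that code ported from DeepSpeech 1.8 runs unchanged. Every shim in it follows the same guarded pattern, reduced here to a single illustrative sketch (not part of the patch itself):

```
import paddle

def view(xs: paddle.Tensor, *args: int) -> paddle.Tensor:
    # torch-style alias for paddle's reshape
    return xs.reshape(args)

# patch only when paddle does not already provide the method,
# so a native implementation always wins over the shim
if not hasattr(paddle.Tensor, 'view'):
    paddle.Tensor.view = view
```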
+ """ + dtype = tensor_dtype + if dtype == core.VarDesc.VarType.FP32: + return paddle.float32 + elif dtype == core.VarDesc.VarType.FP64: + return paddle.float64 + elif dtype == core.VarDesc.VarType.FP16: + return paddle.float16 + elif dtype == core.VarDesc.VarType.INT32: + return paddle.int32 + elif dtype == core.VarDesc.VarType.INT16: + return paddle.int16 + elif dtype == core.VarDesc.VarType.INT64: + return paddle.int64 + elif dtype == core.VarDesc.VarType.BOOL: + return paddle.bool + elif dtype == core.VarDesc.VarType.BF16: + # since there is still no support for bfloat16 in NumPy, + # uint16 is used for casting bfloat16 + return paddle.uint16 + elif dtype == core.VarDesc.VarType.UINT8: + return paddle.uint8 + elif dtype == core.VarDesc.VarType.INT8: + return paddle.int8 + elif dtype == core.VarDesc.VarType.COMPLEX64: + return paddle.complex64 + elif dtype == core.VarDesc.VarType.COMPLEX128: + return paddle.complex128 + else: + raise ValueError("Not supported tensor dtype %s" % dtype) + + +if not hasattr(paddle, 'softmax'): + logger.warn("register user softmax to paddle, remove this when fixed!") + setattr(paddle, 'softmax', paddle.nn.functional.softmax) + +if not hasattr(paddle, 'log_softmax'): + logger.warn("register user log_softmax to paddle, remove this when fixed!") + setattr(paddle, 'log_softmax', paddle.nn.functional.log_softmax) + +if not hasattr(paddle, 'sigmoid'): + logger.warn("register user sigmoid to paddle, remove this when fixed!") + setattr(paddle, 'sigmoid', paddle.nn.functional.sigmoid) + +if not hasattr(paddle, 'log_sigmoid'): + logger.warn("register user log_sigmoid to paddle, remove this when fixed!") + setattr(paddle, 'log_sigmoid', paddle.nn.functional.log_sigmoid) + +if not hasattr(paddle, 'relu'): + logger.warn("register user relu to paddle, remove this when fixed!") + setattr(paddle, 'relu', paddle.nn.functional.relu) + + +def cat(xs, dim=0): + return paddle.concat(xs, axis=dim) + + +if not hasattr(paddle, 'cat'): + logger.warn( + "override cat of paddle if exists or register, remove this when fixed!") + paddle.cat = cat + + +########### hcak paddle.Tensor ############# +def item(x: paddle.Tensor): + return x.numpy().item() + + +if not hasattr(paddle.Tensor, 'item'): + logger.warn( + "override item of paddle.Tensor if exists or register, remove this when fixed!" + ) + paddle.Tensor.item = item + + +def func_long(x: paddle.Tensor): + return paddle.cast(x, paddle.long) + + +if not hasattr(paddle.Tensor, 'long'): + logger.warn( + "override long of paddle.Tensor if exists or register, remove this when fixed!" + ) + paddle.Tensor.long = func_long + +if not hasattr(paddle.Tensor, 'numel'): + logger.warn( + "override numel of paddle.Tensor if exists or register, remove this when fixed!" + ) + paddle.Tensor.numel = paddle.numel + + +def new_full(x: paddle.Tensor, + size: Union[List[int], Tuple[int], paddle.Tensor], + fill_value: Union[float, int, bool, paddle.Tensor], + dtype=None): + return paddle.full(size, fill_value, dtype=x.dtype) + + +if not hasattr(paddle.Tensor, 'new_full'): + logger.warn( + "override new_full of paddle.Tensor if exists or register, remove this when fixed!" 
+########### hack paddle.Tensor #############
+def item(x: paddle.Tensor):
+    return x.numpy().item()
+
+
+if not hasattr(paddle.Tensor, 'item'):
+    logger.warn(
+        "override item of paddle.Tensor if exists or register, remove this when fixed!"
+    )
+    paddle.Tensor.item = item
+
+
+def func_long(x: paddle.Tensor):
+    return paddle.cast(x, paddle.long)
+
+
+if not hasattr(paddle.Tensor, 'long'):
+    logger.warn(
+        "override long of paddle.Tensor if exists or register, remove this when fixed!"
+    )
+    paddle.Tensor.long = func_long
+
+if not hasattr(paddle.Tensor, 'numel'):
+    logger.warn(
+        "override numel of paddle.Tensor if exists or register, remove this when fixed!"
+    )
+    paddle.Tensor.numel = paddle.numel
+
+
+def new_full(x: paddle.Tensor,
+             size: Union[List[int], Tuple[int], paddle.Tensor],
+             fill_value: Union[float, int, bool, paddle.Tensor],
+             dtype=None):
+    return paddle.full(size, fill_value, dtype=x.dtype)
+
+
+if not hasattr(paddle.Tensor, 'new_full'):
+    logger.warn(
+        "override new_full of paddle.Tensor if exists or register, remove this when fixed!"
+    )
+    paddle.Tensor.new_full = new_full
+
+
+def eq(xs: paddle.Tensor, ys: Union[paddle.Tensor, float]) -> paddle.Tensor:
+    if convert_dtype_to_string(xs.dtype) == paddle.bool:
+        xs = xs.astype(paddle.int)
+    return xs.equal(
+        paddle.to_tensor(
+            ys, dtype=convert_dtype_to_string(xs.dtype), place=xs.place))
+
+
+if not hasattr(paddle.Tensor, 'eq'):
+    logger.warn(
+        "override eq of paddle.Tensor if exists or register, remove this when fixed!"
+    )
+    paddle.Tensor.eq = eq
+
+if not hasattr(paddle, 'eq'):
+    logger.warn(
+        "override eq of paddle if exists or register, remove this when fixed!")
+    paddle.eq = eq
+
+
+def contiguous(xs: paddle.Tensor) -> paddle.Tensor:
+    return xs
+
+
+if not hasattr(paddle.Tensor, 'contiguous'):
+    logger.warn(
+        "override contiguous of paddle.Tensor if exists or register, remove this when fixed!"
+    )
+    paddle.Tensor.contiguous = contiguous
+
+
+def size(xs: paddle.Tensor, *args: int) -> paddle.Tensor:
+    nargs = len(args)
+    assert (nargs <= 1)
+    s = paddle.shape(xs)
+    if nargs == 1:
+        return s[args[0]]
+    else:
+        return s
+
+
+#`to_static` does not process the `size` property; some `paddle` APIs may depend on it.
logger.warn(
+    "override size of paddle.Tensor "
+    "(`to_static` does not process the `size` property; some `paddle` APIs may depend on it), remove this when fixed!"
+)
+paddle.Tensor.size = size
+
+
+def view(xs: paddle.Tensor, *args: int) -> paddle.Tensor:
+    return xs.reshape(args)
+
+
+if not hasattr(paddle.Tensor, 'view'):
+    logger.warn("register user view to paddle.Tensor, remove this when fixed!")
+    paddle.Tensor.view = view
+
+
+def view_as(xs: paddle.Tensor, ys: paddle.Tensor) -> paddle.Tensor:
+    return xs.reshape(ys.size())
+
+
+if not hasattr(paddle.Tensor, 'view_as'):
+    logger.warn(
+        "register user view_as to paddle.Tensor, remove this when fixed!")
+    paddle.Tensor.view_as = view_as
+
+
+def is_broadcastable(shp1, shp2):
+    for a, b in zip(shp1[::-1], shp2[::-1]):
+        if a == 1 or b == 1 or a == b:
+            pass
+        else:
+            return False
+    return True
+
+
+def masked_fill(xs: paddle.Tensor,
+                mask: paddle.Tensor,
+                value: Union[float, int]):
+    assert is_broadcastable(xs.shape, mask.shape) is True
+    bshape = paddle.broadcast_shape(xs.shape, mask.shape)
+    mask = mask.broadcast_to(bshape)
+    trues = paddle.ones_like(xs) * value
+    xs = paddle.where(mask, trues, xs)
+    return xs
+
+
+if not hasattr(paddle.Tensor, 'masked_fill'):
+    logger.warn(
+        "register user masked_fill to paddle.Tensor, remove this when fixed!")
+    paddle.Tensor.masked_fill = masked_fill
+
+
+def masked_fill_(xs: paddle.Tensor,
+                 mask: paddle.Tensor,
+                 value: Union[float, int]) -> paddle.Tensor:
+    assert is_broadcastable(xs.shape, mask.shape) is True
+    bshape = paddle.broadcast_shape(xs.shape, mask.shape)
+    mask = mask.broadcast_to(bshape)
+    trues = paddle.ones_like(xs) * value
+    ret = paddle.where(mask, trues, xs)
+    paddle.assign(ret.detach(), output=xs)
+    return xs
+
+
+if not hasattr(paddle.Tensor, 'masked_fill_'):
+    logger.warn(
+        "register user masked_fill_ to paddle.Tensor, remove this when fixed!")
+    paddle.Tensor.masked_fill_ = masked_fill_
+
+
+def fill_(xs: paddle.Tensor, value: Union[float, int]) -> paddle.Tensor:
+    val = paddle.full_like(xs, value)
+    paddle.assign(val.detach(), output=xs)
+    return xs
+
+
+if not hasattr(paddle.Tensor, 'fill_'):
+    logger.warn("register user fill_ to paddle.Tensor, remove this when fixed!")
+    paddle.Tensor.fill_ = fill_
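+# NOTE: the trailing-underscore helpers above emulate torch's in-place
+# semantics: paddle.assign(..., output=xs) writes the result back into the
+# original tensor, so callers can keep torch's `x.masked_fill_(mask, v)` and
+# `x.fill_(v)` style.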
+
+
+def repeat(xs: paddle.Tensor, *size: Any) -> paddle.Tensor:
+    return paddle.tile(xs, size)
+
+
+if not hasattr(paddle.Tensor, 'repeat'):
+    logger.warn(
+        "register user repeat to paddle.Tensor, remove this when fixed!")
+    paddle.Tensor.repeat = repeat
+
+if not hasattr(paddle.Tensor, 'softmax'):
+    logger.warn(
+        "register user softmax to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'softmax', paddle.nn.functional.softmax)
+
+if not hasattr(paddle.Tensor, 'sigmoid'):
+    logger.warn(
+        "register user sigmoid to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'sigmoid', paddle.nn.functional.sigmoid)
+
+if not hasattr(paddle.Tensor, 'relu'):
+    logger.warn("register user relu to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'relu', paddle.nn.functional.relu)
+
+
+def type_as(x: paddle.Tensor, other: paddle.Tensor) -> paddle.Tensor:
+    return x.astype(other.dtype)
+
+
+if not hasattr(paddle.Tensor, 'type_as'):
+    logger.warn(
+        "register user type_as to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'type_as', type_as)
+
+
+def to(x: paddle.Tensor, *args, **kwargs) -> paddle.Tensor:
+    assert len(args) == 1
+    if isinstance(args[0], str):  # dtype
+        return x.astype(args[0])
+    elif isinstance(args[0], paddle.Tensor):  #Tensor
+        return x.astype(args[0].dtype)
+    else:  # Device
+        return x
+
+
+if not hasattr(paddle.Tensor, 'to'):
+    logger.warn("register user to to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'to', to)
+
+
+def func_float(x: paddle.Tensor) -> paddle.Tensor:
+    return x.astype(paddle.float)
+
+
+if not hasattr(paddle.Tensor, 'float'):
+    logger.warn("register user float to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'float', func_float)
+
+
+def func_int(x: paddle.Tensor) -> paddle.Tensor:
+    return x.astype(paddle.int)
+
+
+if not hasattr(paddle.Tensor, 'int'):
+    logger.warn("register user int to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'int', func_int)
+
+
+def tolist(x: paddle.Tensor) -> List[Any]:
+    return x.numpy().tolist()
+
+
+if not hasattr(paddle.Tensor, 'tolist'):
+    logger.warn(
+        "register user tolist to paddle.Tensor, remove this when fixed!")
+    setattr(paddle.Tensor, 'tolist', tolist)
+
+
+########### hack paddle.nn #############
+class GLU(nn.Layer):
+    """Gated Linear Units (GLU) Layer"""
+
+    def __init__(self, dim: int=-1):
+        super().__init__()
+        self.dim = dim
+
+    def forward(self, xs):
+        return F.glu(xs, axis=self.dim)
+
+
+if not hasattr(paddle.nn, 'GLU'):
+    logger.warn("register user GLU to paddle.nn, remove this when fixed!")
+    setattr(paddle.nn, 'GLU', GLU)
diff --git a/examples/1xt2x/src_deepspeech2x/bin/test.py b/examples/1xt2x/src_deepspeech2x/bin/test.py
new file mode 100644
index 000000000..59e1b38dd
--- /dev/null
+++ b/examples/1xt2x/src_deepspeech2x/bin/test.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Evaluation for DeepSpeech2 model.""" +from src_deepspeech2x.test_model import DeepSpeech2Tester as Tester + +from deepspeech.exps.deepspeech2.config import get_cfg_defaults +from deepspeech.training.cli import default_argument_parser +from deepspeech.utils.utility import print_arguments + + +def main_sp(config, args): + exp = Tester(config, args) + exp.setup() + exp.run_test() + + +def main(config, args): + main_sp(config, args) + + +if __name__ == "__main__": + parser = default_argument_parser() + parser.add_argument("--model_type") + # save asr result to + parser.add_argument( + "--result_file", type=str, help="path of save the asr result") + args = parser.parse_args() + print_arguments(args, globals()) + if args.model_type is None: + args.model_type = 'offline' + print("model_type:{}".format(args.model_type)) + + # https://yaml.org/type/float.html + config = get_cfg_defaults(args.model_type) + if args.config: + config.merge_from_file(args.config) + if args.opts: + config.merge_from_list(args.opts) + config.freeze() + print(config) + if args.dump_config: + with open(args.dump_config, 'w') as f: + print(config, file=f) + + main(config, args) diff --git a/examples/1xt2x/src_deepspeech2x/models/__init__.py b/examples/1xt2x/src_deepspeech2x/models/__init__.py new file mode 100644 index 000000000..185a92b8d --- /dev/null +++ b/examples/1xt2x/src_deepspeech2x/models/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/examples/1xt2x/src_deepspeech2x/models/ds2/__init__.py b/examples/1xt2x/src_deepspeech2x/models/ds2/__init__.py new file mode 100644 index 000000000..39bea5bf9 --- /dev/null +++ b/examples/1xt2x/src_deepspeech2x/models/ds2/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from .deepspeech2 import DeepSpeech2InferModel +from .deepspeech2 import DeepSpeech2Model + +__all__ = ['DeepSpeech2Model', 'DeepSpeech2InferModel'] diff --git a/examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py b/examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py new file mode 100644 index 000000000..ce123743a --- /dev/null +++ b/examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py @@ -0,0 +1,314 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
diff --git a/examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py b/examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py
new file mode 100644
index 000000000..ce123743a
--- /dev/null
+++ b/examples/1xt2x/src_deepspeech2x/models/ds2/deepspeech2.py
@@ -0,0 +1,314 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Deepspeech2 ASR Model"""
+from typing import Optional
+
+import paddle
+from paddle import nn
+from src_deepspeech2x.models.ds2.rnn import RNNStack
+from yacs.config import CfgNode
+
+from deepspeech.models.ds2.conv import ConvStack
+from deepspeech.modules.ctc import CTCDecoder
+from deepspeech.utils import layer_tools
+from deepspeech.utils.checkpoint import Checkpoint
+from deepspeech.utils.log import Log
+logger = Log(__name__).getlog()
+
+__all__ = ['DeepSpeech2Model', 'DeepSpeech2InferModel']
+
+
+class CRNNEncoder(nn.Layer):
+    def __init__(self,
+                 feat_size,
+                 dict_size,
+                 num_conv_layers=2,
+                 num_rnn_layers=3,
+                 rnn_size=1024,
+                 use_gru=False,
+                 share_rnn_weights=True):
+        super().__init__()
+        self.rnn_size = rnn_size
+        self.feat_size = feat_size  # 161 for linear
+        self.dict_size = dict_size
+
+        self.conv = ConvStack(feat_size, num_conv_layers)
+
+        i_size = self.conv.output_height  # H after conv stack
+        self.rnn = RNNStack(
+            i_size=i_size,
+            h_size=rnn_size,
+            num_stacks=num_rnn_layers,
+            use_gru=use_gru,
+            share_rnn_weights=share_rnn_weights)
+
+    @property
+    def output_size(self):
+        return self.rnn_size * 2
+
+    def forward(self, audio, audio_len):
+        """Compute Encoder outputs
+
+        Args:
+            audio (Tensor): [B, Tmax, D]
+            audio_len (Tensor): [B]
+        Returns:
+            x (Tensor): encoder outputs, [B, T, D]
+            x_lens (Tensor): encoder length, [B]
+        """
+        # [B, T, D] -> [B, D, T]
+        audio = audio.transpose([0, 2, 1])
+        # [B, D, T] -> [B, C=1, D, T]
+        x = audio.unsqueeze(1)
+        x_lens = audio_len
+
+        # convolution group
+        x, x_lens = self.conv(x, x_lens)
+        x_val = x.numpy()
+
+        # convert data from convolution feature map to sequence of vectors
+        #B, C, D, T = paddle.shape(x)  # not work under jit
+        x = x.transpose([0, 3, 1, 2])  #[B, T, C, D]
+        #x = x.reshape([B, T, C * D])  #[B, T, C*D]  # not work under jit
+        x = x.reshape([0, 0, -1])  #[B, T, C*D]
+
+        # remove padding part
+        x, x_lens = self.rnn(x, x_lens)  #[B, T, D]
+        return x, x_lens
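+# NOTE: shape walkthrough for CRNNEncoder.forward (matches the code above):
+#   audio [B, T, D] -> transpose -> [B, D, T] -> unsqueeze -> [B, 1, D, T]
+#   conv stack      -> [B, C, D', T']   (frequency and time downsampled)
+#   transpose       -> [B, T', C, D'] -> reshape([0, 0, -1]) -> [B, T', C*D']
+#   rnn stack       -> [B, T', 2 * rnn_size], with padded frames masked to zero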
+
+
+class DeepSpeech2Model(nn.Layer):
+    """The DeepSpeech2 network structure.
+
+    :param audio_data: Audio spectrogram data layer.
+    :type audio_data: Variable
+    :param text_data: Transcription text data layer.
+    :type text_data: Variable
+    :param audio_len: Valid sequence length data layer.
+    :type audio_len: Variable
+    :param masks: Masks data layer to reset padding.
+    :type masks: Variable
+    :param dict_size: Dictionary size for tokenized transcription.
+    :type dict_size: int
+    :param num_conv_layers: Number of stacking convolution layers.
+    :type num_conv_layers: int
+    :param num_rnn_layers: Number of stacking RNN layers.
+    :type num_rnn_layers: int
+    :param rnn_size: RNN layer size (dimension of RNN cells).
+    :type rnn_size: int
+    :param use_gru: Use gru if set True. Use simple rnn if set False.
+    :type use_gru: bool
+    :param share_rnn_weights: Whether to share input-hidden weights between
+                              forward and backward direction RNNs.
+                              It is only available when use_gru=False.
+    :type share_rnn_weights: bool
+    :return: A tuple of an output unnormalized log probability layer (
+             before softmax) and a ctc cost layer.
+    :rtype: tuple of LayerOutput
+    """
+
+    @classmethod
+    def params(cls, config: Optional[CfgNode]=None) -> CfgNode:
+        default = CfgNode(
+            dict(
+                num_conv_layers=2,  #Number of stacking convolution layers.
+                num_rnn_layers=3,  #Number of stacking RNN layers.
+                rnn_layer_size=1024,  #RNN layer size (number of RNN cells).
+                use_gru=True,  #Use gru if set True. Use simple rnn if set False.
+                share_rnn_weights=True  #Whether to share input-hidden weights between forward and backward directional RNNs. Notice that for GRU, weight sharing is not supported.
+            ))
+        if config is not None:
+            config.merge_from_other_cfg(default)
+        return default
+
+    def __init__(self,
+                 feat_size,
+                 dict_size,
+                 num_conv_layers=2,
+                 num_rnn_layers=3,
+                 rnn_size=1024,
+                 use_gru=False,
+                 share_rnn_weights=True,
+                 blank_id=0):
+        super().__init__()
+        self.encoder = CRNNEncoder(
+            feat_size=feat_size,
+            dict_size=dict_size,
+            num_conv_layers=num_conv_layers,
+            num_rnn_layers=num_rnn_layers,
+            rnn_size=rnn_size,
+            use_gru=use_gru,
+            share_rnn_weights=share_rnn_weights)
+        assert (self.encoder.output_size == rnn_size * 2)
+
+        self.decoder = CTCDecoder(
+            odim=dict_size,  # is in vocab
+            enc_n_units=self.encoder.output_size,
+            blank_id=blank_id,  # first token is <blank>
+            dropout_rate=0.0,
+            reduction=True,  # sum
+            batch_average=True)  # sum / batch_size
+
+    def forward(self, audio, audio_len, text, text_len):
+        """Compute Model loss
+
+        Args:
+            audio (Tensor): [B, T, D]
+            audio_len (Tensor): [B]
+            text (Tensor): [B, U]
+            text_len (Tensor): [B]
+
+        Returns:
+            loss (Tensor): [1]
+        """
+        eouts, eouts_len = self.encoder(audio, audio_len)
+        loss = self.decoder(eouts, eouts_len, text, text_len)
+        return loss
+
+    @paddle.no_grad()
+    def decode(self, audio, audio_len, vocab_list, decoding_method,
+               lang_model_path, beam_alpha, beam_beta, beam_size, cutoff_prob,
+               cutoff_top_n, num_processes):
+        # init once
+        # decoders only accept string encoded in utf-8
+        self.decoder.init_decode(
+            beam_alpha=beam_alpha,
+            beam_beta=beam_beta,
+            lang_model_path=lang_model_path,
+            vocab_list=vocab_list,
+            decoding_method=decoding_method)
+
+        eouts, eouts_len = self.encoder(audio, audio_len)
+        probs = self.decoder.softmax(eouts)
+        print("probs.shape", probs.shape)
+        return self.decoder.decode_probs(
+            probs.numpy(), eouts_len, vocab_list, decoding_method,
+            lang_model_path, beam_alpha, beam_beta, beam_size, cutoff_prob,
+            cutoff_top_n, num_processes)
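+    # A typical call, with values mirroring conf/deepspeech2.yaml (sketch only;
+    # `audio`, `audio_len` and `vocab_list` are assumed to come from the collator):
+    #
+    #   hyps = model.decode(audio, audio_len, vocab_list,
+    #                       decoding_method='ctc_beam_search',
+    #                       lang_model_path='data/lm/common_crawl_00.prune01111.trie.klm',
+    #                       beam_alpha=2.5, beam_beta=0.3, beam_size=500,
+    #                       cutoff_prob=1.0, cutoff_top_n=40, num_processes=8)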
+ """ + model = cls(feat_size=dataloader.collate_fn.feature_size, + dict_size=len(dataloader.collate_fn.vocab_list), + num_conv_layers=config.model.num_conv_layers, + num_rnn_layers=config.model.num_rnn_layers, + rnn_size=config.model.rnn_layer_size, + use_gru=config.model.use_gru, + share_rnn_weights=config.model.share_rnn_weights) + infos = Checkpoint().load_parameters( + model, checkpoint_path=checkpoint_path) + logger.info(f"checkpoint info: {infos}") + layer_tools.summary(model) + return model + + @classmethod + def from_config(cls, config): + """Build a DeepSpeec2Model from config + Parameters + + config: yacs.config.CfgNode + config.model + Returns + ------- + DeepSpeech2Model + The model built from config. + """ + model = cls(feat_size=config.feat_size, + dict_size=config.dict_size, + num_conv_layers=config.num_conv_layers, + num_rnn_layers=config.num_rnn_layers, + rnn_size=config.rnn_layer_size, + use_gru=config.use_gru, + share_rnn_weights=config.share_rnn_weights, + blank_id=config.blank_id) + return model + + +class DeepSpeech2InferModel(DeepSpeech2Model): + def __init__(self, + feat_size, + dict_size, + num_conv_layers=2, + num_rnn_layers=3, + rnn_size=1024, + use_gru=False, + share_rnn_weights=True, + blank_id=0): + super().__init__( + feat_size=feat_size, + dict_size=dict_size, + num_conv_layers=num_conv_layers, + num_rnn_layers=num_rnn_layers, + rnn_size=rnn_size, + use_gru=use_gru, + share_rnn_weights=share_rnn_weights, + blank_id=blank_id) + + def forward(self, audio, audio_len): + """export model function + + Args: + audio (Tensor): [B, T, D] + audio_len (Tensor): [B] + + Returns: + probs: probs after softmax + """ + eouts, eouts_len = self.encoder(audio, audio_len) + probs = self.decoder.softmax(eouts) + return probs, eouts_len + + def export(self): + static_model = paddle.jit.to_static( + self, + input_spec=[ + paddle.static.InputSpec( + shape=[None, None, self.encoder.feat_size], + dtype='float32'), # audio, [B,T,D] + paddle.static.InputSpec(shape=[None], + dtype='int64'), # audio_length, [B] + ]) + return static_model diff --git a/examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py b/examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py new file mode 100644 index 000000000..e45db7c05 --- /dev/null +++ b/examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py @@ -0,0 +1,334 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import math + +import paddle +from paddle import nn +from paddle.nn import functional as F +from paddle.nn import initializer as I + +from deepspeech.modules.activation import brelu +from deepspeech.modules.mask import make_non_pad_mask +from deepspeech.utils.log import Log +logger = Log(__name__).getlog() + +__all__ = ['RNNStack'] + + +class RNNCell(nn.RNNCellBase): + r""" + Elman RNN (SimpleRNN) cell. Given the inputs and previous states, it + computes the outputs and updates states. + The formula used is as follows: + .. 
diff --git a/examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py b/examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py
new file mode 100644
index 000000000..e45db7c05
--- /dev/null
+++ b/examples/1xt2x/src_deepspeech2x/models/ds2/rnn.py
@@ -0,0 +1,334 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import math
+
+import paddle
+from paddle import nn
+from paddle.nn import functional as F
+from paddle.nn import initializer as I
+
+from deepspeech.modules.activation import brelu
+from deepspeech.modules.mask import make_non_pad_mask
+from deepspeech.utils.log import Log
+logger = Log(__name__).getlog()
+
+__all__ = ['RNNStack']
+
+
+class RNNCell(nn.RNNCellBase):
+    r"""
+    Elman RNN (SimpleRNN) cell. Given the inputs and previous states, it
+    computes the outputs and updates states.
+    The formula used is as follows:
+    .. math::
+        h_{t} & = act(x_{t} + b_{ih} + W_{hh}h_{t-1} + b_{hh})
+        y_{t} & = h_{t}
+
+    where :math:`act` is for :attr:`activation`.
+    """
+
+    def __init__(self,
+                 hidden_size: int,
+                 activation="tanh",
+                 weight_ih_attr=None,
+                 weight_hh_attr=None,
+                 bias_ih_attr=None,
+                 bias_hh_attr=None,
+                 name=None):
+        super().__init__()
+        std = 1.0 / math.sqrt(hidden_size)
+        self.weight_hh = self.create_parameter(
+            (hidden_size, hidden_size),
+            weight_hh_attr,
+            default_initializer=I.Uniform(-std, std))
+        self.bias_ih = None
+        self.bias_hh = self.create_parameter(
+            (hidden_size, ),
+            bias_hh_attr,
+            is_bias=True,
+            default_initializer=I.Uniform(-std, std))
+
+        self.hidden_size = hidden_size
+        if activation not in ["tanh", "relu", "brelu"]:
+            raise ValueError(
+                "activation for SimpleRNNCell should be tanh or relu, "
+                "but get {}".format(activation))
+        self.activation = activation
+        self._activation_fn = paddle.tanh \
+            if activation == "tanh" \
+            else F.relu
+        if activation == 'brelu':
+            self._activation_fn = brelu
+
+    def forward(self, inputs, states=None):
+        if states is None:
+            states = self.get_initial_states(inputs, self.state_shape)
+        pre_h = states
+        i2h = inputs
+        if self.bias_ih is not None:
+            i2h += self.bias_ih
+        h2h = paddle.matmul(pre_h, self.weight_hh, transpose_y=True)
+        if self.bias_hh is not None:
+            h2h += self.bias_hh
+        h = self._activation_fn(i2h + h2h)
+        return h, h
+
+    @property
+    def state_shape(self):
+        return (self.hidden_size, )
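+# NOTE: RNNCell above deliberately has no weight_ih: the input projection and
+# its sequence-wise batch norm are applied outside the cell (see BiRNNWithBN
+# in this file), so forward() only adds the recurrent term to an input that
+# is already projected to hidden_size.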
+ """ + + def __init__(self, + input_size: int, + hidden_size: int, + weight_ih_attr=None, + weight_hh_attr=None, + bias_ih_attr=None, + bias_hh_attr=None, + name=None): + super().__init__() + std = 1.0 / math.sqrt(hidden_size) + self.weight_hh = self.create_parameter( + (3 * hidden_size, hidden_size), + weight_hh_attr, + default_initializer=I.Uniform(-std, std)) + self.bias_ih = None + self.bias_hh = self.create_parameter( + (3 * hidden_size, ), + bias_hh_attr, + is_bias=True, + default_initializer=I.Uniform(-std, std)) + + self.hidden_size = hidden_size + self.input_size = input_size + self._gate_activation = F.sigmoid + self._activation = paddle.relu + + def forward(self, inputs, states=None): + if states is None: + states = self.get_initial_states(inputs, self.state_shape) + + pre_hidden = states # shape [batch_size, hidden_size] + + x_gates = inputs + if self.bias_ih is not None: + x_gates = x_gates + self.bias_ih + bias_u, bias_r, bias_c = paddle.split( + self.bias_hh, num_or_sections=3, axis=0) + + weight_hh = paddle.transpose( + self.weight_hh, + perm=[1, 0]) #weight_hh:shape[hidden_size, 3 * hidden_size] + w_u_r_c = paddle.flatten(weight_hh) + size_u_r = self.hidden_size * 2 * self.hidden_size + w_u_r = paddle.reshape(w_u_r_c[:size_u_r], + (self.hidden_size, self.hidden_size * 2)) + w_u, w_r = paddle.split(w_u_r, num_or_sections=2, axis=1) + w_c = paddle.reshape(w_u_r_c[size_u_r:], + (self.hidden_size, self.hidden_size)) + + h_u = paddle.matmul( + pre_hidden, w_u, + transpose_y=False) + bias_u #shape [batch_size, hidden_size] + h_r = paddle.matmul( + pre_hidden, w_r, + transpose_y=False) + bias_r #shape [batch_size, hidden_size] + + x_u, x_r, x_c = paddle.split( + x_gates, num_or_sections=3, axis=1) #shape[batch_size, hidden_size] + + u = self._gate_activation(x_u + h_u) #shape [batch_size, hidden_size] + r = self._gate_activation(x_r + h_r) #shape [batch_size, hidden_size] + c = self._activation( + x_c + paddle.matmul(r * pre_hidden, w_c, transpose_y=False) + + bias_c) # [batch_size, hidden_size] + + h = (1 - u) * pre_hidden + u * c + # https://www.paddlepaddle.org.cn/documentation/docs/zh/api/paddle/fluid/layers/dynamic_gru_cn.html#dynamic-gru + return h, h + + @property + def state_shape(self): + r""" + The `state_shape` of GRUCell is a shape `[hidden_size]` (-1 for batch + size would be automatically inserted into shape). The shape corresponds + to the shape of :math:`h_{t-1}`. + """ + return (self.hidden_size, ) + + +class BiRNNWithBN(nn.Layer): + """Bidirectonal simple rnn layer with sequence-wise batch normalization. + The batch normalization is only performed on input-state weights. + + :param size: Dimension of RNN cells. + :type size: int + :param share_weights: Whether to share input-hidden weights between + forward and backward directional RNNs. + :type share_weights: bool + :return: Bidirectional simple rnn layer. + :rtype: Variable + """ + + def __init__(self, i_size: int, h_size: int, share_weights: bool): + super().__init__() + self.share_weights = share_weights + if self.share_weights: + #input-hidden weights shared between bi-directional rnn. 
+
+
+class BiRNNWithBN(nn.Layer):
+    """Bidirectional simple RNN layer with sequence-wise batch normalization.
+    The batch normalization is only performed on input-state weights.
+
+    :param size: Dimension of RNN cells.
+    :type size: int
+    :param share_weights: Whether to share input-hidden weights between
+                          forward and backward directional RNNs.
+    :type share_weights: bool
+    :return: Bidirectional simple rnn layer.
+    :rtype: Variable
+    """
+
+    def __init__(self, i_size: int, h_size: int, share_weights: bool):
+        super().__init__()
+        self.share_weights = share_weights
+        if self.share_weights:
+            #input-hidden weights shared between bi-directional rnn.
+            self.fw_fc = nn.Linear(i_size, h_size, bias_attr=False)
+            # batch norm is only performed on input-state projection
+            self.fw_bn = nn.BatchNorm1D(
+                h_size, bias_attr=None, data_format='NLC')
+            self.bw_fc = self.fw_fc
+            self.bw_bn = self.fw_bn
+        else:
+            self.fw_fc = nn.Linear(i_size, h_size, bias_attr=False)
+            self.fw_bn = nn.BatchNorm1D(
+                h_size, bias_attr=None, data_format='NLC')
+            self.bw_fc = nn.Linear(i_size, h_size, bias_attr=False)
+            self.bw_bn = nn.BatchNorm1D(
+                h_size, bias_attr=None, data_format='NLC')
+
+        self.fw_cell = RNNCell(hidden_size=h_size, activation='brelu')
+        self.bw_cell = RNNCell(hidden_size=h_size, activation='brelu')
+        self.fw_rnn = nn.RNN(
+            self.fw_cell, is_reverse=False, time_major=False)  #[B, T, D]
+        self.bw_rnn = nn.RNN(
+            self.bw_cell, is_reverse=True, time_major=False)  #[B, T, D]
+
+    def forward(self, x: paddle.Tensor, x_len: paddle.Tensor):
+        # x, shape [B, T, D]
+        fw_x = self.fw_bn(self.fw_fc(x))
+        bw_x = self.bw_bn(self.bw_fc(x))
+        fw_x, _ = self.fw_rnn(inputs=fw_x, sequence_length=x_len)
+        bw_x, _ = self.bw_rnn(inputs=bw_x, sequence_length=x_len)
+        x = paddle.concat([fw_x, bw_x], axis=-1)
+        return x, x_len
+
+
+class BiGRUWithBN(nn.Layer):
+    """Bidirectional GRU layer with sequence-wise batch normalization.
+    The batch normalization is only performed on input-state weights.
+
+    :param name: Name of the layer.
+    :type name: string
+    :param input: Input layer.
+    :type input: Variable
+    :param size: Dimension of GRU cells.
+    :type size: int
+    :param act: Activation type.
+    :type act: string
+    :return: Bidirectional GRU layer.
+    :rtype: Variable
+    """
+
+    def __init__(self, i_size: int, h_size: int):
+        super().__init__()
+        hidden_size = h_size * 3
+
+        self.fw_fc = nn.Linear(i_size, hidden_size, bias_attr=False)
+        self.fw_bn = nn.BatchNorm1D(
+            hidden_size, bias_attr=None, data_format='NLC')
+        self.bw_fc = nn.Linear(i_size, hidden_size, bias_attr=False)
+        self.bw_bn = nn.BatchNorm1D(
+            hidden_size, bias_attr=None, data_format='NLC')
+
+        self.fw_cell = GRUCell(input_size=hidden_size, hidden_size=h_size)
+        self.bw_cell = GRUCell(input_size=hidden_size, hidden_size=h_size)
+        self.fw_rnn = nn.RNN(
+            self.fw_cell, is_reverse=False, time_major=False)  #[B, T, D]
+        self.bw_rnn = nn.RNN(
+            self.bw_cell, is_reverse=True, time_major=False)  #[B, T, D]
+
+    def forward(self, x, x_len):
+        # x, shape [B, T, D]
+        fw_x = self.fw_bn(self.fw_fc(x))
+
+        bw_x = self.bw_bn(self.bw_fc(x))
+        fw_x, _ = self.fw_rnn(inputs=fw_x, sequence_length=x_len)
+        bw_x, _ = self.bw_rnn(inputs=bw_x, sequence_length=x_len)
+        x = paddle.concat([fw_x, bw_x], axis=-1)
+        return x, x_len
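+# NOTE: BiGRUWithBN projects the input to 3 * h_size features because a GRU
+# needs three gate pre-activations (update, reset, candidate) per step;
+# GRUCell.forward then splits that fused projection with paddle.split.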
+    :rtype: Variable
+    """
+
+    def __init__(self,
+                 i_size: int,
+                 h_size: int,
+                 num_stacks: int,
+                 use_gru: bool,
+                 share_rnn_weights: bool):
+        super().__init__()
+        rnn_stacks = []
+        for i in range(num_stacks):
+            if use_gru:
+                # default: GRU uses tanh
+                rnn_stacks.append(BiGRUWithBN(i_size=i_size, h_size=h_size))
+            else:
+                rnn_stacks.append(
+                    BiRNNWithBN(
+                        i_size=i_size,
+                        h_size=h_size,
+                        share_weights=share_rnn_weights))
+            i_size = h_size * 2
+
+        self.rnn_stacks = nn.LayerList(rnn_stacks)
+
+    def forward(self, x: paddle.Tensor, x_len: paddle.Tensor):
+        """
+        x: shape [B, T, D]
+        x_len: shape [B]
+        """
+        for i, rnn in enumerate(self.rnn_stacks):
+            x, x_len = rnn(x, x_len)
+            masks = make_non_pad_mask(x_len)  # [B, T]
+            masks = masks.unsqueeze(-1)  # [B, T, 1]
+            # TODO(Hui Zhang): bool multiply is not supported, cast to x.dtype
+            masks = masks.astype(x.dtype)
+            x = x.multiply(masks)
+        return x, x_len
diff --git a/examples/1xt2x/src_deepspeech2x/test_model.py b/examples/1xt2x/src_deepspeech2x/test_model.py
new file mode 100644
index 000000000..203a3bace
--- /dev/null
+++ b/examples/1xt2x/src_deepspeech2x/test_model.py
@@ -0,0 +1,429 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains DeepSpeech2 and DeepSpeech2Online models."""
+import time
+from collections import defaultdict
+from contextlib import nullcontext
+from pathlib import Path
+from typing import Optional
+
+import numpy as np
+import paddle
+from paddle import distributed as dist
+from paddle.io import DataLoader
+from src_deepspeech2x.models.ds2 import DeepSpeech2InferModel
+from src_deepspeech2x.models.ds2 import DeepSpeech2Model
+from yacs.config import CfgNode
+
+from deepspeech.frontend.featurizer.text_featurizer import TextFeaturizer
+from deepspeech.io.collator import SpeechCollator
+from deepspeech.io.dataset import ManifestDataset
+from deepspeech.io.sampler import SortagradBatchSampler
+from deepspeech.io.sampler import SortagradDistributedBatchSampler
+from deepspeech.models.ds2_online import DeepSpeech2InferModelOnline
+from deepspeech.models.ds2_online import DeepSpeech2ModelOnline
+from deepspeech.training.gradclip import ClipGradByGlobalNormWithLog
+from deepspeech.training.trainer import Trainer
+from deepspeech.utils import error_rate
+from deepspeech.utils import layer_tools
+from deepspeech.utils import mp_tools
+from deepspeech.utils.log import Log
+
+logger = Log(__name__).getlog()
+
+
+class DeepSpeech2Trainer(Trainer):
+    @classmethod
+    def params(cls, config: Optional[CfgNode]=None) -> CfgNode:
+        # training config
+        default = CfgNode(
+            dict(
+                lr=5e-4,  # learning rate
+                lr_decay=1.0,  # learning rate decay
+                weight_decay=1e-6,  # the coeff of weight decay
+                global_grad_clip=5.0,  # the global norm clip
+                n_epoch=50,  # train epochs
+            ))
+
+        if config is not None:
+            config.merge_from_other_cfg(default)
+        return default
+
+    def __init__(self, config, args):
+        super().__init__(config, args)
+
+    def train_batch(self, batch_index, batch_data, msg):
+        train_conf = self.config.training
+        start = time.time()
+
+        # forward
+        utt, audio, audio_len, text, text_len = batch_data
+        loss = self.model(audio, audio_len, text, text_len)
+        losses_np = {
+            'train_loss': float(loss),
+        }
+
+        # loss backward
+        if (batch_index + 1) % train_conf.accum_grad != 0:
+            # Disable gradient synchronizations across DDP processes.
+            # Within this context, gradients will be accumulated on module
+            # variables, which will later be synchronized.
+            context = self.model.no_sync
+        else:
+            # Used for single gpu training and DDP gradient synchronization
+            # processes.
+            context = nullcontext
+
+        with context():
+            loss.backward()
+            layer_tools.print_grads(self.model, print_func=None)
+
+        # optimizer step
+        if (batch_index + 1) % train_conf.accum_grad == 0:
+            self.optimizer.step()
+            self.optimizer.clear_grad()
+            self.iteration += 1
+
+        iteration_time = time.time() - start
+
+        msg += "train time: {:>.3f}s, ".format(iteration_time)
+        msg += "batch size: {}, ".format(self.config.collator.batch_size)
+        msg += "accum: {}, ".format(train_conf.accum_grad)
+        msg += ', '.join('{}: {:>.6f}'.format(k, v)
+                         for k, v in losses_np.items())
+        logger.info(msg)
+
+        if dist.get_rank() == 0 and self.visualizer:
+            for k, v in losses_np.items():
+                # `step -1` since we update `step` after optimizer.step().
+                self.visualizer.add_scalar("train/{}".format(k), v,
+                                           self.iteration - 1)
+
+    @paddle.no_grad()
+    def valid(self):
+        logger.info(f"Valid Total Examples: {len(self.valid_loader.dataset)}")
+        self.model.eval()
+        valid_losses = defaultdict(list)
+        num_seen_utts = 1
+        total_loss = 0.0
+        for i, batch in enumerate(self.valid_loader):
+            utt, audio, audio_len, text, text_len = batch
+            loss = self.model(audio, audio_len, text, text_len)
+            if paddle.isfinite(loss):
+                num_utts = batch[1].shape[0]
+                num_seen_utts += num_utts
+                total_loss += float(loss) * num_utts
+                valid_losses['val_loss'].append(float(loss))
+
+            if (i + 1) % self.config.training.log_interval == 0:
+                valid_dump = {k: np.mean(v) for k, v in valid_losses.items()}
+                valid_dump['val_history_loss'] = total_loss / num_seen_utts
+
+                # logging
+                msg = f"Valid: Rank: {dist.get_rank()}, "
+                msg += "epoch: {}, ".format(self.epoch)
+                msg += "step: {}, ".format(self.iteration)
+                msg += "batch: {}/{}, ".format(i + 1, len(self.valid_loader))
+                msg += ', '.join('{}: {:>.6f}'.format(k, v)
+                                 for k, v in valid_dump.items())
+                logger.info(msg)
+
+        logger.info('Rank {} Val info val_loss {}'.format(
+            dist.get_rank(), total_loss / num_seen_utts))
+        return total_loss, num_seen_utts
+
+    def setup_model(self):
+        config = self.config.clone()
+        config.defrost()
+        config.model.feat_size = self.train_loader.collate_fn.feature_size
+        #config.model.dict_size = self.train_loader.collate_fn.vocab_size
+        config.model.dict_size = len(self.train_loader.collate_fn.vocab_list)
+        config.freeze()
+
+        if self.args.model_type == 'offline':
+            model = DeepSpeech2Model.from_config(config.model)
+        elif self.args.model_type == 'online':
+            model = DeepSpeech2ModelOnline.from_config(config.model)
+        else:
+            raise Exception("wrong model type")
+        if self.parallel:
+            model = paddle.DataParallel(model)
+
+        logger.info(f"{model}")
+        layer_tools.print_params(model, logger.info)
+
+        grad_clip = ClipGradByGlobalNormWithLog(
+            config.training.global_grad_clip)
+        lr_scheduler = paddle.optimizer.lr.ExponentialDecay(
+            learning_rate=config.training.lr,
+            gamma=config.training.lr_decay,
+            verbose=True)
+        optimizer = paddle.optimizer.Adam(
+            learning_rate=lr_scheduler,
+            parameters=model.parameters(),
weight_decay=paddle.regularizer.L2Decay( + config.training.weight_decay), + grad_clip=grad_clip) + + self.model = model + self.optimizer = optimizer + self.lr_scheduler = lr_scheduler + logger.info("Setup model/optimizer/lr_scheduler!") + + def setup_dataloader(self): + config = self.config.clone() + config.defrost() + config.collator.keep_transcription_text = False + + config.data.manifest = config.data.train_manifest + train_dataset = ManifestDataset.from_config(config) + + config.data.manifest = config.data.dev_manifest + dev_dataset = ManifestDataset.from_config(config) + + config.data.manifest = config.data.test_manifest + test_dataset = ManifestDataset.from_config(config) + + if self.parallel: + batch_sampler = SortagradDistributedBatchSampler( + train_dataset, + batch_size=config.collator.batch_size, + num_replicas=None, + rank=None, + shuffle=True, + drop_last=True, + sortagrad=config.collator.sortagrad, + shuffle_method=config.collator.shuffle_method) + else: + batch_sampler = SortagradBatchSampler( + train_dataset, + shuffle=True, + batch_size=config.collator.batch_size, + drop_last=True, + sortagrad=config.collator.sortagrad, + shuffle_method=config.collator.shuffle_method) + + collate_fn_train = SpeechCollator.from_config(config) + + config.collator.augmentation_config = "" + collate_fn_dev = SpeechCollator.from_config(config) + + config.collator.keep_transcription_text = True + config.collator.augmentation_config = "" + collate_fn_test = SpeechCollator.from_config(config) + + self.train_loader = DataLoader( + train_dataset, + batch_sampler=batch_sampler, + collate_fn=collate_fn_train, + num_workers=config.collator.num_workers) + self.valid_loader = DataLoader( + dev_dataset, + batch_size=config.collator.batch_size, + shuffle=False, + drop_last=False, + collate_fn=collate_fn_dev) + self.test_loader = DataLoader( + test_dataset, + batch_size=config.decoding.batch_size, + shuffle=False, + drop_last=False, + collate_fn=collate_fn_test) + if "" in self.test_loader.collate_fn.vocab_list: + self.test_loader.collate_fn.vocab_list.remove("") + if "" in self.valid_loader.collate_fn.vocab_list: + self.valid_loader.collate_fn.vocab_list.remove("") + if "" in self.train_loader.collate_fn.vocab_list: + self.train_loader.collate_fn.vocab_list.remove("") + logger.info("Setup train/valid/test Dataloader!") + + +class DeepSpeech2Tester(DeepSpeech2Trainer): + @classmethod + def params(cls, config: Optional[CfgNode]=None) -> CfgNode: + # testing config + default = CfgNode( + dict( + alpha=2.5, # Coef of LM for beam search. + beta=0.3, # Coef of WC for beam search. + cutoff_prob=1.0, # Cutoff probability for pruning. + cutoff_top_n=40, # Cutoff number for pruning. + lang_model_path='models/lm/common_crawl_00.prune01111.trie.klm', # Filepath for language model. + decoding_method='ctc_beam_search', # Decoding method. Options: ctc_beam_search, ctc_greedy + error_rate_type='wer', # Error rate type for evaluation. Options `wer`, 'cer' + num_proc_bsearch=8, # # of CPUs for beam search. + beam_size=500, # Beam search width. 
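+                # Note (added for clarity): with ctc_beam_search decoding the
+                # candidate score roughly follows the DeepSpeech2 recipe,
+                #   log p_ctc(y|x) + alpha * log p_lm(y) + beta * word_count(y),
+                # so `alpha` weights the LM and `beta` rewards longer outputs.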
+ batch_size=128, # decoding batch size + )) + + if config is not None: + config.merge_from_other_cfg(default) + return default + + def __init__(self, config, args): + + self._text_featurizer = TextFeaturizer( + unit_type=config.collator.unit_type, vocab_filepath=None) + super().__init__(config, args) + + def ordid2token(self, texts, texts_len): + """ ord() id to chr() chr """ + trans = [] + for text, n in zip(texts, texts_len): + n = n.numpy().item() + ids = text[:n] + trans.append(''.join([chr(i) for i in ids])) + return trans + + def compute_metrics(self, + utts, + audio, + audio_len, + texts, + texts_len, + fout=None): + cfg = self.config.decoding + errors_sum, len_refs, num_ins = 0.0, 0, 0 + errors_func = error_rate.char_errors if cfg.error_rate_type == 'cer' else error_rate.word_errors + error_rate_func = error_rate.cer if cfg.error_rate_type == 'cer' else error_rate.wer + + vocab_list = self.test_loader.collate_fn.vocab_list + + target_transcripts = self.ordid2token(texts, texts_len) + + result_transcripts = self.compute_result_transcripts(audio, audio_len, + vocab_list, cfg) + for utt, target, result in zip(utts, target_transcripts, + result_transcripts): + errors, len_ref = errors_func(target, result) + errors_sum += errors + len_refs += len_ref + num_ins += 1 + if fout: + fout.write(utt + " " + result + "\n") + logger.info("\nTarget Transcription: %s\nOutput Transcription: %s" % + (target, result)) + logger.info("Current error rate [%s] = %f" % + (cfg.error_rate_type, error_rate_func(target, result))) + + return dict( + errors_sum=errors_sum, + len_refs=len_refs, + num_ins=num_ins, + error_rate=errors_sum / len_refs, + error_rate_type=cfg.error_rate_type) + + def compute_result_transcripts(self, audio, audio_len, vocab_list, cfg): + result_transcripts = self.model.decode( + audio, + audio_len, + vocab_list, + decoding_method=cfg.decoding_method, + lang_model_path=cfg.lang_model_path, + beam_alpha=cfg.alpha, + beam_beta=cfg.beta, + beam_size=cfg.beam_size, + cutoff_prob=cfg.cutoff_prob, + cutoff_top_n=cfg.cutoff_top_n, + num_processes=cfg.num_proc_bsearch) + result_transcripts = [ + self._text_featurizer.detokenize(item) + for item in result_transcripts + ] + return result_transcripts + + @mp_tools.rank_zero_only + @paddle.no_grad() + def test(self): + logger.info(f"Test Total Examples: {len(self.test_loader.dataset)}") + self.model.eval() + cfg = self.config + error_rate_type = None + errors_sum, len_refs, num_ins = 0.0, 0, 0 + with open(self.args.result_file, 'w') as fout: + for i, batch in enumerate(self.test_loader): + utts, audio, audio_len, texts, texts_len = batch + metrics = self.compute_metrics(utts, audio, audio_len, texts, + texts_len, fout) + errors_sum += metrics['errors_sum'] + len_refs += metrics['len_refs'] + num_ins += metrics['num_ins'] + error_rate_type = metrics['error_rate_type'] + logger.info("Error rate [%s] (%d/?) 
= %f" % + (error_rate_type, num_ins, errors_sum / len_refs)) + + # logging + msg = "Test: " + msg += "epoch: {}, ".format(self.epoch) + msg += "step: {}, ".format(self.iteration) + msg += "Final error rate [%s] (%d/%d) = %f" % ( + error_rate_type, num_ins, num_ins, errors_sum / len_refs) + logger.info(msg) + + def run_test(self): + self.resume_or_scratch() + try: + self.test() + except KeyboardInterrupt: + exit(-1) + + def export(self): + if self.args.model_type == 'offline': + infer_model = DeepSpeech2InferModel.from_pretrained( + self.test_loader, self.config, self.args.checkpoint_path) + elif self.args.model_type == 'online': + infer_model = DeepSpeech2InferModelOnline.from_pretrained( + self.test_loader, self.config, self.args.checkpoint_path) + else: + raise Exception("wrong model type") + + infer_model.eval() + feat_dim = self.test_loader.collate_fn.feature_size + static_model = infer_model.export() + logger.info(f"Export code: {static_model.forward.code}") + paddle.jit.save(static_model, self.args.export_path) + + def run_export(self): + try: + self.export() + except KeyboardInterrupt: + exit(-1) + + def setup(self): + """Setup the experiment. + """ + paddle.set_device('gpu' if self.args.nprocs > 0 else 'cpu') + + self.setup_output_dir() + self.setup_checkpointer() + + self.setup_dataloader() + self.setup_model() + + self.iteration = 0 + self.epoch = 0 + + def setup_output_dir(self): + """Create a directory used for output. + """ + # output dir + if self.args.output: + output_dir = Path(self.args.output).expanduser() + output_dir.mkdir(parents=True, exist_ok=True) + else: + output_dir = Path( + self.args.checkpoint_path).expanduser().parent.parent + output_dir.mkdir(parents=True, exist_ok=True) + + self.output_dir = output_dir diff --git a/examples/aug_conf/augmentation.json b/examples/aug_conf/augmentation.json deleted file mode 100644 index a1a759e67..000000000 --- a/examples/aug_conf/augmentation.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "type": "shift", - "params": { - "min_shift_ms": -5, - "max_shift_ms": 5 - }, - "prob": 1.0 - } -] diff --git a/examples/aug_conf/augmentation.example.json b/examples/augmentation/augmentation.json similarity index 91% rename from examples/aug_conf/augmentation.example.json rename to examples/augmentation/augmentation.json index efae2e5e3..c99299d6c 100644 --- a/examples/aug_conf/augmentation.example.json +++ b/examples/augmentation/augmentation.json @@ -52,16 +52,18 @@ { "type": "specaug", "params": { + "W": 80, + "warp_mode": "PIL", "F": 10, - "T": 50, "n_freq_masks": 2, + "T": 50, "n_time_masks": 2, "p": 1.0, - "W": 80, "adaptive_number_ratio": 0, "adaptive_size_ratio": 0, - "max_n_time_masks": 20 + "max_n_time_masks": 20, + "replace_with_zero": false }, - "prob": 0.0 + "prob": 1.0 } ] diff --git a/examples/cc-cedict/README.md b/examples/cc-cedict/README.md index e69de29bb..513fca533 100644 --- a/examples/cc-cedict/README.md +++ b/examples/cc-cedict/README.md @@ -0,0 +1,58 @@ +# [CC-CEDICT](https://cc-cedict.org/wiki/) + +What is CC-CEDICT? +CC-CEDICT is a continuation of the CEDICT project. +The objective of the CEDICT project was to create an online, downloadable (as opposed to searchable-only) public-domain Chinese-English dictionary. +CEDICT was started by Paul Andrew Denisowski in October 1997. +For the most part, the project is modeled on Jim Breen's highly successful EDICT (Japanese-English dictionary) project and is intended to be a collaborative effort, +with users providing entries and corrections to the main file. 
+ + +## Parse CC-CEDICT to Json format + +1. Parse to Json + +``` +run.sh +``` + +2. Result + +``` +exp/ +|-- cedict +`-- cedict.json + +0 directories, 2 files +``` + +``` +4c4bffc84e24467fe1b2ea9ba37ed6b6 exp/cedict +3adf504dacd13886f88cc9fe3b37c75d exp/cedict.json +``` + +``` +==> exp/cedict <== +# CC-CEDICT +# Community maintained free Chinese-English dictionary. +# +# Published by MDBG +# +# License: +# Creative Commons Attribution-ShareAlike 4.0 International License +# https://creativecommons.org/licenses/by-sa/4.0/ +# +# Referenced works: + +==> exp/cedict.json <== +{"traditional": "2019\u51a0\u72c0\u75c5\u6bd2\u75c5", "simplified": "2019\u51a0\u72b6\u75c5\u6bd2\u75c5", "pinyin": "er4 ling2 yi1 jiu3 guan1 zhuang4 bing4 du2 bing4", "english": "COVID-19, the coronavirus disease identified in 2019"} +{"traditional": "21\u4e09\u9ad4\u7d9c\u5408\u75c7", "simplified": "21\u4e09\u4f53\u7efc\u5408\u75c7", "pinyin": "er4 shi2 yi1 san1 ti3 zong1 he2 zheng4", "english": "trisomy"} +{"traditional": "3C", "simplified": "3C", "pinyin": "san1 C", "english": "abbr. for computers, communications, and consumer electronics"} +{"traditional": "3P", "simplified": "3P", "pinyin": "san1 P", "english": "(slang) threesome"} +{"traditional": "3Q", "simplified": "3Q", "pinyin": "san1 Q", "english": "(Internet slang) thank you (loanword)"} +{"traditional": "421", "simplified": "421", "pinyin": "si4 er4 yi1", "english": "four grandparents, two parents and an only child"} +{"traditional": "502\u81a0", "simplified": "502\u80f6", "pinyin": "wu3 ling2 er4 jiao1", "english": "cyanoacrylate glue"} +{"traditional": "88", "simplified": "88", "pinyin": "ba1 ba1", "english": "(Internet slang) bye-bye (alternative for \u62dc\u62dc[bai2 bai2])"} +{"traditional": "996", "simplified": "996", "pinyin": "jiu3 jiu3 liu4", "english": "9am-9pm, six days a week (work schedule)"} +{"traditional": "A", "simplified": "A", "pinyin": "A", "english": "(slang) (Tw) to steal"} +``` diff --git a/examples/cc-cedict/path.sh b/examples/cc-cedict/path.sh index 84e2de7d0..f8fdd82d7 100644 --- a/examples/cc-cedict/path.sh +++ b/examples/cc-cedict/path.sh @@ -1,4 +1,4 @@ -export MAIN_ROOT=${PWD}/../../ +export MAIN_ROOT=`realpath ${PWD}/../../` export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH} export LC_ALL=C diff --git a/examples/chinese_g2p/README.md b/examples/chinese_g2p/README.md deleted file mode 100644 index e3fdfe684..000000000 --- a/examples/chinese_g2p/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Download Baker dataset - -Baker dataset has to be downloaded mannually and moved to 'data/', because you will have to pass the CATTCHA from a browswe to download the dataset. - -Download URL https://test.data-baker.com/#/data/index/source. diff --git a/examples/dataset/aidatatang_200zh/.gitignore b/examples/dataset/aidatatang_200zh/.gitignore new file mode 100644 index 000000000..fc56525e6 --- /dev/null +++ b/examples/dataset/aidatatang_200zh/.gitignore @@ -0,0 +1,4 @@ +*.tgz +manifest.* +*.meta +aidatatang_200zh/ \ No newline at end of file diff --git a/examples/dataset/aidatatang_200zh/README.md b/examples/dataset/aidatatang_200zh/README.md new file mode 100644 index 000000000..e6f1eefbd --- /dev/null +++ b/examples/dataset/aidatatang_200zh/README.md @@ -0,0 +1,14 @@ +# [Aidatatang_200zh](http://www.openslr.org/62/) + +Aidatatang_200zh is a free Chinese Mandarin speech corpus provided by Beijing DataTang Technology Co., Ltd under Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International Public License. 
+The contents and the corresponding descriptions of the corpus include: + +* The corpus contains 200 hours of acoustic data, which is mostly mobile recorded data. +* 600 speakers from different accent areas in China are invited to participate in the recording. +* The transcription accuracy for each sentence is larger than 98%. +* Recordings are conducted in a quiet indoor environment. +* The database is divided into training set, validation set, and testing set in a ratio of 7: 1: 2. +* Detail information such as speech data coding and speaker information is preserved in the metadata file. +* Segmented transcripts are also provided. + +The corpus aims to support researchers in speech recognition, machine translation, voiceprint recognition, and other speech-related fields. Therefore, the corpus is totally free for academic use. diff --git a/examples/dataset/aidatatang_200zh/aidatatang_200zh.py b/examples/dataset/aidatatang_200zh/aidatatang_200zh.py new file mode 100644 index 000000000..e32f619e9 --- /dev/null +++ b/examples/dataset/aidatatang_200zh/aidatatang_200zh.py @@ -0,0 +1,153 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Prepare aidatatang_200zh mandarin dataset + +Download, unpack and create manifest files. +Manifest file is a json-format file with each line containing the +meta data (i.e. audio filepath, transcript and audio duration) +of each audio file in the data set. +""" +import argparse +import codecs +import json +import os + +import soundfile + +from utils.utility import download +from utils.utility import unpack + +DATA_HOME = os.path.expanduser('~/.cache/paddle/dataset/speech') + +URL_ROOT = 'http://www.openslr.org/resources/62' +# URL_ROOT = 'https://openslr.magicdatatech.com/resources/62' +DATA_URL = URL_ROOT + '/aidatatang_200zh.tgz' +MD5_DATA = '6e0f4f39cd5f667a7ee53c397c8d0949' + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument( + "--target_dir", + default=DATA_HOME + "/aidatatang_200zh", + type=str, + help="Directory to save the dataset. (default: %(default)s)") +parser.add_argument( + "--manifest_prefix", + default="manifest", + type=str, + help="Filepath prefix for output manifests. (default: %(default)s)") +args = parser.parse_args() + + +def create_manifest(data_dir, manifest_path_prefix): + print("Creating manifest %s ..." 
% manifest_path_prefix)
+    json_lines = []
+    transcript_path = os.path.join(data_dir, 'transcript',
+                                   'aidatatang_200_zh_transcript.txt')
+    transcript_dict = {}
+    for line in codecs.open(transcript_path, 'r', 'utf-8'):
+        line = line.strip()
+        if line == '':
+            continue
+        audio_id, text = line.split(' ', 1)
+        # remove whitespace (the transcript is character-level text)
+        text = ''.join(text.split())
+        transcript_dict[audio_id] = text
+
+    data_types = ['train', 'dev', 'test']
+    for dtype in data_types:
+        del json_lines[:]
+        total_sec = 0.0
+        total_text = 0.0
+        total_num = 0
+
+        audio_dir = os.path.join(data_dir, 'corpus/', dtype)
+        for subfolder, _, filelist in sorted(os.walk(audio_dir)):
+            for fname in filelist:
+                if not fname.endswith('.wav'):
+                    continue
+
+                audio_path = os.path.abspath(os.path.join(subfolder, fname))
+                audio_id = os.path.basename(fname)[:-4]
+
+                audio_data, samplerate = soundfile.read(audio_path)
+                duration = float(len(audio_data) / samplerate)
+                text = transcript_dict[audio_id]
+                json_lines.append(
+                    json.dumps(
+                        {
+                            'utt': audio_id,
+                            'feat': audio_path,
+                            'feat_shape': (duration, ),  # second
+                            'text': text,
+                        },
+                        ensure_ascii=False))
+
+                total_sec += duration
+                total_text += len(text)
+                total_num += 1
+
+        manifest_path = manifest_path_prefix + '.' + dtype
+        with codecs.open(manifest_path, 'w', 'utf-8') as fout:
+            for line in json_lines:
+                fout.write(line + '\n')
+
+        manifest_dir = os.path.dirname(manifest_path_prefix)
+        meta_path = os.path.join(manifest_dir, dtype) + '.meta'
+        with open(meta_path, 'w') as f:
+            print(f"{dtype}:", file=f)
+            print(f"{total_num} utts", file=f)
+            print(f"{total_sec / (60*60)} h", file=f)
+            print(f"{total_text} text", file=f)
+            print(f"{total_text / total_sec} text/sec", file=f)
+            print(f"{total_sec / total_num} sec/utt", file=f)
+
+
+def prepare_dataset(url, md5sum, target_dir, manifest_path, subset):
+    """Download, unpack and create manifest file."""
+    data_dir = os.path.join(target_dir, subset)
+    if not os.path.exists(data_dir):
+        filepath = download(url, md5sum, target_dir)
+        unpack(filepath, target_dir)
+        # unpack all audio tar files
+        audio_dir = os.path.join(data_dir, 'corpus')
+        for subfolder, dirlist, filelist in sorted(os.walk(audio_dir)):
+            for sub in dirlist:
+                print(f"unpack dir {sub}...")
+                for folder, _, filelist in sorted(
+                        os.walk(os.path.join(subfolder, sub))):
+                    for ftar in filelist:
+                        unpack(os.path.join(folder, ftar), folder, True)
+    else:
+        print("Skip downloading and unpacking. Data already exists in %s." %
+              target_dir)
+
+    create_manifest(data_dir, manifest_path)
+
+
+def main():
+    if args.target_dir.startswith('~'):
+        args.target_dir = os.path.expanduser(args.target_dir)
+
+    prepare_dataset(
+        url=DATA_URL,
+        md5sum=MD5_DATA,
+        target_dir=args.target_dir,
+        manifest_path=args.manifest_prefix,
+        subset='aidatatang_200zh')
+
+    print("Data download and manifest prepare done!")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/examples/dataset/aishell/.gitignore b/examples/dataset/aishell/.gitignore
index 9c6e517e5..27194aab8 100644
--- a/examples/dataset/aishell/.gitignore
+++ b/examples/dataset/aishell/.gitignore
@@ -1 +1,5 @@
 data_aishell*
+*.meta
+manifest.*
+*.tgz
+resource_aishell
diff --git a/examples/dataset/aishell/README.md b/examples/dataset/aishell/README.md
new file mode 100644
index 000000000..6770cd207
--- /dev/null
+++ b/examples/dataset/aishell/README.md
@@ -0,0 +1,3 @@
+# [Aishell1](http://www.openslr.org/33/)
+
+This Open Source Mandarin Speech Corpus, AISHELL-ASR0009-OS1, is 178 hours long. It is a part of AISHELL-ASR0009, whose utterances cover 11 domains, including smart home, autonomous driving, and industrial production. The whole recording was made in a quiet indoor environment, using 3 different devices at the same time: a high-fidelity microphone (44.1 kHz, 16-bit), an Android-system mobile phone (16 kHz, 16-bit), and an iOS-system mobile phone (16 kHz, 16-bit). The high-fidelity audio was re-sampled to 16 kHz to build AISHELL-ASR0009-OS1. 400 speakers from different accent areas in China were invited to participate in the recording. The manual transcription accuracy rate is above 95%, achieved through professional speech annotation and strict quality inspection. The corpus is divided into training, development and testing sets. (This database is free for academic research, not for commercial use, unless permission is granted.)
diff --git a/examples/dataset/aishell/aishell.py b/examples/dataset/aishell/aishell.py
index a0cabe352..66e069013 100644
--- a/examples/dataset/aishell/aishell.py
+++ b/examples/dataset/aishell/aishell.py
@@ -31,9 +31,11 @@ from utils.utility import unpack
 DATA_HOME = os.path.expanduser('~/.cache/paddle/dataset/speech')
 
 URL_ROOT = 'http://www.openslr.org/resources/33'
-URL_ROOT = 'https://openslr.magicdatatech.com/resources/33'
+# URL_ROOT = 'https://openslr.magicdatatech.com/resources/33'
 DATA_URL = URL_ROOT + '/data_aishell.tgz'
 MD5_DATA = '2f494334227864a8a8fec932999db9d8'
+RESOURCE_URL = URL_ROOT + '/resource_aishell.tgz'
+MD5_RESOURCE = '957d480a0fcac85fc18e550756f624e5'
 
 parser = argparse.ArgumentParser(description=__doc__)
 parser.add_argument(
@@ -60,18 +62,22 @@ def create_manifest(data_dir, manifest_path_prefix):
         if line == '':
             continue
         audio_id, text = line.split(' ', 1)
-        # remove withespace
+        # remove whitespace (the transcript is character-level text)
         text = ''.join(text.split())
         transcript_dict[audio_id] = text
 
     data_types = ['train', 'dev', 'test']
     for dtype in data_types:
         del json_lines[:]
+        total_sec = 0.0
+        total_text = 0.0
+        total_num = 0
+
         audio_dir = os.path.join(data_dir, 'wav', dtype)
         for subfolder, _, filelist in sorted(os.walk(audio_dir)):
             for fname in filelist:
-                audio_path = os.path.join(subfolder, fname)
-                audio_id = fname[:-4]
+                audio_path = os.path.abspath(os.path.join(subfolder, fname))
+                audio_id = os.path.basename(fname)[:-4]
                 # if no transcription for audio then skipped
                 if audio_id not in transcript_dict:
                     continue
@@ -81,22 +87,34 @@ def create_manifest(data_dir, manifest_path_prefix):
             json_lines.append(
                 json.dumps(
                     {
-                        'utt':
-                        os.path.splitext(os.path.basename(audio_path))[0],
-                        'feat':
-                        audio_path,
+                        'utt': audio_id,
+                        'feat': audio_path,
                         'feat_shape': (duration, ),  # second
-                        'text':
-                        text
+                        'text': text
                     },
                     ensure_ascii=False))
+
+            total_sec += duration
+            total_text += len(text)
+            total_num += 1
+
         manifest_path = manifest_path_prefix + '.'
+ dtype with codecs.open(manifest_path, 'w', 'utf-8') as fout: for line in json_lines: fout.write(line + '\n') + manifest_dir = os.path.dirname(manifest_path_prefix) + meta_path = os.path.join(manifest_dir, dtype) + '.meta' + with open(meta_path, 'w') as f: + print(f"{dtype}:", file=f) + print(f"{total_num} utts", file=f) + print(f"{total_sec / (60*60)} h", file=f) + print(f"{total_text} text", file=f) + print(f"{total_text / total_sec} text/sec", file=f) + print(f"{total_sec / total_num} sec/utt", file=f) -def prepare_dataset(url, md5sum, target_dir, manifest_path): + +def prepare_dataset(url, md5sum, target_dir, manifest_path=None): """Download, unpack and create manifest file.""" data_dir = os.path.join(target_dir, 'data_aishell') if not os.path.exists(data_dir): @@ -110,7 +128,9 @@ def prepare_dataset(url, md5sum, target_dir, manifest_path): else: print("Skip downloading and unpacking. Data already exists in %s." % target_dir) - create_manifest(data_dir, manifest_path) + + if manifest_path: + create_manifest(data_dir, manifest_path) def main(): @@ -123,6 +143,14 @@ def main(): target_dir=args.target_dir, manifest_path=args.manifest_prefix) + prepare_dataset( + url=RESOURCE_URL, + md5sum=MD5_RESOURCE, + target_dir=args.target_dir, + manifest_path=None) + + print("Data download and manifest prepare done!") + if __name__ == '__main__': main() diff --git a/examples/dataset/aishell3/README.md b/examples/dataset/aishell3/README.md new file mode 100644 index 000000000..8a29a6d0f --- /dev/null +++ b/examples/dataset/aishell3/README.md @@ -0,0 +1,3 @@ +# [Aishell3](http://www.openslr.org/93/) + +AISHELL-3 is a large-scale and high-fidelity multi-speaker Mandarin speech corpus which could be used to train multi-speaker Text-to-Speech (TTS) systems. The corpus contains roughly **85 hours** of emotion-neutral recordings spoken by 218 native Chinese mandarin speakers and total 88035 utterances. Their auxiliary attributes such as gender, age group and native accents are explicitly marked and provided in the corpus. Accordingly, transcripts in Chinese character-level and pinyin-level are provided along with the recordings. The word & tone transcription accuracy rate is above 98%, through professional speech annotation and strict quality inspection for tone and prosody. ( This database is free for academic research, not in the commerce, if without permission. ) diff --git a/examples/dataset/gigaspeech/.gitignore b/examples/dataset/gigaspeech/.gitignore new file mode 100644 index 000000000..7f78176b7 --- /dev/null +++ b/examples/dataset/gigaspeech/.gitignore @@ -0,0 +1 @@ +GigaSpeech/ diff --git a/examples/dataset/gigaspeech/README.md b/examples/dataset/gigaspeech/README.md new file mode 100644 index 000000000..4a1715cb8 --- /dev/null +++ b/examples/dataset/gigaspeech/README.md @@ -0,0 +1,10 @@ +# [GigaSpeech](https://github.com/SpeechColab/GigaSpeech) + +``` +git clone https://github.com/SpeechColab/GigaSpeech.git + +cd GigaSpeech +utils/gigaspeech_download.sh /disk1/audio_data/gigaspeech +toolkits/kaldi/gigaspeech_data_prep.sh --train-subset XL /disk1/audio_data/gigaspeech ../data +cd .. +``` diff --git a/examples/dataset/gigaspeech/gigaspeech.py b/examples/dataset/gigaspeech/gigaspeech.py new file mode 100644 index 000000000..185a92b8d --- /dev/null +++ b/examples/dataset/gigaspeech/gigaspeech.py @@ -0,0 +1,13 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/examples/dataset/gigaspeech/run.sh b/examples/dataset/gigaspeech/run.sh new file mode 100755 index 000000000..a1ad8610c --- /dev/null +++ b/examples/dataset/gigaspeech/run.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -e + +curdir=$PWD + +test -d GigaSpeech || git clone https://github.com/SpeechColab/GigaSpeech.git + + +pushd GigaSpeech +source env_vars.sh +./utils/download_gigaspeech.sh ${curdir}/ +#toolkits/kaldi/gigaspeech_data_prep.sh --train-subset XL /disk1/audio_data/gigaspeech ../data +popd diff --git a/examples/dataset/librispeech/.gitignore b/examples/dataset/librispeech/.gitignore index a8d8eb76d..465806def 100644 --- a/examples/dataset/librispeech/.gitignore +++ b/examples/dataset/librispeech/.gitignore @@ -1,7 +1,9 @@ -dev-clean/ -dev-other/ -test-clean/ -test-other/ -train-clean-100/ -train-clean-360/ -train-other-500/ +dev-clean +dev-other +test-clean +test-other +train-clean-100 +train-clean-360 +train-other-500 +*.meta +manifest.* diff --git a/examples/dataset/librispeech/librispeech.py b/examples/dataset/librispeech/librispeech.py index 55012f73c..e85bbb3aa 100644 --- a/examples/dataset/librispeech/librispeech.py +++ b/examples/dataset/librispeech/librispeech.py @@ -77,6 +77,10 @@ def create_manifest(data_dir, manifest_path): """ print("Creating manifest %s ..." % manifest_path) json_lines = [] + total_sec = 0.0 + total_text = 0.0 + total_num = 0 + for subfolder, _, filelist in sorted(os.walk(data_dir)): text_filelist = [ filename for filename in filelist if filename.endswith('trans.txt') @@ -86,7 +90,9 @@ def create_manifest(data_dir, manifest_path): for line in io.open(text_filepath, encoding="utf8"): segments = line.strip().split() text = ' '.join(segments[1:]).lower() - audio_filepath = os.path.join(subfolder, segments[0] + '.flac') + + audio_filepath = os.path.abspath( + os.path.join(subfolder, segments[0] + '.flac')) audio_data, samplerate = soundfile.read(audio_filepath) duration = float(len(audio_data)) / samplerate json_lines.append( @@ -99,10 +105,27 @@ def create_manifest(data_dir, manifest_path): 'text': text })) + + total_sec += duration + total_text += len(text) + total_num += 1 + with codecs.open(manifest_path, 'w', 'utf-8') as out_file: for line in json_lines: out_file.write(line + '\n') + subset = os.path.splitext(manifest_path)[1][1:] + manifest_dir = os.path.dirname(manifest_path) + data_dir_name = os.path.split(data_dir)[-1] + meta_path = os.path.join(manifest_dir, data_dir_name) + '.meta' + with open(meta_path, 'w') as f: + print(f"{subset}:", file=f) + print(f"{total_num} utts", file=f) + print(f"{total_sec / (60*60)} h", file=f) + print(f"{total_text} text", file=f) + print(f"{total_text / total_sec} text/sec", file=f) + print(f"{total_sec / total_num} sec/utt", file=f) + def prepare_dataset(url, md5sum, target_dir, manifest_path): """Download, unpack and create summmary manifest file. 
diff --git a/examples/dataset/magicdata/README.md b/examples/dataset/magicdata/README.md new file mode 100644 index 000000000..083aee97b --- /dev/null +++ b/examples/dataset/magicdata/README.md @@ -0,0 +1,15 @@ +# [MagicData](http://www.openslr.org/68/) + +MAGICDATA Mandarin Chinese Read Speech Corpus was developed by MAGIC DATA Technology Co., Ltd. and freely published for non-commercial use. +The contents and the corresponding descriptions of the corpus include: + +* The corpus contains 755 hours of speech data, which is mostly mobile recorded data. +* 1080 speakers from different accent areas in China are invited to participate in the recording. +* The sentence transcription accuracy is higher than 98%. +* Recordings are conducted in a quiet indoor environment. +* The database is divided into training set, validation set, and testing set in a ratio of 51: 1: 2. +* Detail information such as speech data coding and speaker information is preserved in the metadata file. +* The domain of recording texts is diversified, including interactive Q&A, music search, SNS messages, home command and control, etc. +* Segmented transcripts are also provided. + +The corpus aims to support researchers in speech recognition, machine translation, speaker recognition, and other speech-related fields. Therefore, the corpus is totally free for academic use. diff --git a/examples/dataset/multi_cn/README.md b/examples/dataset/multi_cn/README.md new file mode 100644 index 000000000..d59b11b6d --- /dev/null +++ b/examples/dataset/multi_cn/README.md @@ -0,0 +1,11 @@ +# multi-cn + +This is a Chinese speech recognition recipe that trains on all Chinese corpora on OpenSLR, including: + +* Aidatatang (140 hours) +* Aishell (151 hours) +* MagicData (712 hours) +* Primewords (99 hours) +* ST-CMDS (110 hours) +* THCHS-30 (26 hours) +* optional AISHELL2 (~1000 hours) if available diff --git a/examples/dataset/primewords/README.md b/examples/dataset/primewords/README.md new file mode 100644 index 000000000..a4f1ed65d --- /dev/null +++ b/examples/dataset/primewords/README.md @@ -0,0 +1,6 @@ +# [Primewords](http://www.openslr.org/47/) + +This free Chinese Mandarin speech corpus set is released by Shanghai Primewords Information Technology Co., Ltd. +The corpus is recorded by smart mobile phones from 296 native Chinese speakers. The transcription accuracy is larger than 98%, at the confidence level of 95%. It is free for academic use. + +The mapping between the transcript and utterance is given in JSON format. diff --git a/examples/dataset/st-cmds/README.md b/examples/dataset/st-cmds/README.md new file mode 100644 index 000000000..c7ae50e59 --- /dev/null +++ b/examples/dataset/st-cmds/README.md @@ -0,0 +1 @@ +# [FreeST](http://www.openslr.org/38/) diff --git a/examples/dataset/ted_en_zh/.gitignore b/examples/dataset/ted_en_zh/.gitignore new file mode 100644 index 000000000..ad6ab64af --- /dev/null +++ b/examples/dataset/ted_en_zh/.gitignore @@ -0,0 +1,6 @@ +*.tar.gz.* +manifest.* +*.md +EN-ZH/ +train-split/ +test-segment/ \ No newline at end of file diff --git a/examples/dataset/ted_en_zh/ted_en_zh.py b/examples/dataset/ted_en_zh/ted_en_zh.py new file mode 100644 index 000000000..14bef01d2 --- /dev/null +++ b/examples/dataset/ted_en_zh/ted_en_zh.py @@ -0,0 +1,116 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Prepare Ted-En-Zh speech translation dataset
+
+Create manifest files from the split dataset.
+dev set: tst2010, test set: tst2015
+Manifest file is a json-format file with each line containing the
+meta data (i.e. audio filepath, transcript and audio duration)
+of each audio file in the data set.
+"""
+import argparse
+import codecs
+import json
+import os
+
+import soundfile
+
+parser = argparse.ArgumentParser(description=__doc__)
+parser.add_argument(
+    "--src_dir",
+    default="",
+    type=str,
+    help="Directory of the Kaldi-split data. (default: %(default)s)")
+parser.add_argument(
+    "--manifest_prefix",
+    default="manifest",
+    type=str,
+    help="Filepath prefix for output manifests. (default: %(default)s)")
+args = parser.parse_args()
+
+
+def create_manifest(data_dir, manifest_path_prefix):
+    print("Creating manifest %s ..." % manifest_path_prefix)
+    json_lines = []
+
+    data_types_infos = [
+        ('train', 'train-split/train-segment', 'En-Zh/train.en-zh'),
+        ('dev', 'test-segment/tst2010', 'En-Zh/tst2010.en-zh'),
+        ('test', 'test-segment/tst2015', 'En-Zh/tst2015.en-zh')
+    ]
+    for data_info in data_types_infos:
+        dtype, audio_relative_dir, text_relative_path = data_info
+        del json_lines[:]
+        total_sec = 0.0
+        total_text = 0.0
+        total_num = 0
+
+        text_path = os.path.join(data_dir, text_relative_path)
+        audio_dir = os.path.join(data_dir, audio_relative_dir)
+
+        for line in codecs.open(text_path, 'r', 'utf-8', errors='ignore'):
+            line = line.strip()
+            if len(line) < 1:
+                continue
+            audio_id, transcription, translation = line.split('\t')
+            utt = audio_id.split('.')[0]
+
+            audio_path = os.path.join(audio_dir, audio_id)
+            if os.path.exists(audio_path):
+                # skip audio files that are too small to hold valid speech
+                if os.path.getsize(audio_path) < 30000:
+                    continue
+                audio_data, samplerate = soundfile.read(audio_path)
+                duration = float(len(audio_data) / samplerate)
+                json_lines.append(
+                    json.dumps(
+                        {
+                            'utt': utt,
+                            'feat': audio_path,
+                            'feat_shape': (duration, ),  # second
+                            'text': " ".join(translation.split()),
+                            'text1': " ".join(transcription.split())
+                        },
+                        ensure_ascii=False))
+
+                total_sec += duration
+                total_text += len(translation.split())
+                total_num += 1
+                if not total_num % 1000:
+                    print(dtype, 'Processed:', total_num)
+
+        manifest_path = manifest_path_prefix + '.'
+ dtype + '.raw' + with codecs.open(manifest_path, 'w', 'utf-8') as fout: + for line in json_lines: + fout.write(line + '\n') + + +def prepare_dataset(src_dir, manifest_path=None): + """create manifest file.""" + if os.path.isdir(manifest_path): + manifest_path = os.path.join(manifest_path, 'manifest') + if manifest_path: + create_manifest(src_dir, manifest_path) + + +def main(): + if args.src_dir.startswith('~'): + args.src_dir = os.path.expanduser(args.src_dir) + + prepare_dataset(src_dir=args.src_dir, manifest_path=args.manifest_prefix) + + print("manifest prepare done!") + + +if __name__ == '__main__': + main() diff --git a/examples/dataset/thchs30/.gitignore b/examples/dataset/thchs30/.gitignore new file mode 100644 index 000000000..b94cd7e40 --- /dev/null +++ b/examples/dataset/thchs30/.gitignore @@ -0,0 +1,6 @@ +*.tgz +manifest.* +data_thchs30 +resource +test-noise +*.meta diff --git a/examples/dataset/thchs30/README.md b/examples/dataset/thchs30/README.md new file mode 100644 index 000000000..6b59d663a --- /dev/null +++ b/examples/dataset/thchs30/README.md @@ -0,0 +1,55 @@ +# [THCHS30](http://www.openslr.org/18/) + +This is the *data part* of the `THCHS30 2015` acoustic data +& scripts dataset. + +The dataset is described in more detail in the paper ``THCHS-30 : A Free +Chinese Speech Corpus`` by Dong Wang, Xuewei Zhang. + +A paper (if it can be called a paper) 13 years ago regarding the database: + +Dong Wang, Dalei Wu, Xiaoyan Zhu, ``TCMSD: A new Chinese Continuous Speech Database``, +International Conference on Chinese Computing (ICCC'01), 2001, Singapore. + +The layout of this data pack is the following: + + ``data`` + ``*.wav`` + audio data + + ``*.wav.trn`` + transcriptions + + ``{train,dev,test}`` + contain symlinks into the ``data`` directory for both audio and + transcription files. Contents of these directories define the + train/dev/test split of the data. + + ``{lm_word}`` + ``word.3gram.lm`` + trigram LM based on word + ``lexicon.txt`` + lexicon based on word + + ``{lm_phone}`` + ``phone.3gram.lm`` + trigram LM based on phone + ``lexicon.txt`` + lexicon based on phone + + ``README.TXT`` + this file + + +Data statistics +=============== + +Statistics for the data are as follows: + + =========== ========== ========== =========== + **dataset** **audio** **#sents** **#words** + =========== ========== ========== =========== + train 25 10,000 198,252 + dev 2:14 893 17,743 + test 6:15 2,495 49,085 + =========== ========== ========== =========== diff --git a/examples/dataset/thchs30/thchs30.py b/examples/dataset/thchs30/thchs30.py new file mode 100644 index 000000000..77a264cbb --- /dev/null +++ b/examples/dataset/thchs30/thchs30.py @@ -0,0 +1,186 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Prepare THCHS-30 mandarin dataset + +Download, unpack and create manifest files. +Manifest file is a json-format file with each line containing the +meta data (i.e. 
audio filepath, transcript and audio duration)
+of each audio file in the data set.
+"""
+import argparse
+import codecs
+import json
+import os
+from multiprocessing.pool import Pool
+from pathlib import Path
+
+import soundfile
+
+from utils.utility import download
+from utils.utility import unpack
+
+DATA_HOME = os.path.expanduser('~/.cache/paddle/dataset/speech')
+
+URL_ROOT = 'http://www.openslr.org/resources/18'
+# URL_ROOT = 'https://openslr.magicdatatech.com/resources/18'
+DATA_URL = URL_ROOT + '/data_thchs30.tgz'
+TEST_NOISE_URL = URL_ROOT + '/test-noise.tgz'
+RESOURCE_URL = URL_ROOT + '/resource.tgz'
+MD5_DATA = '2d2252bde5c8429929e1841d4cb95e90'
+MD5_TEST_NOISE = '7e8a985fb965b84141b68c68556c2030'
+MD5_RESOURCE = 'c0b2a565b4970a0c4fe89fefbf2d97e1'
+
+parser = argparse.ArgumentParser(description=__doc__)
+parser.add_argument(
+    "--target_dir",
+    default=DATA_HOME + "/THCHS30",
+    type=str,
+    help="Directory to save the dataset. (default: %(default)s)")
+parser.add_argument(
+    "--manifest_prefix",
+    default="manifest",
+    type=str,
+    help="Filepath prefix for output manifests. (default: %(default)s)")
+args = parser.parse_args()
+
+
+def read_trn(filepath):
+    """Read a .trn transcription file.
+    Word text is on the first line,
+    syllable text on the second line,
+    and phoneme text on the third line.
+
+    Args:
+        filepath (str): trn path.
+
+    Returns:
+        list(str): (word, syllable, phone)
+    """
+    texts = []
+    with open(filepath, 'r') as f:
+        lines = f.read().strip().split('\n')
+        assert len(lines) == 3, lines
+        # character text; remove whitespace
+        texts.append(''.join(lines[0].split()))
+        texts.extend(lines[1:])
+    return texts
+
+
+def resolve_symlink(filepath):
+    """Resolve a symlink whose content is a relative path to the real file.
+
+    Args:
+        filepath (str): symlink to a normalized transcription file.
+    """
+    sym_path = Path(filepath)
+    relative_link = sym_path.read_text().strip()
+    relative = Path(relative_link)
+    relpath = sym_path.parent / relative
+    return relpath.resolve()
+
+
+def create_manifest(data_dir, manifest_path_prefix):
+    print("Creating manifest %s ..." % manifest_path_prefix)
+    json_lines = []
+    data_types = ['train', 'dev', 'test']
+    for dtype in data_types:
+        del json_lines[:]
+        total_sec = 0.0
+        total_text = 0.0
+        total_num = 0
+
+        audio_dir = os.path.join(data_dir, dtype)
+        for subfolder, _, filelist in sorted(os.walk(audio_dir)):
+            for fname in filelist:
+                file_path = os.path.join(subfolder, fname)
+                if file_path.endswith('.wav'):
+                    audio_path = os.path.abspath(file_path)
+                    text_path = resolve_symlink(audio_path + '.trn')
+                else:
+                    continue
+
+                assert os.path.exists(audio_path) and os.path.exists(text_path)
+
+                audio_id = os.path.basename(audio_path)[:-4]
+                word_text, syllable_text, phone_text = read_trn(text_path)
+                audio_data, samplerate = soundfile.read(audio_path)
+                duration = float(len(audio_data) / samplerate)
+
+                # do not dump alignment info
+                json_lines.append(
+                    json.dumps(
+                        {
+                            'utt': audio_id,
+                            'feat': audio_path,
+                            'feat_shape': (duration, ),  # second
+                            'text': word_text,  # character
+                            'syllable': syllable_text,
+                            'phone': phone_text,
+                        },
+                        ensure_ascii=False))
+
+                total_sec += duration
+                total_text += len(word_text)
+                total_num += 1
+
+        manifest_path = manifest_path_prefix + '.' + dtype
+        with codecs.open(manifest_path, 'w', 'utf-8') as fout:
+            for line in json_lines:
+                fout.write(line + '\n')
+
+        manifest_dir = os.path.dirname(manifest_path_prefix)
+        meta_path = os.path.join(manifest_dir, dtype) + '.meta'
+        with open(meta_path, 'w') as f:
+            print(f"{dtype}:", file=f)
+            print(f"{total_num} utts", file=f)
+            print(f"{total_sec / (60*60)} h", file=f)
+            print(f"{total_text} text", file=f)
+            print(f"{total_text / total_sec} text/sec", file=f)
+            print(f"{total_sec / total_num} sec/utt", file=f)
+
+
+def prepare_dataset(url, md5sum, target_dir, manifest_path, subset):
+    """Download, unpack and create manifest file."""
+    datadir = os.path.join(target_dir, subset)
+    if not os.path.exists(datadir):
+        filepath = download(url, md5sum, target_dir)
+        unpack(filepath, target_dir)
+    else:
+        print("Skip downloading and unpacking. Data already exists in %s." %
+              target_dir)
+
+    if subset == 'data_thchs30':
+        create_manifest(datadir, manifest_path)
+
+
+def main():
+    if args.target_dir.startswith('~'):
+        args.target_dir = os.path.expanduser(args.target_dir)
+
+    tasks = [
+        (DATA_URL, MD5_DATA, args.target_dir, args.manifest_prefix,
+         "data_thchs30"),
+        (TEST_NOISE_URL, MD5_TEST_NOISE, args.target_dir, args.manifest_prefix,
+         "test-noise"),
+        (RESOURCE_URL, MD5_RESOURCE, args.target_dir, args.manifest_prefix,
+         "resource"),
+    ]
+    with Pool(7) as pool:
+        pool.starmap(prepare_dataset, tasks)
+
+    print("Data download and manifest prepare done!")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/examples/dataset/timit/.gitignore b/examples/dataset/timit/.gitignore
new file mode 100644
index 000000000..9a3f42281
--- /dev/null
+++ b/examples/dataset/timit/.gitignore
@@ -0,0 +1,4 @@
+TIMIT.*
+TIMIT
+manifest.*
+*.meta
diff --git a/examples/dataset/timit/timit.py b/examples/dataset/timit/timit.py
new file mode 100644
index 000000000..311d445cb
--- /dev/null
+++ b/examples/dataset/timit/timit.py
@@ -0,0 +1,241 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Prepare TIMIT ASR dataset.
+
+Download, unpack and create manifest files.
+Manifest file is a json-format file with each line containing the
+meta data (i.e. audio filepath, transcript and audio duration)
+of each audio file in the data set.
+"""
+import argparse
+import codecs
+import json
+import os
+import re
+import string
+from pathlib import Path
+
+import soundfile
+
+from utils.utility import check_md5sum  # called below; assumed in utils.utility
+from utils.utility import unzip
+
+URL_ROOT = ""
+MD5_DATA = "45c68037c7fdfe063a43c851f181fb2d"
+
+parser = argparse.ArgumentParser(description=__doc__)
+parser.add_argument(
+    "--target_dir",
+    default='~/.cache/paddle/dataset/speech/timit',
+    type=str,
+    help="Directory to save the dataset. (default: %(default)s)")
+parser.add_argument(
+    "--manifest_prefix",
+    default="manifest",
+    type=str,
+    help="Filepath prefix for output manifests. (default: %(default)s)")
+args = parser.parse_args()
+
+#: A string containing Chinese punctuation marks (non-stops).
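+#: (These tables appear to mirror the ``zhon`` package's punctuation sets;
+#: ``tn()`` below combines them with ``string.punctuation`` to strip
+#: punctuation from transcripts.)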
+non_stops = (
+    # Fullwidth ASCII variants
+    '\uFF02\uFF03\uFF04\uFF05\uFF06\uFF07\uFF08\uFF09\uFF0A\uFF0B\uFF0C\uFF0D'
+    '\uFF0F\uFF1A\uFF1B\uFF1C\uFF1D\uFF1E\uFF20\uFF3B\uFF3C\uFF3D\uFF3E\uFF3F'
+    '\uFF40\uFF5B\uFF5C\uFF5D\uFF5E\uFF5F\uFF60'
+
+    # Halfwidth CJK punctuation
+    '\uFF62\uFF63\uFF64'
+
+    # CJK symbols and punctuation
+    '\u3000\u3001\u3003'
+
+    # CJK angle and corner brackets
+    '\u3008\u3009\u300A\u300B\u300C\u300D\u300E\u300F\u3010\u3011'
+
+    # CJK brackets and symbols/punctuation
+    '\u3014\u3015\u3016\u3017\u3018\u3019\u301A\u301B\u301C\u301D\u301E\u301F'
+
+    # Other CJK symbols
+    '\u3030'
+
+    # Special CJK indicators
+    '\u303E\u303F'
+
+    # Dashes
+    '\u2013\u2014'
+
+    # Quotation marks and apostrophe
+    '\u2018\u2019\u201B\u201C\u201D\u201E\u201F'
+
+    # General punctuation
+    '\u2026\u2027'
+
+    # Overscores and underscores
+    '\uFE4F'
+
+    # Small form variants
+    '\uFE51\uFE54'
+
+    # Latin punctuation
+    '\u00B7')
+
+#: A string of Chinese stops.
+stops = (
+    '\uFF01'  # Fullwidth exclamation mark
+    '\uFF1F'  # Fullwidth question mark
+    '\uFF61'  # Halfwidth ideographic full stop
+    '\u3002'  # Ideographic full stop
+)
+
+#: A string containing all Chinese punctuation.
+punctuation = non_stops + stops
+
+
+def tn(text):
+    # lower text
+    text = text.lower()
+    # remove punc
+    text = re.sub(f'[{punctuation}{string.punctuation}]', "", text)
+    return text
+
+
+def read_txt(filepath: str) -> str:
+    with open(filepath, 'r') as f:
+        line = f.read().strip().split(maxsplit=2)[2]
+    return tn(line)
+
+
+def read_align(filepath: str) -> str:
+    """Read a word or phone alignment file.
+
+    Args:
+        filepath (str): path of the alignment file.
+
+    Returns:
+        str: tokens separated by spaces.
+    """
+    aligns = []  # (start, end, token)
+    with open(filepath, 'r') as f:
+        for line in f:
+            items = line.strip().split()
+            # for phone: (Note: beginning and ending silence regions are marked with h#)
+            if items[2].strip() == 'h#':
+                continue
+            aligns.append(items)
+    return ' '.join([item[2] for item in aligns])
+
+
+def create_manifest(data_dir, manifest_path_prefix):
+    """Create a manifest json file summarizing the data set, with each line
+    containing the meta data (i.e. audio filepath, transcription text, audio
+    duration) of each audio file within the data set.
+    """
+    print("Creating manifest %s ..." % manifest_path_prefix)
+    json_lines = []
+    utts = set()
+
+    data_types = ['TRAIN', 'TEST']
+    for dtype in data_types:
+        del json_lines[:]
+        total_sec = 0.0
+        total_text = 0.0
+        total_num = 0
+
+        audio_dir = Path(os.path.join(data_dir, dtype))
+        for fname in sorted(audio_dir.rglob('*.WAV')):
+            audio_path = fname.resolve()  # .WAV
+            audio_id = audio_path.stem
+            # if the utt id already exists, skip it
+            if audio_id in utts:
+                continue
+
+            utts.add(audio_id)
+            text_path = audio_path.with_suffix('.TXT')
+            phone_path = audio_path.with_suffix('.PHN')
+            word_path = audio_path.with_suffix('.WRD')
+
+            audio_data, samplerate = soundfile.read(
+                str(audio_path), dtype='int16')
+            duration = float(len(audio_data) / samplerate)
+            word_text = read_txt(text_path)
+            phone_text = read_align(phone_path)
+
+            gender_spk = str(audio_path.parent.stem)
+            spk = gender_spk[1:]
+            gender = gender_spk[0]
+            utt_id = '_'.join([spk, gender, audio_id])
+            # do not dump alignment info
+            json_lines.append(
+                json.dumps(
+                    {
+                        'utt': utt_id,
+                        'feat': str(audio_path),
+                        'feat_shape': (duration, ),  # second
+                        'text': word_text,  # word
+                        'phone': phone_text,
+                        'spk': spk,
+                        'gender': gender,
+                    },
+                    ensure_ascii=False))
+
+            total_sec += duration
+            total_text += len(word_text.split())
+            total_num += 1
+
+        manifest_path = manifest_path_prefix + '.' + dtype.lower()
+        with codecs.open(manifest_path, 'w', 'utf-8') as fout:
+            for line in json_lines:
+                fout.write(line + '\n')
+
+        manifest_dir = os.path.dirname(manifest_path_prefix)
+        meta_path = os.path.join(manifest_dir, dtype.lower()) + '.meta'
+        with open(meta_path, 'w') as f:
+            print(f"{dtype}:", file=f)
+            print(f"{total_num} utts", file=f)
+            print(f"{total_sec / (60*60)} h", file=f)
+            print(f"{total_text} text", file=f)
+            print(f"{total_text / total_sec} text/sec", file=f)
+            print(f"{total_sec / total_num} sec/utt", file=f)
+
+
+def prepare_dataset(url, md5sum, target_dir, manifest_path):
+    """Download, unpack and create summary manifest file."""
+    filepath = os.path.join(target_dir, "TIMIT.zip")
+    if not os.path.exists(filepath):
+        print(f"Please download TIMIT.zip into {target_dir}.")
+        raise FileNotFoundError
+
+    if not os.path.exists(os.path.join(target_dir, "TIMIT")):
+        # check md5sum
+        assert check_md5sum(filepath, md5sum)
+        # unpack
+        unzip(filepath, target_dir)
+    else:
+        print("Skip downloading and unpacking. Data already exists in %s." %
+              target_dir)
+    # create manifest json file
+    create_manifest(os.path.join(target_dir, "TIMIT"), manifest_path)
+
+
+def main():
+    if args.target_dir.startswith('~'):
+        args.target_dir = os.path.expanduser(args.target_dir)
+
+    prepare_dataset(URL_ROOT, MD5_DATA, args.target_dir, args.manifest_prefix)
+    print("Data download and manifest prepare done!")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/examples/dataset/timit/timit_kaldi_standard_split.py b/examples/dataset/timit/timit_kaldi_standard_split.py
new file mode 100644
index 000000000..2b494c06d
--- /dev/null
+++ b/examples/dataset/timit/timit_kaldi_standard_split.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Prepare TIMIT dataset (Standard split from Kaldi) + +Create manifest files from splited datased. +Manifest file is a json-format file with each line containing the +meta data (i.e. audio filepath, transcript and audio duration) +of each audio file in the data set. +""" +import argparse +import codecs +import json +import os + +import soundfile + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument( + "--src_dir", + default="", + type=str, + help="Directory to kaldi splited data. (default: %(default)s)") +parser.add_argument( + "--manifest_prefix", + default="manifest", + type=str, + help="Filepath prefix for output manifests. (default: %(default)s)") +args = parser.parse_args() + + +def create_manifest(data_dir, manifest_path_prefix): + print("Creating manifest %s ..." % manifest_path_prefix) + json_lines = [] + + data_types = ['train', 'dev', 'test'] + for dtype in data_types: + del json_lines[:] + total_sec = 0.0 + total_text = 0.0 + total_num = 0 + + phn_path = os.path.join(data_dir, dtype + '.text') + phn_dict = {} + for line in codecs.open(phn_path, 'r', 'utf-8'): + line = line.strip() + if line == '': + continue + audio_id, text = line.split(' ', 1) + phn_dict[audio_id] = text + + audio_dir = os.path.join(data_dir, dtype + '_sph.scp') + for line in codecs.open(audio_dir, 'r', 'utf-8'): + audio_id, audio_path = line.strip().split() + # if no transcription for audio then raise error + assert audio_id in phn_dict + audio_data, samplerate = soundfile.read(audio_path) + duration = float(len(audio_data) / samplerate) + text = phn_dict[audio_id] + json_lines.append( + json.dumps( + { + 'utt': audio_id, + 'feat': audio_path, + 'feat_shape': (duration, ), # second + 'text': text + }, + ensure_ascii=False)) + + total_sec += duration + total_text += len(text) + total_num += 1 + + manifest_path = manifest_path_prefix + '.' 
+ dtype + '.raw' + with codecs.open(manifest_path, 'w', 'utf-8') as fout: + for line in json_lines: + fout.write(line + '\n') + + +def prepare_dataset(src_dir, manifest_path=None): + """create manifest file.""" + if os.path.isdir(manifest_path): + manifest_path = os.path.join(manifest_path, 'manifest') + if manifest_path: + create_manifest(src_dir, manifest_path) + + +def main(): + if args.src_dir.startswith('~'): + args.src_dir = os.path.expanduser(args.src_dir) + + prepare_dataset(src_dir=args.src_dir, manifest_path=args.manifest_prefix) + + print("manifest prepare done!") + + +if __name__ == '__main__': + main() diff --git a/examples/chinese_g2p/.gitignore b/examples/g2p/.gitignore similarity index 100% rename from examples/chinese_g2p/.gitignore rename to examples/g2p/.gitignore diff --git a/examples/g2p/README.md b/examples/g2p/README.md new file mode 100644 index 000000000..4ec5922b3 --- /dev/null +++ b/examples/g2p/README.md @@ -0,0 +1,3 @@ +# G2P + +* zh - Chinese G2P diff --git a/examples/g2p/zh/README.md b/examples/g2p/zh/README.md new file mode 100644 index 000000000..de5573565 --- /dev/null +++ b/examples/g2p/zh/README.md @@ -0,0 +1,93 @@ +# G2P + +* WS +jieba +* G2P +pypinyin +* Tone sandhi +simple + +We recommend using [Paraket](https://github.com/PaddlePaddle/Parakeet] [TextFrontEnd](https://github.com/PaddlePaddle/Parakeet/blob/develop/parakeet/frontend/__init__.py) to do G2P. +The phoneme set should be changed, you can reference `examples/thchs30/a0/data/dict/syllable.lexicon`. + +## Download Baker dataset + +[Baker](https://test.data-baker.com/#/data/index/source) dataset has to be downloaded mannually and moved to './data', +because you will have to pass the `CATTCHA` from a browswe to download the dataset. + + +## RUN + +``` +. path.sh +./run.sh +``` + +## Result + +``` +exp/ +|-- 000001-010000.txt +|-- ref.pinyin +|-- trans.jieba.pinyin +`-- trans.pinyin + +0 directories, 4 files +``` + +``` +4f5a368441eb16aaf43dc1972f8b63dd exp/000001-010000.txt +01707896391c2de9b6fc4a39654be942 exp/ref.pinyin +43380ef160f65a23a3a0544700aa49b8 exp/trans.jieba.pinyin +8e6ff1fc22d8e8584082e804e8bcdeb7 exp/trans.pinyin +``` + +``` +==> exp/000001-010000.txt <== +000001 卡尔普#2陪外孙#1玩滑梯#4。 + ka2 er2 pu3 pei2 wai4 sun1 wan2 hua2 ti1 +000002 假语村言#2别再#1拥抱我#4。 + jia2 yu3 cun1 yan2 bie2 zai4 yong1 bao4 wo3 +000003 宝马#1配挂#1跛骡鞍#3,貂蝉#1怨枕#2董翁榻#4。 + bao2 ma3 pei4 gua4 bo3 luo2 an1 diao1 chan2 yuan4 zhen3 dong3 weng1 ta4 +000004 邓小平#2与#1撒切尔#2会晤#4。 + deng4 xiao3 ping2 yu3 sa4 qie4 er3 hui4 wu4 +000005 老虎#1幼崽#2与#1宠物犬#1玩耍#4。 + lao2 hu3 you4 zai3 yu2 chong3 wu4 quan3 wan2 shua3 + +==> exp/ref.pinyin <== +000001 ka2 er2 pu3 pei2 wai4 sun1 wan2 hua2 ti1 +000002 jia2 yu3 cun1 yan2 bie2 zai4 yong1 bao4 wo3 +000003 bao2 ma3 pei4 gua4 bo3 luo2 an1 diao1 chan2 yuan4 zhen3 dong3 weng1 ta4 +000004 deng4 xiao3 ping2 yu3 sa4 qie4 er3 hui4 wu4 +000005 lao2 hu3 you4 zai3 yu2 chong3 wu4 quan3 wan2 shua3 +000006 shen1 chang2 yue1 wu2 chi3 er4 cun4 wu3 fen1 huo4 yi3 shang4 +000007 zhao4 di2 yue1 cao2 yun2 teng2 qu4 gui3 wu1 +000008 zhan2 pin3 sui1 you3 zhan3 yuan2 que4 tui2 +000009 yi2 san3 ju1 er2 tong2 he2 you4 tuo1 er2 tong2 wei2 zhu3 +000010 ke1 te4 ni1 shen1 chuan1 bao4 wen2 da4 yi1 + +==> exp/trans.jieba.pinyin <== +000001 ka3 er3 pu3 pei2 wai4 sun1 wan2 hua2 ti1 +000002 jia3 yu3 cun1 yan2 bie2 zai4 yong1 bao4 wo3 +000003 bao3 ma3 pei4 gua4 bo3 luo2 an1 diao1 chan2 yuan4 zhen3 dong3 weng1 ta4 +000004 deng4 xiao3 ping2 yu3 sa1 qie4 er3 hui4 wu4 +000005 lao3 hu3 you4 zai3 yu3 chong3 wu4 quan3 wan2 shua3 +000006 shen1 
chang2 yue1 wu3 chi3 er4 cun4 wu3 fen1 huo4 yi3 shang4 +000007 zhao4 di2 yue1 cao2 yun2 teng2 qu4 gui3 wu1 +000008 zhan3 pin3 sui1 you3 zhan3 yuan2 que4 tui2 +000009 yi3 san3 ju1 er2 tong2 he2 you4 tuo1 er2 tong2 wei2 zhu3 +000010 ke1 te4 ni1 shen1 chuan1 bao4 wen2 da4 yi1 + +==> exp/trans.pinyin <== +000001 ka3 er3 pu3 pei2 wai4 sun1 wan2 hua2 ti1 +000002 jia3 yu3 cun1 yan2 bie2 zai4 yong1 bao4 wo3 +000003 bao3 ma3 pei4 gua4 bo3 luo2 an1 diao1 chan2 yuan4 zhen3 dong3 weng1 ta4 +000004 deng4 xiao3 ping2 yu3 sa1 qie4 er3 hui4 wu4 +000005 lao3 hu3 you4 zai3 yu3 chong3 wu4 quan3 wan2 shua3 +000006 shen1 chang2 yue1 wu3 chi3 er4 cun4 wu3 fen1 huo4 yi3 shang4 +000007 zhao4 di2 yue1 cao2 yun2 teng2 qu4 gui3 wu1 +000008 zhan3 pin3 sui1 you3 zhan3 yuan2 que4 tui2 +000009 yi3 san3 ju1 er2 tong2 he2 you4 tuo1 er2 tong2 wei2 zhu3 +000010 ke1 te4 ni1 shen1 chuan1 bao4 wen2 da4 yi1 +``` diff --git a/examples/chinese_g2p/local/convert_transcription.py b/examples/g2p/zh/local/convert_transcription.py similarity index 100% rename from examples/chinese_g2p/local/convert_transcription.py rename to examples/g2p/zh/local/convert_transcription.py diff --git a/examples/chinese_g2p/local/extract_pinyin_label.py b/examples/g2p/zh/local/extract_pinyin_label.py similarity index 100% rename from examples/chinese_g2p/local/extract_pinyin_label.py rename to examples/g2p/zh/local/extract_pinyin_label.py diff --git a/examples/chinese_g2p/local/ignore_sandhi.py b/examples/g2p/zh/local/ignore_sandhi.py similarity index 100% rename from examples/chinese_g2p/local/ignore_sandhi.py rename to examples/g2p/zh/local/ignore_sandhi.py diff --git a/examples/chinese_g2p/local/prepare_dataset.sh b/examples/g2p/zh/local/prepare_dataset.sh similarity index 100% rename from examples/chinese_g2p/local/prepare_dataset.sh rename to examples/g2p/zh/local/prepare_dataset.sh diff --git a/examples/chinese_g2p/path.sh b/examples/g2p/zh/path.sh similarity index 82% rename from examples/chinese_g2p/path.sh rename to examples/g2p/zh/path.sh index b4c625f95..f475ed833 100644 --- a/examples/chinese_g2p/path.sh +++ b/examples/g2p/zh/path.sh @@ -1,4 +1,4 @@ -export MAIN_ROOT=${PWD}/../../ +export MAIN_ROOT=`realpath ${PWD}/../../../` export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH} export LC_ALL=C diff --git a/examples/chinese_g2p/requirements.txt b/examples/g2p/zh/requirements.txt similarity index 100% rename from examples/chinese_g2p/requirements.txt rename to examples/g2p/zh/requirements.txt diff --git a/examples/chinese_g2p/run.sh b/examples/g2p/zh/run.sh similarity index 80% rename from examples/chinese_g2p/run.sh rename to examples/g2p/zh/run.sh index 8197dce4b..cbe140a0d 100755 --- a/examples/chinese_g2p/run.sh +++ b/examples/g2p/zh/run.sh @@ -6,16 +6,20 @@ stage=-1 stop_stage=100 exp_dir=exp -data_dir=data +data=data source ${MAIN_ROOT}/utils/parse_options.sh || exit -1 mkdir -p ${exp_dir} +if [ $stage -le -1 ] && [ $stop_stage -ge -1 ];then + mkdir -p ${data} + test -e ${data}/BZNSYP.rar || { echo "Please download BZNSYP.rar and put it in "${data}; exit -1; } +fi if [ $stage -le 0 ] && [ $stop_stage -ge 0 ];then echo "stage 0: Extracting Prosody Labeling" - bash local/prepare_dataset.sh --exp-dir ${exp_dir} --data-dir ${data_dir} + bash local/prepare_dataset.sh --exp-dir ${exp_dir} --data-dir ${data} fi # convert transcription in chinese into pinyin with pypinyin or jieba+pypinyin diff --git a/examples/ngram_lm/READEME.md b/examples/ngram_lm/READEME.md new file mode 100644 index 000000000..84e1380c3 --- /dev/null +++ 
b/examples/ngram_lm/READEME.md @@ -0,0 +1,3 @@ +# Ngram LM + +* s0 - kenlm ngram lm diff --git a/examples/ngram_lm/README.md b/examples/ngram_lm/README.md deleted file mode 100644 index 698d7c290..000000000 --- a/examples/ngram_lm/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Ngram LM - -Train chinese chararctor ngram lm by [kenlm](https://github.com/kpu/kenlm). - -``` -bash run.sh -``` diff --git a/examples/ngram_lm/s0/.gitignore b/examples/ngram_lm/s0/.gitignore new file mode 100644 index 000000000..b20d93aa5 --- /dev/null +++ b/examples/ngram_lm/s0/.gitignore @@ -0,0 +1 @@ +data/lm diff --git a/examples/ngram_lm/s0/README.md b/examples/ngram_lm/s0/README.md new file mode 100644 index 000000000..65916ec54 --- /dev/null +++ b/examples/ngram_lm/s0/README.md @@ -0,0 +1,96 @@ +# Ngram LM + +Train chinese chararctor ngram lm by [kenlm](https://github.com/kpu/kenlm). + +## Run +``` +. path.sh +bash run.sh +``` + +## Results + +``` +exp/ +|-- text +|-- text.char.tn +|-- text.word.tn +|-- text_zh_char_o5_p0_1_2_4_4_a22_q8_b8.arpa +|-- text_zh_char_o5_p0_1_2_4_4_a22_q8_b8.arpa.klm.bin +|-- text_zh_word_o3_p0_0_0_a22_q8_b8.arpa +`-- text_zh_word_o3_p0_0_0_a22_q8_b8.arpa.klm.bin + +0 directories, 7 files +``` + +``` +3ae083627b9b6cef1a82d574d8483f97 exp/text +d97da252d2a63a662af22f98af30cb8c exp/text.char.tn +c18b03005bd094dbfd9b46442be361fd exp/text.word.tn +73dbf50097896eda33985e11e1ba9a3a exp/text_zh_char_o5_p0_1_2_4_4_a22_q8_b8.arpa +01334e2044c474b99c4f2ffbed790626 exp/text_zh_char_o5_p0_1_2_4_4_a22_q8_b8.arpa.klm.bin +36a42de548045b54662411ae7982c77f exp/text_zh_word_o3_p0_0_0_a22_q8_b8.arpa +332422803ffd73dd7ffd16cd2b0abcd5 exp/text_zh_word_o3_p0_0_0_a22_q8_b8.arpa.klm.bin +``` + +``` +==> exp/text <== +少先队员因该为老人让坐 +祛痘印可以吗?有效果吗? +不知这款牛奶口感怎样? 小孩子喝行吗! +是转基因油? +我家宝宝13斤用多大码的 +会起坨吗? +请问给送上楼吗? +亲是送赁上门吗 +送货时候有外包装没有还是直接发货过来 +会不会有坏的? 
+ +==> exp/text.char.tn <== +少 先 队 员 因 该 为 老 人 让 坐 +祛 痘 印 可 以 吗 有 效 果 吗 +不 知 这 款 牛 奶 口 感 怎 样 小 孩 子 喝 行 吗 +是 转 基 因 油 +我 家 宝 宝 十 三 斤 用 多 大 码 的 +会 起 坨 吗 +请 问 给 送 上 楼 吗 +亲 是 送 赁 上 门 吗 +送 货 时 候 有 外 包 装 没 有 还 是 直 接 发 货 过 来 +会 不 会 有 坏 的 + +==> exp/text.word.tn <== +少先队员 因该 为 老人 让 坐 +祛痘 印 可以 吗 有 效果 吗 +不知 这 款 牛奶 口感 怎样 小孩子 喝行 吗 +是 转基因 油 +我家 宝宝 十三斤 用多大码 的 +会起 坨 吗 +请问 给 送 上楼 吗 +亲是 送赁 上门 吗 +送货 时候 有 外包装 没有 还是 直接 发货 过来 +会 不会 有坏 的 + +==> exp/text_zh_char_o5_p0_1_2_4_4_a22_q8_b8.arpa <== +\data\ +ngram 1=587 +ngram 2=395 +ngram 3=100 +ngram 4=2 +ngram 5=0 + +\1-grams: +-3.272324 0 +0 -0.36706257 + +==> exp/text_zh_word_o3_p0_0_0_a22_q8_b8.arpa <== +\data\ +ngram 1=689 +ngram 2=1398 +ngram 3=1506 + +\1-grams: +-3.1755018 0 +0 -0.23069073 +-1.2318869 0 +-3.067262 少先队员 -0.051341705 +``` diff --git a/examples/ngram_lm/data/README.md b/examples/ngram_lm/s0/data/README.md similarity index 100% rename from examples/ngram_lm/data/README.md rename to examples/ngram_lm/s0/data/README.md diff --git a/examples/ngram_lm/data/custom_confusion.txt b/examples/ngram_lm/s0/data/custom_confusion.txt similarity index 100% rename from examples/ngram_lm/data/custom_confusion.txt rename to examples/ngram_lm/s0/data/custom_confusion.txt diff --git a/examples/ngram_lm/data/text_correct.txt b/examples/ngram_lm/s0/data/text_correct.txt similarity index 100% rename from examples/ngram_lm/data/text_correct.txt rename to examples/ngram_lm/s0/data/text_correct.txt diff --git a/examples/ngram_lm/local/build_zh_lm.sh b/examples/ngram_lm/s0/local/build_zh_lm.sh similarity index 100% rename from examples/ngram_lm/local/build_zh_lm.sh rename to examples/ngram_lm/s0/local/build_zh_lm.sh diff --git a/examples/ngram_lm/local/download_lm_zh.sh b/examples/ngram_lm/s0/local/download_lm_zh.sh similarity index 100% rename from examples/ngram_lm/local/download_lm_zh.sh rename to examples/ngram_lm/s0/local/download_lm_zh.sh diff --git a/examples/ngram_lm/local/kenlm_score_test.py b/examples/ngram_lm/s0/local/kenlm_score_test.py similarity index 100% rename from examples/ngram_lm/local/kenlm_score_test.py rename to examples/ngram_lm/s0/local/kenlm_score_test.py diff --git a/examples/ngram_lm/path.sh b/examples/ngram_lm/s0/path.sh similarity index 67% rename from examples/ngram_lm/path.sh rename to examples/ngram_lm/s0/path.sh index 84e2de7d0..cbd1d82c0 100644 --- a/examples/ngram_lm/path.sh +++ b/examples/ngram_lm/s0/path.sh @@ -1,4 +1,4 @@ -export MAIN_ROOT=${PWD}/../../ +export MAIN_ROOT=`realpath ${PWD}/../../../` export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH} export LC_ALL=C @@ -7,4 +7,4 @@ export LC_ALL=C export PYTHONIOENCODING=UTF-8 export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH} -export LD_LIBRARY_PATH=/usr/local/lib/:${LD_LIBRARY_PATH} \ No newline at end of file +export LD_LIBRARY_PATH=/usr/local/lib/:${LD_LIBRARY_PATH} diff --git a/examples/ngram_lm/requirements.txt b/examples/ngram_lm/s0/requirements.txt similarity index 100% rename from examples/ngram_lm/requirements.txt rename to examples/ngram_lm/s0/requirements.txt diff --git a/examples/ngram_lm/run.sh b/examples/ngram_lm/s0/run.sh similarity index 100% rename from examples/ngram_lm/run.sh rename to examples/ngram_lm/s0/run.sh diff --git a/examples/punctuation_restoration/README.md b/examples/punctuation_restoration/README.md new file mode 100644 index 000000000..42ae0db3a --- /dev/null +++ b/examples/punctuation_restoration/README.md @@ -0,0 +1,3 @@ +# Punctation Restoration + +Please using [PaddleSpeechTask](https://github.com/745165806/PaddleSpeechTask] to do this task. 
diff --git a/examples/spm/README.md b/examples/spm/README.md index 3109d3ffb..fc4478ebb 100644 --- a/examples/spm/README.md +++ b/examples/spm/README.md @@ -1,7 +1,96 @@ # [SentencePiece Model](https://github.com/google/sentencepiece) +## Run Train a `spm` model for English tokenizer. ``` +. path.sh bash run.sh ``` + +## Results + +``` +data/ +└── lang_char + ├── input.bpe + ├── input.decode + ├── input.txt + ├── train_unigram100.model + ├── train_unigram100_units.txt + └── train_unigram100.vocab + +1 directory, 6 files +``` + +``` +b5a230c26c61db5c36f34e503102f936 data/lang_char/input.bpe +ec5a9b24acc35469229e41256ceaf77d data/lang_char/input.decode +ec5a9b24acc35469229e41256ceaf77d data/lang_char/input.txt +124bf3fe7ce3b73b1994234c15268577 data/lang_char/train_unigram100.model +0df2488cc8eaace95eb12713facb5cf0 data/lang_char/train_unigram100_units.txt +46360cac35c751310e8e8ffd3a034cb5 data/lang_char/train_unigram100.vocab +``` + +``` +==> data/lang_char/input.bpe <== +▁mi ster ▁quilter ▁ is ▁the ▁a p ost le ▁o f ▁the ▁mi d d le ▁c las s es ▁ and ▁we ▁ar e ▁g l a d ▁ to ▁we l c om e ▁h is ▁g o s pe l +▁ n or ▁ is ▁mi ster ▁quilter ' s ▁ma nne r ▁ l ess ▁in ter es t ing ▁tha n ▁h is ▁ma t ter +▁h e ▁ t e ll s ▁us ▁tha t ▁ at ▁ t h is ▁f es t ive ▁ s e ason ▁o f ▁the ▁ y e ar ▁w ith ▁ ch r is t m a s ▁ and ▁ro a s t ▁be e f ▁ l o om ing ▁be fore ▁us ▁ s i mile s ▁d r a w n ▁f r om ▁ e at ing ▁ and ▁it s ▁re s u l t s ▁o c c ur ▁m ost ▁re a di l y ▁ to ▁the ▁ mind +▁h e ▁ ha s ▁g r a v e ▁d o u b t s ▁w h e t h er ▁ s i r ▁f r e d er ic k ▁ l eig h to n ' s ▁w or k ▁ is ▁re all y ▁gre e k ▁a f ter ▁ all ▁ and ▁c a n ▁di s c o v er ▁in ▁it ▁b u t ▁li t t le ▁o f ▁ro ck y ▁it ha c a +▁li nne ll ' s ▁ p ic tur es ▁ar e ▁a ▁ s or t ▁o f ▁ u p ▁g u ar d s ▁ and ▁ at ▁ em ▁painting s ▁ and ▁m ason ' s ▁ e x q u is i t e ▁ i d y ll s ▁ar e ▁a s ▁ n at ion a l ▁a s ▁a ▁ j ing o ▁ p o em ▁mi ster ▁b i r k e t ▁f o ster ' s ▁ l and s c a pe s ▁ s mile ▁ at ▁on e ▁m u ch ▁in ▁the ▁ s a m e ▁w a y ▁tha t ▁mi ster ▁c ar k er ▁us e d ▁ to ▁f las h ▁h is ▁ t e e t h ▁ and ▁mi ster ▁ j o h n ▁c o ll i er ▁g ive s ▁h is ▁ s i t ter ▁a ▁ ch e er f u l ▁ s l a p ▁on ▁the ▁b a ck ▁be fore ▁h +e ▁ s a y s ▁li k e ▁a ▁ s ha m p o o er ▁in ▁a ▁ tur k is h ▁b at h ▁ n e x t ▁ma n +▁it ▁ is ▁o b v i o u s l y ▁ u nne c ess ar y ▁for ▁us ▁ to ▁ p o i n t ▁o u t ▁h o w ▁ l u m i n o u s ▁the s e ▁c rit ic is m s ▁ar e ▁h o w ▁d e l ic at e ▁in ▁ e x p r ess ion +▁on ▁the ▁g e n er a l ▁ p r i n c i p l es ▁o f ▁ar t ▁mi ster ▁quilter ▁w rit es ▁w ith ▁ e qual ▁ l u c i di t y +▁painting ▁h e ▁ t e ll s ▁us ▁ is ▁o f ▁a ▁di f f er e n t ▁ qual i t y ▁ to ▁ma t h em at ic s ▁ and ▁f i nish ▁in ▁ar t ▁ is ▁a d d ing ▁m or e ▁f a c t +▁a s ▁for ▁ e t ch ing s ▁the y ▁ar e ▁o f ▁ t w o ▁ k i n d s ▁b rit is h ▁ and ▁for eig n +▁h e ▁ l a ment s ▁m ost ▁b i t ter l y ▁the ▁di v or c e ▁tha t ▁ ha s ▁be e n ▁ma d e ▁be t w e e n ▁d e c or at ive ▁ar t ▁ and ▁w ha t ▁we ▁us u all y ▁c all ▁ p ic tur es ▁ma k es ▁the ▁c u s t om ar y ▁a p pe a l ▁ to ▁the ▁ las t ▁ j u d g ment ▁ and ▁re mind s ▁us ▁tha t ▁in ▁the ▁gre at ▁d a y s ▁o f ▁ar t ▁mi c ha e l ▁a n g e l o ▁w a s ▁the ▁f ur nish ing ▁ u p h o l ster er + +==> data/lang_char/input.decode <== +mister quilter is the apostle of the middle classes and we are glad to welcome his gospel +nor is mister quilter's manner less interesting than his matter +he tells us that at this festive season of the year with christmas and roast beef looming before us similes drawn from 
eating and its results occur most readily to the mind +he has grave doubts whether sir frederick leighton's work is really greek after all and can discover in it but little of rocky ithaca +linnell's pictures are a sort of up guards and at em paintings and mason's exquisite idylls are as national as a jingo poem mister birket foster's landscapes smile at one much in the same way that mister carker used to flash his teeth and mister john collier gives his sitter a cheerful slap on the back before he says like a shampooer in a turkish bath next man +it is obviously unnecessary for us to point out how luminous these criticisms are how delicate in expression +on the general principles of art mister quilter writes with equal lucidity +painting he tells us is of a different quality to mathematics and finish in art is adding more fact +as for etchings they are of two kinds british and foreign +he laments most bitterly the divorce that has been made between decorative art and what we usually call pictures makes the customary appeal to the last judgment and reminds us that in the great days of art michael angelo was the furnishing upholsterer + +==> data/lang_char/input.txt <== +mister quilter is the apostle of the middle classes and we are glad to welcome his gospel +nor is mister quilter's manner less interesting than his matter +he tells us that at this festive season of the year with christmas and roast beef looming before us similes drawn from eating and its results occur most readily to the mind +he has grave doubts whether sir frederick leighton's work is really greek after all and can discover in it but little of rocky ithaca +linnell's pictures are a sort of up guards and at em paintings and mason's exquisite idylls are as national as a jingo poem mister birket foster's landscapes smile at one much in the same way that mister carker used to flash his teeth and mister john collier gives his sitter a cheerful slap on the back before he says like a shampooer in a turkish bath next man +it is obviously unnecessary for us to point out how luminous these criticisms are how delicate in expression +on the general principles of art mister quilter writes with equal lucidity +painting he tells us is of a different quality to mathematics and finish in art is adding more fact +as for etchings they are of two kinds british and foreign +he laments most bitterly the divorce that has been made between decorative art and what we usually call pictures makes the customary appeal to the last judgment and reminds us that in the great days of art michael angelo was the furnishing upholsterer + +==> data/lang_char/train_unigram100_units.txt <== + 0 + 1 +' 2 +a 3 +all 4 +and 5 +ar 6 +ason 7 +at 8 +b 9 + +==> data/lang_char/train_unigram100.vocab <== + 0 + 0 + 0 +▁ -2.01742 +e -2.7203 +s -2.82989 +t -2.99689 +l -3.53267 +n -3.84935 +o -3.88229 +``` diff --git a/examples/spm/path.sh b/examples/spm/path.sh index 9da641e19..202378894 100644 --- a/examples/spm/path.sh +++ b/examples/spm/path.sh @@ -1,4 +1,4 @@ -export MAIN_ROOT=${PWD}/../../ +export MAIN_ROOT=`realpath ${PWD}/../../` export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH} export LC_ALL=C diff --git a/examples/thchs30/README.md b/examples/thchs30/README.md new file mode 100644 index 000000000..7b3cc3d95 --- /dev/null +++ b/examples/thchs30/README.md @@ -0,0 +1,3 @@ +# thchs30 + +* a0 for mfa alignment diff --git a/examples/thchs30/a0/README.md b/examples/thchs30/a0/README.md new file mode 100644 index 000000000..da56fffc8 --- /dev/null +++ 
b/examples/thchs30/a0/README.md @@ -0,0 +1,42 @@ +# THCHS-30 数据集强制对齐实验 +----- +本实验对 THCHS-30 中文数据集用 [Montreal-Forced-Aligner](https://montreal-forced-aligner.readthedocs.io/en/latest/index.html) 进行强制对齐。 +THCHS-30 的文本标注数据分为: + 1. 汉字级别(word),该数据集用空格对词进行了划分,我们在使用时按照将不同字之间按空格划分 + 2. 音节级别(syllable),即汉语中的一个拼音 + 3. 音素级别(phone),一个拼音有多个音素组成,汉语的声母韵母可以理解为音素,不同的数据集有各自的音素标准,THCHS-30 数据集与标贝 BZNSYP 数据集的音素标准略有不同 + + 数据 A11_0 文本示例如下: +``` +绿 是 阳春 烟 景 大块 文章 的 底色 四月 的 林 峦 更是 绿 得 鲜活 秀媚 诗意 盎然↩ +lv4 shi4 yang2 chun1 yan1 jing3 da4 kuai4 wen2 zhang1 de5 di3 se4 si4 yue4 de5 lin2 luan2 geng4 shi4 lv4 de5 xian1 huo2 xiu4 mei4 shi1 yi4 ang4 ran2↩ +l v4 sh ix4 ii iang2 ch un1 ii ian1 j ing3 d a4 k uai4 uu un2 zh ang1 d e5 d i3 s e4 s iy4 vv ve4 d e5 l in2 l uan2 g eng4 sh ix4 l v4 d e5 x ian1 h uo2 x iu4 m ei4 sh ix1 ii i4 aa ang4 r an2 +``` +## 开始实验 +--- +在本项目的 根目录/tools 执行 +``` +make +``` +下载 MFA 的可执行包(也会同时下载本项目所需的其他工具) +执行如下命令: +``` +cd a0 +./run.sh +``` +应用程序会自动下载 THCHS-30数据集,处理成 MFA 所需的文件格式并开始训练,您可以修改 `run.sh` 中的参数 `LEXICON_NAME` 来决定您需要强制对齐的级别(word、syllable 和 phone) +## MFA 所使用的字典 +--- +MFA 字典的格式请参考: [MFA 官方文档 Dictionary format ](https://montreal-forced-aligner.readthedocs.io/en/latest/dictionary.html) +phone.lexicon 直接使用的是 `THCHS-30/data_thchs30/lm_phone/lexicon.txt` +word.lexicon 考虑到了中文的多音字,使用**带概率的字典**, 生成规则请参考 `local/gen_word2phone.py` +`syllable.lexicon` 获取自 [DNSun/thchs30-pinyin2tone](https://github.com/DNSun/thchs30-pinyin2tone) +## 对齐结果 +--- +我们提供了三种级别 MFA 训练好的对齐结果、模型和字典(`syllable.lexicon` 在 `data/dict` 中,`phone.lexicon` 和` word.lexicon` 运行数据预处理代码后会自动从原始数据集复制或生成) + +**phone 级别:** [phone.lexicon](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/phone/phone.lexicon)、 [对齐结果](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/phone/thchs30_alignment.tar.gz)、[模型](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/phone/thchs30_model.zip) +**syllabel 级别:** [syllable.lexicon](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/syllable/syllable.lexicon)、[对齐结果](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/syllable/thchs30_alignment.tar.gz)、[模型](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/syllable/thchs30_model.zip) +**word 级别:** [word.lexicon](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/word/word.lexicon)、[对齐结果](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/word/thchs30_alignment.tar.gz)、[模型](https://paddlespeech.bj.bcebos.com/MFA/THCHS30/word/thchs30_model.zip) + +随后,您可以参考 [MFA 官方文档 Align using pretrained models](https://montreal-forced-aligner.readthedocs.io/en/stable/aligning.html#align-using-pretrained-models) 使用我们给您提供好的模型直接对自己的数据集进行强制对齐,注意,您需要使用和模型对应的 lexicon 文件,当文本是汉字时,您需要用空格把不同的**汉字**(而不是词语)分开 diff --git a/examples/thchs30/a0/data/dict/syllable.lexicon b/examples/thchs30/a0/data/dict/syllable.lexicon new file mode 100644 index 000000000..e1da4e04c --- /dev/null +++ b/examples/thchs30/a0/data/dict/syllable.lexicon @@ -0,0 +1,2490 @@ +A0 aa a0 +A1 aa a1 +A2 aa a2 +A3 aa a3 +A4 aa a4 +AI0 aa ai0 +AI1 aa ai1 +AI2 aa ai2 +AI3 aa ai3 +AI4 aa ai4 +AN0 aa an0 +AN1 aa an1 +AN2 aa an2 +AN3 aa an3 +AN4 aa an4 +ANG0 aa ang0 +ANG1 aa ang1 +ANG2 aa ang2 +ANG3 aa ang3 +ANG4 aa ang4 +AO0 aa ao0 +AO1 aa ao1 +AO2 aa ao2 +AO3 aa ao3 +AO4 aa ao4 +BA0 b a0 +BA1 b a1 +BA2 b a2 +BA3 b a3 +BA4 b a4 +BAI0 b ai0 +BAI1 b ai1 +BAI2 b ai2 +BAI3 b ai3 +BAI4 b ai4 +BAN0 b an0 +BAN1 b an1 +BAN2 b an2 +BAN3 b an3 +BAN4 b an4 +BANG0 b ang0 +BANG1 b ang1 +BANG2 b ang2 +BANG3 b ang3 +BANG4 b ang4 +BAO0 b ao0 +BAO1 b ao1 +BAO2 b ao2 +BAO3 b ao3 +BAO4 b ao4 +BEI0 b ei0 +BEI1 b ei1 +BEI2 b ei2 +BEI3 b ei3 +BEI4 b ei4 +BEN0 b en0 +BEN1 b en1 +BEN2 b 
en2 +BEN3 b en3 +BEN4 b en4 +BENG0 b eng0 +BENG1 b eng1 +BENG2 b eng2 +BENG3 b eng3 +BENG4 b eng4 +BI0 b i0 +BI1 b i1 +BI2 b i2 +BI3 b i3 +BI4 b i4 +BIAN0 b ian0 +BIAN1 b ian1 +BIAN2 b ian2 +BIAN3 b ian3 +BIAN4 b ian4 +BIAO0 b iao0 +BIAO1 b iao1 +BIAO2 b iao2 +BIAO3 b iao3 +BIAO4 b iao4 +BIE0 b ie0 +BIE1 b ie1 +BIE2 b ie2 +BIE3 b ie3 +BIE4 b ie4 +BIN0 b in0 +BIN1 b in1 +BIN2 b in2 +BIN3 b in3 +BIN4 b in4 +BING0 b ing0 +BING1 b ing1 +BING2 b ing2 +BING3 b ing3 +BING4 b ing4 +BO0 b o0 +BO1 b o1 +BO2 b o2 +BO3 b o3 +BO4 b o4 +BU0 b u0 +BU1 b u1 +BU2 b u2 +BU3 b u3 +BU4 b u4 +CA0 c a0 +CA1 c a1 +CA2 c a2 +CA3 c a3 +CA4 c a4 +CAI0 c ai0 +CAI1 c ai1 +CAI2 c ai2 +CAI3 c ai3 +CAI4 c ai4 +CAN0 c an0 +CAN1 c an1 +CAN2 c an2 +CAN3 c an3 +CAN4 c an4 +CANG0 c ang0 +CANG1 c ang1 +CANG2 c ang2 +CANG3 c ang3 +CANG4 c ang4 +CAO0 c ao0 +CAO1 c ao1 +CAO2 c ao2 +CAO3 c ao3 +CAO4 c ao4 +CE0 c e0 +CE1 c e1 +CE2 c e2 +CE3 c e3 +CE4 c e4 +CEN0 c en0 +CEN1 c en1 +CEN2 c en2 +CEN3 c en3 +CEN4 c en4 +CENG0 c eng0 +CENG1 c eng1 +CENG2 c eng2 +CENG3 c eng3 +CENG4 c eng4 +CHA0 ch a0 +CHA1 ch a1 +CHA2 ch a2 +CHA3 ch a3 +CHA4 ch a4 +CHAI0 ch ai0 +CHAI1 ch ai1 +CHAI2 ch ai2 +CHAI3 ch ai3 +CHAI4 ch ai4 +CHAN0 ch an0 +CHAN1 ch an1 +CHAN2 ch an2 +CHAN3 ch an3 +CHAN4 ch an4 +CHANG0 ch ang0 +CHANG1 ch ang1 +CHANG2 ch ang2 +CHANG3 ch ang3 +CHANG4 ch ang4 +CHAO0 ch ao0 +CHAO1 ch ao1 +CHAO2 ch ao2 +CHAO3 ch ao3 +CHAO4 ch ao4 +CHE0 ch e0 +CHE1 ch e1 +CHE2 ch e2 +CHE3 ch e3 +CHE4 ch e4 +CHEN0 ch en0 +CHEN1 ch en1 +CHEN2 ch en2 +CHEN3 ch en3 +CHEN4 ch en4 +CHENG0 ch eng0 +CHENG1 ch eng1 +CHENG2 ch eng2 +CHENG3 ch eng3 +CHENG4 ch eng4 +CHI0 ch ix0 +CHI1 ch ix1 +CHI2 ch ix2 +CHI3 ch ix3 +CHI4 ch ix4 +CHONG0 ch ong0 +CHONG1 ch ong1 +CHONG2 ch ong2 +CHONG3 ch ong3 +CHONG4 ch ong4 +CHOU0 ch ou0 +CHOU1 ch ou1 +CHOU2 ch ou2 +CHOU3 ch ou3 +CHOU4 ch ou4 +CHU0 ch u0 +CHU1 ch u1 +CHU2 ch u2 +CHU3 ch u3 +CHU4 ch u4 +CHUAI0 ch uai0 +CHUAI1 ch uai1 +CHUAI2 ch uai2 +CHUAI3 ch uai3 +CHUAI4 ch uai4 +CHUAN0 ch uan0 +CHUAN1 ch uan1 +CHUAN2 ch uan2 +CHUAN3 ch uan3 +CHUAN4 ch uan4 +CHUANG0 ch uang0 +CHUANG1 ch uang1 +CHUANG2 ch uang2 +CHUANG3 ch uang3 +CHUANG4 ch uang4 +CHUI0 ch ui0 +CHUI1 ch ui1 +CHUI2 ch ui2 +CHUI3 ch ui3 +CHUI4 ch ui4 +CHUN0 ch un0 +CHUN1 ch un1 +CHUN2 ch un2 +CHUN3 ch un3 +CHUN4 ch un4 +CHUO0 ch uo0 +CHUO1 ch uo1 +CHUO2 ch uo2 +CHUO3 ch uo3 +CHUO4 ch uo4 +CI0 c iy0 +CI1 c iy1 +CI2 c iy2 +CI3 c iy3 +CI4 c iy4 +CONG0 c ong0 +CONG1 c ong1 +CONG2 c ong2 +CONG3 c ong3 +CONG4 c ong4 +COU0 c ou0 +COU1 c ou1 +COU2 c ou2 +COU3 c ou3 +COU4 c ou4 +CU0 c u0 +CU1 c u1 +CU2 c u2 +CU3 c u3 +CU4 c u4 +CUAN0 c uan0 +CUAN1 c uan1 +CUAN2 c uan2 +CUAN3 c uan3 +CUAN4 c uan4 +CUI0 c ui0 +CUI1 c ui1 +CUI2 c ui2 +CUI3 c ui3 +CUI4 c ui4 +CUN0 c un0 +CUN1 c un1 +CUN2 c un2 +CUN3 c un3 +CUN4 c un4 +CUO0 c uo0 +CUO1 c uo1 +CUO2 c uo2 +CUO3 c uo3 +CUO4 c uo4 +DA0 d a0 +DA1 d a1 +DA2 d a2 +DA3 d a3 +DA4 d a4 +DAI0 d ai0 +DAI1 d ai1 +DAI2 d ai2 +DAI3 d ai3 +DAI4 d ai4 +DAN0 d an0 +DAN1 d an1 +DAN2 d an2 +DAN3 d an3 +DAN4 d an4 +DANG0 d ang0 +DANG1 d ang1 +DANG2 d ang2 +DANG3 d ang3 +DANG4 d ang4 +DAO0 d ao0 +DAO1 d ao1 +DAO2 d ao2 +DAO3 d ao3 +DAO4 d ao4 +DE0 d e0 +DE1 d e1 +DE2 d e2 +DE3 d e3 +DE4 d e4 +DEI0 d ei0 +DEI1 d ei1 +DEI2 d ei2 +DEI3 d ei3 +DEI4 d ei4 +DEN0 d en0 +DEN1 d en1 +DEN2 d en2 +DEN3 d en3 +DEN4 d en4 +DENG0 d eng0 +DENG1 d eng1 +DENG2 d eng2 +DENG3 d eng3 +DENG4 d eng4 +DI0 d i0 +DI1 d i1 +DI2 d i2 +DI3 d i3 +DI4 d i4 +DIA0 d ia0 +DIA1 d ia1 +DIA2 d ia2 +DIA3 d ia3 +DIA4 d ia4 +DIAN0 d ian0 +DIAN1 d ian1 +DIAN2 d ian2 +DIAN3 d ian3 +DIAN4 d 
ian4 +DIAO0 d iao0 +DIAO1 d iao1 +DIAO2 d iao2 +DIAO3 d iao3 +DIAO4 d iao4 +DIE0 d ie0 +DIE1 d ie1 +DIE2 d ie2 +DIE3 d ie3 +DIE4 d ie4 +DING0 d ing0 +DING1 d ing1 +DING2 d ing2 +DING3 d ing3 +DING4 d ing4 +DIU0 d iu0 +DIU1 d iu1 +DIU2 d iu2 +DIU3 d iu3 +DIU4 d iu4 +DONG0 d ong0 +DONG1 d ong1 +DONG2 d ong2 +DONG3 d ong3 +DONG4 d ong4 +DOU0 d ou0 +DOU1 d ou1 +DOU2 d ou2 +DOU3 d ou3 +DOU4 d ou4 +DU0 d u0 +DU1 d u1 +DU2 d u2 +DU3 d u3 +DU4 d u4 +DUAN0 d uan0 +DUAN1 d uan1 +DUAN2 d uan2 +DUAN3 d uan3 +DUAN4 d uan4 +DUI0 d ui0 +DUI1 d ui1 +DUI2 d ui2 +DUI3 d ui3 +DUI4 d ui4 +DUN0 d un0 +DUN1 d un1 +DUN2 d un2 +DUN3 d un3 +DUN4 d un4 +DUO0 d uo0 +DUO1 d uo1 +DUO2 d uo2 +DUO3 d uo3 +DUO4 d uo4 +E0 ee e0 +E1 ee e1 +E2 ee e2 +E3 ee e3 +E4 ee e4 +EN0 ee en0 +EN1 ee en1 +EN2 ee en2 +EN3 ee en3 +EN4 ee en4 +ER0 ee er0 +ER1 ee er1 +ER2 ee er2 +ER3 ee er3 +ER4 ee er4 +FA0 f a0 +FA1 f a1 +FA2 f a2 +FA3 f a3 +FA4 f a4 +FAN0 f an0 +FAN1 f an1 +FAN2 f an2 +FAN3 f an3 +FAN4 f an4 +FANG0 f ang0 +FANG1 f ang1 +FANG2 f ang2 +FANG3 f ang3 +FANG4 f ang4 +FEI0 f ei0 +FEI1 f ei1 +FEI2 f ei2 +FEI3 f ei3 +FEI4 f ei4 +FEN0 f en0 +FEN1 f en1 +FEN2 f en2 +FEN3 f en3 +FEN4 f en4 +FENG0 f eng0 +FENG1 f eng1 +FENG2 f eng2 +FENG3 f eng3 +FENG4 f eng4 +FO0 f o0 +FO1 f o1 +FO2 f o2 +FO3 f o3 +FO4 f o4 +FOU0 f ou0 +FOU1 f ou1 +FOU2 f ou2 +FOU3 f ou3 +FOU4 f ou4 +FU0 f u0 +FU1 f u1 +FU2 f u2 +FU3 f u3 +FU4 f u4 +GA0 g a0 +GA1 g a1 +GA2 g a2 +GA3 g a3 +GA4 g a4 +GAI0 g ai0 +GAI1 g ai1 +GAI2 g ai2 +GAI3 g ai3 +GAI4 g ai4 +GAN0 g an0 +GAN1 g an1 +GAN2 g an2 +GAN3 g an3 +GAN4 g an4 +GANG0 g ang0 +GANG1 g ang1 +GANG2 g ang2 +GANG3 g ang3 +GANG4 g ang4 +GAO0 g ao0 +GAO1 g ao1 +GAO2 g ao2 +GAO3 g ao3 +GAO4 g ao4 +GE0 g e0 +GE1 g e1 +GE2 g e2 +GE3 g e3 +GE4 g e4 +GEI0 g ei0 +GEI1 g ei1 +GEI2 g ei2 +GEI3 g ei3 +GEI4 g ei4 +GEN0 g en0 +GEN1 g en1 +GEN2 g en2 +GEN3 g en3 +GEN4 g en4 +GENG0 g eng0 +GENG1 g eng1 +GENG2 g eng2 +GENG3 g eng3 +GENG4 g eng4 +GONG0 g ong0 +GONG1 g ong1 +GONG2 g ong2 +GONG3 g ong3 +GONG4 g ong4 +GOU0 g ou0 +GOU1 g ou1 +GOU2 g ou2 +GOU3 g ou3 +GOU4 g ou4 +GU0 g u0 +GU1 g u1 +GU2 g u2 +GU3 g u3 +GU4 g u4 +GUA0 g ua0 +GUA1 g ua1 +GUA2 g ua2 +GUA3 g ua3 +GUA4 g ua4 +GUAI0 g uai0 +GUAI1 g uai1 +GUAI2 g uai2 +GUAI3 g uai3 +GUAI4 g uai4 +GUAN0 g uan0 +GUAN1 g uan1 +GUAN2 g uan2 +GUAN3 g uan3 +GUAN4 g uan4 +GUANG0 g uang0 +GUANG1 g uang1 +GUANG2 g uang2 +GUANG3 g uang3 +GUANG4 g uang4 +GUI0 g ui0 +GUI1 g ui1 +GUI2 g ui2 +GUI3 g ui3 +GUI4 g ui4 +GUN0 g un0 +GUN1 g un1 +GUN2 g un2 +GUN3 g un3 +GUN4 g un4 +GUO0 g uo0 +GUO1 g uo1 +GUO2 g uo2 +GUO3 g uo3 +GUO4 g uo4 +HA0 h a0 +HA1 h a1 +HA2 h a2 +HA3 h a3 +HA4 h a4 +HAI0 h ai0 +HAI1 h ai1 +HAI2 h ai2 +HAI3 h ai3 +HAI4 h ai4 +HAN0 h an0 +HAN1 h an1 +HAN2 h an2 +HAN3 h an3 +HAN4 h an4 +HANG0 h ang0 +HANG1 h ang1 +HANG2 h ang2 +HANG3 h ang3 +HANG4 h ang4 +HAO0 h ao0 +HAO1 h ao1 +HAO2 h ao2 +HAO3 h ao3 +HAO4 h ao4 +HE0 h e0 +HE1 h e1 +HE2 h e2 +HE3 h e3 +HE4 h e4 +HEI0 h ei0 +HEI1 h ei1 +HEI2 h ei2 +HEI3 h ei3 +HEI4 h ei4 +HEN0 h en0 +HEN1 h en1 +HEN2 h en2 +HEN3 h en3 +HEN4 h en4 +HENG0 h eng0 +HENG1 h eng1 +HENG2 h eng2 +HENG3 h eng3 +HENG4 h eng4 +HONG0 h ong0 +HONG1 h ong1 +HONG2 h ong2 +HONG3 h ong3 +HONG4 h ong4 +HOU0 h ou0 +HOU1 h ou1 +HOU2 h ou2 +HOU3 h ou3 +HOU4 h ou4 +HU0 h u0 +HU1 h u1 +HU2 h u2 +HU3 h u3 +HU4 h u4 +HUA0 h ua0 +HUA1 h ua1 +HUA2 h ua2 +HUA3 h ua3 +HUA4 h ua4 +HUAI0 h uai0 +HUAI1 h uai1 +HUAI2 h uai2 +HUAI3 h uai3 +HUAI4 h uai4 +HUAN0 h uan0 +HUAN1 h uan1 +HUAN2 h uan2 +HUAN3 h uan3 +HUAN4 h uan4 +HUANG0 h uang0 +HUANG1 h uang1 +HUANG2 h uang2 +HUANG3 h 
uang3 +HUANG4 h uang4 +HUI0 h ui0 +HUI1 h ui1 +HUI2 h ui2 +HUI3 h ui3 +HUI4 h ui4 +HUN0 h un0 +HUN1 h un1 +HUN2 h un2 +HUN3 h un3 +HUN4 h un4 +HUO0 h uo0 +HUO1 h uo1 +HUO2 h uo2 +HUO3 h uo3 +HUO4 h uo4 +JI0 j i0 +JI1 j i1 +JI2 j i2 +JI3 j i3 +JI4 j i4 +JIA0 j ia0 +JIA1 j ia1 +JIA2 j ia2 +JIA3 j ia3 +JIA4 j ia4 +JIAN0 j ian0 +JIAN1 j ian1 +JIAN2 j ian2 +JIAN3 j ian3 +JIAN4 j ian4 +JIANG0 j iang0 +JIANG1 j iang1 +JIANG2 j iang2 +JIANG3 j iang3 +JIANG4 j iang4 +JIAO0 j iao0 +JIAO1 j iao1 +JIAO2 j iao2 +JIAO3 j iao3 +JIAO4 j iao4 +JIE0 j ie0 +JIE1 j ie1 +JIE2 j ie2 +JIE3 j ie3 +JIE4 j ie4 +JIN0 j in0 +JIN1 j in1 +JIN2 j in2 +JIN3 j in3 +JIN4 j in4 +JING0 j ing0 +JING1 j ing1 +JING2 j ing2 +JING3 j ing3 +JING4 j ing4 +JIONG0 j iong0 +JIONG1 j iong1 +JIONG2 j iong2 +JIONG3 j iong3 +JIONG4 j iong4 +JIU0 j iu0 +JIU1 j iu1 +JIU2 j iu2 +JIU3 j iu3 +JIU4 j iu4 +JU0 j v0 +JU1 j v1 +JU2 j v2 +JU3 j v3 +JU4 j v4 +JUAN0 j van0 +JUAN1 j van1 +JUAN2 j van2 +JUAN3 j van3 +JUAN4 j van4 +JUE0 j ve0 +JUE1 j ve1 +JUE2 j ve2 +JUE3 j ve3 +JUE4 j ve4 +JUN0 j vn0 +JUN1 j vn1 +JUN2 j vn2 +JUN3 j vn3 +JUN4 j vn4 +KA0 k a0 +KA1 k a1 +KA2 k a2 +KA3 k a3 +KA4 k a4 +KAI0 k ai0 +KAI1 k ai1 +KAI2 k ai2 +KAI3 k ai3 +KAI4 k ai4 +KAN0 k an0 +KAN1 k an1 +KAN2 k an2 +KAN3 k an3 +KAN4 k an4 +KANG0 k ang0 +KANG1 k ang1 +KANG2 k ang2 +KANG3 k ang3 +KANG4 k ang4 +KAO0 k ao0 +KAO1 k ao1 +KAO2 k ao2 +KAO3 k ao3 +KAO4 k ao4 +KE0 k e0 +KE1 k e1 +KE2 k e2 +KE3 k e3 +KE4 k e4 +KEI0 k ei0 +KEI1 k ei1 +KEI2 k ei2 +KEI3 k ei3 +KEI4 k ei4 +KEN0 k en0 +KEN1 k en1 +KEN2 k en2 +KEN3 k en3 +KEN4 k en4 +KENG0 k eng0 +KENG1 k eng1 +KENG2 k eng2 +KENG3 k eng3 +KENG4 k eng4 +KONG0 k ong0 +KONG1 k ong1 +KONG2 k ong2 +KONG3 k ong3 +KONG4 k ong4 +KOU0 k ou0 +KOU1 k ou1 +KOU2 k ou2 +KOU3 k ou3 +KOU4 k ou4 +KU0 k u0 +KU1 k u1 +KU2 k u2 +KU3 k u3 +KU4 k u4 +KUA0 k ua0 +KUA1 k ua1 +KUA2 k ua2 +KUA3 k ua3 +KUA4 k ua4 +KUAI0 k uai0 +KUAI1 k uai1 +KUAI2 k uai2 +KUAI3 k uai3 +KUAI4 k uai4 +KUAN0 k uan0 +KUAN1 k uan1 +KUAN2 k uan2 +KUAN3 k uan3 +KUAN4 k uan4 +KUANG0 k uang0 +KUANG1 k uang1 +KUANG2 k uang2 +KUANG3 k uang3 +KUANG4 k uang4 +KUI0 k ui0 +KUI1 k ui1 +KUI2 k ui2 +KUI3 k ui3 +KUI4 k ui4 +KUN0 k un0 +KUN1 k un1 +KUN2 k un2 +KUN3 k un3 +KUN4 k un4 +KUO0 k uo0 +KUO1 k uo1 +KUO2 k uo2 +KUO3 k uo3 +KUO4 k uo4 +LA0 l a0 +LA1 l a1 +LA2 l a2 +LA3 l a3 +LA4 l a4 +LAI0 l ai0 +LAI1 l ai1 +LAI2 l ai2 +LAI3 l ai3 +LAI4 l ai4 +LAN0 l an0 +LAN1 l an1 +LAN2 l an2 +LAN3 l an3 +LAN4 l an4 +LANG0 l ang0 +LANG1 l ang1 +LANG2 l ang2 +LANG3 l ang3 +LANG4 l ang4 +LAO0 l ao0 +LAO1 l ao1 +LAO2 l ao2 +LAO3 l ao3 +LAO4 l ao4 +LE0 l e0 +LE1 l e1 +LE2 l e2 +LE3 l e3 +LE4 l e4 +LEI0 l ei0 +LEI1 l ei1 +LEI2 l ei2 +LEI3 l ei3 +LEI4 l ei4 +LENG0 l eng0 +LENG1 l eng1 +LENG2 l eng2 +LENG3 l eng3 +LENG4 l eng4 +LI0 l i0 +LI1 l i1 +LI2 l i2 +LI3 l i3 +LI4 l i4 +LIA0 l ia0 +LIA1 l ia1 +LIA2 l ia2 +LIA3 l ia3 +LIA4 l ia4 +LIAN0 l ian0 +LIAN1 l ian1 +LIAN2 l ian2 +LIAN3 l ian3 +LIAN4 l ian4 +LIANG0 l iang0 +LIANG1 l iang1 +LIANG2 l iang2 +LIANG3 l iang3 +LIANG4 l iang4 +LIAO0 l iao0 +LIAO1 l iao1 +LIAO2 l iao2 +LIAO3 l iao3 +LIAO4 l iao4 +LIE0 l ie0 +LIE1 l ie1 +LIE2 l ie2 +LIE3 l ie3 +LIE4 l ie4 +LIN0 l in0 +LIN1 l in1 +LIN2 l in2 +LIN3 l in3 +LIN4 l in4 +LING0 l ing0 +LING1 l ing1 +LING2 l ing2 +LING3 l ing3 +LING4 l ing4 +LIU0 l iu0 +LIU1 l iu1 +LIU2 l iu2 +LIU3 l iu3 +LIU4 l iu4 +LONG0 l ong0 +LONG1 l ong1 +LONG2 l ong2 +LONG3 l ong3 +LONG4 l ong4 +LOU0 l ou0 +LOU1 l ou1 +LOU2 l ou2 +LOU3 l ou3 +LOU4 l ou4 +LU0 l u0 +LU1 l u1 +LU2 l u2 +LU3 l u3 +LU4 l u4 +LUAN0 l uan0 +LUAN1 l uan1 
+LUAN2 l uan2 +LUAN3 l uan3 +LUAN4 l uan4 +LUE0 l ve0 +LUE1 l ve1 +LUE2 l ve2 +LUE3 l ve3 +LUE4 l ve4 +LVE0 l ve0 +LVE1 l ve1 +LVE2 l ve2 +LVE3 l ve3 +LVE4 l ve4 +LUN0 l un0 +LUN1 l un1 +LUN2 l un2 +LUN3 l un3 +LUN4 l un4 +LUO0 l uo0 +LUO1 l uo1 +LUO2 l uo2 +LUO3 l uo3 +LUO4 l uo4 +LV0 l v0 +LV1 l v1 +LV2 l v2 +LV3 l v3 +LV4 l v4 +MA0 m a0 +MA1 m a1 +MA2 m a2 +MA3 m a3 +MA4 m a4 +MAI0 m ai0 +MAI1 m ai1 +MAI2 m ai2 +MAI3 m ai3 +MAI4 m ai4 +MAN0 m an0 +MAN1 m an1 +MAN2 m an2 +MAN3 m an3 +MAN4 m an4 +MANG0 m ang0 +MANG1 m ang1 +MANG2 m ang2 +MANG3 m ang3 +MANG4 m ang4 +MAO0 m ao0 +MAO1 m ao1 +MAO2 m ao2 +MAO3 m ao3 +MAO4 m ao4 +ME0 m e0 +ME1 m e1 +ME2 m e2 +ME3 m e3 +ME4 m e4 +MEI0 m ei0 +MEI1 m ei1 +MEI2 m ei2 +MEI3 m ei3 +MEI4 m ei4 +MEN0 m en0 +MEN1 m en1 +MEN2 m en2 +MEN3 m en3 +MEN4 m en4 +MENG0 m eng0 +MENG1 m eng1 +MENG2 m eng2 +MENG3 m eng3 +MENG4 m eng4 +MI0 m i0 +MI1 m i1 +MI2 m i2 +MI3 m i3 +MI4 m i4 +MIAN0 m ian0 +MIAN1 m ian1 +MIAN2 m ian2 +MIAN3 m ian3 +MIAN4 m ian4 +MIAO0 m iao0 +MIAO1 m iao1 +MIAO2 m iao2 +MIAO3 m iao3 +MIAO4 m iao4 +MIE0 m ie0 +MIE1 m ie1 +MIE2 m ie2 +MIE3 m ie3 +MIE4 m ie4 +MIN0 m in0 +MIN1 m in1 +MIN2 m in2 +MIN3 m in3 +MIN4 m in4 +MING0 m ing0 +MING1 m ing1 +MING2 m ing2 +MING3 m ing3 +MING4 m ing4 +MIU0 m iu0 +MIU1 m iu1 +MIU2 m iu2 +MIU3 m iu3 +MIU4 m iu4 +MO0 m o0 +MO1 m o1 +MO2 m o2 +MO3 m o3 +MO4 m o4 +MOU0 m ou0 +MOU1 m ou1 +MOU2 m ou2 +MOU3 m ou3 +MOU4 m ou4 +MU0 m u0 +MU1 m u1 +MU2 m u2 +MU3 m u3 +MU4 m u4 +NA0 n a0 +NA1 n a1 +NA2 n a2 +NA3 n a3 +NA4 n a4 +NAI0 n ai0 +NAI1 n ai1 +NAI2 n ai2 +NAI3 n ai3 +NAI4 n ai4 +NAN0 n an0 +NAN1 n an1 +NAN2 n an2 +NAN3 n an3 +NAN4 n an4 +NANG0 n ang0 +NANG1 n ang1 +NANG2 n ang2 +NANG3 n ang3 +NANG4 n ang4 +NAO0 n ao0 +NAO1 n ao1 +NAO2 n ao2 +NAO3 n ao3 +NAO4 n ao4 +NE0 n e0 +NE1 n e1 +NE2 n e2 +NE3 n e3 +NE4 n e4 +NEI0 n ei0 +NEI1 n ei1 +NEI2 n ei2 +NEI3 n ei3 +NEI4 n ei4 +NEN0 n en0 +NEN1 n en1 +NEN2 n en2 +NEN3 n en3 +NEN4 n en4 +NENG0 n eng0 +NENG1 n eng1 +NENG2 n eng2 +NENG3 n eng3 +NENG4 n eng4 +NI0 n i0 +NI1 n i1 +NI2 n i2 +NI3 n i3 +NI4 n i4 +NIAN0 n ian0 +NIAN1 n ian1 +NIAN2 n ian2 +NIAN3 n ian3 +NIAN4 n ian4 +NIANG0 n iang0 +NIANG1 n iang1 +NIANG2 n iang2 +NIANG3 n iang3 +NIANG4 n iang4 +NIAO0 n iao0 +NIAO1 n iao1 +NIAO2 n iao2 +NIAO3 n iao3 +NIAO4 n iao4 +NIE0 n ie0 +NIE1 n ie1 +NIE2 n ie2 +NIE3 n ie3 +NIE4 n ie4 +NIN0 n in0 +NIN1 n in1 +NIN2 n in2 +NIN3 n in3 +NIN4 n in4 +NING0 n ing0 +NING1 n ing1 +NING2 n ing2 +NING3 n ing3 +NING4 n ing4 +NIU0 n iu0 +NIU1 n iu1 +NIU2 n iu2 +NIU3 n iu3 +NIU4 n iu4 +NONG0 n ong0 +NONG1 n ong1 +NONG2 n ong2 +NONG3 n ong3 +NONG4 n ong4 +NU0 n u0 +NU1 n u1 +NU2 n u2 +NU3 n u3 +NU4 n u4 +NUAN0 n uan0 +NUAN1 n uan1 +NUAN2 n uan2 +NUAN3 n uan3 +NUAN4 n uan4 +NUE0 n ve0 +NUE1 n ve1 +NUE2 n ve2 +NUE3 n ve3 +NUE4 n ve4 +NVE0 n ve0 +NVE1 n ve1 +NVE2 n ve2 +NVE3 n ve3 +NVE4 n ve4 +NUO0 n uo0 +NUO1 n uo1 +NUO2 n uo2 +NUO3 n uo3 +NUO4 n uo4 +NV0 n v0 +NV1 n v1 +NV2 n v2 +NV3 n v3 +NV4 n v4 +O0 oo o0 +O1 oo o1 +O2 oo o2 +O3 oo o3 +O4 oo o4 +OU0 oo ou0 +OU1 oo ou1 +OU2 oo ou2 +OU3 oo ou3 +OU4 oo ou4 +PA0 p a0 +PA1 p a1 +PA2 p a2 +PA3 p a3 +PA4 p a4 +PAI0 p ai0 +PAI1 p ai1 +PAI2 p ai2 +PAI3 p ai3 +PAI4 p ai4 +PAN0 p an0 +PAN1 p an1 +PAN2 p an2 +PAN3 p an3 +PAN4 p an4 +PANG0 p ang0 +PANG1 p ang1 +PANG2 p ang2 +PANG3 p ang3 +PANG4 p ang4 +PAO0 p ao0 +PAO1 p ao1 +PAO2 p ao2 +PAO3 p ao3 +PAO4 p ao4 +PEI0 p ei0 +PEI1 p ei1 +PEI2 p ei2 +PEI3 p ei3 +PEI4 p ei4 +PEN0 p en0 +PEN1 p en1 +PEN2 p en2 +PEN3 p en3 +PEN4 p en4 +PENG0 p eng0 +PENG1 p eng1 +PENG2 p eng2 +PENG3 p eng3 +PENG4 p eng4 
+PI0 p i0 +PI1 p i1 +PI2 p i2 +PI3 p i3 +PI4 p i4 +PIAN0 p ian0 +PIAN1 p ian1 +PIAN2 p ian2 +PIAN3 p ian3 +PIAN4 p ian4 +PIAO0 p iao0 +PIAO1 p iao1 +PIAO2 p iao2 +PIAO3 p iao3 +PIAO4 p iao4 +PIE0 p ie0 +PIE1 p ie1 +PIE2 p ie2 +PIE3 p ie3 +PIE4 p ie4 +PIN0 p in0 +PIN1 p in1 +PIN2 p in2 +PIN3 p in3 +PIN4 p in4 +PING0 p ing0 +PING1 p ing1 +PING2 p ing2 +PING3 p ing3 +PING4 p ing4 +PO0 p o0 +PO1 p o1 +PO2 p o2 +PO3 p o3 +PO4 p o4 +POU0 p ou0 +POU1 p ou1 +POU2 p ou2 +POU3 p ou3 +POU4 p ou4 +PU0 p u0 +PU1 p u1 +PU2 p u2 +PU3 p u3 +PU4 p u4 +QI0 q i0 +QI1 q i1 +QI2 q i2 +QI3 q i3 +QI4 q i4 +QIA0 q ia0 +QIA1 q ia1 +QIA2 q ia2 +QIA3 q ia3 +QIA4 q ia4 +QIAN0 q ian0 +QIAN1 q ian1 +QIAN2 q ian2 +QIAN3 q ian3 +QIAN4 q ian4 +QIANG0 q iang0 +QIANG1 q iang1 +QIANG2 q iang2 +QIANG3 q iang3 +QIANG4 q iang4 +QIAO0 q iao0 +QIAO1 q iao1 +QIAO2 q iao2 +QIAO3 q iao3 +QIAO4 q iao4 +QIE0 q ie0 +QIE1 q ie1 +QIE2 q ie2 +QIE3 q ie3 +QIE4 q ie4 +QIN0 q in0 +QIN1 q in1 +QIN2 q in2 +QIN3 q in3 +QIN4 q in4 +QING0 q ing0 +QING1 q ing1 +QING2 q ing2 +QING3 q ing3 +QING4 q ing4 +QIONG0 q iong0 +QIONG1 q iong1 +QIONG2 q iong2 +QIONG3 q iong3 +QIONG4 q iong4 +QIU0 q iu0 +QIU1 q iu1 +QIU2 q iu2 +QIU3 q iu3 +QIU4 q iu4 +QU0 q v0 +QU1 q v1 +QU2 q v2 +QU3 q v3 +QU4 q v4 +QUAN0 q van0 +QUAN1 q van1 +QUAN2 q van2 +QUAN3 q van3 +QUAN4 q van4 +QUE0 q ve0 +QUE1 q ve1 +QUE2 q ve2 +QUE3 q ve3 +QUE4 q ve4 +QUN0 q vn0 +QUN1 q vn1 +QUN2 q vn2 +QUN3 q vn3 +QUN4 q vn4 +RAN0 r an0 +RAN1 r an1 +RAN2 r an2 +RAN3 r an3 +RAN4 r an4 +RANG0 r ang0 +RANG1 r ang1 +RANG2 r ang2 +RANG3 r ang3 +RANG4 r ang4 +RAO0 r ao0 +RAO1 r ao1 +RAO2 r ao2 +RAO3 r ao3 +RAO4 r ao4 +RE0 r e0 +RE1 r e1 +RE2 r e2 +RE3 r e3 +RE4 r e4 +REN0 r en0 +REN1 r en1 +REN2 r en2 +REN3 r en3 +REN4 r en4 +RENG0 r eng0 +RENG1 r eng1 +RENG2 r eng2 +RENG3 r eng3 +RENG4 r eng4 +RI0 r iz0 +RI1 r iz1 +RI2 r iz2 +RI3 r iz3 +RI4 r iz4 +RONG0 r ong0 +RONG1 r ong1 +RONG2 r ong2 +RONG3 r ong3 +RONG4 r ong4 +ROU0 r ou0 +ROU1 r ou1 +ROU2 r ou2 +ROU3 r ou3 +ROU4 r ou4 +RU0 r u0 +RU1 r u1 +RU2 r u2 +RU3 r u3 +RU4 r u4 +RUAN0 r uan0 +RUAN1 r uan1 +RUAN2 r uan2 +RUAN3 r uan3 +RUAN4 r uan4 +RUI0 r ui0 +RUI1 r ui1 +RUI2 r ui2 +RUI3 r ui3 +RUI4 r ui4 +RUN0 r un0 +RUN1 r un1 +RUN2 r un2 +RUN3 r un3 +RUN4 r un4 +RUO0 r uo0 +RUO1 r uo1 +RUO2 r uo2 +RUO3 r uo3 +RUO4 r uo4 +SA0 s a0 +SA1 s a1 +SA2 s a2 +SA3 s a3 +SA4 s a4 +SAI0 s ai0 +SAI1 s ai1 +SAI2 s ai2 +SAI3 s ai3 +SAI4 s ai4 +SAN0 s an0 +SAN1 s an1 +SAN2 s an2 +SAN3 s an3 +SAN4 s an4 +SANG0 s ang0 +SANG1 s ang1 +SANG2 s ang2 +SANG3 s ang3 +SANG4 s ang4 +SAO0 s ao0 +SAO1 s ao1 +SAO2 s ao2 +SAO3 s ao3 +SAO4 s ao4 +SE0 s e0 +SE1 s e1 +SE2 s e2 +SE3 s e3 +SE4 s e4 +SEN0 s en0 +SEN1 s en1 +SEN2 s en2 +SEN3 s en3 +SEN4 s en4 +SENG0 s eng0 +SENG1 s eng1 +SENG2 s eng2 +SENG3 s eng3 +SENG4 s eng4 +SHA0 sh a0 +SHA1 sh a1 +SHA2 sh a2 +SHA3 sh a3 +SHA4 sh a4 +SHAI0 sh ai0 +SHAI1 sh ai1 +SHAI2 sh ai2 +SHAI3 sh ai3 +SHAI4 sh ai4 +SHAN0 sh an0 +SHAN1 sh an1 +SHAN2 sh an2 +SHAN3 sh an3 +SHAN4 sh an4 +SHANG0 sh ang0 +SHANG1 sh ang1 +SHANG2 sh ang2 +SHANG3 sh ang3 +SHANG4 sh ang4 +SHAO0 sh ao0 +SHAO1 sh ao1 +SHAO2 sh ao2 +SHAO3 sh ao3 +SHAO4 sh ao4 +SHE0 sh e0 +SHE1 sh e1 +SHE2 sh e2 +SHE3 sh e3 +SHE4 sh e4 +SHEI0 sh ei0 +SHEI1 sh ei1 +SHEI2 sh ei2 +SHEI3 sh ei3 +SHEI4 sh ei4 +SHEN0 sh en0 +SHEN1 sh en1 +SHEN2 sh en2 +SHEN3 sh en3 +SHEN4 sh en4 +SHENG0 sh eng0 +SHENG1 sh eng1 +SHENG2 sh eng2 +SHENG3 sh eng3 +SHENG4 sh eng4 +SHI0 sh ix0 +SHI1 sh ix1 +SHI2 sh ix2 +SHI3 sh ix3 +SHI4 sh ix4 +SHOU0 sh ou0 +SHOU1 sh ou1 +SHOU2 sh ou2 +SHOU3 sh ou3 +SHOU4 sh ou4 +SHU0 sh u0 
+SHU1 sh u1 +SHU2 sh u2 +SHU3 sh u3 +SHU4 sh u4 +SHUA0 sh ua0 +SHUA1 sh ua1 +SHUA2 sh ua2 +SHUA3 sh ua3 +SHUA4 sh ua4 +SHUAI0 sh uai0 +SHUAI1 sh uai1 +SHUAI2 sh uai2 +SHUAI3 sh uai3 +SHUAI4 sh uai4 +SHUAN0 sh uan0 +SHUAN1 sh uan1 +SHUAN2 sh uan2 +SHUAN3 sh uan3 +SHUAN4 sh uan4 +SHUANG0 sh uang0 +SHUANG1 sh uang1 +SHUANG2 sh uang2 +SHUANG3 sh uang3 +SHUANG4 sh uang4 +SHUI0 sh ui0 +SHUI1 sh ui1 +SHUI2 sh ui2 +SHUI3 sh ui3 +SHUI4 sh ui4 +SHUN0 sh un0 +SHUN1 sh un1 +SHUN2 sh un2 +SHUN3 sh un3 +SHUN4 sh un4 +SHUO0 sh uo0 +SHUO1 sh uo1 +SHUO2 sh uo2 +SHUO3 sh uo3 +SHUO4 sh uo4 +SI0 s iy0 +SI1 s iy1 +SI2 s iy2 +SI3 s iy3 +SI4 s iy4 +SONG0 s ong0 +SONG1 s ong1 +SONG2 s ong2 +SONG3 s ong3 +SONG4 s ong4 +SOU0 s ou0 +SOU1 s ou1 +SOU2 s ou2 +SOU3 s ou3 +SOU4 s ou4 +SU0 s u0 +SU1 s u1 +SU2 s u2 +SU3 s u3 +SU4 s u4 +SUAN0 s uan0 +SUAN1 s uan1 +SUAN2 s uan2 +SUAN3 s uan3 +SUAN4 s uan4 +SUI0 s ui0 +SUI1 s ui1 +SUI2 s ui2 +SUI3 s ui3 +SUI4 s ui4 +SUN0 s un0 +SUN1 s un1 +SUN2 s un2 +SUN3 s un3 +SUN4 s un4 +SUO0 s uo0 +SUO1 s uo1 +SUO2 s uo2 +SUO3 s uo3 +SUO4 s uo4 +TA0 t a0 +TA1 t a1 +TA2 t a2 +TA3 t a3 +TA4 t a4 +TAI0 t ai0 +TAI1 t ai1 +TAI2 t ai2 +TAI3 t ai3 +TAI4 t ai4 +TAN0 t an0 +TAN1 t an1 +TAN2 t an2 +TAN3 t an3 +TAN4 t an4 +TANG0 t ang0 +TANG1 t ang1 +TANG2 t ang2 +TANG3 t ang3 +TANG4 t ang4 +TAO0 t ao0 +TAO1 t ao1 +TAO2 t ao2 +TAO3 t ao3 +TAO4 t ao4 +TE0 t e0 +TE1 t e1 +TE2 t e2 +TE3 t e3 +TE4 t e4 +TENG0 t eng0 +TENG1 t eng1 +TENG2 t eng2 +TENG3 t eng3 +TENG4 t eng4 +TI0 t i0 +TI1 t i1 +TI2 t i2 +TI3 t i3 +TI4 t i4 +TIAN0 t ian0 +TIAN1 t ian1 +TIAN2 t ian2 +TIAN3 t ian3 +TIAN4 t ian4 +TIAO0 t iao0 +TIAO1 t iao1 +TIAO2 t iao2 +TIAO3 t iao3 +TIAO4 t iao4 +TIE0 t ie0 +TIE1 t ie1 +TIE2 t ie2 +TIE3 t ie3 +TIE4 t ie4 +TING0 t ing0 +TING1 t ing1 +TING2 t ing2 +TING3 t ing3 +TING4 t ing4 +TONG0 t ong0 +TONG1 t ong1 +TONG2 t ong2 +TONG3 t ong3 +TONG4 t ong4 +TOU0 t ou0 +TOU1 t ou1 +TOU2 t ou2 +TOU3 t ou3 +TOU4 t ou4 +TU0 t u0 +TU1 t u1 +TU2 t u2 +TU3 t u3 +TU4 t u4 +TUAN0 t uan0 +TUAN1 t uan1 +TUAN2 t uan2 +TUAN3 t uan3 +TUAN4 t uan4 +TUI0 t ui0 +TUI1 t ui1 +TUI2 t ui2 +TUI3 t ui3 +TUI4 t ui4 +TUN0 t un0 +TUN1 t un1 +TUN2 t un2 +TUN3 t un3 +TUN4 t un4 +TUO0 t uo0 +TUO1 t uo1 +TUO2 t uo2 +TUO3 t uo3 +TUO4 t uo4 +WA0 uu ua0 +WA1 uu ua1 +WA2 uu ua2 +WA3 uu ua3 +WA4 uu ua4 +WAI0 uu uai0 +WAI1 uu uai1 +WAI2 uu uai2 +WAI3 uu uai3 +WAI4 uu uai4 +WAN0 uu uan0 +WAN1 uu uan1 +WAN2 uu uan2 +WAN3 uu uan3 +WAN4 uu uan4 +WANG0 uu uang0 +WANG1 uu uang1 +WANG2 uu uang2 +WANG3 uu uang3 +WANG4 uu uang4 +WEI0 uu ui0 +WEI1 uu ui1 +WEI2 uu ui2 +WEI3 uu ui3 +WEI4 uu ui4 +WEN0 uu un0 +WEN1 uu un1 +WEN2 uu un2 +WEN3 uu un3 +WEN4 uu un4 +WENG0 uu ueng0 +WENG1 uu ueng1 +WENG2 uu ueng2 +WENG3 uu ueng3 +WENG4 uu ueng4 +WO0 uu uo0 +WO1 uu uo1 +WO2 uu uo2 +WO3 uu uo3 +WO4 uu uo4 +WU0 uu u0 +WU1 uu u1 +WU2 uu u2 +WU3 uu u3 +WU4 uu u4 +XI0 x i0 +XI1 x i1 +XI2 x i2 +XI3 x i3 +XI4 x i4 +XIA0 x ia0 +XIA1 x ia1 +XIA2 x ia2 +XIA3 x ia3 +XIA4 x ia4 +XIAN0 x ian0 +XIAN1 x ian1 +XIAN2 x ian2 +XIAN3 x ian3 +XIAN4 x ian4 +XIANG0 x iang0 +XIANG1 x iang1 +XIANG2 x iang2 +XIANG3 x iang3 +XIANG4 x iang4 +XIAO0 x iao0 +XIAO1 x iao1 +XIAO2 x iao2 +XIAO3 x iao3 +XIAO4 x iao4 +XIE0 x ie0 +XIE1 x ie1 +XIE2 x ie2 +XIE3 x ie3 +XIE4 x ie4 +XIN0 x in0 +XIN1 x in1 +XIN2 x in2 +XIN3 x in3 +XIN4 x in4 +XING0 x ing0 +XING1 x ing1 +XING2 x ing2 +XING3 x ing3 +XING4 x ing4 +XIONG0 x iong0 +XIONG1 x iong1 +XIONG2 x iong2 +XIONG3 x iong3 +XIONG4 x iong4 +XIU0 x iu0 +XIU1 x iu1 +XIU2 x iu2 +XIU3 x iu3 +XIU4 x iu4 +XU0 x v0 +XU1 x v1 +XU2 x v2 +XU3 x v3 +XU4 x v4 +XUAN0 
x van0 +XUAN1 x van1 +XUAN2 x van2 +XUAN3 x van3 +XUAN4 x van4 +XUE0 x ve0 +XUE1 x ve1 +XUE2 x ve2 +XUE3 x ve3 +XUE4 x ve4 +XUN0 x vn0 +XUN1 x vn1 +XUN2 x vn2 +XUN3 x vn3 +XUN4 x vn4 +YA0 ii ia0 +YA1 ii ia1 +YA2 ii ia2 +YA3 ii ia3 +YA4 ii ia4 +YAN0 ii ian0 +YAN1 ii ian1 +YAN2 ii ian2 +YAN3 ii ian3 +YAN4 ii ian4 +YANG0 ii iang0 +YANG1 ii iang1 +YANG2 ii iang2 +YANG3 ii iang3 +YANG4 ii iang4 +YAO0 ii iao0 +YAO1 ii iao1 +YAO2 ii iao2 +YAO3 ii iao3 +YAO4 ii iao4 +YE0 ii ie0 +YE1 ii ie1 +YE2 ii ie2 +YE3 ii ie3 +YE4 ii ie4 +YI0 ii i0 +YI1 ii i1 +YI2 ii i2 +YI3 ii i3 +YI4 ii i4 +YIN0 ii in0 +YIN1 ii in1 +YIN2 ii in2 +YIN3 ii in3 +YIN4 ii in4 +YING0 ii ing0 +YING1 ii ing1 +YING2 ii ing2 +YING3 ii ing3 +YING4 ii ing4 +YO0 ii ou0 +YO1 ii ou1 +YO2 ii ou2 +YO3 ii ou3 +YO4 ii ou4 +YONG0 ii iong0 +YONG1 ii iong1 +YONG2 ii iong2 +YONG3 ii iong3 +YONG4 ii iong4 +YOU0 ii iu0 +YOU1 ii iu1 +YOU2 ii iu2 +YOU3 ii iu3 +YOU4 ii iu4 +YU0 vv v0 +YU1 vv v1 +YU2 vv v2 +YU3 vv v3 +YU4 vv v4 +YUAN0 vv van0 +YUAN1 vv van1 +YUAN2 vv van2 +YUAN3 vv van3 +YUAN4 vv van4 +YUE0 vv ve0 +YUE1 vv ve1 +YUE2 vv ve2 +YUE3 vv ve3 +YUE4 vv ve4 +YUN0 vv vn0 +YUN1 vv vn1 +YUN2 vv vn2 +YUN3 vv vn3 +YUN4 vv vn4 +YUO0 ii ou0 +YUO1 ii ou1 +YUO2 ii ou2 +YUO3 ii ou3 +YUO4 ii ou4 +ZA0 z a0 +ZA1 z a1 +ZA2 z a2 +ZA3 z a3 +ZA4 z a4 +ZAI0 z ai0 +ZAI1 z ai1 +ZAI2 z ai2 +ZAI3 z ai3 +ZAI4 z ai4 +ZAN0 z an0 +ZAN1 z an1 +ZAN2 z an2 +ZAN3 z an3 +ZAN4 z an4 +ZANG0 z ang0 +ZANG1 z ang1 +ZANG2 z ang2 +ZANG3 z ang3 +ZANG4 z ang4 +ZAO0 z ao0 +ZAO1 z ao1 +ZAO2 z ao2 +ZAO3 z ao3 +ZAO4 z ao4 +ZE0 z e0 +ZE1 z e1 +ZE2 z e2 +ZE3 z e3 +ZE4 z e4 +ZEI0 z ei0 +ZEI1 z ei1 +ZEI2 z ei2 +ZEI3 z ei3 +ZEI4 z ei4 +ZEN0 z en0 +ZEN1 z en1 +ZEN2 z en2 +ZEN3 z en3 +ZEN4 z en4 +ZENG0 z eng0 +ZENG1 z eng1 +ZENG2 z eng2 +ZENG3 z eng3 +ZENG4 z eng4 +ZHA0 zh a0 +ZHA1 zh a1 +ZHA2 zh a2 +ZHA3 zh a3 +ZHA4 zh a4 +ZHAI0 zh ai0 +ZHAI1 zh ai1 +ZHAI2 zh ai2 +ZHAI3 zh ai3 +ZHAI4 zh ai4 +ZHAN0 zh an0 +ZHAN1 zh an1 +ZHAN2 zh an2 +ZHAN3 zh an3 +ZHAN4 zh an4 +ZHANG0 zh ang0 +ZHANG1 zh ang1 +ZHANG2 zh ang2 +ZHANG3 zh ang3 +ZHANG4 zh ang4 +ZHAO0 zh ao0 +ZHAO1 zh ao1 +ZHAO2 zh ao2 +ZHAO3 zh ao3 +ZHAO4 zh ao4 +ZHE0 zh e0 +ZHE1 zh e1 +ZHE2 zh e2 +ZHE3 zh e3 +ZHE4 zh e4 +ZHEI0 zh ei0 +ZHEI1 zh ei1 +ZHEI2 zh ei2 +ZHEI3 zh ei3 +ZHEI4 zh ei4 +ZHEN0 zh en0 +ZHEN1 zh en1 +ZHEN2 zh en2 +ZHEN3 zh en3 +ZHEN4 zh en4 +ZHENG0 zh eng0 +ZHENG1 zh eng1 +ZHENG2 zh eng2 +ZHENG3 zh eng3 +ZHENG4 zh eng4 +ZHI0 zh ix0 +ZHI1 zh ix1 +ZHI2 zh ix2 +ZHI3 zh ix3 +ZHI4 zh ix4 +ZHONG0 zh ong0 +ZHONG1 zh ong1 +ZHONG2 zh ong2 +ZHONG3 zh ong3 +ZHONG4 zh ong4 +ZHOU0 zh ou0 +ZHOU1 zh ou1 +ZHOU2 zh ou2 +ZHOU3 zh ou3 +ZHOU4 zh ou4 +ZHU0 zh u0 +ZHU1 zh u1 +ZHU2 zh u2 +ZHU3 zh u3 +ZHU4 zh u4 +ZHUA0 zh ua0 +ZHUA1 zh ua1 +ZHUA2 zh ua2 +ZHUA3 zh ua3 +ZHUA4 zh ua4 +ZHUAI0 zh uai0 +ZHUAI1 zh uai1 +ZHUAI2 zh uai2 +ZHUAI3 zh uai3 +ZHUAI4 zh uai4 +ZHUAN0 zh uan0 +ZHUAN1 zh uan1 +ZHUAN2 zh uan2 +ZHUAN3 zh uan3 +ZHUAN4 zh uan4 +ZHUANG0 zh uang0 +ZHUANG1 zh uang1 +ZHUANG2 zh uang2 +ZHUANG3 zh uang3 +ZHUANG4 zh uang4 +ZHUI0 zh ui0 +ZHUI1 zh ui1 +ZHUI2 zh ui2 +ZHUI3 zh ui3 +ZHUI4 zh ui4 +ZHUN0 zh un0 +ZHUN1 zh un1 +ZHUN2 zh un2 +ZHUN3 zh un3 +ZHUN4 zh un4 +ZHUO0 zh uo0 +ZHUO1 zh uo1 +ZHUO2 zh uo2 +ZHUO3 zh uo3 +ZHUO4 zh uo4 +ZI0 z iy0 +ZI1 z iy1 +ZI2 z iy2 +ZI3 z iy3 +ZI4 z iy4 +ZONG0 z ong0 +ZONG1 z ong1 +ZONG2 z ong2 +ZONG3 z ong3 +ZONG4 z ong4 +ZOU0 z ou0 +ZOU1 z ou1 +ZOU2 z ou2 +ZOU3 z ou3 +ZOU4 z ou4 +ZU0 z u0 +ZU1 z u1 +ZU2 z u2 +ZU3 z u3 +ZU4 z u4 +ZUAN0 z uan0 +ZUAN1 z uan1 +ZUAN2 z uan2 +ZUAN3 z uan3 +ZUAN4 z uan4 +ZUI0 z ui0 +ZUI1 z ui1 
+ZUI2 z ui2 +ZUI3 z ui3 +ZUI4 z ui4 +ZUN0 z un0 +ZUN1 z un1 +ZUN2 z un2 +ZUN3 z un3 +ZUN4 z un4 +ZUO0 z uo0 +ZUO1 z uo1 +ZUO2 z uo2 +ZUO3 z uo3 +ZUO4 z uo4 +EI0 ee ei0 +EI1 ee ei1 +EI2 ee ei2 +EI3 ee ei3 +EI4 ee ei4 +TEI0 t ei0 +TEI1 t ei1 +TEI2 t ei2 +TEI3 t ei3 +TEI4 t ei4 +HNG0 ee eng0 +HNG1 ee eng1 +HNG2 ee eng2 +HNG3 ee eng3 +HNG4 ee eng4 +LO0 l o0 +LO1 l o1 +LO2 l o2 +LO3 l o3 +LO4 l o4 +N0 ee en0 +N1 ee en1 +N2 ee en2 +N3 ee en3 +N4 ee en4 +NG0 ee eng0 +NG1 ee eng1 +NG2 ee eng2 +NG3 ee eng3 +NG4 ee eng4 +NOU0 n ao0 +NOU1 n ao1 +NOU2 n ao2 +NOU3 n ao3 +NOU4 n ao4 +SEI0 s ei0 +SEI1 s ei1 +SEI2 s ei2 +SEI3 s ei3 +SEI4 s ei4 +A5 aa a5 +AI5 aa ai5 +AN5 aa an5 +ANG5 aa ang5 +AO5 aa ao5 +BA5 b a5 +BAI5 b ai5 +BAN5 b an5 +BANG5 b ang5 +BAO5 b ao5 +BEI5 b ei5 +BEN5 b en5 +BENG5 b eng5 +BI5 b i5 +BIAN5 b ian5 +BIAO5 b iao5 +BIE5 b ie5 +BIN5 b in5 +BING5 b ing5 +BO5 b o5 +BU5 b u5 +CA5 c a5 +CAI5 c ai5 +CAN5 c an5 +CANG5 c ang5 +CAO5 c ao5 +CE5 c e5 +CEN5 c en5 +CENG5 c eng5 +CHA5 ch a5 +CHAI5 ch ai5 +CHAN5 ch an5 +CHANG5 ch ang5 +CHAO5 ch ao5 +CHE5 ch e5 +CHEN5 ch en5 +CHENG5 ch eng5 +CHI5 ch ix5 +CHONG5 ch ong5 +CHOU5 ch ou5 +CHU5 ch u5 +CHUAI5 ch uai5 +CHUAN5 ch uan5 +CHUANG5 ch uang5 +CHUI5 ch ui5 +CHUN5 ch un5 +CHUO5 ch uo5 +CI5 c iy5 +CONG5 c ong5 +COU5 c ou5 +CU5 c u5 +CUAN5 c uan5 +CUI5 c ui5 +CUN5 c un5 +CUO5 c uo5 +DA5 d a5 +DAI5 d ai5 +DAN5 d an5 +DANG5 d ang5 +DAO5 d ao5 +DE5 d e5 +DEI5 d ei5 +DEN5 d en5 +DENG5 d eng5 +DI5 d i5 +DIA5 d ia5 +DIAN5 d ian5 +DIAO5 d iao5 +DIE5 d ie5 +DING5 d ing5 +DIU5 d iu5 +DONG5 d ong5 +DOU5 d ou5 +DU5 d u5 +DUAN5 d uan5 +DUI5 d ui5 +DUN5 d un5 +DUO5 d uo5 +E5 ee e5 +EN5 ee en5 +ER5 ee er5 +FA5 f a5 +FAN5 f an5 +FANG5 f ang5 +FEI5 f ei5 +FEN5 f en5 +FENG5 f eng5 +FO5 f o5 +FOU5 f ou5 +FU5 f u5 +GA5 g a5 +GAI5 g ai5 +GAN5 g an5 +GANG5 g ang5 +GAO5 g ao5 +GE5 g e5 +GEI5 g ei5 +GEN5 g en5 +GENG5 g eng5 +GONG5 g ong5 +GOU5 g ou5 +GU5 g u5 +GUA5 g ua5 +GUAI5 g uai5 +GUAN5 g uan5 +GUANG5 g uang5 +GUI5 g ui5 +GUN5 g un5 +GUO5 g uo5 +HA5 h a5 +HAI5 h ai5 +HAN5 h an5 +HANG5 h ang5 +HAO5 h ao5 +HE5 h e5 +HEI5 h ei5 +HEN5 h en5 +HENG5 h eng5 +HONG5 h ong5 +HOU5 h ou5 +HU5 h u5 +HUA5 h ua5 +HUAI5 h uai5 +HUAN5 h uan5 +HUANG5 h uang5 +HUI5 h ui5 +HUN5 h un5 +HUO5 h uo5 +JI5 j i5 +JIA5 j ia5 +JIAN5 j ian5 +JIANG5 j iang5 +JIAO5 j iao5 +JIE5 j ie5 +JIN5 j in5 +JING5 j ing5 +JIONG5 j iong5 +JIU5 j iu5 +JU5 j v5 +JUAN5 j van5 +JUE5 j ve5 +JUN5 j vn5 +KA5 k a5 +KAI5 k ai5 +KAN5 k an5 +KANG5 k ang5 +KAO5 k ao5 +KE5 k e5 +KEI5 k ei5 +KEN5 k en5 +KENG5 k eng5 +KONG5 k ong5 +KOU5 k ou5 +KU5 k u5 +KUA5 k ua5 +KUAI5 k uai5 +KUAN5 k uan5 +KUANG5 k uang5 +KUI5 k ui5 +KUN5 k un5 +KUO5 k uo5 +LA5 l a5 +LAI5 l ai5 +LAN5 l an5 +LANG5 l ang5 +LAO5 l ao5 +LE5 l e5 +LEI5 l ei5 +LENG5 l eng5 +LI5 l i5 +LIA5 l ia5 +LIAN5 l ian5 +LIANG5 l iang5 +LIAO5 l iao5 +LIE5 l ie5 +LIN5 l in5 +LING5 l ing5 +LIU5 l iu5 +LONG5 l ong5 +LOU5 l ou5 +LU5 l u5 +LUAN5 l uan5 +LUE5 l ve5 +LVE5 l ve5 +LUN5 l un5 +LUO5 l uo5 +LV5 l v5 +MA5 m a5 +MAI5 m ai5 +MAN5 m an5 +MANG5 m ang5 +MAO5 m ao5 +ME5 m e5 +MEI5 m ei5 +MEN5 m en5 +MENG5 m eng5 +MI5 m i5 +MIAN5 m ian5 +MIAO5 m iao5 +MIE5 m ie5 +MIN5 m in5 +MING5 m ing5 +MIU5 m iu5 +MO5 m o5 +MOU5 m ou5 +MU5 m u5 +NA5 n a5 +NAI5 n ai5 +NAN5 n an5 +NANG5 n ang5 +NAO5 n ao5 +NE5 n e5 +NEI5 n ei5 +NEN5 n en5 +NENG5 n eng5 +NI5 n i5 +NIAN5 n ian5 +NIANG5 n iang5 +NIAO5 n iao5 +NIE5 n ie5 +NIN5 n in5 +NING5 n ing5 +NIU5 n iu5 +NONG5 n ong5 +NU5 n u5 +NUAN5 n uan5 +NUE5 n ve5 +NVE5 n ve5 +NUO5 n uo5 +NV5 n v5 +O5 oo o5 +OU5 oo ou5 +PA5 p a5 +PAI5 p ai5 +PAN5 p an5 
+PANG5 p ang5 +PAO5 p ao5 +PEI5 p ei5 +PEN5 p en5 +PENG5 p eng5 +PI5 p i5 +PIAN5 p ian5 +PIAO5 p iao5 +PIE5 p ie5 +PIN5 p in5 +PING5 p ing5 +PO5 p o5 +POU5 p ou5 +PU5 p u5 +QI5 q i5 +QIA5 q ia5 +QIAN5 q ian5 +QIANG5 q iang5 +QIAO5 q iao5 +QIE5 q ie5 +QIN5 q in5 +QING5 q ing5 +QIONG5 q iong5 +QIU5 q iu5 +QU5 q v5 +QUAN5 q van5 +QUE5 q ve5 +QUN5 q vn5 +RAN5 r an5 +RANG5 r ang5 +RAO5 r ao5 +RE5 r e5 +REN5 r en5 +RENG5 r eng5 +RI5 r iz5 +RONG5 r ong5 +ROU5 r ou5 +RU5 r u5 +RUAN5 r uan5 +RUI5 r ui5 +RUN5 r un5 +RUO5 r uo5 +SA5 s a5 +SAI5 s ai5 +SAN5 s an5 +SANG5 s ang5 +SAO5 s ao5 +SE5 s e5 +SEN5 s en5 +SENG5 s eng5 +SHA5 sh a5 +SHAI5 sh ai5 +SHAN5 sh an5 +SHANG5 sh ang5 +SHAO5 sh ao5 +SHE5 sh e5 +SHEI5 sh ei5 +SHEN5 sh en5 +SHENG5 sh eng5 +SHI5 sh ix5 +SHOU5 sh ou5 +SHU5 sh u5 +SHUA5 sh ua5 +SHUAI5 sh uai5 +SHUAN5 sh uan5 +SHUANG5 sh uang5 +SHUI5 sh ui5 +SHUN5 sh un5 +SHUO5 sh uo5 +SI5 s iy5 +SONG5 s ong5 +SOU5 s ou5 +SU5 s u5 +SUAN5 s uan5 +SUI5 s ui5 +SUN5 s un5 +SUO5 s uo5 +TA5 t a5 +TAI5 t ai5 +TAN5 t an5 +TANG5 t ang5 +TAO5 t ao5 +TE5 t e5 +TENG5 t eng5 +TI5 t i5 +TIAN5 t ian5 +TIAO5 t iao5 +TIE5 t ie5 +TING5 t ing5 +TONG5 t ong5 +TOU5 t ou5 +TU5 t u5 +TUAN5 t uan5 +TUI5 t ui5 +TUN5 t un5 +TUO5 t uo5 +WA5 uu ua5 +WAI5 uu uai5 +WAN5 uu uan5 +WANG5 uu uang5 +WEI5 uu ui5 +WEN5 uu un5 +WENG5 uu ueng5 +WO5 uu uo5 +WU5 uu u5 +XI5 x i5 +XIA5 x ia5 +XIAN5 x ian5 +XIANG5 x iang5 +XIAO5 x iao5 +XIE5 x ie5 +XIN5 x in5 +XING5 x ing5 +XIONG5 x iong5 +XIU5 x iu5 +XU5 x v5 +XUAN5 x van5 +XUE5 x ve5 +XUN5 x vn5 +YA5 ii ia5 +YAN5 ii ian5 +YANG5 ii iang5 +YAO5 ii iao5 +YE5 ii ie5 +YI5 ii i5 +YIN5 ii in5 +YING5 ii ing5 +YO5 ii ou5 +YONG5 ii iong5 +YOU5 ii iu5 +YU5 vv v5 +YUAN5 vv van5 +YUE5 vv ve5 +YUN5 vv vn5 +YUO5 ii ou5 +ZA5 z a5 +ZAI5 z ai5 +ZAN5 z an5 +ZANG5 z ang5 +ZAO5 z ao5 +ZE5 z e5 +ZEI5 z ei5 +ZEN5 z en5 +ZENG5 z eng5 +ZHA5 zh a5 +ZHAI5 zh ai5 +ZHAN5 zh an5 +ZHANG5 zh ang5 +ZHAO5 zh ao5 +ZHE5 zh e5 +ZHEI5 zh ei5 +ZHEN5 zh en5 +ZHENG5 zh eng5 +ZHI5 zh ix5 +ZHONG5 zh ong5 +ZHOU5 zh ou5 +ZHU5 zh u5 +ZHUA5 zh ua5 +ZHUAI5 zh uai5 +ZHUAN5 zh uan5 +ZHUANG5 zh uang5 +ZHUI5 zh ui5 +ZHUN5 zh un5 +ZHUO5 zh uo5 +ZI5 z iy5 +ZONG5 z ong5 +ZOU5 z ou5 +ZU5 z u5 +ZUAN5 z uan5 +ZUI5 z ui5 +ZUN5 z un5 +ZUO5 z uo5 +EI5 ee ei5 +TEI5 t ei5 +HNG5 ee eng5 +LO5 l o5 +N5 ee en5 +NG5 ee eng5 +NOU5 n ao5 +SEI5 s ei5 \ No newline at end of file diff --git a/examples/thchs30/a0/local/data.sh b/examples/thchs30/a0/local/data.sh new file mode 100644 index 000000000..8614a0415 --- /dev/null +++ b/examples/thchs30/a0/local/data.sh @@ -0,0 +1,53 @@ +#! /usr/bin/env bash + +stage=-1 +stop_stage=100 + +source ${MAIN_ROOT}/utils/parse_options.sh + +mkdir -p data +TARGET_DIR=${MAIN_ROOT}/examples/dataset +mkdir -p ${TARGET_DIR} +LEXICON_NAME=$1 + +# download data, generate manifests +if [ ${stage} -le -1 ] && [ ${stop_stage} -ge -1 ]; then + python3 ${TARGET_DIR}/thchs30/thchs30.py \ + --manifest_prefix="data/manifest" \ + --target_dir="${TARGET_DIR}/thchs30" + + if [ $? -ne 0 ]; then + echo "Prepare THCHS-30 failed. Terminated." 
+        exit 1
+    fi
+fi
+
+if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
+    # dump manifest to data/
+    python3 ${MAIN_ROOT}/utils/dump_manifest.py --manifest-path=data/manifest.train --output-dir=data
+fi
+
+if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
+    # copy lexicon files to data/dict to generate word.lexicon
+    mkdir -p data/dict
+    cp ${TARGET_DIR}/thchs30/data_thchs30/lm_word/lexicon.txt data/dict/lm_word_lexicon_1
+    cp ${TARGET_DIR}/thchs30/resource/dict/lexicon.txt data/dict/lm_word_lexicon_2
+    # copy phone.lexicon to data/dict
+    cp ${TARGET_DIR}/thchs30/data_thchs30/lm_phone/lexicon.txt data/dict/phone.lexicon
+fi
+
+if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
+    # generate word.lexicon
+    python local/gen_word2phone.py --lexicon-files="data/dict/lm_word_lexicon_1 data/dict/lm_word_lexicon_2" --output-path=data/dict/word.lexicon
+fi
+
+if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
+    # reorganize dataset for MFA; the corpus is written to data/thchs30_corpus
+    if [ ! -d data/thchs30_corpus ]; then
+        echo "reorganizing thchs30 corpus..."
+        python local/reorganize_thchs30.py --root-dir=data --output-dir=data/thchs30_corpus --script-type=$LEXICON_NAME
+        echo "reorganization done."
+    fi
+fi
+
+echo "THCHS-30 data preparation done."
+exit 0
diff --git a/examples/thchs30/a0/local/gen_word2phone.py b/examples/thchs30/a0/local/gen_word2phone.py
new file mode 100644
index 000000000..9bc0249bf
--- /dev/null
+++ b/examples/thchs30/a0/local/gen_word2phone.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
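+# How it works: for every lexicon entry, count how often each character is
+# pronounced as each (initial, final) phone pair, then normalize the counts
+# into per-character pronunciation probabilities and write one output line
+# per (character, pronunciation) pair.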
+"""Generate a Chinese character to phone lexicon for THCHS-30 using THCHS-30's lexicon files
+file1: THCHS-30/data_thchs30/lm_word/lexicon.txt
+file2: THCHS-30/resource/dict/lexicon.txt
+"""
+import argparse
+from collections import defaultdict
+from pathlib import Path
+from typing import List
+from typing import Union
+
+# key: (cn, ('ee', 'er4')), value: count
+cn_phones_counter = defaultdict(int)
+# key: cn, value: list of (phones, count)
+cn_counter = defaultdict(list)
+# key: cn, value: list of (phones, probability)
+cn_counter_p = defaultdict(list)
+
+
+def is_Chinese(ch):
+    # True if ch falls in the CJK Unified Ideographs block
+    return '\u4e00' <= ch <= '\u9fff'
+
+
+def proc_line(line: str):
+    line = line.strip()
+    if is_Chinese(line[0]):
+        line_list = line.split()
+        cn_list = line_list[0]
+        phone_list = line_list[1:]
+        # each character maps to an (initial, final) pair, so a valid entry
+        # has twice as many phone tokens as characters
+        if 2 * len(cn_list) == len(phone_list):
+            new_phone_list = [(phone_list[i], phone_list[i + 1])
+                              for i in range(0, len(phone_list), 2)]
+            assert len(cn_list) == len(new_phone_list)
+            for idx, cn in enumerate(cn_list):
+                phones = new_phone_list[idx]
+                cn_phones_counter[(cn, phones)] += 1
+
+
+"""
+Example lines of the output. The first column is a Chinese character,
+the second is the probability of this pronunciation,
+and the rest are the phones of this pronunciation.
+一 0.22 ii i1
+一 0.45 ii i4
+一 0.32 ii i2
+一 0.01 ii i5
+"""
+
+
+def gen_lexicon(lexicon_files: List[Union[str, Path]],
+                output_path: Union[str, Path]):
+    for file_path in lexicon_files:
+        with open(file_path, "r") as f1:
+            for line in f1:
+                proc_line(line)
+
+    for key in cn_phones_counter:
+        cn = key[0]
+        cn_counter[cn].append((key[1], cn_phones_counter[key]))
+
+    for key in cn_counter:
+        phone_count_list = cn_counter[key]
+        count_sum = sum([x[1] for x in phone_count_list])
+        for item in phone_count_list:
+            p = item[1] / count_sum
+            p = round(p, 2)
+            if p > 0:
+                cn_counter_p[key].append((item[0], p))
+
+    with open(output_path, "w") as wf:
+        for key in cn_counter_p:
+            phone_p_list = cn_counter_p[key]
+            for item in phone_p_list:
+                phones, p = item
+                wf.write(key + " " + str(p) + " " + " ".join(phones) + "\n")
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Generate a Chinese character to phone lexicon for the THCHS-30 dataset"
+    )
+    # A line of word_lexicon:
+    # 一丁点 ii i4 d ing1 d ian3
+    # the first field is the word; the rest are its phones, and the number of
+    # phone tokens is twice the number of characters in the word
+    parser.add_argument(
+        "--lexicon-files",
+        type=str,
+        default="data/dict/lm_word_lexicon_1 data/dict/lm_word_lexicon_2",
+        help="lm_word_lexicon files")
+    parser.add_argument(
+        "--output-path",
+        type=str,
+        default="data/dict/word.lexicon",
+        help="path to save the output word2phone lexicon")
+    args = parser.parse_args()
+    lexicon_files = args.lexicon_files.split(" ")
+    output_path = Path(args.output_path).expanduser()
+
+    gen_lexicon(lexicon_files, output_path)
diff --git a/examples/thchs30/a0/local/reorganize_thchs30.py b/examples/thchs30/a0/local/reorganize_thchs30.py
new file mode 100644
index 000000000..c7c6248bc
--- /dev/null
+++ b/examples/thchs30/a0/local/reorganize_thchs30.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Reorganize THCHS-30 for MFA
+read manifest.train from root-dir
+link *.wav into output-dir
+dump *.lab from manifest.train for the chosen script type: word, syllable or phone
+Manifest file is a json-format file with each line containing the
+meta data (i.e. audio filepath, transcript and audio duration)
+"""
+import argparse
+import os
+from pathlib import Path
+from typing import Union
+
+
+def link_wav(root_dir: Union[str, Path], output_dir: Union[str, Path]):
+    wav_scp_path = root_dir / 'wav.scp'
+    with open(wav_scp_path, 'r') as rf:
+        for line in rf:
+            utt, feat = line.strip().split()
+            wav_path = feat
+            wav_name = wav_path.split("/")[-1]
+            new_wav_path = output_dir / wav_name
+            os.symlink(wav_path, new_wav_path)
+
+
+def write_lab(root_dir: Union[str, Path],
+              output_dir: Union[str, Path],
+              script_type='phone'):
+    # script_type is one of {'word', 'syllable', 'phone'}
+    json_name = 'text.' + script_type
+    json_path = root_dir / json_name
+    with open(json_path, 'r') as rf:
+        for line in rf:
+            line = line.strip().split()
+            utt_id = line[0]
+            context = ' '.join(line[1:])
+            transcript_name = utt_id + '.lab'
+            transcript_path = output_dir / transcript_name
+            with open(transcript_path, 'wt') as wf:
+                if script_type == 'word':
+                    # insert a space between Chinese characters
+                    context = ' '.join(context)
+                wf.write(context + "\n")
+
+
+def reorganize_thchs30(root_dir: Union[str, Path],
+                       output_dir: Union[str, Path],
+                       script_type='phone'):
+    output_dir.mkdir(parents=True, exist_ok=True)
+    link_wav(root_dir, output_dir)
+    write_lab(root_dir, output_dir, script_type)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Reorganize THCHS-30 dataset for MFA")
+    parser.add_argument("--root-dir", type=str, help="path to thchs30 dataset.")
+    parser.add_argument(
+        "--output-dir",
+        type=str,
+        help="path to save outputs (audio and transcriptions)")
+
+    parser.add_argument(
+        "--script-type",
+        type=str,
+        default="phone",
+        help="type of lab ('word'/'syllable'/'phone')")
+
+    args = parser.parse_args()
+    root_dir = Path(args.root_dir).expanduser()
+    output_dir = Path(args.output_dir).expanduser()
+    reorganize_thchs30(root_dir, output_dir, args.script_type)
diff --git a/examples/thchs30/a0/path.sh b/examples/thchs30/a0/path.sh
new file mode 100644
index 000000000..fc953bebf
--- /dev/null
+++ b/examples/thchs30/a0/path.sh
@@ -0,0 +1,13 @@
+export MAIN_ROOT=`realpath ${PWD}/../../../`
+
+export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
+export LC_ALL=C
+
+# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
+export PYTHONIOENCODING=UTF-8
+export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH}
+
+export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/
+
+# MFA is in tools
+export PATH=${MAIN_ROOT}/tools/montreal-forced-aligner/bin:$PATH
\ No newline at end of file
diff --git a/examples/thchs30/a0/run.sh b/examples/thchs30/a0/run.sh
new file mode 100755
index 000000000..5081b612a
--- /dev/null
+++ b/examples/thchs30/a0/run.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+set -e
+source path.sh
+stage=0
+stop_stage=100
+EXP_DIR=exp
+# LEXICON_NAME is one of {'phone', 'syllable', 'word'}
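+# LEXICON_NAME selects both the .lab transcripts dumped for MFA (it is passed
+# to local/data.sh as the --script-type) and the dictionary
+# data/dict/$LEXICON_NAME.lexicon used in the alignment stage below.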
+LEXICON_NAME='phone'
+# set MFA num_jobs to half the machine's CPU core count
+NUM_JOBS=$((`nproc`/2))
+source ${MAIN_ROOT}/utils/parse_options.sh || exit 1;
+
+# download the dataset, unzip it and generate manifests,
+# then dump features, build the lexicon and reorganize the corpus (see local/data.sh)
+if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
+    # prepare data
+    echo "Start preparing THCHS-30 data for MFA ..."
+    bash ./local/data.sh $LEXICON_NAME || exit -1
+fi
+
+if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
+    # run MFA
+    if [ ! -d "$EXP_DIR/thchs30_alignment" ]; then
+        echo "Start MFA training ..."
+        mfa_train_and_align data/thchs30_corpus data/dict/$LEXICON_NAME.lexicon $EXP_DIR/thchs30_alignment -o $EXP_DIR/thchs30_model --clean --verbose --temp_directory exp/.mfa_train_and_align --num_jobs $NUM_JOBS
+        # echo -e so the \n escapes are interpreted
+        echo -e "MFA training done!\nresults: $EXP_DIR/thchs30_alignment\nmodel: $EXP_DIR/thchs30_model"
+    fi
+fi
+
+
+
+
+
+
+
diff --git a/examples/tn/.gitignore b/examples/tn/.gitignore
new file mode 100644
index 000000000..0f2503386
--- /dev/null
+++ b/examples/tn/.gitignore
@@ -0,0 +1 @@
+exp
diff --git a/examples/tn/README.md b/examples/tn/README.md
new file mode 100644
index 000000000..ff7be2934
--- /dev/null
+++ b/examples/tn/README.md
@@ -0,0 +1,36 @@
+# Regular expression based text normalization for Chinese
+
+For simplicity and ease of implementation, text normalization is done mainly with rules and dictionaries. Here's an example.
+
+## Run
+
+```
+. path.sh
+bash run.sh
+```
+
+## Results
+
+```
+exp/
+`-- normalized.txt
+
+0 directories, 1 file
+```
+
+```
+aff31f8aa08e2a7360228c9ce5886b98  exp/normalized.txt
+```
+
+```
+今天的最低气温达到零下十度.
+只要有四分之三十三的人同意,就可以通过决议。
+一九四五年五月二日,苏联士兵在德国国会大厦上升起了胜利旗,象征着攻占柏林并战胜了纳粹德国。
+四月十六日,清晨的战斗以炮击揭幕,数以千计的大炮和喀秋莎火箭炮开始炮轰德军阵地,炮击持续了数天之久。
+如果剩下的百分之三十点六是过去,那么还有百分之六十九点四.
+事情发生在二零二零年三月三十一日的上午八点.
+警方正在找一支点二二口径的手枪。
+欢迎致电中国联通,北京二零二二年冬奥会官方合作伙伴为您服务
+充值缴费请按一,查询话费及余量请按二,跳过本次提醒请按井号键。
+快速解除流量封顶请按星号键,腾讯王卡产品介绍、使用说明、特权及活动请按九,查询话费、套餐余量、积分及活动返款请按一,手机上网流量开通及取消请按二,查询本机号码及本号所使用套餐请按四,密码修改及重置请按五,紧急开机请按六,挂失请按七,查询充值记录请按八,其它自助服务及人工服务请按零
+```
diff --git a/examples/tn/data/sentences.txt b/examples/tn/data/sentences.txt
new file mode 100644
index 000000000..d15bfe46b
--- /dev/null
+++ b/examples/tn/data/sentences.txt
@@ -0,0 +1,26 @@
+今天的最低气温达到-10°C.
+只要有33/4的人同意,就可以通过决议。
+1945年5月2日,苏联士兵在德国国会大厦上升起了胜利旗,象征着攻占柏林并战胜了纳粹德国。
+4月16日,清晨的战斗以炮击揭幕,数以千计的大炮和喀秋莎火箭炮开始炮轰德军阵地,炮击持续了数天之久。
+如果剩下的30.6%是过去,那么还有69.4%.
+事情发生在2020/03/31的上午8:00.
+警方正在找一支.22口径的手枪。
+欢迎致电中国联通,北京2022年冬奥会官方合作伙伴为您服务
+充值缴费请按1,查询话费及余量请按2,跳过本次提醒请按井号键。
+快速解除流量封顶请按星号键,腾讯王卡产品介绍、使用说明、特权及活动请按9,查询话费、套餐余量、积分及活动返款请按1,手机上网流量开通及取消请按2,查询本机号码及本号所使用套餐请按4,密码修改及重置请按5,紧急开机请按6,挂失请按7,查询充值记录请按8,其它自助服务及人工服务请按0
+智能客服助理快速查话费、查流量请按9,了解北京联通业务请按1,宽带IPTV新装、查询请按2,障碍报修请按3,充值缴费请按4,投诉建议请按5,政企业务请按7,人工服务请按0,for english severice press star key
+您的帐户当前可用余额为63.89元,本月消费为2.17元。您的消费、套餐余量和其它信息将以短信形式下发,请您注意查收。谢谢使用,再见!。
+您的帐户当前可用余额为负15.5元,本月消费为59.6元。您的消费、套餐余量和其它信息将以短信形式下发,请您注意查收。谢谢使用,再见!。
+尊敬的客户,您目前的话费余额为负14.60元,已低于10元,为保证您的通信畅通,请及时缴纳费用。
+您的流量已用完,为避免您产生额外费用,建议您根据需求开通一个流量包以作补充。
+您可以直接说,查询话费及余量、开通流量包、缴费,您也可以说出其它需求,请问有什么可以帮您?
+您的账户当前可用余额为负36.00元,本月消费36.00元。
+请问你是电话13985608526的机主吗?
+如您对处理结果不满意,可拨打中国联通集团投诉电话10015进行投诉,按本地通话费收费,返回自助服务请按井号键
+“26314”号VIP客服代表为您服务。
+尊敬的5G用户,欢迎您致电中国联通
+首先是应用了M1芯片的iPad Pro,新款的iPad Pro支持5G,这也是苹果的第二款5G产品线。
+除此之外,摄像头方面再次升级,增加了前摄全新超广角摄像头,支持人物居中功能,搭配超广角可实现视频中始终让人物居中效果。
+屏幕方面,iPad Pro 12.9版本支持XDR体验的Mini-LEDS显示屏,支持HDR10、杜比视界,还支持杜比全景声。
+iPad Pro的秒控键盘这次也推出白色版本。
+售价方面,11英寸版本售价799美元起,12.9英寸售价1099美元起。
diff --git a/examples/tn/local/test_normalization.py b/examples/tn/local/test_normalization.py
new file mode 100644
index 000000000..bcf7ee0da
--- /dev/null
+++ b/examples/tn/local/test_normalization.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import argparse
+
+from text_processing import normalization
+
+parser = argparse.ArgumentParser(
+    description="Normalize text in Chinese with some rules.")
+parser.add_argument("input", type=str, help="the input sentences")
+parser.add_argument("output", type=str, help="path to save the output file.")
+args = parser.parse_args()
+
+with open(args.input, 'rt') as fin:
+    with open(args.output, 'wt') as fout:
+        for sent in fin:
+            sent = normalization.normalize_sentence(sent.strip())
+            fout.write(sent)
+            fout.write('\n')
diff --git a/examples/tn/path.sh b/examples/tn/path.sh
new file mode 100644
index 000000000..30689eee7
--- /dev/null
+++ b/examples/tn/path.sh
@@ -0,0 +1,8 @@
+export MAIN_ROOT=`realpath ${PWD}/../../`
+
+export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
+export LC_ALL=C
+
+# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
+export PYTHONIOENCODING=UTF-8
+export PYTHONPATH=${MAIN_ROOT}:${MAIN_ROOT}/third_party:${PYTHONPATH}
diff --git a/examples/tn/run.sh b/examples/tn/run.sh
new file mode 100755
index 000000000..c4043a319
--- /dev/null
+++ b/examples/tn/run.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+source path.sh
+
+stage=-1
+stop_stage=100
+
+exp_dir=exp
+data_dir=data
+filename="sentences.txt"
+
+source ${MAIN_ROOT}/utils/parse_options.sh || exit -1
+
+mkdir -p ${exp_dir}
+
+
+if [ $stage -le 1 ] && [ $stop_stage -ge 1 ]; then
+    echo "stage 1: Processing"
+    python3 local/test_normalization.py ${data_dir}/${filename} ${exp_dir}/normalized.txt
+    if [ -f "${exp_dir}/normalized.txt" ]; then
+        echo "Normalized text saved at ${exp_dir}/normalized.txt"
+    fi
+    # TODO(chenfeiyu): compute edit distance against ground-truth
+fi
+
+echo "done"
+exit 0
diff --git a/hub/requirements.txt b/hub/requirements.txt
new file mode 100644
index 000000000..c4c7d0229
--- /dev/null
+++ b/hub/requirements.txt
@@ -0,0 +1,26 @@
+coverage
+gpustat
+jsonlines
+kaldiio
+llvmlite==0.31.0
+loguru
+numba==0.47.0
+numpy==1.18.5
+Pillow
+pre-commit
+pybind11
+python-speech-features
+resampy==0.2.2
+sacrebleu
+scipy==1.2.1
+sentencepiece
+snakeviz
+SoundFile==0.9.0.post1
+sox
+soxbindings
+tensorboardX
+textgrid
+tqdm
+typeguard
+visualdl==2.2.0
+yacs
diff --git a/hub/setup_hub.sh b/hub/setup_hub.sh
new file mode 100644
index 000000000..f2d43ad10
--- /dev/null
+++ b/hub/setup_hub.sh
@@ -0,0 +1,66 @@
+#! /usr/bin/env bash
+cd .. >> /dev/null
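+# we are now at the repository root, where utils/, tools/ and
+# deepspeech/ referenced below live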
+source utils/log.sh
+
+
+SUDO='sudo'
+if [ $(id -u) -eq 0 ]; then
+    SUDO=''
+fi
+
+if [ -e /etc/lsb-release ];then
+    ${SUDO} apt-get update -y
+    ${SUDO} apt-get install -y jq vim tig tree sox pkg-config libflac-dev libogg-dev libvorbis-dev libboost-dev swig python3-dev
+    if [ $? != 0 ]; then
+        error_msg "Please use Ubuntu, or install pkg-config libflac-dev libogg-dev libvorbis-dev libboost-dev swig python3-dev manually."
+        exit -1
+    fi
+fi
+
+
+source tools/venv/bin/activate
+
+cd -
+# install python dependencies
+if [ -f "requirements.txt" ]; then
+    pip3 install -r requirements.txt
+fi
+if [ $? != 0 ]; then
+    error_msg "Install python dependencies failed !!!"
+    exit 1
+fi
+cd .. >> /dev/null
+
+# install package libsndfile
+python3 -c "import soundfile"
+if [ $? != 0 ]; then
+    info_msg "Install package libsndfile into default system path."
+    wget "http://www.mega-nerd.com/libsndfile/files/libsndfile-1.0.28.tar.gz"
+    if [ $? != 0 ]; then
+        error_msg "Download libsndfile-1.0.28.tar.gz failed !!!"
+        exit 1
+    fi
+    tar -zxvf libsndfile-1.0.28.tar.gz
+    cd libsndfile-1.0.28
+    ./configure > /dev/null && make > /dev/null && make install > /dev/null
+    cd ..
+    rm -rf libsndfile-1.0.28
+    rm libsndfile-1.0.28.tar.gz
+fi
+
+
+# install decoders
+python3 -c "import pkg_resources; pkg_resources.require(\"swig_decoders==1.1\")"
+if [ $? != 0 ]; then
+    cd deepspeech/decoders/swig > /dev/null
+    sh setup.sh
+    cd - > /dev/null
+fi
+python3 -c "import pkg_resources; pkg_resources.require(\"swig_decoders==1.1\")"
+if [ $? != 0 ]; then
+    error_msg "Please check why the decoder installation failed!"
+    exit -1
+fi
+
+
+info_msg "Install all dependencies successfully."
diff --git a/requirements.txt b/requirements.txt
index 57a951bbd..9ecf6bbd8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,13 @@
 coverage
+gpustat
+jsonlines
+kaldiio
+loguru
+Pillow
 pre-commit
 pybind11
 resampy==0.2.2
+sacrebleu
 scipy==1.2.1
 sentencepiece
 snakeviz
@@ -9,5 +15,8 @@ SoundFile==0.9.0.post1
 sox
 tensorboardX
 textgrid
+tqdm
 typeguard
+visualdl==2.2.0
 yacs
+editdistance
\ No newline at end of file
diff --git a/setup.sh b/setup.sh
index 11daa102a..6e472c47d 100644
--- a/setup.sh
+++ b/setup.sh
@@ -9,14 +9,21 @@ if [ $(id -u) -eq 0 ]; then
 fi
 
 if [ -e /etc/lsb-release ];then
-    #${SUDO} apt-get update
-    ${SUDO} apt-get install -y vim tig tree sox pkg-config libflac-dev libogg-dev libvorbis-dev libboost-dev swig python3-dev
+    ${SUDO} apt-get update -y
+    ${SUDO} apt-get install -y jq vim tig tree sox pkg-config libflac-dev libogg-dev libvorbis-dev libboost-dev swig python3-dev
     if [ $? != 0 ]; then
         error_msg "Please using Ubuntu or install pkg-config libflac-dev libogg-dev libvorbis-dev libboost-dev swig python3-dev by user."
         exit -1
     fi
 fi
+
+# tools/make
+rm tools/*.done
+pushd tools && make && popd
+
+source tools/venv/bin/activate
+
 # install python dependencies
 if [ -f "requirements.txt" ]; then
     pip3 install -r requirements.txt
@@ -43,6 +50,22 @@ if [ $? != 0 ]; then
     rm libsndfile-1.0.28.tar.gz
 fi
 
+# install auto-log
+python -c "import auto_log"
+if [ $? != 0 ]; then
+    info_msg "Install auto_log into default system path"
+    test -d AutoLog || git clone https://github.com/LDOUBLEV/AutoLog
+    if [ $? != 0 ]; then
+        error_msg "Download auto_log failed !!!"
+        exit 1
+    fi
+    cd AutoLog
+    pip install -r requirements.txt
+    python setup.py install
+    cd ..
+    rm -rf AutoLog
+fi
+
 # install decoders
 python3 -c "import pkg_resources; pkg_resources.require(\"swig_decoders==1.1\")"
 if [ $? != 0 ]; then
@@ -66,4 +89,5 @@ if [ $? != 0 ]; then
 fi
 popd
+
+info_msg "Install all dependencies successfully."
diff --git a/speechnn/.gitignore b/speechnn/.gitignore
new file mode 100644
index 000000000..378eac25d
--- /dev/null
+++ b/speechnn/.gitignore
@@ -0,0 +1 @@
+build
diff --git a/speechnn/CMakeLists.txt b/speechnn/CMakeLists.txt
new file mode 100644
index 000000000..88182eb4c
--- /dev/null
+++ b/speechnn/CMakeLists.txt
@@ -0,0 +1,56 @@
+cmake_minimum_required(VERSION 3.14 FATAL_ERROR)
+
+project(speechnn VERSION 0.1)
+
+if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+    set(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_SOURCE_DIR}/src CACHE PATH "Install path prefix." FORCE)
+endif(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake;${CMAKE_MODULE_PATH}")
+
+# include file
+include(cmake/third_party.cmake)
+
+
+set(CMAKE_VERBOSE_MAKEFILE on)
+# use the C++14 standard
+set(CMAKE_CXX_STANDARD 14)
+
+
+# # fc_patch dir
+# set(FETCHCONTENT_QUIET off)
+# get_filename_component(fc_patch "fc_patch" REALPATH BASE_DIR "${CMAKE_SOURCE_DIR}")
+# set(FETCHCONTENT_BASE_DIR ${fc_patch})
+#
+#
+# ###############################################################################
+# # Option Configurations
+# ###############################################################################
+# # option configurations
+# option(TEST_DEBUG "option for debug" OFF)
+#
+#
+# ###############################################################################
+# # Add local library
+# ###############################################################################
+# # system lib
+# find_package()
+# # if the dir has a CMakeLists.txt
+# add_subdirectory()
+# # if the dir does not have a CMakeLists.txt
+# add_library(lib_name STATIC file.cc)
+# target_link_libraries(lib_name item0 item1)
+# add_dependencies(lib_name depend-target)
+#
+#
+# ###############################################################################
+# # Library installation
+# ###############################################################################
+# install()
+#
+#
+# ###############################################################################
+# # Build binary file
+# ###############################################################################
+# add_executable()
+# target_link_libraries()
+#
diff --git a/speechnn/cmake/third_party.cmake b/speechnn/cmake/third_party.cmake
new file mode 100644
index 000000000..fdd7b53c2
--- /dev/null
+++ b/speechnn/cmake/third_party.cmake
@@ -0,0 +1,197 @@
+include(ExternalProject)
+# Create a target named "third_party", which compiles external dependencies on all platforms (Windows/Linux/Mac)
+
+set(THIRD_PARTY_PATH "${CMAKE_BINARY_DIR}/third_party" CACHE STRING
+    "A path setting third party libraries download & build directories.")
+set(THIRD_PARTY_CACHE_PATH "${CMAKE_SOURCE_DIR}" CACHE STRING
+    "A path to cache third party source code to avoid repeated downloads.")
+
+set(THIRD_PARTY_BUILD_TYPE Release)
+set(third_party_deps)
+
+
+# cache function to avoid repeatedly downloading third_party code.
+# This function has 4 parameters, URL / REPOSITORY / TAG / DIR:
+# 1. URL: specify the download url of the 3rd party
+# 2. REPOSITORY: specify the git REPOSITORY of the 3rd party
+# 3. TAG: specify the git tag/branch/commitID of the 3rd party
+# 4. DIR: overwrite the original SOURCE_DIR when the cache directory is used
+#
+# The function returns 1 PARENT_SCOPE variable:
+# - ${TARGET}_DOWNLOAD_CMD: simply place "${TARGET}_DOWNLOAD_CMD" in ExternalProject_Add,
+#   and you no longer need to set any download steps in ExternalProject_Add.
+# For example:
+#    Cache_third_party(${TARGET}
+#            REPOSITORY ${TARGET_REPOSITORY}
+#            TAG        ${TARGET_TAG}
+#            DIR        ${TARGET_SOURCE_DIR})
+
+FUNCTION(cache_third_party TARGET)
+    SET(options "")
+    SET(oneValueArgs URL REPOSITORY TAG DIR)
+    SET(multiValueArgs "")
+    cmake_parse_arguments(cache_third_party "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
+
+    STRING(REPLACE "extern_" "" TARGET_NAME ${TARGET})
+    STRING(REGEX REPLACE "[0-9]+" "" TARGET_NAME ${TARGET_NAME})
+    STRING(TOUPPER ${TARGET_NAME} TARGET_NAME)
+    IF(cache_third_party_REPOSITORY)
+        SET(${TARGET_NAME}_DOWNLOAD_CMD
+                GIT_REPOSITORY  ${cache_third_party_REPOSITORY})
+        IF(cache_third_party_TAG)
+            LIST(APPEND   ${TARGET_NAME}_DOWNLOAD_CMD
+                    GIT_TAG     ${cache_third_party_TAG})
+        ENDIF()
+    ELSEIF(cache_third_party_URL)
+        SET(${TARGET_NAME}_DOWNLOAD_CMD
+                URL             ${cache_third_party_URL})
+    ELSE()
+        MESSAGE(FATAL_ERROR "Download link (Git repo or URL) must be specified for cache!")
+    ENDIF()
+    IF(WITH_TP_CACHE)
+        IF(NOT cache_third_party_DIR)
+            MESSAGE(FATAL_ERROR "Please input the ${TARGET_NAME}_SOURCE_DIR for overwriting when -DWITH_TP_CACHE=ON")
+        ENDIF()
+        # Generate and verify the cache dir for third_party source code
+        SET(cache_third_party_REPOSITORY ${cache_third_party_REPOSITORY} ${cache_third_party_URL})
+        IF(cache_third_party_REPOSITORY AND cache_third_party_TAG)
+            STRING(MD5 HASH_REPO ${cache_third_party_REPOSITORY})
+            STRING(MD5 HASH_GIT ${cache_third_party_TAG})
+            STRING(SUBSTRING ${HASH_REPO} 0 8 HASH_REPO)
+            STRING(SUBSTRING ${HASH_GIT} 0 8 HASH_GIT)
+            STRING(CONCAT HASH ${HASH_REPO} ${HASH_GIT})
+            # overwrite the original SOURCE_DIR when the cache directory is used
+            SET(${cache_third_party_DIR} ${THIRD_PARTY_CACHE_PATH}/third_party/${TARGET}_${HASH})
+        ELSEIF(cache_third_party_REPOSITORY)
+            STRING(MD5 HASH_REPO ${cache_third_party_REPOSITORY})
+            STRING(SUBSTRING ${HASH_REPO} 0 16 HASH)
+            # overwrite the original SOURCE_DIR when the cache directory is used
+            SET(${cache_third_party_DIR} ${THIRD_PARTY_CACHE_PATH}/third_party/${TARGET}_${HASH})
+        ENDIF()
+
+        IF(EXISTS ${${cache_third_party_DIR}})
+            # check whether the cache dir is empty
+            FILE(GLOB files ${${cache_third_party_DIR}}/*)
+            LIST(LENGTH files files_len)
+            IF(files_len GREATER 0)
+                list(APPEND ${TARGET_NAME}_DOWNLOAD_CMD DOWNLOAD_COMMAND "")
+            ENDIF()
+        ENDIF()
+        SET(${cache_third_party_DIR} ${${cache_third_party_DIR}} PARENT_SCOPE)
+    ENDIF()
+
+    # Pass ${TARGET_NAME}_DOWNLOAD_CMD to the parent scope; the double quotation marks must not be removed
+    SET(${TARGET_NAME}_DOWNLOAD_CMD "${${TARGET_NAME}_DOWNLOAD_CMD}" PARENT_SCOPE)
+ENDFUNCTION()
+
+MACRO(UNSET_VAR VAR_NAME)
+    UNSET(${VAR_NAME} CACHE)
+    UNSET(${VAR_NAME})
+ENDMACRO()
+
+# Function to download the dependencies during compilation
+# This function has 2 parameters, URL / NAME:
+# 1. URL: the download url of the 3rd-party dependency
+# 2. NAME: the name of the file, which determines the directory name
+#
+FUNCTION(file_download_and_uncompress URL NAME)
+  set(options "")
+  set(oneValueArgs MD5)
+  set(multiValueArgs "")
+  cmake_parse_arguments(URL "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
+  MESSAGE(STATUS "Download dependency [${NAME}] from ${URL}, MD5: ${URL_MD5}")
+  SET(${NAME}_INCLUDE_DIR ${THIRD_PARTY_PATH}/${NAME}/data PARENT_SCOPE)
+  ExternalProject_Add(
+      download_${NAME}
+      ${EXTERNAL_PROJECT_LOG_ARGS}
+      PREFIX                  ${THIRD_PARTY_PATH}/${NAME}
+      URL                     ${URL}
+      URL_MD5                 ${URL_MD5}
+      TIMEOUT                 120
+      DOWNLOAD_DIR            ${THIRD_PARTY_PATH}/${NAME}/data/
+      SOURCE_DIR              ${THIRD_PARTY_PATH}/${NAME}/data/
+      DOWNLOAD_NO_PROGRESS    1
+      CONFIGURE_COMMAND       ""
+      BUILD_COMMAND           ""
+      UPDATE_COMMAND          ""
+      INSTALL_COMMAND         ""
+    )
+  set(third_party_deps ${third_party_deps} download_${NAME} PARENT_SCOPE)
+ENDFUNCTION()
+
+
+# Correct flags on different platforms (Windows/Mac) and print warning messages
+if (APPLE)
+    if(WITH_MKL)
+        MESSAGE(WARNING
+            "Mac is not supported with MKL in Paddle yet. Force WITH_MKL=OFF.")
+        set(WITH_MKL OFF CACHE STRING "Disable MKL for building on mac" FORCE)
+    endif()
+endif()
+
+if(WIN32 OR APPLE)
+    MESSAGE(STATUS "Disable XBYAK in Windows and MacOS")
+    SET(WITH_XBYAK OFF CACHE STRING "Disable XBYAK in Windows and MacOS" FORCE)
+
+    if(WITH_LIBXSMM)
+        MESSAGE(WARNING
+            "Windows, Mac are not supported with libxsmm in Paddle yet."
+            "Force WITH_LIBXSMM=OFF")
+        SET(WITH_LIBXSMM OFF CACHE STRING "Disable LIBXSMM in Windows and MacOS" FORCE)
+    endif()
+
+    if(WITH_BOX_PS)
+        MESSAGE(WARNING
+            "Windows or Mac is not supported with BOX_PS in Paddle yet."
+            "Force WITH_BOX_PS=OFF")
+        SET(WITH_BOX_PS OFF CACHE STRING "Disable BOX_PS package in Windows and MacOS" FORCE)
+    endif()
+
+    if(WITH_PSLIB)
+        MESSAGE(WARNING
+            "Windows or Mac is not supported with PSLIB in Paddle yet."
+            "Force WITH_PSLIB=OFF")
+        SET(WITH_PSLIB OFF CACHE STRING "Disable PSLIB package in Windows and MacOS" FORCE)
+    endif()
+
+    if(WITH_LIBMCT)
+        MESSAGE(WARNING
+            "Windows or Mac is not supported with LIBMCT in Paddle yet."
+            "Force WITH_LIBMCT=OFF")
+        SET(WITH_LIBMCT OFF CACHE STRING "Disable LIBMCT package in Windows and MacOS" FORCE)
+    endif()
+
+    if(WITH_PSLIB_BRPC)
+        MESSAGE(WARNING
+            "Windows or Mac is not supported with PSLIB_BRPC in Paddle yet."
+            "Force WITH_PSLIB_BRPC=OFF")
+        SET(WITH_PSLIB_BRPC OFF CACHE STRING "Disable PSLIB_BRPC package in Windows and MacOS" FORCE)
+    endif()
+endif()
+
+set(WITH_MKLML ${WITH_MKL})
+if(NOT DEFINED WITH_MKLDNN)
+    if(WITH_MKL AND AVX2_FOUND)
+        set(WITH_MKLDNN ON)
+    else()
+        message(STATUS "AVX2 intrinsics not available; disabling MKL-DNN")
+        set(WITH_MKLDNN OFF)
+    endif()
+endif()
+
+if(WIN32 OR APPLE OR NOT WITH_GPU OR ON_INFER)
+    set(WITH_DGC OFF)
+endif()
+
+if(${CMAKE_VERSION} VERSION_GREATER "3.5.2")
+    set(SHALLOW_CLONE "GIT_SHALLOW TRUE") # adds --depth=1 arg to git clone of External_Projects
+endif()
+
+
+########################### include third_party according to flags ###############################
+include(third_party/libsndfile)    # download, build, install libsndfile
+include(third_party/boost)         # download boost
+include(third_party/eigen)         # download eigen3
+include(third_party/threadpool)    # download threadpool
+
+
diff --git a/speechnn/cmake/third_party/absl.cmake b/speechnn/cmake/third_party/absl.cmake
new file mode 100644
index 000000000..c2a8eceb5
--- /dev/null
+++ b/speechnn/cmake/third_party/absl.cmake
@@ -0,0 +1,13 @@
+cmake_minimum_required(VERSION 3.14)
+include(ExternalProject)
+include(FetchContent)
+
+FetchContent_Declare(
+  absl
+  GIT_REPOSITORY "https://github.com/abseil/abseil-cpp.git"
+  GIT_TAG "20210324.1"
+)
+
+FetchContent_MakeAvailable(absl)
+
+
diff --git a/speechnn/cmake/third_party/boost.cmake b/speechnn/cmake/third_party/boost.cmake
new file mode 100644
index 000000000..eb0b2c150
--- /dev/null
+++ b/speechnn/cmake/third_party/boost.cmake
@@ -0,0 +1,49 @@
+include(ExternalProject)
+
+set(BOOST_PROJECT       "extern_boost")
+# To release PaddlePaddle as a pip package, we have to follow the
+# manylinux1 standard, which targets Linux kernels and compilers
+# as old as possible and recommends CentOS 5. Indeed, the earliest
+# CentOS version that works with NVIDIA CUDA is CentOS 6. And a new
+# version of boost, say, 1.66.0, doesn't build on CentOS 6. We
+# checked that the devtools package of CentOS 6 installs boost 1.41.0.
+# So we use 1.41.0 here.
+set(BOOST_VER   "1.41.0")
+set(BOOST_TAR   "boost_1_41_0" CACHE STRING "" FORCE)
+set(BOOST_URL   "http://paddlepaddledeps.bj.bcebos.com/${BOOST_TAR}.tar.gz" CACHE STRING "" FORCE)
+
+MESSAGE(STATUS "BOOST_VERSION: ${BOOST_VER}, BOOST_URL: ${BOOST_URL}")
+
+set(BOOST_PREFIX_DIR ${THIRD_PARTY_PATH}/boost)
+set(BOOST_SOURCE_DIR ${THIRD_PARTY_PATH}/boost/src/extern_boost)
+cache_third_party(${BOOST_PROJECT}
+        URL      ${BOOST_URL}
+        DIR      BOOST_SOURCE_DIR)
+
+set(BOOST_INCLUDE_DIR "${BOOST_SOURCE_DIR}" CACHE PATH "boost include directory." FORCE)
+set_directory_properties(PROPERTIES CLEAN_NO_CUSTOM 1)
+include_directories(${BOOST_INCLUDE_DIR})
+
+if(WIN32 AND MSVC_VERSION GREATER_EQUAL 1600)
+    add_definitions(-DBOOST_HAS_STATIC_ASSERT)
+endif()
+
+ExternalProject_Add(
+    ${BOOST_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    "${BOOST_DOWNLOAD_CMD}"
+    URL_MD5               f891e8c2c9424f0565f0129ad9ab4aff
+    PREFIX                ${BOOST_PREFIX_DIR}
+    DOWNLOAD_DIR          ${BOOST_SOURCE_DIR}
+    SOURCE_DIR            ${BOOST_SOURCE_DIR}
+    DOWNLOAD_NO_PROGRESS  1
+    CONFIGURE_COMMAND     ""
+    BUILD_COMMAND         ""
+    INSTALL_COMMAND       ""
+    UPDATE_COMMAND        ""
+    )
+
+add_library(boost INTERFACE)
+
+add_dependencies(boost ${BOOST_PROJECT})
+set(Boost_INCLUDE_DIR ${BOOST_INCLUDE_DIR})
diff --git a/speechnn/cmake/third_party/eigen.cmake b/speechnn/cmake/third_party/eigen.cmake
new file mode 100644
index 000000000..6a0323071
--- /dev/null
+++ b/speechnn/cmake/third_party/eigen.cmake
@@ -0,0 +1,53 @@
+include(ExternalProject)
+
+# update eigen to the commit id f612df27 on 03/16/2021
+set(EIGEN_PREFIX_DIR ${THIRD_PARTY_PATH}/eigen3)
+set(EIGEN_SOURCE_DIR ${THIRD_PARTY_PATH}/eigen3/src/extern_eigen3)
+set(EIGEN_REPOSITORY https://gitlab.com/libeigen/eigen.git)
+set(EIGEN_TAG        f612df273689a19d25b45ca4f8269463207c4fee)
+
+cache_third_party(extern_eigen3
+    REPOSITORY   ${EIGEN_REPOSITORY}
+    TAG          ${EIGEN_TAG}
+    DIR          EIGEN_SOURCE_DIR)
+
+if(WIN32)
+    add_definitions(-DEIGEN_STRONG_INLINE=inline)
+elseif(LINUX)
+    if(WITH_ROCM)
+        # For HIPCC, Eigen::internal::device::numeric_limits is not EIGEN_DEVICE_FUNC,
+        # which causes a compiler error when a __host__ function is used in __host__ __device__ code
+        file(TO_NATIVE_PATH ${PADDLE_SOURCE_DIR}/patches/eigen/Meta.h native_src)
+        file(TO_NATIVE_PATH ${EIGEN_SOURCE_DIR}/Eigen/src/Core/util/Meta.h native_dst)
+        file(TO_NATIVE_PATH ${PADDLE_SOURCE_DIR}/patches/eigen/TensorReductionGpu.h native_src1)
+        file(TO_NATIVE_PATH ${EIGEN_SOURCE_DIR}/unsupported/Eigen/CXX11/src/Tensor/TensorReductionGpu.h native_dst1)
+        set(EIGEN_PATCH_COMMAND cp ${native_src} ${native_dst} && cp ${native_src1} ${native_dst1})
+    endif()
+endif()
+
+set(EIGEN_INCLUDE_DIR ${EIGEN_SOURCE_DIR})
+INCLUDE_DIRECTORIES(${EIGEN_INCLUDE_DIR})
+
+ExternalProject_Add(
+    extern_eigen3
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    ${SHALLOW_CLONE}
+    "${EIGEN_DOWNLOAD_CMD}"
+    PREFIX          ${EIGEN_PREFIX_DIR}
+    SOURCE_DIR      ${EIGEN_SOURCE_DIR}
+    UPDATE_COMMAND    ""
+    PATCH_COMMAND     ${EIGEN_PATCH_COMMAND}
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND     ""
+    INSTALL_COMMAND   ""
+    TEST_COMMAND      ""
+)
+
+add_library(eigen3 INTERFACE)
+
+add_dependencies(eigen3 extern_eigen3)
+
+# the SW platform does not support thread_local semantics
+if(WITH_SW)
+    add_definitions(-DEIGEN_AVOID_THREAD_LOCAL)
+endif()
diff --git a/speechnn/cmake/third_party/libsndfile.cmake b/speechnn/cmake/third_party/libsndfile.cmake
new file mode 100644
index 000000000..05d5c6ed4
--- /dev/null
+++ b/speechnn/cmake/third_party/libsndfile.cmake
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 3.14)
+include(ExternalProject)
+include(FetchContent)
+
+FetchContent_Declare(
+  libsndfile
+  GIT_REPOSITORY https://github.com/libsndfile/libsndfile.git
+  GIT_TAG        v1.0.30 # tag v1.0.30
+)
+
+FetchContent_GetProperties(libsndfile)
diff --git a/speechnn/cmake/third_party/openfst.cmake b/speechnn/cmake/third_party/openfst.cmake
new file mode 100644
index 000000000..39f335a1c
--- /dev/null
+++ b/speechnn/cmake/third_party/openfst.cmake
@@ -0,0 +1,26 @@
+cmake_minimum_required(VERSION 3.14)
+include(ExternalProject)
+include(FetchContent)
+
+FetchContent_Declare(
+  openfst
+  GIT_REPOSITORY https://github.com/kkm000/openfst
+  GIT_TAG        338225416178ac36b8002d70387f5556e44c8d05 # tag win/1.7.2.1
+)
+
+FetchContent_GetProperties(openfst)
+if(NOT openfst_POPULATED)
+  FetchContent_Populate(openfst)
+  include_directories(${openfst_SOURCE_DIR}/src/include)
+
+  add_subdirectory(${openfst_SOURCE_DIR} ${openfst_BINARY_DIR})
+
+  install(DIRECTORY ${openfst_SOURCE_DIR}/src/include/ DESTINATION include/
+          FILES_MATCHING PATTERN "*.h")
+
+  install(TARGETS fst
+          EXPORT kaldi-targets
+          ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+          LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+          RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
+endif()
diff --git a/speechnn/cmake/third_party/openfst_lib_target.cmake b/speechnn/cmake/third_party/openfst_lib_target.cmake
new file mode 100644
index 000000000..dde5efc40
--- /dev/null
+++ b/speechnn/cmake/third_party/openfst_lib_target.cmake
@@ -0,0 +1,31 @@
+if(NOT OPENFST_ROOT_DIR)
+  message(FATAL_ERROR "OPENFST_ROOT_DIR must be set")
+endif()
+
+set(fst_source_dir ${OPENFST_ROOT_DIR}/src/lib)
+set(fst_include_dir ${OPENFST_ROOT_DIR}/src/include)
+
+include_directories(${fst_include_dir})
+file(GLOB fst_sources "${fst_source_dir}/*.cc")
+
+add_library(fst ${fst_sources})
+target_include_directories(fst PUBLIC
+  $<BUILD_INTERFACE:${fst_include_dir}>
+  $<INSTALL_INTERFACE:include/openfst>
+)
+
+install(TARGETS fst
+        EXPORT kaldi-targets
+        ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+        RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+)
+
+install(DIRECTORY ${fst_include_dir}/fst
+        DESTINATION include/openfst
+        PATTERN "test/*.h" EXCLUDE
+)
+
+unset(fst_source_dir)
+unset(fst_include_dir)
+unset(fst_sources)
diff --git a/speechnn/cmake/third_party/threadpool.cmake b/speechnn/cmake/third_party/threadpool.cmake
new file mode 100644
index 000000000..d2c249e9b
--- /dev/null
+++ b/speechnn/cmake/third_party/threadpool.cmake
@@ -0,0 +1,36 @@
+INCLUDE(ExternalProject)
+
+SET(THREADPOOL_PREFIX_DIR ${THIRD_PARTY_PATH}/threadpool)
+SET(THREADPOOL_SOURCE_DIR ${THIRD_PARTY_PATH}/threadpool/src/extern_threadpool)
+if(WITH_ASCEND OR WITH_ASCEND_CL)
+    SET(THREADPOOL_REPOSITORY https://gitee.com/tianjianhe/ThreadPool.git)
+else()
+    SET(THREADPOOL_REPOSITORY ${GIT_URL}/progschj/ThreadPool.git)
+endif()
+SET(THREADPOOL_TAG 9a42ec1329f259a5f4881a291db1dcb8f2ad9040)
+
+cache_third_party(extern_threadpool
+    REPOSITORY   ${THREADPOOL_REPOSITORY}
+    TAG          ${THREADPOOL_TAG}
+    DIR          THREADPOOL_SOURCE_DIR)
+
+SET(THREADPOOL_INCLUDE_DIR ${THREADPOOL_SOURCE_DIR})
+INCLUDE_DIRECTORIES(${THREADPOOL_INCLUDE_DIR})
+
+ExternalProject_Add(
+    extern_threadpool
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    ${SHALLOW_CLONE}
+    "${THREADPOOL_DOWNLOAD_CMD}"
+    PREFIX          ${THREADPOOL_PREFIX_DIR}
+    SOURCE_DIR      ${THREADPOOL_SOURCE_DIR}
+    UPDATE_COMMAND    ""
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND     ""
+    INSTALL_COMMAND   ""
+    TEST_COMMAND      ""
+)
+
+add_library(simple_threadpool INTERFACE)
+
+add_dependencies(simple_threadpool extern_threadpool)
diff --git a/speechnn/cmake/third_party/version.cmake b/speechnn/cmake/third_party/version.cmake
new file mode 100644
index 000000000..c3780ee69
--- /dev/null
+++ b/speechnn/cmake/third_party/version.cmake
@@ -0,0 +1,15 @@
+function(get_version)
+  file(READ ${CMAKE_CURRENT_SOURCE_DIR}/src/.version version)
+  string(STRIP ${version} version)
+  execute_process(COMMAND git log -n1 --format=%H src/.version
+                  WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+                  OUTPUT_VARIABLE version_commit
+                  OUTPUT_STRIP_TRAILING_WHITESPACE)
+  execute_process(COMMAND git rev-list --count "${version_commit}..HEAD"
+                  WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+                  OUTPUT_VARIABLE patch_number)
+  string(STRIP ${patch_number}
patch_number) + + set(KALDI_VERSION ${version} PARENT_SCOPE) + set(KALDI_PATCH_NUMBER ${patch_number} PARENT_SCOPE) +endfunction() diff --git a/speechnn/core/transformers/.gitkeep b/speechnn/core/transformers/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/core/transformers/README.md b/speechnn/core/transformers/README.md new file mode 100644 index 000000000..edbcb9cc3 --- /dev/null +++ b/speechnn/core/transformers/README.md @@ -0,0 +1,9 @@ +# Fast Transformers for Speech + +- Conformer +- Transformer + +## Reference + +* https://github.com/NVIDIA/FasterTransformer.git +* https://github.com/idiap/fast-transformers diff --git a/speechnn/examples/.gitkeep b/speechnn/examples/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/examples/CMakeLists.txt b/speechnn/examples/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/CMakeLists.txt b/speechnn/speechnn/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/decoder/CMakeLists.txt b/speechnn/speechnn/decoder/CMakeLists.txt new file mode 100644 index 000000000..259261bdf --- /dev/null +++ b/speechnn/speechnn/decoder/CMakeLists.txt @@ -0,0 +1,2 @@ +aux_source_directory(. DIR_LIB_SRCS) +add_library(decoder STATIC ${DIR_LIB_SRCS}) diff --git a/speechnn/speechnn/frontend/CMakeLists.txt b/speechnn/speechnn/frontend/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/frontend/audio/CMakeLists.txt b/speechnn/speechnn/frontend/audio/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/frontend/text/CMakeLists.txt b/speechnn/speechnn/frontend/text/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/model/CMakeLists.txt b/speechnn/speechnn/model/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/nn/CMakeLists.txt b/speechnn/speechnn/nn/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/protocol/CMakeLists.txt b/speechnn/speechnn/protocol/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/speechnn/speechnn/utils/CMakeLists.txt b/speechnn/speechnn/utils/CMakeLists.txt new file mode 100644 index 000000000..e69de29bb diff --git a/third_party/__init__.py b/third_party/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/third_party/nnAudio/.gitignore b/third_party/nnAudio/.gitignore new file mode 100644 index 000000000..c09b85733 --- /dev/null +++ b/third_party/nnAudio/.gitignore @@ -0,0 +1,3 @@ +build +dist +*.egg-info/ diff --git a/third_party/nnAudio/nnAudio/Spectrogram.py b/third_party/nnAudio/nnAudio/Spectrogram.py new file mode 100755 index 000000000..b5d798457 --- /dev/null +++ b/third_party/nnAudio/nnAudio/Spectrogram.py @@ -0,0 +1,2443 @@ +""" +Module containing all the spectrogram classes +""" + +# 0.2.0 + +import torch +import torch.nn as nn +from torch.nn.functional import conv1d, conv2d, fold +import scipy # used only in CFP + +import numpy as np +from time import time + +# from nnAudio.librosa_functions import * # For debug purpose +# from nnAudio.utils import * + +from .librosa_functions import * +from .utils import * + +sz_float = 4 # size of a float +epsilon = 10e-8 # fudge factor for normalization + +### --------------------------- Spectrogram Classes ---------------------------### +class STFT(torch.nn.Module): + """This function is to calculate the 
short-time Fourier transform (STFT) of the input signal. + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + + The correct shape will be inferred automatically if the input follows these 3 shapes. + Most of the arguments follow the convention from librosa. + This class inherits from ``torch.nn.Module``, therefore, the usage is same as ``torch.nn.Module``. + + Parameters + ---------- + n_fft : int + Size of Fourier transform. Default value is 2048. + + win_length : int + the size of window frame and STFT filter. + Default: None (treated as equal to n_fft) + + freq_bins : int + Number of frequency bins. Default is ``None``, which means ``n_fft//2+1`` bins. + + hop_length : int + The hop (or stride) size. Default value is ``None`` which is equivalent to ``n_fft//4``. + + window : str + The windowing function for STFT. It uses ``scipy.signal.get_window``, please refer to + scipy documentation for possible windowing functions. The default value is 'hann'. + + freq_scale : 'linear', 'log', or 'no' + Determine the spacing between each frequency bin. When `linear` or `log` is used, + the bin spacing can be controlled by ``fmin`` and ``fmax``. If 'no' is used, the bin will + start at 0Hz and end at Nyquist frequency with linear spacing. + + center : bool + Putting the STFT keneral at the center of the time-step or not. If ``False``, the time + index is the beginning of the STFT kernel, if ``True``, the time index is the center of + the STFT kernel. Default value if ``True``. + + pad_mode : str + The padding method. Default value is 'reflect'. + + iSTFT : bool + To activate the iSTFT module or not. By default, it is False to save GPU memory. + Note: The iSTFT kernel is not trainable. If you want + a trainable iSTFT, use the iSTFT module. + + fmin : int + The starting frequency for the lowest frequency bin. If freq_scale is ``no``, this argument + does nothing. + + fmax : int + The ending frequency for the highest frequency bin. If freq_scale is ``no``, this argument + does nothing. + + sr : int + The sampling rate for the input audio. It is used to calucate the correct ``fmin`` and ``fmax``. + Setting the correct sampling rate is very important for calculating the correct frequency. + + trainable : bool + Determine if the STFT kenrels are trainable or not. If ``True``, the gradients for STFT + kernels will also be caluclated and the STFT kernels will be updated during model training. + Default value is ``False`` + + output_format : str + Control the spectrogram output type, either ``Magnitude``, ``Complex``, or ``Phase``. + The output_format can also be changed during the ``forward`` method. + + verbose : bool + If ``True``, it shows layer information. If ``False``, it suppresses all prints + + Returns + ------- + spectrogram : torch.tensor + It returns a tensor of spectrograms. 
+ ``shape = (num_samples, freq_bins,time_steps)`` if ``output_format='Magnitude'``; + ``shape = (num_samples, freq_bins,time_steps, 2)`` if ``output_format='Complex' or 'Phase'``; + + Examples + -------- + >>> spec_layer = Spectrogram.STFT() + >>> specs = spec_layer(x) + """ + + def __init__(self, n_fft=2048, win_length=None, freq_bins=None, hop_length=None, window='hann', + freq_scale='no', center=True, pad_mode='reflect', iSTFT=False, + fmin=50, fmax=6000, sr=22050, trainable=False, + output_format="Complex", verbose=True): + + super().__init__() + + # Trying to make the default setting same as librosa + if win_length==None: win_length = n_fft + if hop_length==None: hop_length = int(win_length // 4) + + self.output_format = output_format + self.trainable = trainable + self.stride = hop_length + self.center = center + self.pad_mode = pad_mode + self.n_fft = n_fft + self.freq_bins = freq_bins + self.trainable = trainable + self.pad_amount = self.n_fft // 2 + self.window = window + self.win_length = win_length + self.iSTFT = iSTFT + self.trainable = trainable + start = time() + + + + # Create filter windows for stft + kernel_sin, kernel_cos, self.bins2freq, self.bin_list, window_mask = create_fourier_kernels(n_fft, + win_length=win_length, + freq_bins=freq_bins, + window=window, + freq_scale=freq_scale, + fmin=fmin, + fmax=fmax, + sr=sr, + verbose=verbose) + + + kernel_sin = torch.tensor(kernel_sin, dtype=torch.float) + kernel_cos = torch.tensor(kernel_cos, dtype=torch.float) + + # In this way, the inverse kernel and the forward kernel do not share the same memory... + kernel_sin_inv = torch.cat((kernel_sin, -kernel_sin[1:-1].flip(0)), 0) + kernel_cos_inv = torch.cat((kernel_cos, kernel_cos[1:-1].flip(0)), 0) + + + + if iSTFT: + self.register_buffer('kernel_sin_inv', kernel_sin_inv.unsqueeze(-1)) + self.register_buffer('kernel_cos_inv', kernel_cos_inv.unsqueeze(-1)) + + # Making all these variables nn.Parameter, so that the model can be used with nn.Parallel +# self.kernel_sin = torch.nn.Parameter(self.kernel_sin, requires_grad=self.trainable) +# self.kernel_cos = torch.nn.Parameter(self.kernel_cos, requires_grad=self.trainable) + + # Applying window functions to the Fourier kernels + if window: + window_mask = torch.tensor(window_mask) + wsin = kernel_sin * window_mask + wcos = kernel_cos * window_mask + else: + wsin = kernel_sin + wcos = kernel_cos + + if self.trainable==False: + self.register_buffer('wsin', wsin) + self.register_buffer('wcos', wcos) + + if self.trainable==True: + wsin = torch.nn.Parameter(wsin, requires_grad=self.trainable) + wcos = torch.nn.Parameter(wcos, requires_grad=self.trainable) + self.register_parameter('wsin', wsin) + self.register_parameter('wcos', wcos) + + # Prepare the shape of window mask so that it can be used later in inverse + self.register_buffer('window_mask', window_mask.unsqueeze(0).unsqueeze(-1)) + + + + if verbose==True: + print("STFT kernels created, time used = {:.4f} seconds".format(time()-start)) + else: + pass + + def forward(self, x, output_format=None): + """ + Convert a batch of waveforms to spectrograms. + + Parameters + ---------- + x : torch tensor + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + It will be automatically broadcast to the right shape + + output_format : str + Control the type of spectrogram to be return. Can be either ``Magnitude`` or ``Complex`` or ``Phase``. + Default value is ``Complex``. 
+ + """ + output_format = output_format or self.output_format + self.num_samples = x.shape[-1] + + x = broadcast_dim(x) + if self.center: + if self.pad_mode == 'constant': + padding = nn.ConstantPad1d(self.pad_amount, 0) + + elif self.pad_mode == 'reflect': + if self.num_samples < self.pad_amount: + raise AssertionError("Signal length shorter than reflect padding length (n_fft // 2).") + padding = nn.ReflectionPad1d(self.pad_amount) + + x = padding(x) + spec_imag = conv1d(x, self.wsin, stride=self.stride) + spec_real = conv1d(x, self.wcos, stride=self.stride) # Doing STFT by using conv1d + + # remove redundant parts + spec_real = spec_real[:, :self.freq_bins, :] + spec_imag = spec_imag[:, :self.freq_bins, :] + + if output_format=='Magnitude': + spec = spec_real.pow(2) + spec_imag.pow(2) + if self.trainable==True: + return torch.sqrt(spec+1e-8) # prevent Nan gradient when sqrt(0) due to output=0 + else: + return torch.sqrt(spec) + + elif output_format=='Complex': + return torch.stack((spec_real,-spec_imag), -1) # Remember the minus sign for imaginary part + + elif output_format=='Phase': + return torch.atan2(-spec_imag+0.0,spec_real) # +0.0 removes -0.0 elements, which leads to error in calculating phase + + def inverse(self, X, onesided=True, length=None, refresh_win=True): + """ + This function is same as the :func:`~nnAudio.Spectrogram.iSTFT` class, + which is to convert spectrograms back to waveforms. + It only works for the complex value spectrograms. If you have the magnitude spectrograms, + please use :func:`~nnAudio.Spectrogram.Griffin_Lim`. + + Parameters + ---------- + onesided : bool + If your spectrograms only have ``n_fft//2+1`` frequency bins, please use ``onesided=True``, + else use ``onesided=False`` + + length : int + To make sure the inverse STFT has the same output length of the original waveform, please + set `length` as your intended waveform length. By default, ``length=None``, + which will remove ``n_fft//2`` samples from the start and the end of the output. + + refresh_win : bool + Recalculating the window sum square. If you have an input with fixed number of timesteps, + you can increase the speed by setting ``refresh_win=False``. Else please keep ``refresh_win=True`` + + + """ + if (hasattr(self, 'kernel_sin_inv') != True) or (hasattr(self, 'kernel_cos_inv') != True): + raise NameError("Please activate the iSTFT module by setting `iSTFT=True` if you want to use `inverse`") + + assert X.dim()==4 , "Inverse iSTFT only works for complex number," \ + "make sure our tensor is in the shape of (batch, freq_bins, timesteps, 2)."\ + "\nIf you have a magnitude spectrogram, please consider using Griffin-Lim." + if onesided: + X = extend_fbins(X) # extend freq + + + X_real, X_imag = X[:, :, :, 0], X[:, :, :, 1] + + # broadcast dimensions to support 2D convolution + X_real_bc = X_real.unsqueeze(1) + X_imag_bc = X_imag.unsqueeze(1) + a1 = conv2d(X_real_bc, self.kernel_cos_inv, stride=(1,1)) + b2 = conv2d(X_imag_bc, self.kernel_sin_inv, stride=(1,1)) + + # compute real and imag part. 
signal lies in the real part + real = a1 - b2 + real = real.squeeze(-2)*self.window_mask + + # Normalize the amplitude with n_fft + real /= (self.n_fft) + + # Overlap and Add algorithm to connect all the frames + real = overlap_add(real, self.stride) + + # Prepare the window sumsqure for division + # Only need to create this window once to save time + # Unless the input spectrograms have different time steps + if hasattr(self, 'w_sum')==False or refresh_win==True: + self.w_sum = torch_window_sumsquare(self.window_mask.flatten(), X.shape[2], self.stride, self.n_fft).flatten() + self.nonzero_indices = (self.w_sum>1e-10) + else: + pass + real[:, self.nonzero_indices] = real[:,self.nonzero_indices].div(self.w_sum[self.nonzero_indices]) + # Remove padding + if length is None: + if self.center: + real = real[:, self.pad_amount:-self.pad_amount] + + else: + if self.center: + real = real[:, self.pad_amount:self.pad_amount + length] + else: + real = real[:, :length] + + return real + + def extra_repr(self) -> str: + return 'n_fft={}, Fourier Kernel size={}, iSTFT={}, trainable={}'.format( + self.n_fft, (*self.wsin.shape,), self.iSTFT, self.trainable + ) + + +class MelSpectrogram(torch.nn.Module): + """This function is to calculate the Melspectrogram of the input signal. + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + + The correct shape will be inferred automatically if the input follows these 3 shapes. + Most of the arguments follow the convention from librosa. + This class inherits from ``torch.nn.Module``, therefore, the usage is same as ``torch.nn.Module``. + + Parameters + ---------- + sr : int + The sampling rate for the input audio. + It is used to calculate the correct ``fmin`` and ``fmax``. + Setting the correct sampling rate is very important for calculating the correct frequency. + + n_fft : int + The window size for the STFT. Default value is 2048 + + win_length : int + the size of window frame and STFT filter. + Default: None (treated as equal to n_fft) + + n_mels : int + The number of Mel filter banks. The filter banks maps the n_fft to mel bins. + Default value is 128. + + hop_length : int + The hop (or stride) size. Default value is 512. + + window : str + The windowing function for STFT. It uses ``scipy.signal.get_window``, please refer to + scipy documentation for possible windowing functions. The default value is 'hann'. + + center : bool + Putting the STFT keneral at the center of the time-step or not. If ``False``, + the time index is the beginning of the STFT kernel, if ``True``, the time index is the + center of the STFT kernel. Default value if ``True``. + + pad_mode : str + The padding method. Default value is 'reflect'. + + htk : bool + When ``False`` is used, the Mel scale is quasi-logarithmic. When ``True`` is used, the + Mel scale is logarithmic. The default value is ``False``. + + fmin : int + The starting frequency for the lowest Mel filter bank. + + fmax : int + The ending frequency for the highest Mel filter bank. + + norm : + if 1, divide the triangular mel weights by the width of the mel band + (area normalization, AKA 'slaney' default in librosa). + Otherwise, leave all the triangles aiming for + a peak value of 1.0 + + trainable_mel : bool + Determine if the Mel filter banks are trainable or not. If ``True``, the gradients for Mel + filter banks will also be calculated and the Mel filter banks will be updated during model + training. 
Default value is ``False``. + + trainable_STFT : bool + Determine if the STFT kenrels are trainable or not. If ``True``, the gradients for STFT + kernels will also be caluclated and the STFT kernels will be updated during model training. + Default value is ``False``. + + verbose : bool + If ``True``, it shows layer information. If ``False``, it suppresses all prints. + + Returns + ------- + spectrogram : torch.tensor + It returns a tensor of spectrograms. shape = ``(num_samples, freq_bins,time_steps)``. + + Examples + -------- + >>> spec_layer = Spectrogram.MelSpectrogram() + >>> specs = spec_layer(x) + """ + + def __init__(self, sr=22050, n_fft=2048, win_length=None, n_mels=128, hop_length=512, + window='hann', center=True, pad_mode='reflect', power=2.0, htk=False, + fmin=0.0, fmax=None, norm=1, trainable_mel=False, trainable_STFT=False, + verbose=True, **kwargs): + + super().__init__() + self.stride = hop_length + self.center = center + self.pad_mode = pad_mode + self.n_fft = n_fft + self.power = power + self.trainable_mel = trainable_mel + self.trainable_STFT = trainable_STFT + + # Preparing for the stft layer. No need for center + self.stft = STFT(n_fft=n_fft, win_length=win_length, freq_bins=None, + hop_length=hop_length, window=window, freq_scale='no', + center=center, pad_mode=pad_mode, sr=sr, trainable=trainable_STFT, + output_format="Magnitude", verbose=verbose, **kwargs) + + + # Create filter windows for stft + start = time() + + # Creating kernel for mel spectrogram + start = time() + mel_basis = mel(sr, n_fft, n_mels, fmin, fmax, htk=htk, norm=norm) + mel_basis = torch.tensor(mel_basis) + + if verbose==True: + print("STFT filter created, time used = {:.4f} seconds".format(time()-start)) + print("Mel filter created, time used = {:.4f} seconds".format(time()-start)) + else: + pass + + if trainable_mel: + # Making everything nn.Parameter, so that this model can support nn.DataParallel + mel_basis = torch.nn.Parameter(mel_basis, requires_grad=trainable_mel) + self.register_parameter('mel_basis', mel_basis) + else: + self.register_buffer('mel_basis', mel_basis) + + # if trainable_mel==True: + # self.mel_basis = torch.nn.Parameter(self.mel_basis) + # if trainable_STFT==True: + # self.wsin = torch.nn.Parameter(self.wsin) + # self.wcos = torch.nn.Parameter(self.wcos) + + def forward(self, x): + """ + Convert a batch of waveforms to Mel spectrograms. + + Parameters + ---------- + x : torch tensor + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + It will be automatically broadcast to the right shape + """ + x = broadcast_dim(x) + + spec = self.stft(x, output_format='Magnitude')**self.power + + melspec = torch.matmul(self.mel_basis, spec) + return melspec + + def extra_repr(self) -> str: + return 'Mel filter banks size = {}, trainable_mel={}'.format( + (*self.mel_basis.shape,), self.trainable_mel, self.trainable_STFT + ) + + +class MFCC(torch.nn.Module): + """This function is to calculate the Mel-frequency cepstral coefficients (MFCCs) of the input signal. + This algorithm first extracts Mel spectrograms from the audio clips, + then the discrete cosine transform is calcuated to obtain the final MFCCs. + Therefore, the Mel spectrogram part can be made trainable using + ``trainable_mel`` and ``trainable_STFT``. + It only support type-II DCT at the moment. Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. 
``(num_audio, 1, len_audio)`` + + The correct shape will be inferred autommatically if the input follows these 3 shapes. + Most of the arguments follow the convention from librosa. + This class inherits from ``torch.nn.Module``, therefore, the usage is same as ``torch.nn.Module``. + + Parameters + ---------- + sr : int + The sampling rate for the input audio. It is used to calculate the correct ``fmin`` and ``fmax``. + Setting the correct sampling rate is very important for calculating the correct frequency. + + n_mfcc : int + The number of Mel-frequency cepstral coefficients + + norm : string + The default value is 'ortho'. Normalization for DCT basis + + **kwargs + Other arguments for Melspectrogram such as n_fft, n_mels, hop_length, and window + + Returns + ------- + MFCCs : torch.tensor + It returns a tensor of MFCCs. shape = ``(num_samples, n_mfcc, time_steps)``. + + Examples + -------- + >>> spec_layer = Spectrogram.MFCC() + >>> mfcc = spec_layer(x) + """ + + def __init__(self, sr=22050, n_mfcc=20, norm='ortho', verbose=True, ref=1.0, amin=1e-10, top_db=80.0, **kwargs): + super().__init__() + self.melspec_layer = MelSpectrogram(sr=sr, verbose=verbose, **kwargs) + self.m_mfcc = n_mfcc + + # attributes that will be used for _power_to_db + if amin <= 0: + raise ParameterError('amin must be strictly positive') + amin = torch.tensor([amin]) + ref = torch.abs(torch.tensor([ref])) + self.register_buffer('amin', amin) + self.register_buffer('ref', ref) + self.top_db = top_db + self.n_mfcc = n_mfcc + + def _power_to_db(self, S): + ''' + Refer to https://librosa.github.io/librosa/_modules/librosa/core/spectrum.html#power_to_db + for the original implmentation. + ''' + + log_spec = 10.0 * torch.log10(torch.max(S, self.amin)) + log_spec -= 10.0 * torch.log10(torch.max(self.amin, self.ref)) + if self.top_db is not None: + if self.top_db < 0: + raise ParameterError('top_db must be non-negative') + + # make the dim same as log_spec so that it can be broadcasted + batch_wise_max = log_spec.flatten(1).max(1)[0].unsqueeze(1).unsqueeze(1) + log_spec = torch.max(log_spec, batch_wise_max - self.top_db) + + return log_spec + + def _dct(self, x, norm=None): + ''' + Refer to https://github.com/zh217/torch-dct for the original implmentation. + ''' + x = x.permute(0,2,1) # make freq the last axis, since dct applies to the frequency axis + x_shape = x.shape + N = x_shape[-1] + + v = torch.cat([x[:, :, ::2], x[:, :, 1::2].flip([2])], dim=2) + Vc = torch.rfft(v, 1, onesided=False) + + # TODO: Can make the W_r and W_i trainable here + k = - torch.arange(N, dtype=x.dtype, device=x.device)[None, :] * np.pi / (2 * N) + W_r = torch.cos(k) + W_i = torch.sin(k) + + V = Vc[:, :, :, 0] * W_r - Vc[:, :, :, 1] * W_i + + if norm == 'ortho': + V[:, :, 0] /= np.sqrt(N) * 2 + V[:, :, 1:] /= np.sqrt(N / 2) * 2 + + V = 2 * V + + return V.permute(0,2,1) # swapping back the time axis and freq axis + + def forward(self, x): + """ + Convert a batch of waveforms to MFCC. + + Parameters + ---------- + x : torch tensor + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. 
+            ``(num_audio, 1, len_audio)``
+            It will be automatically broadcast to the right shape
+        """
+
+        x = self.melspec_layer(x)
+        x = self._power_to_db(x)
+        x = self._dct(x, norm='ortho')[:,:self.m_mfcc,:]
+        return x
+
+    def extra_repr(self) -> str:
+        return 'n_mfcc = {}'.format(self.n_mfcc)
+
+
+class Gammatonegram(torch.nn.Module):
+    """
+    This function is to calculate the Gammatonegram of the input signal.
+    Input signal should be in either of the following shapes.\n
+    1. ``(len_audio)``\n
+    2. ``(num_audio, len_audio)``\n
+    3. ``(num_audio, 1, len_audio)``
+
+    The correct shape will be inferred automatically if the input follows these 3 shapes.
+    This class inherits from ``torch.nn.Module``, therefore, the usage is the same as ``torch.nn.Module``.
+
+    Parameters
+    ----------
+    sr : int
+        The sampling rate for the input audio. It is used to calculate the correct ``fmin`` and ``fmax``.
+        Setting the correct sampling rate is very important for calculating the correct frequency.
+
+    n_fft : int
+        The window size for the STFT. Default value is 2048.
+
+    n_bins : int
+        The number of Gammatone filter banks. The filter banks map the n_fft to Gammatone bins.
+        Default value is 64.
+
+    hop_length : int
+        The hop (or stride) size. Default value is 512.
+
+    window : str
+        The windowing function for STFT. It uses ``scipy.signal.get_window``, please refer to
+        scipy documentation for possible windowing functions. The default value is 'hann'.
+
+    center : bool
+        Putting the STFT kernel at the center of the time-step or not. If ``False``, the time index
+        is the beginning of the STFT kernel, if ``True``, the time index is the center of the STFT
+        kernel. Default value is ``True``.
+
+    pad_mode : str
+        The padding method. Default value is 'reflect'.
+
+    htk : bool
+        When ``False`` is used, the Mel scale is quasi-logarithmic. When ``True`` is used, the Mel
+        scale is logarithmic. The default value is ``False``.
+
+    fmin : int
+        The starting frequency for the lowest Gammatone filter bank.
+
+    fmax : int
+        The ending frequency for the highest Gammatone filter bank.
+
+    trainable_bins : bool
+        Determine if the Gammatone filter banks are trainable or not. If ``True``, the gradients
+        for the Gammatone filter banks will also be calculated and the filter banks will be updated
+        during model training. Default value is ``False``.
+
+    trainable_STFT : bool
+        Determine if the STFT kernels are trainable or not. If ``True``, the gradients for STFT
+        kernels will also be calculated and the STFT kernels will be updated during model training.
+        Default value is ``False``.
+
+    verbose : bool
+        If ``True``, it shows layer information. If ``False``, it suppresses all prints.
+
+    Returns
+    -------
+    spectrogram : torch.tensor
+        It returns a tensor of spectrograms. shape = ``(num_samples, freq_bins, time_steps)``.
+ + Examples + -------- + >>> spec_layer = Spectrogram.Gammatonegram() + >>> specs = spec_layer(x) + """ + + def __init__(self, sr=44100, n_fft=2048, n_bins=64, hop_length=512, window='hann', center=True, pad_mode='reflect', + power=2.0, htk=False, fmin=20.0, fmax=None, norm=1, trainable_bins=False, trainable_STFT=False, + verbose=True): + super(Gammatonegram, self).__init__() + self.stride = hop_length + self.center = center + self.pad_mode = pad_mode + self.n_fft = n_fft + self.power = power + + # Create filter windows for stft + start = time() + wsin, wcos, self.bins2freq, _, _ = create_fourier_kernels(n_fft, freq_bins=None, window=window, freq_scale='no', + sr=sr) + + wsin = torch.tensor(wsin, dtype=torch.float) + wcos = torch.tensor(wcos, dtype=torch.float) + + if trainable_STFT: + wsin = torch.nn.Parameter(wsin, requires_grad=trainable_STFT) + wcos = torch.nn.Parameter(wcos, requires_grad=trainable_STFT) + self.register_parameter('wsin', wsin) + self.register_parameter('wcos', wcos) + else: + self.register_buffer('wsin', wsin) + self.register_buffer('wcos', wcos) + + # Creating kenral for Gammatone spectrogram + start = time() + gammatone_basis = gammatone(sr, n_fft, n_bins, fmin, fmax) + gammatone_basis = torch.tensor(gammatone_basis) + + if verbose == True: + print("STFT filter created, time used = {:.4f} seconds".format(time() - start)) + print("Gammatone filter created, time used = {:.4f} seconds".format(time() - start)) + else: + pass + # Making everything nn.Prarmeter, so that this model can support nn.DataParallel + + if trainable_bins: + gammatone_basis = torch.nn.Parameter(gammatone_basis, requires_grad=trainable_bins) + self.register_parameter('gammatone_basis', gammatone_basis) + else: + self.register_buffer('gammatone_basis', gammatone_basis) + + # if trainable_mel==True: + # self.mel_basis = torch.nn.Parameter(self.mel_basis) + # if trainable_STFT==True: + # self.wsin = torch.nn.Parameter(self.wsin) + # self.wcos = torch.nn.Parameter(self.wcos) + + def forward(self, x): + x = broadcast_dim(x) + if self.center: + if self.pad_mode == 'constant': + padding = nn.ConstantPad1d(self.n_fft // 2, 0) + elif self.pad_mode == 'reflect': + padding = nn.ReflectionPad1d(self.n_fft // 2) + + x = padding(x) + + spec = torch.sqrt(conv1d(x, self.wsin, stride=self.stride).pow(2) \ + + conv1d(x, self.wcos, stride=self.stride).pow(2)) ** self.power # Doing STFT by using conv1d + + gammatonespec = torch.matmul(self.gammatone_basis, spec) + return gammatonespec + + +class CQT1992(torch.nn.Module): + """ + This alogrithm uses the method proposed in [1], which would run extremely slow if low frequencies (below 220Hz) + are included in the frequency bins. + Please refer to :func:`~nnAudio.Spectrogram.CQT1992v2` for a more + computational and memory efficient version. + [1] Brown, Judith C.C. and Miller Puckette. “An efficient algorithm for the calculation of a + constant Q transform.” (1992). + + This function is to calculate the CQT of the input signal. + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + + The correct shape will be inferred autommatically if the input follows these 3 shapes. + Most of the arguments follow the convention from librosa. + This class inherits from ``torch.nn.Module``, therefore, the usage is same as ``torch.nn.Module``. + + + + Parameters + ---------- + sr : int + The sampling rate for the input audio. It is used to calucate the correct ``fmin`` and ``fmax``. 
+        Setting the correct sampling rate is very important for calculating the correct frequency.
+
+    hop_length : int
+        The hop (or stride) size. Default value is 512.
+
+    fmin : float
+        The frequency for the lowest CQT bin. Default is 220Hz. Unlike the other CQT classes,
+        this 1992 implementation defaults to 220Hz rather than 32.70Hz (note C0), because this
+        algorithm becomes extremely slow when bins below 220Hz are included.
+
+    fmax : float
+        The frequency for the highest CQT bin. Default is ``None``, therefore the highest CQT bin is
+        inferred from the ``n_bins`` and ``bins_per_octave``.
+        If ``fmax`` is not ``None``, then the argument ``n_bins`` will be ignored and ``n_bins``
+        will be calculated automatically. Default is ``None``.
+
+    n_bins : int
+        The total number of CQT bins. Default is 84. Will be ignored if ``fmax`` is not ``None``.
+
+    bins_per_octave : int
+        Number of bins per octave. Default is 12.
+
+    filter_scale : float > 0
+        Filter scale factor. Values smaller than 1 can be used to improve the time resolution at
+        the cost of degrading the frequency resolution. Default is 1.
+
+    trainable_STFT : bool
+        Determine if the time to frequency domain transformation kernel for the input audio is
+        trainable or not. Default is ``False``.
+
+    trainable_CQT : bool
+        Determine if the frequency domain CQT kernel is trainable or not. Default is ``False``.
+
+    norm : int
+        Normalization for the CQT kernels. ``1`` means L1 normalization, and ``2`` means L2
+        normalization. Default is ``1``, which is the same as the normalization used in librosa.
+        (Note: this argument is currently not applied in this implementation.)
+
+    window : str
+        The windowing function for CQT. It uses ``scipy.signal.get_window``, please refer to
+        scipy documentation for possible windowing functions. The default value is 'hann'.
+
+    center : bool
+        Putting the CQT kernel at the center of the time-step or not. If ``False``, the time index is
+        the beginning of the CQT kernel, if ``True``, the time index is the center of the CQT kernel.
+        Default value is ``True``.
+
+    pad_mode : str
+        The padding method. Default value is 'reflect'.
+
+    output_format : str
+        Determine the return type.
+        ``Magnitude`` will return the magnitude of the STFT result, shape = ``(num_samples, freq_bins, time_steps)``;
+        ``Complex`` will return the STFT result in complex number, shape = ``(num_samples, freq_bins, time_steps, 2)``;
+        ``Phase`` will return the phase of the STFT result, shape = ``(num_samples, freq_bins, time_steps, 2)``.
+        The complex number is stored as ``(real, imag)`` in the last axis. Default value is 'Magnitude'.
+
+    Returns
+    -------
+    spectrogram : torch.tensor
+        It returns a tensor of spectrograms.
+        shape = ``(num_samples, freq_bins, time_steps)`` if ``output_format='Magnitude'``;
+        shape = ``(num_samples, freq_bins, time_steps, 2)`` if ``output_format='Complex' or 'Phase'``;
+
+    Examples
+    --------
+    >>> spec_layer = Spectrogram.CQT1992()
+    >>> specs = spec_layer(x)
+    """
+
+    def __init__(self, sr=22050, hop_length=512, fmin=220, fmax=None, n_bins=84,
+                 trainable_STFT=False, trainable_CQT=False, bins_per_octave=12, filter_scale=1,
+                 output_format='Magnitude', norm=1, window='hann', center=True, pad_mode='reflect'):
+
+        super().__init__()
+
+        # norm arg is not functioning
+        self.hop_length = hop_length
+        self.center = center
+        self.pad_mode = pad_mode
+        self.norm = norm
+        self.output_format = output_format
+
+        # creating kernels for CQT
+        Q = float(filter_scale)/(2**(1/bins_per_octave)-1)
+
+        print("Creating CQT kernels ...", end='\r')
+        start = time()
+        cqt_kernels, self.kernel_width, lenghts, freqs = create_cqt_kernels(Q,
+                                                                            sr,
+                                                                            fmin,
+                                                                            n_bins,
+                                                                            bins_per_octave,
+                                                                            norm,
+                                                                            window,
+                                                                            fmax)
+
+        self.register_buffer('lenghts', lenghts)
+        self.frequencies = freqs
+
+        cqt_kernels = fft(cqt_kernels)[:,:self.kernel_width//2+1]
+        print("CQT kernels created, time used = {:.4f} seconds".format(time()-start))
+
+        # creating kernels for stft
+        # self.cqt_kernels_real*=lenghts.unsqueeze(1)/self.kernel_width # Trying to normalize as librosa
+        # self.cqt_kernels_imag*=lenghts.unsqueeze(1)/self.kernel_width
+
+        print("Creating STFT kernels ...", end='\r')
+        start = time()
+        kernel_sin, kernel_cos, self.bins2freq, _, window = create_fourier_kernels(self.kernel_width,
+                                                                                   window='ones',
+                                                                                   freq_scale='no')
+
+        # Converting kernels from numpy arrays to torch tensors
+        wsin = torch.tensor(kernel_sin * window)
+        wcos = torch.tensor(kernel_cos * window)
+
+        cqt_kernels_real = torch.tensor(cqt_kernels.real.astype(np.float32))
+        cqt_kernels_imag = torch.tensor(cqt_kernels.imag.astype(np.float32))
+
+        if trainable_STFT:
+            wsin = torch.nn.Parameter(wsin, requires_grad=trainable_STFT)
+            wcos = torch.nn.Parameter(wcos, requires_grad=trainable_STFT)
+            self.register_parameter('wsin', wsin)
+            self.register_parameter('wcos', wcos)
+        else:
+            self.register_buffer('wsin', wsin)
+            self.register_buffer('wcos', wcos)
+
+        if trainable_CQT:
+            cqt_kernels_real = torch.nn.Parameter(cqt_kernels_real, requires_grad=trainable_CQT)
+            cqt_kernels_imag = torch.nn.Parameter(cqt_kernels_imag, requires_grad=trainable_CQT)
+            self.register_parameter('cqt_kernels_real', cqt_kernels_real)
+            self.register_parameter('cqt_kernels_imag', cqt_kernels_imag)
+        else:
+            self.register_buffer('cqt_kernels_real', cqt_kernels_real)
+            self.register_buffer('cqt_kernels_imag', cqt_kernels_imag)
+
+        print("STFT kernels created, time used = {:.4f} seconds".format(time()-start))
+
+    def forward(self, x, output_format=None, normalization_type='librosa'):
+        """
+        Convert a batch of waveforms to CQT spectrograms.
+
+        Parameters
+        ----------
+        x : torch tensor
+            Input signal should be in either of the following shapes.\n
+            1. ``(len_audio)``\n
+            2. ``(num_audio, len_audio)``\n
+            3.
``(num_audio, 1, len_audio)`` + It will be automatically broadcast to the right shape + """ + output_format = output_format or self.output_format + + x = broadcast_dim(x) + if self.center: + if self.pad_mode == 'constant': + padding = nn.ConstantPad1d(self.kernel_width//2, 0) + elif self.pad_mode == 'reflect': + padding = nn.ReflectionPad1d(self.kernel_width//2) + + x = padding(x) + + # STFT + fourier_real = conv1d(x, self.wcos, stride=self.hop_length) + fourier_imag = conv1d(x, self.wsin, stride=self.hop_length) + + # CQT + CQT_real, CQT_imag = complex_mul((self.cqt_kernels_real, self.cqt_kernels_imag), + (fourier_real, fourier_imag)) + + CQT = torch.stack((CQT_real,-CQT_imag),-1) + + if normalization_type == 'librosa': + CQT *= torch.sqrt(self.lenghts.view(-1,1,1))/self.kernel_width + elif normalization_type == 'convolutional': + pass + elif normalization_type == 'wrap': + CQT *= 2/self.kernel_width + else: + raise ValueError("The normalization_type %r is not part of our current options." % normalization_type) + + +# if self.norm: +# CQT = CQT/self.kernel_width*torch.sqrt(self.lenghts.view(-1,1,1)) +# else: +# CQT = CQT*torch.sqrt(self.lenghts.view(-1,1,1)) + + if output_format=='Magnitude': + # Getting CQT Amplitude + return torch.sqrt(CQT.pow(2).sum(-1)) + + elif output_format=='Complex': + return CQT + + elif output_format=='Phase': + phase_real = torch.cos(torch.atan2(CQT_imag,CQT_real)) + phase_imag = torch.sin(torch.atan2(CQT_imag,CQT_real)) + return torch.stack((phase_real,phase_imag), -1) + + def extra_repr(self) -> str: + return 'STFT kernel size = {}, CQT kernel size = {}'.format( + (*self.wcos.shape,), (*self.cqt_kernels_real.shape,) + ) + + +class CQT2010(torch.nn.Module): + """ + This algorithm is using the resampling method proposed in [1]. + Instead of convoluting the STFT results with a gigantic CQT kernel covering the full frequency + spectrum, we make a small CQT kernel covering only the top octave. + Then we keep downsampling the input audio by a factor of 2 to convoluting it with the + small CQT kernel. Everytime the input audio is downsampled, the CQT relative to the downsampled + input is equavalent to the next lower octave. + The kernel creation process is still same as the 1992 algorithm. Therefore, we can reuse the code + from the 1992 alogrithm [2] + [1] Schörkhuber, Christian. “CONSTANT-Q TRANSFORM TOOLBOX FOR MUSIC PROCESSING.” (2010). + [2] Brown, Judith C.C. and Miller Puckette. “An efficient algorithm for the calculation of a + constant Q transform.” (1992). + early downsampling factor is to downsample the input audio to reduce the CQT kernel size. + The result with and without early downsampling are more or less the same except in the very low + frequency region where freq < 40Hz. 
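+
+    Examples
+    --------
+    >>> spec_layer = Spectrogram.CQT2010()
+    >>> specs = spec_layer(x)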
+ """ + + def __init__(self, sr=22050, hop_length=512, fmin=32.70, fmax=None, n_bins=84, bins_per_octave=12, + norm=True, basis_norm=1, window='hann', pad_mode='reflect', trainable_STFT=False, filter_scale=1, + trainable_CQT=False, output_format='Magnitude', earlydownsample=True, verbose=True): + + super().__init__() + + self.norm = norm # Now norm is used to normalize the final CQT result by dividing n_fft + # basis_norm is for normalizing basis + self.hop_length = hop_length + self.pad_mode = pad_mode + self.n_bins = n_bins + self.output_format = output_format + self.earlydownsample = earlydownsample # TODO: activate early downsampling later if possible + + # This will be used to calculate filter_cutoff and creating CQT kernels + Q = float(filter_scale)/(2**(1/bins_per_octave)-1) + + # Creating lowpass filter and make it a torch tensor + if verbose==True: + print("Creating low pass filter ...", end='\r') + start = time() + lowpass_filter = torch.tensor(create_lowpass_filter( + band_center = 0.5, + kernelLength=256, + transitionBandwidth=0.001 + ) + ) + + # Broadcast the tensor to the shape that fits conv1d + self.register_buffer('lowpass_filter', lowpass_filter[None,None,:]) + + if verbose==True: + print("Low pass filter created, time used = {:.4f} seconds".format(time()-start)) + + # Calculate num of filter requires for the kernel + # n_octaves determines how many resampling requires for the CQT + n_filters = min(bins_per_octave, n_bins) + self.n_octaves = int(np.ceil(float(n_bins) / bins_per_octave)) + # print("n_octaves = ", self.n_octaves) + + # Calculate the lowest frequency bin for the top octave kernel + self.fmin_t = fmin*2**(self.n_octaves-1) + remainder = n_bins % bins_per_octave + # print("remainder = ", remainder) + + if remainder==0: + # Calculate the top bin frequency + fmax_t = self.fmin_t*2**((bins_per_octave-1)/bins_per_octave) + else: + # Calculate the top bin frequency + fmax_t = self.fmin_t*2**((remainder-1)/bins_per_octave) + + self.fmin_t = fmax_t/2**(1-1/bins_per_octave) # Adjusting the top minium bins + if fmax_t > sr/2: + raise ValueError('The top bin {}Hz has exceeded the Nyquist frequency, \ + please reduce the n_bins'.format(fmax_t)) + + if self.earlydownsample == True: # Do early downsampling if this argument is True + if verbose==True: + print("Creating early downsampling filter ...", end='\r') + start = time() + sr, self.hop_length, self.downsample_factor, early_downsample_filter, \ + self.earlydownsample = get_early_downsample_params(sr, + hop_length, + fmax_t, + Q, + self.n_octaves, + verbose) + + self.register_buffer('early_downsample_filter', early_downsample_filter) + if verbose==True: + print("Early downsampling filter created, \ + time used = {:.4f} seconds".format(time()-start)) + else: + self.downsample_factor=1. 
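+
+        # Added note: a worked example of the octave bookkeeping above, assuming
+        # the default arguments sr=22050, fmin=32.70, n_bins=84, bins_per_octave=12:
+        #     n_octaves = ceil(84/12) = 7
+        #     fmin_t    = 32.70 * 2**(7-1)    ~= 2092.8 Hz  (lowest bin of the top octave)
+        #     fmax_t    = 2092.8 * 2**(11/12) ~= 3950.7 Hz  (highest bin)
+        # Since fmax_t < sr/2 = 11025 Hz, the Nyquist check above passes and only
+        # the top-octave kernels need to be created below.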
+
+        # Preparing CQT kernels
+        if verbose==True:
+            print("Creating CQT kernels ...", end='\r')
+
+        start = time()
+        # print("Q = {}, fmin_t = {}, n_filters = {}".format(Q, self.fmin_t, n_filters))
+        basis, self.n_fft, _, _ = create_cqt_kernels(Q,
+                                                     sr,
+                                                     self.fmin_t,
+                                                     n_filters,
+                                                     bins_per_octave,
+                                                     norm=basis_norm,
+                                                     topbin_check=False)
+
+        # This is for the normalization in the end
+        freqs = fmin * 2.0 ** (np.r_[0:n_bins] / float(bins_per_octave))
+        self.frequencies = freqs
+
+        lenghts = np.ceil(Q * sr / freqs)
+        lenghts = torch.tensor(lenghts).float()
+        self.register_buffer('lenghts', lenghts)
+
+        self.basis = basis
+        fft_basis = fft(basis)[:,:self.n_fft//2+1]    # Convert CQT kernels from time domain to freq domain
+
+        # These CQT kernels are already in the frequency domain
+        cqt_kernels_real = torch.tensor(fft_basis.real.astype(np.float32))
+        cqt_kernels_imag = torch.tensor(fft_basis.imag.astype(np.float32))
+
+        if verbose==True:
+            print("CQT kernels created, time used = {:.4f} seconds".format(time()-start))
+
+        # Preparing kernels for Short-Time Fourier Transform (STFT)
+        # We set the frequency range in the CQT filter instead of here.
+        if verbose==True:
+            print("Creating STFT kernels ...", end='\r')
+
+        start = time()
+        kernel_sin, kernel_cos, self.bins2freq, _, window = create_fourier_kernels(self.n_fft, window='ones', freq_scale='no')
+        wsin = kernel_sin * window
+        wcos = kernel_cos * window
+
+        wsin = torch.tensor(wsin)
+        wcos = torch.tensor(wcos)
+
+        if verbose==True:
+            print("STFT kernels created, time used = {:.4f} seconds".format(time()-start))
+
+        if trainable_STFT:
+            wsin = torch.nn.Parameter(wsin, requires_grad=trainable_STFT)
+            wcos = torch.nn.Parameter(wcos, requires_grad=trainable_STFT)
+            self.register_parameter('wsin', wsin)
+            self.register_parameter('wcos', wcos)
+        else:
+            self.register_buffer('wsin', wsin)
+            self.register_buffer('wcos', wcos)
+
+        if trainable_CQT:
+            cqt_kernels_real = torch.nn.Parameter(cqt_kernels_real, requires_grad=trainable_CQT)
+            cqt_kernels_imag = torch.nn.Parameter(cqt_kernels_imag, requires_grad=trainable_CQT)
+            self.register_parameter('cqt_kernels_real', cqt_kernels_real)
+            self.register_parameter('cqt_kernels_imag', cqt_kernels_imag)
+        else:
+            self.register_buffer('cqt_kernels_real', cqt_kernels_real)
+            self.register_buffer('cqt_kernels_imag', cqt_kernels_imag)
+
+        # If center==True, the STFT window will be put in the middle, and paddings at the beginning
+        # and ending are required.
+        if self.pad_mode == 'constant':
+            self.padding = nn.ConstantPad1d(self.n_fft//2, 0)
+        elif self.pad_mode == 'reflect':
+            self.padding = nn.ReflectionPad1d(self.n_fft//2)
+
+
+    def forward(self, x, output_format=None, normalization_type='librosa'):
+        """
+        Convert a batch of waveforms to CQT spectrograms.
+
+        Parameters
+        ----------
+        x : torch tensor
+            Input signal should be in either of the following shapes.\n
+            1. ``(len_audio)``\n
+            2. ``(num_audio, len_audio)``\n
+            3.
+            ``(num_audio, 1, len_audio)``
+            It will be automatically broadcast to the right shape
+        """
+        output_format = output_format or self.output_format
+
+        x = broadcast_dim(x)
+        if self.earlydownsample==True:
+            x = downsampling_by_n(x, self.early_downsample_filter, self.downsample_factor)
+        hop = self.hop_length
+
+        # Getting the top octave CQT
+        CQT = get_cqt_complex2(x, self.cqt_kernels_real, self.cqt_kernels_imag, hop, self.padding,
+                               wcos=self.wcos, wsin=self.wsin)
+
+        x_down = x    # Preparing a new variable for downsampling
+        for i in range(self.n_octaves-1):
+            hop = hop//2
+            x_down = downsampling_by_2(x_down, self.lowpass_filter)
+
+            CQT1 = get_cqt_complex2(x_down, self.cqt_kernels_real, self.cqt_kernels_imag, hop, self.padding,
+                                    wcos=self.wcos, wsin=self.wsin)
+            CQT = torch.cat((CQT1, CQT),1)
+
+        CQT = CQT[:,-self.n_bins:,:]    # Removing unwanted bottom bins
+
+        if normalization_type == 'librosa':
+            CQT *= torch.sqrt(self.lenghts.view(-1,1,1))/self.n_fft
+        elif normalization_type == 'convolutional':
+            pass
+        elif normalization_type == 'wrap':
+            CQT *= 2/self.n_fft
+        else:
+            raise ValueError("The normalization_type %r is not part of our current options." % normalization_type)
+
+        if output_format=='Magnitude':
+            # Getting CQT Amplitude
+            return torch.sqrt(CQT.pow(2).sum(-1))
+
+        elif output_format=='Complex':
+            return CQT
+
+        elif output_format=='Phase':
+            phase_real = torch.cos(torch.atan2(CQT[:,:,:,1],CQT[:,:,:,0]))
+            phase_imag = torch.sin(torch.atan2(CQT[:,:,:,1],CQT[:,:,:,0]))
+            return torch.stack((phase_real,phase_imag), -1)
+
+    def extra_repr(self) -> str:
+        return 'STFT kernel size = {}, CQT kernel size = {}'.format(
+            (*self.wcos.shape,), (*self.cqt_kernels_real.shape,)
+        )
+
+
+class CQT1992v2(torch.nn.Module):
+    """This function is to calculate the CQT of the input signal.
+    Input signal should be in either of the following shapes.\n
+    1. ``(len_audio)``\n
+    2. ``(num_audio, len_audio)``\n
+    3. ``(num_audio, 1, len_audio)``
+
+    The correct shape will be inferred automatically if the input follows these 3 shapes.
+    Most of the arguments follow the convention from librosa.
+    This class inherits from ``torch.nn.Module``, therefore, the usage is the same as ``torch.nn.Module``.
+
+    This algorithm uses the method proposed in [1]. I slightly modified it so that it runs faster
+    than the original 1992 algorithm; that is why I call it version 2.
+    [1] Brown, Judith C.C. and Miller Puckette. “An efficient algorithm for the calculation of a
+    constant Q transform.” (1992).
+
+    Parameters
+    ----------
+    sr : int
+        The sampling rate for the input audio. It is used to calculate the correct ``fmin`` and ``fmax``.
+        Setting the correct sampling rate is very important for calculating the correct frequency.
+
+    hop_length : int
+        The hop (or stride) size. Default value is 512.
+
+    fmin : float
+        The frequency for the lowest CQT bin. Default is 32.70Hz, which corresponds to the note C0.
+
+    fmax : float
+        The frequency for the highest CQT bin. Default is ``None``, therefore the highest CQT bin is
+        inferred from the ``n_bins`` and ``bins_per_octave``.
+        If ``fmax`` is not ``None``, then the argument ``n_bins`` will be ignored and ``n_bins``
+        will be calculated automatically. Default is ``None``.
+
+    n_bins : int
+        The total number of CQT bins. Default is 84. Will be ignored if ``fmax`` is not ``None``.
+
+    bins_per_octave : int
+        Number of bins per octave. Default is 12.
+
+    filter_scale : float > 0
+        Filter scale factor.
Values of filter_scale smaller than 1 can be used to improve the time resolution at the + cost of degrading the frequency resolution. Important to note is that setting for example filter_scale = 0.5 and + bins_per_octave = 48 leads to exactly the same time-frequency resolution trade-off as setting filter_scale = 1 + and bins_per_octave = 24, but the former contains twice more frequency bins per octave. In this sense, values + filter_scale < 1 can be seen to implement oversampling of the frequency axis, analogously to the use of zero + padding when calculating the DFT. + + norm : int + Normalization for the CQT kernels. ``1`` means L1 normalization, and ``2`` means L2 normalization. + Default is ``1``, which is same as the normalization used in librosa. + + window : string, float, or tuple + The windowing function for CQT. If it is a string, It uses ``scipy.signal.get_window``. If it is a + tuple, only the gaussian window wanrantees constant Q factor. Gaussian window should be given as a + tuple ('gaussian', att) where att is the attenuation in the border given in dB. + Please refer to scipy documentation for possible windowing functions. The default value is 'hann'. + + center : bool + Putting the CQT keneral at the center of the time-step or not. If ``False``, the time index is + the beginning of the CQT kernel, if ``True``, the time index is the center of the CQT kernel. + Default value if ``True``. + + pad_mode : str + The padding method. Default value is 'reflect'. + + trainable : bool + Determine if the CQT kernels are trainable or not. If ``True``, the gradients for CQT kernels + will also be caluclated and the CQT kernels will be updated during model training. + Default value is ``False``. + + output_format : str + Determine the return type. + ``Magnitude`` will return the magnitude of the STFT result, shape = ``(num_samples, freq_bins,time_steps)``; + ``Complex`` will return the STFT result in complex number, shape = ``(num_samples, freq_bins,time_steps, 2)``; + ``Phase`` will return the phase of the STFT reuslt, shape = ``(num_samples, freq_bins,time_steps, 2)``. + The complex number is stored as ``(real, imag)`` in the last axis. Default value is 'Magnitude'. + + verbose : bool + If ``True``, it shows layer information. If ``False``, it suppresses all prints + + Returns + ------- + spectrogram : torch.tensor + It returns a tensor of spectrograms. 
+ shape = ``(num_samples, freq_bins,time_steps)`` if ``output_format='Magnitude'``; + shape = ``(num_samples, freq_bins,time_steps, 2)`` if ``output_format='Complex' or 'Phase'``; + + Examples + -------- + >>> spec_layer = Spectrogram.CQT1992v2() + >>> specs = spec_layer(x) + """ + + def __init__(self, sr=22050, hop_length=512, fmin=32.70, fmax=None, n_bins=84, + bins_per_octave=12, filter_scale=1, norm=1, window='hann', center=True, pad_mode='reflect', + trainable=False, output_format='Magnitude', verbose=True): + + super().__init__() + + self.trainable = trainable + self.hop_length = hop_length + self.center = center + self.pad_mode = pad_mode + self.output_format = output_format + + # creating kernels for CQT + Q = float(filter_scale)/(2**(1/bins_per_octave)-1) + + if verbose==True: + print("Creating CQT kernels ...", end='\r') + + start = time() + cqt_kernels, self.kernel_width, lenghts, freqs = create_cqt_kernels(Q, + sr, + fmin, + n_bins, + bins_per_octave, + norm, + window, + fmax) + + self.register_buffer('lenghts', lenghts) + self.frequencies = freqs + + cqt_kernels_real = torch.tensor(cqt_kernels.real).unsqueeze(1) + cqt_kernels_imag = torch.tensor(cqt_kernels.imag).unsqueeze(1) + + if trainable: + cqt_kernels_real = torch.nn.Parameter(cqt_kernels_real, requires_grad=trainable) + cqt_kernels_imag = torch.nn.Parameter(cqt_kernels_imag, requires_grad=trainable) + self.register_parameter('cqt_kernels_real', cqt_kernels_real) + self.register_parameter('cqt_kernels_imag', cqt_kernels_imag) + else: + self.register_buffer('cqt_kernels_real', cqt_kernels_real) + self.register_buffer('cqt_kernels_imag', cqt_kernels_imag) + + if verbose==True: + print("CQT kernels created, time used = {:.4f} seconds".format(time()-start)) + + + def forward(self,x, output_format=None, normalization_type='librosa'): + """ + Convert a batch of waveforms to CQT spectrograms. + + Parameters + ---------- + x : torch tensor + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + It will be automatically broadcast to the right shape + + normalization_type : str + Type of the normalisation. The possible options are: \n + 'librosa' : the output fits the librosa one \n + 'convolutional' : the output conserves the convolutional inequalities of the wavelet transform:\n + for all p ϵ [1, inf] \n + - || CQT ||_p <= || f ||_p || g ||_1 \n + - || CQT ||_p <= || f ||_1 || g ||_p \n + - || CQT ||_2 = || f ||_2 || g ||_2 \n + 'wrap' : wraps positive and negative frequencies into positive frequencies. This means that the CQT of a + sinus (or a cosinus) with a constant amplitude equal to 1 will have the value 1 in the bin corresponding to + its frequency. 
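+
+        output_format : str
+            Overrides ``self.output_format`` for this call when given; one of
+            'Magnitude', 'Complex', or 'Phase'. Default is ``None``.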
+ """ + output_format = output_format or self.output_format + + x = broadcast_dim(x) + if self.center: + if self.pad_mode == 'constant': + padding = nn.ConstantPad1d(self.kernel_width//2, 0) + elif self.pad_mode == 'reflect': + padding = nn.ReflectionPad1d(self.kernel_width//2) + + x = padding(x) + + # CQT + CQT_real = conv1d(x, self.cqt_kernels_real, stride=self.hop_length) + CQT_imag = -conv1d(x, self.cqt_kernels_imag, stride=self.hop_length) + + if normalization_type == 'librosa': + CQT_real *= torch.sqrt(self.lenghts.view(-1, 1)) + CQT_imag *= torch.sqrt(self.lenghts.view(-1, 1)) + elif normalization_type == 'convolutional': + pass + elif normalization_type == 'wrap': + CQT_real *= 2 + CQT_imag *= 2 + else: + raise ValueError("The normalization_type %r is not part of our current options." % normalization_type) + + if output_format=='Magnitude': + if self.trainable==False: + # Getting CQT Amplitude + CQT = torch.sqrt(CQT_real.pow(2)+CQT_imag.pow(2)) + else: + CQT = torch.sqrt(CQT_real.pow(2)+CQT_imag.pow(2)+1e-8) + return CQT + + elif output_format=='Complex': + return torch.stack((CQT_real,CQT_imag),-1) + + elif output_format=='Phase': + phase_real = torch.cos(torch.atan2(CQT_imag,CQT_real)) + phase_imag = torch.sin(torch.atan2(CQT_imag,CQT_real)) + return torch.stack((phase_real,phase_imag), -1) + + def forward_manual(self,x): + """ + Method for debugging + """ + + x = broadcast_dim(x) + if self.center: + if self.pad_mode == 'constant': + padding = nn.ConstantPad1d(self.kernel_width//2, 0) + elif self.pad_mode == 'reflect': + padding = nn.ReflectionPad1d(self.kernel_width//2) + + x = padding(x) + + # CQT + CQT_real = conv1d(x, self.cqt_kernels_real, stride=self.hop_length) + CQT_imag = conv1d(x, self.cqt_kernels_imag, stride=self.hop_length) + + # Getting CQT Amplitude + CQT = torch.sqrt(CQT_real.pow(2)+CQT_imag.pow(2)) + return CQT*torch.sqrt(self.lenghts.view(-1,1)) + + +class CQT2010v2(torch.nn.Module): + """This function is to calculate the CQT of the input signal. + Input signal should be in either of the following shapes.\n + 1. ``(len_audio)``\n + 2. ``(num_audio, len_audio)``\n + 3. ``(num_audio, 1, len_audio)`` + + The correct shape will be inferred autommatically if the input follows these 3 shapes. + Most of the arguments follow the convention from librosa. + This class inherits from ``torch.nn.Module``, therefore, the usage is same as ``torch.nn.Module``. + + This alogrithm uses the resampling method proposed in [1]. + Instead of convoluting the STFT results with a gigantic CQT kernel covering the full frequency + spectrum, we make a small CQT kernel covering only the top octave. Then we keep downsampling the + input audio by a factor of 2 to convoluting it with the small CQT kernel. + Everytime the input audio is downsampled, the CQT relative to the downsampled input is equivalent + to the next lower octave. + The kernel creation process is still same as the 1992 algorithm. Therefore, we can reuse the + code from the 1992 alogrithm [2] + [1] Schörkhuber, Christian. “CONSTANT-Q TRANSFORM TOOLBOX FOR MUSIC PROCESSING.” (2010). + [2] Brown, Judith C.C. and Miller Puckette. “An efficient algorithm for the calculation of a + constant Q transform.” (1992). + + Early downsampling factor is to downsample the input audio to reduce the CQT kernel size. + The result with and without early downsampling are more or less the same except in the very low + frequency region where freq < 40Hz. + + Parameters + ---------- + sr : int + The sampling rate for the input audio. 
It is used to calucate the correct ``fmin`` and ``fmax``. + Setting the correct sampling rate is very important for calculating the correct frequency. + + hop_length : int + The hop (or stride) size. Default value is 512. + + fmin : float + The frequency for the lowest CQT bin. Default is 32.70Hz, which coresponds to the note C0. + + fmax : float + The frequency for the highest CQT bin. Default is ``None``, therefore the higest CQT bin is + inferred from the ``n_bins`` and ``bins_per_octave``. If ``fmax`` is not ``None``, then the + argument ``n_bins`` will be ignored and ``n_bins`` will be calculated automatically. + Default is ``None`` + + n_bins : int + The total numbers of CQT bins. Default is 84. Will be ignored if ``fmax`` is not ``None``. + + bins_per_octave : int + Number of bins per octave. Default is 12. + + norm : bool + Normalization for the CQT result. + + basis_norm : int + Normalization for the CQT kernels. ``1`` means L1 normalization, and ``2`` means L2 normalization. + Default is ``1``, which is same as the normalization used in librosa. + + window : str + The windowing function for CQT. It uses ``scipy.signal.get_window``, please refer to + scipy documentation for possible windowing functions. The default value is 'hann' + + pad_mode : str + The padding method. Default value is 'reflect'. + + trainable : bool + Determine if the CQT kernels are trainable or not. If ``True``, the gradients for CQT kernels + will also be caluclated and the CQT kernels will be updated during model training. + Default value is ``False`` + + output_format : str + Determine the return type. + 'Magnitude' will return the magnitude of the STFT result, shape = ``(num_samples, freq_bins, time_steps)``; + 'Complex' will return the STFT result in complex number, shape = ``(num_samples, freq_bins, time_steps, 2)``; + 'Phase' will return the phase of the STFT reuslt, shape = ``(num_samples, freq_bins,time_steps, 2)``. + The complex number is stored as ``(real, imag)`` in the last axis. Default value is 'Magnitude'. + + verbose : bool + If ``True``, it shows layer information. If ``False``, it suppresses all prints. + + Returns + ------- + spectrogram : torch.tensor + It returns a tensor of spectrograms. 
+ shape = ``(num_samples, freq_bins,time_steps)`` if ``output_format='Magnitude'``; + shape = ``(num_samples, freq_bins,time_steps, 2)`` if ``output_format='Complex' or 'Phase'``; + + Examples + -------- + >>> spec_layer = Spectrogram.CQT2010v2() + >>> specs = spec_layer(x) + """ + + +# To DO: +# need to deal with the filter and other tensors + + def __init__(self, sr=22050, hop_length=512, fmin=32.70, fmax=None, n_bins=84, filter_scale=1, + bins_per_octave=12, norm=True, basis_norm=1, window='hann', pad_mode='reflect', + earlydownsample=True, trainable=False, output_format='Magnitude', verbose=True): + + super().__init__() + + self.norm = norm # Now norm is used to normalize the final CQT result by dividing n_fft + # basis_norm is for normalizing basis + self.hop_length = hop_length + self.pad_mode = pad_mode + self.n_bins = n_bins + self.earlydownsample = earlydownsample # We will activate early downsampling later if possible + self.trainable = trainable + self.output_format = output_format + + # It will be used to calculate filter_cutoff and creating CQT kernels + Q = float(filter_scale)/(2**(1/bins_per_octave)-1) + + # Creating lowpass filter and make it a torch tensor + if verbose==True: + print("Creating low pass filter ...", end='\r') + start = time() + # self.lowpass_filter = torch.tensor( + # create_lowpass_filter( + # band_center = 0.50, + # kernelLength=256, + # transitionBandwidth=0.001)) + lowpass_filter = torch.tensor(create_lowpass_filter( + band_center = 0.50, + kernelLength=256, + transitionBandwidth=0.001) + ) + + # Broadcast the tensor to the shape that fits conv1d + self.register_buffer('lowpass_filter', lowpass_filter[None,None,:]) + if verbose==True: + print("Low pass filter created, time used = {:.4f} seconds".format(time()-start)) + + # Caluate num of filter requires for the kernel + # n_octaves determines how many resampling requires for the CQT + n_filters = min(bins_per_octave, n_bins) + self.n_octaves = int(np.ceil(float(n_bins) / bins_per_octave)) + if verbose==True: + print("num_octave = ", self.n_octaves) + + # Calculate the lowest frequency bin for the top octave kernel + self.fmin_t = fmin*2**(self.n_octaves-1) + remainder = n_bins % bins_per_octave + # print("remainder = ", remainder) + + if remainder==0: + # Calculate the top bin frequency + fmax_t = self.fmin_t*2**((bins_per_octave-1)/bins_per_octave) + else: + # Calculate the top bin frequency + fmax_t = self.fmin_t*2**((remainder-1)/bins_per_octave) + + self.fmin_t = fmax_t/2**(1-1/bins_per_octave) # Adjusting the top minium bins + if fmax_t > sr/2: + raise ValueError('The top bin {}Hz has exceeded the Nyquist frequency, \ + please reduce the n_bins'.format(fmax_t)) + + if self.earlydownsample == True: # Do early downsampling if this argument is True + if verbose==True: + print("Creating early downsampling filter ...", end='\r') + start = time() + sr, self.hop_length, self.downsample_factor, early_downsample_filter, \ + self.earlydownsample = get_early_downsample_params(sr, + hop_length, + fmax_t, + Q, + self.n_octaves, + verbose) + self.register_buffer('early_downsample_filter', early_downsample_filter) + + if verbose==True: + print("Early downsampling filter created, \ + time used = {:.4f} seconds".format(time()-start)) + else: + self.downsample_factor=1. 
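+
+        # Note on the branch above: self.downsample_factor records how much the
+        # waveform is decimated. forward() uses it both to downsample the input
+        # (downsampling_by_n) and to rescale the CQT magnitude afterwards
+        # (CQT = CQT*self.downsample_factor); without early downsampling the
+        # factor stays 1 and the input is processed at the full sample rate.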
+
+        # Preparing CQT kernels
+        if verbose==True:
+            print("Creating CQT kernels ...", end='\r')
+        start = time()
+        basis, self.n_fft, lenghts, _ = create_cqt_kernels(Q,
+                                                           sr,
+                                                           self.fmin_t,
+                                                           n_filters,
+                                                           bins_per_octave,
+                                                           norm=basis_norm,
+                                                           topbin_check=False)
+        # For normalization in the end
+        # The freqs returned by create_cqt_kernels cannot be used,
+        # since it returns only the top octave bins.
+        # We need the information for all freq bins
+        freqs = fmin * 2.0 ** (np.r_[0:n_bins] / float(bins_per_octave))
+        self.frequencies = freqs
+
+        lenghts = np.ceil(Q * sr / freqs)
+        lenghts = torch.tensor(lenghts).float()
+        self.register_buffer('lenghts', lenghts)
+
+        self.basis = basis
+        # These CQT kernels are already in the frequency domain
+        cqt_kernels_real = torch.tensor(basis.real.astype(np.float32)).unsqueeze(1)
+        cqt_kernels_imag = torch.tensor(basis.imag.astype(np.float32)).unsqueeze(1)
+
+        if trainable:
+            cqt_kernels_real = torch.nn.Parameter(cqt_kernels_real, requires_grad=trainable)
+            cqt_kernels_imag = torch.nn.Parameter(cqt_kernels_imag, requires_grad=trainable)
+            self.register_parameter('cqt_kernels_real', cqt_kernels_real)
+            self.register_parameter('cqt_kernels_imag', cqt_kernels_imag)
+        else:
+            self.register_buffer('cqt_kernels_real', cqt_kernels_real)
+            self.register_buffer('cqt_kernels_imag', cqt_kernels_imag)
+
+        if verbose==True:
+            print("CQT kernels created, time used = {:.4f} seconds".format(time()-start))
+
+        # If center==True, the STFT window will be put in the middle, and paddings at the beginning
+        # and ending are required.
+        if self.pad_mode == 'constant':
+            self.padding = nn.ConstantPad1d(self.n_fft//2, 0)
+        elif self.pad_mode == 'reflect':
+            self.padding = nn.ReflectionPad1d(self.n_fft//2)
+
+
+    def forward(self, x, output_format=None, normalization_type='librosa'):
+        """
+        Convert a batch of waveforms to CQT spectrograms.
+
+        Parameters
+        ----------
+        x : torch tensor
+            Input signal should be in either of the following shapes.\n
+            1. ``(len_audio)``\n
+            2. ``(num_audio, len_audio)``\n
+            3. ``(num_audio, 1, len_audio)``
+            It will be automatically broadcast to the right shape
+        """
+        output_format = output_format or self.output_format
+
+        x = broadcast_dim(x)
+        if self.earlydownsample==True:
+            x = downsampling_by_n(x, self.early_downsample_filter, self.downsample_factor)
+        hop = self.hop_length
+        CQT = get_cqt_complex(x, self.cqt_kernels_real, self.cqt_kernels_imag, hop, self.padding)    # Getting the top octave CQT
+
+        x_down = x    # Preparing a new variable for downsampling
+
+        for i in range(self.n_octaves-1):
+            hop = hop//2
+            x_down = downsampling_by_2(x_down, self.lowpass_filter)
+            CQT1 = get_cqt_complex(x_down, self.cqt_kernels_real, self.cqt_kernels_imag, hop, self.padding)
+            CQT = torch.cat((CQT1, CQT),1)
+
+        CQT = CQT[:,-self.n_bins:,:]    # Removing unwanted bottom bins
+
+        # Normalizing the output with the downsampling factor; 2**(self.n_octaves-1)
+        # makes it the same magnitude as the 1992 version
+        CQT = CQT*self.downsample_factor
+        # Normalize again to get the same result as librosa
+        if normalization_type == 'librosa':
+            CQT = CQT*torch.sqrt(self.lenghts.view(-1,1,1))
+        elif normalization_type == 'convolutional':
+            pass
+        elif normalization_type == 'wrap':
+            CQT *= 2
+        else:
+            raise ValueError("The normalization_type %r is not part of our current options."
+                             % normalization_type)
+
+        if output_format=='Magnitude':
+            if self.trainable==False:
+                # Getting CQT Amplitude
+                return torch.sqrt(CQT.pow(2).sum(-1))
+            else:
+                return torch.sqrt(CQT.pow(2).sum(-1)+1e-8)
+
+        elif output_format=='Complex':
+            return CQT
+
+        elif output_format=='Phase':
+            phase_real = torch.cos(torch.atan2(CQT[:,:,:,1],CQT[:,:,:,0]))
+            phase_imag = torch.sin(torch.atan2(CQT[:,:,:,1],CQT[:,:,:,0]))
+            return torch.stack((phase_real,phase_imag), -1)
+
+
+class CQT(CQT1992v2):
+    """An abbreviation for :func:`~nnAudio.Spectrogram.CQT1992v2`. Please refer to the
+    :func:`~nnAudio.Spectrogram.CQT1992v2` documentation."""
+    pass
+
+
+# The section below is for development purposes
+# Please don't use the following classes
+#
+
+class DFT(torch.nn.Module):
+    """
+    Experimental feature before `torch.fft` was made available.
+    The inverse function only works for a single frame, i.e. input shape = (batch, n_fft, 1).
+    """
+    def __init__(self, n_fft=2048, freq_bins=None, hop_length=512,
+                 window='hann', freq_scale='no', center=True, pad_mode='reflect',
+                 fmin=50, fmax=6000, sr=22050):
+
+        super().__init__()
+
+        self.stride = hop_length
+        self.center = center
+        self.pad_mode = pad_mode
+        self.n_fft = n_fft
+
+        # Create filter windows for stft
+        wsin, wcos, self.bins2freq, _, _ = create_fourier_kernels(n_fft=n_fft,
+                                                                  freq_bins=n_fft,
+                                                                  window=window,
+                                                                  freq_scale=freq_scale,
+                                                                  fmin=fmin,
+                                                                  fmax=fmax,
+                                                                  sr=sr)
+        self.wsin = torch.tensor(wsin, dtype=torch.float)
+        self.wcos = torch.tensor(wcos, dtype=torch.float)
+
+    def forward(self, x):
+        """
+        Convert a batch of waveforms to spectra.
+
+        Parameters
+        ----------
+        x : torch tensor
+            Input signal should be in either of the following shapes.\n
+            1. ``(len_audio)``\n
+            2. ``(num_audio, len_audio)``\n
+            3. ``(num_audio, 1, len_audio)``
+            It will be automatically broadcast to the right shape
+        """
+        x = broadcast_dim(x)
+        if self.center:
+            if self.pad_mode == 'constant':
+                padding = nn.ConstantPad1d(self.n_fft//2, 0)
+            elif self.pad_mode == 'reflect':
+                padding = nn.ReflectionPad1d(self.n_fft//2)
+
+            x = padding(x)
+
+        imag = conv1d(x, self.wsin, stride=self.stride)
+        real = conv1d(x, self.wcos, stride=self.stride)
+        return (real, -imag)
+
+    def inverse(self, x_real, x_imag):
+        """
+        Convert a batch of spectra back to waveforms.
+
+        Parameters
+        ----------
+        x_real : torch tensor
+            Real part of the signal.
+        x_imag : torch tensor
+            Imaginary part of the signal.
+        """
+        x_real = broadcast_dim(x_real)
+        x_imag = broadcast_dim(x_imag)
+
+        x_real.transpose_(1,2)    # Prepare the right shape to do inverse
+        x_imag.transpose_(1,2)    # Prepare the right shape to do inverse
+
+        # Watch out for the positive and negative signs
+        # ifft = e^(+2\pi*j)*X
+        #
+        # ifft(X_real) = (a1, a2)
+        #
+        # ifft(X_imag)*1j = (b1, b2)*1j
+        #                 = (-b2, b1)
+
+        a1 = conv1d(x_real, self.wcos, stride=self.stride)
+        a2 = conv1d(x_real, self.wsin, stride=self.stride)
+        b1 = conv1d(x_imag, self.wcos, stride=self.stride)
+        b2 = conv1d(x_imag, self.wsin, stride=self.stride)
+
+        imag = a2+b1
+        real = a1-b2
+        return (real/self.n_fft, imag/self.n_fft)
+
+
+class iSTFT(torch.nn.Module):
+    """This class is to convert spectrograms back to waveforms. It only works for complex-valued spectrograms.
+ If you have the magnitude spectrograms, please use :func:`~nnAudio.Spectrogram.Griffin_Lim`. + The parameters (e.g. n_fft, window) need to be the same as the STFT in order to obtain the correct inverse. + If trainability is not required, it is recommended to use the ``inverse`` method under the ``STFT`` class + to save GPU/RAM memory. + + When ``trainable=True`` and ``freq_scale!='no'``, there is no guarantee that the inverse is perfect, please + use with extra care. + + Parameters + ---------- + n_fft : int + The window size. Default value is 2048. + + freq_bins : int + Number of frequency bins. Default is ``None``, which means ``n_fft//2+1`` bins + Please make sure the value is the same as the forward STFT. + + hop_length : int + The hop (or stride) size. Default value is ``None`` which is equivalent to ``n_fft//4``. + Please make sure the value is the same as the forward STFT. + + window : str + The windowing function for iSTFT. It uses ``scipy.signal.get_window``, please refer to + scipy documentation for possible windowing functions. The default value is 'hann'. + Please make sure the value is the same as the forward STFT. + + freq_scale : 'linear', 'log', or 'no' + Determine the spacing between each frequency bin. When `linear` or `log` is used, + the bin spacing can be controlled by ``fmin`` and ``fmax``. If 'no' is used, the bin will + start at 0Hz and end at Nyquist frequency with linear spacing. + Please make sure the value is the same as the forward STFT. + + center : bool + Putting the iSTFT keneral at the center of the time-step or not. If ``False``, the time + index is the beginning of the iSTFT kernel, if ``True``, the time index is the center of + the iSTFT kernel. Default value if ``True``. + Please make sure the value is the same as the forward STFT. + + fmin : int + The starting frequency for the lowest frequency bin. If freq_scale is ``no``, this argument + does nothing. Please make sure the value is the same as the forward STFT. + + fmax : int + The ending frequency for the highest frequency bin. If freq_scale is ``no``, this argument + does nothing. Please make sure the value is the same as the forward STFT. + + sr : int + The sampling rate for the input audio. It is used to calucate the correct ``fmin`` and ``fmax``. + Setting the correct sampling rate is very important for calculating the correct frequency. + + trainable_kernels : bool + Determine if the STFT kenrels are trainable or not. If ``True``, the gradients for STFT + kernels will also be caluclated and the STFT kernels will be updated during model training. + Default value is ``False``. + + trainable_window : bool + Determine if the window function is trainable or not. + Default value is ``False``. + + verbose : bool + If ``True``, it shows layer information. If ``False``, it suppresses all prints. + + Returns + ------- + spectrogram : torch.tensor + It returns a batch of waveforms. 
+ + Examples + -------- + >>> spec_layer = Spectrogram.iSTFT() + >>> specs = spec_layer(x) + """ + + def __init__(self, n_fft=2048, win_length=None, freq_bins=None, hop_length=None, window='hann', + freq_scale='no', center=True, fmin=50, fmax=6000, sr=22050, trainable_kernels=False, + trainable_window=False, verbose=True, refresh_win=True): + + super().__init__() + + # Trying to make the default setting same as librosa + if win_length==None: win_length = n_fft + if hop_length==None: hop_length = int(win_length // 4) + + self.n_fft = n_fft + self.win_length = win_length + self.stride = hop_length + self.center = center + + self.pad_amount = self.n_fft // 2 + self.refresh_win = refresh_win + + start = time() + + # Create the window function and prepare the shape for batch-wise-time-wise multiplication + + # Create filter windows for inverse + kernel_sin, kernel_cos, _, _, window_mask = create_fourier_kernels(n_fft, + win_length=win_length, + freq_bins=n_fft, + window=window, + freq_scale=freq_scale, + fmin=fmin, + fmax=fmax, + sr=sr, + verbose=False) + window_mask = get_window(window,int(win_length), fftbins=True) + + # For inverse, the Fourier kernels do not need to be windowed + window_mask = torch.tensor(window_mask).unsqueeze(0).unsqueeze(-1) + + # kernel_sin and kernel_cos have the shape (freq_bins, 1, n_fft, 1) to support 2D Conv + kernel_sin = torch.tensor(kernel_sin, dtype=torch.float).unsqueeze(-1) + kernel_cos = torch.tensor(kernel_cos, dtype=torch.float).unsqueeze(-1) + + # Decide if the Fourier kernels are trainable + if trainable_kernels: + # Making all these variables trainable + kernel_sin = torch.nn.Parameter(kernel_sin, requires_grad=trainable_kernels) + kernel_cos = torch.nn.Parameter(kernel_cos, requires_grad=trainable_kernels) + self.register_parameter('kernel_sin', kernel_sin) + self.register_parameter('kernel_cos', kernel_cos) + + else: + self.register_buffer('kernel_sin', kernel_sin) + self.register_buffer('kernel_cos', kernel_cos) + + # Decide if the window function is trainable + if trainable_window: + window_mask = torch.nn.Parameter(window_mask, requires_grad=trainable_window) + self.register_parameter('window_mask', window_mask) + else: + self.register_buffer('window_mask', window_mask) + + + if verbose==True: + print("iSTFT kernels created, time used = {:.4f} seconds".format(time()-start)) + else: + pass + + + def forward(self, X, onesided=False, length=None, refresh_win=None): + """ + If your spectrograms only have ``n_fft//2+1`` frequency bins, please use ``onesided=True``, + else use ``onesided=False`` + To make sure the inverse STFT has the same output length of the original waveform, please + set `length` as your intended waveform length. By default, ``length=None``, + which will remove ``n_fft//2`` samples from the start and the end of the output. + If your input spectrograms X are of the same length, please use ``refresh_win=None`` to increase + computational speed. 
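+
+        Parameters
+        ----------
+        X : torch tensor
+            The complex spectrogram to invert, shape ``(batch, freq_bins, time_steps, 2)``.
+        onesided : bool
+            Set ``True`` if ``X`` only carries ``n_fft//2+1`` frequency bins. Default is ``False``.
+        length : int
+            The intended waveform length; when ``None``, ``n_fft//2`` samples are trimmed from
+            both ends of the output. Default is ``None``.
+        refresh_win : bool
+            Whether to recompute the window sum-square envelope; ``None`` falls back to the
+            value given at construction time. Default is ``None``.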
+ """ + if refresh_win==None: + refresh_win=self.refresh_win + + assert X.dim()==4 , "Inverse iSTFT only works for complex number," \ + "make sure our tensor is in the shape of (batch, freq_bins, timesteps, 2)" + + # If the input spectrogram contains only half of the n_fft + # Use extend_fbins function to get back another half + if onesided: + X = extend_fbins(X) # extend freq + + + X_real, X_imag = X[:, :, :, 0], X[:, :, :, 1] + + # broadcast dimensions to support 2D convolution + X_real_bc = X_real.unsqueeze(1) + X_imag_bc = X_imag.unsqueeze(1) + + a1 = conv2d(X_real_bc, self.kernel_cos, stride=(1,1)) + b2 = conv2d(X_imag_bc, self.kernel_sin, stride=(1,1)) + + # compute real and imag part. signal lies in the real part + real = a1 - b2 + real = real.squeeze(-2)*self.window_mask + + # Normalize the amplitude with n_fft + real /= (self.n_fft) + + # Overlap and Add algorithm to connect all the frames + real = overlap_add(real, self.stride) + + # Prepare the window sumsqure for division + # Only need to create this window once to save time + # Unless the input spectrograms have different time steps + if hasattr(self, 'w_sum')==False or refresh_win==True: + self.w_sum = torch_window_sumsquare(self.window_mask.flatten(), X.shape[2], self.stride, self.n_fft).flatten() + self.nonzero_indices = (self.w_sum>1e-10) + else: + pass + real[:, self.nonzero_indices] = real[:,self.nonzero_indices].div(self.w_sum[self.nonzero_indices]) + # Remove padding + if length is None: + if self.center: + real = real[:, self.pad_amount:-self.pad_amount] + + else: + if self.center: + real = real[:, self.pad_amount:self.pad_amount + length] + else: + real = real[:, :length] + + return real + + +class Griffin_Lim(torch.nn.Module): + """ + Converting Magnitude spectrograms back to waveforms based on the "fast Griffin-Lim"[1]. + This Griffin Lim is a direct clone from librosa.griffinlim. + + [1] Perraudin, N., Balazs, P., & Søndergaard, P. L. “A fast Griffin-Lim algorithm,” + IEEE Workshop on Applications of Signal Processing to Audio and Acoustics (pp. 1-4), Oct. 2013. + + Parameters + ---------- + n_fft : int + The window size. Default value is 2048. + + n_iter=32 : int + The number of iterations for Griffin-Lim. The default value is ``32`` + + hop_length : int + The hop (or stride) size. Default value is ``None`` which is equivalent to ``n_fft//4``. + Please make sure the value is the same as the forward STFT. + + window : str + The windowing function for iSTFT. It uses ``scipy.signal.get_window``, please refer to + scipy documentation for possible windowing functions. The default value is 'hann'. + Please make sure the value is the same as the forward STFT. + + center : bool + Putting the iSTFT keneral at the center of the time-step or not. If ``False``, the time + index is the beginning of the iSTFT kernel, if ``True``, the time index is the center of + the iSTFT kernel. Default value if ``True``. + Please make sure the value is the same as the forward STFT. + + momentum : float + The momentum for the update rule. The default value is ``0.99``. + + device : str + Choose which device to initialize this layer. 
Default value is 'cpu' + + """ + + def __init__(self, + n_fft, + n_iter=32, + hop_length=None, + win_length=None, + window='hann', + center=True, + pad_mode='reflect', + momentum=0.99, + device='cpu'): + super().__init__() + + self.n_fft = n_fft + self.win_length = win_length + self.n_iter = n_iter + self.center = center + self.pad_mode = pad_mode + self.momentum = momentum + self.device = device + if win_length==None: + self.win_length=n_fft + else: + self.win_length=win_length + if hop_length==None: + self.hop_length = n_fft//4 + else: + self.hop_length = hop_length + + # Creating window function for stft and istft later + self.w = torch.tensor(get_window(window, + int(self.win_length), + fftbins=True), + device=device).float() + + def forward(self, S): + """ + Convert a batch of magnitude spectrograms to waveforms. + + Parameters + ---------- + S : torch tensor + Spectrogram of the shape ``(batch, n_fft//2+1, timesteps)`` + """ + + assert S.dim()==3 , "Please make sure your input is in the shape of (batch, freq_bins, timesteps)" + + # Initializing Random Phase + rand_phase = torch.randn(*S.shape, device=self.device) + angles = torch.empty((*S.shape,2), device=self.device) + angles[:, :,:,0] = torch.cos(2 * np.pi * rand_phase) + angles[:,:,:,1] = torch.sin(2 * np.pi * rand_phase) + + # Initializing the rebuilt magnitude spectrogram + rebuilt = torch.zeros(*angles.shape, device=self.device) + + for _ in range(self.n_iter): + tprev = rebuilt # Saving previous rebuilt magnitude spec + + # spec2wav conversion +# print(f'win_length={self.win_length}\tw={self.w.shape}') + inverse = torch.istft(S.unsqueeze(-1) * angles, + self.n_fft, + self.hop_length, + win_length=self.win_length, + window=self.w, + center=self.center) + # wav2spec conversion + rebuilt = torch.stft(inverse, + self.n_fft, + self.hop_length, + win_length=self.win_length, + window=self.w, + pad_mode=self.pad_mode) + + # Phase update rule + angles[:,:,:] = rebuilt[:,:,:] - (self.momentum / (1 + self.momentum)) * tprev[:,:,:] + + # Phase normalization + angles = angles.div(torch.sqrt(angles.pow(2).sum(-1)).unsqueeze(-1) + 1e-16) # normalizing the phase + + # Using the final phase to reconstruct the waveforms + inverse = torch.istft(S.unsqueeze(-1) * angles, + self.n_fft, + self.hop_length, + win_length=self.win_length, + window=self.w, + center=self.center) + return inverse + + + +class Combined_Frequency_Periodicity(nn.Module): + """ + Vectorized version of the code in https://github.com/leo-so/VocalMelodyExtPatchCNN/blob/master/MelodyExt.py. + This feature is described in 'Combining Spectral and Temporal Representations for Multipitch Estimation of Polyphonic Music' + https://ieeexplore.ieee.org/document/7118691 + + Under development, please report any bugs you found + """ + def __init__(self,fr=2, fs=16000, hop_length=320, + window_size=2049, fc=80, tc=1/1000, + g=[0.24, 0.6, 1], NumPerOct=48): + super().__init__() + + self.window_size = window_size + self.hop_length = hop_length + + # variables for STFT part + self.N = int(fs/float(fr)) # Will be used to calculate padding + self.f = fs*np.linspace(0, 0.5, np.round(self.N//2), endpoint=True) # it won't be used but will be returned + self.pad_value = ((self.N-window_size)) + # Create window function, always blackmanharris? 
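+        # (The window appears to be fixed to Blackman-Harris, presumably to
+        # match the reference MelodyExt.py implementation. n_fft is N = fs/fr
+        # samples, so each FFT bin is fr Hz wide; because win_length < n_fft,
+        # torch.stft zero-pads the window_size-sample window up to N.)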
+ h = scipy.signal.blackmanharris(window_size).astype(np.float32) # window function for STFT + self.register_buffer('h',torch.tensor(h)) + + # variables for CFP + self.NumofLayer = np.size(g) + self.g = g + self.tc_idx = round(fs*tc) # index to filter out top tc_idx and bottom tc_idx bins + self.fc_idx = round(fc/fr) # index to filter out top fc_idx and bottom fc_idx bins + self.HighFreqIdx = int(round((1/tc)/fr)+1) + self.HighQuefIdx = int(round(fs/fc)+1) + + # attributes to be returned + self.f = self.f[:self.HighFreqIdx] + self.q = np.arange(self.HighQuefIdx)/float(fs) + + # filters for the final step + freq2logfreq_matrix, quef2logfreq_matrix = self.create_logfreq_matrix(self.f, self.q, fr, fc, tc, NumPerOct, fs) + self.register_buffer('freq2logfreq_matrix',torch.tensor(freq2logfreq_matrix.astype(np.float32))) + self.register_buffer('quef2logfreq_matrix',torch.tensor(quef2logfreq_matrix.astype(np.float32))) + + def _CFP(self, spec): + spec = torch.relu(spec).pow(self.g[0]) + + if self.NumofLayer >= 2: + for gc in range(1, self.NumofLayer): + if np.remainder(gc, 2) == 1: + ceps = torch.rfft(spec, 1, onesided=False)[:,:,:,0]/np.sqrt(self.N) + ceps = self.nonlinear_func(ceps, self.g[gc], self.tc_idx) + else: + spec = torch.rfft(ceps, 1, onesided=False)[:,:,:,0]/np.sqrt(self.N) + spec = self.nonlinear_func(spec, self.g[gc], self.fc_idx) + + return spec, ceps + + + def forward(self, x): + tfr0 = torch.stft(x, self.N, hop_length=self.hop_length, win_length=self.window_size, + window=self.h, onesided=False, pad_mode='constant') + tfr0 = torch.sqrt(tfr0.pow(2).sum(-1))/torch.norm(self.h) # calcuate magnitude + tfr0 = tfr0.transpose(1,2)[:,1:-1] #transpose F and T axis and discard first and last frames + # The transpose is necessary for rfft later + # (batch, timesteps, n_fft) + tfr, ceps = self._CFP(tfr0) + +# return tfr0 + # removing duplicate bins + tfr0 = tfr0[:,:,:int(round(self.N/2))] + tfr = tfr[:,:,:int(round(self.N/2))] + ceps = ceps[:,:,:int(round(self.N/2))] + + # Crop up to the highest frequency + tfr0 = tfr0[:,:,:self.HighFreqIdx] + tfr = tfr[:,:,:self.HighFreqIdx] + ceps = ceps[:,:,:self.HighQuefIdx] + tfrL0 = torch.matmul(self.freq2logfreq_matrix, tfr0.transpose(1,2)) + tfrLF = torch.matmul(self.freq2logfreq_matrix, tfr.transpose(1,2)) + tfrLQ = torch.matmul(self.quef2logfreq_matrix, ceps.transpose(1,2)) + Z = tfrLF * tfrLQ + + # Only need to calculate this once + self.t = np.arange(self.hop_length, + np.ceil(len(x)/float(self.hop_length))*self.hop_length, + self.hop_length) # it won't be used but will be returned + + return Z, tfrL0, tfrLF, tfrLQ + + def nonlinear_func(self, X, g, cutoff): + cutoff = int(cutoff) + if g!=0: + X = torch.relu(X) + X[:, :, :cutoff] = 0 + X[:, :, -cutoff:] = 0 + X = X.pow(g) + else: # when g=0, it converges to log + X = torch.log(X) + X[:, :, :cutoff] = 0 + X[:, :, -cutoff:] = 0 + return X + + def create_logfreq_matrix(self, f, q, fr, fc, tc, NumPerOct, fs): + StartFreq = fc + StopFreq = 1/tc + Nest = int(np.ceil(np.log2(StopFreq/StartFreq))*NumPerOct) + central_freq = [] # A list holding the frequencies in log scale + + for i in range(0, Nest): + CenFreq = StartFreq*pow(2, float(i)/NumPerOct) + if CenFreq < StopFreq: + central_freq.append(CenFreq) + else: + break + + Nest = len(central_freq) + freq_band_transformation = np.zeros((Nest-1, len(f)), dtype=np.float) + + # Calculating the freq_band_transformation + for i in range(1, Nest-1): + l = int(round(central_freq[i-1]/fr)) + r = int(round(central_freq[i+1]/fr)+1) + #rounding1 + if l >= r-1: + 
freq_band_transformation[i, l] = 1 + else: + for j in range(l, r): + if f[j] > central_freq[i-1] and f[j] < central_freq[i]: + freq_band_transformation[i, j] = (f[j] - central_freq[i-1]) / (central_freq[i] - central_freq[i-1]) + elif f[j] > central_freq[i] and f[j] < central_freq[i+1]: + freq_band_transformation[i, j] = (central_freq[i + 1] - f[j]) / (central_freq[i + 1] - central_freq[i]) + + # Calculating the quef_band_transformation + f = 1/q # divide by 0, do I need to fix this? + quef_band_transformation = np.zeros((Nest-1, len(f)), dtype=np.float) + for i in range(1, Nest-1): + for j in range(int(round(fs/central_freq[i+1])), int(round(fs/central_freq[i-1])+1)): + if f[j] > central_freq[i-1] and f[j] < central_freq[i]: + quef_band_transformation[i, j] = (f[j] - central_freq[i-1])/(central_freq[i] - central_freq[i-1]) + elif f[j] > central_freq[i] and f[j] < central_freq[i+1]: + quef_band_transformation[i, j] = (central_freq[i + 1] - f[j]) / (central_freq[i + 1] - central_freq[i]) + + return freq_band_transformation, quef_band_transformation + + +class CFP(nn.Module): + """ + This is the modified version so that the number of timesteps fits with other classes + + Under development, please report any bugs you found + """ + def __init__(self,fr=2, fs=16000, hop_length=320, + window_size=2049, fc=80, tc=1/1000, + g=[0.24, 0.6, 1], NumPerOct=48): + super().__init__() + + self.window_size = window_size + self.hop_length = hop_length + + # variables for STFT part + self.N = int(fs/float(fr)) # Will be used to calculate padding + self.f = fs*np.linspace(0, 0.5, np.round(self.N//2), endpoint=True) # it won't be used but will be returned + self.pad_value = ((self.N-window_size)) + # Create window function, always blackmanharris? + h = scipy.signal.blackmanharris(window_size).astype(np.float32) # window function for STFT + self.register_buffer('h',torch.tensor(h)) + + # variables for CFP + self.NumofLayer = np.size(g) + self.g = g + self.tc_idx = round(fs*tc) # index to filter out top tc_idx and bottom tc_idx bins + self.fc_idx = round(fc/fr) # index to filter out top fc_idx and bottom fc_idx bins + self.HighFreqIdx = int(round((1/tc)/fr)+1) + self.HighQuefIdx = int(round(fs/fc)+1) + + # attributes to be returned + self.f = self.f[:self.HighFreqIdx] + self.q = np.arange(self.HighQuefIdx)/float(fs) + + # filters for the final step + freq2logfreq_matrix, quef2logfreq_matrix = self.create_logfreq_matrix(self.f, self.q, fr, fc, tc, NumPerOct, fs) + self.register_buffer('freq2logfreq_matrix',torch.tensor(freq2logfreq_matrix.astype(np.float32))) + self.register_buffer('quef2logfreq_matrix',torch.tensor(quef2logfreq_matrix.astype(np.float32))) + + def _CFP(self, spec): + spec = torch.relu(spec).pow(self.g[0]) + + if self.NumofLayer >= 2: + for gc in range(1, self.NumofLayer): + if np.remainder(gc, 2) == 1: + ceps = torch.rfft(spec, 1, onesided=False)[:,:,:,0]/np.sqrt(self.N) + ceps = self.nonlinear_func(ceps, self.g[gc], self.tc_idx) + else: + spec = torch.rfft(ceps, 1, onesided=False)[:,:,:,0]/np.sqrt(self.N) + spec = self.nonlinear_func(spec, self.g[gc], self.fc_idx) + + return spec, ceps + + + def forward(self, x): + tfr0 = torch.stft(x, self.N, hop_length=self.hop_length, win_length=self.window_size, + window=self.h, onesided=False, pad_mode='constant') + tfr0 = torch.sqrt(tfr0.pow(2).sum(-1))/torch.norm(self.h) # calcuate magnitude + tfr0 = tfr0.transpose(1,2) #transpose F and T axis and discard first and last frames + # The transpose is necessary for rfft later + # (batch, timesteps, n_fft) + 
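+        # _CFP alternates between the generalized spectrum and cepstrum:
+        # odd-numbered layers rfft the spectrum into a cepstrum, even ones
+        # rfft it back; each pass rectifies the result, zeroes out the lowest
+        # tc_idx (or fc_idx) and highest bins, and compresses it with the
+        # exponent g[gc] (see _CFP and nonlinear_func in this class).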
tfr, ceps = self._CFP(tfr0) + +# return tfr0 + # removing duplicate bins + tfr0 = tfr0[:,:,:int(round(self.N/2))] + tfr = tfr[:,:,:int(round(self.N/2))] + ceps = ceps[:,:,:int(round(self.N/2))] + + # Crop up to the highest frequency + tfr0 = tfr0[:,:,:self.HighFreqIdx] + tfr = tfr[:,:,:self.HighFreqIdx] + ceps = ceps[:,:,:self.HighQuefIdx] + tfrL0 = torch.matmul(self.freq2logfreq_matrix, tfr0.transpose(1,2)) + tfrLF = torch.matmul(self.freq2logfreq_matrix, tfr.transpose(1,2)) + tfrLQ = torch.matmul(self.quef2logfreq_matrix, ceps.transpose(1,2)) + Z = tfrLF * tfrLQ + + # Only need to calculate this once + self.t = np.arange(self.hop_length, + np.ceil(len(x)/float(self.hop_length))*self.hop_length, + self.hop_length) # it won't be used but will be returned + + return Z#, tfrL0, tfrLF, tfrLQ + + def nonlinear_func(self, X, g, cutoff): + cutoff = int(cutoff) + if g!=0: + X = torch.relu(X) + X[:, :, :cutoff] = 0 + X[:, :, -cutoff:] = 0 + X = X.pow(g) + else: # when g=0, it converges to log + X = torch.log(X) + X[:, :, :cutoff] = 0 + X[:, :, -cutoff:] = 0 + return X + + def create_logfreq_matrix(self, f, q, fr, fc, tc, NumPerOct, fs): + StartFreq = fc + StopFreq = 1/tc + Nest = int(np.ceil(np.log2(StopFreq/StartFreq))*NumPerOct) + central_freq = [] # A list holding the frequencies in log scale + + for i in range(0, Nest): + CenFreq = StartFreq*pow(2, float(i)/NumPerOct) + if CenFreq < StopFreq: + central_freq.append(CenFreq) + else: + break + + Nest = len(central_freq) + freq_band_transformation = np.zeros((Nest-1, len(f)), dtype=np.float) + + # Calculating the freq_band_transformation + for i in range(1, Nest-1): + l = int(round(central_freq[i-1]/fr)) + r = int(round(central_freq[i+1]/fr)+1) + #rounding1 + if l >= r-1: + freq_band_transformation[i, l] = 1 + else: + for j in range(l, r): + if f[j] > central_freq[i-1] and f[j] < central_freq[i]: + freq_band_transformation[i, j] = (f[j] - central_freq[i-1]) / (central_freq[i] - central_freq[i-1]) + elif f[j] > central_freq[i] and f[j] < central_freq[i+1]: + freq_band_transformation[i, j] = (central_freq[i + 1] - f[j]) / (central_freq[i + 1] - central_freq[i]) + + # Calculating the quef_band_transformation + f = 1/q # divide by 0, do I need to fix this? 
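+        # Note: q[0] == 0, so f[0] becomes inf here (with a numpy
+        # RuntimeWarning). This looks harmless in practice: the j-loop below
+        # starts at round(fs/central_freq[i+1]) >= 1, so f[0] is never read.
+        # Wrapping the division in np.errstate(divide='ignore') would
+        # silence the warning if desired.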
+
+        quef_band_transformation = np.zeros((Nest-1, len(f)), dtype=np.float)
+        for i in range(1, Nest-1):
+            for j in range(int(round(fs/central_freq[i+1])), int(round(fs/central_freq[i-1])+1)):
+                if f[j] > central_freq[i-1] and f[j] < central_freq[i]:
+                    quef_band_transformation[i, j] = (f[j] - central_freq[i-1])/(central_freq[i] - central_freq[i-1])
+                elif f[j] > central_freq[i] and f[j] < central_freq[i+1]:
+                    quef_band_transformation[i, j] = (central_freq[i + 1] - f[j]) / (central_freq[i + 1] - central_freq[i])
+
+        return freq_band_transformation, quef_band_transformation
diff --git a/third_party/nnAudio/nnAudio/__init__.py b/third_party/nnAudio/nnAudio/__init__.py
new file mode 100755
index 000000000..984fc572f
--- /dev/null
+++ b/third_party/nnAudio/nnAudio/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.2.2"
\ No newline at end of file
diff --git a/third_party/nnAudio/nnAudio/librosa_functions.py b/third_party/nnAudio/nnAudio/librosa_functions.py
new file mode 100755
index 000000000..0d7792170
--- /dev/null
+++ b/third_party/nnAudio/nnAudio/librosa_functions.py
@@ -0,0 +1,490 @@
+"""
+Module containing functions cloned from librosa,
+so that nnAudio does not break when librosa is updated
+"""
+
+import numpy as np
+import warnings
+
+# librosa raises librosa.util.exceptions.ParameterError; since this module
+# deliberately avoids importing librosa, a minimal stand-in is defined here so
+# that the `raise ParameterError(...)` calls below do not fail with a NameError
+class ParameterError(Exception):
+    pass
+
+### ----------------Functions for generating kernels for Mel Spectrogram------------ ###
+# This code is equivalent to `from librosa.filters import mel`.
+# By doing so, we can run nnAudio without installing librosa
+def fft2gammatonemx(sr=20000, n_fft=2048, n_bins=64, width=1.0, fmin=0.0,
+                    fmax=11025, maxlen=1024):
+    """
+    # Ellis' description in MATLAB:
+    # [wts,cfreqa] = fft2gammatonemx(nfft, sr, nfilts, width, minfreq, maxfreq, maxlen)
+    # Generate a matrix of weights to combine FFT bins into
+    # Gammatone bins.  nfft defines the source FFT size at
+    # sampling rate sr.  Optional nfilts specifies the number of
+    # output bands required (default 64), and width is the
+    # constant width of each band in Bark (default 1).
+    # minfreq, maxfreq specify range covered in Hz (100, sr/2).
+    # While wts has nfft columns, the second half are all zero.
+    # Hence, aud spectrum is
+    # fft2gammatonemx(nfft,sr)*abs(fft(xincols,nfft));
+    # maxlen truncates the rows to this many bins.
+    # cfreqs returns the actual center frequencies of each
+    # gammatone band in Hz.
+    #
+    # 2009/02/22 02:29:25 Dan Ellis dpwe@ee.columbia.edu based on rastamat/audspec.m
+    # Sat May 27 15:37:50 2017 Maddie Cusimano, mcusi@mit.edu 27 May 2017: convert to python
+    """
+
+    wts = np.zeros([n_bins, n_fft], dtype=np.float32)
+
+    # after Slaney's MakeERBFilters
+    EarQ = 9.26449;
+    minBW = 24.7;
+    order = 1;
+
+    nFr = np.array(range(n_bins)) + 1
+    em = EarQ * minBW
+    cfreqs = (fmax + em) * np.exp(nFr * (-np.log(fmax + em) + np.log(fmin + em)) / n_bins) - em
+    cfreqs = cfreqs[::-1]
+
+    GTord = 4
+    ucircArray = np.array(range(int(n_fft / 2 + 1)))
+    ucirc = np.exp(1j * 2 * np.pi * ucircArray / n_fft);
+    # justpoles = 0 :taking out the 'if' corresponding to this.
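+    # ERB per Glasberg & Moore, as used in Slaney's MakeERBFilters:
+    #     ERB(f) = width * ((f/EarQ)**order + minBW**order)**(1/order)
+    # B below converts this bandwidth into a pole radius via r = exp(-B/sr).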
+ + ERB = width * np.power(np.power(cfreqs / EarQ, order) + np.power(minBW, order), 1 / order); + B = 1.019 * 2 * np.pi * ERB; + r = np.exp(-B / sr) + theta = 2 * np.pi * cfreqs / sr + pole = r * np.exp(1j * theta) + T = 1 / sr + ebt = np.exp(B * T); + cpt = 2 * cfreqs * np.pi * T; + ccpt = 2 * T * np.cos(cpt); + scpt = 2 * T * np.sin(cpt); + A11 = -np.divide(np.divide(ccpt, ebt) + np.divide(np.sqrt(3 + 2 ** 1.5) * scpt, ebt), 2); + A12 = -np.divide(np.divide(ccpt, ebt) - np.divide(np.sqrt(3 + 2 ** 1.5) * scpt, ebt), 2); + A13 = -np.divide(np.divide(ccpt, ebt) + np.divide(np.sqrt(3 - 2 ** 1.5) * scpt, ebt), 2); + A14 = -np.divide(np.divide(ccpt, ebt) - np.divide(np.sqrt(3 - 2 ** 1.5) * scpt, ebt), 2); + zros = -np.array([A11, A12, A13, A14]) / T; + wIdx = range(int(n_fft / 2 + 1)) + gain = np.abs((-2 * np.exp(4 * 1j * cfreqs * np.pi * T) * T + 2 * np.exp( + -(B * T) + 2 * 1j * cfreqs * np.pi * T) * T * ( + np.cos(2 * cfreqs * np.pi * T) - np.sqrt(3 - 2 ** (3 / 2)) * np.sin( + 2 * cfreqs * np.pi * T))) * (-2 * np.exp(4 * 1j * cfreqs * np.pi * T) * T + 2 * np.exp( + -(B * T) + 2 * 1j * cfreqs * np.pi * T) * T * (np.cos(2 * cfreqs * np.pi * T) + np.sqrt( + 3 - 2 ** (3 / 2)) * np.sin(2 * cfreqs * np.pi * T))) * ( + -2 * np.exp(4 * 1j * cfreqs * np.pi * T) * T + 2 * np.exp( + -(B * T) + 2 * 1j * cfreqs * np.pi * T) * T * ( + np.cos(2 * cfreqs * np.pi * T) - np.sqrt(3 + 2 ** (3 / 2)) * np.sin( + 2 * cfreqs * np.pi * T))) * ( + -2 * np.exp(4 * 1j * cfreqs * np.pi * T) * T + 2 * np.exp( + -(B * T) + 2 * 1j * cfreqs * np.pi * T) * T * ( + np.cos(2 * cfreqs * np.pi * T) + np.sqrt(3 + 2 ** (3 / 2)) * np.sin( + 2 * cfreqs * np.pi * T))) / ( + -2 / np.exp(2 * B * T) - 2 * np.exp(4 * 1j * cfreqs * np.pi * T) + 2 * ( + 1 + np.exp(4 * 1j * cfreqs * np.pi * T)) / np.exp(B * T)) ** 4); + # in MATLAB, there used to be 64 where here it says n_bins: + wts[:, wIdx] = ((T ** 4) / np.reshape(gain, (n_bins, 1))) * np.abs( + ucirc - np.reshape(zros[0], (n_bins, 1))) * np.abs(ucirc - np.reshape(zros[1], (n_bins, 1))) * np.abs( + ucirc - np.reshape(zros[2], (n_bins, 1))) * np.abs(ucirc - np.reshape(zros[3], (n_bins, 1))) * (np.abs( + np.power(np.multiply(np.reshape(pole, (n_bins, 1)) - ucirc, np.conj(np.reshape(pole, (n_bins, 1))) - ucirc), + -GTord))); + wts = wts[:, range(maxlen)]; + + return wts, cfreqs + +def gammatone(sr, n_fft, n_bins=64, fmin=20.0, fmax=None, htk=False, + norm=1, dtype=np.float32): + """Create a Filterbank matrix to combine FFT bins into Gammatone bins + Parameters + ---------- + sr : number > 0 [scalar] + sampling rate of the incoming signal + n_fft : int > 0 [scalar] + number of FFT components + n_bins : int > 0 [scalar] + number of Mel bands to generate + fmin : float >= 0 [scalar] + lowest frequency (in Hz) + fmax : float >= 0 [scalar] + highest frequency (in Hz). + If `None`, use `fmax = sr / 2.0` + htk : bool [scalar] + use HTK formula instead of Slaney + norm : {None, 1, np.inf} [scalar] + if 1, divide the triangular mel weights by the width of the mel band + (area normalization). Otherwise, leave all the triangles aiming for + a peak value of 1.0 + dtype : np.dtype + The data type of the output basis. + By default, uses 32-bit (single-precision) floating point. 
+ Returns + ------- + G : np.ndarray [shape=(n_bins, 1 + n_fft/2)] + Gammatone transform matrix + """ + + if fmax is None: + fmax = float(sr) / 2 + n_bins = int(n_bins) + + weights,_ = fft2gammatonemx(sr=sr, n_fft=n_fft, n_bins=n_bins, fmin=fmin, fmax=fmax, maxlen=int(n_fft//2+1)) + + return (1/n_fft)*weights + +def mel_to_hz(mels, htk=False): + """Convert mel bin numbers to frequencies + Examples + -------- + >>> librosa.mel_to_hz(3) + 200. + >>> librosa.mel_to_hz([1,2,3,4,5]) + array([ 66.667, 133.333, 200. , 266.667, 333.333]) + Parameters + ---------- + mels : np.ndarray [shape=(n,)], float + mel bins to convert + htk : bool + use HTK formula instead of Slaney + Returns + ------- + frequencies : np.ndarray [shape=(n,)] + input mels in Hz + See Also + -------- + hz_to_mel + """ + + mels = np.asanyarray(mels) + + if htk: + return 700.0 * (10.0**(mels / 2595.0) - 1.0) + + # Fill in the linear scale + f_min = 0.0 + f_sp = 200.0 / 3 + freqs = f_min + f_sp * mels + + # And now the nonlinear scale + min_log_hz = 1000.0 # beginning of log region (Hz) + min_log_mel = (min_log_hz - f_min) / f_sp # same (Mels) + logstep = np.log(6.4) / 27.0 # step size for log region + + if mels.ndim: + # If we have vector data, vectorize + log_t = (mels >= min_log_mel) + freqs[log_t] = min_log_hz * np.exp(logstep * (mels[log_t] - min_log_mel)) + elif mels >= min_log_mel: + # If we have scalar data, check directly + freqs = min_log_hz * np.exp(logstep * (mels - min_log_mel)) + + return freqs + +def hz_to_mel(frequencies, htk=False): + """Convert Hz to Mels + Examples + -------- + >>> librosa.hz_to_mel(60) + 0.9 + >>> librosa.hz_to_mel([110, 220, 440]) + array([ 1.65, 3.3 , 6.6 ]) + Parameters + ---------- + frequencies : number or np.ndarray [shape=(n,)] , float + scalar or array of frequencies + htk : bool + use HTK formula instead of Slaney + Returns + ------- + mels : number or np.ndarray [shape=(n,)] + input frequencies in Mels + See Also + -------- + mel_to_hz + """ + + frequencies = np.asanyarray(frequencies) + + if htk: + return 2595.0 * np.log10(1.0 + frequencies / 700.0) + + # Fill in the linear part + f_min = 0.0 + f_sp = 200.0 / 3 + + mels = (frequencies - f_min) / f_sp + + # Fill in the log-scale part + + min_log_hz = 1000.0 # beginning of log region (Hz) + min_log_mel = (min_log_hz - f_min) / f_sp # same (Mels) + logstep = np.log(6.4) / 27.0 # step size for log region + + if frequencies.ndim: + # If we have array data, vectorize + log_t = (frequencies >= min_log_hz) + mels[log_t] = min_log_mel + np.log(frequencies[log_t]/min_log_hz) / logstep + elif frequencies >= min_log_hz: + # If we have scalar data, heck directly + mels = min_log_mel + np.log(frequencies / min_log_hz) / logstep + + return mels + +def fft_frequencies(sr=22050, n_fft=2048): + '''Alternative implementation of `np.fft.fftfreq` + Parameters + ---------- + sr : number > 0 [scalar] + Audio sampling rate + n_fft : int > 0 [scalar] + FFT window size + Returns + ------- + freqs : np.ndarray [shape=(1 + n_fft/2,)] + Frequencies `(0, sr/n_fft, 2*sr/n_fft, ..., sr/2)` + Examples + -------- + >>> librosa.fft_frequencies(sr=22050, n_fft=16) + array([ 0. , 1378.125, 2756.25 , 4134.375, + 5512.5 , 6890.625, 8268.75 , 9646.875, 11025. ]) + ''' + + return np.linspace(0, + float(sr) / 2, + int(1 + n_fft//2), + endpoint=True) + +def mel_frequencies(n_mels=128, fmin=0.0, fmax=11025.0, htk=False): + """ + This function is cloned from librosa 0.7. + Please refer to the original + `documentation `__ + for more info. 
+ + Parameters + ---------- + n_mels : int > 0 [scalar] + Number of mel bins. + + fmin : float >= 0 [scalar] + Minimum frequency (Hz). + + fmax : float >= 0 [scalar] + Maximum frequency (Hz). + + htk : bool + If True, use HTK formula to convert Hz to mel. + Otherwise (False), use Slaney's Auditory Toolbox. + + Returns + ------- + bin_frequencies : ndarray [shape=(n_mels,)] + Vector of n_mels frequencies in Hz which are uniformly spaced on the Mel + axis. + + Examples + -------- + >>> librosa.mel_frequencies(n_mels=40) + array([ 0. , 85.317, 170.635, 255.952, + 341.269, 426.586, 511.904, 597.221, + 682.538, 767.855, 853.173, 938.49 , + 1024.856, 1119.114, 1222.042, 1334.436, + 1457.167, 1591.187, 1737.532, 1897.337, + 2071.84 , 2262.393, 2470.47 , 2697.686, + 2945.799, 3216.731, 3512.582, 3835.643, + 4188.417, 4573.636, 4994.285, 5453.621, + 5955.205, 6502.92 , 7101.009, 7754.107, + 8467.272, 9246.028, 10096.408, 11025. ]) + """ + + # 'Center freqs' of mel bands - uniformly spaced between limits + min_mel = hz_to_mel(fmin, htk=htk) + max_mel = hz_to_mel(fmax, htk=htk) + + mels = np.linspace(min_mel, max_mel, n_mels) + + return mel_to_hz(mels, htk=htk) + +def mel(sr, n_fft, n_mels=128, fmin=0.0, fmax=None, htk=False, + norm=1, dtype=np.float32): + """ + This function is cloned from librosa 0.7. + Please refer to the original + `documentation `__ + for more info. + Create a Filterbank matrix to combine FFT bins into Mel-frequency bins + + + Parameters + ---------- + sr : number > 0 [scalar] + sampling rate of the incoming signal + n_fft : int > 0 [scalar] + number of FFT components + n_mels : int > 0 [scalar] + number of Mel bands to generate + fmin : float >= 0 [scalar] + lowest frequency (in Hz) + fmax : float >= 0 [scalar] + highest frequency (in Hz). + If `None`, use `fmax = sr / 2.0` + htk : bool [scalar] + use HTK formula instead of Slaney + norm : {None, 1, np.inf} [scalar] + if 1, divide the triangular mel weights by the width of the mel band + (area normalization). Otherwise, leave all the triangles aiming for + a peak value of 1.0 + dtype : np.dtype + The data type of the output basis. + By default, uses 32-bit (single-precision) floating point. + + Returns + ------- + M : np.ndarray [shape=(n_mels, 1 + n_fft/2)] + Mel transform matrix + + Notes + ----- + This function caches at level 10. + + Examples + -------- + >>> melfb = librosa.filters.mel(22050, 2048) + >>> melfb + array([[ 0. , 0.016, ..., 0. , 0. ], + [ 0. , 0. , ..., 0. , 0. ], + ..., + [ 0. , 0. , ..., 0. , 0. ], + [ 0. , 0. , ..., 0. , 0. ]]) + Clip the maximum frequency to 8KHz + >>> librosa.filters.mel(22050, 2048, fmax=8000) + array([[ 0. , 0.02, ..., 0. , 0. ], + [ 0. , 0. , ..., 0. , 0. ], + ..., + [ 0. , 0. , ..., 0. , 0. ], + [ 0. , 0. , ..., 0. , 0. 
]])
+    >>> import matplotlib.pyplot as plt
+    >>> plt.figure()
+    >>> librosa.display.specshow(melfb, x_axis='linear')
+    >>> plt.ylabel('Mel filter')
+    >>> plt.title('Mel filter bank')
+    >>> plt.colorbar()
+    >>> plt.tight_layout()
+    >>> plt.show()
+    """
+
+    if fmax is None:
+        fmax = float(sr) / 2
+
+    if norm is not None and norm != 1 and norm != np.inf:
+        raise ParameterError('Unsupported norm: {}'.format(repr(norm)))
+
+    # Initialize the weights
+    n_mels = int(n_mels)
+    weights = np.zeros((n_mels, int(1 + n_fft // 2)), dtype=dtype)
+
+    # Center freqs of each FFT bin
+    fftfreqs = fft_frequencies(sr=sr, n_fft=n_fft)
+
+    # 'Center freqs' of mel bands - uniformly spaced between limits
+    mel_f = mel_frequencies(n_mels + 2, fmin=fmin, fmax=fmax, htk=htk)
+
+    fdiff = np.diff(mel_f)
+    ramps = np.subtract.outer(mel_f, fftfreqs)
+
+    for i in range(n_mels):
+        # lower and upper slopes for all bins
+        lower = -ramps[i] / fdiff[i]
+        upper = ramps[i+2] / fdiff[i+1]
+
+        # .. then intersect them with each other and zero
+        weights[i] = np.maximum(0, np.minimum(lower, upper))
+
+    if norm == 1:
+        # Slaney-style mel is scaled to be approx constant energy per channel
+        enorm = 2.0 / (mel_f[2:n_mels+2] - mel_f[:n_mels])
+        weights *= enorm[:, np.newaxis]
+
+    # Only check weights if f_mel[0] is positive
+    if not np.all((mel_f[:-2] == 0) | (weights.max(axis=1) > 0)):
+        # This means we have an empty channel somewhere
+        warnings.warn('Empty filters detected in mel frequency basis. '
+                      'Some channels will produce empty responses. '
+                      'Try increasing your sampling rate (and fmax) or '
+                      'reducing n_mels.')
+
+    return weights
+### ------------------End of Functions for generating kernels for Mel Spectrogram ----------------###
+
+
+### ------------------Functions for making STFT same as librosa ---------------------------------###
+def pad_center(data, size, axis=-1, **kwargs):
+    '''Wrapper for np.pad to automatically center an array prior to padding.
+
+    This is analogous to `str.center()`
+
+    Examples
+    --------
+    >>> # Generate a vector
+    >>> data = np.ones(5)
+    >>> librosa.util.pad_center(data, 10, mode='constant')
+    array([ 0.,  0.,  1.,  1.,  1.,  1.,  1.,  0.,  0.,  0.])
+
+    >>> # Pad a matrix along its first dimension
+    >>> data = np.ones((3, 5))
+    >>> librosa.util.pad_center(data, 7, axis=0)
+    array([[ 0.,  0.,  0.,  0.,  0.],
+           [ 0.,  0.,  0.,  0.,  0.],
+           [ 1.,  1.,  1.,  1.,  1.],
+           [ 1.,  1.,  1.,  1.,  1.],
+           [ 1.,  1.,  1.,  1.,  1.],
+           [ 0.,  0.,  0.,  0.,  0.],
+           [ 0.,  0.,  0.,  0.,  0.]])
+    >>> # Or its second dimension
+    >>> librosa.util.pad_center(data, 7, axis=1)
+    array([[ 0.,  1.,  1.,  1.,  1.,  1.,  0.],
+           [ 0.,  1.,  1.,  1.,  1.,  1.,  0.],
+           [ 0.,  1.,  1.,  1.,  1.,  1.,  0.]])
+
+    Parameters
+    ----------
+    data : np.ndarray
+        Vector to be padded and centered
+
+    size : int >= len(data) [scalar]
+        Length to pad `data`
+
+    axis : int
+        Axis along which to pad and center the data
+
+    kwargs : additional keyword arguments
+        arguments passed to `np.pad()`
+
+    Returns
+    -------
+    data_padded : np.ndarray
+        `data` centered and padded to length `size` along the
+        specified axis
+
+    Raises
+    ------
+    ParameterError
+        If `size < data.shape[axis]`
+
+    See Also
+    --------
+    numpy.pad
+    '''
+
+    kwargs.setdefault('mode', 'constant')
+
+    n = data.shape[axis]
+
+    lpad = int((size - n) // 2)
+
+    lengths = [(0, 0)] * data.ndim
+    lengths[axis] = (lpad, int(size - n - lpad))
+
+    if lpad < 0:
+        raise ParameterError(('Target size ({:d}) must be '
+                              'at least input size ({:d})').format(size, n))
+
+    return np.pad(data, lengths, **kwargs)
+
+### ------------------End of functions for making STFT same as librosa ---------------------------###
diff --git a/third_party/nnAudio/nnAudio/utils.py b/third_party/nnAudio/nnAudio/utils.py
new file mode 100644
index 000000000..a5ac366cc
--- /dev/null
+++ b/third_party/nnAudio/nnAudio/utils.py
@@ -0,0 +1,535 @@
+"""
+Module containing helper functions, such as overlap-add and Fourier kernel generators
+"""
+
+import torch
+from torch.nn.functional import conv1d, fold
+
+import numpy as np
+from time import time
+import math
+from scipy.signal import get_window
+from scipy import signal
+from scipy import fft
+import warnings
+
+from nnAudio.librosa_functions import *
+
+## --------------------------- Filter Design ---------------------------##
+def torch_window_sumsquare(w, n_frames, stride, n_fft, power=2):
+    w_stacks = w.unsqueeze(-1).repeat((1,n_frames)).unsqueeze(0)
+    # Window length + stride*(frames-1)
+    output_len = w_stacks.shape[1] + stride*(w_stacks.shape[2]-1)
+    return fold(w_stacks**power, (1,output_len), kernel_size=(1,n_fft), stride=stride)
+
+def overlap_add(X, stride):
+    n_fft = X.shape[1]
+    output_len = n_fft + stride*(X.shape[2]-1)
+
+    return fold(X, (1,output_len), kernel_size=(1,n_fft), stride=stride).flatten(1)
+
+def uniform_distribution(r1,r2, *size, device):
+    return (r1 - r2) * torch.rand(*size, device=device) + r2
+
+def extend_fbins(X):
+    """Extending the number of frequency bins from `n_fft//2+1` back to `n_fft` by
+       reversing all bins except DC and Nyquist, and appending them on top of the existing spectrogram"""
+    X_upper = torch.flip(X[:,1:-1],(0,1))
+    X_upper[:,:,:,1] = -X_upper[:,:,:,1] # For the imaginary part, it is an odd function
+    return torch.cat((X[:, :, :], X_upper), 1)
+
+
+def downsampling_by_n(x, filterKernel, n):
+    """A helper function that downsamples the audio by an arbitrary factor n.
+       It is used in CQT2010 and CQT2010v2.
+
+    Parameters
+    ----------
+    x : torch.Tensor
+        The input waveform in ``torch.Tensor`` type with shape ``(batch, 1, len_audio)``
+
+    filterKernel : torch.Tensor
+        Filter kernel in ``torch.Tensor`` type with shape ``(1, 1, len_kernel)``
+
+    n : int
+        The downsampling factor
+
+    Returns
+    -------
+    torch.Tensor
+        The downsampled waveform
+
+    Examples
+    --------
+    >>> x_down = downsampling_by_n(x, filterKernel, 2)
+    """
+
+    x = conv1d(x,filterKernel,stride=n, padding=(filterKernel.shape[-1]-1)//2)
+    return x
+
+
+def downsampling_by_2(x, filterKernel):
+    """A helper function that downsamples the audio by half. It is used in CQT2010 and CQT2010v2
+
+    Parameters
+    ----------
+    x : torch.Tensor
+        The input waveform in ``torch.Tensor`` type with shape ``(batch, 1, len_audio)``
+
+    filterKernel : torch.Tensor
+        Filter kernel in ``torch.Tensor`` type with shape ``(1, 1, len_kernel)``
+
+    Returns
+    -------
+    torch.Tensor
+        The downsampled waveform
+
+    Examples
+    --------
+    >>> x_down = downsampling_by_2(x, filterKernel)
+    """
+
+    x = conv1d(x,filterKernel,stride=2, padding=(filterKernel.shape[-1]-1)//2)
+    return x
+
+
+## Basic tools for computation ##
+def nextpow2(A):
+    """A helper function that returns the exponent of the smallest power of 2
+    that is not less than ``A``.
+
+    Parameters
+    ----------
+    A : float
+        A float number that is going to be rounded up to the nearest power of 2
+
+    Returns
+    -------
+    int
+        The exponent ``e`` such that ``2**e`` is the smallest power of 2 >= ``A``
+
+    Examples
+    --------
+
+    >>> nextpow2(6)
+    3
+    """
+
+    return int(np.ceil(np.log2(A)))
+
+def prepow2(A):
+    """A helper function that returns the exponent of the largest power of 2
+    that does not exceed ``A``.
+
+    Parameters
+    ----------
+    A : float
+        A float number that is going to be rounded down to the nearest power of 2
+
+    Returns
+    -------
+    int
+        The exponent ``e`` such that ``2**e`` is the largest power of 2 <= ``A``
+
+    Examples
+    --------
+
+    >>> prepow2(6)
+    2
+    """
+
+    return int(np.floor(np.log2(A)))
+
+
+def complex_mul(cqt_filter, stft):
+    """Since PyTorch does not support complex numbers and their operations,
+       we need to write our own complex multiplication function. This one is specially
+       designed for CQT usage.
+
+    Parameters
+    ----------
+    cqt_filter : tuple of torch.Tensor
+        The tuple is in the format of ``(real_torch_tensor, imag_torch_tensor)``
+
+    stft : tuple of torch.Tensor
+        The STFT result, also in the format of ``(real_torch_tensor, imag_torch_tensor)``
+
+    Returns
+    -------
+    tuple of torch.Tensor
+        The output is in the format of ``(real_torch_tensor, imag_torch_tensor)``
+    """
+
+    cqt_filter_real = cqt_filter[0]
+    cqt_filter_imag = cqt_filter[1]
+    fourier_real = stft[0]
+    fourier_imag = stft[1]
+
+    CQT_real = torch.matmul(cqt_filter_real, fourier_real) - torch.matmul(cqt_filter_imag, fourier_imag)
+    CQT_imag = torch.matmul(cqt_filter_real, fourier_imag) + torch.matmul(cqt_filter_imag, fourier_real)
+
+    return CQT_real, CQT_imag
+
+
+def broadcast_dim(x):
+    """
+    Auto broadcast input so that it can fit into a Conv1d
+    """
+
+    if x.dim() == 2:
+        x = x[:, None, :]
+    elif x.dim() == 1:
+        # If nn.DataParallel is used, this broadcast doesn't work
+        x = x[None, None, :]
+    elif x.dim() == 3:
+        pass
+    else:
+        raise ValueError("Only support input with shape = (batch, len) or shape = (len)")
+    return x
+
+
+def broadcast_dim_conv2d(x):
+    """
+    Auto broadcast input so that it can fit into a Conv2d
+    """
+
+    if x.dim() == 3:
+        x = x[:, None, :,:]
+
+    else:
+        raise ValueError("Only support input with shape = (batch, freq_bins, timesteps)")
+    return x
+
+
+## Kernel generation functions ##
+def create_fourier_kernels(n_fft, win_length=None, freq_bins=None, fmin=50,fmax=6000, sr=44100,
+                           freq_scale='linear', window='hann', verbose=True):
+    """ This function creates the Fourier Kernel for STFT, Melspectrogram and CQT.
+        Most of the parameters follow librosa conventions. Part of the code comes from
+        pytorch_musicnet. https://github.com/jthickstun/pytorch_musicnet
+
+        Parameters
+        ----------
+        n_fft : int
+            The window size
+
+        win_length : int
+            The size of the window. Default is ``None``, which means ``win_length = n_fft``
+
+        freq_bins : int
+            Number of frequency bins. Default is ``None``, which means ``n_fft//2+1`` bins
+
+        fmin : int
+            The starting frequency for the lowest frequency bin.
+            If freq_scale is ``no``, this argument does nothing.
+
+        fmax : int
+            The ending frequency for the highest frequency bin.
+            If freq_scale is ``no``, this argument does nothing.
+
+        sr : int
+            The sampling rate for the input audio. It is used to calculate the correct ``fmin`` and ``fmax``.
+            Setting the correct sampling rate is very important for calculating the correct frequency.
+
+        freq_scale: 'linear', 'log', or 'no'
+            Determine the spacing between each frequency bin.
+            When 'linear' or 'log' is used, the bin spacing can be controlled by ``fmin`` and ``fmax``.
+            If 'no' is used, the bin will start at 0Hz and end at Nyquist frequency with linear spacing.
+
+        window : str
+            The window function; it uses ``scipy.signal.get_window``. Default is 'hann'.
+
+        Returns
+        -------
+        wsin : numpy.array
+            Imaginary Fourier Kernel with the shape ``(freq_bins, 1, n_fft)``
+
+        wcos : numpy.array
+            Real Fourier Kernel with the shape ``(freq_bins, 1, n_fft)``
+
+        bins2freq : list
+            Mapping each frequency bin to frequency in Hz.
+
+        binslist : list
+            The normalized frequency ``k`` in the digital domain.
+            This ``k`` is the frequency index in the Discrete Fourier Transform equation
+            $$X[k] = \sum_{n=0}^{N-1} x[n] e^{-i 2\pi k n / N}$$
+
+        window_mask : numpy.array
+            The window function, padded to length ``n_fft``
+
+    """
+
+    if freq_bins==None: freq_bins = n_fft//2+1
+    if win_length==None: win_length = n_fft
+
+    s = np.arange(0, n_fft, 1.)
+    wsin = np.empty((freq_bins,1,n_fft))
+    wcos = np.empty((freq_bins,1,n_fft))
+    start_freq = fmin
+    end_freq = fmax
+    bins2freq = []
+    binslist = []
+
+    # num_cycles = start_freq*d/44000.
+ # scaling_ind = np.log(end_freq/start_freq)/k + + # Choosing window shape + + window_mask = get_window(window,int(win_length), fftbins=True) + window_mask = pad_center(window_mask, n_fft) + + if freq_scale == 'linear': + if verbose==True: + print(f"sampling rate = {sr}. Please make sure the sampling rate is correct in order to" + f"get a valid freq range") + start_bin = start_freq*n_fft/sr + scaling_ind = (end_freq-start_freq)*(n_fft/sr)/freq_bins + + for k in range(freq_bins): # Only half of the bins contain useful info + # print("linear freq = {}".format((k*scaling_ind+start_bin)*sr/n_fft)) + bins2freq.append((k*scaling_ind+start_bin)*sr/n_fft) + binslist.append((k*scaling_ind+start_bin)) + wsin[k,0,:] = np.sin(2*np.pi*(k*scaling_ind+start_bin)*s/n_fft) + wcos[k,0,:] = np.cos(2*np.pi*(k*scaling_ind+start_bin)*s/n_fft) + + elif freq_scale == 'log': + if verbose==True: + print(f"sampling rate = {sr}. Please make sure the sampling rate is correct in order to" + f"get a valid freq range") + start_bin = start_freq*n_fft/sr + scaling_ind = np.log(end_freq/start_freq)/freq_bins + + for k in range(freq_bins): # Only half of the bins contain useful info + # print("log freq = {}".format(np.exp(k*scaling_ind)*start_bin*sr/n_fft)) + bins2freq.append(np.exp(k*scaling_ind)*start_bin*sr/n_fft) + binslist.append((np.exp(k*scaling_ind)*start_bin)) + wsin[k,0,:] = np.sin(2*np.pi*(np.exp(k*scaling_ind)*start_bin)*s/n_fft) + wcos[k,0,:] = np.cos(2*np.pi*(np.exp(k*scaling_ind)*start_bin)*s/n_fft) + + elif freq_scale == 'no': + for k in range(freq_bins): # Only half of the bins contain useful info + bins2freq.append(k*sr/n_fft) + binslist.append(k) + wsin[k,0,:] = np.sin(2*np.pi*k*s/n_fft) + wcos[k,0,:] = np.cos(2*np.pi*k*s/n_fft) + else: + print("Please select the correct frequency scale, 'linear' or 'log'") + return wsin.astype(np.float32),wcos.astype(np.float32), bins2freq, binslist, window_mask.astype(np.float32) + + +# Tools for CQT + +def create_cqt_kernels(Q, fs, fmin, n_bins=84, bins_per_octave=12, norm=1, + window='hann', fmax=None, topbin_check=True): + """ + Automatically create CQT kernels in time domain + """ + + fftLen = 2**nextpow2(np.ceil(Q * fs / fmin)) + # minWin = 2**nextpow2(np.ceil(Q * fs / fmax)) + + if (fmax != None) and (n_bins == None): + n_bins = np.ceil(bins_per_octave * np.log2(fmax / fmin)) # Calculate the number of bins + freqs = fmin * 2.0 ** (np.r_[0:n_bins] / np.float(bins_per_octave)) + + elif (fmax == None) and (n_bins != None): + freqs = fmin * 2.0 ** (np.r_[0:n_bins] / np.float(bins_per_octave)) + + else: + warnings.warn('If fmax is given, n_bins will be ignored',SyntaxWarning) + n_bins = np.ceil(bins_per_octave * np.log2(fmax / fmin)) # Calculate the number of bins + freqs = fmin * 2.0 ** (np.r_[0:n_bins] / np.float(bins_per_octave)) + + if np.max(freqs) > fs/2 and topbin_check==True: + raise ValueError('The top bin {}Hz has exceeded the Nyquist frequency, \ + please reduce the n_bins'.format(np.max(freqs))) + + tempKernel = np.zeros((int(n_bins), int(fftLen)), dtype=np.complex64) + specKernel = np.zeros((int(n_bins), int(fftLen)), dtype=np.complex64) + + lengths = np.ceil(Q * fs / freqs) + for k in range(0, int(n_bins)): + freq = freqs[k] + l = np.ceil(Q * fs / freq) + + # Centering the kernels + if l%2==1: # pad more zeros on RHS + start = int(np.ceil(fftLen / 2.0 - l / 2.0))-1 + else: + start = int(np.ceil(fftLen / 2.0 - l / 2.0)) + + sig = get_window_dispatch(window,int(l), fftbins=True)*np.exp(np.r_[-l//2:l//2]*1j*2*np.pi*freq/fs)/l + + if norm: # Normalizing the 
filter # Trying to normalize like librosa
+            tempKernel[k, start:start + int(l)] = sig/np.linalg.norm(sig, norm)
+        else:
+            tempKernel[k, start:start + int(l)] = sig
+        # specKernel[k, :] = fft(tempKernel[k])
+
+    # return specKernel[:,:fftLen//2+1], fftLen, torch.tensor(lengths).float()
+    return tempKernel, fftLen, torch.tensor(lengths).float(), freqs
+
+
+def get_window_dispatch(window, N, fftbins=True):
+    if isinstance(window, str):
+        return get_window(window, N, fftbins=fftbins)
+    elif isinstance(window, tuple):
+        if window[0] == 'gaussian':
+            assert window[1] >= 0
+            sigma = np.floor(- N / 2 / np.sqrt(- 2 * np.log(10**(- window[1] / 20))))
+            return get_window(('gaussian', sigma), N, fftbins=fftbins)
+        else:
+            # a bare `Warning(...)` call does nothing; emit a real warning and
+            # fall back to scipy's generic tuple handling
+            warnings.warn("Tuple windows may have undesired behaviour regarding Q factor")
+            return get_window(window, N, fftbins=fftbins)
+    elif isinstance(window, float):
+        warnings.warn("You are using Kaiser window with beta factor " + str(window) +
+                      ". Correct behaviour not checked.")
+        return get_window(window, N, fftbins=fftbins)
+    else:
+        raise Exception("The function get_window from scipy only supports strings, tuples and floats.")
+
+
+
+def get_cqt_complex(x, cqt_kernels_real, cqt_kernels_imag, hop_length, padding):
+    """Multiplying the STFT result with the cqt_kernel, check out the 1992 CQT paper [1]
+    for how to multiply the STFT result with the CQT kernel
+    [1] Brown, Judith C.C. and Miller Puckette. “An efficient algorithm for the calculation of
+    a constant Q transform.” (1992)."""
+
+    # STFT, converting the audio input from time domain to frequency domain
+    try:
+        x = padding(x) # When center == True, we need padding at the beginning and ending
+    except:
+        warnings.warn(f"\ninput size = {x.shape}\tkernel size = {cqt_kernels_real.shape[-1]}\n"
+                      "padding with reflection mode might not be the best choice, try using constant padding",
+                      UserWarning)
+        x = torch.nn.functional.pad(x, (cqt_kernels_real.shape[-1]//2, cqt_kernels_real.shape[-1]//2))
+    CQT_real = conv1d(x, cqt_kernels_real, stride=hop_length)
+    CQT_imag = -conv1d(x, cqt_kernels_imag, stride=hop_length)
+
+    return torch.stack((CQT_real, CQT_imag),-1)
+
+def get_cqt_complex2(x, cqt_kernels_real, cqt_kernels_imag, hop_length, padding, wcos=None, wsin=None):
+    """Multiplying the STFT result with the cqt_kernel, check out the 1992 CQT paper [1]
+    for how to multiply the STFT result with the CQT kernel
+    [1] Brown, Judith C.C. and Miller Puckette.
“An efficient algorithm for the calculation of + a constant Q transform.” (1992).""" + + # STFT, converting the audio input from time domain to frequency domain + try: + x = padding(x) # When center == True, we need padding at the beginning and ending + except: + warnings.warn(f"\ninput size = {x.shape}\tkernel size = {cqt_kernels_real.shape[-1]}\n" + "padding with reflection mode might not be the best choice, try using constant padding", + UserWarning) + x = torch.nn.functional.pad(x, (cqt_kernels_real.shape[-1]//2, cqt_kernels_real.shape[-1]//2)) + + + + if wcos==None or wsin==None: + CQT_real = conv1d(x, cqt_kernels_real, stride=hop_length) + CQT_imag = -conv1d(x, cqt_kernels_imag, stride=hop_length) + + else: + fourier_real = conv1d(x, wcos, stride=hop_length) + fourier_imag = conv1d(x, wsin, stride=hop_length) + # Multiplying input with the CQT kernel in freq domain + CQT_real, CQT_imag = complex_mul((cqt_kernels_real, cqt_kernels_imag), + (fourier_real, fourier_imag)) + + return torch.stack((CQT_real, CQT_imag),-1) + + + + +def create_lowpass_filter(band_center=0.5, kernelLength=256, transitionBandwidth=0.03): + """ + Calculate the highest frequency we need to preserve and the lowest frequency we allow + to pass through. + Note that frequency is on a scale from 0 to 1 where 0 is 0 and 1 is Nyquist frequency of + the signal BEFORE downsampling. + """ + + # transitionBandwidth = 0.03 + passbandMax = band_center / (1 + transitionBandwidth) + stopbandMin = band_center * (1 + transitionBandwidth) + + # Unlike the filter tool we used online yesterday, this tool does + # not allow us to specify how closely the filter matches our + # specifications. Instead, we specify the length of the kernel. + # The longer the kernel is, the more precisely it will match. + # kernelLength = 256 + + # We specify a list of key frequencies for which we will require + # that the filter match a specific output gain. + # From [0.0 to passbandMax] is the frequency range we want to keep + # untouched and [stopbandMin, 1.0] is the range we want to remove + keyFrequencies = [0.0, passbandMax, stopbandMin, 1.0] + + # We specify a list of output gains to correspond to the key + # frequencies listed above. + # The first two gains are 1.0 because they correspond to the first + # two key frequencies. 
The second two are 0.0 because they
+    # correspond to the stopband frequencies
+    gainAtKeyFrequencies = [1.0, 1.0, 0.0, 0.0]
+
+    # This command produces the filter kernel coefficients
+    filterKernel = signal.firwin2(kernelLength, keyFrequencies, gainAtKeyFrequencies)
+
+    return filterKernel.astype(np.float32)
+
+def get_early_downsample_params(sr, hop_length, fmax_t, Q, n_octaves, verbose):
+    """Used in CQT2010 and CQT2010v2"""
+
+    window_bandwidth = 1.5 # for hann window
+    filter_cutoff = fmax_t * (1 + 0.5 * window_bandwidth / Q)
+    sr, hop_length, downsample_factor = early_downsample(sr,
+                                                         hop_length,
+                                                         n_octaves,
+                                                         sr//2,
+                                                         filter_cutoff)
+    if downsample_factor != 1:
+        if verbose==True:
+            print("Can do early downsample, factor = ", downsample_factor)
+        earlydownsample=True
+        early_downsample_filter = create_lowpass_filter(band_center=1/downsample_factor,
+                                                        kernelLength=256,
+                                                        transitionBandwidth=0.03)
+        early_downsample_filter = torch.tensor(early_downsample_filter)[None, None, :]
+
+    else:
+        if verbose==True:
+            print("No early downsampling is required, downsample_factor = ", downsample_factor)
+        early_downsample_filter = None
+        earlydownsample=False
+
+    return sr, hop_length, downsample_factor, early_downsample_filter, earlydownsample
+
+
+# The following two downsampling count functions are obtained from librosa CQT
+# They are used to determine the number of pre-resamplings if the starting and ending frequency
+# are both in low frequency regions.
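+# For example (illustrative numbers): with nyquist=22050, filter_cutoff=2000,
+# hop_length=512 and n_octaves=7,
+#     downsample_count1 = max(0, ceil(log2(0.85*22050/2000)) - 2) = 2
+#     downsample_count2 = max(0, nextpow2(512) - 7 + 1)           = 3
+# so early_downsample_count(...) returns min(2, 3) = 2, i.e. the audio can be
+# safely decimated by 2**2 = 4 before computing the CQT.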
+def early_downsample_count(nyquist, filter_cutoff, hop_length, n_octaves): + '''Compute the number of early downsampling operations''' + + downsample_count1 = max(0, int(np.ceil(np.log2(0.85 * nyquist / + filter_cutoff)) - 1) - 1) + # print("downsample_count1 = ", downsample_count1) + num_twos = nextpow2(hop_length) + downsample_count2 = max(0, num_twos - n_octaves + 1) + # print("downsample_count2 = ",downsample_count2) + + return min(downsample_count1, downsample_count2) + +def early_downsample(sr, hop_length, n_octaves, + nyquist, filter_cutoff): + '''Return new sampling rate and hop length after early dowansampling''' + downsample_count = early_downsample_count(nyquist, filter_cutoff, hop_length, n_octaves) + # print("downsample_count = ", downsample_count) + downsample_factor = 2**(downsample_count) + + hop_length //= downsample_factor # Getting new hop_length + new_sr = sr / float(downsample_factor) # Getting new sampling rate + + sr = new_sr + + return sr, hop_length, downsample_factor \ No newline at end of file diff --git a/third_party/nnAudio/setup.py b/third_party/nnAudio/setup.py new file mode 100755 index 000000000..cb69481af --- /dev/null +++ b/third_party/nnAudio/setup.py @@ -0,0 +1,34 @@ +import setuptools +import codecs +import os.path + +def read(rel_path): + here = os.path.abspath(os.path.dirname(__file__)) + with codecs.open(os.path.join(here, rel_path), 'r') as fp: + return fp.read() + +def get_version(rel_path): + for line in read(rel_path).splitlines(): + if line.startswith('__version__'): + delim = '"' if '"' in line else "'" + return line.split(delim)[1] + else: + raise RuntimeError("Unable to find version string.") + +setuptools.setup( + name="nnAudio", # Replace with your own username + version=get_version("nnAudio/__init__.py"), + author="KinWaiCheuk", + author_email="u3500684@connect.hku.hk", + description="A fast GPU audio processing toolbox with 1D convolutional neural network", + long_description='', + long_description_content_type="text/markdown", + url="https://github.com/KinWaiCheuk/nnAudio", + packages=setuptools.find_packages(), + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + ], + python_requires='>=3.6', +) diff --git a/third_party/nnAudio/tests/parameters.py b/third_party/nnAudio/tests/parameters.py new file mode 100644 index 000000000..c8356ac6d --- /dev/null +++ b/third_party/nnAudio/tests/parameters.py @@ -0,0 +1,38 @@ +# Creating parameters for STFT test +""" +It is equivalent to +[(1024, 128, 'ones'), + (1024, 128, 'hann'), + (1024, 128, 'hamming'), + (2048, 128, 'ones'), + (2048, 512, 'ones'), + (2048, 128, 'hann'), + (2048, 512, 'hann'), + (2048, 128, 'hamming'), + (2048, 512, 'hamming'), + (None, None, None)] +""" + +stft_parameters = [] +n_fft = [1024,2048] +hop_length = {128,512,1024} +window = ['ones', 'hann', 'hamming'] +for i in n_fft: + for k in window: + for j in hop_length: + if j < (i/2): + stft_parameters.append((i,j,k)) +stft_parameters.append((256, None, 'hann')) + +stft_with_win_parameters = [] +n_fft = [512,1024] +win_length = [400, 900] +hop_length = {128,256} +for i in n_fft: + for j in win_length: + if j < i: + for k in hop_length: + if k < (i/2): + stft_with_win_parameters.append((i,j,k)) + +mel_win_parameters = [(512,400), (1024, 1000)] \ No newline at end of file diff --git a/third_party/nnAudio/tests/test_spectrogram.py b/third_party/nnAudio/tests/test_spectrogram.py new file mode 100644 index 000000000..3aa074c1e --- /dev/null 
+++ b/third_party/nnAudio/tests/test_spectrogram.py @@ -0,0 +1,373 @@ +import pytest +import librosa +import torch +import matplotlib.pyplot as plt +from scipy.signal import chirp, sweep_poly +from nnAudio.Spectrogram import * +from parameters import * + +gpu_idx=0 + +# librosa example audio for testing +example_y, example_sr = librosa.load(librosa.util.example_audio_file()) + + +@pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters) +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_inverse2(n_fft, hop_length, window, device): + x = torch.tensor(example_y,device=device) + stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window).to(device) + istft = iSTFT(n_fft=n_fft, hop_length=hop_length, window=window).to(device) + X = stft(x.unsqueeze(0), output_format="Complex") + x_recon = istft(X, length=x.shape[0], onesided=True).squeeze() + assert np.allclose(x.cpu(), x_recon.cpu(), rtol=1e-5, atol=1e-3) + +@pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters) +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_inverse(n_fft, hop_length, window, device): + x = torch.tensor(example_y, device=device) + stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window, iSTFT=True).to(device) + X = stft(x.unsqueeze(0), output_format="Complex") + x_recon = stft.inverse(X, length=x.shape[0]).squeeze() + assert np.allclose(x.cpu(), x_recon.cpu(), rtol=1e-3, atol=1) + + + +# @pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters) + +# def test_inverse_GPU(n_fft, hop_length, window): +# x = torch.tensor(example_y,device=f'cuda:{gpu_idx}') +# stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window, device=f'cuda:{gpu_idx}') +# X = stft(x.unsqueeze(0), output_format="Complex") +# x_recon = stft.inverse(X, num_samples=x.shape[0]).squeeze() +# assert np.allclose(x.cpu(), x_recon.cpu(), rtol=1e-3, atol=1) + + +@pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters) +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_stft_complex(n_fft, hop_length, window, device): + x = example_y + stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Complex") + X_real, X_imag = X[:, :, :, 0].squeeze(), X[:, :, :, 1].squeeze() + X_librosa = librosa.stft(x, n_fft=n_fft, hop_length=hop_length, window=window) + real_diff, imag_diff = np.allclose(X_real.cpu(), X_librosa.real, rtol=1e-3, atol=1e-3), \ + np.allclose(X_imag.cpu(), X_librosa.imag, rtol=1e-3, atol=1e-3) + + assert real_diff and imag_diff + +# @pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters) +# def test_stft_complex_GPU(n_fft, hop_length, window): +# x = example_y +# stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window, device=f'cuda:{gpu_idx}') +# X = stft(torch.tensor(x,device=f'cuda:{gpu_idx}').unsqueeze(0), output_format="Complex") +# X_real, X_imag = X[:, :, :, 0].squeeze().detach().cpu(), X[:, :, :, 1].squeeze().detach().cpu() +# X_librosa = librosa.stft(x, n_fft=n_fft, hop_length=hop_length, window=window) +# real_diff, imag_diff = np.allclose(X_real, X_librosa.real, rtol=1e-3, atol=1e-3), \ +# np.allclose(X_imag, X_librosa.imag, rtol=1e-3, atol=1e-3) + +# assert real_diff and imag_diff + +@pytest.mark.parametrize("n_fft, win_length, hop_length", stft_with_win_parameters) +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_stft_complex_winlength(n_fft, win_length, hop_length, device): + 
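+    # Note: win_length < n_fft in all these parameter sets; both nnAudio
+    # (via pad_center in create_fourier_kernels) and librosa zero-pad the
+    # window up to n_fft, so the two outputs should agree bin-for-bin.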
x = example_y + stft = STFT(n_fft=n_fft, win_length=win_length, hop_length=hop_length).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Complex") + X_real, X_imag = X[:, :, :, 0].squeeze(), X[:, :, :, 1].squeeze() + X_librosa = librosa.stft(x, n_fft=n_fft, win_length=win_length, hop_length=hop_length) + real_diff, imag_diff = np.allclose(X_real.cpu(), X_librosa.real, rtol=1e-3, atol=1e-3), \ + np.allclose(X_imag.cpu(), X_librosa.imag, rtol=1e-3, atol=1e-3) + assert real_diff and imag_diff + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_stft_magnitude(device): + x = example_y + stft = STFT(n_fft=2048, hop_length=512).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Magnitude").squeeze() + X_librosa, _ = librosa.core.magphase(librosa.stft(x, n_fft=2048, hop_length=512)) + assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3) + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_stft_phase(device): + x = example_y + stft = STFT(n_fft=2048, hop_length=512).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Phase") + X_real, X_imag = torch.cos(X).squeeze(), torch.sin(X).squeeze() + _, X_librosa = librosa.core.magphase(librosa.stft(x, n_fft=2048, hop_length=512)) + + real_diff, imag_diff = np.mean(np.abs(X_real.cpu().numpy() - X_librosa.real)), \ + np.mean(np.abs(X_imag.cpu().numpy() - X_librosa.imag)) + + # I find that np.allclose is too strict for allowing phase to be similar to librosa. + # Hence for phase we use average element-wise distance as the test metric. + assert real_diff < 2e-4 and imag_diff < 2e-4 + +@pytest.mark.parametrize("n_fft, win_length", mel_win_parameters) +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_mel_spectrogram(n_fft, win_length, device): + x = example_y + melspec = MelSpectrogram(n_fft=n_fft, win_length=win_length, hop_length=512).to(device) + X = melspec(torch.tensor(x, device=device).unsqueeze(0)).squeeze() + X_librosa = librosa.feature.melspectrogram(x, n_fft=n_fft, win_length=win_length, hop_length=512) + assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3) + + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_cqt_1992(device): + # Log sweep case + fs = 44100 + t = 1 + f0 = 55 + f1 = 22050 + s = np.linspace(0, t, fs*t) + x = chirp(s, f0, 1, f1, method='logarithmic') + x = x.astype(dtype=np.float32) + + # Magnitude + stft = CQT1992(sr=fs, fmin=220, output_format="Magnitude", + n_bins=80, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + + + # Complex + stft = CQT1992(sr=fs, fmin=220, output_format="Complex", + n_bins=80, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + + # Phase + stft = CQT1992(sr=fs, fmin=220, output_format="Phase", + n_bins=160, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + + assert True + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_cqt_2010(device): + # Log sweep case + fs = 44100 + t = 1 + f0 = 55 + f1 = 22050 + s = np.linspace(0, t, fs*t) + x = chirp(s, f0, 1, f1, method='logarithmic') + x = x.astype(dtype=np.float32) + + # Magnitude + stft = CQT2010(sr=fs, fmin=110, output_format="Magnitude", + n_bins=160, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + + # Complex + stft = CQT2010(sr=fs, fmin=110, output_format="Complex", + n_bins=160, 
bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + + # Phase + stft = CQT2010(sr=fs, fmin=110, output_format="Phase", + n_bins=160, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + assert True + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_cqt_1992_v2_log(device): + # Log sweep case + fs = 44100 + t = 1 + f0 = 55 + f1 = 22050 + s = np.linspace(0, t, fs*t) + x = chirp(s, f0, 1, f1, method='logarithmic') + x = x.astype(dtype=np.float32) + + # Magnitude + stft = CQT1992v2(sr=fs, fmin=55, output_format="Magnitude", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + ground_truth = np.load("tests/ground-truths/log-sweep-cqt-1992-mag-ground-truth.npy") + X = torch.log(X + 1e-5) + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + + # Complex + stft = CQT1992v2(sr=fs, fmin=55, output_format="Complex", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + ground_truth = np.load("tests/ground-truths/log-sweep-cqt-1992-complex-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + + # Phase + stft = CQT1992v2(sr=fs, fmin=55, output_format="Phase", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + ground_truth = np.load("tests/ground-truths/log-sweep-cqt-1992-phase-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_cqt_1992_v2_linear(device): + # Linear sweep case + fs = 44100 + t = 1 + f0 = 55 + f1 = 22050 + s = np.linspace(0, t, fs*t) + x = chirp(s, f0, 1, f1, method='linear') + x = x.astype(dtype=np.float32) + + # Magnitude + stft = CQT1992v2(sr=fs, fmin=55, output_format="Magnitude", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-1992-mag-ground-truth.npy") + X = torch.log(X + 1e-5) + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + + # Complex + stft = CQT1992v2(sr=fs, fmin=55, output_format="Complex", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-1992-complex-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + + # Phase + stft = CQT1992v2(sr=fs, fmin=55, output_format="Phase", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-1992-phase-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_cqt_2010_v2_log(device): + # Log sweep case + fs = 44100 + t = 1 + f0 = 55 + f1 = 22050 + s = np.linspace(0, t, fs*t) + x = chirp(s, f0, 1, f1, method='logarithmic') + x = x.astype(dtype=np.float32) + + # Magnitude + stft = CQT2010v2(sr=fs, fmin=55, output_format="Magnitude", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + X = torch.log(X + 1e-2) +# np.save("tests/ground-truths/log-sweep-cqt-2010-mag-ground-truth", X.cpu()) + ground_truth = np.load("tests/ground-truths/log-sweep-cqt-2010-mag-ground-truth.npy") + assert np.allclose(X.cpu(), 
ground_truth, rtol=1e-3, atol=1e-3) + + # Complex + stft = CQT2010v2(sr=fs, fmin=55, output_format="Complex", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) +# np.save("tests/ground-truths/log-sweep-cqt-2010-complex-ground-truth", X.cpu()) + ground_truth = np.load("tests/ground-truths/log-sweep-cqt-2010-complex-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + +# # Phase +# stft = CQT2010v2(sr=fs, fmin=55, device=device, output_format="Phase", +# n_bins=207, bins_per_octave=24) +# X = stft(torch.tensor(x, device=device).unsqueeze(0)) +# # np.save("tests/ground-truths/log-sweep-cqt-2010-phase-ground-truth", X.cpu()) +# ground_truth = np.load("tests/ground-truths/log-sweep-cqt-2010-phase-ground-truth.npy") +# assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_cqt_2010_v2_linear(device): + # Linear sweep case + fs = 44100 + t = 1 + f0 = 55 + f1 = 22050 + s = np.linspace(0, t, fs*t) + x = chirp(s, f0, 1, f1, method='linear') + x = x.astype(dtype=np.float32) + + # Magnitude + stft = CQT2010v2(sr=fs, fmin=55, output_format="Magnitude", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) + X = torch.log(X + 1e-2) +# np.save("tests/ground-truths/linear-sweep-cqt-2010-mag-ground-truth", X.cpu()) + ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-2010-mag-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + + # Complex + stft = CQT2010v2(sr=fs, fmin=55, output_format="Complex", + n_bins=207, bins_per_octave=24).to(device) + X = stft(torch.tensor(x, device=device).unsqueeze(0)) +# np.save("tests/ground-truths/linear-sweep-cqt-2010-complex-ground-truth", X.cpu()) + ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-2010-complex-ground-truth.npy") + assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + + # Phase +# stft = CQT2010v2(sr=fs, fmin=55, device=device, output_format="Phase", +# n_bins=207, bins_per_octave=24) +# X = stft(torch.tensor(x, device=device).unsqueeze(0)) +# # np.save("tests/ground-truths/linear-sweep-cqt-2010-phase-ground-truth", X.cpu()) +# ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-2010-phase-ground-truth.npy") +# assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3) + +@pytest.mark.parametrize("device", ['cpu', f'cuda:{gpu_idx}']) +def test_mfcc(device): + x = example_y + mfcc = MFCC(sr=example_sr).to(device) + X = mfcc(torch.tensor(x, device=device).unsqueeze(0)).squeeze() + X_librosa = librosa.feature.mfcc(x, sr=example_sr) + assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3) + + +x = torch.randn((4,44100)) # Create a batch of input for the following Data.Parallel test + +@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}']) +def test_STFT_Parallel(device): + spec_layer = STFT(hop_length=512, n_fft=2048, window='hann', + freq_scale='no', + output_format='Complex').to(device) + inverse_spec_layer = iSTFT(hop_length=512, n_fft=2048, window='hann', + freq_scale='no').to(device) + + spec_layer_parallel = torch.nn.DataParallel(spec_layer) + inverse_spec_layer_parallel = torch.nn.DataParallel(inverse_spec_layer) + spec = spec_layer_parallel(x) + x_recon = inverse_spec_layer_parallel(spec, onesided=True, length=x.shape[-1]) + + assert np.allclose(x_recon.detach().cpu(), x.detach().cpu(), rtol=1e-3, atol=1e-3) + +@pytest.mark.parametrize("device", 
[f'cuda:{gpu_idx}'])
+def test_MelSpectrogram_Parallel(device):
+    spec_layer = MelSpectrogram(sr=22050, n_fft=2048, n_mels=128, hop_length=512,
+                                window='hann', center=True, pad_mode='reflect',
+                                power=2.0, htk=False, fmin=0.0, fmax=None, norm=1,
+                                verbose=True).to(device)
+    spec_layer_parallel = torch.nn.DataParallel(spec_layer)
+    spec = spec_layer_parallel(x)
+
+@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
+def test_MFCC_Parallel(device):
+    spec_layer = MFCC().to(device)
+    spec_layer_parallel = torch.nn.DataParallel(spec_layer)
+    spec = spec_layer_parallel(x)
+
+@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
+def test_CQT1992_Parallel(device):
+    spec_layer = CQT1992(fmin=110, n_bins=60, bins_per_octave=12).to(device)
+    spec_layer_parallel = torch.nn.DataParallel(spec_layer)
+    spec = spec_layer_parallel(x)
+
+@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
+def test_CQT1992v2_Parallel(device):
+    spec_layer = CQT1992v2().to(device)
+    spec_layer_parallel = torch.nn.DataParallel(spec_layer)
+    spec = spec_layer_parallel(x)
+
+@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
+def test_CQT2010_Parallel(device):
+    spec_layer = CQT2010().to(device)
+    spec_layer_parallel = torch.nn.DataParallel(spec_layer)
+    spec = spec_layer_parallel(x)
+
+@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
+def test_CQT2010v2_Parallel(device):
+    spec_layer = CQT2010v2().to(device)
+    spec_layer_parallel = torch.nn.DataParallel(spec_layer)
+    spec = spec_layer_parallel(x)
\ No newline at end of file
diff --git a/third_party/paddle_audio/__init__.py b/third_party/paddle_audio/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/third_party/paddle_audio/frontend/common.py b/third_party/paddle_audio/frontend/common.py
new file mode 100644
index 000000000..7638dae53
--- /dev/null
+++ b/third_party/paddle_audio/frontend/common.py
@@ -0,0 +1,201 @@
+import math
+
+import paddle
+import numpy as np
+from typing import List, Tuple, Optional, Union
+
+
+# https://github.com/kaldi-asr/kaldi/blob/cbed4ff688/src/feat/feature-window.cc#L109
+def povey_window(frame_len:int) -> np.ndarray:
+    win = np.empty(frame_len)
+    a = 2 * np.pi / (frame_len - 1)
+    for i in range(frame_len):
+        win[i] = (0.5 - 0.5 * np.cos(a * i))**0.85
+    return win
+
+def hann_window(frame_len:int) -> np.ndarray:
+    win = np.empty(frame_len)
+    a = 2 * np.pi / (frame_len - 1)
+    for i in range(frame_len):
+        win[i] = 0.5 - 0.5 * np.cos(a * i)
+    return win
+
+def sine_window(frame_len:int) -> np.ndarray:
+    win = np.empty(frame_len)
+    a = 2 * np.pi / (frame_len - 1)
+    for i in range(frame_len):
+        win[i] = np.sin(0.5 * a * i)
+    return win
+
+def hamm_window(frame_len:int) -> np.ndarray:
+    win = np.empty(frame_len)
+    a = 2 * np.pi / (frame_len - 1)
+    for i in range(frame_len):
+        win[i] = 0.54 - 0.46 * np.cos(a * i)
+    return win
+
+def get_window(wintype:Optional[str], winlen:int) -> np.ndarray:
+    """Get window function coefficients.
+
+    Args:
+        wintype (Optional[str]): window type.
+        winlen (int): window length in samples.
+
+    Raises:
+        ValueError: unsupported window type.
+
+    Returns:
+        np.ndarray: window coeffs.
+    """
+    # calculate window
+    if not wintype or wintype == 'rectangular':
+        window = np.ones(winlen)
+    elif wintype == "hann":
+        window = hann_window(winlen)
+    elif wintype == "hamm":
+        window = hamm_window(winlen)
+    elif wintype == "povey":
+        window = povey_window(winlen)
+    else:
+        msg = f"{wintype} Not supported yet!"
+        raise ValueError(msg)
+    return window
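+
+# Quick sanity check for the window helpers (illustrative only; 400 samples
+# corresponds to a 25 ms window at an assumed 16 kHz sample rate):
+#
+#   win = get_window('povey', 400)
+#   assert win.shape == (400,)
+#   assert 0.0 <= win.min() and win.max() <= 1.0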
+
+
+def dft_matrix(n_fft:int, winlen:int=None, n_bin:int=None) -> Tuple[np.ndarray, np.ndarray, int]:
+    # https://en.wikipedia.org/wiki/Discrete_Fourier_transform
+    # (n_bins, n_fft) complex
+    if n_bin is None:
+        n_bin = 1 + n_fft // 2
+    if winlen is None:
+        winlen = n_bin
+    # https://github.com/numpy/numpy/blob/v1.20.0/numpy/fft/_pocketfft.py#L49
+    kernel_size = min(n_fft, winlen)
+
+    n = np.arange(0, n_fft, 1.)
+    wsin = np.empty((n_bin, kernel_size)) #[Cout, kernel_size]
+    wcos = np.empty((n_bin, kernel_size)) #[Cout, kernel_size]
+    for k in range(n_bin): # Only half of the bins contain useful info
+        wsin[k,:] = -np.sin(2*np.pi*k*n/n_fft)[:kernel_size]
+        wcos[k,:] = np.cos(2*np.pi*k*n/n_fft)[:kernel_size]
+    w_real = wcos
+    w_imag = wsin
+    return w_real, w_imag, kernel_size
+
+
+def dft_matrix_fast(n_fft:int, winlen:int=None, n_bin:int=None) -> Tuple[np.ndarray, np.ndarray, int]:
+    # (n_bins, n_fft) complex
+    if n_bin is None:
+        n_bin = 1 + n_fft // 2
+    if winlen is None:
+        winlen = n_bin
+    # https://github.com/numpy/numpy/blob/v1.20.0/numpy/fft/_pocketfft.py#L49
+    kernel_size = min(n_fft, winlen)
+
+    # https://en.wikipedia.org/wiki/DFT_matrix
+    # https://ccrma.stanford.edu/~jos/st/Matrix_Formulation_DFT.html
+    weight = np.fft.fft(np.eye(n_fft))[:n_bin, :kernel_size]
+    w_real = weight.real
+    w_imag = weight.imag
+    return w_real, w_imag, kernel_size
+
+
+def bin2hz(bin:Union[List[int], np.ndarray], N:int, sr:int)->List[float]:
+    """FFT bins to Hz.
+
+    http://practicalcryptography.com/miscellaneous/machine-learning/intuitive-guide-discrete-fourier-transform/
+
+    Args:
+        bin (List[int] or np.ndarray): bin index.
+        N (int): the number of samples, or FFT points.
+        sr (int): sampling rate.
+
+    Returns:
+        List[float]: Hz's.
+    """
+    hz = bin * float(sr) / N
+    return hz
+
+
+def hz2mel(hz):
+    """Convert a value in Hertz to Mels
+
+    :param hz: a value in Hz. This can also be a numpy array, conversion proceeds element-wise.
+    :returns: a value in Mels. If an array was passed in, an identical sized array is returned.
+    """
+    return 1127 * np.log(1+hz/700.0)
+
+
+def mel2hz(mel):
+    """Convert a value in Mels to Hertz
+
+    :param mel: a value in Mels. This can also be a numpy array, conversion proceeds element-wise.
+    :returns: a value in Hertz. If an array was passed in, an identical sized array is returned.
+    """
+    return 700 * (np.exp(mel/1127.0)-1)
+
+
+
+def rms_to_db(rms: float):
+    """Root Mean Square to dB.
+
+    Args:
+        rms ([float]): root mean square
+
+    Returns:
+        float: dB
+    """
+    return 20.0 * math.log10(max(1e-16, rms))
+
+
+def rms_to_dbfs(rms: float):
+    """Root Mean Square to dBFS.
+    https://fireattack.wordpress.com/2017/02/06/replaygain-loudness-normalization-and-applications/
+    Audio is a mix of sine waves, so a full-scale (1 amp) sine wave has an RMS of 0.7071, equal to -3.0103 dB.
+
+    dB = dBFS + 3.0103
+    dBFS = dB - 3.0103
+    e.g. 0 dB = -3.0103 dBFS
+
+    Args:
+        rms ([float]): root mean square
+
+    Returns:
+        float: dBFS
+    """
+    return rms_to_db(rms) - 3.0103
+
+
+def max_dbfs(sample_data: np.ndarray):
+    """Peak dBFS based on the maximum energy sample.
+
+    Args:
+        sample_data ([np.ndarray]): float array, [-1, 1].
+
+    Returns:
+        float: dBFS
+    """
+    # Peak dBFS based on the maximum energy sample. Will prevent overdrive if used for normalization.
+    return rms_to_dbfs(max(abs(np.min(sample_data)), abs(np.max(sample_data))))
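+
+# Worked example for the dB helpers above (values follow from the formulas,
+# rounded to two decimals):
+#
+#   rms_to_db(1.0)                     # 20*log10(1.0)          ->  0.0 dB
+#   rms_to_dbfs(0.7071)                # -3.01 dB - 3.0103      -> -6.02 dBFS
+#   max_dbfs(np.array([-0.5, 0.25]))   # peak sample 0.5        -> -9.03 dBFS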
+def mean_dbfs(sample_data):
+    """dBFS based on the RMS energy of the whole signal.
+
+    Args:
+        sample_data ([np.ndarray]): float array, [-1, 1].
+
+    Returns:
+        float: dBFS
+    """
+    return rms_to_dbfs(
+        math.sqrt(np.mean(np.square(sample_data, dtype=np.float64))))
+
+
+def gain_db_to_ratio(gain_db: float):
+    """dB to amplitude ratio.
+
+    Args:
+        gain_db (float): gain in dB
+
+    Returns:
+        float: scale in amp
+    """
+    return math.pow(10.0, gain_db / 20.0)
\ No newline at end of file
diff --git a/third_party/paddle_audio/frontend/english.wav b/third_party/paddle_audio/frontend/english.wav
new file mode 100644
index 0000000000000000000000000000000000000000..bb28291f69123209e6b7cc46b584d0a1f2c7bb16
GIT binary patch
literal 35824
[... 35824 bytes of base85-encoded WAV data omitted ...]
diff --git a/third_party/paddle_audio/frontend/kaldi.py b/third_party/paddle_audio/frontend/kaldi.py
new file mode 100644
--- /dev/null
+++ b/third_party/paddle_audio/frontend/kaldi.py
+from typing import Tuple
+
+import numpy as np
+import paddle
+import soundfile as sf
+from paddle import Tensor, nn
+from paddle.nn import functional as F
+
+from third_party.paddle_audio.frontend.common import dft_matrix, get_window
+
+
+def read(wavpath:str, sr:int=None, start=0, stop=None, dtype='int16', always_2d=True)->Tuple[int, np.ndarray]:
+    """load wav file.
+
+    Args:
+        wavpath (str): wav path.
+        sr (int, optional): expect sample rate. Defaults to None.
+        start (int, optional): first frame to read. Defaults to 0.
+        stop (int, optional): last frame to read. Defaults to None (read to the end).
+        dtype (str, optional): wav data bits. Defaults to 'int16'.
+
+    Returns:
+        Tuple[int, np.ndarray]: sr (int), wav (int16) [T, C].
+    """
+    wav, r_sr = sf.read(wavpath, start=start, stop=stop, dtype=dtype, always_2d=always_2d)
+    if sr:
+        assert sr == r_sr
+    return r_sr, wav
+
+
+def write(wavpath:str, wav:np.ndarray, sr:int, dtype='PCM_16'):
+    """write wav file.
+
+    Args:
+        wavpath (str): file path to save.
+        wav (np.ndarray): wav data.
+        sr (int): data samplerate.
+        dtype (str, optional): wav bit format. Defaults to 'PCM_16'.
+    """
+    sf.write(wavpath, wav, sr, subtype=dtype)
+
+
+def frames(x: Tensor,
+           num_samples: Tensor,
+           sr: int,
+           win_length: float,
+           stride_length: float,
+           clip: bool = False) -> Tuple[Tensor, Tensor]:
+    """Extract frames from audio.
+
+    Parameters
+    ----------
+    x : Tensor
+        Shape (B, T), batched waveform.
+    num_samples : Tensor
+        Shape (B, ), number of samples of each waveform.
+    sr: int
+        Sampling Rate.
+    win_length : float
+        Window length in seconds.
+    stride_length : float
+        Stride length in seconds.
+    clip : bool, optional
+        Whether to clip audio that does not fit into the last frame, by
+        default False
+
+    Returns
+    -------
+    frames : Tensor
+        Shape (B, T', win_length).
+    num_frames : Tensor
+        Shape (B, ) number of valid frames
+    """
+    assert stride_length <= win_length
+    stride_length = int(stride_length * sr)
+    win_length = int(win_length * sr)
+
+    num_frames = (num_samples - win_length) // stride_length
+    padding = (0, 0)
+    if not clip:
+        num_frames += 1
+        need_samples = num_frames * stride_length + win_length
+        padding = (0, need_samples - num_samples - 1)
+
+    weight = paddle.eye(win_length).unsqueeze(1) #[win_length, 1, win_length]
+
+    frames = F.conv1d(x.unsqueeze(-1),
+                      weight,
+                      padding=padding,
+                      stride=(stride_length, ),
+                      data_format='NLC')
+    return frames, num_frames
+
+
+def dither(signal:Tensor, dither_value=1.0)->Tensor:
+    """dither frames for log compute.
+
+    Args:
+        signal (Tensor): [B, T, D]
+        dither_value (float, optional): [scalar]. Defaults to 1.0.
+
+    Returns:
+        Tensor: [B, T, D]
+    """
+    D = paddle.shape(signal)[-1]
+    signal += paddle.normal(shape=[1, 1, D]) * dither_value
+    return signal
+
+
+def remove_dc_offset(signal:Tensor)->Tensor:
+    """remove dc.
+
+    Args:
+        signal (Tensor): [B, T, D]
+
+    Returns:
+        Tensor: [B, T, D]
+    """
+    signal -= paddle.mean(signal, axis=-1, keepdim=True)
+    return signal
+
+def preemphasis(signal:Tensor, coeff=0.97)->Tensor:
+    """perform preemphasis on the input signal.
+
+    Args:
+        signal (Tensor): [B, T, D], The signal to filter.
+        coeff (float, optional): [scalar]. The preemphasis coefficient; 0 is no filter. Defaults to 0.97.
+
+    Returns:
+        Tensor: [B, T, D]
+    """
+    return paddle.concat([
+        (1-coeff)*signal[:, :, 0:1],
+        signal[:, :, 1:] - coeff * signal[:, :, :-1]
+    ], axis=-1)
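+
+# Shape sketch for the helpers above (an assumed 16 kHz, 1 s batch):
+# frames(x, num_samples, 16000, 0.025, 0.01, clip=False) turns a (2, 16000)
+# batch into (2, 98, 400) frames, and dither/remove_dc_offset/preemphasis
+# all preserve that [B, T', D] frame shape.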
+
+
+class STFT(nn.Layer):
+    """A module for computing stft transformation in a differentiable way.
+
+    http://practicalcryptography.com/miscellaneous/machine-learning/intuitive-guide-discrete-fourier-transform/
+
+    Parameters
+    ------------
+    n_fft : int
+        Number of samples in a frame.
+
+    sr: int
+        Sampling rate.
+
+    stride_length : float
+        Stride between adjacent frames, in seconds.
+
+    win_length : float
+        Length of the window, in seconds.
+
+    clip: bool
+        Whether to clip audio that does not fit into the last frame.
+    """
+    def __init__(self,
+                 n_fft: int,
+                 sr: int,
+                 win_length: float,
+                 stride_length: float,
+                 dither:float=0.0,
+                 preemph_coeff:float=0.97,
+                 remove_dc_offset:bool=True,
+                 window_type: str = 'povey',
+                 clip: bool = False):
+        super().__init__()
+        self.sr = sr
+        self.win_length = win_length
+        self.stride_length = stride_length
+        self.dither = dither
+        self.preemph_coeff = preemph_coeff
+        self.remove_dc_offset = remove_dc_offset
+        self.window_type = window_type
+        self.clip = clip
+
+        self.n_fft = n_fft
+        self.n_bin = 1 + n_fft // 2
+
+        w_real, w_imag, kernel_size = dft_matrix(
+            self.n_fft, int(self.win_length * self.sr), self.n_bin
+        )
+
+        # calculate window
+        window = get_window(window_type, kernel_size)
+
+        # (2 * n_bins, kernel_size)
+        w = np.concatenate([w_real, w_imag], axis=0)
+        w = w * window
+        # (kernel_size, 2 * n_bins)
+        w = np.transpose(w)
+        weight = paddle.cast(paddle.to_tensor(w), paddle.get_default_dtype())
+        self.register_buffer("weight", weight)
+
+    def forward(self, x: Tensor, num_samples: Tensor) -> Tuple[Tensor, Tensor]:
+        """Compute the stft transform.
+        Parameters
+        ------------
+        x : Tensor [shape=(B, T)]
+            The input waveform.
+        num_samples : Tensor [shape=(B,)]
+            Number of samples of each waveform.
+        Returns
+        ------------
+        C : Tensor
+            Shape(B, T', n_bins, 2) Spectrogram.
+
+        num_frames: Tensor
+            Shape (B,) number of frames of each spectrogram
+        """
+        batch_size = paddle.shape(num_samples)
+        F, nframe = frames(x, num_samples, self.sr, self.win_length, self.stride_length, clip=self.clip)
+        if self.dither:
+            F = dither(F, self.dither)
+        if self.remove_dc_offset:
+            F = remove_dc_offset(F)
+        if self.preemph_coeff:
+            F = preemphasis(F, self.preemph_coeff)
+        C = paddle.matmul(F, self.weight) # [B, T, K] [K, 2 * n_bins]
+        C = paddle.reshape(C, [batch_size, -1, 2, self.n_bin])
+        C = C.transpose([0, 1, 3, 2])
+        return C, nframe
+
+
+def powspec(C:Tensor) -> Tensor:
+    """Compute the power spectrum |X_k|^2.
+
+    Args:
+        C (Tensor): [B, T, C, 2]
+
+    Returns:
+        Tensor: [B, T, C]
+    """
+    real, imag = paddle.chunk(C, 2, axis=-1)
+    return paddle.square(real.squeeze(-1)) + paddle.square(imag.squeeze(-1))
+
+
+def magspec(C: Tensor, eps=1e-10) -> Tensor:
+    """Compute the magnitude spectrum |X_k|.
+
+    Args:
+        C (Tensor): [B, T, C, 2]
+        eps (float): epsilon.
+
+    Returns:
+        Tensor: [B, T, C]
+    """
+    pspec = powspec(C)
+    return paddle.sqrt(pspec + eps)
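+
+# Minimal usage sketch for the STFT layer (shapes only; the 16 kHz signal and
+# 25 ms / 10 ms framing are assumed values, not module requirements):
+#
+#   stft = STFT(n_fft=512, sr=16000, win_length=0.025, stride_length=0.01)
+#   wav = paddle.randn([1, 16000])
+#   C, nframe = stft(wav, paddle.to_tensor([16000]))
+#   spec = powspec(C)   # [1, T', 257], since n_bin = 1 + 512 // 2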
+
+
+def logspec(C: Tensor, eps=1e-10) -> Tensor:
+    """Compute the log magnitude spectrum 20*log10|X_k|.
+
+    Args:
+        C (Tensor): [B, T, C, 2]
+        eps (float, optional): stability epsilon. Defaults to 1e-10.
+
+    Returns:
+        Tensor: [B, T, C]
+    """
+    spec = magspec(C)
+    return 20 * paddle.log10(spec + eps)
+
diff --git a/third_party/paddle_audio/frontend/kaldi_test.py b/third_party/paddle_audio/frontend/kaldi_test.py
new file mode 100644
index 000000000..34ff413c5
--- /dev/null
+++ b/third_party/paddle_audio/frontend/kaldi_test.py
@@ -0,0 +1,533 @@
+from typing import Tuple
+import numpy as np
+import paddle
+import unittest
+
+import decimal
+import numpy
+import math
+import logging
+from pathlib import Path
+
+from scipy.fftpack import dct
+
+from third_party.paddle_audio.frontend import kaldi
+
+def round_half_up(number):
+    return int(decimal.Decimal(number).quantize(decimal.Decimal('1'), rounding=decimal.ROUND_HALF_UP))
+
+def rolling_window(a, window, step=1):
+    # http://ellisvalentiner.com/post/2017-03-21-np-strides-trick
+    shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
+    strides = a.strides + (a.strides[-1],)
+    return numpy.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)[::step]
+
+
+def do_dither(signal, dither_value=1.0):
+    signal += numpy.random.normal(size=signal.shape) * dither_value
+    return signal
+
+def do_remove_dc_offset(signal):
+    signal -= numpy.mean(signal)
+    return signal
+
+def do_preemphasis(signal, coeff=0.97):
+    """perform preemphasis on the input signal.
+
+    :param signal: The signal to filter.
+    :param coeff: The preemphasis coefficient. 0 is no filter, default is 0.97.
+    :returns: the filtered signal.
+    """
+    return numpy.append((1-coeff)*signal[0], signal[1:] - coeff * signal[:-1])
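+
+# Reference-implementation sanity check (illustrative values, not test fixtures):
+#
+#   sig = numpy.array([1.0, 1.0, 1.0, 1.0])
+#   do_preemphasis(sig, coeff=0.97)   # -> [0.03, 0.03, 0.03, 0.03]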
+ """ + slen = len(sig) + frame_len = int(round_half_up(frame_len)) + frame_step = int(round_half_up(frame_step)) + if slen <= frame_len: + numframes = 1 + else: + numframes = 1 + (( slen - frame_len) // frame_step) + + # check kaldi/src/feat/feature-window.h + padsignal = sig[:(numframes-1)*frame_step+frame_len] + if wintype is 'povey': + win = numpy.empty(frame_len) + for i in range(frame_len): + win[i] = (0.5-0.5*numpy.cos(2*numpy.pi/(frame_len-1)*i))**0.85 + else: # the hamming window + win = numpy.hamming(frame_len) + + if stride_trick: + frames = rolling_window(padsignal, window=frame_len, step=frame_step) + else: + indices = numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile( + numpy.arange(0, numframes * frame_step, frame_step), (frame_len, 1)).T + indices = numpy.array(indices, dtype=numpy.int32) + frames = padsignal[indices] + win = numpy.tile(win, (numframes, 1)) + + frames = frames.astype(numpy.float32) + raw_frames = numpy.zeros(frames.shape) + for frm in range(frames.shape[0]): + frames[frm,:] = do_dither(frames[frm,:], dither) # dither + frames[frm,:] = do_remove_dc_offset(frames[frm,:]) # remove dc offset + raw_frames[frm,:] = frames[frm,:] + frames[frm,:] = do_preemphasis(frames[frm,:], preemph) # preemphasize + + return frames * win, raw_frames + + +def magspec(frames, NFFT): + """Compute the magnitude spectrum of each frame in frames. If frames is an NxD matrix, output will be Nx(NFFT/2+1). + + :param frames: the array of frames. Each row is a frame. + :param NFFT: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. + :returns: If frames is an NxD matrix, output will be Nx(NFFT/2+1). Each row will be the magnitude spectrum of the corresponding frame. + """ + if numpy.shape(frames)[1] > NFFT: + logging.warn( + 'frame length (%d) is greater than FFT size (%d), frame will be truncated. Increase NFFT to avoid.', + numpy.shape(frames)[1], NFFT) + complex_spec = numpy.fft.rfft(frames, NFFT) + return numpy.absolute(complex_spec) + + +def powspec(frames, NFFT): + """Compute the power spectrum of each frame in frames. If frames is an NxD matrix, output will be Nx(NFFT/2+1). + + :param frames: the array of frames. Each row is a frame. + :param NFFT: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. + :returns: If frames is an NxD matrix, output will be Nx(NFFT/2+1). Each row will be the power spectrum of the corresponding frame. + """ + return numpy.square(magspec(frames, NFFT)) + + + +def mfcc(signal,samplerate=16000,winlen=0.025,winstep=0.01,numcep=13, + nfilt=23,nfft=512,lowfreq=20,highfreq=None,dither=1.0,remove_dc_offset=True,preemph=0.97, + ceplifter=22,useEnergy=True,wintype='povey'): + """Compute MFCC features from an audio signal. + + :param signal: the audio signal from which to compute features. Should be an N*1 array + :param samplerate: the samplerate of the signal we are working with. + :param winlen: the length of the analysis window in seconds. Default is 0.025s (25 milliseconds) + :param winstep: the step between successive windows in seconds. Default is 0.01s (10 milliseconds) + :param numcep: the number of cepstrum to return, default 13 + :param nfilt: the number of filters in the filterbank, default 26. + :param nfft: the FFT size. Default is 512. + :param lowfreq: lowest band edge of mel filters. In Hz, default is 0. + :param highfreq: highest band edge of mel filters. In Hz, default is samplerate/2 + :param preemph: apply preemphasis filter with preemph as coefficient. 0 is no filter. Default is 0.97. 
+
+
+def mfcc(signal,samplerate=16000,winlen=0.025,winstep=0.01,numcep=13,
+         nfilt=23,nfft=512,lowfreq=20,highfreq=None,dither=1.0,remove_dc_offset=True,preemph=0.97,
+         ceplifter=22,useEnergy=True,wintype='povey'):
+    """Compute MFCC features from an audio signal.
+
+    :param signal: the audio signal from which to compute features. Should be an N*1 array
+    :param samplerate: the samplerate of the signal we are working with.
+    :param winlen: the length of the analysis window in seconds. Default is 0.025s (25 milliseconds)
+    :param winstep: the step between successive windows in seconds. Default is 0.01s (10 milliseconds)
+    :param numcep: the number of cepstra to return, default 13
+    :param nfilt: the number of filters in the filterbank, default 23.
+    :param nfft: the FFT size. Default is 512.
+    :param lowfreq: lowest band edge of mel filters. In Hz, default is 20.
+    :param highfreq: highest band edge of mel filters. In Hz, default is samplerate/2
+    :param preemph: apply preemphasis filter with preemph as coefficient. 0 is no filter. Default is 0.97.
+    :param ceplifter: apply a lifter to final cepstral coefficients. 0 is no lifter. Default is 22.
+    :param useEnergy: if this is true, the zeroth cepstral coefficient is replaced with the log of the total frame energy.
+    :param wintype: the analysis window applied to each frame, e.g. 'povey' or 'hamming'.
+    :returns: A numpy array of size (NUMFRAMES by numcep) containing features. Each row holds 1 feature vector.
+    """
+    feat,energy = fbank(signal,samplerate,winlen,winstep,nfilt,nfft,lowfreq,highfreq,dither,remove_dc_offset,preemph,wintype)
+    feat = numpy.log(feat)
+    feat = dct(feat, type=2, axis=1, norm='ortho')[:,:numcep]
+    feat = lifter(feat,ceplifter)
+    if useEnergy: feat[:,0] = numpy.log(energy) # replace first cepstral coefficient with log of frame energy
+    return feat
+
+def fbank(signal,samplerate=16000,winlen=0.025,winstep=0.01,
+          nfilt=40,nfft=512,lowfreq=0,highfreq=None,dither=1.0,remove_dc_offset=True, preemph=0.97,
+          wintype='hamming'):
+    """Compute Mel-filterbank energy features from an audio signal.
+
+    :param signal: the audio signal from which to compute features. Should be an N*1 array
+    :param samplerate: the samplerate of the signal we are working with.
+    :param winlen: the length of the analysis window in seconds. Default is 0.025s (25 milliseconds)
+    :param winstep: the step between successive windows in seconds. Default is 0.01s (10 milliseconds)
+    :param nfilt: the number of filters in the filterbank, default 40.
+    :param nfft: the FFT size. Default is 512.
+    :param lowfreq: lowest band edge of mel filters. In Hz, default is 0.
+    :param highfreq: highest band edge of mel filters. In Hz, default is samplerate/2
+    :param preemph: apply preemphasis filter with preemph as coefficient. 0 is no filter. Default is 0.97.
+    :param wintype: the analysis window applied to each frame, e.g. 'povey' or 'hamming'.
+    :returns: 2 values. The first is a numpy array of size (NUMFRAMES by nfilt) containing features. Each row holds 1 feature vector. The
+        second return value is the energy in each frame (total energy, unwindowed)
+    """
+    highfreq = highfreq or samplerate/2
+    frames,raw_frames = framesig(signal, winlen*samplerate, winstep*samplerate, dither, preemph, remove_dc_offset, wintype)
+    pspec = powspec(frames,nfft) # nearly the same until this part
+    energy = numpy.sum(raw_frames**2,1) # this stores the raw energy in each frame
+    energy = numpy.where(energy == 0,numpy.finfo(float).eps,energy) # if energy is zero, we get problems with log
+
+    fb = get_filterbanks(nfilt,nfft,samplerate,lowfreq,highfreq)
+    feat = numpy.dot(pspec,fb.T) # compute the filterbank energies
+    feat = numpy.where(feat == 0,numpy.finfo(float).eps,feat) # if feat is zero, we get problems with log
+
+    return feat,energy
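+
+# Example wiring (assumes `sig` is a 16 kHz mono float array; names are
+# illustrative, not fixtures):
+#
+#   feat, energy = fbank(sig, samplerate=16000, nfilt=40, wintype='povey')
+#   # feat: (NUMFRAMES, 40) mel energies; energy: (NUMFRAMES,) raw frame energy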
+
+def logfbank(signal,samplerate=16000,winlen=0.025,winstep=0.01,
+             nfilt=40,nfft=512,lowfreq=64,highfreq=None,dither=1.0,remove_dc_offset=True,preemph=0.97,wintype='hamming'):
+    """Compute log Mel-filterbank energy features from an audio signal.
+
+    :param signal: the audio signal from which to compute features. Should be an N*1 array
+    :param samplerate: the samplerate of the signal we are working with.
+    :param winlen: the length of the analysis window in seconds. Default is 0.025s (25 milliseconds)
+    :param winstep: the step between successive windows in seconds. Default is 0.01s (10 milliseconds)
+    :param nfilt: the number of filters in the filterbank, default 40.
+    :param nfft: the FFT size. Default is 512.
+    :param lowfreq: lowest band edge of mel filters. In Hz, default is 64.
+    :param highfreq: highest band edge of mel filters. In Hz, default is samplerate/2
+    :param preemph: apply preemphasis filter with preemph as coefficient. 0 is no filter. Default is 0.97.
+    :returns: A numpy array of size (NUMFRAMES by nfilt) containing features. Each row holds 1 feature vector.
+    """
+    feat,energy = fbank(signal,samplerate,winlen,winstep,nfilt,nfft,lowfreq,highfreq,dither, remove_dc_offset,preemph,wintype)
+    return numpy.log(feat)
+
+def hz2mel(hz):
+    """Convert a value in Hertz to Mels
+
+    :param hz: a value in Hz. This can also be a numpy array, conversion proceeds element-wise.
+    :returns: a value in Mels. If an array was passed in, an identical sized array is returned.
+    """
+    return 1127 * numpy.log(1+hz/700.0)
+
+def mel2hz(mel):
+    """Convert a value in Mels to Hertz
+
+    :param mel: a value in Mels. This can also be a numpy array, conversion proceeds element-wise.
+    :returns: a value in Hertz. If an array was passed in, an identical sized array is returned.
+    """
+    return 700 * (numpy.exp(mel/1127.0)-1)
+
+def get_filterbanks(nfilt=26,nfft=512,samplerate=16000,lowfreq=0,highfreq=None):
+    """Compute a Mel-filterbank. The filters are stored in the rows, the columns correspond
+    to fft bins. The filters are returned as an array of size nfilt * (nfft/2 + 1)
+
+    :param nfilt: the number of filters in the filterbank, default 26.
+    :param nfft: the FFT size. Default is 512.
+    :param samplerate: the samplerate of the signal we are working with. Affects mel spacing.
+    :param lowfreq: lowest band edge of mel filters, default 0 Hz
+    :param highfreq: highest band edge of mel filters, default samplerate/2
+    :returns: A numpy array of size nfilt * (nfft/2 + 1) containing filterbank. Each row holds 1 filter.
+    """
+    highfreq = highfreq or samplerate/2
+    assert highfreq <= samplerate/2, "highfreq is greater than samplerate/2"
+
+    # compute points evenly spaced in mels
+    lowmel = hz2mel(lowfreq)
+    highmel = hz2mel(highfreq)
+
+    # check kaldi/src/feat/Mel-computations.h
+    fbank = numpy.zeros([nfilt,nfft//2+1])
+    mel_freq_delta = (highmel-lowmel)/(nfilt+1)
+    for j in range(0,nfilt):
+        leftmel = lowmel+j*mel_freq_delta
+        centermel = lowmel+(j+1)*mel_freq_delta
+        rightmel = lowmel+(j+2)*mel_freq_delta
+        for i in range(0,nfft//2):
+            mel = hz2mel(i*samplerate/nfft)
+            if mel > leftmel and mel < rightmel:
+                if mel < centermel:
+                    fbank[j,i] = (mel-leftmel)/(centermel-leftmel)
+                else:
+                    fbank[j,i] = (rightmel-mel)/(rightmel-centermel)
+    return fbank
+
+def lifter(cepstra, L=22):
+    """Apply a cepstral lifter to the matrix of cepstra. This increases the magnitude of
+    the high-frequency DCT coefficients.
+
+    :param cepstra: the matrix of mel-cepstra, of size numframes * numcep.
+    :param L: the liftering coefficient to use. Default is 22. L <= 0 disables the lifter.
+    """
+    if L > 0:
+        nframes,ncoeff = numpy.shape(cepstra)
+        n = numpy.arange(ncoeff)
+        lift = 1 + (L/2.)*numpy.sin(numpy.pi*n/L)
+        return lift*cepstra
+    else:
+        # values of L <= 0, do nothing
+        return cepstra
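+
+# Liftering sketch: with L=22 the weights rise from 1.0 at n=0 to a peak of
+# 12.0 at n=11 (1 + (22/2)*sin(pi*11/22) = 12), emphasizing higher-order
+# cepstral coefficients.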
+def delta(feat, N):
+    """Compute delta features from a feature vector sequence.
+
+    :param feat: A numpy array of size (NUMFRAMES by number of features) containing features. Each row holds 1 feature vector.
+    :param N: For each frame, calculate delta features based on preceding and following N frames
+    :returns: A numpy array of size (NUMFRAMES by number of features) containing delta features. Each row holds 1 delta feature vector.
+    """
+    if N < 1:
+        raise ValueError('N must be an integer >= 1')
+    NUMFRAMES = len(feat)
+    denominator = 2 * sum([i**2 for i in range(1, N+1)])
+    delta_feat = numpy.empty_like(feat)
+    padded = numpy.pad(feat, ((N, N), (0, 0)), mode='edge')   # padded version of feat
+    for t in range(NUMFRAMES):
+        delta_feat[t] = numpy.dot(numpy.arange(-N, N+1), padded[t : t+2*N+1]) / denominator   # [t : t+2*N+1] == [(N+t)-N : (N+t)+N+1]
+    return delta_feat
+
+##### modify for test ######
+
+def framesig_without_dither_dc_preemphasize(sig, frame_len, frame_step, wintype='hamming', stride_trick=True):
+    """Frame a signal into overlapping frames.
+
+    :param sig: the audio signal to frame.
+    :param frame_len: length of each frame measured in samples.
+    :param frame_step: number of samples after the start of the previous frame that the next frame should begin.
+    :param wintype: the analysis window to apply to each frame, e.g. 'hamming' (default), 'hann', 'povey' or '' (rectangular).
+    :param stride_trick: use stride trick to compute the rolling window and window multiplication faster
+    :returns: an array of frames. Size is NUMFRAMES by frame_len.
+    """
+    slen = len(sig)
+    frame_len = int(round_half_up(frame_len))
+    frame_step = int(round_half_up(frame_step))
+    if slen <= frame_len:
+        numframes = 1
+    else:
+        numframes = 1 + ((slen - frame_len) // frame_step)
+
+    # check kaldi/src/feat/feature-window.h
+    padsignal = sig[:(numframes-1)*frame_step+frame_len]
+
+    if wintype == 'povey':
+        win = numpy.empty(frame_len)
+        for i in range(frame_len):
+            win[i] = (0.5-0.5*numpy.cos(2*numpy.pi/(frame_len-1)*i))**0.85
+    elif wintype == '':
+        win = numpy.ones(frame_len)
+    elif wintype == 'hann':
+        win = numpy.hanning(frame_len)
+    else: # the hamming window
+        win = numpy.hamming(frame_len)
+
+    if stride_trick:
+        frames = rolling_window(padsignal, window=frame_len, step=frame_step)
+    else:
+        indices = numpy.tile(numpy.arange(0, frame_len), (numframes, 1)) + numpy.tile(
+            numpy.arange(0, numframes * frame_step, frame_step), (frame_len, 1)).T
+        indices = numpy.array(indices, dtype=numpy.int32)
+        frames = padsignal[indices]
+        win = numpy.tile(win, (numframes, 1))
+
+    frames = frames.astype(numpy.float32)
+    raw_frames = frames
+    return frames * win, raw_frames
+
+
+def frames(signal,samplerate=16000,winlen=0.025,winstep=0.01,
+           nfilt=40,nfft=512,lowfreq=0,highfreq=None, wintype='hamming'):
+    frames_with_win, raw_frames = framesig_without_dither_dc_preemphasize(signal, winlen*samplerate, winstep*samplerate, wintype)
+    return frames_with_win, raw_frames
+
+
+def complexspec(frames, NFFT):
+    """Compute the complex spectrum of each frame in frames. If frames is an NxD matrix, output will be Nx(NFFT/2+1).
+
+    :param frames: the array of frames. Each row is a frame.
+    :param NFFT: the FFT length to use. If NFFT > frame_len, the frames are zero-padded.
+    :returns: If frames is an NxD matrix, output will be Nx(NFFT/2+1). Each row will be the complex spectrum of the corresponding frame.
+    """
+    if numpy.shape(frames)[1] > NFFT:
+        logging.warning(
+            'frame length (%d) is greater than FFT size (%d), frame will be truncated. Increase NFFT to avoid.',
+            numpy.shape(frames)[1], NFFT)
+    complex_spec = numpy.fft.rfft(frames, NFFT)
+    return complex_spec
+
+
+def stft_with_window(signal,samplerate=16000,winlen=0.025,winstep=0.01,
+                     nfilt=40,nfft=512,lowfreq=0,highfreq=None,dither=1.0,remove_dc_offset=True, preemph=0.97,
+                     wintype='hamming'):
+    frames_with_win, raw_frames = framesig_without_dither_dc_preemphasize(signal, winlen*samplerate, winstep*samplerate, wintype)
+
+    spec = magspec(frames_with_win, nfft) # nearly the same until this part
+    scomplex = complexspec(frames_with_win, nfft)
+
+    rspec = magspec(raw_frames, nfft)
+    rcomplex = complexspec(raw_frames, nfft)
+    return spec, scomplex, rspec, rcomplex
+
+
+class TestKaldiFE(unittest.TestCase):
+    def setUp(self):
+        self.this_dir = Path(__file__).parent
+
+        self.wavpath = str(self.this_dir / 'english.wav')
+        self.winlen = 0.025   # seconds
+        self.winstep = 0.01   # seconds
+        self.nfft = 512
+        self.lowfreq = 0
+        self.highfreq = None
+        self.wintype = 'hamm'
+        self.nfilt = 40
+
+        paddle.set_device('cpu')
+
+    def test_read(self):
+        import scipy.io.wavfile as wav
+        rate, sig = wav.read(self.wavpath)
+        sr, wav = kaldi.read(self.wavpath)
+        wav = wav[:, 0]
+        self.assertTrue(np.all(sig == wav))
+        self.assertEqual(rate, sr)
+
+    def test_frames(self):
+        sr, wav = kaldi.read(self.wavpath)
+        wav = wav[:, 0]
+        _, fs = frames(wav, samplerate=sr,
+                       winlen=self.winlen, winstep=self.winstep,
+                       nfilt=self.nfilt, nfft=self.nfft,
+                       lowfreq=self.lowfreq, highfreq=self.highfreq,
+                       wintype=self.wintype)
+
+        t_wav = paddle.to_tensor([wav], dtype='float32')
+        t_wavlen = paddle.to_tensor([len(wav)])
+        t_fs, t_nframe = kaldi.frames(t_wav, t_wavlen, sr, self.winlen, self.winstep, clip=False)
+        t_fs = t_fs.astype(fs.dtype)[0]
+
+        self.assertEqual(t_nframe.item(), fs.shape[0])
+        self.assertTrue(np.allclose(t_fs.numpy(), fs))
+
+    def test_stft(self):
+        sr, wav = kaldi.read(self.wavpath)
+        wav = wav[:, 0]
+
+        for wintype in ['', 'hamm', 'hann', 'povey']:
+            self.wintype = wintype
+            _, stft_c_win, _, _ = stft_with_window(wav, samplerate=sr,
+                                                   winlen=self.winlen, winstep=self.winstep,
+                                                   nfilt=self.nfilt, nfft=self.nfft,
+                                                   lowfreq=self.lowfreq, highfreq=self.highfreq,
+                                                   wintype=self.wintype)
+
+            t_wav = paddle.to_tensor([wav], dtype='float32')
+            t_wavlen = paddle.to_tensor([len(wav)])
+
+            stft_class = kaldi.STFT(self.nfft, sr, self.winlen, self.winstep, window_type=self.wintype, dither=0.0, preemph_coeff=0.0, remove_dc_offset=False, clip=False)
+            t_stft, t_nframe = stft_class(t_wav, t_wavlen)
+            t_stft = t_stft.astype(stft_c_win.real.dtype)[0]
+            t_real = t_stft[:, :, 0]
+            t_imag = t_stft[:, :, 1]
+
+            self.assertEqual(t_nframe.item(), stft_c_win.real.shape[0])
+
+            self.assertLess(np.sum(t_real.numpy()) - np.sum(stft_c_win.real), 1)
+            self.assertTrue(np.allclose(t_real.numpy(), stft_c_win.real, atol=1e-1))
+
+            self.assertLess(np.sum(t_imag.numpy()) - np.sum(stft_c_win.imag), 1)
+            self.assertTrue(np.allclose(t_imag.numpy(), stft_c_win.imag, atol=1e-1))
+
+    def test_magspec(self):
+        sr, wav = kaldi.read(self.wavpath)
+        wav = wav[:, 0]
+        for wintype in ['', 'hamm', 'hann', 'povey']:
+            self.wintype = wintype
+            stft_win, _, _, _ = stft_with_window(wav, samplerate=sr,
+                                                 winlen=self.winlen, winstep=self.winstep,
+                                                 nfilt=self.nfilt, nfft=self.nfft,
+                                                 lowfreq=self.lowfreq, highfreq=self.highfreq,
+                                                 wintype=self.wintype)
+
+            t_wav = paddle.to_tensor([wav], dtype='float32')
+            t_wavlen = paddle.to_tensor([len(wav)])
+
+            stft_class = kaldi.STFT(self.nfft, sr, self.winlen, self.winstep, window_type=self.wintype, dither=0.0, preemph_coeff=0.0, remove_dc_offset=False, clip=False)
+            t_stft, t_nframe = stft_class(t_wav, t_wavlen)
+            t_stft = t_stft.astype(stft_win.dtype)
+            t_spec = kaldi.magspec(t_stft)[0]
+
+            self.assertEqual(t_nframe.item(), stft_win.shape[0])
+
+            self.assertLess(np.sum(t_spec.numpy()) - np.sum(stft_win), 1)
+            self.assertTrue(np.allclose(t_spec.numpy(), stft_win, atol=1e-1))
+
+    def test_magspec_winprocess(self):
+        sr, wav = kaldi.read(self.wavpath)
+        wav = wav[:, 0]
+        fs, _ = framesig(wav, self.winlen*sr, self.winstep*sr,
+                         dither=0.0, preemph=0.97, remove_dc_offset=True, wintype='povey', stride_trick=True)
+        spec = magspec(fs, self.nfft) # nearly the same until this part
+
+        t_wav = paddle.to_tensor([wav], dtype='float32')
+        t_wavlen = paddle.to_tensor([len(wav)])
+        stft_class = kaldi.STFT(
+            self.nfft, sr, self.winlen, self.winstep,
+            window_type='povey', dither=0.0, preemph_coeff=0.97, remove_dc_offset=True, clip=False)
+        t_stft, t_nframe = stft_class(t_wav, t_wavlen)
+        t_stft = t_stft.astype(spec.dtype)
+        t_spec = kaldi.magspec(t_stft)[0]
+
+        self.assertEqual(t_nframe.item(), fs.shape[0])
+
+        self.assertLess(np.sum(t_spec.numpy()) - np.sum(spec), 1)
+        self.assertTrue(np.allclose(t_spec.numpy(), spec, atol=1e-1))
+
+    def test_powspec(self):
+        sr, wav = kaldi.read(self.wavpath)
+        wav = wav[:, 0]
+        for wintype in ['', 'hamm', 'hann', 'povey']:
+            self.wintype = wintype
+            stft_win, _, _, _ = stft_with_window(wav, samplerate=sr,
+                                                 winlen=self.winlen, winstep=self.winstep,
+                                                 nfilt=self.nfilt, nfft=self.nfft,
+                                                 lowfreq=self.lowfreq, highfreq=self.highfreq,
+                                                 wintype=self.wintype)
+            stft_win = np.square(stft_win)
+
+            t_wav = paddle.to_tensor([wav], dtype='float32')
+            t_wavlen = paddle.to_tensor([len(wav)])
+
+            stft_class = kaldi.STFT(self.nfft, sr, self.winlen, self.winstep, window_type=self.wintype, dither=0.0, preemph_coeff=0.0, remove_dc_offset=False, clip=False)
+            t_stft, t_nframe = stft_class(t_wav, t_wavlen)
+            t_stft = t_stft.astype(stft_win.dtype)
+            t_spec = kaldi.powspec(t_stft)[0]
+
+            self.assertEqual(t_nframe.item(), stft_win.shape[0])
+
+            self.assertLess(np.sum(t_spec.numpy() - stft_win), 5e4)
+            self.assertTrue(np.allclose(t_spec.numpy(), stft_win, atol=1e2))
+
+
+# from python_speech_features import mfcc
+# from python_speech_features import delta
+# from python_speech_features import logfbank
+# import scipy.io.wavfile as wav
+
+# (rate, sig) = wav.read("english.wav")
+
+# # note that generally nfilt=40 is used for speech recognition
+# fbank_feat = logfbank(sig, nfilt=23, lowfreq=20, dither=0, wintype='povey')
+
+# # the computed fbank coefficients of english.wav, with shape [110, 23]:
+# # [ 12.2865 12.6906 13.1765 15.714 16.064 15.7553 16.5746 16.9205 16.6472 16.1302 16.4576 16.7326 16.8864 17.7215 18.88 19.1377 19.1495 18.6683 18.3886 20.3506 20.2772 18.8248 18.1899
+# #   11.9198 13.146 14.7215 15.8642 17.4288 16.394 16.8238 16.1095 16.4297 16.6331 16.3163 16.5093 17.4981 18.3429 19.6555 19.6263 19.8435 19.0534 19.001 20.0287 19.7707 19.5852 19.1112
+# #   ...
+# #   ...
+# # the same as the output of the kaldi command: compute-fbank-feats --dither=0.0
+
+
+# mfcc_feat = mfcc(sig, dither=0, useEnergy=True, wintype='povey')
+
+# # the computed mfcc coefficients of english.wav, with shape [110, 13]:
+# # [ 17.1337 -23.3651 -7.41751 -7.73686 -21.3682 -8.93884 -3.70843 4.68346 -16.0676 12.782 -7.24054 8.25089 10.7292
+# #   17.1692 -23.3028 -5.61872 -4.0075 -23.287 -20.6101 -5.51584 -6.15273 -14.4333 8.13052 -0.0345329 2.06274 -0.564298
+# #   ...
+# #   ...
+# # the same as the output of the kaldi command: compute-mfcc-feats --dither=0.0
+
+
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/third_party/text_processing/__ini__.py b/third_party/text_processing/__ini__.py
new file mode 100644
index 000000000..8d1c8b69c
--- /dev/null
+++ b/third_party/text_processing/__ini__.py
@@ -0,0 +1 @@
+
diff --git a/third_party/text_processing/__init__.py b/third_party/text_processing/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/third_party/text_processing/normalization/__init__.py b/third_party/text_processing/normalization/__init__.py
new file mode 100644
index 000000000..0b4f0e7f8
--- /dev/null
+++ b/third_party/text_processing/normalization/__init__.py
@@ -0,0 +1,42 @@
+from .sentence_split import split
+from .num import RE_NUMBER, RE_FRAC, RE_PERCENTAGE, RE_RANGE, RE_INTEGER, RE_DEFAULT_NUM
+from .num import replace_number, replace_frac, replace_percentage, replace_range, replace_default_num
+
+from .chronology import RE_TIME, RE_DATE, RE_DATE2
+from .chronology import replace_time, replace_date, replace_date2
+
+from .quantifier import RE_TEMPERATURE
+from .quantifier import replace_temperature
+
+from .phone import RE_MOBILE_PHONE, RE_TELEPHONE, replace_phone
+
+from .char_convert import tranditional_to_simplified
+from .constants import F2H_ASCII_LETTERS, F2H_DIGITS, F2H_SPACE
+
+
+def normalize_sentence(sentence):
+    # basic character conversions (traditional -> simplified, full-width -> half-width)
+    sentence = tranditional_to_simplified(sentence)
+    sentence = sentence.translate(F2H_ASCII_LETTERS).translate(
+        F2H_DIGITS).translate(F2H_SPACE)
+
+    # number-related NSW (non-standard word) verbalization
+    sentence = RE_DATE.sub(replace_date, sentence)
+    sentence = RE_DATE2.sub(replace_date2, sentence)
+    sentence = RE_TIME.sub(replace_time, sentence)
+    sentence = RE_TEMPERATURE.sub(replace_temperature, sentence)
+    sentence = RE_RANGE.sub(replace_range, sentence)
+    sentence = RE_FRAC.sub(replace_frac, sentence)
+    sentence = RE_PERCENTAGE.sub(replace_percentage, sentence)
+    sentence = RE_MOBILE_PHONE.sub(replace_phone, sentence)
+    sentence = RE_TELEPHONE.sub(replace_phone, sentence)
+    sentence = RE_DEFAULT_NUM.sub(replace_default_num, sentence)
+    sentence = RE_NUMBER.sub(replace_number, sentence)
+
+    return sentence
+
+
+def normalize(text):
+    sentences = split(text)
+    sentences = [normalize_sentence(sent) for sent in sentences]
+    return sentences
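+
+# A minimal usage sketch (illustrative; the expected output follows from the
+# rules in this package and is not a golden test value):
+#
+#   >>> normalize("今天是2021年10月2日,最低温度-3°C。")
+#   ['今天是二零二一年十月二日,最低温度零下三度。']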
diff --git a/third_party/text_processing/normalization/char_convert.py b/third_party/text_processing/normalization/char_convert.py
new file mode 100644
index 000000000..bd328f695
--- /dev/null
+++ b/third_party/text_processing/normalization/char_convert.py
@@ -0,0 +1,15 @@
+"""Traditional and simplified Chinese conversion with
+`opencc <https://github.com/BYVoid/OpenCC>`_.
+"""
+
+
+import opencc
+
+_t2s_converter = opencc.OpenCC("t2s.json")
+_s2t_converter = opencc.OpenCC('s2t.json')
+
+def tranditional_to_simplified(text: str) -> str:
+    return _t2s_converter.convert(text)
+
+def simplified_to_traditional(text: str) -> str:
+    return _s2t_converter.convert(text)
diff --git a/third_party/text_processing/normalization/chronology.py b/third_party/text_processing/normalization/chronology.py
new file mode 100644
index 000000000..7143eb58c
--- /dev/null
+++ b/third_party/text_processing/normalization/chronology.py
@@ -0,0 +1,64 @@
+import re
+from .num import verbalize_cardinal, verbalize_digit, num2str, DIGITS
+
+
+def _time_num2str(num_string: str) -> str:
+    """A special case for verbalizing numbers in a time of day."""
+    result = num2str(num_string.lstrip('0'))
+    if num_string.startswith('0'):
+        result = DIGITS['0'] + result
+    return result
+
+# time-of-day expressions, e.g. 12:30:45
+RE_TIME = re.compile(
+    r'([0-1]?[0-9]|2[0-3])'
+    r':([0-5][0-9])'
+    r'(:([0-5][0-9]))?'
+)
+def replace_time(match: re.Match) -> str:
+    hour = match.group(1)
+    minute = match.group(2)
+    second = match.group(4)
+
+    result = f"{num2str(hour)}点"
+    if minute.lstrip('0'):
+        result += f"{_time_num2str(minute)}分"
+    if second and second.lstrip('0'):
+        result += f"{_time_num2str(second)}秒"
+    return result
+
+
+RE_DATE = re.compile(
+    r'(\d{4}|\d{2})年'
+    r'((0?[1-9]|1[0-2])月)?'
+    r'(((0?[1-9])|((1|2)[0-9])|30|31)([日号]))?'
+)
+def replace_date(match: re.Match) -> str:
+    year = match.group(1)
+    month = match.group(3)
+    day = match.group(5)
+    result = ""
+    if year:
+        result += f"{verbalize_digit(year)}年"
+    if month:
+        result += f"{verbalize_cardinal(month)}月"
+    if day:
+        result += f"{verbalize_cardinal(day)}{match.group(9)}"
+    return result
+
+# YYYY/MM/DD or YYYY-MM-DD dates separated by -, / or .
+RE_DATE2 = re.compile(
+    r'(\d{4})([- /.])(0[1-9]|1[012])\2(0[1-9]|[12][0-9]|3[01])'
+)
+def replace_date2(match: re.Match) -> str:
+    year = match.group(1)
+    month = match.group(3)
+    day = match.group(4)
+    result = ""
+    if year:
+        result += f"{verbalize_digit(year)}年"
+    if month:
+        result += f"{verbalize_cardinal(month)}月"
+    if day:
+        result += f"{verbalize_cardinal(day)}日"
+    return result
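+
+# Illustrative behaviour (expected values derived from the rules above):
+#
+#   >>> RE_TIME.sub(replace_time, '08:30')
+#   '八点三十分'
+#   >>> RE_DATE.sub(replace_date, '2021年10月2日')
+#   '二零二一年十月二日'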
diff --git a/third_party/text_processing/normalization/constants.py b/third_party/text_processing/normalization/constants.py
new file mode 100644
index 000000000..d5c04a761
--- /dev/null
+++ b/third_party/text_processing/normalization/constants.py
@@ -0,0 +1,58 @@
+import string
+import re
+from pypinyin.constants import SUPPORT_UCS4
+
+
+# full-width / half-width conversion
+# full-width -> half-width ASCII letter map (52 entries)
+F2H_ASCII_LETTERS = {
+    chr(ord(char) + 65248): char
+    for char in string.ascii_letters
+}
+
+# half-width -> full-width ASCII letter map
+H2F_ASCII_LETTERS = {value: key for key, value in F2H_ASCII_LETTERS.items()}
+
+# full-width -> half-width digit map (10 entries)
+F2H_DIGITS = {
+    chr(ord(char) + 65248): char
+    for char in string.digits
+}
+# half-width -> full-width digit map
+H2F_DIGITS = {value: key for key, value in F2H_DIGITS.items()}
+
+# full-width -> half-width punctuation map (32 entries)
+F2H_PUNCTUATIONS = {
+    chr(ord(char) + 65248): char
+    for char in string.punctuation
+}
+# half-width -> full-width punctuation map
+H2F_PUNCTUATIONS = {value: key for key, value in F2H_PUNCTUATIONS.items()}
+
+# space (1 entry)
+F2H_SPACE = {'\u3000': ' '}
+H2F_SPACE = {' ': '\u3000'}
+
+# runs of characters that are not Chinese characters with pinyin readings;
+# can be used to extract NSW (non-standard word) spans
+if SUPPORT_UCS4:
+    RE_NSW = re.compile(
+        r'(?:[^'
+        r'\u3007'                  # 〇
+        r'\u3400-\u4dbf'           # CJK Extension A: [3400-4DBF]
+        r'\u4e00-\u9fff'           # CJK Unified Ideographs: [4E00-9FFF]
+        r'\uf900-\ufaff'           # CJK Compatibility Ideographs: [F900-FAFF]
+        r'\U00020000-\U0002A6DF'   # CJK Extension B: [20000-2A6DF]
+        r'\U0002A703-\U0002B73F'   # CJK Extension C: [2A700-2B73F]
+        r'\U0002B740-\U0002B81D'   # CJK Extension D: [2B740-2B81D]
+        r'\U0002F80A-\U0002FA1F'   # CJK Compatibility Supplement: [2F800-2FA1F]
+        r'])+'
+    )
+else:
+    RE_NSW = re.compile(           # pragma: no cover
+        r'(?:[^'
+        r'\u3007'                  # 〇
+        r'\u3400-\u4dbf'           # CJK Extension A: [3400-4DBF]
+        r'\u4e00-\u9fff'           # CJK Unified Ideographs: [4E00-9FFF]
+        r'\uf900-\ufaff'           # CJK Compatibility Ideographs: [F900-FAFF]
+        r'])+'
+    )
diff --git a/third_party/text_processing/normalization/num.py b/third_party/text_processing/normalization/num.py
new file mode 100644
index 000000000..60fc1686d
--- /dev/null
+++ b/third_party/text_processing/normalization/num.py
@@ -0,0 +1,155 @@
+"""
+Rules to verbalize numbers into Chinese characters.
+https://zh.wikipedia.org/wiki/中文数字#現代中文
+"""
+
+import re
+from typing import List
+from collections import OrderedDict
+
+DIGITS = {str(i): tran for i, tran in enumerate('零一二三四五六七八九')}
+UNITS = OrderedDict({
+    1: '十',
+    2: '百',
+    3: '千',
+    4: '万',
+    8: '亿',
+})
+
+# fraction expressions, e.g. 2/3
+RE_FRAC = re.compile(r'(-?)(\d+)/(\d+)')
+def replace_frac(match: re.Match) -> str:
+    sign = match.group(1)
+    nominator = match.group(2)
+    denominator = match.group(3)
+    sign: str = "负" if sign else ""
+    nominator: str = num2str(nominator)
+    denominator: str = num2str(denominator)
+    result = f"{sign}{denominator}分之{nominator}"
+    return result
+
+
+# percentage expressions, e.g. 45%
+RE_PERCENTAGE = re.compile(r'(-?)(\d+(\.\d+)?)%')
+def replace_percentage(match: re.Match) -> str:
+    sign = match.group(1)
+    percent = match.group(2)
+    sign: str = "负" if sign else ""
+    percent: str = num2str(percent)
+    result = f"{sign}百分之{percent}"
+    return result
+
+# integer expressions:
+# integers with or without a sign, e.g. 12, -10
+RE_INTEGER = re.compile(
+    r'(-?)'
+    r'(\d+)'
+)
+
+# serial numbers (unsigned integers read digit by digit), e.g. 00078
+RE_DEFAULT_NUM = re.compile(r'\d{4}\d*')
+def replace_default_num(match: re.Match):
+    number = match.group(0)
+    return verbalize_digit(number)
+
+# number expressions:
+# 1. integers: -10, 10;
+# 2. floats: 10.2, -0.3;
+# 3. pure decimals without a sign or integer part: .22, .38
+RE_NUMBER = re.compile(
+    r'(-?)((\d+)(\.\d+)?)'
+    r'|(\.(\d+))'
+)
+def replace_number(match: re.Match) -> str:
+    sign = match.group(1)
+    number = match.group(2)
+    pure_decimal = match.group(5)
+    if pure_decimal:
+        result = num2str(pure_decimal)
+    else:
+        sign: str = "负" if sign else ""
+        number: str = num2str(number)
+        result = f"{sign}{number}"
+    return result
+
+# range expressions, e.g. 12-23, 12~23
+RE_RANGE = re.compile(
+    r'(\d+)[-~](\d+)'
+)
+def replace_range(match: re.Match) -> str:
+    first, second = match.group(1), match.group(2)
+    first: str = num2str(first)
+    second: str = num2str(second)
+    result = f"{first}到{second}"
+    return result
+
+
+def _get_value(value_string: str, use_zero: bool=True) -> List[str]:
+    stripped = value_string.lstrip('0')
+    if len(stripped) == 0:
+        return []
+    elif len(stripped) == 1:
+        if use_zero and len(stripped) < len(value_string):
+            return [DIGITS['0'], DIGITS[stripped]]
+        else:
+            return [DIGITS[stripped]]
+    else:
+        largest_unit = next(power for power in reversed(UNITS.keys()) if power < len(stripped))
+        first_part = value_string[:-largest_unit]
+        second_part = value_string[-largest_unit:]
+        return _get_value(first_part) + [UNITS[largest_unit]] + _get_value(second_part)
+
+def verbalize_cardinal(value_string: str) -> str:
+    if not value_string:
+        return ''
+
+    # 000 -> '零' , 0 -> '零'
+    value_string = value_string.lstrip('0')
+    if len(value_string) == 0:
+        return DIGITS['0']
+
+    result_symbols = _get_value(value_string)
+    # a verbalized number starting with '一十*' is abbreviated as '十*'
+    if len(result_symbols) >= 2 and result_symbols[0] == DIGITS['1'] and result_symbols[1] == UNITS[1]:
+        result_symbols = result_symbols[1:]
+    return ''.join(result_symbols)
+
+def verbalize_digit(value_string: str, alt_one=False) -> str:
+    result_symbols = [DIGITS[digit] for digit in value_string]
+    result = ''.join(result_symbols)
+    if alt_one:
+        result = result.replace("一", "幺")
+    return result
+
+def num2str(value_string: str) -> str:
+    integer_decimal = value_string.split('.')
+    if len(integer_decimal) == 1:
+        integer = integer_decimal[0]
+        decimal = ''
+    elif len(integer_decimal) == 2:
+        integer, decimal = integer_decimal
+    else:
+        raise ValueError(f"The value string: '{value_string}' has more than one point in it.")
+
+    result = verbalize_cardinal(integer)
+
+    decimal = decimal.rstrip('0')
+    if decimal:
+        # '.22' is verbalized as '点二二'
+        # '3.20' is verbalized as '三点二'
+        result += '点' + verbalize_digit(decimal)
+    return result
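+
+# A few illustrative values (derived from the rules above, not golden data):
+#
+#   >>> num2str('10001')
+#   '一万零一'
+#   >>> num2str('3.20')     # trailing zeros in the decimal part are dropped
+#   '三点二'
+#   >>> verbalize_digit('110', alt_one=True)   # '一' is read as '幺'
+#   '幺幺零'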
diff --git a/third_party/text_processing/normalization/phone.py b/third_party/text_processing/normalization/phone.py
new file mode 100644
index 000000000..1acc18365
--- /dev/null
+++ b/third_party/text_processing/normalization/phone.py
@@ -0,0 +1,31 @@
+import re
+from .num import verbalize_digit
+
+
+# normalization of landline / mobile phone numbers
+# mobile prefixes (see http://www.jihaoba.com/news/show/13680):
+# China Mobile: 139, 138, 137, 136, 135, 134, 159, 158, 157, 150, 151, 152, 188, 187, 182, 183, 184, 178, 198
+# China Unicom: 130, 131, 132, 156, 155, 186, 185, 176
+# China Telecom: 133, 153, 189, 180, 181, 177
+RE_MOBILE_PHONE = re.compile(
+    r"(?<!\d)((\+?86 ?)?1([38]\d|5[0-35-9]|7[678]|9[89])\d{8})(?!\d)")
+RE_TELEPHONE = re.compile(
+    r"(?<!\d)((0(10|2[1-3]|[3-9]\d{2})-?)?[1-9]\d{6,7})(?!\d)")
+
+
+def phone2str(phone_string: str, mobile=True) -> str:
+    if mobile:
+        sp_parts = phone_string.strip('+').split()
+        result = ''.join(
+            [verbalize_digit(part, alt_one=True) for part in sp_parts])
+        return result
+    else:
+        sil_parts = phone_string.split('-')
+        result = ''.join(
+            [verbalize_digit(part, alt_one=True) for part in sil_parts])
+        return result
+
+
+def replace_phone(match: re.Match) -> str:
+    return phone2str(match.group(0))
diff --git a/third_party/text_processing/normalization/quantifier.py b/third_party/text_processing/normalization/quantifier.py
new file mode 100644
index 000000000..024eb6e01
--- /dev/null
+++ b/third_party/text_processing/normalization/quantifier.py
@@ -0,0 +1,18 @@
+import re
+from .num import num2str
+
+
+# temperature expressions; a temperature changes how the minus sign is read,
+# e.g. -3°C -> 零下三度
+RE_TEMPERATURE = re.compile(
+    r'(-?)(\d+(\.\d+)?)(°C|℃|度|摄氏度)'
+)
+def replace_temperature(match: re.Match) -> str:
+    sign = match.group(1)
+    temperature = match.group(2)
+    unit = match.group(4)
+    sign: str = "零下" if sign else ""
+    temperature: str = num2str(temperature)
+    unit: str = "摄氏度" if unit == "摄氏度" else "度"
+    result = f"{sign}{temperature}{unit}"
+    return result
diff --git a/third_party/text_processing/normalization/sentence_split.py b/third_party/text_processing/normalization/sentence_split.py
new file mode 100644
index 000000000..5867342ba
--- /dev/null
+++ b/third_party/text_processing/normalization/sentence_split.py
@@ -0,0 +1,23 @@
+import re
+from typing import List
+
+
+SENTENCE_SPLITOR = re.compile(r'([。!?][”’]?)')
+
+def split(text: str) -> List[str]:
+    """Split long text into sentences at sentence-ending punctuation.
+
+    Parameters
+    ----------
+    text : str
+        The input text.
+
+    Returns
+    -------
+    List[str]
+        Sentences.
+    """
+    text = SENTENCE_SPLITOR.sub(r'\1\n', text)
+    text = text.strip()
+    sentences = [sentence.strip() for sentence in re.split(r'\n+', text)]
+    return sentences
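+
+# Example (illustrative): the splitter keeps the sentence-ending punctuation
+# (and a following closing quote, if any) attached to its sentence.
+#
+#   >>> split('你好!今天天气怎么样?')
+#   ['你好!', '今天天气怎么样?']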
diff --git a/tools/Makefile b/tools/Makefile
index c129bf5a2..c925054b8 100644
--- a/tools/Makefile
+++ b/tools/Makefile
@@ -1,7 +1,16 @@
+SHELL:= /bin/bash
 PYTHON:= python3.7
+
+CXX ?= g++
+CC ?= gcc        # used for sph2pipe
+# CXX = clang++  # Uncomment these lines...
+# CC = clang     # ...to build with Clang.
+
+WGET ?= wget
+
 .PHONY: all clean
 
-all: virtualenv kenlm.done sox.done soxbindings.done
+all: virtualenv kenlm.done sox.done soxbindings.done mfa.done sclite.done
 
 virtualenv:
 	test -d venv || virtualenv -p $(PYTHON) venv
@@ -18,8 +27,8 @@ kenlm.done:
 	apt install -y build-essential cmake libboost-system-dev libboost-thread-dev libboost-program-options-dev libboost-test-dev libeigen3-dev zlib1g-dev libbz2-dev liblzma-dev
 	apt-get install -y gcc-5 g++-5 && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 50 && update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 50
 	test -d kenlm || wget -O - https://kheafield.com/code/kenlm.tar.gz | tar xz
-	mkdir -p kenlm/build && cd kenlm/build && cmake .. && make -j4 && make install
-	cd kenlm && python setup.py install
+	rm -rf kenlm/build && mkdir -p kenlm/build && cd kenlm/build && cmake .. && make -j4 && make install
+	source venv/bin/activate; cd kenlm && python setup.py install
 	touch kenlm.done
 
 sox.done:
@@ -31,5 +40,57 @@ sox.done:
 soxbindings.done:
 	test -d soxbindings || git clone https://github.com/pseeth/soxbindings.git
-	source venv/bin/activate; cd soxbindings && python3 setup.py install
+	source venv/bin/activate; cd soxbindings && python setup.py install
 	touch soxbindings.done
+
+mfa.done:
+	test -d montreal-forced-aligner || wget https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner/releases/download/v1.0.1/montreal-forced-aligner_linux.tar.gz
+	tar xvf montreal-forced-aligner_linux.tar.gz
+	touch mfa.done
+
+
+#== SCTK ===============================================================================
+# SCTK official repo does not have version tags. Here's the mapping:
+# 2.4.9 = 659bc36; 2.4.10 = d914e1b; 2.4.11 = 20159b5.
+SCTK_GITHASH = 20159b5
+
+SCTK_CXFLAGS = -w -march=native
+SCTK_MKENV = CFLAGS="$(CFLAGS) $(SCTK_CXFLAGS)" \
+             CXXFLAGS="$(CXXFLAGS) -std=c++11 $(SCTK_CXFLAGS)" \
+
+
+# Keep the existing target 'sclite' to avoid breaking the users who might have
+# scripted it in.
+.PHONY: sclite.done sctk_cleaned sctk_made
+
+sclite.done sctk_made: sctk/.compiled
+	touch sclite.done
+
+sctk/.compiled: sctk
+	rm -f sctk/.compiled
+	$(SCTK_MKENV) $(MAKE) -C sctk config
+	$(SCTK_MKENV) $(MAKE) -C sctk all doc
+	$(MAKE) -C sctk install
+	touch sctk/.compiled
+
+# The GitHub archive unpacks into SCTK-{40-character-long-hash}/
+sctk: sctk-$(SCTK_GITHASH).tar.gz
+	tar zxvf sctk-$(SCTK_GITHASH).tar.gz
+	rm -rf sctk-$(SCTK_GITHASH) sctk
+	mv SCTK-$(SCTK_GITHASH)* sctk-$(SCTK_GITHASH)
+	ln -s sctk-$(SCTK_GITHASH) sctk
+	touch sctk-$(SCTK_GITHASH).tar.gz
+
+sctk-$(SCTK_GITHASH).tar.gz:
+	if [ -d '$(DOWNLOAD_DIR)' ]; then \
+	  cp -p '$(DOWNLOAD_DIR)/sctk-$(SCTK_GITHASH).tar.gz' .; \
+	else \
+	  $(WGET) -nv -T 10 -t 3 -O sctk-$(SCTK_GITHASH).tar.gz \
+	    https://github.com/usnistgov/SCTK/archive/$(SCTK_GITHASH).tar.gz; \
+	fi
+
+sctk_cleaned:
+	-for d in sctk/ sctk-*/; do \
+	   [ ! -f $$d/.compiled ] || $(MAKE) -C $$d clean; \
+	   rm -f $$d/.compiled; \
+	done
diff --git a/tools/extras/README.md b/tools/extras/README.md
new file mode 100644
index 000000000..19c06a134
--- /dev/null
+++ b/tools/extras/README.md
@@ -0,0 +1,11 @@
+1. kaldi
+
+   deps: gcc, MKL or OpenBLAS
+
+2. OpenFST/ngram/pynini
+
+   deps: gcc
+
+3. MFA
+
+   deps: kaldi
diff --git a/tools/extras/install_gcc.sh b/tools/extras/install_gcc.sh
new file mode 100755
index 000000000..eb4ea1f05
--- /dev/null
+++ b/tools/extras/install_gcc.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+set -e
+set -x
+
+# gcc
+apt update -y
+apt install build-essential -y
+apt install software-properties-common -y
+add-apt-repository ppa:ubuntu-toolchain-r/test
+apt install gcc-8 g++-8 -y
+update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 80
+update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-8 80
+update-alternatives --config gcc
+
+# gfortran
+apt-get install gfortran-8
diff --git a/tools/extras/install_kaldi.sh b/tools/extras/install_kaldi.sh
new file mode 100755
index 000000000..b87232b01
--- /dev/null
+++ b/tools/extras/install_kaldi.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Installation script for Kaldi
+#
+set -e
+
+apt-get install subversion -y
+
+KALDI_GIT="--depth 1 -b master https://github.com/kaldi-asr/kaldi.git"
+
+KALDI_DIR="$PWD/kaldi"
+
+if [ ! -d "$KALDI_DIR" ]; then
+  git clone $KALDI_GIT $KALDI_DIR
+else
+  echo "$KALDI_DIR already exists!"
+fi
+
+cd "$KALDI_DIR/tools"
+git pull
+
+# Prevent kaldi from switching the default python version
+mkdir -p "python"
+touch "python/.use_default_python"
+
+./extras/check_dependencies.sh
+
+make -j4
+
+pushd ../src
+./configure --shared --use-cuda=no --static-math --mathlib=OPENBLAS --openblas-root=${KALDI_DIR}/../OpenBLAS/install
+make clean -j && make depend -j && make -j4
+popd
+
+echo "Done installing Kaldi."
diff --git a/tools/extras/install_kenlm.sh b/tools/extras/install_kenlm.sh
new file mode 100755
index 000000000..100225bf9
--- /dev/null
+++ b/tools/extras/install_kenlm.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+apt install -y build-essential cmake libboost-system-dev libboost-thread-dev libboost-program-options-dev libboost-test-dev libeigen3-dev zlib1g-dev libbz2-dev liblzma-dev
+
+apt-get install -y gcc-5 g++-5 && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 50 && update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 50
+
+test -d kenlm || wget -O - https://kheafield.com/code/kenlm.tar.gz | tar xz
+
+rm -rf kenlm/build && mkdir -p kenlm/build && cd kenlm/build && cmake .. && make -j4 && make install
diff --git a/tools/extras/install_liblbfgs.sh b/tools/extras/install_liblbfgs.sh
new file mode 100755
index 000000000..8d6ae4ab7
--- /dev/null
+++ b/tools/extras/install_liblbfgs.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+VER=1.10
+
+WGET=${WGET:-wget}
+
+if [ ! -f liblbfgs-$VER.tar.gz ]; then
+  if [ -d "$DOWNLOAD_DIR" ]; then
+    cp -p "$DOWNLOAD_DIR/liblbfgs-$VER.tar.gz" . || exit 1
+  else
+    $WGET https://github.com/downloads/chokkan/liblbfgs/liblbfgs-$VER.tar.gz || exit 1
+  fi
+fi
+
+tar -xzf liblbfgs-$VER.tar.gz
+cd liblbfgs-$VER
+./configure --prefix=`pwd`
+make
+# due to the liblbfgs project directory structure, we have to use -i,
+# but the errors are completely harmless
+make -i install
+cd ..
+
+(
+  [ ! -z "${LIBLBFGS}" ] && \
+    echo >&2 "LIBLBFGS variable is already defined. Undefining..." && \
+    unset LIBLBFGS
+
+  [ -f ./env.sh ] && . ./env.sh
+
+  [ ! -z "${LIBLBFGS}" ] && \
+    echo >&2 "libLBFGS config is already in env.sh" && exit
+
+  wd=`pwd`
+  wd=`readlink -f $wd || pwd`
+
+  echo "export LIBLBFGS=$wd/liblbfgs-1.10"
+  echo export LD_LIBRARY_PATH='${LD_LIBRARY_PATH:-}':'${LIBLBFGS}'/lib/.libs
+) >> env.sh
+
diff --git a/tools/extras/install_mfa.sh b/tools/extras/install_mfa.sh
new file mode 100755
index 000000000..ae126fa62
--- /dev/null
+++ b/tools/extras/install_mfa.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+# install OpenBLAS and Kaldi first
+
+test -d Montreal-Forced-Aligner || git clone https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner.git
+
+pushd Montreal-Forced-Aligner && python setup.py install && popd
+
+test -d kaldi || { echo "need to install kaldi first"; exit 1;}
+
+mfa thirdparty kaldi $PWD/kaldi
+
+mfa thirdparty validate
+
+echo "install mfa done."
diff --git a/tools/extras/install_miniconda.sh b/tools/extras/install_miniconda.sh
new file mode 100755
index 000000000..3d1909af6
--- /dev/null
+++ b/tools/extras/install_miniconda.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+WGET=${WGET:-wget}
+
+# This script automatically chooses miniconda's default installation settings.
+# Miniconda will be installed in the HOME directory ($HOME/miniconda3),
+# and miniconda's python will not be made the default python.
+
+if [ -d "$DOWNLOAD_DIR" ]; then
+  cp -p "$DOWNLOAD_DIR/Miniconda3-latest-Linux-x86_64.sh" . || exit 1
+else
+  $WGET https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh || exit 1
+fi
+bash Miniconda3-latest-Linux-x86_64.sh -b
+
+$HOME/miniconda3/bin/python -m pip install --user tqdm
+$HOME/miniconda3/bin/python -m pip install --user scikit-learn
+$HOME/miniconda3/bin/python -m pip install --user librosa
+$HOME/miniconda3/bin/python -m pip install --user h5py
diff --git a/tools/extras/install_mkl.sh b/tools/extras/install_mkl.sh
new file mode 100755
index 000000000..8c1899bdf
--- /dev/null
+++ b/tools/extras/install_mkl.sh
@@ -0,0 +1,277 @@
+#!/usr/bin/env bash
+
+# Intel MKL is now freely available even for commercial use. This script
+# attempts to install the MKL package automatically from Intel's repository.
+#
+# For manual repository setup instructions, see:
+#   https://software.intel.com/articles/installing-intel-free-libs-and-python-yum-repo
+#   https://software.intel.com/articles/installing-intel-free-libs-and-python-apt-repo
+#
+# For other package managers, or non-Linux platforms, see:
+#   https://software.intel.com/mkl/choose-download
+
+set -o pipefail
+
+default_package=intel-mkl-64bit-2020.0-088
+
+yum_repo='https://yum.repos.intel.com/mkl/setup/intel-mkl.repo'
+apt_repo='https://apt.repos.intel.com/mkl'
+intel_key_url='https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB'
+
+Usage () {
+  cat >&2 <<EOF
+Usage: $0 [-s] [-p <suse|redhat|debian|fedora|arch>] [<package>]
+
+Checks if MKL is present on the system, and/or attempts to install it.
+
+If <package> is not provided, ${default_package} will be installed.
+
+Intel packages are installed under the /opt/intel directory. You should be root
+to install MKL into this directory; run this script using the sudo command.
+
+Options:
+  -s           - Skip check for MKL being already present.
+  -p <distro>  -- Force type of package management. Use only
+                  if automatic detection fails, as instructed.
+  -h           - Show this message.
+
+Environment:
+  CC   The C compiler to use for the MKL check. If not set, uses 'cc'.
+EOF
+  exit 2
+}
+
+Fatal () { echo "$0: $@"; exit 1; }
+
+Have () { type -t "$1" >/dev/null; }
+
+# Option values.
+skip_cc=
+distro=
+
+while getopts ":hksp:" opt; do
+  case ${opt} in
+    h) Usage ;;
+    s) skip_cc=yes ;;
+    p) case $OPTARG in
+         suse|redhat|debian|fedora|arch) distro=$OPTARG ;;
+         *) Fatal "invalid value -p '${OPTARG}'. " \
+                  "Allowed: 'suse', 'redhat', 'debian', 'fedora', or 'arch'."
+       esac ;;
+    \?) echo >&2 "$0: invalid option -${OPTARG}."; Usage ;;
+  esac
+done
+shift $((OPTIND-1))
+
+orig_arg_package=${1-''}
+package=${1:-$default_package}
+
+# Check that we are actually on Linux, otherwise give a helpful reference.
+[[ $(uname) == Linux ]] || Fatal "\
+This script can be used on Linux only, and your system is $(uname).
+
+Installer packages for Mac and Windows are available for download from Intel:
+https://software.intel.com/mkl/choose-download"
+
+# Test if MKL is already installed on the system.
+if [[ ! $skip_cc ]]; then
+  : ${CC:=cc}
+  Have "$CC" || Fatal "\
+C compiler $CC not found.
+
+You can skip the check for MKL presence by invoking this script with the '-s'
+option, but you will need a functional compiler anyway, so we
+recommend that you install it first."
+
+  mkl_version=$($CC -E -I /opt/intel/mkl/include - <<< \
+                    '#include <mkl_version.h>
+                     __INTEL_MKL__.__INTEL_MKL_MINOR__.__INTEL_MKL_UPDATE__' 2>/dev/null |
+                tail -n 1 ) || mkl_version=
+  mkl_version=${mkl_version// /}
+
+  [[ $mkl_version ]] && Fatal "\
+MKL version $mkl_version is already installed.
+
+You can skip the check for MKL presence by invoking this script with the '-s'
+option and proceed with automated installation, but we highly discourage
+this. This script will register Intel repositories with your system, and it
+seems that they have already been registered, or MKL has been installed some
+other way.
+
+You should use your package manager to check which MKL package is already
+installed. Note that Intel packages register the latest installed version of
+the library as the default. If your installed version is older than
+$package, it makes sense to upgrade."
+fi
+
+# Try to determine which package manager the distro uses, unless overridden.
+if [[ ! $distro ]]; then
+  dist_vars=$(cat /etc/os-release 2>/dev/null)
+  eval "$dist_vars"
+  for rune in $CPE_NAME $ID $ID_LIKE; do
+    case "$rune" in
+      cpe:/o:fedoraproject:fedora:2[01]) distro=redhat; break;; # Use yum.
+      rhel|centos) distro=redhat; break;;
+      redhat|suse|fedora|debian|arch) distro=$rune; break;;
+    esac
+  done
+
+  # Certain old distributions do not have /etc/os-release. We are unlikely to
+  # encounter these in the wild, but just in case.
+  # NOTE: Do not try to guess Fedora specifically here! Fedora 20 and below
+  # detect as redhat, and this is good, because they use yum by default.
+  [[ ! $distro && -f /etc/redhat-release ]] && distro=redhat
+  [[ ! $distro && -f /etc/SuSE-release ]] && distro=suse
+  [[ ! $distro && -f /etc/debian_release ]] && distro=debian
+  [[ ! $distro && -f /etc/arch-release ]] && distro=arch
+
+  [[ ! $distro ]] && Fatal "\
+Unable to determine package management style.
+
+Invoke this script with the option '-p <distro>', where <distro> is one of:
+'suse', 'redhat', 'debian', 'fedora', or 'arch'."
+fi

diff --git a/doc/src/benchmark.md b/doc/src/benchmark.md
deleted file mode 100644
--- a/doc/src/benchmark.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# Benchmarks
-
-## Acceleration with Multi-GPUs
-
-We compare the training time with 1, 2, 4, 8 Tesla V100 GPUs (with a subset of LibriSpeech samples whose audio durations are between 6.0 and 7.0 seconds). It shows that a **near-linear** acceleration with multiple GPUs has been achieved.
-
-<img src="../images/multi_gpu_speedup.png" width=450>
-
-| # of GPU | Acceleration Rate |
-| -------- | --------------: |
-| 1        | 1.00 X |
-| 2        | 1.98 X |
-| 4        | 3.73 X |
-| 8        | 6.95 X |
-
-`utils/profile.sh` provides such a demo profiling tool; you can adapt it as needed.
diff --git a/doc/src/faq.md b/doc/src/faq.md
deleted file mode 100644
index e29428176..000000000
--- a/doc/src/faq.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# FAQ
-
-1. How much does speed perturbation of the audio affect the recognition rate?
-
-   Speed perturbation generally improves recognition; perturbation factors of 0.9, 1.0 and 1.1 are commonly used.
-
-2. How much does the volume level affect the recognition rate?
-
-   Training usually normalizes the volume into a fixed range; large fluctuations hurt training. Roughly 10 dB ~ 20 dB.
-
-3. What is the minimum amount of training data for a speech model?
-
-   AISHELL-1 has about 178 hours of data; the more data, the better.
-
-4. Which kinds of noise or background sound affect the recognition rate?
-
-   Mainly interfering speech and low signal-to-noise ratios.
-
-5. What is the length limit for a single utterance?
-
-   Training utterances are usually limited to 1s ~ 6s, depending on the training configuration.
-
-6. Does background sound need to be separated out, or denoised, before recognition?
-
-   Yes, it needs to be separated; the details depend on the actual scenario.
-
-7. Does the model include VAD (voice activity detection)?
-
-   VAD is a separate model or module; the ASR model does not include this capability.
-
-8. Is long-form speech recognition supported?
-
-   Generally, audio is passed through VAD first and then recognized.
-
-9. What hardware configuration does the Mandarin LM Large language model require?
-
-   Enough memory to hold the LM.
diff --git a/doc/src/reference.md b/doc/src/reference.md
deleted file mode 100644
index 69ff6ab88..000000000
--- a/doc/src/reference.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Reference
-
-* [wenet](https://github.com/mobvoi/wenet)
diff --git a/doc/src/released_model.md b/doc/src/released_model.md
deleted file mode 100644
index 0919bba58..000000000
--- a/doc/src/released_model.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Released Models
-
-## Language Model Released
-
-Language Model | Training Data | Token-based | Size | Descriptions
-:-------------:| :------------:| :-----: | -----: | :-----------------
-[English LM](https://deepspeech.bj.bcebos.com/en_lm/common_crawl_00.prune01111.trie.klm) | [CommonCrawl(en.00)](http://web-language-models.s3-website-us-east-1.amazonaws.com/ngrams/en/deduped/en.00.deduped.xz) | Word-based | 8.3 GB | Pruned with 0 1 1 1 1; <br/> About 1.85 billion n-grams; <br/> 'trie' binary with '-a 22 -q 8 -b 8'
-[Mandarin LM Small](https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm) | Baidu Internal Corpus | Char-based | 2.8 GB | Pruned with 0 1 2 4 4; <br/> About 0.13 billion n-grams; <br/> 'probing' binary with default settings
-[Mandarin LM Large](https://deepspeech.bj.bcebos.com/zh_lm/zhidao_giga.klm) | Baidu Internal Corpus | Char-based | 70.4 GB | No Pruning; <br/> About 3.7 billion n-grams; <br/> 'probing' binary with default settings
diff --git a/doc/src/server.md b/doc/src/server.md
deleted file mode 100644
index 4918d5ebe..000000000
--- a/doc/src/server.md
+++ /dev/null
@@ -1,34 +0,0 @@
-
-# Trying Live Demo with Your Own Voice
-
-Until now, an ASR model has been trained and tested qualitatively (`infer`) and quantitatively (`test`) with existing audio files, but not yet with your own speech. We build a real-time demo ASR engine with the trained model, enabling you to test and play around with the demo with your own voice.
-
-First, change your directory to `examples/aishell` and `source path.sh`.
-
-To start the demo's server, please run this in one console:
-
-```bash
-CUDA_VISIBLE_DEVICES=0 bash local/server.sh
-```
-
-For the machine (which might not be the same machine) that runs the demo's client, please do the following installation before moving on.
-
-For example, on macOS:
-
-```bash
-brew install portaudio
-pip install pyaudio
-pip install keyboard
-```
-
-Then to start the client, please run this in another console:
-
-```bash
-CUDA_VISIBLE_DEVICES=0 bash local/client.sh
-```
-
-Now, in the client console, press and hold the `whitespace` key and start speaking. When you finish your utterance, release the key and the speech-to-text results will be shown in the console. To quit the client, just press the `ESC` key.
-
-Notice that `deepspeech/exps/deepspeech2/deploy/client.py` must be run on a machine with a microphone device, while `deepspeech/exps/deepspeech2/deploy/server.py` could be run on one without any audio recording hardware, e.g. any remote server machine. Just be careful to set the `host_ip` and `host_port` arguments with the actual accessible IP address and port if the server and client are running on two separate machines; nothing needs to be done if they are running on a single machine.
-
-Please also refer to `examples/aishell/local/server.sh`, which will first download a pre-trained Chinese model (trained with AISHELL1) and then start the demo server with the model. By running `examples/aishell/local/client.sh`, you can speak Chinese to test it. If you would like to try some other models, just update the `--checkpoint_path` argument in the script.
diff --git a/docs/images/ds2offlineModel.png b/docs/images/ds2offlineModel.png
new file mode 100644
index 0000000000000000000000000000000000000000..0d8722ab00127074b04c03a6b27e4352ff15eb41
GIT binary patch
literal 95607
[binary image data]
zxc9#I_W$)g-VaZFm|-1ju4~2F=ed515EUgkJZuVVBqSs}`IjfR7#~I?%Jt zj;)A}X+aXO4vQG9)emLnBs))PMdqPf!;LnI*$Ln@IB2Muq~0 zDwaHLMn@un%ue?LW+WORp$aMq3wC6re>hr&@f#JD4sj`T61PWelPXCx+TZ(jre>=I z*VlX+8usGd=h9s6A^iw?LTvF38_5j2Qikxxu)6*iDuu&C0wjC_WYYYKNfSB-QPGFA zJXJRabMxp!D_$^Xp7B<{TO-+!c9JS&{|AIhYr#`VnC`R}@7amXk^Kv$cP@IX-p9^F zlviT15WLxZoTqfuB(Ice!sq*w>FIaMHz1^aGg5`m*ho{T6|l;wf|rk+k-m?k;2ZhB z#s9u*M;#XUmhwAkQj!AGx!`*cfgc?bG0*qomDUc$YrQKo9dA9QR@?4X`a}2OjRCbVuFBT=kD`ac_WI)rc%yUlekAh>No0rv8XbGTzWexTx}1rlCK=u%k}~l1tK9jr%%@ zyLxPv-}-o4`ed5ld}rg-;dt|+zAp{B$XOLu#b6GpitZI^64VEOv@ZU(PmYdE`)J6Y zvs~5?jAZ8IQppJ2H+-s3x0H8SX@$EYzdVwNQX7ZXKMo&9_wQK;Up#GY;XM(3UieFg zQi~9&$+_c~&3LCJ^4oDD`39$Pi+3~lp>#+o_I4r65ctPb?{?Z&^g~JV{CoE4_Z9u0 zs3kGHMN54d7eI?E6W~$)upIfm8495`zGlElIudb!y)hFr#%p7V{CjfWxQDQn0$dgz z)T2B2`@W@l*~aC8{GtuQg)c>bpDC&Fl4>LXm(Hb|G*fyvnv)n;R`NjQ!DnnNTC8ep zI1-7}4;3K|yfJBs=S&$cLnQfRjW~GE8HOk~c*EcIl=Bp$QMMf!3-|d4E--r#e!=Dp zbXoNFz|lva_!hKC`sxc)xduBUN`WmOkAdo z3Me0}LAJ&CD%30F;(cdkffWV(0U{J%QZ8$1!+e8(qkqHMm8ucV2ib>6ESOr>uU&;J z`JqcV&c~3D_H2`QQ*5rBMBP5DK3PQxi-$+lVLu8@A}W+7k?ceT@R}YpkvCyXhayv1b2)WgESM7-aA|K|HM)fg*9SB4jau;A?w^A5%(*`<3+dJjUf z<(wJWX%FPLUz%rUW%p)FjWAU{m8vOeIiuTv-9XwP-k^1e>M8B{@yg^}|6J{S`EX*@=qUklIB_$HHnuJC zQ(~p3xXj@!Y9#i=?Zmt!f~*3}hN|AV@6~zK4qq=Zud*VvgvH5Oj^Ne-nBnWnY?{N1;`sQle90gs2h*&1#7m_%wMHT}b0qe@yB`iCCq8@N?KS ze<>23aN~eDEY6s^>{)+Lwn!#SX5~M#1=~hiTic%7V%30cQzpC$N(vDLDY*ujJ})3L zL50hu?Gr4+LPbYka-W+H@mQHx<5a5}fM$^lLL6!wqIT<3D-5d*Qw^);qwMP1X4@<>oRuGyKKzYgh9Gis_;qU3q@R!}7;^JZx3$ zlZovi|=dC*Qr)wRrgd`*Lj-STJ+YIl((46 zRUB36*-luT!RKaDZCfX9BJYjSb-c-v&TH3_W0H$8ja?3o4N(82o&!TNNTODnS8Ux0 zcRF=8-1h!?9vhaW5Z}w4AfeB005hP}PwN*<9Zc~vzKMpWQuda>L0d{L%Fgra94A93 z#-5`dqNk&0D<0!s;^v7wBM*bf%bJT@WDJ>_#XXul%1(FprOtm{5?>tudV2oiGX7Hh z5O$<+VtY(^;=XURIZx=1g^4{$&Gl{6q%!HGZ&)(#+gJeEyKbo%sa2_{!2Wl#Z3w!z zFO9n+qcJi*X1p63=OXCDohCfxntA4}^J@^N9OpZk2Aju&H$)XrGg&`TG(4`PJM{Ui zH9DSuIm|fxX=C8wd16Q6MdJMm@roZ6BlVt6Ma!i>Y&(vYrysn<;>N1MGR5&B+tq*e zWg}WU+5-}EkoiSci`15FK-=3Z-jjK#(STnHX|!hCDV-SGwaSIe3%eIC zjN-g3N7<6{(H!;V8W8J&5yOmza!F=`wMx77N>$b9d`;XCvXJS@&oRgadM5PeN<4{( zRD+mnC(pjaWuZ`^cVBi%ELa6fJd(E)Aa9>|J%O07Kb?KOt%X-=QCw3vCz{AByL~b!T=r~l_TwZT;#~K9`!MM1(QRpkWT~VESNX7;>DbxE-ue8Q zUFCE&1XlN4X>p8xJ98wH6FLPgyD>hJp1Mz@r&fDx=Qh*mP6>t@Pa_c2)7zmWVJ=}o z;P*RLEE#dc@y*i>X6tckmm`G+9R{BaMk*I7YiH4$>JT?-1FU;x`-N7eqs22C=Wp&? zD$g4Zwt!yizLtHRc6M+!TX0(DP{3WtG8;9U)^ghuka*7kV-sWBmft4Krj`1^qfg_q zqXzlzg=LOIc9U>S!B-S=_)OGaojTTE>cyI?yH(k^46DA?a6Uedl9!khCv`18dUovr zQ~oINIyF6&*)Bj=vpT%?P&dc^z`lj)o-(+$sW$D`)YiImo~!cfFPcIvC%6ZSH%TE? 
z4VGQP@2NNkb5bd{^hZ2zy=Tt#$Vl}!8XmeH96o7PSZ{i0V9D8JAnx{K^=D7faZ$;{ zqp})T5!bX|KNb^+#3IGmCsIo5X5W4to)c+3Q%sYaZ-9yma?YB}HICE3=OH2&rzrs) z$eUCHDVFZMyI;R1_&8qOG-4!Te7H|S9WOrOt#MSeS=O&6x49{m*0$Rf6Sa50BFONH z@dnZR_J-(Z>`0!l@^;#kxCsA}p!4$i_w{zp_ne(Zq*tZCAgipgS`ycb=W=IOGaLK1 z!`mT((D}kvvEzXogLUm+lUZj;dtDO5z8bgMXCXHQ+b%ondCR@63tvC`@L$7k*f*vF zL;6Cn=oC7VDbQ4czq7C)H4DA(!#Y4xaYT~TeFkQ%YU*HS}5Xn;ji`W)2-mDEn^4CML~p zCQG*LYCB>`!gI=lN3oefeVV6XOq)049KZ4u2QU0K57Jl@i@#A{xm*)P?vWVLnuR3+ zG43;SZFvh22yp`@ey({nE@{oeoDdUro6Ucz8VNRmkM zGEy2I$UC!W&BU)Sza6ZP%u;f_4GB$2%=g0);hrOF^6@Dlo1Xagz2m!s><>wq z&j}8jyB0U=LR{^3bF&clz1gH|DvA0kUdttKOYok!%fJ`)g~Z2?Q3)iGkpJ`LjbiQ% zQ>6DtLcyf{_sbX!leB^0|9I2{6_eETWn#dek4lzz^Z)ze?-l`rWcfqs@3{Y&=5DBW zvZeo>8R!al7~st9QrrLQ#xHEO%PEYAJu<1)y{oa(J+G1b=$dzuG4*DfZkTW4L0Xo~ z#0w?wpu|0)mKU)()lsqbjLXELuwT<{HN>H|*1sly@R)dSt^3fp%t1>RH>|6fYJh zYq^LfM%&vLW{u!lC&-;s9{M}W(t2(A=(lJ z4b4;2I5Z3i#q){ELgHtwC%NVSB~dhYz~x7!<62o>jVO9zqEWVwOMGgWe{7ky>Tb{z zGu!{Z=P$_&&{5inf(7*tl1J+VyqC5#0}I;W{g+DLnbV~mqQA2c)CEEbmlHg45W^Xf zg8hHK>o4hx*-@hNNE{ra<@HWB>K)df>u^N%u;6x7;zyJHgZFA=wAzZZpV zS1{)OzFD&EeJ7)@5DT zn6%MC0R`%LKZ9s|nTZ$`qtS3kyEA3Ovc1m_)mnXhtS8GejR+ej<=ecSYrsY+n3G zxe_ID^U@3HzWA?nJnFKRQ{9G>aXp0V2#Izupsd++1cKT#bqXzf0#cfHf)S98rBR@k zGbFt`UZR)b*$pUd5{Y($bIup9t%6zs?lJc5Wn%YvpE~c;DN7!+Zh|SNE-H0{IB$SV zl4=3k$+2oIsHE)ZfSep~txmZO{|&f~n&oyag@k&BqFOVF*c znPM!cvgi-($=XIVZkO_S>>CRmbP$=w^WFml=cN{H&7A1S?9+3?{~4!|2lW6wO;Kch z7!7T4)(6uZ)2eQ-jt0%w`jU9uclAAQug`Ma_vQ#Mhax99n)jb)kb9!yI08fo1_V|1 zy(#}cDnP4_Ld2w0&HwHOQDZ-?>HBLDW16kTdG+UwY^KzTsvOl58lU5sFP>{jguT@t z4+Ai$gA=649awvL9)SO{{ra%-^H z!4EPyw(&?uQV#ux+);ve8PCx;dkbF|t$e-QJ(}aav$n*t{>Bx|HpPMeSYET23m?b~32vhm`=Nq$ zAVt`ecZ=7_O8s(Vn6`OGFd^Zd=iRf}tdix4r4Ro{)c{;V#1>bxk-|=eJFwGc$Eg9 z^3*!h{BDHe>{g^`na~U&2>xN+U>SL6H5Fv1LM;z}{x&2$F-npVEmtXpqM{)XUzmBx zr_k|q#$_+NuI z-ko3JvT#~fI)BMIoFQEPJx*k#JDJx;rYeefkAyNsmMG|$TC7vmIUO^;Bj_+Q2}#?1 zk6*B;JRFekM-IDV+8cmrAFwJ!%TSNWV>E6yX~ykyP3W+j!Mx`aA}Y1sBTDozc&BE<5X{!v zeqUk+A9IgmnZF7JmZuywnJ#cUzQ`4t{8Y2}VMQT&)}4n=&qr~BW{|p>76(a2qaEJ| zhmo?YJ$)xI2>G=br2bQZ5!D$a*|e#AqB~e)y6*7+#|m!*PjF_z+yhvS?Cm**-GtK_ zI_|@T)${%K`>Q2&bDwUmFCjl>eONN{-lLYd zVX>JaodtltItg?&0_JNSWt46098L1zw_<*$BPc^>5#E3KWbhptPP622T*zFcNZP~_ z&Q5tdJ|X2W$rCP14p1Mz#^joX@`3_Ijb8*Eo8Xq>(`%A_EuCn5#T`?djcPn+k z`pl}O
RhmBeTLNuyL>h!JTxYiwZ=V0mm$EYq}-#sjgYiyvf0!T7{w=lt4_&9x*0 zJ;azCD1=?}cYvpMr#~ARc^~`;ZuYvD0eB61C3 z%b*w;cb(;6>YdU{)}p=$XVUJAx3_jou*lExL7$ZPJIYw8k#toz_Ssc|RDzhS?ahv> zBGKD**YZJ}5(RPM@Qe|4yfZ-mg|cXA_UeL=%RSD_4eBFjJ@oR z9WiK2=wX-O0<9IiO8UPTgro1ZnZrVrezTxoVe)}1-~E-o+4FR5lnFyfN%KJ`C6Cv! zm7@r&R_W(E)dPcJC3=FxekmgEq*P%}SHBMK{M-34OxYZ8+;hT!z$^nE;2-O)iVDmp zJ4LYlJ2qa)mCSh+7wT#ZJR5ONsGE&f@c=MIPt#kbz5<(b2DaP#9*d~?3hopkz&qww zz*rJ-7cHz-!;}=$4zmaY5nH14r>_gM_3dsO;g`lGx;1Ks z3E`NZtNp>wF#{(EY9qj~?-1bbLJSEdX`4`l=a%jQpG4D0mhni<65S_xX}*nL+{Se( zjoaRt*oVadPqm_9_brr6%Q^Nt;=D?;MDf}jgbFGPIL_In&5!tVr&L=F)9W;WOYn#R zZAl#{T=^?1q%HV%Ht)w7C3c#)k(h9C#%MSKzim02)4Mp^b9BT^@j0EU1Ok`f;)^qD zAUbmHd6E!3heS^ojrThc7J$rReYiirY;Hln4rC85H^ff<0L$*wD7p;gLNrr6EuU)6 zYN*~}R@fR|U9d43u1loz)!Q@b#gTQ%N=L}FUwso!jays!n|6=*VYEg5EWiqrul5y0 z9Q2terQqYE|Kmd^Int>>Zd;>0H`C&RHyT0sR4aA!zOi}^4YKHX6px_-_J=&BI#n;e zUTm?xlU05XM8hv-{Vhq=Rq5~ltqrsUNC7rug+2G~i_$X4MScbh$m6A8{{6>yJle!m zsY;9__53Fq2|&W!X+9bNYe%SNz38bL6lVo=pvrY`P7FzIfDT>NI8A9<1P^!Vz@;w) zZArZ9naR}y#|UbwEf1LDf0C*@TC4;~&Lsa>G_5;plr2xBAw$WL`6DF1Lg6z!v*n8T zZGHat_5W)+1|$!Zex-Nm%G~bQ*T|jeoYXV%x%a2pTYI!jhSayBwdDANH2t2pa}(U3 zZXb0)$0K9C+>{K#wGAy<1s)%7JUyJ*&&x1wT@C#rJK5v+ZtEroV_X~Mf*RK#ES_h3 z5|#bgA8$VV{S;qX*435aq0v1>3_1Vg9}8cK!l?>kE7&XO?p%R13jVwz`6n~pzof%V z5y%xxKetV(NWI0c8I5v1PX_Urwk4}c;Nv3!@h|ayKk#zuz=LZn$d+%vf>qQGdcY3$27k#WU+Lz~^C~xBDuA-jX(jtjFqPe^wlMy!z9|Ph9Jj!qk1{;LR}&^B z7W>Fu9BTta<)FC+BhM?w1TxMxlid`eNM;PwrLo3+3=s?^+#nUR0FMdRW>48UWLCff zy+E6x6rCyVV`in4Mhf5qbP0ya0QrSBBQZ4mWc5C{8gnm@K6)cL7tGCU8NJ*mhkV=r z2$*4+L^6r68xL@_GtcfnlEugRbt@@HL+V$aCF?C$u5q2c&zTAK7?qkm&#k1B z2qn|afHf(MLt9qsC7=3*7Z~&%#C9*rVrDyo@;)({WHn161r%dFZft9hNGXn~dIM-5 zJrIbZ2Fad3iQc&HT8&Z@b~nT80F5$h-}h@<<-{#6+au^l6=(TIzD?}!18!HOt=l1~Hau5 zCJR+Dlm5dl9LfV)k~Sb<{(offfV?UahT8pi0DnnwEFgo2P30E<*&PbZ$MQVpkMaUR zHxs~qZ{g|BB>z~~-znHjRbW0@=E^&B`0ps^j{!a^TS6=UOzc^(3D|NmWd;5r&+nDf zrU3!l(WagH@09a@g9Eg9_k%d2`{RGd#axL6HZ7iF|1J*->Ur2SAh2e zTJ65PDv-`U_WAeV0mOjJ{Qrr_%n2sqm2}bWD4;EkpG`dK3vY4Wf`Tru)P(=qG7mej zPHWX#GKv6Sj2(jHN)-ZB4S(C@?SFVULYPi&Bdq(kr~Rk*PzJD$qC&&O{}ff;F-TS& zEPP>&Bo5%hA%4dArqXn4!Csy<#b)`>4lCK5xU-xk6)i_jLwOB=5Ij&(tL;;2T#ISq)LvK)G zxJlihVIeyHMrZE0vp-U3JfhNU6+NlKIKum=RhGx8P1n(rL)l8v*JhA$WEM4K7Cs*T zisa|3sDSyN3y=~AjX>95MoDtu&VJ6YEcyeRU`El^bKCvezamO>6(?8|LMdn;Q^EKW zDry-wTC8gbwo#Qnoi7OJR?<_@m_lVIQT#m7i5i!zSvLK+imW8=^|oM-gT0MozuAq1 z%}=!g!-i(QB0Zg_T5PuK6Nh62wFO2x4iY(mu6Bm)9MD$R6z`+;=Q>}#vZ}51L0}u5 z_NyZ61a^a(;jf*S^V&q-_~haW7c?WUD8}a~>`J-2j@QFU)Y^BH>&nWKIYgUl6jPdB z?fN-ZUh%nkHutX%h`JcU$(K{~#wr6csV5nUu7I-PZ%aZe1}OP)bN8J^@IbkrG9~Zj zR_ZvT_x+Xu=Za6fVxe zYu1HI9#`Oi<1HnibWwgn?fsuz%50pDphV{WSf7<8j4>eI9ic+*$b;3)7iok=<4GAJUfF4RG$mnrEf%V&wwQbqrvKc6bY5lzSiXAHk{OpS=8vr0v*;X zTSF+c(K=!(PvV-#r{T+W2hHKkIt*cFZvvUiV3+Mp+qOQbt<1lN^|zrIA!EMMlDU{R z!@JXzQ51gGxH#5Go9(!Rnmvo2tGMZV9FA?ZA(o2lGo*Mkir)k+)?Rt~{F<-y3M_ld z<%KqXS%5Rq6V%!lhm~u+EtK11%X*}k&4{Oxc5ai)X}OYJdUn2dggA<&NM}?~!mb!y zHtLI>XXHE)aCwua=_C?SVDk=kG4`{|V6CLtKJ(4Vp<15JBA$M=OL}K+JYQV4Cv>ck z*6Wdj^Cx1JGb0~8btA#n+t*NvI$M>gfsu6?x8womtLKUgck#h{o%H?IkvAM-=669$ zC~bMrrbe#PNl9C;t~%tT(i%IN4P;O~FxuB2?X*VTmX^m}cl&8I$`Bm8Ir9RB$g?Z! 
zhppwYX4xQTO+W@lMQx%R=^D4Vg-x;dYMgh+1v`7o1H|lhJKdi48g?#rvGndh5Nzwu z)t4tL-@G24bZPHoKV0ngM#;zIgA(EM_x6M4Tj9&$ z>@Gb4DUH0g8wtgug%;t_IGebiQ{ES%Yk_;p2R~tz4o`&a640Gx>RzaoS;#h=j5#@< zaN8reZP%r)sjp{e`R^(hcT~~@LaOS#_;)~MvKAH2SG;6@;RstUka&1VDgz=uBbPud zs^8-lj7Y1xo_gtSc)V=}gKS2uO|>ZIO!W$MXOe8&f6Jb>gXM?fm0lobWlDt`OB7R^ zbPdnJ@qA@kJ>VUer8$r+#I)u3ymg1pYUgxPNTeqfvE5xpl<+fxGb%Q+zp#hIV%GG` z#sm^QW{FqRfZ$!5>Mkv@NlrD>>ZLtWTw^4%Y7l z+%-vFkB`X{Pi1U{`d6L}oaF3@_wV>#lY|L>?mi;hqwn8(9g`t3P%DLqnWYR9k2+5~ zYI=3s<(uHyJJAcBa80NT{mMS;t1IW?t!K+REm-6r{PbygfB5VwP$i0YSQoxL$TKVu{@Q@d@g;++nTOLXQQ}0N@5tAl z2i9FTCMK&BvDvuj;SmcaQ4DVzQm|4S+awD+M&NC@GKW(t*)jRhT!91}xIH6nZ6RVl zU8sJ_e!RVFW9tHXeEIaV(_bA9#z?`G!^OJS$zvel_d6K*{0Q`XW;ZP}SSS{+0lSTR;hVKVy> z>RPM_>h1T4^GouF;gMn_MSgVs(!&XI(E(~=cLIM-Uw%!d{PlCt6@Qx5Izf(hJOXgoi!6o-vg9$kt*Fv$EyNfu>m{f2TIFf%91p>35!mFKC&PPM%-1 zvK&oJ;@ixui{?2NvAFVjOIRmuNPzGZ^8{-YF@%!WItps8M_u7r{o4FB0{zZzq6(l3 zyS10>h-cj();f?ah7bkw-%hbx5D-6a zk>$@o9X6DmYfMybJ$EhlW6|SJ0(-|2oRdF~f1U>xc(V3ni$w?&G}Tm*&+a``cRBOR zC87WsU02X<9>qXvO0ADJZ?E`0=lbl4NI-)n6kj$8s0a~qb?j+2D9VET`L9)v(-^53 zrETvmKe3$tm^?O>taT*%t4-q^vU%Y#)#AD9HHXXim!IzD4Jc9GUf0QB;Q*+r@+hFS zZ=cMe(B7ska<(MkQlO}8*`j2>cD~M%cbYIJ0( zv`pe5XBz&S3Zo`?CHVT(q6(6HcA%z5gd~#taR@z>Q5xnoeNSDm6+PBLx7@CUIPsS6 zwIC=Ehw`Io9B1N=MQTWR0@y*J_f0AHOhXXF z&EL#RW;efkV38Q+(SygF#|bxN^_**C&oR+(oo+G5^W6GmkZt+SSYWO;Dd^53`Aa4O zX=S@Nm<34Q%|HvtuyCgnc*$VhSecG|YKs+_wRz`Sn38bE_Y=9=+7|oM$hQeWDnN&AG*e zjw%<2s>q+g*swB7(}v9ze8Y%&Kk-C1hFk}Gl*{vt4|Anu+*C7PW{V2=_y(y?xlFm{ z+`Z(c^3=s#`l`%MKC<%wF($OGa%!zAVTVy}Pi-DOw>tmA9jC{dxN_-^D<% zkU$##Yp2fIMb4R&o$8VoZzDJHPMZ!CR;F*+!DLg66;1r}@QO(M)(WEpFL2T+{q!*I z5w~fN7rCXE_*izobsF!wqU{4{lCkbRM+Bi;oKRl-3wxirO11M6s(CArrM*$!3-G`rJ!a1+4p|b=LS}Rmp`1*4!2gD6ck+S zE||6g+?l439Om)eR>swZ;bWy#S9#vNZ*2F^7s#m)^^uLnS)0W#dR0oCJ*WA`TBGtL zk{JCcih}y1ADUl}HB9qZmmpS^qbkJewkqaabeXKHO58`+CDQ0P@zdh=8(!Y}CcXgl z@9_0qzRz>gk9@9sr4kP{=Kh=)_Z;vc@>o{toxS+>Pkg|R%npueH$KYARX_1HZoguE z%T%}J1usD-z-Kl)SG_d_>x=FY(H|ItLldge5nHD4pOK7-EPIeh<)f{AQkrXFWppS- zi;ldolhB)}kIvHb_y+=xfqZxZTBTFW@QgubUb`O9BI`@2* z!qqq>3i;K!-?|#3oYyN%^sS+w1?#(mK+R_==tTMD&~#i@9(yd*h?3JK-tldfyc~fdH`6ZEdL_^^G_I;nFo)$so_8<1iJ z0>w}A4m0&XhtLLUl#C|U}j5w1xk%b!EA0YkhIiv4^CS^1YB-RS_0{vSeS z=7eI}NS%5+c(<93_Ma8n;7rS%YGsV&WDN{d4fU@QPTbCTDnSFk=-U~q7vH`q=^Fjc| z&xN)?b674&iWR*p48VB3Y5yn>ASia+d~eRzdslj5W7jO{{$YX&90e^>KF5b(M?wOP z?`8Ae*UOabVYD`qAcNT4x&8g zdue~Z93^2k*X%ixugXwXyxN;^-}&I#U9IuX`@I5+>CM8k4XWDL2OcM5T4Ct#?*MUk zC%1<~()WAj{chQ_|ARzvBD#yo(v%xBR=4_ZR^3*4wb2%9`eLn#874DP!n+ZZ5PG%wV=L zlrfT{Kz0XhL)4_f&|V!Z+*}7{yy~EUTSMGRY%lR7ga3xmxXGRb;g`iPp(sE}+RSgBk zqqmZoY%Kk^8(C3za7X7+RXKS8qM)YBq?+4B4N-JxK6o*`a|&RTNZ6MrD@++|;%n`u zOrT3PTspvWGEfs={NJERVek$!290+)-WRX4UG@W;(RfZH4lgPz_9Fm_W@j9yRB`;G zJ(^B>1%NVyHC=1e@tbzyo&)%?9Q*0oN`Qkojk+`5v5cTk0IF!ja9zynxZG3x_R2+Q zC0cp~KnD?Bi&`%U*Wq4qBB8+F>$f(Y?D^NC3~O>;=wPXjjC0(>z*j7I30I>JXZKZm zDw;&lEe$BSsOV;cikZEyFhH|J>0z2t-Q~$TMxr!BoiPj)Xrx5_Ew6g`E?4o%GMb!-fut=h@696Ktgei$kNpx!I zJAKgiJzrJJdJ(a-6Yr)VCF!VnwJvcRGUWn5@&KHY!7<}U^w`y(abZ2`lel;fFrb!@ zy~b@0oocI?md&BBK9}D$73%eEg$JK4<8DrrrL`~;S+Fgk*vEVARCK}9Pvf)%+|>2RU3TvaE-JCHvliTk(lc>Ff2!da7Dn!26+9-AHfzQOl?F* zgsaJnyob3#+9~nudcwJyJSA>K<^sr>VvI04`Ajz1WJljG(}?8cABiU*l}&xuVcE#@ zWds0dnB}ff;`U68nMPp@cDciS3|1Jfd)d?YT^&SjcJveg2)Gm@0J*w8e|zJui^eT6 z`o+L~c5HaCTWI~uaw3hBbqNh;RfF}Ls4mp715z^ER4h9x_4bX#^vUzp1S9&|A8yob zZO})AT!Mm+)Y}~-h;p%NOp~KUaBKbpT0Mx1AK}q^9#EM|298w7;tB)B}X)_uX_+ zSovl9y9cC0gBE}We5o1`?&SvMrr=QubcnqJutGw);CWx_JEUN}=coQr#MVVfZJM3I zEzgl?bqlu*we|j>5=8gjbC~`d_cgl5cB5a=tg+KlM@XdSqmJP{emoAq(&%*Y^`9Kx z;XxOP(*vPurBnUuvlGQSybcE^=cNjqEu$Qi+$QhqL4wN`a{}rR^|9O{;gyf#?dCU= 
zOuf{J=68T7AI(F+gw~4Xp|5=tQeic=p;;1NFYFKPzt3KVRVV;cd?I$W-+r1TL1pqH zjq>M7_8@rks1yE3OXQdQ=lTJn9!Zlni3SOb7R$%qJ^Hy`QQhUg@#1EV*xk#~L_EX$_tG24ttEZi5} z_ct&IdN@o&k!=hB6b5W3O6}7I$40W9Ber>E;XNN2VXkB1MrQLe*Ny;a91Ka8Q12O~ zpR}~i3aTGQY)5r^4AN|ub6wa~o_Z7y&N3?KulKlWYmT5?C^i((MCYY?{k(xe<1fx+ zCGsdXlHfiNX`b0RpBf@o1eRVJ@@T6%oLs{xm7l|!?Dhsc`3B?iQR`fG%N<(#XabH* z02~i|XNvwc;q~k1s}PjO<*u7*dsajz@muCxL;%LS-ls^cxKqLJ`Xm97Qt7rmK2q;w z$_>(FsN57k#wfahiS!ga#zU!bmn-=J_|hDA+{RLHc)Dq?byVW+{i_Z8ttdj!uI;h& zS<77iSW}DeN^qos9CnmHBqh%|!q)!d+|H0v{DTv1VdHN|1pqkg8o6*HS3>M;#s&P6 zqNTHjfu*G<=G6$kb>GnsaB$Q>ipaE~o|_onBe!4o5y7uS%tri2Kzq$ckqErkBFQ6g z3=mxEMI~IN)I}?5VuUnbudbA;vPTTRM0NCy!`G`r1&HYoA6;Er)_AZVU9E&*n22|UvCk^ zzBbj+F3Z=22s*F4oQC9YK4x0ms9BTtduF?E2O{Mj44$O8@AraA;2X%n{dV`#qwFHc z=$`s@TZse`UxRzn2}kt+?AmHXq-R(jQa;N4g#Q`<1%5ye_d3&zJxzUu9prxsFPaEo z^aFWijx?+rcA14I@DcD*iS`uarP`2fCq`b}8vy0EX!6 zZ5 zkE(M}zsyWA9NGWebj@m=HW&P3jepj68sm7Dq*_jGHm!gzeDfuvA%-BFCEyg-5=B)G z(lTnO;C9Vz+WGA6ZUuPCoUH3A(YY_hYMR`?i3Z^zcUIzgeOt|dj1b~g9hp1lYqhTY z%#3s7qVX*}&U!GVHIPyrhDfB-+3{9qT~Ex-`as<>Es|BR83sKA9Gcr5at~s#?N&Sy z=G%GIgK7%?YW-?G&0fMXS)x%;!v)sKJxB>}ws=U^wClN^Vp&&oesW~?&T!rD_A;$b zj7VeByHWHa4eMpI_W@)5z;XexPpUQbBNu!_kHrtccs5bh!|OeKt>N8SDGiAfo59|> zp58L4Twg=Zrrdf9{2WL7`Iy`+pLp{Nl2P$K{@kBd%s_!)dm*8mEHS2`y{>WLuH5Rl z0RRjrdM&YHWdy6M6RCk&a(0LKdGjI9-|Wl+G)u(swz z*LIH)@#X2o1$h3_R(SU6Myu4N^~FJ6&8C+tkKhWuMb0NV5ptNzhVtke;VZneUpZ7OXPg( z3ERW2ljesBceOGv^7VHUjg%t=NaB3PIUaV|&mb70>M0959(H&MT+sxAb9#Zo7TwdWv0nVPbev;P%@)mTd6DfAxD8@K zHr86r+VSD9qm|z3o7rcllcx4gHKt4@2ApgSCEAT(1chLcZ`w9qHh|q$R#bN$UQi5L z-oIrZ9!z2ML4T%tzxO6$+frU{)JnPI;;I?nD68GhN+A`vS)^6n&f3ybz>c38Yn}Y` z#`5eWNedw-DN_F`epDKEmu{MylA~avDyHunQ6aMu1VQ~!;Obe72t|gt>=Y-f@@Eb+ z&!9dF-vYqJ%sww$ndNOSW)|728Rc%dpq65xl?Cs&*$kT}_2M^m6&%|YSBm|?FR~)~6%)DdF4~nL^kdwk)v=#smZ6N(sT{CH;Tf7@3U4cfNiNZZr-keZ zV_RA4jSY0hR(;(g<%9J?M*sc^##ckr5U;hd3XBuokt+(eHT4n36*#1TqQTFy8mqxm zNt|fxRW4@G89mMdl2O}Y5BHhZTaq++6Wk1FJ!x!E8{r97O(v762ii<&uQwt`5wscg_s(sibLDM;sM?`CzlH&xy$)pC6@hGdm8DQQKeoXX zk%_sKF(qk3pOe?q*>VZnya~y>^~?B|rn6sJ)^3l_2|*4zVGTl>oOH-7bZT(Gay`ou zeBsJ7SZJ&a<_1d~UIq9>JHXK!F&u6sYpO9W+Rf z(udkK))2vYH-XE61}ldf{wMGiB@v_p!CCogDW97ckf&qW5-UI!wZoea>pm)4UJm8y4Q-*f|yyh|7_?%Y9AS{Wf@GsqGz>)TfH6q_E40oUScyG&BX2mQh5 zIi^fD_FlqLR+Xj;!pgbrm9*HvwoWEt*3V1*7;2`J*TMBy-1kqA2-}2H_&wW3d0P_x8i!fSV2w*FqR`JLXL`{NOCRT$3tVO;EmZZeT zFj&nzEln#3lz3g1MOS83y{`_sxJ@fkd@nX-!EbM)ZTxd3gkzuB3lxI3tL2(9apN*- zAn+?Ck=kL)wwdhZb?byJ_=<8@N3RA3Enqim+5%3sH%e4oI)^hOu-v8zK!Oc%3c|0)z%Po_7&++ zBn#g&Scb(XH)X$eJi$pg0;uzFWnNyp9#?l7*pe*s=ZsT(E*so#+RdjWY|oUnxpH6* zy%)W%lH@%+?$^E-Kzj#rUu1I%XTh?v4-=U1Tnb^bv!vdAC0zU=l0T>4@mNRNN9h14 z<#8S!(d>e&t_uhKtYDf{rINNG)e3HEzBpF)Ipe>q^HjWG?e?cRgHNb^j)!YQ3ei8o z$65vST2?+OI8%pji>@oH&QZ~Vx?vKld<@F-;rs-c;r`Y(QqO7mP;#yFP0=5__hAf8 z`Q2Wp3}?xSLFs$4@+n(TD zbqj^YY&QcHWtzvxu&l!Ca4{e!kiS38XW$$^wVtmo01&|MWH zwJ1two{z+>53f7%{71^0i$~?Fyip3o?&^1iX(2}7K7#RgFMK|d0N@_vPvAxs{oVCb zAHzOWgVP#7%0^oK{K!SXC-#x{|NJk2;`)|Xxs_u5SuL+@x*o$jsvmzkO?1CR)J;d? zRZkU<6UPwj<80IvPkaRXX4C{bVA`cUYz77sFSiL zJouE8a*#U!2*b22l4I5w&Oor6#H|3%$fKSZ^C|HFbPB_#+_0@6~_4TB)vU4lw? zm$XPocb9Y{-5}lFjnXYG{oRA!`@Vkug6H`S$C-2XK4-^T>lGW~j4`o(X^TJ!_BUSR z?I5D0>_J=*3=w{$j>_mCftJJ=3t`*Fn6gU6g^vuGt#=t9342?X+~x*0xbuimg@ZgG z&*wQCtRv{E5&L4CrKA*;2Kx>P^YI%fmtQHHJ zKqRPtyD2P*Y3&3iw!(~-k^ab+bTG2yW|2BroN$h+;XL0PC8M7PL9ibf`ozE1Wc!!? 
zTn+oq@KurC7u*_WVW-!`Ky@RCa?6?Z=@0A>ssk%v{;DVvtRW@H{;dMU5@0&2ebg)a z8?y)nm&IEt>n!;j_fUiK(;w3&{7u#WNDC0J|KELCmVVyLfj}gSL6sEKpg5c}V6yv= z6<@cNf^xn;^j}dkwEhlDEc->%bF4sKAbzZnY&TvxA{r%6R6s!BUT~ryZ4ty1@kT!Q zIBQUwZt_SzBh_#~dCjElYphl6Q8J4^kohPQR zXwid52^IWbd9^d#sG`fIPc4)y@EOKDlrNN@3E}S8NyPo{s6Z$1RN7|ij!DPqf*W=J zoKWslayEc6S5@x=nEDnu7lCO_o10pIlgH@HN+SDc_yI)jKVYJ$5`-p@<=Ig6Hz_;@ z!#N023Z&(&+-1Y?fazQYt3Oaat~aj!rPMf7ge=1bq0MDj3F2Mr*XAF@FT)~wy(WCt zx_hFSA~%+m{SP_&cb_2-SonrG1R^?6@c2Vp$U|SMj#**NXCzJk3{1<1ie3JN?+^@K zQmsBFTpf6EhfKajCVPavPOG&ZCF#sd{x_872IHQ%6hd_66pLB}nrn3)kD`@p>-gwz zC~pbpXD8y#ujUlr%_SsoG4@{_GQ(C_e<*K%?&_iajfnEsGQ9hs@o zmFuHBhQmKk%JT}FSQ_E%T!vcp5j#xg-%=TLKWt*vKxj(&pS}=#c%ai-WBjjkcYn2k zovkDE<==oV#N%z4$zNbKfRBX%i0Jd_uQv@)Xt)5)`+wc!9xNmHkddtKKLZSUDPq+X zy#Mo&9|jc;v+wUKT0QVVBck_$M~gmK?vwzEPhC*y3FULW=mG@;3Oe=j&`}wF-n|dG z1DZg$IR|p_Lr^8@2EzRephLIrXPp$?UuaO67m{AbXVwpVgwIC1$N0>u4WqP|zOL;} zhvXl?nhGkz9(OT}or z)qu5?=nj*zn|wnFps_^BIHuPWQt^s&QtNFOU~4TEIH9YWd#;czUC}w#=;~avTREFy zwcNpJ6-e-pO^GRR^_VknhEfsGRo>23TQONKL97TmA6i4D^ZgFng;hPk62Z{q{(4^= zro+6Vfg3=}4*Qq~yOJChjP8$g61i&v#DnThoqa{ZRJB!l{ppN}bqB6NMs0Uu?_{dL zeJJumt6nZzRJ--U*#$?-@k^r1VK=8k0Rj4-f#3&5z!T_u_eKD@4^FAkD8=yGP82M= zs8YS&9#G^Y#giOJ=B*1YB-7>rWyFLp(n(yWO7~x*Je|*A4Vt-P?P^Zbz%Gok1JKd# ziuB<>VkNMt!?SbRzv3VOVlrMBc680JZ#@lB-rA0+m;RWbHU1Vy)d0Z8Lx3PqsuNV7 zKX~w9f$=iCA?_b_C`1~nIK|BN^a30RP3)hOZiG%NVTU6U4Bfz=VwLdF?Q)m$_I%L2 z{%o%eSRAZO8>B>my5}3^0#y=F%n1Y1X3cVD9CluSJRbHkGvXw`-_enwZR3RD6uW>D?5+^_A~z$g66YJP^1k= zMsvMg@otF+QZ>P)o zta}z)AEW1|GTY6OPp}jmHj?+ggQvqcfNxR%JyuIG2^e$?7g>CyHULw^{tf0Y4vKqL zk|GTD(_s3UEpFEX2%0-90)IW#3$?TtfRrn-!S)2BJmZW>EgVraDwRi45lZX8wZtJ$ zxo~Tza@JhB#(H(<%4UGGvjz-cLd>62=i_-wWk*s9>YZ@h3ndAV;Q@Z|5|hBMxqv@} zgQTTih<5h8F4wHphm$S6qr7%tA3}(!%9WjH?XQq4Pu>aFF#s^yxgA7pZKu^I7WlRM zis5^maA}pMS@x5RgO*%tadLG@G)2`dv!6COI?Gs^3v^&jode^WS;*}rA*PeVI+hWL z$7X6$shf}RSh|cRi<0`&^~SI3!Ml=f^+9RSX#}Y%LBqqj?eDu)i}vj8&}||#2G+bU zwHS~>41H0oFe!O~s2xZhWgbXMMWbBJ-4oF6XATU`9S*=4P8Gk;iSkm?EVjx8fLhW( z6RZt?YX9qF= znZxlED_F0zgL6I0dL@hV?UQe0yhP?3ALf{IfuMf39;{c>NqX7_{qcJDY5Ltk*>v`s zyqDs$8wtj1zn2cP((-#Q?Zo;7NMPT43M!EIn5$NHweZDA{>$vmXkh z_2)M?BpmZ>)?V!2T<%v1WX;sTq&-~EVw-XTsrMbkr&g-_x$ecCErAviI{T-y@}s`A zb5_WmAQbbZXNn7GYvuYVC}&RXTs%r=tKk92h$%eB9vc7!g5z?Q=`Ru}$t9W^n5h4AUy%^C@DCCX8`an`9$TaL{;9O6Y zZVil?%_0WCtes9+XHyt2!MGMnurWT8kjlcAM&;glc?Z7V-t6D*)SF(*B*2<5Ybo?@ z?Y8g=6KX;E0t+sLmm_9;m*4KAIUZFw?dBIZ8YdjqGFRjlQkW^{NjaXRAU?_j+zk6h zw!!hN9OLypnS_e1rwIUFE+KR%v)`RzW4Qgj{Supko$noYdL%pyTq(udc!!x1Y}VL~ z`MRnqB3Jrh*CJVituoJeV2z}seSOQ!YJXaY>Wx9@cYf5KV1FOt5O&E-dlx`ja0*1-t_@U9rJ-HO6 zF$0E^X&Dr#h+}xW%L6Wk0TgUfLuXdol;%|)vTvjIfp*$s;*@HlPgyn~( zSv6UOQe*de3M*_(F0SC0&{)9AxT3ANVQae-WBP#b1F4pHd5{d>dD}zbETde*{)|Qn z35Pi=-!}!G@zD)r;r5m~$ug0^fvNWjJOdUfscH?B<6|Bp$i^vIey7u%Ri{%$1EAz! 
z9`WSu4t4OTyg0bo9HROY&K)X&h(vpkXI1!~XCxfU&sk7aHADL{Orz<5fT7G<( zPZ>6k+{~>LOt+5A;%)tWH!?tLPvs)`ykewB)axZF3%`u&$En}XhKa?8kk5uZ)Sl5) z>P^!dUuUi`GR_K-@!ft&c~&<83V{ zpgcJ{SDNK6Ef#G``(1%04XSA&50lTD9T(3i~1t5Y14*F*18O1tKL-R1#K1u3PFVqj?|w04*pTo=Gt^eeo<|Y z{jtsmk|)gNzGnXUdaf&BB6Fr<+UZre3}v8pT~Qf;!w?O%;}8xeFe*Fet)Q5cI;hO` zOueMd4Q2)IRR$mWee8I74Q5u9BkU)bfvpL5;jKahF#efU8PzCA+Mj9X7FqEB6Lb7) zb@mZhB4G@riY73|Gd|k2vBrxvEEC=3dF|XWmeF8rESoFFT`~*=TzG-i&9M`aM(}jw z`6^Y-pZnk5WHu)qxhpb{xHcf7;n$!dg}^ zqV6tVGAJ8c44uERpKd?jcfZ|^)sjx?2*RH|k>TED;TBHkR&Cf^9X|(KcGr@I}!c;aGR=`w>)^!Rfl}xBGrexR~!mC9ozHANNHT=HJ?Bes4UzfGCT(+mIv9yzY|AY0kDoE=a@c*l8S+$ACya2 z1LE?ABoj=<-7BAeD6X5xL-@uQ`UQ8i?a*{s*UQt2h6-bbTqx5{=S^+Hm$eNN8`a+z zT`2&L7M#w;953(MVfz4&)tHQn*3VujV=(3)1#s#HF!#cq4`#iyQLt9@fG~>4Jo3FG zb)!Ss@Q@^WRP4XF&Q!SI<8Ui|hQE?tNGd3?-?yvfQ2iZt|H;x*b-~9H9dQkRC*7W) zI`(L6^T*r2-5bII{7Z{#a0-^A|6CkSoH+BXRlqV_o-6_FfB!>Q+k=DTvLmy`_fzwG zZ|esOR)uO+(+T>pf3;WkcV%J-(HGPuO7x?Uv=DI$RrjUtT=o9ftQ-%1>7Siw8=nM; z-+i6RFefkm1PVci!1L~}GJxXm+JtQj38+k%PL{{3v_NY`pxooM|JPSRd1B}nWEqbA zd>P*1S#dMA=u_HCQ`y&eV+Dp1RaOzC+QC}rJK@&H|LG}lC?iY|8jJK$hLu+b8i=Yv zi3Ax5N%uBs%PBMIrE`?a+2r$mUd+FKk^+l|W9YI(5rog;M(iyCTAu@WKVS29CMu&< zexy*)hMl>^3q~m3Ss?w}nL`msLb5SWs+DH~i`G!JCl;S4Y-fgU`$&nx!nswjWL(uu zukym9{Nc>kDAOfYtWXWDVy>cck0oXE(YyaVVb7NchP0`zI)=sBGjTzD=^JdkqdZ>eg{Zo(<7 zcrb3{X6kdBsL*1G_!Ad(@;rkD0q%{5evSaY7lC^RaQ+#)@Y2W@Lx2XM)upS?+kyEmv0XV2|CHr!iJ@Rb!%(X z(i7lVB;@X6U^UA0lrlJ7h`0^>f`qRIi_X<^gMaL5N_sJrRa}O6p+Cs+NPHh*ni&>> z4Abw-DTtk6n(a;!peqj+tcZA1nxC)-H&mSoIKMv1dkg{pzzs{}E)+DCk1BB18Xkj| z<|#zMsGcr>X5Tl-EK{Qc_e+pauqE&wB3WH-n5mkFf7h4i=*FJ9DWkf*uYz8E2dm#0 zh{PnWu`KV^JP+Lou`1Q8vA!Y@xj`bU9scewTel(p(;s4v^b>cW|ENWd#KQxj`MC-V ztgr|FnC3$PY|&MH;TKLHO}~|pqU(Ap@}=aiAeO>Fr7<ez17UnHa;}*bQ5Eakz6Iu_#}g_`$re~o7wo5pyR^gHmAM)ECog)9E?H&B zX`BE93+j;;IJFPFhy(5{5xdM__*!dR-8i`54y95x8=u4;K1ZiRC%iiF# zfVY4ShUqI0)xiH=0AVE^1n+#wc0GT8j0I#ostD)-|CwxH-*bUVemcvffmhUj7EsSu zU_U^HdC&OYb(}af1dPCQ#|dgM(*Cz16U+L6`;kCsp}Mv|`w95#5b8=qG{~j*_s2-k zy#vm#{l6CgE=W+nnq{@@zP~>v0r&I1SMv0~*G2CV0iFK;z(a=AxX`lP^{(Z`P=<8o zSL`5bl29D5@ErYMQ4ocLC58H*#ip-9FsNWWW15n2OJbmoLkG!Fv)O;%*oqhQWhLtM zK;3`;_i+aGH|7ANk70m4`M*bs-UScsI!A9L#$O);E0W5AN0MVySVf{=Q-G&J<*U zS0^O#R?~{Kn_EwkIMmLQctNy>4QWwe8e9Jo!4zkS+pz96pEUU+>|%mb6``OXVsG$! 
zQ`2ijTeqGZ4S!}L4SnKy4s0n_*UC;iVrv#oPbALY9jWz>gQuX$%S@mW`k=*{G?#1u zW>J9rOCR5%l*fK5(5C0cumjpmKNqUont=t)?OapP+~oD)5i#>525Wnw?}@KPU)ig%4(X%NQ%2sYd=)E-!42)o*;a>P|<5t{#* z>neMkfiVw@QP7WwPP-F>cX)TtH*|AOKhY?j%l_s?-D=ZXVdTxcLsyNK{E6@g`F_xe zcrknU@#UBJ&F@hlR*l0P8dO|NitqA~N?5(zl-TugPp*(WHB!@1pUjP~A$(ve;R63) z=aW^n=aVIFs;}Af%ka;b2hvbOzMp=`8>BKGRRFM&t~P|<jNhGeMG_6pop3g@js$x1NsLGo2I4SV=w?h-gpN#vD_pbW_v_TkfC zsN!U;E^wtZ-yCG^U2R;q8JmBUlbbZ?Onheq0ytS~*qgz`+cH&akx)mOo27B0F$yg& za*#ez!`vDS+^Y4BoqByGbg#L7Cct4IjNzGz{EOe_X7K$`Z~FlbJvt_Bk!Dl3hD4_v zaHxcIBmVx+ZbE?{yyspj?y?DvDA7Qyp7pz(j0&@8E%sNdr_ONSr#OY-KGHAU_MP(w z2r%GuYF9%fzizmjHk!tLY^j0(nPgY2$1U^vEv>#OeEU==zvBsfMf((4KCmM6|J~@= zA6s9#))=Z5NwK`s(Ggave}8}N*E<_WTy#G~Z04_0Y#=cq!TJebe3Q!d={3;&tt9^W z)|{vQfd{lLx>hl!U`SQ|Zv1n|cc|6-e^B?|>lb7OEYL;~C9wZpx`@%;z$*1KMMwPq zENM{VH8xxtVsul_paizwT!%)=Ch1trosX6vxDI88btzuy%Uyy%v#~&=$li^Ksi%Ne z_-d)M#693AyORWwg@grCdw zaFk)si2*eLTyqNXyCw9R@1Yy({SYM#Dr!&o#VfU&qtmW}-C!bzyCnytVT&|>(lAlE zr7u-(VFLSsScEg_uWCvGeC(7fUsFF;DEAs~BKJlAwsMR;_gGYt2773wlu35AQt3H} zhpzD?4W!-z`HMUdZpH3$2a{eDOX5CD32LVH#x);uobPt-A@8)O@g{v61Hd&lm`0Tr zdtquc*c#H*CXy=C=?jbHEV|MU^TJ_z&qvqjLpZw#@y0^SE^VXAQ0KehVy^ZAVZ>j0 zy=8<69B?}x7STIDoC=VNQOZ*kgNnkS_#BJrl&rz)j14=Wo}q?n&V-)1GMmk?;4V;=u3p(USNn3a4O1o(aEjE8 zCl|CD3I!$Zxv3 z^?i?Wc)Q-|Xa%^QKS7zvQz@p$3pKvS<9_1;rE~9=RBszMDtfg;wS=ImN3-8_+Xy|K z0LrCzU_JzxQSWwfleyQsq+IQ>j1^dwv-we5p)!L*Nox9WE#%qc16?6=x4zewWUkEm zeNS=OOjYN2)CCs+5;|>GHJ>37L!Y*YJDgh$R%eQV46%s%{%GK2T~vQ4o~v4t#y*$? znD96&;I{UQGb-2gwLko6p0ANRw+3bCay4spoQYcwE&{C(J-ASihDVFJ{w*jg9Ri-G z(>BE=s?%m>j4qI?X=x?psFh`zp{y*rrzK2(I8t*Ld7Yge%q0$`2lI08T|&=10MZph z3;kO#CZM3}Wu2IN2=v^8EXla{?%MM}R%taE!p=M`wJpS6yKLs`l*y2~s1rlfVJNnkRIjB>&BTeM%-L==x{vuw8d&`$iw zCedu>+%b?3kAu$Tt^06_g_#GSp3Yz1!amCezW?@WU^)?E3JKFrvcnVFhQ2AgQ zVAL*Rbh7n9oG|56=*2GM!JPG*&WSq!jphbWN!cZ9bD~b;m`-UNgLdcDy-FuOKn9Cl z_3n2BL$+a8WW7)>-RI3I5ReKp^lYE)t;=Yag|V5=t?XF#q@ONQJJfGqF}c}`*L9y- zW!j>1hIY@rwdzAb%T_DXKfSQypLM-Bwvr@Xk6oLvcLUWefa*9=uK6_q0l+!{InO3o zn-g^0>f2WU%Y1a}8zXzHsg@0CHzPkf;gL@F-s1Jn*3BT_uJaVkJo^P%vRFUxHo-~ z`{Fu%QP7vrJ~n+Z3H*I~>>$sz2|bA6@=e6fC3VuSFRMi;zt@^@&DpGo`~5I^oFVlj zKdcpGk~Csm7aimCotR+^5dw^|ao_>nepx!=!N9c{^8Bn67Z>=d&OkGN9{Q^**oL-D?YutyhZyHFrm)E2?D@1J2 z`#aL@NfDzA&AD!vq+M@F*E#I(0pTXgo`X!AfEjd_Iljo`XNckVdVQ@WCc+xinqHx@*$3eEcwR!7*4m%iG(5xlknJ*>Hfuhy8_e{~N;a z9bpG?d{m>XRQHGT_^aToi8rY{^G0rZPoj~KkRIz#+YIhX-ERU%X;hy}26+L6^$7hX zfci<}?Q3Za%TT3euvoNu+SsmUSu0Q~b8`pk0GA&qgJ0aVV~obUv5$*Ye+&kwRGZ5J zoI+~-Cf&~Xt-(IeQXy3lzZ+GW?UUECFmm4((x#kWw=L9_w2^iAH;AM&o)(?w$ExZ~ zQoV21!WdIUN0xt*sY>jf?J;RLXk$_)L!{SYoehg{KRMn9Tr%+AiXF-8t_6wa*R?I? z&y{m=OMsZgIiCj%O6*aEtVU}T5sSz((b*8?m;xd9LZq@yHrT%6`Udt*P|Nz($@g?3 z7O{^gjVe>zv>L0A%L!B-!dyh1yA!Rj39#LG;8&7*CZM_zdm<81kS$QdRF2i@ zeLOR}o#O=;_6lHMoT8ewSdq+HuYB1yw64SVIE{(S9(B1jeN{7;NVR`OK~Puonj)l7 zJ~S!UF9?^pNyaJ?|GI!7Io{J*wCP-{?nUT z5Jc<|BEFEseuUDHP5>x16yB`V-EN}6h?~CQwo|?kD>zx~fTh*7=S~su><)v{eCN9R ztj8dtIhHW!(uu7nTEmR<-pFqdO0H%}!{NzW<7U8B3FgV5eJDmm_vrh1<>A%~3x7eM zM~v*lbGli1tBhn!UUi2(G)GC&1aQX5-xhC9tELpbT=<(Ni{JkQdKRAPQU6o|is8M> zAl6JDGRq|jXUU^}Z~$ml?Cde!&H@wgtWJUhs{)lnrD3SB=v0Z2L!C;#Ew(Yr$Qj?r zbRy)6h|W#oL)!V!F5@3yK?{z^JQFNEOeOL^_;k>>~ZdXIvQHA z-@VPdz5D0UG#=;D^%kcl3QweQG>Pd4mh?l8O*U(7NxXFcMA{JO|9^ZCkOOn_Q0SFha_EUA$wVPez! 
zc?z)H6uPK0s7XDKacY_LrIg9h9YH8}l`{>_@maMA!;M8c@(VvDpB1KW-qwcwim;GRvZSOr`)bNJR zzzU*KglwLIT057%t#RPCOzsUodbHu(5-$f+)dxi@}5fp@gh%UA|usE_aK;i zk1B6q^;hU|C;!cGLOIlSV}VRbdkXnFR;dScP*TuUP3NL44H8zZ3yX3>t-|-R=a?pY zC_6GjUten#j7&|5O+j0?-0Rp266%1*Rnckexo7YXt`M(>04GkIU-2G8IH;t;a#T}) zl8)X`?PGm<>4js$` z9If#EiNHk?_$uhyPPkIHGc8&oJ9pXymnRC>`ZZ5zSeUg3>SwGsTfOk?9@U@z7O{WY zvs6&SpCxRqC5H!#1uT_lh?8Rboo;7! zi{3d8tHc!SII(DjH+=oSyjDlR627Nba!(r1Ga{od{q`v3cIOFc`)0fNvsa_9v+}Gt z!XyT`whJCka?*-C+nnV8{t1(o_S6*!QZi__!;+P=@&E-iIfbm)9fBtiW_bRn0UbW9 z#_xv25TA$H=XFObTgEusCvP*R;A?Hf1<)YEGD)@=jCmM)> zZj(rXZi{{kGR*W#x)Q~5qsFlNu!NP_MJ=ix`O@0{co=Yv5?-~qmITaYSP~VpNP7k@ z^8;6$E~%m5#BPI?wS%_ro7@9lhUFvTH0L~XR1k4n&O_1emsU$$OI=P5LA`s}jX{8t zRwKG<gXdg4yMlx|47eDI_#{up2Wv8eE2rKEbTy<311jieaJ*dvN1?(q zSG=Un7hBMFVu?tTy-a4 z9^)r==ZDx377mze^DF4D6wT4@@-Eo35ab{NMF$R$pVR5Z*gIB0i1@r?()_(WV83{fa=E?(E& zR~Q>qjAwa$O;Z@$9^Dz|a$&zhpplulm1y>Qpr^AlSE@FXS^kHeY%pKOm>X@n>@Bh- zQPPi6?(C0Y%o6?aiq{!7d6+e5wvB_-l|((`lPlXPrPvwLu=x!r6pIO4E74Jz@qx6p zS}ZX;AgpsM3L)K%=2+kk#NSjwe}-98v}!(&a84;-wng6-)L+`h>3C> zp&euoI4Sg$PveE2CBEax97%Rs!{xuo;nc`$5?0WAnC+zSn2tT|s!e=ylpyb$Qq~9c z_r#9jfedZZ4?PEVvCEgwR427+w;;lJ%o}}+_u?ELI)Qz%wP-Pqu-ht1C~Lu*52MkA z#;07=EV!M*QE!|{P;~UM>J(j_V9H2&V8RQB{&j`h=eWEd%GtS5CXs^m zj|ZLu*lj)8+1&4RR{k>7`dEkhfjB|VVAy$m^pzth)Le6WMZ?J_o$S2?rb8C(Bo>3x zB}e;YhhJnR`6G*qyT|#-{k!PlwtOMAJ!-q!C?nenQk@s#`&>8S{+i;?YAc2BB`zm* zQM)^d2nv!=O2uOt>9+P8JQ6UKAd{8Ef0Pc(&VQQnxOnF0W_x7*_qjZFZw(O^Yk%3R z@!9LDpO@xxTV2%bR_!Aq4_RBXF*w$7$29Oi9nJO1t`LZ~t!M6UNYn_UwRAy`W+FWk zpjf>C6l|B32_s21o<3z>mJ^>QL0`@yU~c0^R`;f;nWZl|BbouCo@L6q@`%nEMZfq>WD2CoWUY zhaU>2ST7G{?JM{z-g<;In5l z0>#@+R0SG1&`YtWrrYb7k4%GqEgD@sIlMGwAtY01jWd)yLYN`K4n-M1ZyyxMa5>u< z$$ej4kerBZx&{@I=4YE;M;sNzTL*pM%{gW3>UzJNJcrHIZ}+^UoMUy^k-yebwtf&5 zj`*cJf}$~g!I*V_O5I4k%J4=)bpCLyP7p>Ax-rXF+2WkdNz1HF3S?NiBvMM1c$^F~ z`r#|{Af%pG0?JQlKGt{>EYf|pN zWwY~c5+vqtGAONy+n$2bT=K{a4<#`3-*rOv7?EhXHwEN9th+0AKtaK*H@7w)z z=M_p27N($;{PXWWjoJzacYU*9{{rQ%asU1=F+WUX`<08I_uq|_jRAMvKCDzQ`aiuo zN7SVXpnJUO9*097JOzFg9}g+*C!c+G#J+ z!K?uBNf}@tUa~;tups<;LE1mAqHPA<=%Mx}4^asWfoP;(RPc906wy2%>6I*hELKp zh=ebH1TgG?5bUr?cLC7ntD3#wXLIDzUE;q8`p$qtV~pf`a5f?EVc_f#4t)AOsttHP zOoO^~Sa?>Xmu^D@k9c>44d_%00@`~)nBO+0PZZMH#?SFqm zuSu4aN%HPq7;08X;6KQIH{y9&cYM80XE5$}I`xcT!blpa4sU-1(FyMEbU#{AB+%OM z-!F``Y`Of!^UyD}d$r3 zdD?*>hzWHYaKDYrb!y0G9I-S^Z+9D={@Nz*!0Pg*9^SgqQtt8U=b%W`E| z)*#~jutiY!u1)CDW#Vii%TB^%wl=ClBQ>f*G661yj-dqe2Ska&Z>rtSMRtB5D>kFL%08JR^H;@$GcG}^{c#g^(7MU8aEWWesym-OA97)r8N)6iw*qlXq8!@QfG$vD$E@;e31{79a0 zh8>q#QD@t}a*hO4Gxh0a6FM{Q!nuCsy_w-!d)2Q$wlQ#$2@w?U!Bo)HWW{s}aE1@n zFTM0tYajK^@I~E_sk5ft@OrW2nTfzElKSi-RqIj{f3fPb10n8ySwsp%OOG*;%Vo!w zd$WX*nv|F%m<~)UJQ&WhVS;vX7T$g(=4%Cz8PRin5=^a!%UczccQQ%hXTL~=uAdN2 z#4&NAPo-1U*=fXja%v{`tba}6o_=#qnaz)AtUS^Z`iI=DgX zzWI7EjjCE-(gQ{?hj=lU1m$@bUAMI8VJl8aay2!Px_$X_(8V)6!OnCt1pkG0&HbTu zdm8Q9b3u;3_7+)8u*l;(;Fr-S4ObU%ApgC?+=Dar275-~oY)pv%Kp2zr3QlE^&&(R z{MqUL*-XZeJSQsTG5&nr8v}Z@Zj$@W`6*b2<4@0KOc=O9tp}MqPVt{#C_aGUyypx| zMR!3)^|eH&a|YBs3oRBKE7#&pW`LkTACwl2zCPoqHbTb$RXrGlF>Fsj2cMq+6^Cm1 z0NPT&Tlf(mc;W~o{M|$d9Yzj2JUgjy;8$=&WoJhSkp+M&|{hO{5RbX1krdStJ(m3SOFHv5pZ&KOZ_Gm)QeODoRkVRD=j0BR(&qr1k49J z(mylFnTf`t*J#oH;{{MQg--GV8UTzv^#rBiIlf;@_b-7nXaOJuSArCKOcf;;Y#Mh> z`hf6=kZVjh!CEu1gOHqiw|um|1$y@I zEPdV1V;ph+9{}sIfQm&Oq>n)3)|9Z8G0S!y5%JBjginJL&bAW|HAzk^l=qm-~bjp@FJg>CjmQ;uTpzIA9uz!@U}*@WT!p z^jsUNj4{daQGD=wPKAoUt+_5LOId%-_f?^etx|Z=ywZA*e!}MSK;_DNLeqnR-2#P6 z?*g6K2c}=x3LBWFHIY_h#+HL#_ZSxH-Oy4blW4 zt4JxMikO6o2ouuvxfwovp?&z+^lnTbda7tSwX@AxwpLdHjY27~mtac%`Ap<1h*_ZX zIR^u_^9}plGslD0G-I2-B9qwz=c`(JD>=%TP*tRP;Jj!I&Z!5M@=iP#8|e~V2Tj>M 
zjKO_xiUF?w@d%u!I~mRx3UfESR!Cn0Q`Shs>!Tqw=o5hs$rql;Xp3{on+>Y%Dl+~* zzIonnNP8Mx{}fd7=YfEaJmw9NZ*k2Uo`wUMBi{s!`eGP+-t%#_2|$}B0GnW5n_B?@ z;-a|d0V;b`Ar~g@-^+&Q3u4QjpurjEWdlW9u}muCngFQ5sj`L{D?lgDB-8z^rOSXx zu!iLa1#$9~2TCTa;($?L0&w2m1VU8v!yhD%!E1r+VuZfuu=*jC0M2sOHn7T!QqF4- zmnZ!*ML&LP(!QbM%WLBd5Rh|$Rz(k=%J7~<+HH=sC#3WU8e4v_0e71L;$JM_*B30Q zT9I5Ti~-aGq2ZFZx8(82;8gXS62ZuJEL(!Rr;P>+fL!KlQ0#;bNPq+(445fSR^99m z`5*ekD`H&&hgBJ#1EHS7r%ZxTEji~tcM}yzx!e!~UCwr)LTsSeU8G6COL~;%0#y>Y zS9en+d|FD>h-^**RaCX`;S^wf8UXuc4(|!5!D@h7 zlLG%hQT}4NUQ-q_s3qtvM}bo)lSugCf%9Yw@Y!#5Ry*b_H{7QEdZYZsGYGFexhg*=8N}7_?ter{KxCK+SEz>Ut{1 zs$Bw{V;4v-$YV6anLfnPFhsU%>LNgIAe#<%tas=~8tgZz#7oO#vvZAU*oLD5Nwt1l8)U*P-9QGqOl9t5nl0gCb_&O!KH8B&@-@ z&}hR`3gQEa$amL$A_mcZ8o}hSr#br1_yuoC45VaYcsqj70eK%5 zPf4G|5 z@cXUbgX@t3Y2lnuHyU&dJV6g_$svOMchR%P`1Xhu2e;+_@gg#lSKoBFpOWa~q zLZ0l#zLFO4)eH$iu%l=9RNo5q4M{Sm5i|s;#wUc?fYd7qSUwT!^++F<8ME`ES_O-1D5!+AXy0RVo(j_yox<6GLgUpvY_#gA7&X z4D|hAL6$&(Tq04WJ*{4$8_BqohK58o^(uAqpNPvKQ$Q~awZ`f#Co*ghi7X0X>BXlc z3w)!H;5clR62Syz`=9C(v(hvUWl?qZz!A7)IQ9glHN zL)iH$IQefLr*Igy7o85lTpOtbBe=?5sRAsm6eGu%4?(ZQDnvr-?ryT+Zs!P!&b}@? zr2CnHRC_h*4t2)9N~WKKk=OFsz;J|Mz-0~GGzZ(8?It_$g$#~#X3%JAN^LQ(ft?}1wH2&RjpSCt~fhBooZA84i??;!(GPsxI&WBL6v6{ySL-0y1Zwwombd|6-LlgFWELSfmWV;DVC*&SIH(=|Nd&C0XqY+|G zVQ&8evC4LIWwcq{hE1=n@4*V4;v^6vMQVpMr=Bn+wG2V?5U>X~9r*1kMi3ZjoI ztuXW;!rMf=s|m1OM;&RuJ4NH~L@W<}4rX#6|L>&ss2Hx_Jri0)h;P!3goaDMYXPSw z(kNvw0=j*P&n-Pb$PFIcUCJ-zzIvtN_jtHay;78*1dMtX)<0E+MsxDjlmdrv5%Q?8 zQb?-u?;B4TyXH<7!hLdtaE>O@q(dA7uT8`8M;WVU1DG0B*Q`f ztPlVzAKM;jg$E=ZA(e#<37=9Rl>$N2ejlOk&{IxI8RsH~LQtXu+TJyMu;QKj#3Ku& z9>6)~n(%NBd;&>f3`7=BX2#?VxAl5OLe)#n-|M0ObB|J0Ec(ZSsg;O7=P=FqL9zt0cXd>305GnMf%pwYN(kKrA5C<>nykR$Rn*TV$7AYEBc0ph% zD9&^dfkp6p!xjgttCG_!m2CqC_|d{1$B8%Og!~rp2?_uw5mzZBt?Sri zI^6p{(-OL@3bb&xWzTM*cVt&z!?nFeXSJ)H6jM9X)ETAKs%gfr?Jl=zmp6GhoYI%V zeG7c8r$|G1!cX4In}T%2xaqv#!Zb=rx#R)1DlzFi>!hX*C{YErvQNFeKMP{o<>jx; zZdZBCm;M6q_0%XCIt&@{4#nDo5yVN_ZaMggbhHnl?*i`S7WAHdA(b^T~r3FIJx-v zCj;WO{DJZRDaV^BHLLa~W__`Y--ixQXsYJz6Qtr z*^$EH6HvGNI*^dLR7bL3TL7DlD1{Na(yr$>giC;^$$s6m2kZ%&*2HNFb)ktJrVP(8 z;JkhxJO_?1W?i8q)a%=j>v7<__ZI3)69VAi>m6Vj(jyu}w-mt8_!Z=YV;SXkJ5$4B zDn%6&>TGsfigmzB#YL^fNn9nTlmi%6XeUGcde}@RXfY6eKv!I1~!I=@o3L(`$?!CtTON+c)I8rSMZ zGdz*xmt`&%_r3Q4bl54Evx_%u9O5;7@qOvoiH`&i>Vnz%Bz+@Hg!VMx$lNGJ!*EM@Y0xMh#8_?4tfa^(} z=;WixMeAO=BfSU=hjAs9*ZAhu?_6#`zgI@98KQ?lnUn}*6WyIP16<260_y=7EYTN^g4hyn)E zNOz}nmvnbY2-2l=NF&mn8#diaBS@h~1vO2PX}aurB1d$_+^+uerW> zt}}3~XCHd)`|;|iOlTHhA!z=0=d4IYj{5Q$Aq(O5EXMr#;$yBySQ54|Ehwzk<40J5 z!RIa5G}hBKwv8<84|<7Srd2+4q;Rs=n}}JHR0UED9!D0B&P+F-Ky1x&a0p?3F6PYk za9ng6SYJa2xUl|v{CtM0g}h(E!NC|D^2&tUo#FYFd13NTP14`Gqm>RTMlr8y!z*P< zcWo?D0z|0LX~CEgl(il1JV-EneukZv#LO1s%n~!#DDRkV5;1muLRk4+(i8^1~K}iS3w)gCC z(qZQidC{s)lB-=Lk_)ivP6Bd1{jrovOc%xY>6$wq1H%iex4Qsw83zjYq;yIlq4mO? 
z#=4RoV|1{2S|VpM;&fCKra2T5S@H56nK*Nn{Q%I}hll(!}Q|Bu$m&eiJho847uM-*25FGph5foG~VQEMFnD zIAK7Fh1&y%!DL_NC0NAG6;c^q?tcHAUl)vVD)e7LU4p=ipZ^gZT5^1Q?A2^~)(Q5P z4`2>WfHE*dQgtdCj+$~cfSa9?B(OCBr5twDmfhP?Cc%OKF0=)s@UboOJ<#bb z7N^U8={(bB?9L{I3fH(=U`F1Xl>HocfEU6ZQoW1*F4{59a#5rQul;tCck)w@({Uj9Ab z-Bj*5F}xy(Z;gnoAB99oUhFI9UCgX`a%7n`@2E&hn~rrM7Nv)`@L3?bsx3hPZ5Kp3!#w2HXcxU z$PqV*~3vaMdu zkt|sihOY@te9k4jlv*QMYU4?bfw%^>pDSrFUn}ZtU(K3qo2ZMPcPjE0FXf&p>L$t9 zvpM#1k7s6vMR#CE$MP&zTmxy~w+frP*#_G{9#uSPj`C`?akd*~+PjwJg_?NjbN<%x`{vgX+NLvWw2sm!76~q!CmdqPq5TDQi03 zN3SG|#WR1L{jmLPg2A-QUGzjY2d6Qq=vCIdK>8H6mfnwo62u^GP%cX{tKb!q57{1Y zL;qqYI{lqT`7(LTNIvx7Z2RFCsbUktb1+`LL7PrbO(K}2evMbFg3=gdcQq&LqEi)B zcUguJHCW~Vow*lB3T3&}xFXkPcGK2!R-F`1>SBwDF00?6kP!%-sFkwYq>F5I$LXsL zV`!F1BfM0G-RlfS#O7>LlWve?rX=~O>FaAs{UxDWcc@g3Z5sh$*@j?Y*AIo=L|qF> zDDR-@Xjp-J7K^`e5dK-LwFH}p<{Wh0{H2%+gjQ$BFBhCabIl#Bf?ToBdHI%9`P8u+ z!23w6jF$*$CdXlWF-YU$^J}VAZOzz;@_R=)Zw+jXcf92wZg}d$Z|5p+z1%}-hnNS?>okz%G1zenHS*#{WVnR_ykqX|f?T1MKTF1@W`` zd=Rx1F!_$(xY}!Ycw4`O&`ZoC^=fv?E(CwNY&SbGS9|*W-iPZQq(VKfP9P55 z5>_7N3Y>bE*2B0{w?ce&9&sBGoR%gXQ zxm6xPPurLu#P$SuA&*c1d@J$g_>+RhRt&kHdLwk!#%}emBROp1_OhPnJ9JXTea1WI z5<7YC zP&t}LoLyMFr4KM^-zzE0+_N37v%)?LC|Ag5cyB0^o^!dIo&W1U&um=IruZS6#T@lI z`!z3_c9+TVg?BUfBQH8R&TuH}O0(u@Kp^6y^G>rwzWXPHz$NyA6>hEa@*34W-FZyM zeVljXG*-{u;%%6aI7{s_`|Auvo*#%$jRLVf=fFAUNmnJY5bK`QkuG3~Ho;NnWP`s3 z7?1N`O#@t6LGI@5LFsqPOkEt@!4uLgT(4=qRJKtKS3LCdAO)M>cYWqOHD;+GU8{oh zk`y-g2H@D^8NXiOLu=@{CsC7YfHFE0_<(ETlj|o3W0gonUZ+;0)T4RCWlA)IL)1yT zBe-R-1wKOFAqu_9?*)a6t+CKFNA=t&iv@)wldsya@R&h=i8J2PJQ`47f}3YsV81Xs z%U~J|$^iL09!NYwpDqV2GM2g5PXL<}_&vU4jAw*)792$4F^Ew?Od|A=poUDKZgi#{ z94DCZM!q)-xq!0YV=^P{!!#P?;ogcje^Ub-jA{1fsd&K5Cdq<~B-gOyPge zABwj?v?yGHvEgHf#T?-OREsInRgpX!IHFN1E(nC2@~jX&M(wU~1T4(CQ(vht)s*vM z%z-vOFaK4C^D{d8&HzA8NtCLcxYzoScNR9D+eOqA0)y-;0d86R2Y76h4r%6#ShXEq zu&`|qD_Q_8X}Dg{#+N5%c7yspk+rqxAr?Su5(#hcB5?t^A%P|_T{~B6KVO>TV+2{Z z!zNB&nvdbvCGEu5dIerWOUmy`5X&>;Vi7 zPdk{dAlavDP<(i=rH9lc$7BBN$}NnkCK!&b)-`Fj7A?I) z($_^H)XtQKUdNeD(Ix|@5?_?0SnyQ$F0hd7^Bga5lyhlaS#Prb*wXRbPPB7%u0LoE;^Z`RqDf_T2Qdrty!w z8O|R{ofai&3G@*^gi9KjR%V4e+pakCIYhcYvaoVVTvXYR1pq*HEQ&2-2iC*-H1T2TPC$?J-rKl> z+)=q#&WQqPF4EA9FeMC&aLS}x$&t$BE!jh}6!T2?r_z>)^c4>@jbXAmdV=Qf6s(D3 z32s`bXN{IMUSFNpOj*6-T=(S8UOj&eoP4KM<(QVv5p5k&yu_H<@&dAj~8 zg%Dl&{lo2T$jd|eCw9qqcOmzzWW>Q%wQo7X~FWMZulci}5ihb)agvIC4$ zYbL*4np8Dk&gJ*Rt<}tWDrD5mr&eVQr+kF2Mrour5}CT3*XlcvfO5@_9i;I87cTS_ zu|==loI`h6x|I|9b|jMEn1SA_iAGYV7(%jtJ>{Q^Gryu7ZccT2^FQO{x0u$jZp>dQ zA?`AEcaJ6b-`&8qtR%=5AxVjS#qSf&t1V}{_!!^&R$DjtI@7WvZZYYk6UY9q#{f`ModJM} zR!xra1b@*dm_S2kBUWqjmnJS#7s5a{(sE{gF-HGkTLAtT4FnOGEmYS3A#?sAzei;O zvZ=d6xb%mfGma{P)KoS7_wFcWv$+Ya{zt{ zQYUx->ql+^o|l;lX?&a%4{rW?vT_;#6W<*)t5_lTxFRrNXkC~Z#Pb;A0@T>*H)A zP9f<=We?nMUD}CUb93p&MJwlIh?lxDJON%h^BX;P%n+3?K?BI~FW6g!At9d_t~HeqKa4|90<{{q<0JDC=77Tc z4_qG6Z-R~U1%(}TvHgN6R= zv?tHo_mNlx2pST@X5)Mf`92W=^&BHuq0z8HX9MLz&baM{j$P)hQ+T~$^Qm8{FIn{LeMkANC^S7?jk^crZ9+v?P98`f#6pRF>h2e_+83Ti1c+& z=HB)j*DS}TUD(177Fb*kDRxE|n6mrOl6U^6zgt3srCEtrfV5NhDhMadV~Aa&u^3Uw z300sw6jhK(Y9xzJ6;V49tt*q3r-E3AAu_EYrfkL|@yu9$lO}Bgj+bpeU1kpYR$Ks= zHUHcNg6A=n3Dln6%jQZJ`oP*cdX^|uIJU!j^iCY%P&C0!YxxBXlD1vOu16gPX{{qzhMK|vnSc_BBo+>YH7 zBofk4>soe5+uqSZyz4^}{#iv2oA4%1Q%!|gB>G!^vZSQuTq!xV*lHpUbEs#Tki_R#F=ikE4+BjqVT`5)hjxdn1g_kafp;{XZ(+ramN*knoCSuA_8VG~}e7ysOrz zHi>ME?Vi+Y;;W)telvuG4YB7PioQa${J3JhcQtWthoQox?jYo0W%WzAc{xRqNk!2QZ9ef%8sUn=I`k)Bo$AmLX=sbF+@^-+O~!C>03ZD?YsOheRiT ze9J-z9Ho2P(;5EU*&rE!HuxO-l{M}Eh_qkd^83#N9ByMK^VpWI1^w}pMKTRg-j|Mt zs=Yv7q}bh0X7O?=E8W_sqCiE^5GD2e`Q5!VbXo#*&PUIbxiXJ@!tt;r--UZT|=c!l)RuacyHQ!C4Sn 
zgx6j^j$qyQC$cCnkwoZxNL+T&-&l{oAqpc?W&sa5awsUGL3|qk_CV(u8d=^9WXfxq zAUU&p_w2zC=tLY6_GYcu?sN^S6#aD3-pTqvOu(W_02@pRM@+EKZu5USVt6BR7UoR* zC@0JhaoUO^dh3Z)O}W)O+@(N~5{!;9p{l@9tIHx>pwe&$J0HVNx^lJ|dGrp6LvyVe zJ#$i^hyjAXx|s4?xLcS zD(`{TV*7;Ak|M4rg4WK?dC(Iha~vx1l(Ej?7USNb3 z@0=@b{naWHFu#+r@A>`pBiB>GQ@y_VSUvoAt4N_pu~-}V&%hV-h4MFu)IGmad-YeV zbU-JVwTY4b^&`(fYOePGkGek7*-HzI1Jbxpv0=>dc+x=c@V}sTJsk8!+U(1>)2x1% z+&hym$yI?00C{&WN7fFj?e(W?OspT4}t_N%L;A3?Sy)Ip)9J3TllO6WR#jZHlG*G(@XgM@jJj1zGAKpxZK>RaHBa_ z_-M!VEX~w*LBm5k42KCngUno$%Fn-7+0fIqQvYRp_wiLL%e%^KEc}0s1PemH1P|wm z;p6E6E`w^R1-9?SZz%^dD}Q!3y1UG6u}m|4nJenLt7`GD7nv!5=-n`D<+6N5p!+Vd z$Bg0OU{@biStYH+Ub)Z;lkLSYGa!36eHQhXHw z_l%=2WB?5RWADrCLzX6qHb4_ULy)UM!mqCDeamR&9{&AE-4V;wBlOQpZy+2ghOqz4kzQ3j+juRzz*24rX5n+e89@c>a$;mgIYgu!bZW>+uBn`BQ!<+JE7FD53Yn zT~OkW%b-7lMO1~Q*B=ZN2|{6e=dgJj=jbnq&j0QPE<=}IN0Mo}$yY5!`mT~^r6FTJ z#fnG*2$B9=yL^!;j{A7QA=kxDdN48?|_A}RTmm{7=rUU+JNKBF`x*h{?wP)?}2g|iu@p*89LOufU+B3e;hcj zfYjWBExKt=S5Q42BnppKN}%8Qy`)=kfgPmxP5QkjnGPtR_99CSRE-8AE#t*NZdh*( zkh@NTECN$dC!!pZhk#37_*`xnZuN41JG9IrECoI}Tp&{m($m|lj}!@Q0fo6aFg)|U z+;;s~1x$_d7N-aNureUM2E$DvewX(UygR0A&6M$>i=66l@Q_AWC9R|}C#X(44J;F& z4zB{{R}v^RCyM8qFL!|R zjJy#<7Ps5mAu>>HgP$1%2FQmFx+Ecm>#IwO0`dIZPS@{}W7%yuPkS;NzT#Vp+r_g} zoC5t3S~c($DzMM7#M?{2F9!qRl&wJ}FPI0D_(UXG;TtK%LXsr&*^HL3Ywl8$+G2XH>y z4ttBPzCbP3pnSQUw_pmU888I5$e7zvzC-CHMFpP{uObPK_u3HP#l{Hp9^e5=SGv$I|(Q!Tq>Cw9W*)Du$0#$%a zv;T_sAk7#m>%+~d(efYZzyTbXsdX__0bNlqEI*bUAl>{B5#<#_?71uI_>EF6^d$A$ z#aFIbC$&_-`N;AhjRM`FLG1acA-mP@1+-eymp821MXKl_qAVrpZ3$G(N|}@GS+8>V zEQcf@zt|c8!{11e1awKWz&Iise)l_ol6qkcXWbS~4l8M{eXlzIV?_kYNIA*!@vaQ& z$?<-Laat#mWDx=k%NB^S=+;05sOx0*f@+Yk@d;q^b`79#tir)e?!Xp!3&)TnQ9)8R zV>`Qyy;+TSWM)h5d&Okd&r;o%AHS5@-L_OTi?%MU`^x9E z`u$;s_nGDS`L}E0?9Es?=Eo70eC(31fSKOHE)ZxBz&l@h61u=G-_dOY@3hC}^H%f+ zqDVwo*_WD*@|+op2TB^Kbj3iBe_%wGCHdO<+oHUrqYNj~3qTSacYS2PjoP^hWNEDC z&6~mkHejH5zdr+F_&w*V;f$-rm4tO~3q%b#75y+US;Jf*E8q;h2FcW>1!*oRYg71n zV~wrYn%bWug|FO%$JvF*`XiSati0dBDb^?5M_mkM@5f56&+#)~K?w*c z>$!Cu^##zvws6{((q8|GTEjR>=HtNU#9)k~K z36HO{_?*h!4F@VTvh;wZ#+>}tJQADgQB8Z@lc5Y^A`;<)9(H@ag5tIxZJQR%d9Oku z;QIu;ffQ%JC2O&%uCS&+Fj*v3H=GkffIB#nxM)F7yMxPbD_PXN|C~^?(&u`mcKh0{c`QPo$eJ$0Qyx<`to|1X1@mG;ia)~d+<%1DZ>evYmoR_W zHs_kQDv%OXZGC z!t#QOC&1CjS^S1qmXKXdbEp|Ix9}~Ln`niv5c+J`9CBP@*rg(7f|c(^8OOH)Q6jVV zniGW^-6`e6!=bmhnwN^gmxqvWLsd7Ibd72}qN8<%2UGjHG9$f=fGVVHMY5QDRO@4! z%T#Aj!(a!`!!2oERbtiMmhRe?v=gl|d8!1iz!ePc^|dsVz_2 z@4$;{nW42r4h3Q=qLRk_E+IYH6TsGPEVQ!IzES;|>x<-~I+qYdU=(p+@2L5lN~tUR z>D#vUy1}wS06Pjb5we8HMLU_Q{HT8e=cK#Ntt_lH-Z{qs@$g+4WMzQQe| z)~UDV7}#C>Si1Q%kd5GetP?T@(B<&m9)>#rVM!tPyL)TZ%sGkU(H(HrqGv z7^z8TOZ4weB(&ctnZaf3Wy?A@9WH+E9fP3btBS6x>c&zhM+F*Gy2}Ps-}B~;jy72K z)#X>s_mi=wu$=Q`#mo1CAuMBZSDJy0#`m44(Q5fXS+*Z*At)yE@;>E(ChV*1B9`QG zh3I0qMC6;;1D!8tFgBwH?j25CJa#cUrD}>^I5O#wvunn!4JUg%b`zHpD zdTIWv!@&{qv{Y>Oa6WowTX!sSr(!vNOT(~WY5tkrPo0dkLAo1_Qq}YL*0jyIwT*Vx zX2M5pt67zh^d&I9kMnDFQ>cRBPh?Cq-_})~Sqe&5-wNr;s2+(>yIQDLHN{}8dfF@M zl5>>UT@>B#EBq^bS006EkF#SHH{>1?1iRCF!cua?Qk-SQSc8p4q(1_=2x&e5y{n#Q zvO4Mfr#mi-O&YO1G;oaNHcO@_y+7h8SEd7LH=*=fFid2v_c{8g3f%8l3V; zVP8$8+S@zJ;oG>f6e!+wDx5F6FY4HnSh&x!o!E~fEK)=ASXKx2E@~n zE&?oky0;HxI%ov-Y0W*{XqEPxj-Ot}?9u(L;}@&QM?^Cld^6QzNlL0K<8MuuX*;phxQw? 
zbLjyACbOx^-A3_9*IZILI9|=S`cs(qw7O5I5sQRUWMM8^7rh91yAVDdYM%uMed(ZT z1W)&#dQApg5(tC9p-K581izfjsW~EZNQ6WynVa|AbIUAY>CF}Gp9v>;pM+FR0EtE8hk@_zn&~D>yj;oO*WH!j zl1h2nvN0Vs+oo_M%$?UN(j{`dAH{QuRNo&NiBR{kEhNYk3p8^D8dI@9jktr;Z4kQk z{d0gIj$rCpGMQ4P45m8lw%p<-f|>cd5{f7w^Z!33(nM&(CyqD-&Bs1av3_;*2K=rALIo8(9TJn&pj{ZO%}zXMwvz<7D9b}j@L3AHT-3`n=0t> z8EdBbDwN5qrpVjIA5Zr+oQoBCysJjzbSYl?M2!O0R{ro*27M$Y7z5_RUsgir=%`@^ zOOk`1{J@Gcr(4jaP@movB`Zf}ybQVc9pOz;f9Jct{~6ov4hAT{t%bN~@a@Rc-DlfW z&}<%_*=ku$o&EHuz)$uo+ zb76Nea=nnF?>;9U+*P=4CODAkG)s?pZ0|B+&hvXW0!NsZ5S7(r8l`T=h5gaJ1@bxx&6l!FTfW(h3vf34Xr4}abA6!wSf0a z^;eGRj-Hj;=NN??xW+*dmAuk z*_?icnRXL!vyEy0(!7j=dVEWFw35NXBe8142=NnfBiWo4Oz4gIHY=0#-`HRrYxl}e zM59gijj!9CPqt=glV^$Y_&nNeTH~Z@a9CM{>%pYAvn;?>EA3S(RzeP`BmZ#!jp4%q_prS%cwZz6kkov<9B5`6IC_M$bEJ;i#DJe zI+ZP8UCKH-c+Z@Ubt7cVD&B#*P4YC<4=V`5JiaxUz;ZKr9%dSZ#EZSO%{&pzea2J38k1UvUl;R@M{iG;VlX1veTkye zus{5#oPQ+LT8fLB`*J;f5kv6f54`)P(d3(w8<%u_ z_b1D=@aGSIh?7vYAO-84VgS5sgbP4=Q_UZSz!OPq`z&QT>4UTQ)H zrWYc;Xd;G%u&1yO^o$JE!U!e6nkO)ONYEGsuQ< zbYYXFFc$f~5IC$R!v_vh-#k|^p_D*UT+oAhCRPlM&bAssQ@_`ECUoRTtCpI&_52NM zORNy~onB=e&KBBsrTxQc^_Y+4*KNJ$H3NVFoLs?w0# ze@6l6h$aqnZSvWJjmuatBP0-a82QB zJn>WNrbNZwCJ*K>9NU}f6U~&NPC3-Ie?as%5WuyS9XbEP!=WqiK$yfHjrMT6R{u2)oUmeE>#?7)o39!RIevRK5rLVWL@QL8N z%89Hk1VC1>e+)Z;5<>pCY(4ciqJN~2$7P+Np|e@xR#00*;&rC+%hl_T`vIOU_Dodu zMd1tjYMt}quK5>d&SBQ)BLoWzUO*@EuLn~{k30lyf|puR@07U)4pMQIjo$RD43g*U z%`gHINK5fYpN-JmT)RE=(4X??4cgZl7i5r*%x#btR5J1|tZZp!$EOAmEXG?S$tiz) z2mBd9PTC}L<|lsUe|RU?1qFQT>3a*Kf~^lmdK*%P1lB$c8>2sy|8buAr_Gto{~=KR z`z4Lp2(2xnkF$2wwaep|=L-D$T{NsR5h4Ha;rR6m{HAC~ht<|@4k7tVD@8Z^j&S+m z#Kh@ojNsD;>9h5uyIYIFb7#d$Y@-yk-j|H8+g>IbjNoj{jOkYQ6ghrfsrYw`)rD@Z z-Owd|VxY9tBk710uvR4du`Q*0LV?C62vzy?L**jIvIb5<199SkozJ8l-mzjQF2$`s z`%EO$15}yb>$&6>S1F!*s;%BhppNszj-{D@lDT%l^p($i5?S!SNFGvUB*aDNX^#a< z0#efojX!r~_rTF`1I8y%Azi3@+4Dq=oZsbNRIQ&C%>WodW6$>#^PRD( z1>Q8K&-dg{?O5$CvB>H$3cc$4YoJ1au&;TFRikx0-oIHN5ll#KVEoDC-!zX%CO#zV zThPzx`D@UpBm)K%^A`2r?Cz*E;H>d?@J;`6@Ed3a6D{=->*Qa|dZ8r1ct_pFY5!6n z`cVTuc%s#}^KWXM6AFL7Hb}z$)fFK18}fUgz9auPxBfOpWW)IG<<;voE}K@G;|u;c zn;Beo+v6FMFy6FZ_7ne|Bq9lDs}rRWOw%P*w)MJ7({{5v$duV{R3`%P_&J z5K2d~`n^~D?o2;8BFB{5k;?SMvh`sX>`m|#S+tf;e70Lh+Y{Tl#%ole8Ve&XizcC( zgEyBpqoxg0#%a@lY?XgioYwC5uX*=rGs*qH=4;Q+=B)kjW>%7LB{bJQ6IYma<8Upu zZmB%l_%7O61nk+I&WCqGq_C0<8gni$Yj4)b?vy(F2L9KW|r=oPC?Pf?3*{kPYlH30NP?n7yoXm8S(X40mcMfwos$`pkz z4$X@P!0+IXr99Ic1-cTAvWWXNYfbeNz-9j2oQ?zG7wp0RTtR-_UIZ+Seq)AGR!Klf zXx}mdvyWIh!ut10OG=6$bNLMJ7#cFd48(oLFKu$HtljVbzp$cl&RB6A6MyP5M1Y(^ zxG>}VTV|!UFl`~-pFINo7(_76{Sg069D`N`gZcH9cH*z!bi+>sY&Fr(ejBa)G4-H9 z{^<6vxa!XiL<9Pne;kbdT#ZS=9D4NIv-4lO3j(-{i2v_(Xaw;e29#3;m;N8qG7Ck7 zlGKBFJ)r#sCZ51zvKp$F042d&gIXRX_3Lks3xqE_flikg_W~GcSA*gX?d?WVYq&Zl z=o?ej@0gknhahbJDM&04bIPfK1g(&-(wz6X?JMB*v;}yg8q*~Gv9qHMbAlI*6<+(D zG~oKONx+2-6keomuNQ$;Jrl@Lyv40al;c^@GGFf9}WE=*7yJ^6rp<{AHX?Q^{V{+-5Qs7?U^ zFjXP~rpfj{FJ^#^u9z<)^Gt;F@d&rJq(4xZ5BL00WdM$ff)>reZlDILH*A zuJ!>4%}k2OIUaH-cj^Ub%r^#6RTC&@?cNfkjlI`fNj{|Sxz}b80_D;JUopF1z5`gP z?EM16NlJGWEB2wJS-@&q^$Q$?n}a$S=eVg7qj?`88zPXmQUfv{CX@7Y!ercmxOo#? 
zTn(C^Ga27zH(T$%WN2#&#IDV5fG=#k1T?1N>5Vu>Zjcc7YdV@yeZR^wlkRUSgA(Cm zfp-~@QOU#Ds2sZF-haMBAbt&OaI*ndJIy+eOUN+Eh;uUxTe0&fv zHJigcRBRN@s<-BRt}hFL(P{Hcj$}k=*qWq#4s`MlB6<1groCGQzoOBhJCA9_gxWhU zFJK_#nU>~|tcrA^LWU6q#+MLPBQGlO0(LNztftzcfLaFjf&uLJ5>`n!D8hQ*zIC)m zR;RuRODqLh2Xps_=-8X10Uw?~LL>mrwHLuz@}hudN~=X+%ZPVc9r&DD{3aMUOnic8 zTS7&TNbEgjOt|RO zbYbg|KY{&Y)6#Q5evN|4D$g@Og&{(N%}7ax4F!`k0qe2iaI5at1K?fNh5RD&Mg%cZ zj3xK~I8mP3q7nWlb)*B~h}$5mEzaFU8S9A(pv``k6Qm9X-sn~7q1l?MJ}~pQTWqI+ zY%DebYTNG``teP?S|@O}vODdeOV|PS+vX4_nrcoZaYsVg@v$L5*2A-lgzVzEDd4{A z)X_E!otNvS*m=K}IX_*>PaVcga~kW2ei$egluD7$hfF~MtQtT%P659ybEw2hl-=$G ztZWMO+R_S>l>u}nfD-5W-P73z55`sf0bIsu+R8R9BSq|<+exRSO05XD03!S<*P$Hr z0u4=_HptV~fi^ZCGUIRobLSNed&+tKphmAF#MwfHYmRbqWXtNzRGs305k|uVSrFlT zfchJmIO6-|k%L!9$Eqmr;zBy&m|6nWpCe-%k~laj`W&Qw zOJ1I&TFLPSX;;fV!A;K4spSMdO!i67$s?{hZ$_}|9JdV#j6kjw%ko*k0#K1%{~XZM2L~9OI~pV`ixY)t zgPv``Ij);usvg)SE~wpY+Sx;~lR{1F6TbEwuIt>oLYQQ~^T_M9zQ9SoyvoI%F>D)@Y_k)r# z1WXa|_Q4L=+cN=Raarazm#20$zrhnXU;*?VVeo+T4%pb&ABr(yEPGJWSc>r46|Oi) zCoMD~N8N(Ho%KsiChd?|A9IBg%_@QI_?>^+Y|u#Ohc9JRjvr9d{9H6=)L8f-*JVDVafbf?b)S( z+^kf=uG9+LG4kV^M|bQ|pI#g}5%sD-PAWNo$Cpw(4PUX~TjJvbh82QRf&5EdD+@jL z6a~TJeL0qk@rktt^kF}FmN-g-)SSo*33w&Og7{aY?az#N z8WcUjqCsAOc4j_IHy5MUJim`v+86NBGs1?yuTxQvrQpI;E|yc?(m4WnPza`g>O{H6 ztMMY&;ns%pLT2K9mXS&;@p~Qa4cHeiokNP)0=@5sLDNQ2`^?I}<>f>n~x?_F8*)JN9(Qztxu zyBauY+`~Wf9D$``PX`SiTKw&%)oUAlG2%}ntiFRPH0(|Twqg- zaljk8ZK@K|O8uIqj38d3O@<8Z6&*HYOb4HHe|R)*pYuehC*t_)c!Js^Zhu@7{ui}E z5pe9=_7sWWbF(+3Wt)K)PPQKorHXkP+za-qHBd(l$*CA=o$L^Mf@CaFjZ|@FgG(N| zQh2CD5eowB5>WsniFB5ZYh*iDI z8Dy8-v(+nU9j#=c8;zihSWNtU04g6XNMtH&DFUXGethH*s>yw>l}dI(as!G4qr-fA z?gH~qy%!EzU3|{Es!tAZsZN)1yGBNvUY|L!ZNzmNKPErWT*~6S8%OQn;GbhkMafp+ zL}BGE%(z0v{uJiI3|u#_aL3l;@XS>?G6v|_xPSxF3oE>N6IuEd5t$WXGUjPbYf>v1 z1}yjvbIO-hNujOsFQ^BoFI(K%@Orq~OUSr%tM-7T?07jw5w^cv^Z7z~16@|Ene9bF zNRBvlBpdDn@+uwJ9(5883Uv-(NBaU>+vq5Na5&!0<~G#)1e6SThH}!P;0h)-3;IPh{+)8Rbqzb5+TT(OC zi9tg{5EDU;CqXdA=`K?`5*H(-L5}7nr;s3FV^f7;;GU$*9NwF9bKWyx4Rb13eRrp- z!L1F0IOic@h<9uSqHQ?8_(z$K9n=AsQ5OiXI`V*43I5CbGNQt)k~6Pi2IUrAws$a$ zg(R4V@SMjmy_k&X*In7KwkE606Rg+bBqgK8?*uu45P;7Ndrg2a=ktuuAzrTUC(H~y zvxvS7ME#uHeX#@KXaQ(!G{ZhIC=$g~anFu^5x&%`p|s(Ii(zbN9)Lre>riDyR@ z=+E%%lMyjhTpxUh;cA_D(HJRyC#E?Ri)+9zMkwuS2e{BW`y*W2F!oDVQ7-dhqueAT zMSLw{N{Z&=td7@TT7ZQhMoNhQDG1ZJ)*(+ID}ONq-I7NE1#Wk(GA=5xV$)~7+TLeel*#7x9i`zGT_W`NgX2&hTcA1U*59B`4>4lJ zv68M*04ZQj8Xj9roXsQjD#KFL=w_*kVXJX7=jU%2!YpU}Phm@UjCTN*1RT9jG=NUSb_!tivPJLx)tfVe`R`su z=*w>8PdZMLS}R(g$r}tSmhe>PVG*+;I5Z09zeNA1$xGyyO$pLyWWQJA24|qrxokp1v__<{+R_=>{|W=L9+DoO!$&hJTj$20pu4cv!Mb z2~kLl2_ElQ>i5`+@4Qw@o!V?ocPT1yLL2X3+|=10L$p%sv`}Le!CV2ZF`cm1bRhIV zjk4i>o#Mu}O4%GRVy31hA3+Km7N;%~=D%l5W}yWItMRTz4) zzF`CbJP}1k_Ir&C+Ee!cHuW8(P8O*~h9h3WhREJboCrBUC#R=QG!4X`dGuO5 z5{;6*%b4U{9^j5smL-_XpKE8Y(cuYTcb4D{RXWi_-ov*y-U0KYY$n%vJ&4};8KP!MbHCO={s=ghKh>=nu94b6d z7dcx1Wo)}1YnhbS`bxyh7mQ}+cVdz|ob}dzG!c1J9w^RqANEnb^#;wzVzfo5jNubD z7xph49z9(KTZhX%yA;RR zJrlbYR&nZ}a3s9)iAJ}^^d6p&`*e7Bl+wwJuHEjQ^*?4Yp(n=_x#qkRl_f(LBI;b| zw=e!?SL@?h!n6dI#>vL5PQ<8OttNv99}&Y;uO{dZDR#H;|>) zkVSb{+l?_Nu$t`mU|I^hL49qNt6V!uI(U;zSp}ICKfk*!2-Y9i6?GE!jD}eP!Ozcy zDkUzuZs)rzt`REVn_cs+JBXt8N6$owj0$}aC#CNxCA&%8+jnTIcfLjYq{{1 zXZO49{Mr)BZ3go_-F9=2X$M#ACns^V&cP)W#NmZ7$xjxN8Q9^vhddWeGxk5xsBO3p zkTED~*opQnA0^WVoFbshx8;3)hJR~Wz$7hLKqpU@b!wZ(KcaSo1ZYYvDWAElMq*kg z6VzL~F>O3cLCLQr-8+S?-%KfzW)&t@llJwC@I7^0`bjbzP%aE}j6q!P7;WcsH~eWP z`5hOxHx2nKMGK)PrFds}Vw#XwJ^$D1PmiRc0(ik$Z~;@n65$S?2jOyMsen9-{?x;C zH?RrJxIGgRIsQm9*8BMD@n@$g5|@u3FP2zl?ZwARV&72BdIxd!jefa!({rl}4sS(} zfA`8vNQ&N{Yijhf3)?uAn%}P1&0Fic=7v9==;dd{jGLuv(#Ch^c9WcU654~F2lMg; 
z&HNZpvAtEFro*y3yXttE~DMe2ifal=?oh|EYlwy z?V~bYTA4st!eGCLELk)TLXSkdgi)n18!9Cmme30$8+T^ay7Isp7K8`#h|4yX)fC#v zVY-O8A>cxp0__4c61!P8iuJ3<#E&E9{S>^cfFC@8Ft6N75O)8+1Pp zK3m^k5yd1R)1g3{fhbT^XHC0&Aqq;$)Fw)eTu@0|PmKhLY@#q$!@+AC&e&CGRO-x#7%awv^@ zT$iPU=?Zr(eQd`-q|2E}JBk+nin{s>Z|4d({O1ZEQEjCA%DYY<998REgPUwR_bB$= zO^TxifUxG=@m$;8jkp-}p{KTn%R@d~HzYZn1wkk2XAJs9Y!pQujBz`|m7!Fg!Luio z76IZX6s(7Q{Z3!RW)HP)HZC_Vk(aZgDhOKGtqR+p4yrjx=18l}0*`buuuI9_!^W!C zIXB!RufV_W`(YgBZEzmD{d>(v?Cm*$NH+SPWf=}T`xW^e4mA9$Jz{bW6O@yJEtQ>m zycre0E0C=bX+UyPI}4YcMOf)dyjtV>=|p~9ye`y#0PnHV6&xa^>5CSM(aGmY(pkLM zB=ppoMNq1&j1nBP2>Kkfx01LouERNrpDcWT2!}AprGrwV@?ytS0r9%;Wm>XmSYaQ4 z_&0F%SwK`bWzle@85iuoZ@m*O@trmXiKY?X&@Lb{*QN)`Ezd}Ga$ z8&5z2vk5X%T!g6!_U|yd&E81#G2HX{ij9evc)E`4k7$W_I3+}%`F_0VU4BZG*WctN zs!KE~UPWfnkq|!`K*W-OD$AiGR{u5oxLmEGk)C2+gM`~!t+voBVAh7A$}JE+ zm@=>5X3n$i-psM9L&k#L&!K1bq|=YHq0!7GdC(pz@qLc3 z_xLCeKyOkV7sJ>_a_D`&;*7+J)upW9$s`e%Wy5w8 zphdV*r9nNH+>`EpFo;Fe1<@BPNzmZHE(lsG4jEn{BOckNNf!1OCB)@z2MplWLz~3J zjFsP$44|YBi#7~X4LiCEykJ<~f80Q`}XYRPxp0@*;ggzW(4KhN+31rDL%>*K1P z7v)20k)>i&|2q4U5RM}}1rveeZHjr(lfRl^6&FIAdj~YRi>i%kK&f-;r3o{}F9zlMzae=>J_4`*1 z8UHW!cu<=i5pF8a5UOe?n`myWwIA=_nNsO@r2nYZ_xCXWyAK%6Aj#w*>Z(LT;i7N3 zMa~SQK;m=n+%e0P0b^IaTQ$4z<-?%-A!W5<$#deWQf7T##GA}`p}j(1krRTJ>5 z2@56PW5fS@ApHCE2|4j`s6spD8RUPDeFEEUj@`JAlKkdrV zah4o((#-$YH-C@%Vqr)(iQA|3&1@W0Fv`F*(#Jv#-Cm{!1QOl6n&F+Mzx z1W1I7Rxo7wa(-s!R+$V|4C9hsg&A~!kUG3gk2qpXtt1<(p9CI=>!ia9a~_~l*nDvX z(o)rcbNOlh?kjf;P}c^IX`1^6yqLoGwN*eYHTWYA7G8~e`9CVg232Md0M%I+&DfL2em-*dsOtSMD_6#1Dhl)MjJ>!dtojp z=B$hepA0?k;S_8KW-sPaat8u$<$frW~lh!R$xTa`C6Vkz&l#HY*TglMrh?K24;1%hzG73 z_Yl%g%VyLvOU&m$`q>W{p~zAR)dq3Dyjvq^w}#2AXI_xNcn#*9k3i`LxI}qwKr1(( zK)VU1qGZ+6V-9=eeIS+Y%tn|6$~uOxmSJU@)@F;Z@94(Og?|Il>}ldqqWfO#vpnQ%6KaRmw` z7cYdeWUl!q9pevq5#DpZ$^v}gCtd69H%o^wI`P3JsF-iYaAC+Ku^H}x%1$+?cy%AN z-}{q*tc(coFlLiQSiq@U8t=81LVC^!&ncyxw+^Gt5HRgg++WI_GAk4~v|U-QfAIyy zZk653+H$|)J6TlIkSIzz(?q+^dbI;rnd1_-a$?u<-^vUZ7?szh&w&bf^B|-0?pRJp zByaDt(MPfT$Ne<>`YYET(tflZMLSA6a`;|4-2gJ~?m3h4x*uqlfI029IkV9D|$ubPs=Z7jf>KYxZ=kZ}z=%0kp;6 zRscxgdggLa7{0b^eBT5kOP49l!fy8jqJSO*Dmn|G_W0PIcZLIe+A-r(Lh@6W@tMFl zy!Zv&49nE76H;>tHPmOH#UgAofX;K1SZeu+4-a5>*Rz?4la$%tdT#>xCzr#f=3nc! 
zE5Hu3eb@hL`5hCQ4Wqz}(}>9)a&OAzVZgmDDAk7_&6j8v3GNqIDQMO<4-D7PK$x+> zdNa9w3aehK7e*)bC=~EB^&}KJ=bEwQirs$4bg1l#NSNS3oH|0IV3-E=OO|*F8qTfjduDcC>0Nv0RGbydHQn5Z2VDjpr zmHAr&l3Om=$3-*_K9bSlCD`|oLEoL$7G_^xs&2CrI=|lZ~DjK^)(Hs)gExll#j=?Gxhve%jjr}xWWY0#5 z(BS6uGT4IYCJu^* z!}BBSYRD>G%K}v4Jtm$p2m~#GLFP%Y81=GiyV!XP@~)rD$REyr}`~OW4Gbvk%@NcF5k&Xi~L(2 z-|(YP9<1$W4A=wh&*60ZU?es3VH2F&Jl&!qw@qpr+Ttt4{PQdNS&!l}=pZs0^a9n(!b6wbrb(!0uArL%(sLBBb(!3T zrp9nOp?y^>Xwbo=QO+~xXVAG$s1&9N_iiNQtcQ zC{78oGZWlrrN(z3o4=^KndU>eG4n@<0G4Qzv|T`5Lop@FrFQmN(bCuXBG;bC2Yn?3 zCi#;q#6>*|MH>^G6dNg{2_v)lvcco%C6(!bORYGo%vC3ySbD{^X5FL@r*QQFjo+P-t3bY}H^XlV@>g3g z9efB#niS$L{iw&gGo%B|vgDGt_@Bo4K6p|eaxYv>l(xWQ*X$<((t_`!0+V~te8d>v zo6Se+2$NH4=cl51m*Ya_yATm!o~JB3M&TA}mh`KKZfZWYDyQRadk(>{8F zNYLXr!&%*0uGR%n>>^h5NMom2KU?K3e)%3>OS_-#FYponiikKQGI(3Uv8@W@GZ3>r zf^A-tPLf1BXbbJlQb~F>OcZEw9>N)cln0ym<1i7?9p;B><3;f=zZ!5V6Gm{*HWZo( zVxY(qH71_|oEhNl86Q&5aFIhE;=U8s-0}Ej3>KZ^zh6fD&<9G4iBMn=L_I$2mam~C zs+cO^YI*9txp(~IkPezEC*A%PZ1C=+^)k>HG6u!q{q3#=SangqjO6^ZrVS2C^K#^j z&J;K$Mm`i9If?4uW-5;VHDvjTH~3{&-NH<%F_juKk{Tly*tMOkXF4EHZQg9RM`VUH zYC1N2MWw$RVKg6q zidCXnmtF+I93#w;w)sG(Vc8#edgwKK?5AVWYOYlHwO}eQ0i|*qg4#=f&~im9nvn;< zz5N)|v(3YnXUQJTy`bKs_7iC}FWGa)|7SGzebPl5Ln9Tf4%+*z-*MB_&%Va0NTA@e zVv42rZ;$8E0P#haKK5~$^822r{@ARZ%dfU%;)+Jzhx`l|8ZnQnYCMjMYc!AhCxIr4JMAA)A!g(pRp1roXZ6U#}o7I2gB$ zwflei#h3NG_@>aiS2B7MxG(U-pJ>grRIHzZ&1D@)(J%87P8B}8*=*<3k9i5{PkSpP z#CfpAicH7xJ0-ck;`+KV4;ge{p%EkV`NtXa9)oR)Kyd#=d<>aTlR2sWn342q5y`97 zpht@wES!b{$sIBdjT`u$T+;mRTUagh@b=Q7Cv5Pu()^w-%T^ivn!5wkZ{2FhMYUPN zpq_g?VT(_Dk2K_k-)<~#Og{w;{g(2`4{{)Y%?@=o-E@|=DR;s6QqXXM01q$Q!J@^q z>!~yPB@Fe>0it9B`SNkwUy#IfK10Q?ky+niolcv-WODw2-PjS^hRg}`1F z>`s=btw9XdCQxQ?!K+y-Z zRFszZ(W7^xK4C#;KzJg;=>T;kD?KT$kNxL{=}j_PR@O{7Itl)FVPtd(28+5t{$EV_@;O$gY5P>(4|^Q%%{c_jw@IH_@15|rHJ zkT+c*YblId=;1X&iICw~(!bMCYE{3rANOMVEy_HDKfM<6JtSFs$9YaH_d?(IFez`L zph+U3^kOAoCHY4RJ?YTT$FU$-f&v%aN)v~tg%iK7-rn+Xzs_ZPWdGYdf*yF@6t#*( zSdx4ieH8hIHOo%St~udbe5`92@zby-$1-;Gg_SwtqM? *LOOG2n@?_aFV{#fi= zB@&fm?aYrLQa1Jr1q<&O>NQhkI-kgfv+8NQ&0<3(m+m^pwe$ox8!pO?%1aGT-g2h; zqj$=7D7s141##O+%dI>{3gx2%1w9}i89&!Sl0iUnnJ3U3)c;ib$Tk8<r`s_IZ9A4tcsugYj^WM9=V?l-r_g8 z*D4&%r2I$Y_O=WVY)n^}O4YYJu81zwKofwDhfkgVo%hXDk?4!*p*5I76WGgF1N8v? 
zp~=m}{MP_`I}5jofCBXw4Gn>Bq+ zL{z9V-|SfqOAORmjb_ejLife~BZs0l^Rk&eYYPAnU56Yi4k&I{A30=RgBh7j_>(>8 zYDr2pkD)cd%H9XISBc#F>my0rtlj!*fzl zPz|^+{?5H9&$szCf7Yq|vJ7%Zk+^Dr-c*7aRw>WGl5&!>^EoG}iYo2Wbh8m9DYqDCox z(p{P|!tF+aXx|q@#%no5=mT)HOGmKW%QWz{ z>l20HMSb^MH6Z4sk9BecH z@+(V~>lcMdMxxPBTIE3qk0EZP_$+8PM;4d)Q95|jUb;iESH~6rI;|_K%!JP>&M~|} z1XmInDjtp$H%G&D5ovr&t=9!C9`-*Wlr`-zTUn}v?}0pq+h)P(Gui|gCU_~8*M28i z0&vuIe_XK{HZ@dP)L4J;UhRO(3(q`j0eWZ$q83=M$YOy%-d9-22U~*n0EoMmD`=&{ zG-ML%S~k9u*7enhz!o7Z8RKFjP&D6zK`ns3gzHFoa*)sL2#nUCS7ZGhq6@CVkveSb z^*yXC-7&H}sU5~(@Z03daz;A=KYG#kpqeZy!1*g(65&<}FoEJnyl@P-No!8TCyXK$ z0x1j6m=A=Dn+EcOXz^H#e2{?;x=P>F6AQ<~GjO=~KoXKzH_js-94eP3oAdiaJFGhah9+db<82 zcddG!>^Uq9+YuW{Fc@;qM6-c2+kzRw3n@R`Vz)tQ7)7AY6L9(78%9YG3j07{888Qq zfyP^qvuPvq-p+Y3-;rTK9KmBd-Tf`uPz1E)Hgb*;y~UO@ldaqCNq=QiV)%j3dnhsZ?Tz_Vi>3_B=`6t(MRw4-kq2tGuK zl-eR`M?*Qh=750$43O_~R!MI2N9?Mo6Xrf2O)4;9D0RP;b+>1AKsx;F*%R$DH&?_J z5u1DY(<=t9YcLQR1a&E9L{B+37sr5;BlF2PAX=;wnoX0@UcXFeG!HBTY=?niR^fml zj=otSzz3Zo2T0%fXDlh(PZpx z-hIEZvzg37z5NNvx9j5)e1b(jn1ivCv7|>$E7#Ad)IDb;)*g3CQgQm6+tp)2^&q&J zPcXNB_8nxSf#A0pERF-c*K@}?ijY7I&`z@uO9;C1#{{HtxJ9aElJ~ZC2*`smI__`dMjH^IRFxzz0WeqE=y@oM_w zVIR?!5KBIF+_hZAnCsneB+jsY`npLke(Oc;j1kF^E9Y(xJolG#Cz3BM{lF4{25VCW z*n;kCaydL=`bL^+$7V%xmGNt|>xh7MI7foJCIcD1(LZ*M{yut3cTMweEr9PnXFvvy z{`vwiTG)dbiyeF)%}D+hXrSL&X>S+8tR6xN<-tZ8z5BWSbcX#m*>@p|e0|5(Zy67{ zHkZGx3bd7eB8Y;XvaeW8_jqR765=F_d!oS==592=X-*=A-eF5S#*lAu9eJ0nm?0My z9gwz*W%Bb?3KNP4FkVOcMjeob9%8eEG+HOpkgv$?0}<#;_S!e(>xfFy zi)R)Pkt<{zdg9-4Hye_}T)Cad?;-_NJZ`-@AWysjXUUbiJ96JLBa_lFN)+=bs6Fsz z{Z8jYlpGrq#X6nQ&55(Im|JEs9eiKhG5@~3X z`j@)P#p_KDJrZu5144q(Ob|myY`TnOjb+O=jBT7iQe>i)W2^jagT>2M%6yc>6p~PQjnwT`}InpF$X~jz8P`fnaf8M zvf5}R{EPm3#4L3f2R8x;a6TV1&RzAKSbu~`q#K(>L{&PX`Z6y;QL?;PJ^+&EoYj(Z z@@lVE!nZrnSD$!w-oHnOeHW4#a)*7ihpGJNItdHo1W$R;lL3lk)7LDk8F72IKVd7Z$98yAvnU-wey-f74Lc3N7i5XUALjmyCmeq|UQO;G=3sG>EJmDkSxNfIrXLBG|z zmdp8U>vE7M4)fzfV1o91VxN6KJ3>Ht>eqS}@D&NhY4#6rx+&8qiw*CYrLXp{sgM{hc*w|Bcb zaECnccTvfGlv@Pex-$B;hoJHXkllG*$(fl%={q#`EXCeLS!_RZEtZpPm!#x?U+#-( z>$I4?^~})jsO@cS#|u933x!&8H(9q3ueS~!4IIs8iuf1=lF0JW(G1ig*1Uf4)^)#A zu#wlyNSm|Vlu=1&73D|h23p}d(S*qo-f9(E%77Pqo{6qK$ijl)f+sRUFb*{XlrDQj zSfTTB74oV0o?buBvh&;CU2nQ~gtZW*$jClak{(i)U!#uwK=hSy=nLDqx2$fyC3ayM z8xem%2yL`97CN}z6cYP3URY12wO@C(%GAz{#W`NSCQ{;?%eL1ApmNp6)`1>!0Ac+c zJfrEPk|TudOdVXbcz&?d94$aSH%%F4|L8NJywVc3BHo7tb=n@3ee*#ZW28hO_{W1R z8mY2{aT7Hzi1?bY6hrKos>ybm{<_1P3 zdM^|%M+Azu@#tTRCu~B;b)X+qPrgw?G0?$IL_szX?J*P4!=B>esp&^4yqn@1-eHHF z?NUbkr+_NCobuXR>FU5)lv17R*_u0Tfa<7QV+h54pZrNaaESBw?KZ%WZC_v5Uos>V zISVIoa9WZ4kQ0WlsUHXAchOT6-#I=yRDHtq&hcS;L1v2(=A;p1q4Pv^f@|0bdL@No zvu<=Pb530p$Sv{G7soDX)1jfNw*c$e(q@WTd7WAuJNvX^&S&hmXD$ulM6 zAWC009>m+#q{f!|x^+`s0MJzfvd<`Orb81btJ3v($df3yk)<>WO++@V+K z5&;Iw($_{l$5*rhMOW8eF-*SlNCW0TFWGYe1~|Zle>uqFUGlst>`hYP74kc>C(wBd zzBW4L!BN}H5&rj67YU;AQx~`_E|nX7bh|3tuU#tTPgJjsTq~dHg{Ide{+_gmLgizd zYUs%Sk$Lqk_yMt<-ngVtOeN zBj-{wTSEGg)M++(Y@=8^gvW+t6l5bC(c{g&aCrI_5e`LYu}rZa>$QbQoD7+Kg6PRq zvfSOt`)+;Z=9H(HcuAO>O`Ka!tijiK@+s1CU84Q=sqc4GHb;{B z#YddTwTut)zzSNfeUC;*WJj@z8NV@{Hgh8A9}9hV(|YLlG<7oGb!}XZf-G9G(*?w` z_(-b-140{uw@*ncX@;UhFkG%$lSy9xd_vjzRtdyA8&52BJPQj=h{$O;9P#N(kOqB% zuby+ead&Z;cdn{7`Z~9ee_cIpY{c^kpzy zmI;XZ$(uY8Y?}NkDgZ?H(U%ny$qx*7P>i2SHC3Y>Ui`_uhE8BZ{G5t78XH~F?qk9d z+q=WQZ1H?TO(+r?xlM>e5;S0UmL25eOK^Nd|8v_}{%X-#X%(A=Ft4w=HGcQ#U2$t; zn2NUuPZa#!+q5f?9!N|t-?$i^5Yz=aFV^|yU9e#VI~cEx3le+A{IN8$FPYZ>4fA-cBpI_ zk4AU+wF!%rXW)|y4m!Om!$UNvUhT8Egk0mjj!>Fout5`+l!f5bBKacc+J9f$0Yoon zv+G7$DI{|UNf*+e66|h2b36rC?PVBbV$8vzvOV8Zz>aa2%sL+cpoJqzxPrQv01|# zB)m!=N-Pg?B@a`|_d1y-^IZIX+A5LM09!2^m<;}eIM7)WH8#@|gF76PA 
z*{mr>eIUwgx37Lr+y+U|5}v0v(skj~M92#HiaxxHjC;VhSMchIcJYx$@y#autZ%gD zb-xoD9Z3=Q3%i*zL)rDj_4fju%zo(~24{!dmf`a@k2?^%ZYS-c+N3)dD}NiJaJ{y? zjUz z<4s)qCB^?a1d`DJu@)br!Sx@AwLe$Hg1>@cgha=uTmKh=M;8nVk2pUWc{Km$ir{7d zJN1ub^cxAvWPGa4M2|mrL6FH_9m=Ke4?b^qAjg4=o{SSO(OJeGn;jDR(UkKqKxxoh z331Ef$CKZ@)&6?Lzd^I$>BPMtC}p~6E6?Bmom7YY^A%;q{z7UHmCj9cb?>2km zZO$KTwcJ69*& za@AU|>Cq?6>nBk%uIk}+TuH0;DFHSym~d=ENuL*>ibq=o?MDMYld<{<;1ev%Vwag4 z$V7@^0A~e6I6rfR!qTZR6j+Tv!tK264m(YFJwXOLG3vZG9i48%o2S|U3yH0l;Sv7N zjbmT$m_r!0P@O#N|KU3od6r0I>cO`_@TSBRe^2E&}@$F3MR%+>ABYZuf`;@j7K z7Ahq#)eskwq>M!BW*aIn9HMUc$V>wXMpZPHXfgkt?hpEf(Fk%H&9vO6KFCZ0#}P?4 zc7JnDQNdI#I?jAezh0sI-ebHgXN8mo%MqJx-z-K#`wSWTjiJtU@C2JT?I{2ap~7CI zw0&xR09JEAK0OmU;aiAVx>>JFB<>^QAnt2Ga92Z`dJrUy>Ix;WeejX(GdBDY{)e_N z7F)i5kiBZcu=f!5%lUv5qyxrmFSUlnjn6q*#f+8q4D+?+MAFf4BgtKvf0(!mmVDji z2M-R0xY7w)Bw9V4YsGxSQ~7Oeq(nsc>)u3Jv6A=i0fe!K`tYKD)4s~k$HXx;bXm89 z^N1(*8QUZi6&6a+bJ&AObrwtNN>k2#nOLD$A~M!$50hBsYrl#{rx0BfblDlAlsZ$XvknKC;=n$j_SS*==KO4>xjlfGecKriT9Xl9NnYD0 zsZ~P43K9gG*>BgRO)i%2&bvvTL|;8l7>E1y&V6U9?qEN$1FUU=Nks6YEww02`tKfT zxHO9S)b;NnHYp(G2R$J8plSKSiqNcdHFtCI03iP^kzrk*M;~z;1dhl&a$W+T)Iq}pUYI68P)>*qp)Yzr^MIV9)loT9C7p$8v z=x4;~KZ`SjiIYM!p84L<{ptTSTgb#X;efQ@Ol$nBI}p!)^f&@whmWMFnw9{-h~MeS zDPMU88HF}Cz`mY_YbjXgV-=GeX2(2!1TTkfZGU^>b{Gm&C-*VhZ3$xb;O)(5d>PS| z3Uo!--&0e?DMKc7ldJ7w&nFzLa09-*APX8)-PYPzJ*3Fge0eWKa9Unfp0OsK4Zu3z zh_yInk_vntxRRQ_3}yvI?lv>Q(;Q&~m6zmFtjtu9 z8uq6-BO01v;>c9&03Kkaat-hb%~}^gEC+y^G&hi&fSD!?P}>URbq@HI)l`M{YnWed z7kR-TkDyY%9diN1bhZ~5nbBdNQnbIulP7;ZrKx+454`j;X2|ZDsmy;>_S7Le)1Pe} z+pY@7F)P7-A0*peM022mZd;&NN?yIi+Wl5)CE|Vh#)x7OLvY2j4*XHAYa|9TjVU>3xuE_|Ju+DjZAzpCnRl-ePpm z<}i0CA8^c&{r74nUH~gZhI$*7a48g5Pd)$dOQ@~Emd6~7D0P<9FAIrkTO}p`Zl*t| z@a%3Zzy^~fD}2EB|MaF8;3B=H|M)H7yW{<7$Y!&=oaql_HpplH9v*++1x%`7DxNY1 zVE8{{^S`0!5injts9=R&APE$7$i@FWzTTh5|NRbjmZCD?5&k_!VAr9Gfi2=Y9karb z$Kd%!{-+ls{`8`t(*I*G%6_sDp39xJ@ zCfc<>9y&Wx=@H>|Q6nLFrIyyPbt~~r*Hu`k-oXORMXJzwFsAjqPj1uOOY`>!Z;ApT z|KLDc?{l+aF7<`rebxGmk6fOv>P>#wdwr`O6|rw~MofHy_t6}R7Djx(%{97rnQ(p9 zS}(QB6D_g62274IWQ>l5w-=+6qGJe&);@bLKbcHwDLg zNAqRoU^B;K$w3xvcGZ#1z+?&GrjIQ0G$YvTnXFo@mtJSTAl$`}q|8EcqGJ2*C5o-; zSFZ58H(yqcE>zqUs~DlWmswrY|Nd9HwPKsA#Qk<1Z>V?<7Go`kuFXE{^Y#z(evK8U zZgwcuKE`D_{yg-N1q{sfWIdO^AuEv;A~eHh2Sc~GXA&jWGt8A4J(ord#Bzd}0`T9s zBU7{s{vvp7M2eSiyxCa5^{rYuf*+Uv=b_QhG$xGqcGo!&}g(vKn3%HtQy`Gw$; zbI$P*c5yEYd58%Q2yvB=td;OJ9gA82Ye2mZhQ0E_M1=yJ#%PKVcxBhupKb7AqwYB~ zJdU)M&FF3;m?`8R1=$Rl2@;fk`j3f00UHjPTCs9w$`W+WakTip?~2+5?thP1*xiEG zU~x2oY%zbZDDn5Q>KVWYHoA5SUXzS z);_}PFlXC*aZo)yOfs9iF38ZVv=#WEa-q#UD>}fwQHFg}uinPmuZgevAS2DXOp!zK ze9dpE#yz(6_?lU}iqDB;wmFOkN@@d5$UQ$6ZFL#O(64u{A5L$dJKq;FA)Du}Gskpv zM6cm}Hg`0rmf@xmvgRzY4j6WiUD(#CJoRcv$~%z%xOPdcyPxu?{z_OrLy zN`HlX*iQU#^L*P?aqs(4r2Ta8aKY8oE1NSrc7a&o_U_EAmVt1J#ewTnJJWg*_L)uA zc{6}t9MycHaKc;O-aJiC)IPQB70^m1^69gA#l7#1TiWS}o_?)|6;$+jrk-D4-pT$P z@{xZ^hp8`>)26Cv!fR@0mJQ53Md~`1(eoQi zp8LKNx$wcF|9uf3c7FQg$)|wPO^@!ey7TQwMf>Z;=sHE)1dFk`1&XrzfMu~6{HH4p zQk1ucGu0_ezsC%G95=leP+z{4M@poDclSqBI5j~{a;WvRw5upGGBlkrTAT?-v$oI3 zQQt!zCM4=%FfwwWsf=@+RnutEL1SpCoHq(^%&=c+TPbU8$vWF!oD!^cv_RjTsnq@@ zk=E)|iheNjWoKIXYk8lR;rY!|6}|6-d&^6yvj|0J6%HAxI-_OEpUEq{&!TyzizUc* zzkP;|udyRjpsUiHB*(_;Rq7vMgTWGO=%vTUE@c17a5p^8tFGe0%D#rkm9P1to(Nwn z_al#w< zw&8^u98sIs_Liq}U-@0M)j{Vv$o))9c#8E0o7Tw64E0m@SBvw0ZPNKN82S6vl0m5U z3arkm@P~KiDkMJJuNR6OWY`Bg+9t(Bha|n<^Wg1U-DLIIJAX~UAHAn;;wDS6FsEm> zNao>0;^RM|uux@RUTdRH5rD3MUv9viuD3@wSU*Q*Qhen{DA1CDey}p3C($rjt%%!v zQPMNrs%g%Lu{$+BPdoeQrUPk2x1qUF%h+pbr*t zPbGs}<)rF%c#g8NFr``lVsI4}!>qE~IcP`|(YwoX)QNOnRqePm0OKZ{oVxi#oXZKL-Rn~(J^^+GtprhC$vXWW47h;)|&Pu_{hMm 
[... base85-encoded binary data for docs/images/ds2offlineModel.png omitted ...]

literal 0
HcmV?d00001

diff --git a/docs/images/ds2onlineModel.png b/docs/images/ds2onlineModel.png
new file mode 100644
index 0000000000000000000000000000000000000000..97a0e561961433d90db81f641a555f3612f7286e
GIT binary patch
literal 95445
[... base85-encoded binary data for docs/images/ds2onlineModel.png omitted ...]
z_w^w?5|AkY+L7r+CE`ja7Z5@hC_*wR^ncy~d4Hb}q%AGxYZtOXI`v9B|EwE`$NghK z;&vU>mgP#|7!lF$i~o`2+`s{w=H}1S@O{MsB#jNW`#)+y{=?UXR6KyJB3N@SWqtR@ zR_SP&VkoXCxxtR{e>d3;3dnWVRIhVigRTb+Uo|h({b%olLoOtU!r3QbZj3{a7eQl|)+UWLq_u*n5Ka9KAGQ)SQ zn^}NC8mpsx4IdR*U?j2S(xwLgu(5wBsO3_vw*RuZzO`q* zZ;CsA^zt?f>0SX;m0D$7>df^VvH?ir&)U~WwD3U#$MG8PuHRU?zk1C$d6(j}sOGHI z+w!VylwN)wX2?wo#q22T1rG{YFPcTn^%wP^NWPzp2d`?;l~!$CVW+)WFcT1k*>9uO zjuI>+x2BKlku})UznItwgi_F?(L^dP^8XmJ48}@rdE7 zXNgo3A@&l^^ES22;Y$@r8TUk(=rb6V@M0xT_c8Cwfq^$oN;|J2(fh(2Q&4>rj*Xv5 zIbUBaoP2A`#iRPZLE~g;<8h0D^`OI57yY~KLRD8XuQ&KUOP2EUo=ub*Z>=Mj)@r>} z%m2hweNr$g4W9+EAC~XrsVy@i%}aWcmbDn7%gNR6$6Y6){H4(qSX$>A|M76;AYf5~ zImsZ6s%@|S&qo^ud@LQI2v^@$hzen>RPQCaEDzXnLDX?~rC{4VH;`4;aMn z2E>9peLsnB$F*gw)LA}U>uq;nqmnq8-1i8nVDx`s&}5**9{4a6q-Wk56^3g5kRH-t zlt-i-V#%??2ULDibem8IKDLBy&_#dZuQ|&)?TW?wwp=a4piam9<4ba~3iTxS1i`F=2|YvS{*33&}^AWdsWVK3yp060rvF;5ie7X z^?Q!x#vSWl_6bk5SZrr&HBpG#J)iL=Gv|j&_^t;!Pqg}k4c+M$I$5?cQ(Jf8kbO2x_?K(zxZKA z|E47P`snN1CT&mGFO0hUKUCa(MOVZpM72#?NOP_Hu^a)GMM8yB0*yEO5Ge$H5LU zk}{|K?){PS;FBTu;eyMgv*0sg^gDR>e|U8k!06jdw`3y}F{OD@aZo zDsYwPi|N6g>S3mb!M$VX=C7G~e>m@lr_jZZAI!ItGN_H;Q)L^)aMItGTxop>#aF0m zYdKk-nARKloKBdB-}*u0+G*kKn?|QkI)0Y&tVll!J-Q-K?!2N3Oq1kPVBfl8TZMZI z8ihn>RqBlhf6G1l^;Unb=Cc(}7YpiJy54u64e?%(x<{#_x0#oDm&>MnouRMOn|=F= zw}|E|r`+JJ7S&--1&$(vF>}0uVRT`juC@aqH&Fg5kIt%o)iXLnrMRd0o^jNk#x`9& zfs62>uV9z3kfYU&ValJISmB2;%VCseYf}H)+5fiI*?DSEWiT>_diC58)Q zg7ML`utcarBJww`_D>@8149&!WRl#tE+xbTq1Jbj*uHFDK-sp_sj)?+fKoMlxc5k+ z>?gD&s$aA+I$|=KQ4Z&&5!_J*Z8d8HT&}|37-Xeyf`et>3QNk7ZMeWV=EDP zGZV9R&7egkw}k3ktGc(XetjCF%+jL`e(<=!=G40NAU^Fh&nYTC1G|PqTi}uTCAlS{ zp|!X)=VPes3dtdy^SK|(P$ECqp!}nAM%UNd@U1HD9*j2Ue1inhz2Fo@MtD0_Mh4(+ zDBAf0AO4ENex$ee%|@j#*xYd#_3|uNzK>0L!_uGp_JMB+iyRi_QsrFqJS*fg6F+b> zqg2Qgu{!0g_!(B4`;=-gs63Uvv>LwXNg3)J3TuN`;=4!vV|pGDTTik&Onkz=uMAyl zH$yIh|L%pFp1WIaW}Kso%BOUd(uKGOFCux98Nzndy@S^u2sqvwJ&oO#b`w((CNT@h zj?t#%(f=tv$5{pqch-wYJg=5y7Q@hAOAK$)UH?ObgwD)solJ`K_<+9{HFx9(ZZ&Hl z5(`XAM}f0~&(o~{r1dq~yB=jEx#!DS)77vV*c3e&K0ii6q^-|;TG){S1^FJ16gP$AdXJ=|Bk$#JxKm1xs`5x>Ig>-PZZ@~MlZ%~e@t*0Q9hW~Au^MyY zbl)`{C*t&|%^rTVOq?VkxF`c-8q0apRCCf1uI_|`!(jf-9&eJTNNQb<-F#eQwlXUq zDiZhL``tZ#YUpThX^Q^xLN?t0)c_YG(BosQ=r|%JcFk{^=yutYJ9l8&9 zjjk$7lxG!MV>S(T-%g4~WLU?U?Ifk|Bc`_ogs;$aJzZ6I=lBvED&4Dln!9u8^~H~> z(3iijNp@SDH!hZkr_;W(dY|0Hnqskwsd%QmH&)cRKkp}t^I~4vCWK(Te&_H`oGdqD zWq_+`r7m6u#jbtZder^Qk`?DHlS{>7UH`3vYT7^!ZA9i(4) z(O@z2TXiW(rkA1 zj@gD6QHpN!(FaYJ5)TaUtX}Fbc)^n9#t~yg$8Y;TX0m%#8SdM9q}G{;;*yt-v>E)C zKG;^r<&>#zpD~WmV*Nb%epb7{c+cv452qV3XFKMK-vxQ#p38Us1mR!D;puDGd#-6Y zf?4yLn%On(2D=rRl4RsTQt)0@#Td#$sZ5I5b?s!p2o1}XbbZE~F zSiD{1Zk!aF2mOw5|~2n_C8$j5&da9Lz_`| z@L?UDifTym|8@0EL82_dwqx7aW81cE?y+s{v2EM7ZQHhO8+UHpk9Xqris-1HuCAyk zuFPCXv*k&xv;E_lcDd@zZ`0bCB;PMVN{J{l1ny2d%>!CYezf}!#UwxX`m4UN)`Nw0oU8x+T)cC-_yna zEyKBli1Tt@rNe$(VK|dxlD$gkJyIV*Ryq)w2UtyOu|opTvE}rm2@7ifSU=pg^6ZK@Eyl8KyHS{Nl3)-O4xAHKL<;xU@kf^H=y<}>($1Y)?iRu?cc zI*3k#&;VZ_$|DpRw4n|pI&6LJAe_G$L42*elBB;u@qiza3FjeAmn2d+>B-TSCbpo~ ztnCl72$6K+T=Qw&bw(Rb^uYe@7M&^tdL-=K9#m~~^pe(eGWOU)de%KyzU(~waWf!| z=M;d!766W4O4T|AbglX(eCGtntKN_WmCDh9iLFCMxUB9juDC-;F_Nx~I`r8s+?ls0 zPOANqTzQbUDRX=Zf&v1qZ(ecGM9V;-QC=pj=jnoco8ql`L@{Yqx@w|riEaPKI`@e^ zXwRn$4Vt?Q!6bhoW3@bxA~?jns3G-yAjc##P8>&>LQQz-Kj04*CajR$ z7pVr?_o#WTYopGZqkHN=r`b9YC1b|i9Y8YCOe_Cgl_Frq1SeEI`B>H|$_xOeD4{7p6$*V-pXffy zDcR>d3e*8|y9GCtJq^ZDggceE`6J9Z%9b2+U5kusxD{6N(KgeUfgZ?l90zy<16H>l zI_`i|8J!%L{@)G!QKv9<$-xFQ$a?U&`eyc4fxhA->I@yE`MbIH`8YL*{iqmUCB9+A zQ zALxb_Sk343!_#JJtBsdn+N33jS9{1wz>a@qIH_&*?6LYaDg}{|8TEh%B*9llIQc~h>?V*WeMDOQo5W}(NJa#QAb&%V~6eR7ch=mU*%Kg`om@#|nx{KIkr`(TQm4rBQ2S1f+s; 
zj1eP>Ii(af(u#5+Pldtuf<=cSRqY15C?dT@CT&QhROXpBBrXUP^sSC5%C%Rvvz(P8R^Y(Bp2sNejk zun}(8^C5_~76}&alT)rSZ0FxhI_29UvO0MZ!Em2L;8{R zodP9jy`>b)8Z=Lf`gpb_wP@s8uUHqPcJ%D` zmphBFe11Neo9#+{JvCSWYB4hsvs)pl@sIb2x*2gmjjpza1wH#P$Sd!AT#o!yjZSsa zRG`R^qHx>MCQjB!9le+J5(<_=W-EXK#DHL<-aCT#F?cP{_bHZ$1|1?Yv~|``N-gAP}wN`ye{!vZa~D6R4Yc*xPns38ttygN#XhNS~Fa; z>sIp#Z|U-oU8O~Kl9+5HQ_;e7#)%)ItoR6{Rz%6Lwo~>l%Pc|(KI@j>h+qooGgGS`z(=#d3BrR5OPf2+)?i;A&Jr8XkSbycM^-H7vK^n%-@ z!WLT4N`3SQ=xzbs>ZBocw3b1qnV!1gl99g6Mn|#B`;{T4UFgcLA%c;2}uwA&_V#?pUM!|lpR z2;$DX1+~{|o$+`-_aK}Y&^1WkZIwq1e#`<-WI&VXH0SB_BLoX!zsz18s6!_RJYC)j zvqlW{*Sn2T*E*kkt=9iUL)cx^u70>aT9k!vdK}S%DT*wjc?+4Ei#kF8$^`lV7Y)f^ z4^C61Oji3gSvT&XoB{;BW@c~@#cL>3wkS>$F# zcEf$ZJ)LsM3H4;CFN3!)grjvPk6vYF3LC%NVGRE`X5}wwk@|<4NY=49^ZEFM-D{GD7lcmKF+L;Mn}%l0~T{dB!Uz(du?ot;?WZ>DZeAR$=-I&5l2c z1j@3QvaqpMt@Z);^=5y+Err@jOH6{}wrPD;d35TJi~tLrvb%rzrj(n(O@|4!EB`f4 z5FfH-iM6XlN?0ZPNHUAdf!nALpbKAZdW8zLicaOK`1DVmBt(W5)u{W~Kgsc+1g5)vMG%6K;7`PJfiN-(uf2BSYQ5S~}2c@CAL686__bBVbWW_&UV z#S#N+%PXTi@g~)oVy*I@)u+IyT$U*>Y@|ZLXH=h4POT@M(7OuE0IVWBEs@6C)SA7P z9#}1Q)6)mO9t3KEYs_Q0%gW%TYdg(KM4nSlx+szuMINe4w8mTbvO`RbVFnu^x)Q1{ zBQ)4EoG_0ayQ}4pctIAe&y0Dy)KsUN4AvA){mrb%+CC)%GT*P)WTv=AtgjU^n=TNB zeEPe>j3L`jri6>bal)Ip$@MVI5NA#$yY+2*h3K=^Vaq;~96j2I{3DDR&R58~uSZen zZ9RpPY+UlbI4EKS0tBG!3$%EAZR!{H*4i|RNTrS|_n@6T9f_07Z;tUMSU4szEtvLX zyYXu7_e@o*-XNlb*MZqgF08kdXc{_s_&%AC;SV2owJFoOoGXdJT9(m#8PKJ96-$p8 zFx}1!0cp5E*zU;p1Q-<@BpvTeF*Kt+X`LYYMt4oq06pefxsY{e3Y*0m)M)SfOvjZ=};esDOsF@bQ9hAB4 zm zaie?gn2a%aA@gSIMr&GMhAHjMVnq$4l`J#C@XMDiUX57kV-sD@tksw)KVM=22l0F~(i#M>n9vx8n={r0>xmVfevMEupi9pcOV} zI=Ta=9;u87nxu3-n7gD>^B(+-9b^SSc2}lstH9$o51SoN#8AS*2^Tw%IbaddC`u(r6f}2$^qbZ6 z^d}Zk!a|{)(x>1*+?$+iG=F`%+M_%s^FLps%Nlrh zy$~FsG%0cqU$Xkcg@vsL9B*i2*F#q~TEu|$V>7xTXqS#wu|!|TK^<8^s*#IrdtG^{ zX|U87ld;0CiP8>W2@I)z<9NE8WiYmmjv14g=JW`&g zI=h@ebfeo^3apDeOz74rzkCpXK$6B4Dnogg7G;TZndyDB)oP?s0A%+Z*T3r& z0bSY@%e%d@kUGOD6xkud3&BJB4Ay@Y2qji?n6rV&Ktzca=d11I_MJOzAM*Vod?OGt z_F}tRooDl^Hq5^wYEok})z-kl1|^<6=3NZNlFobwO=2`VxDZC=s?x6G!d`BG!}Sk)_GC4$*}=2;!{ zKg3D@yrd%EVA8cRQYca`J%JCNKs$lSPaSukyu@K3-!bE{$=Fk_e^wjm7zrweap~&l#$8$9gcSCp~-gb6W9i|edUqg*F&Ual#8RQEQN*74hGlr z?TygrKf6N-)Gzd14%!J3dqD%4?+5RXf zqeJ)lwfjk%k+PeuI+R6aIydbF>cABs!anHcOZ>4ntKVydRlJv<_w(# z&gpvzm27+A_q0T_Fh1`0IT187B>(fE7z9OfRNs(+O&Y5)C!H}!Cwg6hHjBp8qsAGU zj8(O_!Up_Zdlw261*$iQdE~hJHU*_zgkMUu46X$p`R*d)cW_3%3c0r`2;qal!>BDx zuqzmeC|VpTwj-+@19I6B1yD}K>@nwL`J>8+j7EZDvrwY_YpDhYGB>PE@AvdaQe7__ zAp$ec{S8S--FlUn#kfv_?n90#qpEFvqhZxlWwS=P0NY4UOH3}+sNog zaTFUz1)DvBpANTt23kr%`g2XT7~i4~B(~&Qe2UF(p%%1;Iu)*Xb)t9Uhu5SUTkCL# zDAzA_HG)K)S;JkE78Yte)1sOF)EHv)-3rHYSf_Tfy3fCHjbjo11?}Wtf;6Qg>e(@! 
zn(iL1)Gc-93QC{9uphWuY>xk6d<7!p^Silteef?VxTtgA8@}C*s&qTgD~Q9Apbwwv zEn54urB-SoHor1853&Rl8u>uex8Hvji@NIl{&QM|t@knz=?cC#bZ(Lo)o28xrOu3F za^8a9RU~272?0|+{X=EI_5HG9bzLW#1WZhSJ&X$fPXPi^I+no3zaetcGd3Y8ynIQfE0Yxl)jw=QRjP44(=}zkp zz_d`$h(+3xfY0IO`lXJkhXM*+&d{BiYl!oK&lAtr5)=cyn?(1YqQz zm6;lA({A8F^3XAT%Ih%4c^e2US#ZZmTXD|N7cb^+85H6XLtKs-Pu5__RqR($#={N| zp!g4$3*`x6zUjm=wuLpTex_o(mIwGU_jx}hc;Pl-ApBRiVO=%GeM3t#{taLAC#JC) zu)Z~8y~7b%9ZUb9xfWK?!z&(oV0@akEh}tiJy?m106-x#`U#DlU#XS7tWkX_qT|myw}E z_8P$w>zq!x1=K&aNZ*jRz>1(f-x5wY$P+6047=qU1+f;+w*Yq(NI*GqHpuxJUaqsY z#5j&aT2X)GO)V~WO)2O9=ogWqZa=DDzI|A;X(ahzJQNZld&a$`sf-p8Y!8JLH1Fr8 zh{i)p81o5f>tYHNu7ikCvgn(9`SZHa$>fw~?;oSWrgGHyy$S1B)TBP!67XZf8HRxq zyv6t>Z#YLzpN9f7cS4DV!+>KI*enbNtLb+dct?X}(3Bv}@ik($JY+BXP~n7la$}g+ zmID^qJRF)nicew5dY$}D&tyC(#<#C-z&F#+_CNwcli$p{HF&}bN^0Ke;v9M4q>gus zYj&;+uZnNf*qz-v{nCuVZ$J^KV~LRWkeMCjc~Ki{Sz{O*pAQ*`{WvuZV&6HB9_KI5 z7fh<+l^a~mxGBeKwPyH<_qCqIZCC)==!i5aedd4^A$y~-lKMu=4{|t$U}Y5X34*tF z9-+h51TlDoD;ty+{zWxuOwnaPjCm|_OT-A6Xy(jU=hFX233m5D2+V+nG^I}+qa@>% zU^Se66gC!xHA`4RTZ%sVu|n^Wm=DX}Osho5VIcM>?6i17Q~!R=R1iQ3D#u0x2pO67 zlB!>}yY_!4t+()$x{_7_Vn2-%)}<+sM2HJ1G4IJ^u(ynGqDn9LXLQBJ7zDLvvA+fr3+CrG%w38xyWKkM)OGi3*&2B*V-xZ0U-4SxWGW0NL5rXfaL* zzH~q}e~E_X(K8I9FlO_oy??Ht+fvgNHNKcdxm;Po6Zx>A1{K|39 zIhem8hQZ!V8k~*QR-Y}%f0wjeT$IeA2_{`r2V$c7R^PRSewDQFAqt&`OSOTaIgEl$ zlV8o8zb#VhXoAfDpet*0p*O}oU2uD~S9g^b{aUn6N_{&01Rd_rA#T*LxqZQV*2_*0%LPhEgGP)145>Y=h^D zFlD)|#VM=hWDIJmpNuw%%^vb6Wz)FM65^;LpUXXbHzkzCXZ_yar8MvpKN#QOzOA+F zN&?uUF$7cM<`+-#>FNw|o%vKcT@=-Y_s!z#P~9w3++AvNQCGr^q%PY0+27 z^)vx^OML_RDnDlQ2VLpanx!Yp4*Pjk0UXC+b+*x8}u1oH#JrVZ~_R-3resB&djh9B{)*uoZ2WKltf@1LNO%Sa8aJETvtJ| zB(0AU`U3j>pek5S*9lG@iWyDNcXaa~3IYisBOyygQk-VB87!E61q@~#xAhJ7M?LnA z(Q$V~34`R~N$yBT?`NDKwkUOEahhIxvtaLhi^l-}-s1$nk`Gv=VfuasyZfg*vLAc9 z%vTiz3YuU1O8}IIL;geQb2UuJ8N-3TK_GfIX+SatODx>t$UtTU;;BDEk=4>(pbs-) zu|oO)eLpkJYE-Ei>YadrQ?oL}ekw#fVqn*hpUlRTotVRiL{!P@+cW7Y)5*neA zw8Tg3?knFY%>ppuZM0t)C}ual5wV{*mIR)kgkJ42;W!?NG|6pDv1ggaA=BA8wfM`l zA8R0VsT0!2CJ0{Jzud0EUnwuKXI``Odt3);Z6EuILTT+&ym>gp>h7%eK4L4K23*-M zX6%45x$=AHxth}n+vRkLo|m$-u*j34tYYmyWDR#(?R}a3q(g@fK4$?#^fhG>o@`6N zWSqspP4-=qiqMKev?kZR%3aJI@+@H{N&f}f8HI}Pf2HKaOt1NRnl@qrST{MD0T4E5v&?P%{+b3?{Ge3B5-h%KGzU7`i$ z$5s(YQvbMuB#s88F(8YYIKh=8^#2yT$I1vLwiP`kujO4{Q_=`}K1;pxBlv-azfaZ3 zM8g!AR0BBT7TVj!$Fz;~XOG9H8;+s;$$(uDdJt*|)ebFAN^%60Z~mbj7cj|oJ6G5=woO|+cvJa&5uBo@oZ=nl-X zt6o-DJ|bXkuf|^?bM~btL>>`Z!o@^=AJIVu4;=KkKQ}he8A+bB6wuy>l$adaWoMM`MavDri-r=%(IGsw zy~c`*My(zL5$#xYl$?u1#BK56^K-{gbN0`#)H4m;J`$Xi({zTsgHzNhNI5;1X81+< z8=~4xy~Oab3?E+t9#du23cGKCfWbxK3q%JhPNt!LkT16Njgf?T79_kwUN0lY0&vhP z>S3ylEOs>yS2DdS0(G+Jp;q+gNE=jL$_Z{Q2JhBrLhTRV19<3*8E6b@`C{$ut2|^t zmOn!tomf0!^&OwGqkd&z>TzND3&<*hfHGB>;1V@XfVFc*Hd_rPNAgv#pS|K4PI>r= zH^~?795`NcNDx77eQbl>&X`Y!@Ap^CoT#FA~FH_Y^UY+9~qo+@R zDkIrAVgKE*?p_UnmAqcL6U-5y4$;01G!lbul0#{XJJEW>=npqt0UXIh;t&4VgOg)5 z_m9FPgAdb`F7JL8DJqtfd@`%ncwgjwA|udKE)n-*Ga0frj0ZhWeEu*lo$i@*Pz491 zR_G?Q(ID8w7&G-vik{hx(KdsvX3dq+mYvk{3*OEcy1CWQr}>2T8@WXu2nW{2jb+=i z2YDFc=Qh$T4*0=4{$*flG*6>{a^wXQLWv@iq)7(4s;V)QP(|CyQJ}1_SrBuI`$1FY zGk_{h#LAMYv?@ZYui;p&ZU_Ktqd|AR`0Rmj&*{%wrQ_g_BR*eSf6WDoU3+JlrJq8E z=826hmc5)5PVxpcN0UcG%GNbNEf+}3TYS$oqHTEj<9`+Dzl`btfl1)cZ#p?xjzTDS z^X~`xzY%!?i{)_htT7erRxp;`xqTJAz`*#+(`+l+ z#rVcoR-_|Am|!i4id#a@ z*IQJ|irA>L^rm)9>ecGcsb9Bu+~+DztCCz`h_Ne@s67|~DP-!c@7r2(<6=&+iPiro z@ydVsO_^+spf3in&3E7ySe0D-E5%UKHq@7fm!yp}+$)iZSd#Ilk3s(Xo0@Y4(fm$^ zk!k4tTrKV(I_O~{Kbsg{U>1f1kDV7o0+Lj3L)v-cbQ1?z+Hp<^RTrOhKbr3P1=6J~ zn?G}Z%!3@kTP!_j2bl?9ShWmm1z7?&mVP0ToanAy^F=EjoBKgg=3606zt3CycIF5P zLv9&6`TRBDN6f+RK+hh2fH6zyl(jmJo@H!XIVQ*p<>Iegeq{Jpqr^a$Z*p6pkqGC- 
zpJDf7t&I50fKE#eSMIg1s%duY|I zc}?m1(jWsKR1dWX13lYFE_6)!;Zb0))Inn!=2Kh$XABr;PWbHVk?8;wf)fj#Q{NDT zMS%;fmtvXS6@0CCNP&=VVW|7IQC|mYEn5&SrX*K(>P*gvm=)dB+?BJ72HvsQ-RT>J)(EhQuf$POupv9&UY>6aX|Ui*0R-vPe9q1 zZpv=ul{MYVwnCUIQkq~{GlqwH%BHl=f5A~)uecE&)prY$-?4%C5ZQT=uEmieusoRw z!>}x-iOg%nJUv5%PZC4?OaONTyAsaWWtabI(TqXDg`UE{m$}EnC)E^#?2X1-M;KG8 zPu~CrX!eKrX_XkV?M1%cB_P_F$y9iKBB?dzybs-Xzh#Dq3LGW43sVAZm%P@d4Ljy%ERJXKaBYxoOVzvWD@iiwx51)?u-Tz>>v6%KusCt-~91> zQ#l1V`&I)#v>J-yQ}ibit|#N{C67LAR0L5u-2`O;PVssA2c)sU)g1yW&`SzP^J|HS zkk|npbUr+>xYcCC_6s$DylffA5o6xgOY9GLKmQbC2kYu=!;Pvq7d=@1`b{fQqMyvS z`pGT=C+1jT^^s4SZalNN#ZI9o&rjB<$O3Hv9@H?Dk!$>P(j*Dsll@mC{_HhJ8xPvwPN`+Q6aHaV*mc_Pb@3HI9LtwbIR8z~k+={fA@ZN^V}0^2+KliEe$- z5b~z3+?QzV4 z45c4Pg;^ffX{5-x+NUx->Vo2T>I_cQ# zmiP3zjwlx>j{<@PN;zO&pbF~MvNPeIvBJ35TOYJ?fEyA!+LvGL3x_jRkr8mh&c%SII^^Hi} z{-JpsiJ{TLf-B{s>_|aYpg9L)-Ut-CA1iI^OCg@(?@4_jk%(&1eQ8bGbWM&|=Z$H#RF3c?ae3F+ITMM2Z7^EO6RbMAt{zmNsd(z~+|$p3A}tp%&9@=J5A*nc0P-GaEP!I)>h}m!{Y;MoPs@nHzh3{hu41EKSrG@SuSd&mwrnO z4l-GHS=;1Qi2V%knv09Dc!FGc#MwwNU!gmIPP~j!O;iX|ZJo{~eB6+Dh_nMIE5sX~ zLFABa$l1RBDaAno9!;oJqO;N(;A;!+0v$=M7zB6sNm$x=DI0Z-1r4>ZF83EqjJIy} zUc57U@aWyrQxIW;9#(Qc$j6FjC1xg7gN=HpbNk>KRXQ#cYt`;@!-J$eopi%b18;|| zjsJ4A{mulRMcHEJRUCN4A*>2@QHc~__MY#w4e-mr)#VtxMCoQPcMdI2(_tx{heqW@ zd;|qSx4*(_b($h!w#GjqcLGk2=$_IB1^)TiJ{uKS9$S|M>rqtCOT?gXDj4e#RDPK% zw^9U+Hjpp4iIvqMEgw>NUcv5ekXJbX>uaZFZ4|5K&P<_&Q>7UL!-ml=UQtuaaM_9| zYaOhEG!QM~+)+xN7z9_OM<@0}dU78iGqYS4j8)h@1^)SbSDXnt1INX@uR!V6nZw9$ab|x<)K$@nb6tw%kCI3dE52-&99n%H!s#| zN~_yJkX3fb75Ll{W57TzJ$G5#2T^;fw(9X}L5vr-mA{rSj_N1&{#4Qb$Ou%xl3(34 z7GCtuE;U=k6K}|$LLB}jprNc%0A`v9$~f-ranEJetWv)JGL~nCs`*$WON9~OHH8*{ z>J**2!K+kslYV+upk9E%<;Q)}zQ#MgT7QRwgp`ynr9AHQzMZDtkxD&0txU0PcvXt- znO1%Sp;A9B)ru)oM(!fDKOJRY=_x58j17}tzM>k#)*^gqFH4?Hu0k!`@27)SQ3Pb+ zhHaIYimt1!STz2w9V0ywZ0ItQ+71rI+H0`KCm4QRIyi^kKZT!m74P4^%&5w5=c$`p zx37P{V(D{A48#Ptq%Zz7RVRvS(|3MeAKIh6zoX zKr)dwaFs*&cVt(Y%w!MVl3|;&^6^nrWf_SgyA|c?nU$>>%zg+eGZ?7Hxv8yFUO}eb z`+t`vHIGs&d_Ckg4~-Yqu__3lMQVc&YOH4=Y7y{9H_s@>VXYcLQ~-%ocP!G@r(Po) z@yl#9(u=|Y{;V4W;FGI!PD@$jlA$LpmI=RnwmNOTT`EBc;2T@mv}HB6!#Uz3VIM(_ zTQwLvie*5zZL@$?aH6e1E!HN6B2c6kA@Ut+G?3fzx*w`7j^!vcV{mR5&C=5KD%uWi zuDLww24%%26G!lr7nzmKv0Fh84!$76EZN2d7EakgRY=hY{WJ)kxdET<1IxsJbn`NRMHgj>6OPU4kay5tfA`ue=Ls2Zu z_-JCMnP9Wm$bd-}2qq<)JPp|_JuPN6Q7|~weFvA7*OJMxtM(h&(^}lSb zmO9=Z1xi`D^*ZQ5o8X(_RoD|qsUeoIsMT4g+($5VanH>Krk16EW`ny|yO>FHuS+7S zpIEVRk8J@}Os13ZTg0DpdD!!+{WQ~uK_^jo$Fg5+9cnQc>j@OImQ#S|=$P zfL}%jp0rnU2xmqZLS3VcXJ!-#%SJICHAw(fYCO%TM&X+2H8f7)W4S-q)ys~W4{nRa zM>nUJgCLQi4a-eawoHj^V}+V7UjwTxc4WJDfqZ6MIAprlP)=N(_S};N-1BC& zNBJv#TMTg2DTlDVK16oR&XP9Ebu0sP=WJH2M_JkRFEB}w1plkphgY9>kE$Z z8vp>6X(*EKHdixn$yB9zMu-=I?l%d5F;4Lh5O$I7Q%PXd9O}2+FbnWAr>J=A4(#4o zAE;30-+898D=o*c!XZxK@cWj3TJ1B~yQHd*7<3E^n&eS=J2nHfE|@~4VI(Y;Y4=%> z&l!!T+yi8;-DGS<`u5G;9Wu_M?~^*ug1or6fWm2n1&hfs3A+S>(25{mwT-x9o;>C8 zZ*vHYkBX6_v!ZFLJviYROyj!VyxVOH>vmNBDriJYtgwiTPPPV1_w`r+|x$$ftXiT++3;DqGx@rS^k{9ay=g;I8OGg`>}I% z?vjNzs2@5l6<9YcfK^ZgPTi#oT7)2SI){pBxnoc(&Mvo?v~UWAd^yN+&RYPn-|GUe zalI!>cUQ&g$|hQp(k#e+3@)}4EYbeR5Pqf~T|b8bT+Zh}Ef$%f4?Gx%$I4~*9gm@H zfw!~Lj0R5})BJcE~Gz>rYa@l!&I|l)phgt+Y zN#}!?y{I>}sO#EaQ(X?{nCCGA1h0;$XOD|s)^fi6<+0t*?y5s^bB*duRmmYYL)jpT zP5gUmc=`6HnnGV2x@lR0RQuFh`|w8_XE8X(A~{Wwq1NuUMYD`M`Qi5mg5!$J-6b*> zJuAEG9!Q~P(IYDIU+ZkS`xN>sf4n?`WI=t0Hu(G=%8LOqH3G5R>}6Y!vUq!fQ3HF9 zS%scAX*iG8uvT1dphMEPlF@RFkt?AcB|H*nA5a-De0mQ2?QJa55T{NZL)BqjNE;{$*$16^N6J1XV_E5(kla+Z#)2=UL@(cY89c9Wxn#XOzcow z?V_Q=t2YrrEN!LP<35W{jF2*eZ9PqKw#ia9w>hux61urxonMJ*(RDiQFVS?}eZo7z z2k(TfM>9XQ`ZLxeUYss%sn{x(D3d`v<$?D~$gdM|64p~1>UpTe~ECaUzVr3(bL&p6EwA&1az8Fv&_*a@_G9x=~^=;P)9z 
diff --git a/doc/src/augmentation.md b/docs/src/augmentation.md
similarity index 100%
rename from doc/src/augmentation.md
rename to docs/src/augmentation.md
diff --git a/doc/src/data_preparation.md b/docs/src/data_preparation.md
similarity index 98%
rename from doc/src/data_preparation.md
rename to docs/src/data_preparation.md
index a3d1b3eb4..34d2a835c 100644
--- a/doc/src/data_preparation.md
+++ b/docs/src/data_preparation.md
@@ -21,7 +21,7 @@ To perform z-score normalization (zero-mean, unit stddev) upon audio features, w
 ```bash
 python3 utils/compute_mean_std.py \
 --num_samples 2000 \
---specgram_type linear \
+--spectrum_type linear \
 --manifest_path examples/librispeech/data/manifest.train \
 --output_path examples/librispeech/data/mean_std.npz
 ```
diff --git a/docs/src/deepspeech_architecture.md b/docs/src/deepspeech_architecture.md
new file mode 100644
index 000000000..5a6ca8867
--- /dev/null
+++ b/docs/src/deepspeech_architecture.md
@@ -0,0 +1,190 @@
+# Deepspeech2
+## Streaming
+
+The implemented architecture of the Deepspeech2 online model is based on the [Deepspeech2 model](https://arxiv.org/pdf/1512.02595.pdf), with some changes.
+The model is mainly composed of a 2D convolution subsampling layer and stacked single-direction RNN layers, as sketched below.
+
+To illustrate the model implementation clearly, three parts are described in detail.
+- Data Preparation
+- Encoder
+- Decoder
+
+In addition, the training and testing processes are introduced.
+
+The architecture of the model is shown in Fig.1.
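+
+The block below is a rough sketch of this streaming encoder structure: two stride-2 2D convolutions subsample the input, then forward-only (single-direction) LSTM layers process it without looking at future frames. It is illustrative only: it assumes PaddlePaddle's `paddle.nn` API, and the sizes (`feat_dim`, `rnn_size`, `num_rnn_layers`) are hypothetical placeholders rather than the released configuration.
+
+```python
+import paddle
+import paddle.nn as nn
+
+
+def _subsampled(n):
+    # Length after one conv with kernel 3, stride 2, padding 1.
+    return (n + 1) // 2
+
+
+class Ds2OnlineEncoderSketch(nn.Layer):
+    def __init__(self, feat_dim=161, rnn_size=1024, num_rnn_layers=3):
+        super().__init__()
+        # 2D convolution subsampling over the (time, frequency) plane.
+        self.conv = nn.Sequential(
+            nn.Conv2D(1, 32, kernel_size=3, stride=2, padding=1),
+            nn.ReLU(),
+            nn.Conv2D(32, 32, kernel_size=3, stride=2, padding=1),
+            nn.ReLU(),
+        )
+        conv_out_dim = 32 * _subsampled(_subsampled(feat_dim))
+        # Forward-only RNNs use no future context, so the model can stream.
+        self.rnn = nn.LSTM(conv_out_dim, rnn_size,
+                           num_layers=num_rnn_layers, direction='forward')
+
+    def forward(self, feats):
+        # feats: (batch, time, feat_dim) spectrogram features.
+        x = feats.unsqueeze(1)                    # (B, 1, T, D)
+        x = self.conv(x)                          # (B, 32, T', D')
+        b, c, t, d = x.shape
+        x = x.transpose([0, 2, 1, 3]).reshape([b, t, c * d])
+        out, _ = self.rnn(x)                      # (B, T', rnn_size)
+        return out
+
+
+# Example: 4 utterances, 100 frames of 161-dim linear spectrogram each.
+# enc = Ds2OnlineEncoderSketch()
+# print(enc(paddle.randn([4, 100, 161])).shape)  # [4, 25, 1024]
+```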