From c7dd20703862580c1abc478dc70ceb8523e51cd3 Mon Sep 17 00:00:00 2001 From: iftaken Date: Fri, 13 May 2022 18:02:39 +0800 Subject: [PATCH 01/30] fixs CORS Error --- paddlespeech/server/bin/paddlespeech_server.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/paddlespeech/server/bin/paddlespeech_server.py b/paddlespeech/server/bin/paddlespeech_server.py index db92f179..1922399f 100644 --- a/paddlespeech/server/bin/paddlespeech_server.py +++ b/paddlespeech/server/bin/paddlespeech_server.py @@ -17,6 +17,7 @@ from typing import List import uvicorn from fastapi import FastAPI +from starlette.middleware.cors import CORSMiddleware from prettytable import PrettyTable from ..executor import BaseExecutor @@ -33,6 +34,12 @@ __all__ = ['ServerExecutor', 'ServerStatsExecutor'] app = FastAPI( title="PaddleSpeech Serving API", description="Api", version="0.0.1") +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"]) @cli_server_register( name='paddlespeech_server.start', description='Start the service') From 450cd98ce0bb77c966628cd81c78b2b6975d5d46 Mon Sep 17 00:00:00 2001 From: iftaken Date: Fri, 20 May 2022 15:27:58 +0800 Subject: [PATCH 02/30] add PP-TTS,PP-ASR,PP-VPR --- README.md | 1 + README_cn.md | 1 + 2 files changed, 2 insertions(+) diff --git a/README.md b/README.md index d32131c0..2ade8a69 100644 --- a/README.md +++ b/README.md @@ -161,6 +161,7 @@ Via the easy-to-use, efficient, flexible and scalable implementation, our vision - 🧩 *Cascaded models application*: as an extension of the typical traditional audio tasks, we combine the workflows of the aforementioned tasks with other fields like Natural language processing (NLP) and Computer Vision (CV). 
### Recent Update +- 👑 2022.05.13: Release [PP-ASR](./docs/source/asr/PPASR.md)、[PP-TTS](./docs/source/tts/PPTTS.md)、[PP-VPR](docs/source/vpr/PPVPR.md) - 👏🏻 2022.05.06: `Streaming ASR` with `Punctuation Restoration` and `Token Timestamp`. - 👏🏻 2022.05.06: `Server` is available for `Speaker Verification`, and `Punctuation Restoration`. - 👏🏻 2022.04.28: `Streaming Server` is available for `Automatic Speech Recognition` and `Text-to-Speech`. diff --git a/README_cn.md b/README_cn.md index ceb9dc18..f5ba9362 100644 --- a/README_cn.md +++ b/README_cn.md @@ -182,6 +182,7 @@ from https://github.com/18F/open-source-guide/blob/18f-pages/pages/making-readme +- 👑 2022.05.13: PaddleSpeech 发布 [PP-ASR](./docs/source/asr/PPASR_cn.md)、[PP-TTS](./docs/source/tts/PPTTS_cn.md)、[PP-VPR](docs/source/vpr/PPVPR_cn.md) - 👏🏻 2022.05.06: PaddleSpeech Streaming Server 上线! 覆盖了语音识别(标点恢复、时间戳),和语音合成。 - 👏🏻 2022.05.06: PaddleSpeech Server 上线! 覆盖了声音分类、语音识别、语音合成、声纹识别,标点恢复。 - 👏🏻 2022.03.28: PaddleSpeech CLI 覆盖声音分类、语音识别、语音翻译(英译中)、语音合成,声纹验证。 From b9e3e49305983ff1b07d8d649dcadebfb1a71e32 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Wed, 15 Jun 2022 07:48:14 +0000 Subject: [PATCH 03/30] refactor stream asr and fix ds2 stream bug --- demos/streaming_asr_server/test.sh | 2 +- .../asr/online/{ => python}/asr_engine.py | 160 ++++++++++-------- paddlespeech/server/engine/engine_factory.py | 5 +- 3 files changed, 96 insertions(+), 71 deletions(-) rename paddlespeech/server/engine/asr/online/{ => python}/asr_engine.py (96%) diff --git a/demos/streaming_asr_server/test.sh b/demos/streaming_asr_server/test.sh index f3075454..f09068d4 100755 --- a/demos/streaming_asr_server/test.sh +++ b/demos/streaming_asr_server/test.sh @@ -4,7 +4,7 @@ wget -c https://paddlespeech.bj.bcebos.com/PaddleAudio/zh.wav # read the wav and pass it to only streaming asr service # If `127.0.0.1` is not accessible, you need to use the actual service IP address. 
# python3 websocket_client.py --server_ip 127.0.0.1 --port 8290 --wavfile ./zh.wav -paddlespeech_client asr_online --server_ip 127.0.0.1 --port 8290 --input ./zh.wav +paddlespeech_client asr_online --server_ip 127.0.0.1 --port 8090 --input ./zh.wav # read the wav and call streaming and punc service # If `127.0.0.1` is not accessible, you need to use the actual service IP address. diff --git a/paddlespeech/server/engine/asr/online/asr_engine.py b/paddlespeech/server/engine/asr/online/python/asr_engine.py similarity index 96% rename from paddlespeech/server/engine/asr/online/asr_engine.py rename to paddlespeech/server/engine/asr/online/python/asr_engine.py index f230b8b9..9801a6fc 100644 --- a/paddlespeech/server/engine/asr/online/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/python/asr_engine.py @@ -121,13 +121,14 @@ class PaddleASRConnectionHanddler: raise ValueError(f"Not supported: {self.model_type}") def model_reset(self): - if "deepspeech2" in self.model_type: - return - # cache for audio and feat self.remained_wav = None self.cached_feat = None + + if "deepspeech2" in self.model_type: + return + ## conformer # cache for conformer online self.subsampling_cache = None @@ -697,6 +698,67 @@ class ASRServerExecutor(ASRExecutor): self.task_resource = CommonTaskResource( task='asr', model_format='dynamic', inference_mode='online') + def update_config(self)->None: + if "deepspeech2" in self.model_type: + with UpdateConfig(self.config): + # download lm + self.config.decode.lang_model_path = os.path.join( + MODEL_HOME, 'language_model', + self.config.decode.lang_model_path) + + lm_url = self.task_resource.res_dict['lm_url'] + lm_md5 = self.task_resource.res_dict['lm_md5'] + logger.info(f"Start to load language model {lm_url}") + self.download_lm( + lm_url, + os.path.dirname(self.config.decode.lang_model_path), lm_md5) + elif "conformer" in self.model_type or "transformer" in self.model_type: + with UpdateConfig(self.config): + logger.info("start to create 
the stream conformer asr engine") + # update the decoding method + if self.decode_method: + self.config.decode.decoding_method = self.decode_method + # update num_decoding_left_chunks + if self.num_decoding_left_chunks: + assert self.num_decoding_left_chunks == -1 or self.num_decoding_left_chunks >= 0, f"num_decoding_left_chunks should be -1 or >=0" + self.config.decode.num_decoding_left_chunks = self.num_decoding_left_chunks + # we only support ctc_prefix_beam_search and attention_rescoring dedoding method + # Generally we set the decoding_method to attention_rescoring + if self.config.decode.decoding_method not in [ + "ctc_prefix_beam_search", "attention_rescoring" + ]: + logger.info( + "we set the decoding_method to attention_rescoring") + self.config.decode.decoding_method = "attention_rescoring" + + assert self.config.decode.decoding_method in [ + "ctc_prefix_beam_search", "attention_rescoring" + ], f"we only support ctc_prefix_beam_search and attention_rescoring dedoding method, current decoding method is {self.config.decode.decoding_method}" + else: + raise Exception(f"not support: {self.model_type}") + + def init_model(self) -> None: + if "deepspeech2" in self.model_type : + # AM predictor + logger.info("ASR engine start to init the am predictor") + self.am_predictor = init_predictor( + model_file=self.am_model, + params_file=self.am_params, + predictor_conf=self.am_predictor_conf) + elif "conformer" in self.model_type or "transformer" in self.model_type : + # load model + # model_type: {model_name}_{dataset} + model_name = self.model_type[:self.model_type.rindex('_')] + logger.info(f"model name: {model_name}") + model_class = self.task_resource.get_model_class(model_name) + model = model_class.from_config(self.config) + self.model = model + self.model.set_state_dict(paddle.load(self.am_model)) + self.model.eval() + else: + raise Exception(f"not support: {self.model_type}") + + def _init_from_path(self, model_type: str=None, am_model: 
Optional[os.PathLike]=None, @@ -718,8 +780,13 @@ class ASRServerExecutor(ASRExecutor): self.model_type = model_type self.sample_rate = sample_rate + self.decode_method = decode_method + self.num_decoding_left_chunks = num_decoding_left_chunks + # conf for paddleinference predictor or onnx + self.am_predictor_conf = am_predictor_conf logger.info(f"model_type: {self.model_type}") + sample_rate_str = '16k' if sample_rate == 16000 else '8k' tag = model_type + '-' + lang + '-' + sample_rate_str self.task_resource.set_task_model(model_tag=tag) @@ -763,62 +830,10 @@ class ASRServerExecutor(ASRExecutor): vocab=self.config.vocab_filepath, spm_model_prefix=self.config.spm_model_prefix) - if "deepspeech2" in model_type: - with UpdateConfig(self.config): - # download lm - self.config.decode.lang_model_path = os.path.join( - MODEL_HOME, 'language_model', - self.config.decode.lang_model_path) - - lm_url = self.task_resource.res_dict['lm_url'] - lm_md5 = self.task_resource.res_dict['lm_md5'] - logger.info(f"Start to load language model {lm_url}") - self.download_lm( - lm_url, - os.path.dirname(self.config.decode.lang_model_path), lm_md5) - - # AM predictor - logger.info("ASR engine start to init the am predictor") - self.am_predictor_conf = am_predictor_conf - self.am_predictor = init_predictor( - model_file=self.am_model, - params_file=self.am_params, - predictor_conf=self.am_predictor_conf) - - elif "conformer" in model_type or "transformer" in model_type: - with UpdateConfig(self.config): - logger.info("start to create the stream conformer asr engine") - # update the decoding method - if decode_method: - self.config.decode.decoding_method = decode_method - # update num_decoding_left_chunks - if num_decoding_left_chunks: - assert num_decoding_left_chunks == -1 or num_decoding_left_chunks >= 0, f"num_decoding_left_chunks should be -1 or >=0" - self.config.decode.num_decoding_left_chunks = num_decoding_left_chunks - # we only support ctc_prefix_beam_search and attention_rescoring 
dedoding method - # Generally we set the decoding_method to attention_rescoring - if self.config.decode.decoding_method not in [ - "ctc_prefix_beam_search", "attention_rescoring" - ]: - logger.info( - "we set the decoding_method to attention_rescoring") - self.config.decode.decoding_method = "attention_rescoring" - - assert self.config.decode.decoding_method in [ - "ctc_prefix_beam_search", "attention_rescoring" - ], f"we only support ctc_prefix_beam_search and attention_rescoring dedoding method, current decoding method is {self.config.decode.decoding_method}" - - # load model - model_name = model_type[:model_type.rindex( - '_')] # model_type: {model_name}_{dataset} - logger.info(f"model name: {model_name}") - model_class = self.task_resource.get_model_class(model_name) - model = model_class.from_config(self.config) - self.model = model - self.model.set_state_dict(paddle.load(self.am_model)) - self.model.eval() - else: - raise Exception(f"not support: {model_type}") + self.update_config() + + # AM predictor + self.init_model() logger.info(f"create the {model_type} model success") return True @@ -835,6 +850,22 @@ class ASREngine(BaseEngine): super(ASREngine, self).__init__() logger.info("create the online asr engine resource instance") + + def init_model(self) -> bool: + if not self.executor._init_from_path( + model_type=self.config.model_type, + am_model=self.config.am_model, + am_params=self.config.am_params, + lang=self.config.lang, + sample_rate=self.config.sample_rate, + cfg_path=self.config.cfg_path, + decode_method=self.config.decode_method, + num_decoding_left_chunks=self.config.num_decoding_left_chunks, + am_predictor_conf=self.config.am_predictor_conf): + return False + return True + + def init(self, config: dict) -> bool: """init engine resource @@ -860,16 +891,7 @@ class ASREngine(BaseEngine): logger.info(f"paddlespeech_server set the device: {self.device}") - if not self.executor._init_from_path( - model_type=self.config.model_type, - 
am_model=self.config.am_model, - am_params=self.config.am_params, - lang=self.config.lang, - sample_rate=self.config.sample_rate, - cfg_path=self.config.cfg_path, - decode_method=self.config.decode_method, - num_decoding_left_chunks=self.config.num_decoding_left_chunks, - am_predictor_conf=self.config.am_predictor_conf): + if not self.init_model(): logger.error( "Init the ASR server occurs error, please check the server configuration yaml" ) diff --git a/paddlespeech/server/engine/engine_factory.py b/paddlespeech/server/engine/engine_factory.py index 5fdaacce..019e4684 100644 --- a/paddlespeech/server/engine/engine_factory.py +++ b/paddlespeech/server/engine/engine_factory.py @@ -26,7 +26,10 @@ class EngineFactory(object): from paddlespeech.server.engine.asr.python.asr_engine import ASREngine return ASREngine() elif engine_name == 'asr' and engine_type == 'online': - from paddlespeech.server.engine.asr.online.asr_engine import ASREngine + from paddlespeech.server.engine.asr.online.python.asr_engine import ASREngine + return ASREngine() + elif engine_name == 'asr' and engine_type == 'online-onnx': + from paddlespeech.server.engine.asr.online.onnx.asr_engine import ASREngine return ASREngine() elif engine_name == 'tts' and engine_type == 'inference': from paddlespeech.server.engine.tts.paddleinference.tts_engine import TTSEngine From c8574c7e35a85215d88a6461f27f930d50434ab9 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Wed, 15 Jun 2022 08:44:36 +0000 Subject: [PATCH 04/30] ds2 inference as sepearte engine for streaming asr --- .../conf/ws_ds2_application.yaml | 4 +- paddlespeech/cli/asr/infer.py | 2 +- paddlespeech/resource/pretrained_models.py | 20 + ...plication.yaml => ws_ds2_application.yaml} | 4 +- .../asr/online/paddleinference/__init__.py | 0 .../asr/online/paddleinference/asr_engine.py | 539 ++++++++++++++++++ .../engine/asr/online/python/asr_engine.py | 17 +- paddlespeech/server/engine/engine_factory.py | 3 + paddlespeech/server/ws/asr_api.py | 2 +- 
utils/zh_tn.py | 2 +- 10 files changed, 575 insertions(+), 18 deletions(-) rename paddlespeech/server/conf/{ws_application.yaml => ws_ds2_application.yaml} (96%) create mode 100644 paddlespeech/server/engine/asr/online/paddleinference/__init__.py create mode 100644 paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index d19bd26d..4f75c07b 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -11,7 +11,7 @@ port: 8090 # protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. protocol: 'websocket' -engine_list: ['asr_online'] +engine_list: ['asr_online-inference'] ################################################################################# @@ -20,7 +20,7 @@ engine_list: ['asr_online'] ################################### ASR ######################################### ################### speech task: asr; engine_type: online ####################### -asr_online: +asr_online-inference: model_type: 'deepspeech2online_aishell' am_model: # the pdmodel file of am static model [optional] am_params: # the pdiparams file of am static model [optional] diff --git a/paddlespeech/cli/asr/infer.py b/paddlespeech/cli/asr/infer.py index 00cad150..a943ccfa 100644 --- a/paddlespeech/cli/asr/infer.py +++ b/paddlespeech/cli/asr/infer.py @@ -187,7 +187,7 @@ class ASRExecutor(BaseExecutor): elif "conformer" in model_type or "transformer" in model_type: self.config.decode.decoding_method = decode_method if num_decoding_left_chunks: - assert num_decoding_left_chunks == -1 or num_decoding_left_chunks >= 0, f"num_decoding_left_chunks should be -1 or >=0" + assert num_decoding_left_chunks == -1 or num_decoding_left_chunks >= 0, "num_decoding_left_chunks should be -1 or >=0" self.config.num_decoding_left_chunks = 
num_decoding_left_chunks else: diff --git a/paddlespeech/resource/pretrained_models.py b/paddlespeech/resource/pretrained_models.py index f79961d6..eb6ca0cc 100644 --- a/paddlespeech/resource/pretrained_models.py +++ b/paddlespeech/resource/pretrained_models.py @@ -224,6 +224,26 @@ asr_static_pretrained_models = { '29e02312deb2e59b3c8686c7966d4fe3' } }, + "deepspeech2online_aishell-zh-16k": { + '1.0': { + 'url': + 'https://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.1.model.tar.gz', + 'md5': + 'df5ddeac8b679a470176649ac4b78726', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' + }, + }, } # --------------------------------- diff --git a/paddlespeech/server/conf/ws_application.yaml b/paddlespeech/server/conf/ws_ds2_application.yaml similarity index 96% rename from paddlespeech/server/conf/ws_application.yaml rename to paddlespeech/server/conf/ws_ds2_application.yaml index 43d83f2d..fb16e5bd 100644 --- a/paddlespeech/server/conf/ws_application.yaml +++ b/paddlespeech/server/conf/ws_ds2_application.yaml @@ -11,7 +11,7 @@ port: 8090 # protocol = ['websocket', 'http'] (only one can be selected). # websocket only support online engine type. 
protocol: 'websocket' -engine_list: ['asr_online'] +engine_list: ['asr_online-inference'] ################################################################################# @@ -20,7 +20,7 @@ engine_list: ['asr_online'] ################################### ASR ######################################### ################### speech task: asr; engine_type: online ####################### -asr_online: +asr_online-inference: model_type: 'deepspeech2online_aishell' am_model: # the pdmodel file of am static model [optional] am_params: # the pdiparams file of am static model [optional] diff --git a/paddlespeech/server/engine/asr/online/paddleinference/__init__.py b/paddlespeech/server/engine/asr/online/paddleinference/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py new file mode 100644 index 00000000..93edd701 --- /dev/null +++ b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py @@ -0,0 +1,539 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import sys +from typing import ByteString +from typing import Optional + +import numpy as np +import paddle +from numpy import float32 +from yacs.config import CfgNode + +from paddlespeech.cli.asr.infer import ASRExecutor +from paddlespeech.cli.log import logger +from paddlespeech.cli.utils import MODEL_HOME +from paddlespeech.resource import CommonTaskResource +from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer +from paddlespeech.s2t.modules.ctc import CTCDecoder +from paddlespeech.s2t.transform.transformation import Transformation +from paddlespeech.s2t.utils.utility import UpdateConfig +from paddlespeech.server.engine.base_engine import BaseEngine +from paddlespeech.server.utils.paddle_predictor import init_predictor + +__all__ = ['PaddleASRConnectionHanddler', 'ASRServerExecutor', 'ASREngine'] + + +# ASR server connection process class +class PaddleASRConnectionHanddler: + def __init__(self, asr_engine): + """Init a Paddle ASR Connection Handler instance + + Args: + asr_engine (ASREngine): the global asr engine + """ + super().__init__() + logger.info( + "create an paddle asr connection handler to process the websocket connection" + ) + self.config = asr_engine.config # server config + self.model_config = asr_engine.executor.config + self.asr_engine = asr_engine + + # model_type, sample_rate and text_feature is shared for deepspeech2 and conformer + self.model_type = self.asr_engine.executor.model_type + self.sample_rate = self.asr_engine.executor.sample_rate + # tokens to text + self.text_feature = self.asr_engine.executor.text_feature + + # extract feat, new only fbank in conformer model + self.preprocess_conf = self.model_config.preprocess_config + self.preprocess_args = {"train": False} + self.preprocessing = Transformation(self.preprocess_conf) + + # frame window and frame shift, in samples unit + self.win_length = self.preprocess_conf.process[0]['win_length'] + self.n_shift = 
self.preprocess_conf.process[0]['n_shift'] + + assert self.preprocess_conf.process[0]['fs'] == self.sample_rate, ( + self.sample_rate, self.preprocess_conf.process[0]['fs']) + self.frame_shift_in_ms = int( + self.n_shift / self.preprocess_conf.process[0]['fs'] * 1000) + + self.continuous_decoding = self.config.get("continuous_decoding", False) + self.init_decoder() + self.reset() + + def init_decoder(self): + if "deepspeech2" in self.model_type: + assert self.continuous_decoding is False, "ds2 model not support endpoint" + self.am_predictor = self.asr_engine.executor.am_predictor + + self.decoder = CTCDecoder( + odim=self.model_config.output_dim, # is in vocab + enc_n_units=self.model_config.rnn_layer_size * 2, + blank_id=self.model_config.blank_id, + dropout_rate=0.0, + reduction=True, # sum + batch_average=True, # sum / batch_size + grad_norm_type=self.model_config.get('ctc_grad_norm_type', + None)) + + cfg = self.model_config.decode + decode_batch_size = 1 # for online + self.decoder.init_decoder( + decode_batch_size, self.text_feature.vocab_list, + cfg.decoding_method, cfg.lang_model_path, cfg.alpha, cfg.beta, + cfg.beam_size, cfg.cutoff_prob, cfg.cutoff_top_n, + cfg.num_proc_bsearch) + else: + raise ValueError(f"Not supported: {self.model_type}") + + def model_reset(self): + # cache for audio and feat + self.remained_wav = None + self.cached_feat = None + + def output_reset(self): + ## outputs + # partial/ending decoding results + self.result_transcripts = [''] + + def reset_continuous_decoding(self): + """ + when in continous decoding, reset for next utterance. 
+ """ + self.global_frame_offset = self.num_frames + self.model_reset() + + def reset(self): + if "deepspeech2" in self.model_type: + # for deepspeech2 + # init state + self.chunk_state_h_box = np.zeros( + (self.model_config.num_rnn_layers, 1, + self.model_config.rnn_layer_size), + dtype=float32) + self.chunk_state_c_box = np.zeros( + (self.model_config.num_rnn_layers, 1, + self.model_config.rnn_layer_size), + dtype=float32) + self.decoder.reset_decoder(batch_size=1) + else: + raise NotImplementedError(f"{self.model_type} not support.") + + self.device = None + + ## common + # global sample and frame step + self.num_samples = 0 + self.global_frame_offset = 0 + # frame step of cur utterance + self.num_frames = 0 + + ## endpoint + self.endpoint_state = False # True for detect endpoint + + ## conformer + self.model_reset() + + ## outputs + self.output_reset() + + def extract_feat(self, samples: ByteString): + logger.info("Online ASR extract the feat") + samples = np.frombuffer(samples, dtype=np.int16) + assert samples.ndim == 1 + + self.num_samples += samples.shape[0] + logger.info( + f"This package receive {samples.shape[0]} pcm data. 
Global samples:{self.num_samples}" + ) + + # self.reamined_wav stores all the samples, + # include the original remained_wav and this package samples + if self.remained_wav is None: + self.remained_wav = samples + else: + assert self.remained_wav.ndim == 1 # (T,) + self.remained_wav = np.concatenate([self.remained_wav, samples]) + logger.info( + f"The concatenation of remain and now audio samples length is: {self.remained_wav.shape}" + ) + + if len(self.remained_wav) < self.win_length: + # samples not enough for feature window + return 0 + + # fbank + x_chunk = self.preprocessing(self.remained_wav, **self.preprocess_args) + x_chunk = paddle.to_tensor(x_chunk, dtype="float32").unsqueeze(axis=0) + + # feature cache + if self.cached_feat is None: + self.cached_feat = x_chunk + else: + assert (len(x_chunk.shape) == 3) # (B,T,D) + assert (len(self.cached_feat.shape) == 3) # (B,T,D) + self.cached_feat = paddle.concat( + [self.cached_feat, x_chunk], axis=1) + + # set the feat device + if self.device is None: + self.device = self.cached_feat.place + + # cur frame step + num_frames = x_chunk.shape[1] + + # global frame step + self.num_frames += num_frames + + # update remained wav + self.remained_wav = self.remained_wav[self.n_shift * num_frames:] + + logger.info( + f"process the audio feature success, the cached feat shape: {self.cached_feat.shape}" + ) + logger.info( + f"After extract feat, the cached remain the audio samples: {self.remained_wav.shape}" + ) + logger.info(f"global samples: {self.num_samples}") + logger.info(f"global frames: {self.num_frames}") + + def decode(self, is_finished=False): + """advance decoding + + Args: + is_finished (bool, optional): Is last frame or not. Defaults to False. + + Returns: + None: + """ + if "deepspeech2" in self.model_type: + decoding_chunk_size = 1 # decoding chunk size = 1. 
int decoding frame unit + + context = 7 # context=7, in audio frame unit + subsampling = 4 # subsampling=4, in audio frame unit + + cached_feature_num = context - subsampling + # decoding window for model, in audio frame unit + decoding_window = (decoding_chunk_size - 1) * subsampling + context + # decoding stride for model, in audio frame unit + stride = subsampling * decoding_chunk_size + + if self.cached_feat is None: + logger.info("no audio feat, please input more pcm data") + return + + num_frames = self.cached_feat.shape[1] + logger.info( + f"Required decoding window {decoding_window} frames, and the connection has {num_frames} frames" + ) + + # the cached feat must be larger decoding_window + if num_frames < decoding_window and not is_finished: + logger.info( + f"frame feat num is less than {decoding_window}, please input more pcm data" + ) + return None, None + + # if is_finished=True, we need at least context frames + if num_frames < context: + logger.info( + "flast {num_frames} is less than context {context} frames, and we cannot do model forward" + ) + return None, None + + logger.info("start to do model forward") + # num_frames - context + 1 ensure that current frame can get context window + if is_finished: + # if get the finished chunk, we need process the last context + left_frames = context + else: + # we only process decoding_window frames for one chunk + left_frames = decoding_window + + end = None + for cur in range(0, num_frames - left_frames + 1, stride): + end = min(cur + decoding_window, num_frames) + + # extract the audio + x_chunk = self.cached_feat[:, cur:end, :].numpy() + x_chunk_lens = np.array([x_chunk.shape[1]]) + + trans_best = self.decode_one_chunk(x_chunk, x_chunk_lens) + + self.result_transcripts = [trans_best] + + # update feat cache + self.cached_feat = self.cached_feat[:, end - cached_feature_num:, :] + + # return trans_best[0] + else: + raise Exception(f"{self.model_type} not support paddleinference.") + + @paddle.no_grad() + 
def decode_one_chunk(self, x_chunk, x_chunk_lens): + """forward one chunk frames + + Args: + x_chunk (np.ndarray): (B,T,D), audio frames. + x_chunk_lens ([type]): (B,), audio frame lens + + Returns: + logprob: poster probability. + """ + logger.info("start to decoce one chunk for deepspeech2") + input_names = self.am_predictor.get_input_names() + audio_handle = self.am_predictor.get_input_handle(input_names[0]) + audio_len_handle = self.am_predictor.get_input_handle(input_names[1]) + h_box_handle = self.am_predictor.get_input_handle(input_names[2]) + c_box_handle = self.am_predictor.get_input_handle(input_names[3]) + + audio_handle.reshape(x_chunk.shape) + audio_handle.copy_from_cpu(x_chunk) + + audio_len_handle.reshape(x_chunk_lens.shape) + audio_len_handle.copy_from_cpu(x_chunk_lens) + + h_box_handle.reshape(self.chunk_state_h_box.shape) + h_box_handle.copy_from_cpu(self.chunk_state_h_box) + + c_box_handle.reshape(self.chunk_state_c_box.shape) + c_box_handle.copy_from_cpu(self.chunk_state_c_box) + + output_names = self.am_predictor.get_output_names() + output_handle = self.am_predictor.get_output_handle(output_names[0]) + output_lens_handle = self.am_predictor.get_output_handle( + output_names[1]) + output_state_h_handle = self.am_predictor.get_output_handle( + output_names[2]) + output_state_c_handle = self.am_predictor.get_output_handle( + output_names[3]) + + self.am_predictor.run() + + output_chunk_probs = output_handle.copy_to_cpu() + output_chunk_lens = output_lens_handle.copy_to_cpu() + self.chunk_state_h_box = output_state_h_handle.copy_to_cpu() + self.chunk_state_c_box = output_state_c_handle.copy_to_cpu() + + self.decoder.next(output_chunk_probs, output_chunk_lens) + trans_best, trans_beam = self.decoder.decode() + logger.info(f"decode one best result for deepspeech2: {trans_best[0]}") + return trans_best[0] + + def get_result(self): + """return partial/ending asr result. + + Returns: + str: one best result of partial/ending. 
+ """ + if len(self.result_transcripts) > 0: + return self.result_transcripts[0] + else: + return '' + + +class ASRServerExecutor(ASRExecutor): + def __init__(self): + super().__init__() + self.task_resource = CommonTaskResource( + task='asr', model_format='static', inference_mode='online') + + def update_config(self) -> None: + if "deepspeech2" in self.model_type: + with UpdateConfig(self.config): + # download lm + self.config.decode.lang_model_path = os.path.join( + MODEL_HOME, 'language_model', + self.config.decode.lang_model_path) + + lm_url = self.task_resource.res_dict['lm_url'] + lm_md5 = self.task_resource.res_dict['lm_md5'] + logger.info(f"Start to load language model {lm_url}") + self.download_lm( + lm_url, + os.path.dirname(self.config.decode.lang_model_path), lm_md5) + else: + raise NotImplementedError( + f"{self.model_type} not support paddleinference.") + + def init_model(self) -> None: + + if "deepspeech2" in self.model_type: + # AM predictor + logger.info("ASR engine start to init the am predictor") + self.am_predictor = init_predictor( + model_file=self.am_model, + params_file=self.am_params, + predictor_conf=self.am_predictor_conf) + else: + raise NotImplementedError( + f"{self.model_type} not support paddleinference.") + + def _init_from_path(self, + model_type: str=None, + am_model: Optional[os.PathLike]=None, + am_params: Optional[os.PathLike]=None, + lang: str='zh', + sample_rate: int=16000, + cfg_path: Optional[os.PathLike]=None, + decode_method: str='attention_rescoring', + num_decoding_left_chunks: int=-1, + am_predictor_conf: dict=None): + """ + Init model and other resources from a specific path. 
+ """ + if not model_type or not lang or not sample_rate: + logger.error( + "The model type or lang or sample rate is None, please input an valid server parameter yaml" + ) + return False + + self.model_type = model_type + self.sample_rate = sample_rate + self.decode_method = decode_method + self.num_decoding_left_chunks = num_decoding_left_chunks + # conf for paddleinference predictor or onnx + self.am_predictor_conf = am_predictor_conf + logger.info(f"model_type: {self.model_type}") + + sample_rate_str = '16k' if sample_rate == 16000 else '8k' + tag = model_type + '-' + lang + '-' + sample_rate_str + self.task_resource.set_task_model(model_tag=tag) + + if cfg_path is None or am_model is None or am_params is None: + self.res_path = self.task_resource.res_dir + self.cfg_path = os.path.join( + self.res_path, self.task_resource.res_dict['cfg_path']) + + self.am_model = os.path.join(self.res_path, + self.task_resource.res_dict['model']) + self.am_params = os.path.join(self.res_path, + self.task_resource.res_dict['params']) + else: + self.cfg_path = os.path.abspath(cfg_path) + self.am_model = os.path.abspath(am_model) + self.am_params = os.path.abspath(am_params) + self.res_path = os.path.dirname( + os.path.dirname(os.path.abspath(self.cfg_path))) + + logger.info("Load the pretrained model:") + logger.info(f" tag = {tag}") + logger.info(f" res_path: {self.res_path}") + logger.info(f" cfg path: {self.cfg_path}") + logger.info(f" am_model path: {self.am_model}") + logger.info(f" am_params path: {self.am_params}") + + #Init body. 
+ self.config = CfgNode(new_allowed=True) + self.config.merge_from_file(self.cfg_path) + + if self.config.spm_model_prefix: + self.config.spm_model_prefix = os.path.join( + self.res_path, self.config.spm_model_prefix) + logger.info(f"spm model path: {self.config.spm_model_prefix}") + + self.vocab = self.config.vocab_filepath + + self.text_feature = TextFeaturizer( + unit_type=self.config.unit_type, + vocab=self.config.vocab_filepath, + spm_model_prefix=self.config.spm_model_prefix) + + self.update_config() + + # AM predictor + self.init_model() + + logger.info(f"create the {model_type} model success") + return True + + +class ASREngine(BaseEngine): + """ASR model resource + + Args: + metaclass: Defaults to Singleton. + """ + + def __init__(self): + super(ASREngine, self).__init__() + logger.info("create the online asr engine resource instance") + + def init_model(self) -> bool: + if not self.executor._init_from_path( + model_type=self.config.model_type, + am_model=self.config.am_model, + am_params=self.config.am_params, + lang=self.config.lang, + sample_rate=self.config.sample_rate, + cfg_path=self.config.cfg_path, + decode_method=self.config.decode_method, + num_decoding_left_chunks=self.config.num_decoding_left_chunks, + am_predictor_conf=self.config.am_predictor_conf): + return False + return True + + def init(self, config: dict) -> bool: + """init engine resource + + Args: + config_file (str): config file + + Returns: + bool: init failed or success + """ + self.config = config + self.executor = ASRServerExecutor() + + try: + self.device = self.config.get("device", paddle.get_device()) + paddle.set_device(self.device) + except BaseException as e: + logger.error( + f"Set device failed, please check if device '{self.device}' is already used and the parameter 'device' in the yaml file" + ) + logger.error( + "If all GPU or XPU is used, you can set the server to 'cpu'") + sys.exit(-1) + + logger.info(f"paddlespeech_server set the device: {self.device}") + + if not 
self.init_model(): + logger.error( + "Init the ASR server occurs error, please check the server configuration yaml" + ) + return False + + logger.info("Initialize ASR server engine successfully.") + return True + + def new_handler(self): + """New handler from model. + + Returns: + PaddleASRConnectionHanddler: asr handler instance + """ + return PaddleASRConnectionHanddler(self) + + def preprocess(self, *args, **kwargs): + raise NotImplementedError("Online not using this.") + + def run(self, *args, **kwargs): + raise NotImplementedError("Online not using this.") + + def postprocess(self): + raise NotImplementedError("Online not using this.") diff --git a/paddlespeech/server/engine/asr/online/python/asr_engine.py b/paddlespeech/server/engine/asr/online/python/asr_engine.py index 9801a6fc..231137af 100644 --- a/paddlespeech/server/engine/asr/online/python/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/python/asr_engine.py @@ -125,7 +125,6 @@ class PaddleASRConnectionHanddler: self.remained_wav = None self.cached_feat = None - if "deepspeech2" in self.model_type: return @@ -698,7 +697,7 @@ class ASRServerExecutor(ASRExecutor): self.task_resource = CommonTaskResource( task='asr', model_format='dynamic', inference_mode='online') - def update_config(self)->None: + def update_config(self) -> None: if "deepspeech2" in self.model_type: with UpdateConfig(self.config): # download lm @@ -720,7 +719,7 @@ class ASRServerExecutor(ASRExecutor): self.config.decode.decoding_method = self.decode_method # update num_decoding_left_chunks if self.num_decoding_left_chunks: - assert self.num_decoding_left_chunks == -1 or self.num_decoding_left_chunks >= 0, f"num_decoding_left_chunks should be -1 or >=0" + assert self.num_decoding_left_chunks == -1 or self.num_decoding_left_chunks >= 0, "num_decoding_left_chunks should be -1 or >=0" self.config.decode.num_decoding_left_chunks = self.num_decoding_left_chunks # we only support ctc_prefix_beam_search and attention_rescoring dedoding 
method # Generally we set the decoding_method to attention_rescoring @@ -738,17 +737,17 @@ class ASRServerExecutor(ASRExecutor): raise Exception(f"not support: {self.model_type}") def init_model(self) -> None: - if "deepspeech2" in self.model_type : + if "deepspeech2" in self.model_type: # AM predictor logger.info("ASR engine start to init the am predictor") self.am_predictor = init_predictor( model_file=self.am_model, params_file=self.am_params, predictor_conf=self.am_predictor_conf) - elif "conformer" in self.model_type or "transformer" in self.model_type : + elif "conformer" in self.model_type or "transformer" in self.model_type: # load model # model_type: {model_name}_{dataset} - model_name = self.model_type[:self.model_type.rindex('_')] + model_name = self.model_type[:self.model_type.rindex('_')] logger.info(f"model name: {model_name}") model_class = self.task_resource.get_model_class(model_name) model = model_class.from_config(self.config) @@ -758,7 +757,6 @@ class ASRServerExecutor(ASRExecutor): else: raise Exception(f"not support: {self.model_type}") - def _init_from_path(self, model_type: str=None, am_model: Optional[os.PathLike]=None, @@ -786,7 +784,6 @@ class ASRServerExecutor(ASRExecutor): self.am_predictor_conf = am_predictor_conf logger.info(f"model_type: {self.model_type}") - sample_rate_str = '16k' if sample_rate == 16000 else '8k' tag = model_type + '-' + lang + '-' + sample_rate_str self.task_resource.set_task_model(model_tag=tag) @@ -831,7 +828,7 @@ class ASRServerExecutor(ASRExecutor): spm_model_prefix=self.config.spm_model_prefix) self.update_config() - + # AM predictor self.init_model() @@ -850,7 +847,6 @@ class ASREngine(BaseEngine): super(ASREngine, self).__init__() logger.info("create the online asr engine resource instance") - def init_model(self) -> bool: if not self.executor._init_from_path( model_type=self.config.model_type, @@ -865,7 +861,6 @@ class ASREngine(BaseEngine): return False return True - def init(self, config: dict) -> bool: 
"""init engine resource diff --git a/paddlespeech/server/engine/engine_factory.py b/paddlespeech/server/engine/engine_factory.py index 019e4684..cfb0deb3 100644 --- a/paddlespeech/server/engine/engine_factory.py +++ b/paddlespeech/server/engine/engine_factory.py @@ -28,6 +28,9 @@ class EngineFactory(object): elif engine_name == 'asr' and engine_type == 'online': from paddlespeech.server.engine.asr.online.python.asr_engine import ASREngine return ASREngine() + elif engine_name == 'asr' and engine_type == 'online-inference': + from paddlespeech.server.engine.asr.online.paddleinference.asr_engine import ASREngine + return ASREngine() elif engine_name == 'asr' and engine_type == 'online-onnx': from paddlespeech.server.engine.asr.online.onnx.asr_engine import ASREngine return ASREngine() diff --git a/paddlespeech/server/ws/asr_api.py b/paddlespeech/server/ws/asr_api.py index 23609b41..ae1c8831 100644 --- a/paddlespeech/server/ws/asr_api.py +++ b/paddlespeech/server/ws/asr_api.py @@ -92,7 +92,7 @@ async def websocket_endpoint(websocket: WebSocket): else: resp = {"status": "ok", "message": "no valid json data"} await websocket.send_json(resp) - + elif "bytes" in message: # bytes for the pcm data message = message["bytes"] diff --git a/utils/zh_tn.py b/utils/zh_tn.py index 73bb8af2..6fee626b 100755 --- a/utils/zh_tn.py +++ b/utils/zh_tn.py @@ -747,7 +747,7 @@ def num2chn(number_string, previous_symbol, (CNU, type(None))): if next_symbol.power != 1 and ( (previous_symbol is None) or - (previous_symbol.power != 1)): + (previous_symbol.power != 1)): # noqa: E129 result_symbols[i] = liang # if big is True, '两' will not be used and `alt_two` has no impact on output From 3cee7db021b971d5221324cbaa17638f1b4bb9f1 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Wed, 15 Jun 2022 10:20:44 +0000 Subject: [PATCH 05/30] onnx ds2 straming asr --- .../conf/ws_ds2_application.yaml | 43 +- paddlespeech/resource/pretrained_models.py | 16 + .../server/conf/ws_ds2_application.yaml | 49 +- 
.../engine/asr/online/onnx/asr_engine.py | 520 ++++++++++++++++++ .../asr/online/paddleinference/asr_engine.py | 1 - .../engine/asr/online/python/asr_engine.py | 1 - paddlespeech/server/engine/engine_factory.py | 3 +- paddlespeech/server/utils/onnx_infer.py | 26 +- 8 files changed, 638 insertions(+), 21 deletions(-) create mode 100644 paddlespeech/server/engine/asr/online/onnx/asr_engine.py diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index 4f75c07b..f0a98e72 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -7,11 +7,11 @@ host: 0.0.0.0 port: 8090 # The task format in the engin_list is: _ -# task choices = ['asr_online'] +# task choices = ['asr_online-inference', 'asr_online-onnx'] # protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. protocol: 'websocket' -engine_list: ['asr_online-inference'] +engine_list: ['asr_online-onnx'] ################################################################################# @@ -19,10 +19,10 @@ engine_list: ['asr_online-inference'] ################################################################################# ################################### ASR ######################################### -################### speech task: asr; engine_type: online ####################### +################### speech task: asr; engine_type: online-inference ####################### asr_online-inference: model_type: 'deepspeech2online_aishell' - am_model: # the pdmodel file of am static model [optional] + am_model: # the pdmodel file of am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' sample_rate: 16000 @@ -47,3 +47,38 @@ asr_online-inference: shift_n: 4 # frame window_ms: 20 # ms shift_ms: 10 # ms + + + +################################### ASR ######################################### 
+################### speech task: asr; engine_type: online-onnx ####################### +asr_online-onnx: + model_type: 'deepspeech2online_aishell' + am_model: # the pdmodel file of am static model [optional] + am_params: # the pdiparams file of am static model [optional] + lang: 'zh' + sample_rate: 16000 + cfg_path: + decode_method: + num_decoding_left_chunks: + force_yes: True + device: 'cpu' # cpu or gpu:id + + # https://onnxruntime.ai/docs/api/python/api_summary.html#inferencesession + am_predictor_conf: + device: 'cpu' # set 'gpu:id' or 'cpu' + graph_optimization_level: 0 + intra_op_num_threads: 0 # Sets the number of threads used to parallelize the execution within nodes. + inter_op_num_threads: 0 # Sets the number of threads used to parallelize the execution of the graph (across nodes). + log_severity_level: 2 # Log severity level. Applies to session load, initialization, etc. 0:Verbose, 1:Info, 2:Warning. 3:Error, 4:Fatal. Default is 2. + log_verbosity_level: 0 # VLOG level if DEBUG build and session_log_severity_level is 0. Applies to session load, initialization, etc. Default is 0. 
+ + chunk_buffer_conf: + frame_duration_ms: 80 + shift_ms: 40 + sample_rate: 16000 + sample_width: 2 + window_n: 7 # frame + shift_n: 4 # frame + window_ms: 20 # ms + shift_ms: 10 # ms diff --git a/paddlespeech/resource/pretrained_models.py b/paddlespeech/resource/pretrained_models.py index eb6ca0cc..ba4a79d9 100644 --- a/paddlespeech/resource/pretrained_models.py +++ b/paddlespeech/resource/pretrained_models.py @@ -15,6 +15,7 @@ __all__ = [ 'asr_dynamic_pretrained_models', 'asr_static_pretrained_models', + 'asr_onnx_pretrained_models', 'cls_dynamic_pretrained_models', 'cls_static_pretrained_models', 'st_dynamic_pretrained_models', @@ -246,6 +247,21 @@ asr_static_pretrained_models = { }, } + +asr_onnx_pretrained_models = { + "deepspeech2online_wenetspeech-zh-16k": { + '1.0': { + 'url': + 'https://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.2.model.tar.gz', + 'md5': 'b0c77e7f8881e0a27b82127d1abb8d5f', + 'cfg_path':'model.yaml', + 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_10', + 'lm_url': 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': '29e02312deb2e59b3c8686c7966d4fe3' + }, + }, +} + # --------------------------------- # -------------- CLS -------------- # --------------------------------- diff --git a/paddlespeech/server/conf/ws_ds2_application.yaml b/paddlespeech/server/conf/ws_ds2_application.yaml index fb16e5bd..f0a98e72 100644 --- a/paddlespeech/server/conf/ws_ds2_application.yaml +++ b/paddlespeech/server/conf/ws_ds2_application.yaml @@ -7,11 +7,11 @@ host: 0.0.0.0 port: 8090 # The task format in the engin_list is: _ -# task choices = ['asr_online', 'tts_online'] -# protocol = ['websocket', 'http'] (only one can be selected). +# task choices = ['asr_online-inference', 'asr_online-onnx'] +# protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. 
protocol: 'websocket' -engine_list: ['asr_online-inference'] +engine_list: ['asr_online-onnx'] ################################################################################# @@ -19,18 +19,18 @@ engine_list: ['asr_online-inference'] ################################################################################# ################################### ASR ######################################### -################### speech task: asr; engine_type: online ####################### +################### speech task: asr; engine_type: online-inference ####################### asr_online-inference: model_type: 'deepspeech2online_aishell' - am_model: # the pdmodel file of am static model [optional] + am_model: # the pdmodel file of am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' sample_rate: 16000 cfg_path: decode_method: - num_decoding_left_chunks: + num_decoding_left_chunks: force_yes: True - device: # cpu or gpu:id + device: 'cpu' # cpu or gpu:id am_predictor_conf: device: # set 'gpu:id' or 'cpu' @@ -47,3 +47,38 @@ asr_online-inference: shift_n: 4 # frame window_ms: 20 # ms shift_ms: 10 # ms + + + +################################### ASR ######################################### +################### speech task: asr; engine_type: online-onnx ####################### +asr_online-onnx: + model_type: 'deepspeech2online_aishell' + am_model: # the pdmodel file of am static model [optional] + am_params: # the pdiparams file of am static model [optional] + lang: 'zh' + sample_rate: 16000 + cfg_path: + decode_method: + num_decoding_left_chunks: + force_yes: True + device: 'cpu' # cpu or gpu:id + + # https://onnxruntime.ai/docs/api/python/api_summary.html#inferencesession + am_predictor_conf: + device: 'cpu' # set 'gpu:id' or 'cpu' + graph_optimization_level: 0 + intra_op_num_threads: 0 # Sets the number of threads used to parallelize the execution within nodes. 
+ inter_op_num_threads: 0 # Sets the number of threads used to parallelize the execution of the graph (across nodes). + log_severity_level: 2 # Log severity level. Applies to session load, initialization, etc. 0:Verbose, 1:Info, 2:Warning. 3:Error, 4:Fatal. Default is 2. + log_verbosity_level: 0 # VLOG level if DEBUG build and session_log_severity_level is 0. Applies to session load, initialization, etc. Default is 0. + + chunk_buffer_conf: + frame_duration_ms: 80 + shift_ms: 40 + sample_rate: 16000 + sample_width: 2 + window_n: 7 # frame + shift_n: 4 # frame + window_ms: 20 # ms + shift_ms: 10 # ms diff --git a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py new file mode 100644 index 00000000..0bd2f950 --- /dev/null +++ b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py @@ -0,0 +1,520 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import sys +from typing import ByteString +from typing import Optional + +import numpy as np +import paddle +from numpy import float32 +from yacs.config import CfgNode + +from paddlespeech.cli.asr.infer import ASRExecutor +from paddlespeech.cli.log import logger +from paddlespeech.cli.utils import MODEL_HOME +from paddlespeech.resource import CommonTaskResource +from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer +from paddlespeech.s2t.modules.ctc import CTCDecoder +from paddlespeech.s2t.transform.transformation import Transformation +from paddlespeech.s2t.utils.utility import UpdateConfig +from paddlespeech.server.engine.base_engine import BaseEngine +from paddlespeech.server.utils import onnx_infer + +__all__ = ['PaddleASRConnectionHanddler', 'ASRServerExecutor', 'ASREngine'] + + +# ASR server connection process class +class PaddleASRConnectionHanddler: + def __init__(self, asr_engine): + """Init a Paddle ASR Connection Handler instance + + Args: + asr_engine (ASREngine): the global asr engine + """ + super().__init__() + logger.info( + "create an paddle asr connection handler to process the websocket connection" + ) + self.config = asr_engine.config # server config + self.model_config = asr_engine.executor.config + self.asr_engine = asr_engine + + # model_type, sample_rate and text_feature is shared for deepspeech2 and conformer + self.model_type = self.asr_engine.executor.model_type + self.sample_rate = self.asr_engine.executor.sample_rate + # tokens to text + self.text_feature = self.asr_engine.executor.text_feature + + # extract feat, new only fbank in conformer model + self.preprocess_conf = self.model_config.preprocess_config + self.preprocess_args = {"train": False} + self.preprocessing = Transformation(self.preprocess_conf) + + # frame window and frame shift, in samples unit + self.win_length = self.preprocess_conf.process[0]['win_length'] + self.n_shift = self.preprocess_conf.process[0]['n_shift'] + + assert 
self.preprocess_conf.process[0]['fs'] == self.sample_rate, ( + self.sample_rate, self.preprocess_conf.process[0]['fs']) + self.frame_shift_in_ms = int( + self.n_shift / self.preprocess_conf.process[0]['fs'] * 1000) + + self.continuous_decoding = self.config.get("continuous_decoding", False) + self.init_decoder() + self.reset() + + def init_decoder(self): + if "deepspeech2" in self.model_type: + assert self.continuous_decoding is False, "ds2 model not support endpoint" + self.am_predictor = self.asr_engine.executor.am_predictor + + self.decoder = CTCDecoder( + odim=self.model_config.output_dim, # is in vocab + enc_n_units=self.model_config.rnn_layer_size * 2, + blank_id=self.model_config.blank_id, + dropout_rate=0.0, + reduction=True, # sum + batch_average=True, # sum / batch_size + grad_norm_type=self.model_config.get('ctc_grad_norm_type', + None)) + + cfg = self.model_config.decode + decode_batch_size = 1 # for online + self.decoder.init_decoder( + decode_batch_size, self.text_feature.vocab_list, + cfg.decoding_method, cfg.lang_model_path, cfg.alpha, cfg.beta, + cfg.beam_size, cfg.cutoff_prob, cfg.cutoff_top_n, + cfg.num_proc_bsearch) + else: + raise ValueError(f"Not supported: {self.model_type}") + + def model_reset(self): + # cache for audio and feat + self.remained_wav = None + self.cached_feat = None + + def output_reset(self): + ## outputs + # partial/ending decoding results + self.result_transcripts = [''] + + def reset_continuous_decoding(self): + """ + when in continous decoding, reset for next utterance. 
+ """ + self.global_frame_offset = self.num_frames + self.model_reset() + + def reset(self): + if "deepspeech2" in self.model_type: + # for deepspeech2 + # init state + self.chunk_state_h_box = np.zeros( + (self.model_config.num_rnn_layers, 1, + self.model_config.rnn_layer_size), + dtype=float32) + self.chunk_state_c_box = np.zeros( + (self.model_config.num_rnn_layers, 1, + self.model_config.rnn_layer_size), + dtype=float32) + self.decoder.reset_decoder(batch_size=1) + else: + raise NotImplementedError(f"{self.model_type} not support.") + + self.device = None + + ## common + # global sample and frame step + self.num_samples = 0 + self.global_frame_offset = 0 + # frame step of cur utterance + self.num_frames = 0 + + ## endpoint + self.endpoint_state = False # True for detect endpoint + + ## conformer + self.model_reset() + + ## outputs + self.output_reset() + + def extract_feat(self, samples: ByteString): + logger.info("Online ASR extract the feat") + samples = np.frombuffer(samples, dtype=np.int16) + assert samples.ndim == 1 + + self.num_samples += samples.shape[0] + logger.info( + f"This package receive {samples.shape[0]} pcm data. 
Global samples:{self.num_samples}" + ) + + # self.reamined_wav stores all the samples, + # include the original remained_wav and this package samples + if self.remained_wav is None: + self.remained_wav = samples + else: + assert self.remained_wav.ndim == 1 # (T,) + self.remained_wav = np.concatenate([self.remained_wav, samples]) + logger.info( + f"The concatenation of remain and now audio samples length is: {self.remained_wav.shape}" + ) + + if len(self.remained_wav) < self.win_length: + # samples not enough for feature window + return 0 + + # fbank + x_chunk = self.preprocessing(self.remained_wav, **self.preprocess_args) + x_chunk = paddle.to_tensor(x_chunk, dtype="float32").unsqueeze(axis=0) + + # feature cache + if self.cached_feat is None: + self.cached_feat = x_chunk + else: + assert (len(x_chunk.shape) == 3) # (B,T,D) + assert (len(self.cached_feat.shape) == 3) # (B,T,D) + self.cached_feat = paddle.concat( + [self.cached_feat, x_chunk], axis=1) + + # set the feat device + if self.device is None: + self.device = self.cached_feat.place + + # cur frame step + num_frames = x_chunk.shape[1] + + # global frame step + self.num_frames += num_frames + + # update remained wav + self.remained_wav = self.remained_wav[self.n_shift * num_frames:] + + logger.info( + f"process the audio feature success, the cached feat shape: {self.cached_feat.shape}" + ) + logger.info( + f"After extract feat, the cached remain the audio samples: {self.remained_wav.shape}" + ) + logger.info(f"global samples: {self.num_samples}") + logger.info(f"global frames: {self.num_frames}") + + def decode(self, is_finished=False): + """advance decoding + + Args: + is_finished (bool, optional): Is last frame or not. Defaults to False. + + Returns: + None: + """ + if "deepspeech2" in self.model_type: + decoding_chunk_size = 1 # decoding chunk size = 1. 
int decoding frame unit + + context = 7 # context=7, in audio frame unit + subsampling = 4 # subsampling=4, in audio frame unit + + cached_feature_num = context - subsampling + # decoding window for model, in audio frame unit + decoding_window = (decoding_chunk_size - 1) * subsampling + context + # decoding stride for model, in audio frame unit + stride = subsampling * decoding_chunk_size + + if self.cached_feat is None: + logger.info("no audio feat, please input more pcm data") + return + + num_frames = self.cached_feat.shape[1] + logger.info( + f"Required decoding window {decoding_window} frames, and the connection has {num_frames} frames" + ) + + # the cached feat must be larger decoding_window + if num_frames < decoding_window and not is_finished: + logger.info( + f"frame feat num is less than {decoding_window}, please input more pcm data" + ) + return None, None + + # if is_finished=True, we need at least context frames + if num_frames < context: + logger.info( + "flast {num_frames} is less than context {context} frames, and we cannot do model forward" + ) + return None, None + + logger.info("start to do model forward") + # num_frames - context + 1 ensure that current frame can get context window + if is_finished: + # if get the finished chunk, we need process the last context + left_frames = context + else: + # we only process decoding_window frames for one chunk + left_frames = decoding_window + + end = None + for cur in range(0, num_frames - left_frames + 1, stride): + end = min(cur + decoding_window, num_frames) + + # extract the audio + x_chunk = self.cached_feat[:, cur:end, :].numpy() + x_chunk_lens = np.array([x_chunk.shape[1]]) + + trans_best = self.decode_one_chunk(x_chunk, x_chunk_lens) + + self.result_transcripts = [trans_best] + + # update feat cache + self.cached_feat = self.cached_feat[:, end - cached_feature_num:, :] + + # return trans_best[0] + else: + raise Exception(f"{self.model_type} not support paddleinference.") + + @paddle.no_grad() + 
def decode_one_chunk(self, x_chunk, x_chunk_lens): + """forward one chunk frames + + Args: + x_chunk (np.ndarray): (B,T,D), audio frames. + x_chunk_lens ([type]): (B,), audio frame lens + + Returns: + logprob: poster probability. + """ + logger.info("start to decoce one chunk for deepspeech2") + # state_c, state_h, audio_lens, audio + # 'chunk_state_c_box', 'chunk_state_h_box', 'audio_chunk_lens', 'audio_chunk' + input_names = [n.name for n in self.am_predictor.get_inputs()] + logger.info(f"ort inputs: {input_names}") + # 'softmax_0.tmp_0', 'tmp_5', 'concat_0.tmp_0', 'concat_1.tmp_0' + # audio, audio_lens, state_h, state_c + output_names = [n.name for n in self.am_predictor.get_outputs()] + logger.info(f"ort outpus: {output_names}") + assert (len(input_names) == len(output_names)) + assert isinstance(input_names[0], str) + + input_datas = [self.chunk_state_c_box, self.chunk_state_h_box, x_chunk_lens, x_chunk] + feeds = dict(zip(input_names, input_datas)) + + outputs = self.am_predictor.run( + [*output_names], + {**feeds}) + + output_chunk_probs, output_chunk_lens, self.chunk_state_h_box, self.chunk_state_c_box = outputs + self.decoder.next(output_chunk_probs, output_chunk_lens) + trans_best, trans_beam = self.decoder.decode() + logger.info(f"decode one best result for deepspeech2: {trans_best[0]}") + return trans_best[0] + + def get_result(self): + """return partial/ending asr result. + + Returns: + str: one best result of partial/ending. 
+ """ + if len(self.result_transcripts) > 0: + return self.result_transcripts[0] + else: + return '' + + +class ASRServerExecutor(ASRExecutor): + def __init__(self): + super().__init__() + self.task_resource = CommonTaskResource( + task='asr', model_format='static', inference_mode='online') + + def update_config(self) -> None: + if "deepspeech2" in self.model_type: + with UpdateConfig(self.config): + # download lm + self.config.decode.lang_model_path = os.path.join( + MODEL_HOME, 'language_model', + self.config.decode.lang_model_path) + + lm_url = self.task_resource.res_dict['lm_url'] + lm_md5 = self.task_resource.res_dict['lm_md5'] + logger.info(f"Start to load language model {lm_url}") + self.download_lm( + lm_url, + os.path.dirname(self.config.decode.lang_model_path), lm_md5) + else: + raise NotImplementedError( + f"{self.model_type} not support paddleinference.") + + def init_model(self) -> None: + + if "deepspeech2" in self.model_type: + # AM predictor + logger.info("ASR engine start to init the am predictor") + self.am_predictor = onnx_infer.get_sess( + model_path=self.am_model, sess_conf=self.am_predictor_conf) + else: + raise NotImplementedError( + f"{self.model_type} not support paddleinference.") + + def _init_from_path(self, + model_type: str=None, + am_model: Optional[os.PathLike]=None, + am_params: Optional[os.PathLike]=None, + lang: str='zh', + sample_rate: int=16000, + cfg_path: Optional[os.PathLike]=None, + decode_method: str='attention_rescoring', + num_decoding_left_chunks: int=-1, + am_predictor_conf: dict=None): + """ + Init model and other resources from a specific path. 
+ """ + if not model_type or not lang or not sample_rate: + logger.error( + "The model type or lang or sample rate is None, please input an valid server parameter yaml" + ) + return False + assert am_params is None, "am_params not used in onnx engine" + + self.model_type = model_type + self.sample_rate = sample_rate + self.decode_method = decode_method + self.num_decoding_left_chunks = num_decoding_left_chunks + # conf for paddleinference predictor or onnx + self.am_predictor_conf = am_predictor_conf + logger.info(f"model_type: {self.model_type}") + + sample_rate_str = '16k' if sample_rate == 16000 else '8k' + tag = model_type + '-' + lang + '-' + sample_rate_str + self.task_resource.set_task_model(model_tag=tag) + + if cfg_path is None: + self.res_path = self.task_resource.res_dir + self.cfg_path = os.path.join( + self.res_path, self.task_resource.res_dict['cfg_path']) + else: + self.cfg_path = os.path.abspath(cfg_path) + self.res_path = os.path.dirname( + os.path.dirname(os.path.abspath(self.cfg_path))) + + self.am_model = os.path.join(self.res_path, + self.task_resource.res_dict['model']) if am_model is None else os.path.abspath(am_model) + self.am_params = os.path.join(self.res_path, + self.task_resource.res_dict['params']) if am_params is None else os.path.abspath(am_params) + + logger.info("Load the pretrained model:") + logger.info(f" tag = {tag}") + logger.info(f" res_path: {self.res_path}") + logger.info(f" cfg path: {self.cfg_path}") + logger.info(f" am_model path: {self.am_model}") + logger.info(f" am_params path: {self.am_params}") + + #Init body. 
+ self.config = CfgNode(new_allowed=True) + self.config.merge_from_file(self.cfg_path) + + if self.config.spm_model_prefix: + self.config.spm_model_prefix = os.path.join( + self.res_path, self.config.spm_model_prefix) + logger.info(f"spm model path: {self.config.spm_model_prefix}") + + self.vocab = self.config.vocab_filepath + + self.text_feature = TextFeaturizer( + unit_type=self.config.unit_type, + vocab=self.config.vocab_filepath, + spm_model_prefix=self.config.spm_model_prefix) + + self.update_config() + + # AM predictor + self.init_model() + + logger.info(f"create the {model_type} model success") + return True + + +class ASREngine(BaseEngine): + """ASR model resource + + Args: + metaclass: Defaults to Singleton. + """ + + def __init__(self): + super(ASREngine, self).__init__() + + def init_model(self) -> bool: + if not self.executor._init_from_path( + model_type=self.config.model_type, + am_model=self.config.am_model, + am_params=self.config.am_params, + lang=self.config.lang, + sample_rate=self.config.sample_rate, + cfg_path=self.config.cfg_path, + decode_method=self.config.decode_method, + num_decoding_left_chunks=self.config.num_decoding_left_chunks, + am_predictor_conf=self.config.am_predictor_conf): + return False + return True + + def init(self, config: dict) -> bool: + """init engine resource + + Args: + config_file (str): config file + + Returns: + bool: init failed or success + """ + self.config = config + self.executor = ASRServerExecutor() + + try: + self.device = self.config.get("device", paddle.get_device()) + paddle.set_device(self.device) + except BaseException as e: + logger.error( + f"Set device failed, please check if device '{self.device}' is already used and the parameter 'device' in the yaml file" + ) + logger.error( + "If all GPU or XPU is used, you can set the server to 'cpu'") + sys.exit(-1) + + logger.info(f"paddlespeech_server set the device: {self.device}") + + if not self.init_model(): + logger.error( + "Init the ASR server occurs 
error, please check the server configuration yaml" + ) + return False + + logger.info("Initialize ASR server engine successfully.") + return True + + def new_handler(self): + """New handler from model. + + Returns: + PaddleASRConnectionHanddler: asr handler instance + """ + return PaddleASRConnectionHanddler(self) + + def preprocess(self, *args, **kwargs): + raise NotImplementedError("Online not using this.") + + def run(self, *args, **kwargs): + raise NotImplementedError("Online not using this.") + + def postprocess(self): + raise NotImplementedError("Online not using this.") diff --git a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py index 93edd701..fb24cab9 100644 --- a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py @@ -471,7 +471,6 @@ class ASREngine(BaseEngine): def __init__(self): super(ASREngine, self).__init__() - logger.info("create the online asr engine resource instance") def init_model(self) -> bool: if not self.executor._init_from_path( diff --git a/paddlespeech/server/engine/asr/online/python/asr_engine.py b/paddlespeech/server/engine/asr/online/python/asr_engine.py index 231137af..c22cbbe5 100644 --- a/paddlespeech/server/engine/asr/online/python/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/python/asr_engine.py @@ -845,7 +845,6 @@ class ASREngine(BaseEngine): def __init__(self): super(ASREngine, self).__init__() - logger.info("create the online asr engine resource instance") def init_model(self) -> bool: if not self.executor._init_from_path( diff --git a/paddlespeech/server/engine/engine_factory.py b/paddlespeech/server/engine/engine_factory.py index cfb0deb3..3c1c3d53 100644 --- a/paddlespeech/server/engine/engine_factory.py +++ b/paddlespeech/server/engine/engine_factory.py @@ -12,13 +12,14 @@ # See the License for the specific language governing 
permissions and # limitations under the License. from typing import Text +from ..utils.log import logger __all__ = ['EngineFactory'] - class EngineFactory(object): @staticmethod def get_engine(engine_name: Text, engine_type: Text): + logger.info(f"{engine_name} : {engine_type} engine.") if engine_name == 'asr' and engine_type == 'inference': from paddlespeech.server.engine.asr.paddleinference.asr_engine import ASREngine return ASREngine() diff --git a/paddlespeech/server/utils/onnx_infer.py b/paddlespeech/server/utils/onnx_infer.py index ac11c534..4287477f 100644 --- a/paddlespeech/server/utils/onnx_infer.py +++ b/paddlespeech/server/utils/onnx_infer.py @@ -16,21 +16,33 @@ from typing import Optional import onnxruntime as ort +from .log import logger + def get_sess(model_path: Optional[os.PathLike]=None, sess_conf: dict=None): + logger.info(f"ort sessconf: {sess_conf}") sess_options = ort.SessionOptions() sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL + if sess_conf.get('graph_optimization_level', 99) == 0: + sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_DISABLE_ALL sess_options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL - if "gpu" in sess_conf["device"]: + # "gpu:0" + providers = ['CPUExecutionProvider'] + if "gpu" in sess_conf.get("device", ""): + providers = ['CUDAExecutionProvider'] # fastspeech2/mb_melgan can't use trt now! 
- if sess_conf["use_trt"]: + if sess_conf.get("use_trt", 0): providers = ['TensorrtExecutionProvider'] - else: - providers = ['CUDAExecutionProvider'] - elif sess_conf["device"] == "cpu": - providers = ['CPUExecutionProvider'] - sess_options.intra_op_num_threads = sess_conf["cpu_threads"] + logger.info(f"ort providers: {providers}") + + if 'cpu_threads' in sess_conf: + sess_options.intra_op_num_threads = sess_conf.get("cpu_threads", 0) + else: + sess_options.intra_op_num_threads = sess_conf.get("intra_op_num_threads", 0) + + sess_options.inter_op_num_threads = sess_conf.get("inter_op_num_threads", 0) + sess = ort.InferenceSession( model_path, providers=providers, sess_options=sess_options) return sess From 42d28b961ca16adf3f0e7280bd6f16d8fd11c8f3 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Thu, 16 Jun 2022 05:00:30 +0000 Subject: [PATCH 06/30] fix pretrian model error --- paddlespeech/resource/pretrained_models.py | 73 ++++++++++++++-------- 1 file changed, 48 insertions(+), 25 deletions(-) diff --git a/paddlespeech/resource/pretrained_models.py b/paddlespeech/resource/pretrained_models.py index 196edd50..f1371347 100644 --- a/paddlespeech/resource/pretrained_models.py +++ b/paddlespeech/resource/pretrained_models.py @@ -170,14 +170,22 @@ asr_dynamic_pretrained_models = { '1.0.2': { 'url': 'http://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.2.model.tar.gz', - 'md5': '4dd42cfce9aaa54db0ec698da6c48ec5', - 'cfg_path': 'model.yaml', - 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_1', - 'model':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', - 'params':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', - 'onnx_model': 'onnx/model.onnx' - 'lm_url':'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', - 'lm_md5':'29e02312deb2e59b3c8686c7966d4fe3' + 'md5': + '4dd42cfce9aaa54db0ec698da6c48ec5', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 
'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' }, }, "deepspeech2offline_librispeech-en-16k": { @@ -241,32 +249,47 @@ asr_static_pretrained_models = { '1.0.2': { 'url': 'http://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.2.model.tar.gz', - 'md5': '4dd42cfce9aaa54db0ec698da6c48ec5', - 'cfg_path': 'model.yaml', - 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_1', - 'model':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', - 'params':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', - 'onnx_model': 'onnx/model.onnx' - 'lm_url':'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', - 'lm_md5':'29e02312deb2e59b3c8686c7966d4fe3' + 'md5': + '4dd42cfce9aaa54db0ec698da6c48ec5', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' }, }, } - asr_onnx_pretrained_models = { "deepspeech2online_aishell-zh-16k": { '1.0.2': { 'url': 'http://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.2.model.tar.gz', - 'md5': '4dd42cfce9aaa54db0ec698da6c48ec5', - 'cfg_path': 'model.yaml', - 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_1', - 'model':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', - 'params':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', - 'onnx_model': 
'onnx/model.onnx' - 'lm_url':'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', - 'lm_md5':'29e02312deb2e59b3c8686c7966d4fe3' + 'md5': + '4dd42cfce9aaa54db0ec698da6c48ec5', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' }, }, } From 9106daa2a3b6cce4017fd4b268461b33d2418b18 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Thu, 16 Jun 2022 05:01:08 +0000 Subject: [PATCH 07/30] code format --- .../conf/ws_ds2_application.yaml | 2 +- paddlespeech/resource/resource.py | 4 +++- .../engine/asr/online/onnx/asr_engine.py | 20 ++++++++++--------- paddlespeech/server/engine/engine_factory.py | 3 +++ paddlespeech/server/utils/onnx_infer.py | 9 +++++---- .../examples/ds2_ol/onnx/local/infer_check.py | 4 ++-- 6 files changed, 25 insertions(+), 17 deletions(-) diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index f0a98e72..f67d3157 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -11,7 +11,7 @@ port: 8090 # protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. 
protocol: 'websocket' -engine_list: ['asr_online-onnx'] +engine_list: ['asr_online-inference'] ################################################################################# diff --git a/paddlespeech/resource/resource.py b/paddlespeech/resource/resource.py index 369dba90..2e637f0f 100644 --- a/paddlespeech/resource/resource.py +++ b/paddlespeech/resource/resource.py @@ -164,9 +164,11 @@ class CommonTaskResource: try: import_models = '{}_{}_pretrained_models'.format(self.task, self.model_format) + print(f"from .pretrained_models import {import_models}") exec('from .pretrained_models import {}'.format(import_models)) models = OrderedDict(locals()[import_models]) - except ImportError: + except Exception as e: + print(e) models = OrderedDict({}) # no models. finally: return models diff --git a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py index 0bd2f950..97addc7a 100644 --- a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py @@ -306,12 +306,13 @@ class PaddleASRConnectionHanddler: assert (len(input_names) == len(output_names)) assert isinstance(input_names[0], str) - input_datas = [self.chunk_state_c_box, self.chunk_state_h_box, x_chunk_lens, x_chunk] + input_datas = [ + self.chunk_state_c_box, self.chunk_state_h_box, x_chunk_lens, + x_chunk + ] feeds = dict(zip(input_names, input_datas)) - outputs = self.am_predictor.run( - [*output_names], - {**feeds}) + outputs = self.am_predictor.run([*output_names], {**feeds}) output_chunk_probs, output_chunk_lens, self.chunk_state_h_box, self.chunk_state_c_box = outputs self.decoder.next(output_chunk_probs, output_chunk_lens) @@ -335,7 +336,7 @@ class ASRServerExecutor(ASRExecutor): def __init__(self): super().__init__() self.task_resource = CommonTaskResource( - task='asr', model_format='static', inference_mode='online') + task='asr', model_format='onnx', inference_mode='online') def 
update_config(self) -> None: if "deepspeech2" in self.model_type: @@ -407,10 +408,11 @@ class ASRServerExecutor(ASRExecutor): self.res_path = os.path.dirname( os.path.dirname(os.path.abspath(self.cfg_path))) - self.am_model = os.path.join(self.res_path, - self.task_resource.res_dict['model']) if am_model is None else os.path.abspath(am_model) - self.am_params = os.path.join(self.res_path, - self.task_resource.res_dict['params']) if am_params is None else os.path.abspath(am_params) + self.am_model = os.path.join(self.res_path, self.task_resource.res_dict[ + 'model']) if am_model is None else os.path.abspath(am_model) + self.am_params = os.path.join( + self.res_path, self.task_resource.res_dict[ + 'params']) if am_params is None else os.path.abspath(am_params) logger.info("Load the pretrained model:") logger.info(f" tag = {tag}") diff --git a/paddlespeech/server/engine/engine_factory.py b/paddlespeech/server/engine/engine_factory.py index 3c1c3d53..6a66a002 100644 --- a/paddlespeech/server/engine/engine_factory.py +++ b/paddlespeech/server/engine/engine_factory.py @@ -12,14 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from typing import Text + from ..utils.log import logger __all__ = ['EngineFactory'] + class EngineFactory(object): @staticmethod def get_engine(engine_name: Text, engine_type: Text): logger.info(f"{engine_name} : {engine_type} engine.") + if engine_name == 'asr' and engine_type == 'inference': from paddlespeech.server.engine.asr.paddleinference.asr_engine import ASREngine return ASREngine() diff --git a/paddlespeech/server/utils/onnx_infer.py b/paddlespeech/server/utils/onnx_infer.py index 4287477f..1c9d878f 100644 --- a/paddlespeech/server/utils/onnx_infer.py +++ b/paddlespeech/server/utils/onnx_infer.py @@ -35,14 +35,15 @@ def get_sess(model_path: Optional[os.PathLike]=None, sess_conf: dict=None): if sess_conf.get("use_trt", 0): providers = ['TensorrtExecutionProvider'] logger.info(f"ort providers: {providers}") - + if 'cpu_threads' in sess_conf: - sess_options.intra_op_num_threads = sess_conf.get("cpu_threads", 0) + sess_options.intra_op_num_threads = sess_conf.get("cpu_threads", 0) else: - sess_options.intra_op_num_threads = sess_conf.get("intra_op_num_threads", 0) + sess_options.intra_op_num_threads = sess_conf.get( + "intra_op_num_threads", 0) sess_options.inter_op_num_threads = sess_conf.get("inter_op_num_threads", 0) - + sess = ort.InferenceSession( model_path, providers=providers, sess_options=sess_options) return sess diff --git a/speechx/examples/ds2_ol/onnx/local/infer_check.py b/speechx/examples/ds2_ol/onnx/local/infer_check.py index a5ec7ce3..f821baa1 100755 --- a/speechx/examples/ds2_ol/onnx/local/infer_check.py +++ b/speechx/examples/ds2_ol/onnx/local/infer_check.py @@ -27,7 +27,8 @@ def parse_args(): '--input_file', type=str, default="static_ds2online_inputs.pickle", - help="aishell ds2 input data file. For wenetspeech, we only feed for infer model", ) + help="aishell ds2 input data file. 
For wenetspeech, we only feed for infer model", + ) parser.add_argument( '--model_type', type=str, @@ -57,7 +58,6 @@ if __name__ == '__main__': iodict = pickle.load(f) print(iodict.keys()) - audio_chunk = iodict['audio_chunk'] audio_chunk_lens = iodict['audio_chunk_lens'] chunk_state_h_box = iodict['chunk_state_h_box'] From 5e03d753acb7e63a37dd34e0647a12c782b1cb13 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Thu, 16 Jun 2022 06:49:19 +0000 Subject: [PATCH 08/30] add ds2 steaming asr onnx --- .../conf/ws_ds2_application.yaml | 6 +++--- paddlespeech/resource/resource.py | 1 - .../server/conf/ws_ds2_application.yaml | 10 +++++----- .../engine/asr/online/onnx/asr_engine.py | 18 +++++++++++++----- .../asr/online/paddleinference/asr_engine.py | 6 ++++++ 5 files changed, 27 insertions(+), 14 deletions(-) diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index f67d3157..a4e6e9a1 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -22,8 +22,8 @@ engine_list: ['asr_online-inference'] ################### speech task: asr; engine_type: online-inference ####################### asr_online-inference: model_type: 'deepspeech2online_aishell' - am_model: # the pdmodel file of am static model [optional] - am_params: # the pdiparams file of am static model [optional] + am_model: # the pdmodel file of am static model [optional] + am_params: # the pdiparams file of am static model [optional] lang: 'zh' sample_rate: 16000 cfg_path: @@ -54,7 +54,7 @@ asr_online-inference: ################### speech task: asr; engine_type: online-onnx ####################### asr_online-onnx: model_type: 'deepspeech2online_aishell' - am_model: # the pdmodel file of am static model [optional] + am_model: # the pdmodel file of onnx am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' sample_rate: 16000 
diff --git a/paddlespeech/resource/resource.py b/paddlespeech/resource/resource.py index 2e637f0f..15112ba7 100644 --- a/paddlespeech/resource/resource.py +++ b/paddlespeech/resource/resource.py @@ -168,7 +168,6 @@ class CommonTaskResource: exec('from .pretrained_models import {}'.format(import_models)) models = OrderedDict(locals()[import_models]) except Exception as e: - print(e) models = OrderedDict({}) # no models. finally: return models diff --git a/paddlespeech/server/conf/ws_ds2_application.yaml b/paddlespeech/server/conf/ws_ds2_application.yaml index f0a98e72..430e6fd1 100644 --- a/paddlespeech/server/conf/ws_ds2_application.yaml +++ b/paddlespeech/server/conf/ws_ds2_application.yaml @@ -11,7 +11,7 @@ port: 8090 # protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. protocol: 'websocket' -engine_list: ['asr_online-onnx'] +engine_list: ['asr_online-inference'] ################################################################################# @@ -22,8 +22,8 @@ engine_list: ['asr_online-onnx'] ################### speech task: asr; engine_type: online-inference ####################### asr_online-inference: model_type: 'deepspeech2online_aishell' - am_model: # the pdmodel file of am static model [optional] - am_params: # the pdiparams file of am static model [optional] + am_model: # the pdmodel file of am static model [optional] + am_params: # the pdiparams file of am static model [optional] lang: 'zh' sample_rate: 16000 cfg_path: @@ -54,7 +54,7 @@ asr_online-inference: ################### speech task: asr; engine_type: online-onnx ####################### asr_online-onnx: model_type: 'deepspeech2online_aishell' - am_model: # the pdmodel file of am static model [optional] + am_model: # the pdmodel file of onnx am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' sample_rate: 16000 @@ -81,4 +81,4 @@ asr_online-onnx: window_n: 7 # frame shift_n: 4 # frame window_ms: 20 # ms - 
shift_ms: 10 # ms + shift_ms: 10 # ms \ No newline at end of file diff --git a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py index 97addc7a..aab29f78 100644 --- a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py @@ -331,6 +331,13 @@ class PaddleASRConnectionHanddler: else: return '' + def get_word_time_stamp(self): + return [] + + @paddle.no_grad() + def rescoring(self): + ... + class ASRServerExecutor(ASRExecutor): def __init__(self): @@ -409,17 +416,18 @@ class ASRServerExecutor(ASRExecutor): os.path.dirname(os.path.abspath(self.cfg_path))) self.am_model = os.path.join(self.res_path, self.task_resource.res_dict[ - 'model']) if am_model is None else os.path.abspath(am_model) - self.am_params = os.path.join( - self.res_path, self.task_resource.res_dict[ - 'params']) if am_params is None else os.path.abspath(am_params) + 'onnx_model']) if am_model is None else os.path.abspath(am_model) + + # self.am_params = os.path.join( + # self.res_path, self.task_resource.res_dict[ + # 'params']) if am_params is None else os.path.abspath(am_params) logger.info("Load the pretrained model:") logger.info(f" tag = {tag}") logger.info(f" res_path: {self.res_path}") logger.info(f" cfg path: {self.cfg_path}") logger.info(f" am_model path: {self.am_model}") - logger.info(f" am_params path: {self.am_params}") + # logger.info(f" am_params path: {self.am_params}") #Init body. 
self.config = CfgNode(new_allowed=True) diff --git a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py index fb24cab9..b3b31a5a 100644 --- a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py @@ -345,6 +345,12 @@ class PaddleASRConnectionHanddler: else: return '' + def get_word_time_stamp(self): + return [] + + @paddle.no_grad() + def rescoring(self): + ... class ASRServerExecutor(ASRExecutor): def __init__(self): From e68f1ce6f5267f7be11ad199266b25ff533e79b3 Mon Sep 17 00:00:00 2001 From: iftaken Date: Thu, 16 Jun 2022 14:58:33 +0800 Subject: [PATCH 09/30] add speech web demo --- demos/speech_web_demo/.gitignore | 16 + demos/speech_web_demo/README.MD | 168 ++ .../conf/tts_online_application.yaml | 103 + ...former_wenetspeech_application_faster.yaml | 48 + demos/speech_web_demo/speech_server/main.py | 492 +++++ .../speech_server/requirements.txt | 14 + .../speech_server/src/AudioManeger.py | 173 ++ .../speech_server/src/SpeechBase/asr.py | 87 + .../speech_server/src/SpeechBase/nlp.py | 28 + .../src/SpeechBase/sql_helper.py | 152 ++ .../speech_server/src/SpeechBase/tts.py | 121 ++ .../speech_server/src/SpeechBase/vpr.py | 152 ++ .../src/SpeechBase/vpr_encode.py | 26 + .../speech_server/src/WebsocketManeger.py | 31 + .../speech_server/src/robot.py | 93 + .../speech_web_demo/speech_server/src/util.py | 18 + demos/speech_web_demo/web_client/.gitignore | 25 + demos/speech_web_demo/web_client/README.md | 7 + demos/speech_web_demo/web_client/index.html | 13 + .../web_client/package-lock.json | 1869 +++++++++++++++++ demos/speech_web_demo/web_client/package.json | 23 + .../web_client/public/favicon.ico | Bin 0 -> 4286 bytes demos/speech_web_demo/web_client/src/App.vue | 19 + .../speech_web_demo/web_client/src/api/API.js | 29 + .../web_client/src/api/ApiASR.js | 30 + 
.../web_client/src/api/ApiNLP.js | 17 + .../web_client/src/api/ApiTTS.js | 8 + .../web_client/src/api/ApiVPR.js | 32 + .../src/assets/image/ic_大-上传文件.svg | 6 + .../src/assets/image/ic_大-声音波浪.svg | 6 + .../src/assets/image/ic_大-语音.svg | 6 + .../src/assets/image/ic_小-录制语音.svg | 6 + .../src/assets/image/ic_小-结束.svg | 3 + .../src/assets/image/ic_开始聊天.svg | 6 + .../assets/image/ic_开始聊天_hover.svg | 6 + .../assets/image/ic_播放(按钮).svg | 3 + .../assets/image/ic_暂停(按钮).svg | 3 + .../src/assets/image/ic_更换示例.svg | 11 + .../assets/image/icon_小-声音波浪.svg | 6 + .../image/icon_录制声音小语音1.svg | 14 + .../assets/image/在线体验-背景@2x.png | Bin 0 -> 78525 bytes .../src/assets/image/场景齐全@3x.png | Bin 0 -> 8539 bytes .../src/assets/image/教程丰富@3x.png | Bin 0 -> 8960 bytes .../src/assets/image/模型全面@3x.png | Bin 0 -> 9972 bytes .../assets/image/步骤-箭头切图@2x.png | Bin 0 -> 5515 bytes .../src/assets/image/用户头像@2x.png | Bin 0 -> 7509 bytes .../src/assets/image/飞桨头像@2x.png | Bin 0 -> 8144 bytes .../web_client/src/assets/logo.png | Bin 0 -> 6849 bytes .../src/components/Content/Header/Header.vue | 26 + .../src/components/Content/Header/style.less | 148 ++ .../src/components/Content/Tail/Tail.vue | 0 .../src/components/Content/Tail/style.less | 0 .../web_client/src/components/Experience.vue | 50 + .../src/components/SubMenu/ASR/ASR.vue | 154 ++ .../src/components/SubMenu/ASR/ASRT.vue | 38 + .../ASR/AudioFile/AudioFileIdentification.vue | 241 +++ .../SubMenu/ASR/AudioFile/style.less | 293 +++ .../ASR/EndToEnd/EndToEndIdentification.vue | 92 + .../SubMenu/ASR/EndToEnd/style.less | 114 + .../SubMenu/ASR/RealTime/RealTime.vue | 128 ++ .../SubMenu/ASR/RealTime/style.less | 112 + .../src/components/SubMenu/ASR/style.less | 76 + .../src/components/SubMenu/ChatBot/Chat.vue | 298 +++ .../src/components/SubMenu/ChatBot/ChatT.vue | 255 +++ .../src/components/SubMenu/ChatBot/style.less | 181 ++ .../src/components/SubMenu/IE/IE.vue | 125 ++ .../src/components/SubMenu/IE/IET.vue | 166 ++ 
.../src/components/SubMenu/IE/style.less | 179 ++ .../src/components/SubMenu/TTS/TTS.vue | 726 +++++++ .../src/components/SubMenu/TTS/TTST.vue | 359 ++++ .../src/components/SubMenu/TTS/style.less | 369 ++++ .../src/components/SubMenu/VPR/VPR.vue | 178 ++ .../src/components/SubMenu/VPR/VPRT.vue | 335 +++ .../src/components/SubMenu/VPR/style.less | 419 ++++ .../web_client/src/components/style.less | 83 + demos/speech_web_demo/web_client/src/main.js | 13 + .../speech_web_demo/web_client/vite.config.js | 28 + demos/speech_web_demo/web_client/yarn.lock | 785 +++++++ demos/speech_web_demo/接口文档.md | 406 ++++ 79 files changed, 10247 insertions(+) create mode 100644 demos/speech_web_demo/.gitignore create mode 100644 demos/speech_web_demo/README.MD create mode 100644 demos/speech_web_demo/speech_server/conf/tts_online_application.yaml create mode 100644 demos/speech_web_demo/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml create mode 100644 demos/speech_web_demo/speech_server/main.py create mode 100644 demos/speech_web_demo/speech_server/requirements.txt create mode 100644 demos/speech_web_demo/speech_server/src/AudioManeger.py create mode 100644 demos/speech_web_demo/speech_server/src/SpeechBase/asr.py create mode 100644 demos/speech_web_demo/speech_server/src/SpeechBase/nlp.py create mode 100644 demos/speech_web_demo/speech_server/src/SpeechBase/sql_helper.py create mode 100644 demos/speech_web_demo/speech_server/src/SpeechBase/tts.py create mode 100644 demos/speech_web_demo/speech_server/src/SpeechBase/vpr.py create mode 100644 demos/speech_web_demo/speech_server/src/SpeechBase/vpr_encode.py create mode 100644 demos/speech_web_demo/speech_server/src/WebsocketManeger.py create mode 100644 demos/speech_web_demo/speech_server/src/robot.py create mode 100644 demos/speech_web_demo/speech_server/src/util.py create mode 100644 demos/speech_web_demo/web_client/.gitignore create mode 100644 demos/speech_web_demo/web_client/README.md create mode 100644 
demos/speech_web_demo/web_client/index.html create mode 100644 demos/speech_web_demo/web_client/package-lock.json create mode 100644 demos/speech_web_demo/web_client/package.json create mode 100644 demos/speech_web_demo/web_client/public/favicon.ico create mode 100644 demos/speech_web_demo/web_client/src/App.vue create mode 100644 demos/speech_web_demo/web_client/src/api/API.js create mode 100644 demos/speech_web_demo/web_client/src/api/ApiASR.js create mode 100644 demos/speech_web_demo/web_client/src/api/ApiNLP.js create mode 100644 demos/speech_web_demo/web_client/src/api/ApiTTS.js create mode 100644 demos/speech_web_demo/web_client/src/api/ApiVPR.js create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_大-上传文件.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_大-声音波浪.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_大-语音.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_小-录制语音.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_小-结束.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天_hover.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_播放(按钮).svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_暂停(按钮).svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/ic_更换示例.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/icon_小-声音波浪.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/icon_录制声音小语音1.svg create mode 100644 demos/speech_web_demo/web_client/src/assets/image/在线体验-背景@2x.png create mode 100644 demos/speech_web_demo/web_client/src/assets/image/场景齐全@3x.png create mode 100644 demos/speech_web_demo/web_client/src/assets/image/教程丰富@3x.png create mode 100644 demos/speech_web_demo/web_client/src/assets/image/模型全面@3x.png create mode 
100644 demos/speech_web_demo/web_client/src/assets/image/步骤-箭头切图@2x.png create mode 100644 demos/speech_web_demo/web_client/src/assets/image/用户头像@2x.png create mode 100644 demos/speech_web_demo/web_client/src/assets/image/飞桨头像@2x.png create mode 100644 demos/speech_web_demo/web_client/src/assets/logo.png create mode 100644 demos/speech_web_demo/web_client/src/components/Content/Header/Header.vue create mode 100644 demos/speech_web_demo/web_client/src/components/Content/Header/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/Content/Tail/Tail.vue create mode 100644 demos/speech_web_demo/web_client/src/components/Content/Tail/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/Experience.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASR.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASRT.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ASR/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/Chat.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/ChatT.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/style.less create mode 100644 
demos/speech_web_demo/web_client/src/components/SubMenu/IE/IE.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/IE/IET.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/IE/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTS.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTST.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/TTS/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPR.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPRT.vue create mode 100644 demos/speech_web_demo/web_client/src/components/SubMenu/VPR/style.less create mode 100644 demos/speech_web_demo/web_client/src/components/style.less create mode 100644 demos/speech_web_demo/web_client/src/main.js create mode 100644 demos/speech_web_demo/web_client/vite.config.js create mode 100644 demos/speech_web_demo/web_client/yarn.lock create mode 100644 demos/speech_web_demo/接口文档.md diff --git a/demos/speech_web_demo/.gitignore b/demos/speech_web_demo/.gitignore new file mode 100644 index 00000000..54418e60 --- /dev/null +++ b/demos/speech_web_demo/.gitignore @@ -0,0 +1,16 @@ +*/.vscode/* +*.wav +*/resource/* +.Ds* +*.pyc +*.pcm +*.npy +*.diff +*.sqlite +*/static/* +*.pdparams +*.pdiparams* +*.pdmodel +*/source/* +*/PaddleSpeech/* + diff --git a/demos/speech_web_demo/README.MD b/demos/speech_web_demo/README.MD new file mode 100644 index 00000000..cfcbe559 --- /dev/null +++ b/demos/speech_web_demo/README.MD @@ -0,0 +1,168 @@ +# Paddle Speech Demo + +PaddleSpeechDemo是一个以PaddleSpeech的语音交互功能为主体开发的Demo展示项目,用于帮助大家更好的上手PaddleSpeech以及使用PaddleSpeech构建自己的应用。 + +智能语音交互部分使用PaddleSpeech,对话以及信息抽取部分使用PaddleNLP,网页前端展示部分基于Vue3进行开发 + +主要功能: + ++ 语音聊天:PaddleSpeech的语音识别能力+语音合成能力,对话部分基于PaddleNLP的闲聊功能 ++ 声纹识别:PaddleSpeech的声纹识别功能展示 ++ 语音识别:支持【实时语音识别】,【端到端识别】,【音频文件识别】三种模式 ++ 
语音合成:支持【流式合成】与【端到端合成】两种方式 ++ 语音指令:基于PaddleSpeech的语音识别能力与PaddleNLP的信息抽取,实现交通费的智能报销 + +运行效果: + + ![效果](docs/效果展示.png) + +## 安装 + +### 后端环境安装 + +``` +# 安装环境 +cd speech_server +pip install -r requirements.txt +``` + + +### 前端环境安装 + +前端依赖node.js ,需要提前安装,确保npm可用,npm测试版本8.3.1,建议下载[官网](https://nodejs.org/en/)稳定版的node.js + +``` +# 进入前端目录 +cd web_client + +# 安装yarn,已经安装可跳过 +npm install -g yarn + +# 使用yarn安装前端依赖 +yarn install +``` + + +## 启动服务 + +### 开启后端服务 + +``` +cd speech_server +# 默认8010端口 +python main.py --port 8010 +``` + +### 开启前端服务 + +``` +cd web_client +yarn dev --port 8011 +``` + +默认配置下,前端中配置的后台地址信息是localhost,确保后端服务器和打开页面的浏览器在同一台机器上,不在一台机器的配置方式见下方的FAQ:【后端如果部署在其它机器或者别的端口如何修改】 + +## Docker启动 + +### 后端docker +后端docker使用[paddlepaddle官方docker](https://www.paddlepaddle.org.cn/),这里演示CPU版本 +``` +# 拉取PaddleSpeech项目 +cd PaddleSpeechServer +git clone https://github.com/PaddlePaddle/PaddleSpeech.git + +# 拉取镜像 +docker pull registry.baidubce.com/paddlepaddle/paddle:2.3.0 + +# 启动容器 +docker run --name paddle -it -p 8010:8010 -v $PWD:/paddle registry.baidubce.com/paddlepaddle/paddle:2.3.0 /bin/bash + +# 进入容器 +cd /paddle + +# 安装依赖 +pip install -r requirements.txt + +# 启动服务 +python main.py --port 8010 + +``` + +### 前端docker + +前端docker直接使用[node官方的docker](https://hub.docker.com/_/node)即可 + +```shell +docker pull node +``` + +镜像中安装依赖 + +```shell +cd PaddleSpeechWebClient +# 映射外部8011端口 +docker run -it -p 8011:8011 -v $PWD:/paddle node:latest /bin/bash +# 进入容器中 +cd /paddle +# 安装依赖 +yarn install +# 启动前端 +yarn dev --port 8011 +``` + + + + + +## FAQ + +#### Q: 如何安装node.js + +A: node.js的安装可以参考[【菜鸟教程】](https://www.runoob.com/nodejs/nodejs-install-setup.html), 确保npm可用 + +#### Q:后端如果部署在其它机器或者别的端口如何修改 + +A:后端的配置地址有分散在两个文件中 + +修改第一个文件`PaddleSpeechWebClient/vite.config.js` + +```json +server: { + host: "0.0.0.0", + proxy: { + "/api": { + target: "http://localhost:8010", // 这里改成后端所在接口 + changeOrigin: true, + rewrite: (path) => path.replace(/^\/api/, ""), + }, + }, + } +``` + 
+修改第二个文件`PaddleSpeechWebClient/src/api/API.js`(Websocket代理配置失败,所以需要在这个文件中修改) + +```javascript +// websocket (这里改成后端所在的接口) +CHAT_SOCKET_RECORD: 'ws://localhost:8010/ws/asr/offlineStream', // ChatBot websocket 接口 +ASR_SOCKET_RECORD: 'ws://localhost:8010/ws/asr/onlineStream', // Stream ASR 接口 +TTS_SOCKET_RECORD: 'ws://localhost:8010/ws/tts/online', // Stream TTS 接口 +``` + +#### Q:后端以IP地址的形式,前端无法录音 + +A:这里主要是浏览器安全策略的限制,需要配置浏览器后重启。浏览器修改配置可参考[使用js-audio-recorder报浏览器不支持getUserMedia](https://blog.csdn.net/YRY_LIKE_YOU/article/details/113745273) + +chrome设置地址: chrome://flags/#unsafely-treat-insecure-origin-as-secure + + + + +## 参考资料 + +vue实现录音参考资料:https://blog.csdn.net/qq_41619796/article/details/107865602#t1 + +前端流式播放音频参考仓库: + +https://github.com/AnthumChris/fetch-stream-audio + +https://bm.enthuses.me/buffered.php?bref=6677 diff --git a/demos/speech_web_demo/speech_server/conf/tts_online_application.yaml b/demos/speech_web_demo/speech_server/conf/tts_online_application.yaml new file mode 100644 index 00000000..0460a5e1 --- /dev/null +++ b/demos/speech_web_demo/speech_server/conf/tts_online_application.yaml @@ -0,0 +1,103 @@ +# This is the parameter configuration file for streaming tts server. + +################################################################################# +# SERVER SETTING # +################################################################################# +host: 0.0.0.0 +port: 8092 + +# The task format in the engine_list is: _ +# engine_list choices = ['tts_online', 'tts_online-onnx'], the inference speed of tts_online-onnx is faster than tts_online. 
+# protocol choices = ['websocket', 'http'] +protocol: 'http' +engine_list: ['tts_online-onnx'] + + +################################################################################# +# ENGINE CONFIG # +################################################################################# + +################################### TTS ######################################### +################### speech task: tts; engine_type: online ####################### +tts_online: + # am (acoustic model) choices=['fastspeech2_csmsc', 'fastspeech2_cnndecoder_csmsc'] + # fastspeech2_cnndecoder_csmsc support streaming am infer. + am: 'fastspeech2_csmsc' + am_config: + am_ckpt: + am_stat: + phones_dict: + tones_dict: + speaker_dict: + spk_id: 0 + + # voc (vocoder) choices=['mb_melgan_csmsc, hifigan_csmsc'] + # Both mb_melgan_csmsc and hifigan_csmsc support streaming voc inference + voc: 'mb_melgan_csmsc' + voc_config: + voc_ckpt: + voc_stat: + + # others + lang: 'zh' + device: 'cpu' # set 'gpu:id' or 'cpu' + # am_block and am_pad only for fastspeech2_cnndecoder_onnx model to streaming am infer, + # when am_pad set 12, streaming synthetic audio is the same as non-streaming synthetic audio + am_block: 72 + am_pad: 12 + # voc_pad and voc_block voc model to streaming voc infer, + # when voc model is mb_melgan_csmsc, voc_pad set 14, streaming synthetic audio is the same as non-streaming synthetic audio; The minimum value of pad can be set to 7, streaming synthetic audio sounds normal + # when voc model is hifigan_csmsc, voc_pad set 19, streaming synthetic audio is the same as non-streaming synthetic audio; voc_pad set 14, streaming synthetic audio sounds normal + voc_block: 36 + voc_pad: 14 + + + +################################################################################# +# ENGINE CONFIG # +################################################################################# + +################################### TTS ######################################### +################### speech 
task: tts; engine_type: online-onnx ####################### +tts_online-onnx: + # am (acoustic model) choices=['fastspeech2_csmsc_onnx', 'fastspeech2_cnndecoder_csmsc_onnx'] + # fastspeech2_cnndecoder_csmsc_onnx support streaming am infer. + am: 'fastspeech2_cnndecoder_csmsc_onnx' + # am_ckpt is a list, if am is fastspeech2_cnndecoder_csmsc_onnx, am_ckpt = [encoder model, decoder model, postnet model]; + # if am is fastspeech2_csmsc_onnx, am_ckpt = [ckpt model]; + am_ckpt: # list + am_stat: + phones_dict: + tones_dict: + speaker_dict: + spk_id: 0 + am_sample_rate: 24000 + am_sess_conf: + device: "cpu" # set 'gpu:id' or 'cpu' + use_trt: False + cpu_threads: 4 + + # voc (vocoder) choices=['mb_melgan_csmsc_onnx, hifigan_csmsc_onnx'] + # Both mb_melgan_csmsc_onnx and hifigan_csmsc_onnx support streaming voc inference + voc: 'hifigan_csmsc_onnx' + voc_ckpt: + voc_sample_rate: 24000 + voc_sess_conf: + device: "cpu" # set 'gpu:id' or 'cpu' + use_trt: False + cpu_threads: 4 + + # others + lang: 'zh' + # am_block and am_pad only for fastspeech2_cnndecoder_onnx model to streaming am infer, + # when am_pad set 12, streaming synthetic audio is the same as non-streaming synthetic audio + am_block: 72 + am_pad: 12 + # voc_pad and voc_block voc model to streaming voc infer, + # when voc model is mb_melgan_csmsc_onnx, voc_pad set 14, streaming synthetic audio is the same as non-streaming synthetic audio; The minimum value of pad can be set to 7, streaming synthetic audio sounds normal + # when voc model is hifigan_csmsc_onnx, voc_pad set 19, streaming synthetic audio is the same as non-streaming synthetic audio; voc_pad set 14, streaming synthetic audio sounds normal + voc_block: 36 + voc_pad: 14 + # voc_upsample should be same as n_shift on voc config. 
+ voc_upsample: 300 + diff --git a/demos/speech_web_demo/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml b/demos/speech_web_demo/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml new file mode 100644 index 00000000..ba413c80 --- /dev/null +++ b/demos/speech_web_demo/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml @@ -0,0 +1,48 @@ +# This is the parameter configuration file for PaddleSpeech Serving. + +################################################################################# +# SERVER SETTING # +################################################################################# +host: 0.0.0.0 +port: 8090 + +# The task format in the engin_list is: _ +# task choices = ['asr_online'] +# protocol = ['websocket'] (only one can be selected). +# websocket only support online engine type. +protocol: 'websocket' +engine_list: ['asr_online'] + + +################################################################################# +# ENGINE CONFIG # +################################################################################# + +################################### ASR ######################################### +################### speech task: asr; engine_type: online ####################### +asr_online: + model_type: 'conformer_online_wenetspeech' + am_model: # the pdmodel file of am static model [optional] + am_params: # the pdiparams file of am static model [optional] + lang: 'zh' + sample_rate: 16000 + cfg_path: + decode_method: + force_yes: True + device: 'cpu' # cpu or gpu:id + decode_method: "attention_rescoring" + continuous_decoding: True # enable continue decoding when endpoint detected + num_decoding_left_chunks: 16 + am_predictor_conf: + device: # set 'gpu:id' or 'cpu' + switch_ir_optim: True + glog_info: False # True -> print glog + summary: True # False -> do not show predictor config + + chunk_buffer_conf: + window_n: 7 # frame + shift_n: 4 # frame + window_ms: 25 # ms + shift_ms: 10 # ms + 
sample_rate: 16000 + sample_width: 2 diff --git a/demos/speech_web_demo/speech_server/main.py b/demos/speech_web_demo/speech_server/main.py new file mode 100644 index 00000000..021f1e16 --- /dev/null +++ b/demos/speech_web_demo/speech_server/main.py @@ -0,0 +1,492 @@ +# todo: +# 1. 开启服务 +# 2. 接收录音音频,返回识别结果 +# 3. 接收ASR识别结果,返回NLP对话结果 +# 4. 接收NLP对话结果,返回TTS音频 + +import base64 +import yaml +import os +import json +import datetime +import librosa +import soundfile as sf +import numpy as np +import argparse +import uvicorn +import aiofiles +from typing import Optional, List +from pydantic import BaseModel +from fastapi import FastAPI, Header, File, UploadFile, Form, Cookie, WebSocket, WebSocketDisconnect +from fastapi.responses import StreamingResponse +from starlette.responses import FileResponse +from starlette.middleware.cors import CORSMiddleware +from starlette.requests import Request +from starlette.websockets import WebSocketState as WebSocketState + +from src.AudioManeger import AudioMannger +from src.util import * +from src.robot import Robot +from src.WebsocketManeger import ConnectionManager +from src.SpeechBase.vpr import VPR + +from paddlespeech.server.engine.asr.online.asr_engine import PaddleASRConnectionHanddler +from paddlespeech.server.utils.audio_process import float2pcm + + +# 解析配置 +parser = argparse.ArgumentParser( + prog='PaddleSpeechDemo', add_help=True) + +parser.add_argument( + "--port", + action="store", + type=int, + help="port of the app", + default=8010, + required=False) + +args = parser.parse_args() +port = args.port + +# 配置文件 +tts_config = "conf/tts_online_application.yaml" +asr_config = "conf/ws_conformer_wenetspeech_application_faster.yaml" +asr_init_path = "source/demo/demo.wav" +db_path = "source/db/vpr.sqlite" +ie_model_path = "source/model" + +# 路径配置 +UPLOAD_PATH = "source/vpr" +WAV_PATH = "source/wav" + + +base_sources = [ + UPLOAD_PATH, WAV_PATH +] +for path in base_sources: + os.makedirs(path, exist_ok=True) + + +# 初始化 +app = 
FastAPI() +chatbot = Robot(asr_config, tts_config, asr_init_path, ie_model_path=ie_model_path) +manager = ConnectionManager() +aumanager = AudioMannger(chatbot) +aumanager.init() +vpr = VPR(db_path, dim = 192, top_k = 5) + +# 服务配置 +class NlpBase(BaseModel): + chat: str + +class TtsBase(BaseModel): + text: str + +class Audios: + def __init__(self) -> None: + self.audios = b"" + +audios = Audios() + +###################################################################### +########################### ASR 服务 ################################# +##################################################################### + +# 接收文件,返回ASR结果 +# 上传文件 +@app.post("/asr/offline") +async def speech2textOffline(files: List[UploadFile]): + # 只有第一个有效 + asr_res = "" + for file in files[:1]: + # 生成时间戳 + now_name = "asr_offline_" + datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d%H%M%S') + randName() + ".wav" + out_file_path = os.path.join(WAV_PATH, now_name) + async with aiofiles.open(out_file_path, 'wb') as out_file: + content = await file.read() # async read + await out_file.write(content) # async write + + # 返回ASR识别结果 + asr_res = chatbot.speech2text(out_file_path) + return SuccessRequest(result=asr_res) + # else: + # return ErrorRequest(message="文件不是.wav格式") + return ErrorRequest(message="上传文件为空") + +# 接收文件,同时将wav强制转成16k, int16类型 +@app.post("/asr/offlinefile") +async def speech2textOfflineFile(files: List[UploadFile]): + # 只有第一个有效 + asr_res = "" + for file in files[:1]: + # 生成时间戳 + now_name = "asr_offline_" + datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d%H%M%S') + randName() + ".wav" + out_file_path = os.path.join(WAV_PATH, now_name) + async with aiofiles.open(out_file_path, 'wb') as out_file: + content = await file.read() # async read + await out_file.write(content) # async write + + # 将文件转成16k, 16bit类型的wav文件 + wav, sr = librosa.load(out_file_path, sr=16000) + wav = float2pcm(wav) # float32 to int16 + wav_bytes = wav.tobytes() # to bytes + wav_base64 = 
base64.b64encode(wav_bytes).decode('utf8') + + # 将文件重新写入 + now_name = now_name[:-4] + "_16k" + ".wav" + out_file_path = os.path.join(WAV_PATH, now_name) + sf.write(out_file_path,wav,16000) + + # 返回ASR识别结果 + asr_res = chatbot.speech2text(out_file_path) + response_res = { + "asr_result": asr_res, + "wav_base64": wav_base64 + } + return SuccessRequest(result=response_res) + + return ErrorRequest(message="上传文件为空") + + + +# 流式接收测试 +@app.post("/asr/online1") +async def speech2textOnlineRecive(files: List[UploadFile]): + audio_bin = b'' + for file in files: + content = await file.read() + audio_bin += content + audios.audios += audio_bin + print(f"audios长度变化: {len(audios.audios)}") + return SuccessRequest(message="接收成功") + +# 采集环境噪音大小 +@app.post("/asr/collectEnv") +async def collectEnv(files: List[UploadFile]): + for file in files[:1]: + content = await file.read() # async read + # 初始化, wav 前44字节是头部信息 + aumanager.compute_env_volume(content[44:]) + vad_ = aumanager.vad_threshold + return SuccessRequest(result=vad_,message="采集环境噪音成功") + +# 停止录音 +@app.get("/asr/stopRecord") +async def stopRecord(): + audios.audios = b"" + aumanager.stop() + print("Online录音暂停") + return SuccessRequest(message="停止成功") + +# 恢复录音 +@app.get("/asr/resumeRecord") +async def resumeRecord(): + aumanager.resume() + print("Online录音恢复") + return SuccessRequest(message="Online录音恢复") + + +# 聊天用的ASR +@app.websocket("/ws/asr/offlineStream") +async def websocket_endpoint(websocket: WebSocket): + await manager.connect(websocket) + try: + while True: + asr_res = None + # websocket 不接收,只推送 + data = await websocket.receive_bytes() + if not aumanager.is_pause: + asr_res = aumanager.stream_asr(data) + else: + print("录音暂停") + if asr_res: + await manager.send_personal_message(asr_res, websocket) + aumanager.clear_asr() + + except WebSocketDisconnect: + manager.disconnect(websocket) + # await manager.broadcast(f"用户-{user}-离开") + # print(f"用户-{user}-离开") + + +# Online识别的ASR +@app.websocket('/ws/asr/onlineStream') 
+async def websocket_endpoint(websocket: WebSocket): + """PaddleSpeech Online ASR Server api + + Args: + websocket (WebSocket): the websocket instance + """ + + #1. the interface wait to accept the websocket protocal header + # and only we receive the header, it establish the connection with specific thread + await websocket.accept() + + #2. if we accept the websocket headers, we will get the online asr engine instance + engine = chatbot.asr.engine + + #3. each websocket connection, we will create an PaddleASRConnectionHanddler to process such audio + # and each connection has its own connection instance to process the request + # and only if client send the start signal, we create the PaddleASRConnectionHanddler instance + connection_handler = None + + try: + #4. we do a loop to process the audio package by package according the protocal + # and only if the client send finished signal, we will break the loop + while True: + # careful here, changed the source code from starlette.websockets + # 4.1 we wait for the client signal for the specific action + assert websocket.application_state == WebSocketState.CONNECTED + message = await websocket.receive() + websocket._raise_on_disconnect(message) + + #4.2 text for the action command and bytes for pcm data + if "text" in message: + # we first parse the specific command + message = json.loads(message["text"]) + if 'signal' not in message: + resp = {"status": "ok", "message": "no valid json data"} + await websocket.send_json(resp) + + # start command, we create the PaddleASRConnectionHanddler instance to process the audio data + # end command, we process the all the last audio pcm and return the final result + # and we break the loop + if message['signal'] == 'start': + resp = {"status": "ok", "signal": "server_ready"} + # do something at begining here + # create the instance to process the audio + # connection_handler = chatbot.asr.connection_handler + connection_handler = PaddleASRConnectionHanddler(engine) + await 
websocket.send_json(resp) + elif message['signal'] == 'end': + # reset single engine for an new connection + # and we will destroy the connection + connection_handler.decode(is_finished=True) + connection_handler.rescoring() + asr_results = connection_handler.get_result() + connection_handler.reset() + + resp = { + "status": "ok", + "signal": "finished", + 'result': asr_results + } + await websocket.send_json(resp) + break + else: + resp = {"status": "ok", "message": "no valid json data"} + await websocket.send_json(resp) + elif "bytes" in message: + # bytes for the pcm data + message = message["bytes"] + print("###############") + print("len message: ", len(message)) + print("###############") + + # we extract the remained audio pcm + # and decode for the result in this package data + connection_handler.extract_feat(message) + connection_handler.decode(is_finished=False) + asr_results = connection_handler.get_result() + + # return the current period result + # if the engine create the vad instance, this connection will have many period results + resp = {'result': asr_results} + print(resp) + await websocket.send_json(resp) + except WebSocketDisconnect: + pass + +###################################################################### +########################### NLP 服务 ################################# +##################################################################### + +@app.post("/nlp/chat") +async def chatOffline(nlp_base:NlpBase): + chat = nlp_base.chat + if not chat: + return ErrorRequest(message="传入文本为空") + else: + res = chatbot.chat(chat) + return SuccessRequest(result=res) + +@app.post("/nlp/ie") +async def ieOffline(nlp_base:NlpBase): + nlp_text = nlp_base.chat + if not nlp_text: + return ErrorRequest(message="传入文本为空") + else: + res = chatbot.ie(nlp_text) + return SuccessRequest(result=res) + +###################################################################### +########################### TTS 服务 ################################# 
+##################################################################### + +@app.post("/tts/offline") +async def text2speechOffline(tts_base:TtsBase): + text = tts_base.text + if not text: + return ErrorRequest(message="文本为空") + else: + now_name = "tts_"+ datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d%H%M%S') + randName() + ".wav" + out_file_path = os.path.join(WAV_PATH, now_name) + # 保存为文件,再转成base64传输 + chatbot.text2speech(text, outpath=out_file_path) + with open(out_file_path, "rb") as f: + data_bin = f.read() + base_str = base64.b64encode(data_bin) + return SuccessRequest(result=base_str) + +# http流式TTS +@app.post("/tts/online") +async def stream_tts(request_body: TtsBase): + text = request_body.text + return StreamingResponse(chatbot.text2speechStreamBytes(text=text)) + +# ws流式TTS +@app.websocket("/ws/tts/online") +async def stream_ttsWS(websocket: WebSocket): + await manager.connect(websocket) + try: + while True: + text = await websocket.receive_text() + # 用 websocket 流式接收音频数据 + if text: + for sub_wav in chatbot.text2speechStream(text=text): + # print("发送sub wav: ", len(sub_wav)) + res = { + "wav": sub_wav, + "done": False + } + await websocket.send_json(res) + + # 输送结束 + res = { + "wav": sub_wav, + "done": True + } + await websocket.send_json(res) + # manager.disconnect(websocket) + + except WebSocketDisconnect: + manager.disconnect(websocket) + + +###################################################################### +########################### VPR 服务 ################################# +##################################################################### + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"]) + + +@app.post('/vpr/enroll') +async def vpr_enroll(table_name: str=None, + spk_id: str=Form(...), + audio: UploadFile=File(...)): + # Enroll the uploaded audio with spk-id into MySQL + try: + if not spk_id: + return {'status': False, 'msg': "spk_id can not be 
None"} + # Save the upload data to server. + content = await audio.read() + now_name = "vpr_enroll_" + datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d%H%M%S') + randName() + ".wav" + audio_path = os.path.join(UPLOAD_PATH, now_name) + + with open(audio_path, "wb+") as f: + f.write(content) + vpr.vpr_enroll(username=spk_id, wav_path=audio_path) + return {'status': True, 'msg': "Successfully enroll data!"} + except Exception as e: + return {'status': False, 'msg': e} + + +@app.post('/vpr/recog') +async def vpr_recog(request: Request, + table_name: str=None, + audio: UploadFile=File(...)): + # Voice print recognition online + # try: + # Save the upload data to server. + content = await audio.read() + now_name = "vpr_query_" + datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d%H%M%S') + randName() + ".wav" + query_audio_path = os.path.join(UPLOAD_PATH, now_name) + with open(query_audio_path, "wb+") as f: + f.write(content) + spk_ids, paths, scores = vpr.do_search_vpr(query_audio_path) + + res = dict(zip(spk_ids, zip(paths, scores))) + # Sort results by distance metric, closest distances first + res = sorted(res.items(), key=lambda item: item[1][1], reverse=True) + return res + # except Exception as e: + # return {'status': False, 'msg': e}, 400 + + +@app.post('/vpr/del') +async def vpr_del(spk_id: dict=None): + # Delete a record by spk_id in MySQL + try: + spk_id = spk_id['spk_id'] + if not spk_id: + return {'status': False, 'msg': "spk_id can not be None"} + vpr.vpr_del(username=spk_id) + return {'status': True, 'msg': "Successfully delete data!"} + except Exception as e: + return {'status': False, 'msg': e}, 400 + + +@app.get('/vpr/list') +async def vpr_list(): + # Get all records in MySQL + try: + spk_ids, vpr_ids = vpr.do_list() + return spk_ids, vpr_ids + except Exception as e: + return {'status': False, 'msg': e}, 400 + + +@app.get('/vpr/database64') +async def vpr_database64(vprId: int): + # Get the audio file from path by spk_id in MySQL + 
class AudioMannger:
    """Accumulate a streaming PCM feed, run a simple energy-based VAD and
    hand complete utterances to the robot's ASR.

    NOTE(review): the class name keeps the original misspelling — it is the
    public interface used by callers.
    """

    def __init__(self, robot, frame_length=160, frame=10, data_width=2,
                 vad_default=300):
        # Accumulated raw PCM bytes of the current utterance.
        self.audios = b''
        self.asr_result = ""
        # Speech backend; must expose speech2text().
        self.robot = robot

        self.file_dir = "source"
        os.makedirs(self.file_dir, exist_ok=True)
        self.vad_deafult = vad_default  # attribute name kept as-is for compatibility
        self.vad_threshold = vad_default
        self.vad_threshold_path = os.path.join(self.file_dir,
                                               "vad_threshold.npy")

        # One frame = 10 ms at 16 kHz (160 samples).
        self.frame_length = frame_length
        # A VAD decision is made every `frame` frames.
        self.frame = frame
        # int16 samples -> 2 bytes per sample.
        self.data_width = data_width
        # VAD window size in BYTES.
        self.window_length = frame_length * frame * data_width

        # Recording state machine.
        self.on_asr = False
        self.silence_cnt = 0
        self.max_silence_cnt = 4
        self.is_pause = False  # pause/resume flag set by the caller

    def init(self):
        """Load a previously calibrated VAD threshold, if one was saved."""
        if os.path.exists(self.vad_threshold_path):
            self.vad_threshold = np.load(self.vad_threshold_path)

    def clear_audio(self):
        """Drop the accumulated PCM buffer."""
        self.audios = b''

    def clear_asr(self):
        """Drop the last ASR result."""
        self.asr_result = ""

    def compute_chunk_volume(self, start_index, pcm_bins):
        """Mean absolute int16 amplitude of one VAD window of ``pcm_bins``."""
        window = pcm_bins[start_index: start_index + self.window_length]
        samples = np.frombuffer(window, np.int16)
        return np.mean(np.abs(samples.astype(np.float32)))

    def is_speech(self, start_index, pcm_bins):
        """True when the window starting at ``start_index`` is above threshold.

        FIX: boundary check is now ``>=`` — the original ``>`` let
        ``start_index == len(pcm_bins)`` through, producing an empty slice
        and ``np.mean`` of an empty array (nan + RuntimeWarning).
        """
        if start_index >= len(pcm_bins):
            return False
        energy = self.compute_chunk_volume(start_index=start_index,
                                           pcm_bins=pcm_bins)
        return bool(energy > self.vad_threshold)

    def compute_env_volume(self, pcm_bins):
        """Calibrate the VAD threshold from an ambient-noise sample and
        persist it to ``vad_threshold.npy``."""
        max_energy = 0
        start = 0
        while start < len(pcm_bins):
            energy = self.compute_chunk_volume(start_index=start,
                                               pcm_bins=pcm_bins)
            if energy > max_energy:
                max_energy = energy
            start += self.window_length
        # Margin above the loudest ambient window, never below the default.
        self.vad_threshold = (max_energy + 100
                              if max_energy > self.vad_deafult
                              else self.vad_deafult)

        np.save(self.vad_threshold_path, self.vad_threshold)
        print(f"vad 阈值大小: {self.vad_threshold}")
        print(f"环境采样保存: {os.path.realpath(self.vad_threshold_path)}")

    def stream_asr(self, pcm_bin):
        """Feed one PCM chunk through the VAD state machine.

        Returns the transcript once an utterance is closed by sustained
        silence, otherwise an empty string.
        """
        start = 0
        while start < len(pcm_bin):
            if self.is_speech(start_index=start, pcm_bins=pcm_bin):
                self.on_asr = True
                self.silence_cnt = 0
                print("录音中")
                self.audios += pcm_bin[start: start + self.window_length]
            else:
                if self.on_asr:
                    self.silence_cnt += 1
                    if self.silence_cnt > self.max_silence_cnt:
                        self.on_asr = False
                        self.silence_cnt = 0
                        # Utterance closed by silence.
                        print("录音停止")
                        # Require at least 2*16000 bytes (= 1 s of int16
                        # audio at 16 kHz) before running ASR.
                        if len(self.audios) > 2 * 16000:
                            file_path = os.path.join(
                                self.file_dir,
                                "asr_" + datetime.datetime.strftime(
                                    datetime.datetime.now(),
                                    '%Y%m%d%H%M%S') + randName() + ".wav")
                            self.save_audio(file_path=file_path)
                            self.asr_result = self.robot.speech2text(file_path)
                            self.clear_audio()
                            return self.asr_result
                    else:
                        # Short silence inside an utterance: keep recording.
                        print("录音中 静音")
                        self.audios += pcm_bin[start: start + self.window_length]
            start += self.window_length
        return ""

    def save_audio(self, file_path):
        """Write the accumulated PCM as a mono, 16-bit, 16 kHz wav file."""
        print("保存音频")
        # FIX: original comment claimed stereo while writing 1 channel;
        # also use the context manager so the handle is always closed.
        with wave.open(file_path, 'wb') as wf:
            wf.setnchannels(1)      # mono
            wf.setsampwidth(2)      # 16-bit samples
            wf.setframerate(16000)  # 16 kHz
            wf.writeframes(self.audios)

    def end(self):
        """Flush whatever has been buffered and return its transcript."""
        file_path = os.path.join(self.file_dir, "asr.wav")
        self.save_audio(file_path=file_path)
        return self.robot.speech2text(file_path)

    def stop(self):
        """Pause recording and discard the buffer."""
        self.is_pause = True
        self.audios = b''

    def resume(self):
        """Resume recording."""
        self.is_pause = False


if __name__ == '__main__':
    from robot import Robot

    # NOTE(review): this demo looks stale — Robot() is called without its
    # required config arguments and has no init() method; confirm before use.
    chatbot = Robot()
    chatbot.init()
    audio_manger = AudioMannger(chatbot)

    file_list = [
        "source/20220418145230qbenc.pcm",
    ]

    for file in file_list:
        with open(file, "rb") as f:
            pcm_bin = f.read()
            print(len(pcm_bin))
            asr_ = audio_manger.stream_asr(pcm_bin=pcm_bin)
            print(asr_)

    print(audio_manger.end())

    print(chatbot.speech2text("source/20220418145230zrxia.wav"))
def readWave(samples):
    """Yield fixed-size chunks of ``samples``, zero-padding the tail.

    FIX: the original comment claimed 80 ms per chunk; 85 * 16 = 1360
    samples is 85 ms at a 16 kHz sample rate.
    """
    x_len = len(samples)

    chunk_size = 85 * 16  # 85 ms at sample_rate = 16 kHz
    # Zero padding needed so the total length is a multiple of chunk_size
    # (0 when it already is).
    padding_len_x = -x_len % chunk_size

    padding = np.zeros((padding_len_x), dtype=samples.dtype)
    padded_x = np.concatenate([samples, padding], axis=0)

    assert (x_len + padding_len_x) % chunk_size == 0
    num_chunk = (x_len + padding_len_x) // chunk_size
    for i in range(num_chunk):
        start = i * chunk_size
        yield padded_x[start:start + chunk_size]


class ASR:
    """Thin wrapper around the PaddleSpeech online ASR engine, supporting
    both offline (whole-utterance) and streaming recognition."""

    def __init__(self, config_path, ) -> None:
        self.config = get_config(config_path)['asr_online']
        self.engine = ASREngine()
        self.engine.init(self.config)
        self.connection_handler = PaddleASRConnectionHanddler(self.engine)

    def offlineASR(self, samples, sample_rate=16000):
        """Recognize a complete utterance in one pass and reset the engine."""
        x_chunk, x_chunk_lens = self.engine.preprocess(samples=samples,
                                                       sample_rate=sample_rate)
        self.engine.run(x_chunk, x_chunk_lens)
        result = self.engine.postprocess()
        self.engine.reset()
        return result

    def onlineASR(self, samples: bytes = None, is_finished=False):
        """Feed one PCM chunk to the streaming decoder.

        When ``is_finished`` is True the decoder is flushed, rescored and
        reset for the next utterance; the final transcript is returned.
        """
        if not is_finished:
            # Streaming: extract features and decode incrementally.
            self.connection_handler.extract_feat(samples)
            self.connection_handler.decode(is_finished)
            return self.connection_handler.get_result()
        # End of stream: flush, rescore, return final result, then reset.
        self.connection_handler.decode(is_finished=True)
        self.connection_handler.rescoring()
        asr_results = self.connection_handler.get_result()
        self.connection_handler.reset()
        return asr_results


if __name__ == '__main__':
    config_path = r"../../PaddleSpeech/paddlespeech/server/conf/ws_conformer_application.yaml"

    wav_path = r"../../source/demo/demo_16k.wav"
    samples, sample_rate = soundfile.read(wav_path, dtype='int16')

    asr = ASR(config_path=config_path)
    end_result = asr.offlineASR(samples=samples, sample_rate=sample_rate)
    print("端到端识别结果:", end_result)

    for sub_wav in readWave(samples=samples):
        message = sub_wav.tobytes()
        offline_result = asr.onlineASR(message, is_finished=False)
        print("流式识别结果: ", offline_result)
    offline_result = asr.onlineASR(is_finished=True)
    print("流式识别结果: ", offline_result)
def dict_factory(cursor, row):
    """sqlite3 row factory: return each row as a {column_name: value} dict."""
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d


class DataBase(object):
    """SQLite-backed store for voiceprint records.

    Each row holds a username, a base64-encoded embedding vector, and the
    path of the wav file the embedding came from.
    """

    def __init__(self, db_path: str):
        db_path = os.path.realpath(db_path)

        if os.path.exists(db_path):
            self.db_path = db_path
        else:
            # Make sure the parent directory exists before sqlite creates
            # the database file.
            db_path_dir = os.path.dirname(db_path)
            os.makedirs(db_path_dir, exist_ok=True)
            self.db_path = db_path

        self.conn = sqlite3.connect(self.db_path)
        self.conn.row_factory = dict_factory
        self.cursor = self.conn.cursor()
        self.init_database()

    def init_database(self):
        """Create the vprtable if it does not exist yet."""
        sql = """
        CREATE TABLE IF NOT EXISTS vprtable (
            `id` INTEGER PRIMARY KEY AUTOINCREMENT,
            `username` TEXT NOT NULL,
            `vector` TEXT NOT NULL,
            `wavpath` TEXT NOT NULL
        );
        """
        self.cursor.execute(sql)
        self.conn.commit()

    def execute_base(self, sql, data_dict):
        """Execute a parameterized statement and commit."""
        self.cursor.execute(sql, data_dict)
        self.conn.commit()

    def insert_one(self, username, vector_base64: str, wav_path):
        """Insert one record; returns (rowid, message) or (None, error)."""
        if not os.path.exists(wav_path):
            return None, "wav not exists"
        sql = """
            insert into
            vprtable (username, vector, wavpath)
            values (?, ?, ?)
        """
        try:
            self.cursor.execute(sql, (username, vector_base64, wav_path))
            self.conn.commit()
            lastidx = self.cursor.lastrowid
            return lastidx, "data insert success"
        except Exception as e:
            print(e)
            return None, e

    def select_all(self):
        """Every record in the table, as a list of dicts."""
        return self.cursor.execute("SELECT * from vprtable").fetchall()

    def select_by_id(self, vpr_id):
        """Record(s) with the given row id.

        FIX: parameterized query instead of f-string interpolation
        (SQL injection).
        """
        sql = "SELECT * from vprtable WHERE `id` = ?"
        return self.cursor.execute(sql, (vpr_id,)).fetchall()

    def select_by_username(self, username):
        """Record(s) registered under ``username``.

        FIX: parameterized query instead of f-string interpolation
        (SQL injection).
        """
        sql = "SELECT * from vprtable WHERE `username` = ?"
        return self.cursor.execute(sql, (username,)).fetchall()

    def drop_by_username(self, username):
        """Delete every record registered under ``username``.

        FIX: parameterized query instead of f-string interpolation
        (SQL injection).
        """
        self.cursor.execute("DELETE from vprtable WHERE `username` = ?",
                            (username,))
        self.conn.commit()

    def drop_all(self):
        """Delete every record (table itself is kept)."""
        self.cursor.execute("DELETE from vprtable")
        self.conn.commit()

    def drop_table(self):
        """Drop the vprtable entirely."""
        self.cursor.execute("DROP TABLE vprtable")
        self.conn.commit()

    def encode_vector(self, vector: np.ndarray):
        """numpy array -> base64 text for the `vector` column."""
        return base64.b64encode(vector).decode('utf8')

    def decode_vector(self, vector_base64, dtype=np.float32):
        """base64 text -> 1-D numpy array of ``dtype``."""
        b = base64.b64decode(vector_base64)
        vc = np.frombuffer(b, dtype=dtype)
        return vc
merge_sentences=merge_sentences, + get_tone_ids=get_tone_ids) + phone_ids = input_ids["phone_ids"] + wav_list = [] + for i in range(len(phone_ids)): + orig_hs = self.engine.executor.am_encoder_infer_sess.run( + None, input_feed={'text': phone_ids[i].numpy()} + ) + hs = orig_hs[0] + am_decoder_output = self.engine.executor.am_decoder_sess.run( + None, input_feed={'xs': hs}) + am_postnet_output = self.engine.executor.am_postnet_sess.run( + None, + input_feed={ + 'xs': np.transpose(am_decoder_output[0], (0, 2, 1)) + }) + am_output_data = am_decoder_output + np.transpose( + am_postnet_output[0], (0, 2, 1)) + normalized_mel = am_output_data[0][0] + mel = denorm(normalized_mel, self.engine.executor.am_mu, self.engine.executor.am_std) + wav = self.engine.executor.voc_sess.run( + output_names=None, input_feed={'logmel': mel})[0] + wav_list.append(wav) + wavs = np.concatenate(wav_list) + return wavs + + def streamTTS(self, text): + for sub_wav_base64 in self.engine.run(sentence=text): + yield sub_wav_base64 + + def streamTTSBytes(self, text): + for wav in self.engine.executor.infer( + text=text, + lang=self.engine.config.lang, + am=self.engine.config.am, + spk_id=0): + wav = float2pcm(wav) # float32 to int16 + wav_bytes = wav.tobytes() # to bytes + yield wav_bytes + + + def after_process(self, wav): + # for tvm + wav = float2pcm(wav) # float32 to int16 + wav_bytes = wav.tobytes() # to bytes + wav_base64 = base64.b64encode(wav_bytes).decode('utf8') # to base64 + return wav_base64 + + def streamTTS_TVM(self, text): + # 用 TVM 优化 + pass + +if __name__ == '__main__': + text = "啊哈哈哈哈哈哈啊哈哈哈哈哈哈啊哈哈哈哈哈哈啊哈哈哈哈哈哈啊哈哈哈哈哈哈" + config_path="../../PaddleSpeech/demos/streaming_tts_server/conf/tts_online_application.yaml" + tts = TTS(config_path) + + for sub_wav in tts.streamTTS(text): + print("sub_wav_base64: ", len(sub_wav)) + + end_wav = tts.offlineTTS(text) + print(end_wav) + + + \ No newline at end of file diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/vpr.py 
class VPR:
    """Voiceprint registry for the demo: embeddings are searched with an
    in-memory faiss inner-product index; metadata (username, base64 vector,
    wav path) lives in SQLite and is used to rebuild the index on startup."""

    def __init__(self, db_path, dim, top_k) -> None:
        self.db_path = db_path
        self.dim = dim          # embedding dimensionality (192 in the demo)
        self.top_k = top_k      # neighbours returned per query
        self.dtype = np.float32
        self.vpr_idx = 0

        # SQLite metadata store.
        self.db = DataBase(db_path)

        # faiss inner-product index, wrapped so vectors carry DB row ids.
        index_ip = faiss.IndexFlatIP(dim)
        self.index_ip = faiss.IndexIDMap(index_ip)
        self.init()

    def init(self):
        """Rebuild the faiss index from every vector stored in SQLite."""
        sql_dbs = self.db.select_all()
        if sql_dbs:
            for sql_db in sql_dbs:
                idx = sql_db['id']
                vc_bs64 = sql_db['vector']
                vc = self.db.decode_vector(vc_bs64)
                if len(vc.shape) == 1:
                    vc = np.expand_dims(vc, axis=0)
                self.index_ip.add_with_ids(
                    vc, np.array((idx,)).astype('int64'))
        logging.info("faiss 构建完毕")

    def faiss_enroll(self, idx, vc):
        """Add one (row-id, vector) pair to the faiss index."""
        self.index_ip.add_with_ids(vc, np.array((idx,)).astype('int64'))

    def vpr_enroll(self, username, wav_path):
        """Register a voiceprint; returns the new DB row id, or None.

        FIX: the original reshaped the embedding BEFORE its None-check
        (masking extraction failures), and the failure branch executed
        ``last_idx, mess = None``, which raises TypeError.
        """
        emb = get_audio_embedding(wav_path)
        if emb is None:
            return None
        emb = np.expand_dims(emb, axis=0)
        emb_bs64 = self.db.encode_vector(emb)
        last_idx, mess = self.db.insert_one(username, emb_bs64, wav_path)
        if last_idx:
            self.faiss_enroll(last_idx, emb)
        return last_idx

    def vpr_recog(self, wav_path):
        """Top-k (score, row-id) matches for the speaker in ``wav_path``,
        or None when no embedding could be extracted."""
        emb_search = get_audio_embedding(wav_path)

        if emb_search is not None:
            emb_search = np.expand_dims(emb_search, axis=0)
            D, I = self.index_ip.search(emb_search, self.top_k)
            D = D.tolist()[0]
            I = I.tolist()[0]
            # faiss pads missing neighbours with id -1; drop them.
            return [(round(D[i] * 100, 2), I[i]) for i in range(len(D))
                    if I[i] != -1]
        else:
            logging.error("识别失败")
            return None

    def do_search_vpr(self, wav_path):
        """Search and resolve matches to unique usernames.

        Returns parallel lists (spk_ids, paths, scores); paths are filled
        with empty strings for the web UI.
        """
        spk_ids, paths, scores = [], [], []
        # Robustness: treat a failed recognition as "no matches" instead of
        # crashing while iterating None.
        recog_result = self.vpr_recog(wav_path) or []
        for score, idx in recog_result:
            username = self.db.select_by_id(idx)[0]['username']
            if username not in spk_ids:
                spk_ids.append(username)
                scores.append(score)
                paths.append("")
        return spk_ids, paths, scores

    def vpr_del(self, username):
        """Delete every voiceprint registered under ``username`` from both
        the faiss index and SQLite."""
        res = self.db.select_by_username(username)
        for r in res:
            idx = r['id']
            self.index_ip.remove_ids(np.array((idx,)).astype('int64'))

        self.db.drop_by_username(username)

    def vpr_list(self):
        """All records stored in SQLite."""
        return self.db.select_all()

    def do_list(self):
        """Parallel lists of (usernames, row ids) for the UI."""
        spk_ids, vpr_ids = [], []
        for res in self.db.select_all():
            spk_ids.append(res['username'])
            vpr_ids.append(res['id'])
        return spk_ids, vpr_ids

    def do_get_wav(self, vpr_idx):
        """Wav path of the record with row id ``vpr_idx``."""
        res = self.db.select_by_id(vpr_idx)
        return res[0]['wavpath']

    def vpr_data(self, idx):
        """Full record(s) with row id ``idx``."""
        res = self.db.select_by_id(idx)
        return res

    def vpr_droptable(self):
        """Drop the SQLite table and clear the faiss index."""
        self.db.drop_table()
        self.index_ip.reset()


if __name__ == '__main__':

    db_path = "../../source/db/vpr.sqlite"
    dim = 192
    top_k = 5
    vpr = VPR(db_path, dim, top_k)

    # Demo data.
    username = "sss"
    wav_path = r"../../source/demo/demo_16k.wav"

    # Enroll.
    vpr.vpr_enroll(username, wav_path)

    # List records.
    print(vpr.vpr_list())

    # Recognize.
    recolist = vpr.vpr_recog(wav_path)
    print(recolist)

    # Fetch by id.
    idx = recolist[0][1]
    print(vpr.vpr_data(idx))

    # Delete.
    vpr.vpr_del(username)
    vpr.vpr_droptable()
class ConnectionManager:
    """Tracks active WebSocket connections and broadcasts messages to them.

    Annotations are kept as string literals so the class can be imported
    without fastapi being resolvable at definition time; they are not used
    at runtime.
    """

    def __init__(self):
        # Currently-open websocket connections.
        self.active_connections: "List[WebSocket]" = []

    async def connect(self, ws: "WebSocket"):
        """Accept the handshake, then start tracking the connection."""
        await ws.accept()
        self.active_connections.append(ws)

    def disconnect(self, ws: "WebSocket"):
        """Stop tracking a closed connection."""
        self.active_connections.remove(ws)

    @staticmethod
    async def send_personal_message(message: str, ws: "WebSocket"):
        """Send ``message`` to a single client."""
        await ws.send_text(message)

    async def broadcast(self, message: str):
        """Send ``message`` to every tracked client."""
        for conn in self.active_connections:
            await conn.send_text(message)


manager = ConnectionManager()
class Robot:
    """Facade tying ASR, TTS and NLP together for the web demo."""

    def __init__(self, asr_config, tts_config, asr_init_path,
                 ie_model_path=None) -> None:
        self.nlp = NLP(ie_model_path=ie_model_path)
        self.asr = ASR(config_path=asr_config)
        self.tts = TTS(config_path=tts_config)
        self.tts_sample_rate = 24000
        self.asr_sample_rate = 16000

        # Streaming recognition is weaker than the end-to-end model, so a
        # separate offline ASR executor is kept alongside the streaming one.
        self.asr_model = ASRExecutor()
        self.asr_name = "conformer_wenetspeech"
        self.warm_up_asrmodel(asr_init_path)

    def warm_up_asrmodel(self, asr_init_path):
        """Ensure a warm-up wav exists (synthesizing one at 24 kHz via TTS
        if needed), then run the offline ASR once to load the model."""
        if not os.path.exists(asr_init_path):
            path_dir = os.path.dirname(asr_init_path)
            os.makedirs(path_dir, exist_ok=True)
            self.text2speech("生成初始音频", asr_init_path)

        # First call initializes the offline ASR model.
        self.asr_model(asr_init_path, model=self.asr_name, lang='zh',
                       sample_rate=16000)

    def speech2text(self, audio_file):
        """Offline recognition of a complete wav file."""
        self.asr_model.preprocess(self.asr_name, audio_file)
        self.asr_model.infer(self.asr_name)
        return self.asr_model.postprocess()

    def text2speech(self, text, outpath):
        """Synthesize ``text`` to ``outpath`` and return the waveform."""
        wav = self.tts.offlineTTS(text)
        sf.write(outpath, wav, samplerate=self.tts_sample_rate)
        return wav

    def text2speechStream(self, text):
        """Yield base64-encoded wav chunks as they are synthesized."""
        yield from self.tts.streamTTS(text=text)

    def text2speechStreamBytes(self, text):
        """Yield raw PCM byte chunks as they are synthesized."""
        yield from self.tts.streamTTSBytes(text=text)

    def chat(self, text):
        """Free-form chit-chat via the dialogue model."""
        return self.nlp.chat(text)

    def ie(self, text):
        """Information extraction over ``text``."""
        return self.nlp.ie(text)
import random


def _envelope(code, result, message):
    # Common response shape shared by SuccessRequest / ErrorRequest.
    return {
        "code": code,
        "result": result,
        "message": message
    }


def randName(n=5):
    """Return ``n`` distinct random lowercase letters as a single string."""
    return "".join(random.sample('zyxwvutsrqponmlkjihgfedcba', n))


def SuccessRequest(result=None, message="ok"):
    """JSON envelope for a successful API response (code 0)."""
    return _envelope(0, result, message)


def ErrorRequest(result=None, message="error"):
    """JSON envelope for a failed API response (code -1)."""
    return _envelope(-1, result, message)
The template uses Vue 3 ` + + diff --git a/demos/speech_web_demo/web_client/package-lock.json b/demos/speech_web_demo/web_client/package-lock.json new file mode 100644 index 00000000..f1c77978 --- /dev/null +++ b/demos/speech_web_demo/web_client/package-lock.json @@ -0,0 +1,1869 @@ +{ + "name": "paddlespeechwebclient", + "version": "0.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "paddlespeechwebclient", + "version": "0.0.0", + "dependencies": { + "ant-design-vue": "^2.2.8", + "axios": "^0.26.1", + "element-plus": "^2.1.9", + "js-audio-recorder": "0.5.7", + "lamejs": "^1.2.1", + "less": "^4.1.2", + "vue": "^3.2.25" + }, + "devDependencies": { + "@vitejs/plugin-vue": "^2.3.0", + "vite": "^2.9.0" + } + }, + "node_modules/@ant-design/colors": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/@ant-design/colors/-/colors-6.0.0.tgz", + "integrity": "sha512-qAZRvPzfdWHtfameEGP2Qvuf838NhergR35o+EuVyB5XvSA98xod5r4utvi4TJ3ywmevm290g9nsCG5MryrdWQ==", + "dependencies": { + "@ctrl/tinycolor": "^3.4.0" + } + }, + "node_modules/@ant-design/icons-svg": { + "version": "4.2.1", + "resolved": "https://registry.npmmirror.com/@ant-design/icons-svg/-/icons-svg-4.2.1.tgz", + "integrity": "sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw==" + }, + "node_modules/@ant-design/icons-vue": { + "version": "6.1.0", + "resolved": "https://registry.npmmirror.com/@ant-design/icons-vue/-/icons-vue-6.1.0.tgz", + "integrity": "sha512-EX6bYm56V+ZrKN7+3MT/ubDkvJ5rK/O2t380WFRflDcVFgsvl3NLH7Wxeau6R8DbrO5jWR6DSTC3B6gYFp77AA==", + "dependencies": { + "@ant-design/colors": "^6.0.0", + "@ant-design/icons-svg": "^4.2.1" + }, + "peerDependencies": { + "vue": ">=3.0.3" + } + }, + "node_modules/@babel/parser": { + "version": "7.17.9", + "resolved": "https://registry.npmmirror.com/@babel/parser/-/parser-7.17.9.tgz", + "integrity": 
"sha512-vqUSBLP8dQHFPdPi9bc5GK9vRkYHJ49fsZdtoJ8EQ8ibpwk5rPKfvNIwChB0KVXcIjcepEBBd2VHC5r9Gy8ueg==", + "license": "MIT", + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.17.9", + "resolved": "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.17.9.tgz", + "integrity": "sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==", + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@ctrl/tinycolor": { + "version": "3.4.1", + "resolved": "https://registry.npmmirror.com/@ctrl/tinycolor/-/tinycolor-3.4.1.tgz", + "integrity": "sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/@element-plus/icons-vue": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/@element-plus/icons-vue/-/icons-vue-1.1.4.tgz", + "integrity": "sha512-Iz/nHqdp1sFPmdzRwHkEQQA3lKvoObk8azgABZ81QUOpW9s/lUyQVUSh0tNtEPZXQlKwlSh7SPgoVxzrE0uuVQ==", + "license": "MIT", + "peerDependencies": { + "vue": "^3.2.0" + } + }, + "node_modules/@floating-ui/core": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/@floating-ui/core/-/core-0.6.1.tgz", + "integrity": "sha512-Y30eVMcZva8o84c0HcXAtDO4BEzPJMvF6+B7x7urL2xbAqVsGJhojOyHLaoQHQYjb6OkqRq5kO+zeySycQwKqg==", + "license": "MIT" + }, + "node_modules/@floating-ui/dom": { + "version": "0.4.4", + "resolved": "https://registry.npmmirror.com/@floating-ui/dom/-/dom-0.4.4.tgz", + "integrity": "sha512-0Ulu3B/dqQplUUSqnTx0foSrlYuMN+GTtlJWvNJwt6Fr7/PqmlR/Y08o6/+bxDWr6p3roBJRaQ51MDZsNmEhhw==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^0.6.1" + } + }, + "node_modules/@popperjs/core": { + "version": "2.11.5", + "resolved": "https://registry.npmmirror.com/@popperjs/core/-/core-2.11.5.tgz", + 
"integrity": "sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, + "node_modules/@simonwep/pickr": { + "version": "1.8.2", + "resolved": "https://registry.npmmirror.com/@simonwep/pickr/-/pickr-1.8.2.tgz", + "integrity": "sha512-/l5w8BIkrpP6n1xsetx9MWPWlU6OblN5YgZZphxan0Tq4BByTCETL6lyIeY8lagalS2Nbt4F2W034KHLIiunKA==", + "dependencies": { + "core-js": "^3.15.1", + "nanopop": "^2.1.0" + } + }, + "node_modules/@types/lodash": { + "version": "4.14.181", + "resolved": "https://registry.npmmirror.com/@types/lodash/-/lodash-4.14.181.tgz", + "integrity": "sha512-n3tyKthHJbkiWhDZs3DkhkCzt2MexYHXlX0td5iMplyfwketaOeKboEVBqzceH7juqvEg3q5oUoBFxSLu7zFag==", + "license": "MIT" + }, + "node_modules/@types/lodash-es": { + "version": "4.17.6", + "resolved": "https://registry.npmmirror.com/@types/lodash-es/-/lodash-es-4.17.6.tgz", + "integrity": "sha512-R+zTeVUKDdfoRxpAryaQNRKk3105Rrgx2CFRClIgRGaqDTdjsm8h6IYA8ir584W3ePzkZfst5xIgDwYrlh9HLg==", + "license": "MIT", + "dependencies": { + "@types/lodash": "*" + } + }, + "node_modules/@vitejs/plugin-vue": { + "version": "2.3.1", + "resolved": "https://registry.npmmirror.com/@vitejs/plugin-vue/-/plugin-vue-2.3.1.tgz", + "integrity": "sha512-YNzBt8+jt6bSwpt7LP890U1UcTOIZZxfpE5WOJ638PNxSEKOqAi0+FSKS0nVeukfdZ0Ai/H7AFd6k3hayfGZqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "vite": "^2.5.10", + "vue": "^3.2.25" + } + }, + "node_modules/@vue/compiler-core": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-core/-/compiler-core-3.2.32.tgz", + "integrity": "sha512-bRQ8Rkpm/aYFElDWtKkTPHeLnX5pEkNxhPUcqu5crEJIilZH0yeFu/qUAcV4VfSE2AudNPkQSOwMZofhnuutmA==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.16.4", + "@vue/shared": "3.2.32", + "estree-walker": "^2.0.2", + 
"source-map": "^0.6.1" + } + }, + "node_modules/@vue/compiler-dom": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-dom/-/compiler-dom-3.2.32.tgz", + "integrity": "sha512-maa3PNB/NxR17h2hDQfcmS02o1f9r9QIpN1y6fe8tWPrS1E4+q8MqrvDDQNhYVPd84rc3ybtyumrgm9D5Rf/kg==", + "license": "MIT", + "dependencies": { + "@vue/compiler-core": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-sfc/-/compiler-sfc-3.2.32.tgz", + "integrity": "sha512-uO6+Gh3AVdWm72lRRCjMr8nMOEqc6ezT9lWs5dPzh1E9TNaJkMYPaRtdY9flUv/fyVQotkfjY/ponjfR+trPSg==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.16.4", + "@vue/compiler-core": "3.2.32", + "@vue/compiler-dom": "3.2.32", + "@vue/compiler-ssr": "3.2.32", + "@vue/reactivity-transform": "3.2.32", + "@vue/shared": "3.2.32", + "estree-walker": "^2.0.2", + "magic-string": "^0.25.7", + "postcss": "^8.1.10", + "source-map": "^0.6.1" + } + }, + "node_modules/@vue/compiler-ssr": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-ssr/-/compiler-ssr-3.2.32.tgz", + "integrity": "sha512-ZklVUF/SgTx6yrDUkaTaBL/JMVOtSocP+z5Xz/qIqqLdW/hWL90P+ob/jOQ0Xc/om57892Q7sRSrex0wujOL2Q==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "node_modules/@vue/reactivity": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/reactivity/-/reactivity-3.2.32.tgz", + "integrity": "sha512-4zaDumuyDqkuhbb63hRd+YHFGopW7srFIWesLUQ2su/rJfWrSq3YUvoKAJE8Eu1EhZ2Q4c1NuwnEreKj1FkDxA==", + "license": "MIT", + "dependencies": { + "@vue/shared": "3.2.32" + } + }, + "node_modules/@vue/reactivity-transform": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/reactivity-transform/-/reactivity-transform-3.2.32.tgz", + "integrity": 
"sha512-CW1W9zaJtE275tZSWIfQKiPG0iHpdtSlmTqYBu7Y62qvtMgKG5yOxtvBs4RlrZHlaqFSE26avLAgQiTp4YHozw==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.16.4", + "@vue/compiler-core": "3.2.32", + "@vue/shared": "3.2.32", + "estree-walker": "^2.0.2", + "magic-string": "^0.25.7" + } + }, + "node_modules/@vue/runtime-core": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/runtime-core/-/runtime-core-3.2.32.tgz", + "integrity": "sha512-uKKzK6LaCnbCJ7rcHvsK0azHLGpqs+Vi9B28CV1mfWVq1F3Bj8Okk3cX+5DtD06aUh4V2bYhS2UjjWiUUKUF0w==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/runtime-dom/-/runtime-dom-3.2.32.tgz", + "integrity": "sha512-AmlIg+GPqjkNoADLjHojEX5RGcAg+TsgXOOcUrtDHwKvA8mO26EnLQLB8nylDjU6AMJh2CIYn8NEgyOV5ZIScQ==", + "license": "MIT", + "dependencies": { + "@vue/runtime-core": "3.2.32", + "@vue/shared": "3.2.32", + "csstype": "^2.6.8" + } + }, + "node_modules/@vue/server-renderer": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/server-renderer/-/server-renderer-3.2.32.tgz", + "integrity": "sha512-TYKpZZfRJpGTTiy/s6bVYwQJpAUx3G03z4G7/3O18M11oacrMTVHaHjiPuPqf3xQtY8R4LKmQ3EOT/DRCA/7Wg==", + "license": "MIT", + "dependencies": { + "@vue/compiler-ssr": "3.2.32", + "@vue/shared": "3.2.32" + }, + "peerDependencies": { + "vue": "3.2.32" + } + }, + "node_modules/@vue/shared": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/shared/-/shared-3.2.32.tgz", + "integrity": "sha512-bjcixPErUsAnTQRQX4Z5IQnICYjIfNCyCl8p29v1M6kfVzvwOICPw+dz48nNuWlTOOx2RHhzHdazJibE8GSnsw==", + "license": "MIT" + }, + "node_modules/@vueuse/core": { + "version": "8.2.5", + "resolved": "https://registry.npmmirror.com/@vueuse/core/-/core-8.2.5.tgz", + "integrity": 
"sha512-5prZAA1Ji2ltwNUnzreu6WIXYqHYP/9U2BiY5mD/650VYLpVcwVlYznJDFcLCmEWI3o3Vd34oS1FUf+6Mh68GQ==", + "license": "MIT", + "dependencies": { + "@vueuse/metadata": "8.2.5", + "@vueuse/shared": "8.2.5", + "vue-demi": "*" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.1.0", + "vue": "^2.6.0 || ^3.2.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + }, + "vue": { + "optional": true + } + } + }, + "node_modules/@vueuse/metadata": { + "version": "8.2.5", + "resolved": "https://registry.npmmirror.com/@vueuse/metadata/-/metadata-8.2.5.tgz", + "integrity": "sha512-Lk9plJjh9cIdiRdcj16dau+2LANxIdFCiTgdfzwYXbflxq0QnMBeOD2qHgKDE7fuVrtPcVWj8VSuZEx1HRfNQA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/shared": { + "version": "8.2.5", + "resolved": "https://registry.npmmirror.com/@vueuse/shared/-/shared-8.2.5.tgz", + "integrity": "sha512-lNWo+7sk6JCuOj4AiYM+6HZ6fq4xAuVq1sVckMQKgfCJZpZRe4i8es+ZULO5bYTKP+VrOCtqrLR2GzEfrbr3YQ==", + "license": "MIT", + "dependencies": { + "vue-demi": "*" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.1.0", + "vue": "^2.6.0 || ^3.2.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + }, + "vue": { + "optional": true + } + } + }, + "node_modules/ant-design-vue": { + "version": "2.2.8", + "resolved": "https://registry.npmmirror.com/ant-design-vue/-/ant-design-vue-2.2.8.tgz", + "integrity": "sha512-3graq9/gCfJQs6hznrHV6sa9oDmk/D1H3Oo0vLdVpPS/I61fZPk8NEyNKCHpNA6fT2cx6xx9U3QS63uuyikg/Q==", + "dependencies": { + "@ant-design/icons-vue": "^6.0.0", + "@babel/runtime": "^7.10.5", + "@simonwep/pickr": "~1.8.0", + "array-tree-filter": "^2.1.0", + "async-validator": "^3.3.0", + "dom-align": "^1.12.1", + "dom-scroll-into-view": "^2.0.0", + "lodash": "^4.17.21", + "lodash-es": 
"^4.17.15", + "moment": "^2.27.0", + "omit.js": "^2.0.0", + "resize-observer-polyfill": "^1.5.1", + "scroll-into-view-if-needed": "^2.2.25", + "shallow-equal": "^1.0.0", + "vue-types": "^3.0.0", + "warning": "^4.0.0" + }, + "peerDependencies": { + "@vue/compiler-sfc": ">=3.1.0", + "vue": ">=3.1.0" + } + }, + "node_modules/ant-design-vue/node_modules/async-validator": { + "version": "3.5.2", + "resolved": "https://registry.npmmirror.com/async-validator/-/async-validator-3.5.2.tgz", + "integrity": "sha512-8eLCg00W9pIRZSB781UUX/H6Oskmm8xloZfr09lz5bikRpBVDlJ3hRVuxxP1SxcwsEYfJ4IU8Q19Y8/893r3rQ==" + }, + "node_modules/array-tree-filter": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/array-tree-filter/-/array-tree-filter-2.1.0.tgz", + "integrity": "sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw==" + }, + "node_modules/async-validator": { + "version": "4.0.7", + "resolved": "https://registry.npmmirror.com/async-validator/-/async-validator-4.0.7.tgz", + "integrity": "sha512-Pj2IR7u8hmUEDOwB++su6baaRi+QvsgajuFB9j95foM1N2gy5HM4z60hfusIO0fBPG5uLAEl6yCJr1jNSVugEQ==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "0.26.1", + "resolved": "https://registry.npmmirror.com/axios/-/axios-0.26.1.tgz", + "integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.14.8" + } + }, + "node_modules/axios/node_modules/follow-redirects": { + "version": "1.14.9", + "resolved": "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.14.9.tgz", + "integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + 
} + }, + "node_modules/compute-scroll-into-view": { + "version": "1.0.17", + "resolved": "https://registry.npmmirror.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz", + "integrity": "sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg==" + }, + "node_modules/copy-anything": { + "version": "2.0.6", + "resolved": "https://registry.npmmirror.com/copy-anything/-/copy-anything-2.0.6.tgz", + "integrity": "sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==", + "dependencies": { + "is-what": "^3.14.1" + } + }, + "node_modules/core-js": { + "version": "3.22.5", + "resolved": "https://registry.npmmirror.com/core-js/-/core-js-3.22.5.tgz", + "integrity": "sha512-VP/xYuvJ0MJWRAobcmQ8F2H6Bsn+s7zqAAjFaHGBMc5AQm7zaelhD1LGduFn2EehEcQcU+br6t+fwbpQ5d1ZWA==", + "hasInstallScript": true + }, + "node_modules/csstype": { + "version": "2.6.20", + "resolved": "https://registry.npmmirror.com/csstype/-/csstype-2.6.20.tgz", + "integrity": "sha512-/WwNkdXfckNgw6S5R125rrW8ez139lBHWouiBvX8dfMFtcn6V81REDqnH7+CRpRipfYlyU1CmOnOxrmGcFOjeA==", + "license": "MIT" + }, + "node_modules/dayjs": { + "version": "1.11.0", + "resolved": "https://registry.npmmirror.com/dayjs/-/dayjs-1.11.0.tgz", + "integrity": "sha512-JLC809s6Y948/FuCZPm5IX8rRhQwOiyMb2TfVVQEixG7P8Lm/gt5S7yoQZmC8x1UehI9Pb7sksEt4xx14m+7Ug==", + "license": "MIT" + }, + "node_modules/dom-align": { + "version": "1.12.3", + "resolved": "https://registry.npmmirror.com/dom-align/-/dom-align-1.12.3.tgz", + "integrity": "sha512-Gj9hZN3a07cbR6zviMUBOMPdWxYhbMI+x+WS0NAIu2zFZmbK8ys9R79g+iG9qLnlCwpFoaB+fKy8Pdv470GsPA==" + }, + "node_modules/dom-scroll-into-view": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/dom-scroll-into-view/-/dom-scroll-into-view-2.0.1.tgz", + "integrity": "sha512-bvVTQe1lfaUr1oFzZX80ce9KLDlZ3iU+XGNE/bz9HnGdklTieqsbmsLHe+rT2XWqopvL0PckkYqN7ksmm5pe3w==" + }, + "node_modules/element-plus": { + "version": 
"2.1.9", + "resolved": "https://registry.npmmirror.com/element-plus/-/element-plus-2.1.9.tgz", + "integrity": "sha512-6mWqS3YrmJPnouWP4otzL8+MehfOnDFqDbcIdnmC07p+Z0JkWe/CVKc4Wky8AYC8nyDMUQyiZYvooCbqGuM7pg==", + "license": "MIT", + "dependencies": { + "@ctrl/tinycolor": "^3.4.0", + "@element-plus/icons-vue": "^1.1.4", + "@floating-ui/dom": "^0.4.2", + "@popperjs/core": "^2.11.4", + "@types/lodash": "^4.14.181", + "@types/lodash-es": "^4.17.6", + "@vueuse/core": "^8.2.4", + "async-validator": "^4.0.7", + "dayjs": "^1.11.0", + "escape-html": "^1.0.3", + "lodash": "^4.17.21", + "lodash-es": "^4.17.21", + "lodash-unified": "^1.0.2", + "memoize-one": "^6.0.0", + "normalize-wheel-es": "^1.1.2" + }, + "peerDependencies": { + "vue": "^3.2.0" + } + }, + "node_modules/errno": { + "version": "0.1.8", + "resolved": "https://registry.npmmirror.com/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "optional": true, + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, + "node_modules/esbuild": { + "version": "0.14.36", + "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.14.36.tgz", + "integrity": "sha512-HhFHPiRXGYOCRlrhpiVDYKcFJRdO0sBElZ668M4lh2ER0YgnkLxECuFe7uWCf23FrcLc59Pqr7dHkTqmRPDHmw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "esbuild-android-64": "0.14.36", + "esbuild-android-arm64": "0.14.36", + "esbuild-darwin-64": "0.14.36", + "esbuild-darwin-arm64": "0.14.36", + "esbuild-freebsd-64": "0.14.36", + "esbuild-freebsd-arm64": "0.14.36", + "esbuild-linux-32": "0.14.36", + "esbuild-linux-64": "0.14.36", + "esbuild-linux-arm": "0.14.36", + "esbuild-linux-arm64": "0.14.36", + "esbuild-linux-mips64le": "0.14.36", + "esbuild-linux-ppc64le": "0.14.36", + "esbuild-linux-riscv64": "0.14.36", + "esbuild-linux-s390x": 
"0.14.36", + "esbuild-netbsd-64": "0.14.36", + "esbuild-openbsd-64": "0.14.36", + "esbuild-sunos-64": "0.14.36", + "esbuild-windows-32": "0.14.36", + "esbuild-windows-64": "0.14.36", + "esbuild-windows-arm64": "0.14.36" + } + }, + "node_modules/esbuild-darwin-64": { + "version": "0.14.36", + "resolved": "https://registry.npmmirror.com/esbuild-darwin-64/-/esbuild-darwin-64-0.14.36.tgz", + "integrity": "sha512-kkl6qmV0dTpyIMKagluzYqlc1vO0ecgpviK/7jwPbRDEv5fejRTaBBEE2KxEQbTHcLhiiDbhG7d5UybZWo/1zQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true, + "license": "MIT" + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": 
"https://registry.npmmirror.com/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "optional": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/image-size": { + "version": "0.5.5", + "resolved": "https://registry.npmmirror.com/image-size/-/image-size-0.5.5.tgz", + "integrity": "sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==", + "optional": true, + "bin": { + "image-size": "bin/image-size.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmmirror.com/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "license": "MIT", + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-plain-object": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/is-plain-object/-/is-plain-object-3.0.1.tgz", + "integrity": "sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g==", + "engines": { + 
"node": ">=0.10.0" + } + }, + "node_modules/is-what": { + "version": "3.14.1", + "resolved": "https://registry.npmmirror.com/is-what/-/is-what-3.14.1.tgz", + "integrity": "sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==" + }, + "node_modules/js-audio-recorder": { + "version": "0.5.7", + "resolved": "https://registry.npmmirror.com/js-audio-recorder/-/js-audio-recorder-0.5.7.tgz", + "integrity": "sha512-DIlv30N86AYHr7zGHN0O7V/3Rd8Q6SIJ/MBzVJaT9STWTdhF4E/8fxCX6ZMgRSv8xmx6fEqcFFNPoofmxJD4+A==", + "license": "MIT" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/lamejs": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/lamejs/-/lamejs-1.2.1.tgz", + "integrity": "sha512-s7bxvjvYthw6oPLCm5pFxvA84wUROODB8jEO2+CE1adhKgrIvVOlmMgY8zyugxGrvRaDHNJanOiS21/emty6dQ==", + "license": "LGPL-3.0", + "dependencies": { + "use-strict": "1.0.1" + } + }, + "node_modules/less": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/less/-/less-4.1.2.tgz", + "integrity": "sha512-EoQp/Et7OSOVu0aJknJOtlXZsnr8XE8KwuzTHOLeVSEx8pVWUICc8Q0VYRHgzyjX78nMEyC/oztWFbgyhtNfDA==", + "dependencies": { + "copy-anything": "^2.0.1", + "parse-node-version": "^1.0.1", + "tslib": "^2.3.0" + }, + "bin": { + "lessc": "bin/lessc" + }, + "engines": { + "node": ">=6" + }, + "optionalDependencies": { + "errno": "^0.1.1", + "graceful-fs": "^4.1.2", + "image-size": "~0.5.0", + "make-dir": "^2.1.0", + "mime": "^1.4.1", + "needle": "^2.5.2", + "source-map": "~0.6.0" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + 
"license": "MIT" + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmmirror.com/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/lodash-unified": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/lodash-unified/-/lodash-unified-1.0.2.tgz", + "integrity": "sha512-OGbEy+1P+UT26CYi4opY4gebD8cWRDxAT6MAObIVQMiqYdxZr1g3QHWCToVsm31x2NkLS4K3+MC2qInaRMa39g==", + "license": "MIT", + "peerDependencies": { + "@types/lodash-es": "*", + "lodash": "*", + "lodash-es": "*" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/magic-string": { + "version": "0.25.9", + "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.25.9.tgz", + "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==", + "license": "MIT", + "dependencies": { + "sourcemap-codec": "^1.4.8" + } + }, + "node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "optional": true, + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/memoize-one": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/memoize-one/-/memoize-one-6.0.0.tgz", + "integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==", + 
"license": "MIT" + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmmirror.com/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "optional": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/moment": { + "version": "2.29.3", + "resolved": "https://registry.npmmirror.com/moment/-/moment-2.29.3.tgz", + "integrity": "sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==", + "engines": { + "node": "*" + } + }, + "node_modules/nanoid": { + "version": "3.3.2", + "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.2.tgz", + "integrity": "sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA==", + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/nanopop": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/nanopop/-/nanopop-2.1.0.tgz", + "integrity": "sha512-jGTwpFRexSH+fxappnGQtN9dspgE2ipa1aOjtR24igG0pv6JCxImIAmrLRHX+zUF5+1wtsFVbKyfP51kIGAVNw==" + }, + "node_modules/needle": { + "version": "2.9.1", + "resolved": "https://registry.npmmirror.com/needle/-/needle-2.9.1.tgz", + "integrity": "sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==", + "optional": true, + "dependencies": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "bin": { + "needle": "bin/needle" + }, + "engines": { + "node": ">= 4.4.x" + } + }, + "node_modules/needle/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmmirror.com/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "optional": true, + "dependencies": { + "ms": "^2.1.1" + } + }, 
+ "node_modules/needle/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "optional": true + }, + "node_modules/normalize-wheel-es": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/normalize-wheel-es/-/normalize-wheel-es-1.1.2.tgz", + "integrity": "sha512-scX83plWJXYH1J4+BhAuIHadROzxX0UBF3+HuZNY2Ks8BciE7tSTQ+5JhTsvzjaO0/EJdm4JBGrfObKxFf3Png==", + "license": "BSD-3-Clause" + }, + "node_modules/omit.js": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/omit.js/-/omit.js-2.0.2.tgz", + "integrity": "sha512-hJmu9D+bNB40YpL9jYebQl4lsTW6yEHRTroJzNLqQJYHm7c+NQnJGfZmIWh8S3q3KoaxV1aLhV6B3+0N0/kyJg==" + }, + "node_modules/parse-node-version": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/parse-node-version/-/parse-node-version-1.0.1.tgz", + "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "license": "ISC" + }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "optional": true, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/postcss": { + "version": "8.4.12", + "resolved": "https://registry.npmmirror.com/postcss/-/postcss-8.4.12.tgz", + "integrity": "sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.1", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prr": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/prr/-/prr-1.0.1.tgz", + "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==", + "optional": true + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + }, + "node_modules/resize-observer-polyfill": { + "version": "1.5.1", + "resolved": "https://registry.npmmirror.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", + "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" + }, + "node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmmirror.com/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rollup": { + "version": 
"2.70.1", + "resolved": "https://registry.npmmirror.com/rollup/-/rollup-2.70.1.tgz", + "integrity": "sha512-CRYsI5EuzLbXdxC6RnYhOuRdtz4bhejPMSWjsFLfVM/7w/85n2szZv6yExqUXsBdz5KT8eoubeyDUDjhLHEslA==", + "dev": true, + "license": "MIT", + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=10.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "optional": true + }, + "node_modules/sax": { + "version": "1.2.4", + "resolved": "https://registry.npmmirror.com/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "optional": true + }, + "node_modules/scroll-into-view-if-needed": { + "version": "2.2.29", + "resolved": "https://registry.npmmirror.com/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.29.tgz", + "integrity": "sha512-hxpAR6AN+Gh53AdAimHM6C8oTN1ppwVZITihix+WqalywBeFcQ6LdQP5ABNl26nX8GTEL7VT+b8lKpdqq65wXg==", + "dependencies": { + "compute-scroll-into-view": "^1.0.17" + } + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "optional": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/shallow-equal": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/shallow-equal/-/shallow-equal-1.2.1.tgz", + "integrity": "sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==" + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sourcemap-codec": { + "version": "1.4.8", + "resolved": "https://registry.npmmirror.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", + "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==", + "license": "MIT" + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/use-strict": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/use-strict/-/use-strict-1.0.1.tgz", + "integrity": "sha512-IeiWvvEXfW5ltKVMkxq6FvNf2LojMKvB2OCeja6+ct24S1XOmQw2dGr2JyndwACWAGJva9B7yPHwAmeA9QCqAQ==", + "license": "ISC" + }, + "node_modules/vite": { + "version": "2.9.1", + "resolved": "https://registry.npmmirror.com/vite/-/vite-2.9.1.tgz", + "integrity": "sha512-vSlsSdOYGcYEJfkQ/NeLXgnRv5zZfpAsdztkIrs7AZHV8RCMZQkwjo4DS5BnrYTqoWqLoUe1Cah4aVO4oNNqCQ==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.14.27", + "postcss": "^8.4.12", + "resolve": "^1.22.0", + "rollup": "^2.59.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": ">=12.2.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "peerDependencies": { + "less": "*", + "sass": "*", + "stylus": "*" + }, + "peerDependenciesMeta": { + "less": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + } + } + }, + "node_modules/vue": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/vue/-/vue-3.2.32.tgz", + "integrity": "sha512-6L3jKZApF042OgbCkh+HcFeAkiYi3Lovi8wNhWqIK98Pi5efAMLZzRHgi91v+60oIRxdJsGS9sTMsb+yDpY8Eg==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.2.32", + "@vue/compiler-sfc": "3.2.32", + "@vue/runtime-dom": "3.2.32", + "@vue/server-renderer": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "node_modules/vue-demi": { + "version": "0.12.5", + "resolved": "https://registry.npmmirror.com/vue-demi/-/vue-demi-0.12.5.tgz", + "integrity": "sha512-BREuTgTYlUr0zw0EZn3hnhC3I6gPWv+Kwh4MCih6QcAeaTlaIX0DwOVN0wHej7hSvDPecz4jygy/idsgKfW58Q==", + "hasInstallScript": true, + "license": "MIT", + "bin": { + "vue-demi-fix": "bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/vue-types": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/vue-types/-/vue-types-3.0.2.tgz", + "integrity": "sha512-IwUC0Aq2zwaXqy74h4WCvFCUtoV0iSWr0snWnE9TnU18S66GAQyqQbRf2qfJtUuiFsBf6qp0MEwdonlwznlcrw==", + "dependencies": { + "is-plain-object": "3.0.1" + }, + "engines": { + "node": ">=10.15.0" + }, + "peerDependencies": { + 
"vue": "^3.0.0" + } + }, + "node_modules/warning": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/warning/-/warning-4.0.3.tgz", + "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "dependencies": { + "loose-envify": "^1.0.0" + } + } + }, + "dependencies": { + "@ant-design/colors": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/@ant-design/colors/-/colors-6.0.0.tgz", + "integrity": "sha512-qAZRvPzfdWHtfameEGP2Qvuf838NhergR35o+EuVyB5XvSA98xod5r4utvi4TJ3ywmevm290g9nsCG5MryrdWQ==", + "requires": { + "@ctrl/tinycolor": "^3.4.0" + } + }, + "@ant-design/icons-svg": { + "version": "4.2.1", + "resolved": "https://registry.npmmirror.com/@ant-design/icons-svg/-/icons-svg-4.2.1.tgz", + "integrity": "sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw==" + }, + "@ant-design/icons-vue": { + "version": "6.1.0", + "resolved": "https://registry.npmmirror.com/@ant-design/icons-vue/-/icons-vue-6.1.0.tgz", + "integrity": "sha512-EX6bYm56V+ZrKN7+3MT/ubDkvJ5rK/O2t380WFRflDcVFgsvl3NLH7Wxeau6R8DbrO5jWR6DSTC3B6gYFp77AA==", + "requires": { + "@ant-design/colors": "^6.0.0", + "@ant-design/icons-svg": "^4.2.1" + } + }, + "@babel/parser": { + "version": "7.17.9", + "resolved": "https://registry.npmmirror.com/@babel/parser/-/parser-7.17.9.tgz", + "integrity": "sha512-vqUSBLP8dQHFPdPi9bc5GK9vRkYHJ49fsZdtoJ8EQ8ibpwk5rPKfvNIwChB0KVXcIjcepEBBd2VHC5r9Gy8ueg==" + }, + "@babel/runtime": { + "version": "7.17.9", + "resolved": "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.17.9.tgz", + "integrity": "sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==", + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@ctrl/tinycolor": { + "version": "3.4.1", + "resolved": "https://registry.npmmirror.com/@ctrl/tinycolor/-/tinycolor-3.4.1.tgz", + "integrity": 
"sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw==" + }, + "@element-plus/icons-vue": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/@element-plus/icons-vue/-/icons-vue-1.1.4.tgz", + "integrity": "sha512-Iz/nHqdp1sFPmdzRwHkEQQA3lKvoObk8azgABZ81QUOpW9s/lUyQVUSh0tNtEPZXQlKwlSh7SPgoVxzrE0uuVQ==", + "requires": {} + }, + "@floating-ui/core": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/@floating-ui/core/-/core-0.6.1.tgz", + "integrity": "sha512-Y30eVMcZva8o84c0HcXAtDO4BEzPJMvF6+B7x7urL2xbAqVsGJhojOyHLaoQHQYjb6OkqRq5kO+zeySycQwKqg==" + }, + "@floating-ui/dom": { + "version": "0.4.4", + "resolved": "https://registry.npmmirror.com/@floating-ui/dom/-/dom-0.4.4.tgz", + "integrity": "sha512-0Ulu3B/dqQplUUSqnTx0foSrlYuMN+GTtlJWvNJwt6Fr7/PqmlR/Y08o6/+bxDWr6p3roBJRaQ51MDZsNmEhhw==", + "requires": { + "@floating-ui/core": "^0.6.1" + } + }, + "@popperjs/core": { + "version": "2.11.5", + "resolved": "https://registry.npmmirror.com/@popperjs/core/-/core-2.11.5.tgz", + "integrity": "sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw==" + }, + "@simonwep/pickr": { + "version": "1.8.2", + "resolved": "https://registry.npmmirror.com/@simonwep/pickr/-/pickr-1.8.2.tgz", + "integrity": "sha512-/l5w8BIkrpP6n1xsetx9MWPWlU6OblN5YgZZphxan0Tq4BByTCETL6lyIeY8lagalS2Nbt4F2W034KHLIiunKA==", + "requires": { + "core-js": "^3.15.1", + "nanopop": "^2.1.0" + } + }, + "@types/lodash": { + "version": "4.14.181", + "resolved": "https://registry.npmmirror.com/@types/lodash/-/lodash-4.14.181.tgz", + "integrity": "sha512-n3tyKthHJbkiWhDZs3DkhkCzt2MexYHXlX0td5iMplyfwketaOeKboEVBqzceH7juqvEg3q5oUoBFxSLu7zFag==" + }, + "@types/lodash-es": { + "version": "4.17.6", + "resolved": "https://registry.npmmirror.com/@types/lodash-es/-/lodash-es-4.17.6.tgz", + "integrity": "sha512-R+zTeVUKDdfoRxpAryaQNRKk3105Rrgx2CFRClIgRGaqDTdjsm8h6IYA8ir584W3ePzkZfst5xIgDwYrlh9HLg==", + "requires": 
{ + "@types/lodash": "*" + } + }, + "@vitejs/plugin-vue": { + "version": "2.3.1", + "resolved": "https://registry.npmmirror.com/@vitejs/plugin-vue/-/plugin-vue-2.3.1.tgz", + "integrity": "sha512-YNzBt8+jt6bSwpt7LP890U1UcTOIZZxfpE5WOJ638PNxSEKOqAi0+FSKS0nVeukfdZ0Ai/H7AFd6k3hayfGZqQ==", + "dev": true, + "requires": {} + }, + "@vue/compiler-core": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-core/-/compiler-core-3.2.32.tgz", + "integrity": "sha512-bRQ8Rkpm/aYFElDWtKkTPHeLnX5pEkNxhPUcqu5crEJIilZH0yeFu/qUAcV4VfSE2AudNPkQSOwMZofhnuutmA==", + "requires": { + "@babel/parser": "^7.16.4", + "@vue/shared": "3.2.32", + "estree-walker": "^2.0.2", + "source-map": "^0.6.1" + } + }, + "@vue/compiler-dom": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-dom/-/compiler-dom-3.2.32.tgz", + "integrity": "sha512-maa3PNB/NxR17h2hDQfcmS02o1f9r9QIpN1y6fe8tWPrS1E4+q8MqrvDDQNhYVPd84rc3ybtyumrgm9D5Rf/kg==", + "requires": { + "@vue/compiler-core": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "@vue/compiler-sfc": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-sfc/-/compiler-sfc-3.2.32.tgz", + "integrity": "sha512-uO6+Gh3AVdWm72lRRCjMr8nMOEqc6ezT9lWs5dPzh1E9TNaJkMYPaRtdY9flUv/fyVQotkfjY/ponjfR+trPSg==", + "requires": { + "@babel/parser": "^7.16.4", + "@vue/compiler-core": "3.2.32", + "@vue/compiler-dom": "3.2.32", + "@vue/compiler-ssr": "3.2.32", + "@vue/reactivity-transform": "3.2.32", + "@vue/shared": "3.2.32", + "estree-walker": "^2.0.2", + "magic-string": "^0.25.7", + "postcss": "^8.1.10", + "source-map": "^0.6.1" + } + }, + "@vue/compiler-ssr": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/compiler-ssr/-/compiler-ssr-3.2.32.tgz", + "integrity": "sha512-ZklVUF/SgTx6yrDUkaTaBL/JMVOtSocP+z5Xz/qIqqLdW/hWL90P+ob/jOQ0Xc/om57892Q7sRSrex0wujOL2Q==", + "requires": { + "@vue/compiler-dom": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "@vue/reactivity": { 
+ "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/reactivity/-/reactivity-3.2.32.tgz", + "integrity": "sha512-4zaDumuyDqkuhbb63hRd+YHFGopW7srFIWesLUQ2su/rJfWrSq3YUvoKAJE8Eu1EhZ2Q4c1NuwnEreKj1FkDxA==", + "requires": { + "@vue/shared": "3.2.32" + } + }, + "@vue/reactivity-transform": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/reactivity-transform/-/reactivity-transform-3.2.32.tgz", + "integrity": "sha512-CW1W9zaJtE275tZSWIfQKiPG0iHpdtSlmTqYBu7Y62qvtMgKG5yOxtvBs4RlrZHlaqFSE26avLAgQiTp4YHozw==", + "requires": { + "@babel/parser": "^7.16.4", + "@vue/compiler-core": "3.2.32", + "@vue/shared": "3.2.32", + "estree-walker": "^2.0.2", + "magic-string": "^0.25.7" + } + }, + "@vue/runtime-core": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/runtime-core/-/runtime-core-3.2.32.tgz", + "integrity": "sha512-uKKzK6LaCnbCJ7rcHvsK0azHLGpqs+Vi9B28CV1mfWVq1F3Bj8Okk3cX+5DtD06aUh4V2bYhS2UjjWiUUKUF0w==", + "requires": { + "@vue/reactivity": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "@vue/runtime-dom": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/runtime-dom/-/runtime-dom-3.2.32.tgz", + "integrity": "sha512-AmlIg+GPqjkNoADLjHojEX5RGcAg+TsgXOOcUrtDHwKvA8mO26EnLQLB8nylDjU6AMJh2CIYn8NEgyOV5ZIScQ==", + "requires": { + "@vue/runtime-core": "3.2.32", + "@vue/shared": "3.2.32", + "csstype": "^2.6.8" + } + }, + "@vue/server-renderer": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/server-renderer/-/server-renderer-3.2.32.tgz", + "integrity": "sha512-TYKpZZfRJpGTTiy/s6bVYwQJpAUx3G03z4G7/3O18M11oacrMTVHaHjiPuPqf3xQtY8R4LKmQ3EOT/DRCA/7Wg==", + "requires": { + "@vue/compiler-ssr": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "@vue/shared": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/@vue/shared/-/shared-3.2.32.tgz", + "integrity": 
"sha512-bjcixPErUsAnTQRQX4Z5IQnICYjIfNCyCl8p29v1M6kfVzvwOICPw+dz48nNuWlTOOx2RHhzHdazJibE8GSnsw==" + }, + "@vueuse/core": { + "version": "8.2.5", + "resolved": "https://registry.npmmirror.com/@vueuse/core/-/core-8.2.5.tgz", + "integrity": "sha512-5prZAA1Ji2ltwNUnzreu6WIXYqHYP/9U2BiY5mD/650VYLpVcwVlYznJDFcLCmEWI3o3Vd34oS1FUf+6Mh68GQ==", + "requires": { + "@vueuse/metadata": "8.2.5", + "@vueuse/shared": "8.2.5", + "vue-demi": "*" + } + }, + "@vueuse/metadata": { + "version": "8.2.5", + "resolved": "https://registry.npmmirror.com/@vueuse/metadata/-/metadata-8.2.5.tgz", + "integrity": "sha512-Lk9plJjh9cIdiRdcj16dau+2LANxIdFCiTgdfzwYXbflxq0QnMBeOD2qHgKDE7fuVrtPcVWj8VSuZEx1HRfNQA==" + }, + "@vueuse/shared": { + "version": "8.2.5", + "resolved": "https://registry.npmmirror.com/@vueuse/shared/-/shared-8.2.5.tgz", + "integrity": "sha512-lNWo+7sk6JCuOj4AiYM+6HZ6fq4xAuVq1sVckMQKgfCJZpZRe4i8es+ZULO5bYTKP+VrOCtqrLR2GzEfrbr3YQ==", + "requires": { + "vue-demi": "*" + } + }, + "ant-design-vue": { + "version": "2.2.8", + "resolved": "https://registry.npmmirror.com/ant-design-vue/-/ant-design-vue-2.2.8.tgz", + "integrity": "sha512-3graq9/gCfJQs6hznrHV6sa9oDmk/D1H3Oo0vLdVpPS/I61fZPk8NEyNKCHpNA6fT2cx6xx9U3QS63uuyikg/Q==", + "requires": { + "@ant-design/icons-vue": "^6.0.0", + "@babel/runtime": "^7.10.5", + "@simonwep/pickr": "~1.8.0", + "array-tree-filter": "^2.1.0", + "async-validator": "^3.3.0", + "dom-align": "^1.12.1", + "dom-scroll-into-view": "^2.0.0", + "lodash": "^4.17.21", + "lodash-es": "^4.17.15", + "moment": "^2.27.0", + "omit.js": "^2.0.0", + "resize-observer-polyfill": "^1.5.1", + "scroll-into-view-if-needed": "^2.2.25", + "shallow-equal": "^1.0.0", + "vue-types": "^3.0.0", + "warning": "^4.0.0" + }, + "dependencies": { + "async-validator": { + "version": "3.5.2", + "resolved": "https://registry.npmmirror.com/async-validator/-/async-validator-3.5.2.tgz", + "integrity": "sha512-8eLCg00W9pIRZSB781UUX/H6Oskmm8xloZfr09lz5bikRpBVDlJ3hRVuxxP1SxcwsEYfJ4IU8Q19Y8/893r3rQ==" + } + 
} + }, + "array-tree-filter": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/array-tree-filter/-/array-tree-filter-2.1.0.tgz", + "integrity": "sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw==" + }, + "async-validator": { + "version": "4.0.7", + "resolved": "https://registry.npmmirror.com/async-validator/-/async-validator-4.0.7.tgz", + "integrity": "sha512-Pj2IR7u8hmUEDOwB++su6baaRi+QvsgajuFB9j95foM1N2gy5HM4z60hfusIO0fBPG5uLAEl6yCJr1jNSVugEQ==" + }, + "axios": { + "version": "0.26.1", + "resolved": "https://registry.npmmirror.com/axios/-/axios-0.26.1.tgz", + "integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==", + "requires": { + "follow-redirects": "^1.14.8" + }, + "dependencies": { + "follow-redirects": { + "version": "1.14.9", + "resolved": "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.14.9.tgz", + "integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==" + } + } + }, + "compute-scroll-into-view": { + "version": "1.0.17", + "resolved": "https://registry.npmmirror.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz", + "integrity": "sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg==" + }, + "copy-anything": { + "version": "2.0.6", + "resolved": "https://registry.npmmirror.com/copy-anything/-/copy-anything-2.0.6.tgz", + "integrity": "sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==", + "requires": { + "is-what": "^3.14.1" + } + }, + "core-js": { + "version": "3.22.5", + "resolved": "https://registry.npmmirror.com/core-js/-/core-js-3.22.5.tgz", + "integrity": "sha512-VP/xYuvJ0MJWRAobcmQ8F2H6Bsn+s7zqAAjFaHGBMc5AQm7zaelhD1LGduFn2EehEcQcU+br6t+fwbpQ5d1ZWA==" + }, + "csstype": { + "version": "2.6.20", + "resolved": 
"https://registry.npmmirror.com/csstype/-/csstype-2.6.20.tgz", + "integrity": "sha512-/WwNkdXfckNgw6S5R125rrW8ez139lBHWouiBvX8dfMFtcn6V81REDqnH7+CRpRipfYlyU1CmOnOxrmGcFOjeA==" + }, + "dayjs": { + "version": "1.11.0", + "resolved": "https://registry.npmmirror.com/dayjs/-/dayjs-1.11.0.tgz", + "integrity": "sha512-JLC809s6Y948/FuCZPm5IX8rRhQwOiyMb2TfVVQEixG7P8Lm/gt5S7yoQZmC8x1UehI9Pb7sksEt4xx14m+7Ug==" + }, + "dom-align": { + "version": "1.12.3", + "resolved": "https://registry.npmmirror.com/dom-align/-/dom-align-1.12.3.tgz", + "integrity": "sha512-Gj9hZN3a07cbR6zviMUBOMPdWxYhbMI+x+WS0NAIu2zFZmbK8ys9R79g+iG9qLnlCwpFoaB+fKy8Pdv470GsPA==" + }, + "dom-scroll-into-view": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/dom-scroll-into-view/-/dom-scroll-into-view-2.0.1.tgz", + "integrity": "sha512-bvVTQe1lfaUr1oFzZX80ce9KLDlZ3iU+XGNE/bz9HnGdklTieqsbmsLHe+rT2XWqopvL0PckkYqN7ksmm5pe3w==" + }, + "element-plus": { + "version": "2.1.9", + "resolved": "https://registry.npmmirror.com/element-plus/-/element-plus-2.1.9.tgz", + "integrity": "sha512-6mWqS3YrmJPnouWP4otzL8+MehfOnDFqDbcIdnmC07p+Z0JkWe/CVKc4Wky8AYC8nyDMUQyiZYvooCbqGuM7pg==", + "requires": { + "@ctrl/tinycolor": "^3.4.0", + "@element-plus/icons-vue": "^1.1.4", + "@floating-ui/dom": "^0.4.2", + "@popperjs/core": "^2.11.4", + "@types/lodash": "^4.14.181", + "@types/lodash-es": "^4.17.6", + "@vueuse/core": "^8.2.4", + "async-validator": "^4.0.7", + "dayjs": "^1.11.0", + "escape-html": "^1.0.3", + "lodash": "^4.17.21", + "lodash-es": "^4.17.21", + "lodash-unified": "^1.0.2", + "memoize-one": "^6.0.0", + "normalize-wheel-es": "^1.1.2" + } + }, + "errno": { + "version": "0.1.8", + "resolved": "https://registry.npmmirror.com/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "optional": true, + "requires": { + "prr": "~1.0.1" + } + }, + "esbuild": { + "version": "0.14.36", + "resolved": 
"https://registry.npmmirror.com/esbuild/-/esbuild-0.14.36.tgz", + "integrity": "sha512-HhFHPiRXGYOCRlrhpiVDYKcFJRdO0sBElZ668M4lh2ER0YgnkLxECuFe7uWCf23FrcLc59Pqr7dHkTqmRPDHmw==", + "dev": true, + "requires": { + "esbuild-android-64": "0.14.36", + "esbuild-android-arm64": "0.14.36", + "esbuild-darwin-64": "0.14.36", + "esbuild-darwin-arm64": "0.14.36", + "esbuild-freebsd-64": "0.14.36", + "esbuild-freebsd-arm64": "0.14.36", + "esbuild-linux-32": "0.14.36", + "esbuild-linux-64": "0.14.36", + "esbuild-linux-arm": "0.14.36", + "esbuild-linux-arm64": "0.14.36", + "esbuild-linux-mips64le": "0.14.36", + "esbuild-linux-ppc64le": "0.14.36", + "esbuild-linux-riscv64": "0.14.36", + "esbuild-linux-s390x": "0.14.36", + "esbuild-netbsd-64": "0.14.36", + "esbuild-openbsd-64": "0.14.36", + "esbuild-sunos-64": "0.14.36", + "esbuild-windows-32": "0.14.36", + "esbuild-windows-64": "0.14.36", + "esbuild-windows-arm64": "0.14.36" + } + }, + "esbuild-darwin-64": { + "version": "0.14.36", + "resolved": "https://registry.npmmirror.com/esbuild-darwin-64/-/esbuild-darwin-64-0.14.36.tgz", + "integrity": "sha512-kkl6qmV0dTpyIMKagluzYqlc1vO0ecgpviK/7jwPbRDEv5fejRTaBBEE2KxEQbTHcLhiiDbhG7d5UybZWo/1zQ==", + "dev": true, + "optional": true + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==" + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + 
"function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmmirror.com/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "optional": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "optional": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "image-size": { + "version": "0.5.5", + "resolved": "https://registry.npmmirror.com/image-size/-/image-size-0.5.5.tgz", + "integrity": "sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==", + "optional": true + }, + "is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmmirror.com/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-plain-object": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/is-plain-object/-/is-plain-object-3.0.1.tgz", + "integrity": "sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g==" + }, + "is-what": { + "version": "3.14.1", + "resolved": 
"https://registry.npmmirror.com/is-what/-/is-what-3.14.1.tgz", + "integrity": "sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==" + }, + "js-audio-recorder": { + "version": "0.5.7", + "resolved": "https://registry.npmmirror.com/js-audio-recorder/-/js-audio-recorder-0.5.7.tgz", + "integrity": "sha512-DIlv30N86AYHr7zGHN0O7V/3Rd8Q6SIJ/MBzVJaT9STWTdhF4E/8fxCX6ZMgRSv8xmx6fEqcFFNPoofmxJD4+A==" + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "lamejs": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/lamejs/-/lamejs-1.2.1.tgz", + "integrity": "sha512-s7bxvjvYthw6oPLCm5pFxvA84wUROODB8jEO2+CE1adhKgrIvVOlmMgY8zyugxGrvRaDHNJanOiS21/emty6dQ==", + "requires": { + "use-strict": "1.0.1" + } + }, + "less": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/less/-/less-4.1.2.tgz", + "integrity": "sha512-EoQp/Et7OSOVu0aJknJOtlXZsnr8XE8KwuzTHOLeVSEx8pVWUICc8Q0VYRHgzyjX78nMEyC/oztWFbgyhtNfDA==", + "requires": { + "copy-anything": "^2.0.1", + "errno": "^0.1.1", + "graceful-fs": "^4.1.2", + "image-size": "~0.5.0", + "make-dir": "^2.1.0", + "mime": "^1.4.1", + "needle": "^2.5.2", + "parse-node-version": "^1.0.1", + "source-map": "~0.6.0", + "tslib": "^2.3.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmmirror.com/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + }, + "lodash-unified": { + "version": "1.0.2", + "resolved": 
"https://registry.npmmirror.com/lodash-unified/-/lodash-unified-1.0.2.tgz", + "integrity": "sha512-OGbEy+1P+UT26CYi4opY4gebD8cWRDxAT6MAObIVQMiqYdxZr1g3QHWCToVsm31x2NkLS4K3+MC2qInaRMa39g==", + "requires": {} + }, + "loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "magic-string": { + "version": "0.25.9", + "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.25.9.tgz", + "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==", + "requires": { + "sourcemap-codec": "^1.4.8" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "optional": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "memoize-one": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/memoize-one/-/memoize-one-6.0.0.tgz", + "integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==" + }, + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmmirror.com/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "optional": true + }, + "moment": { + "version": "2.29.3", + "resolved": "https://registry.npmmirror.com/moment/-/moment-2.29.3.tgz", + "integrity": "sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==" + }, + "nanoid": { + "version": "3.3.2", + "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.2.tgz", + "integrity": 
"sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA==" + }, + "nanopop": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/nanopop/-/nanopop-2.1.0.tgz", + "integrity": "sha512-jGTwpFRexSH+fxappnGQtN9dspgE2ipa1aOjtR24igG0pv6JCxImIAmrLRHX+zUF5+1wtsFVbKyfP51kIGAVNw==" + }, + "needle": { + "version": "2.9.1", + "resolved": "https://registry.npmmirror.com/needle/-/needle-2.9.1.tgz", + "integrity": "sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==", + "optional": true, + "requires": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmmirror.com/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "optional": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "optional": true + } + } + }, + "normalize-wheel-es": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/normalize-wheel-es/-/normalize-wheel-es-1.1.2.tgz", + "integrity": "sha512-scX83plWJXYH1J4+BhAuIHadROzxX0UBF3+HuZNY2Ks8BciE7tSTQ+5JhTsvzjaO0/EJdm4JBGrfObKxFf3Png==" + }, + "omit.js": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/omit.js/-/omit.js-2.0.2.tgz", + "integrity": "sha512-hJmu9D+bNB40YpL9jYebQl4lsTW6yEHRTroJzNLqQJYHm7c+NQnJGfZmIWh8S3q3KoaxV1aLhV6B3+0N0/kyJg==" + }, + "parse-node-version": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/parse-node-version/-/parse-node-version-1.0.1.tgz", + "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==" + }, + "path-parse": { + 
"version": "1.0.7", + "resolved": "https://registry.npmmirror.com/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "optional": true + }, + "postcss": { + "version": "8.4.12", + "resolved": "https://registry.npmmirror.com/postcss/-/postcss-8.4.12.tgz", + "integrity": "sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg==", + "requires": { + "nanoid": "^3.3.1", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + }, + "prr": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/prr/-/prr-1.0.1.tgz", + "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==", + "optional": true + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + }, + "resize-observer-polyfill": { + "version": "1.5.1", + "resolved": "https://registry.npmmirror.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", + "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmmirror.com/resolve/-/resolve-1.22.0.tgz", + "integrity": 
"sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "rollup": { + "version": "2.70.1", + "resolved": "https://registry.npmmirror.com/rollup/-/rollup-2.70.1.tgz", + "integrity": "sha512-CRYsI5EuzLbXdxC6RnYhOuRdtz4bhejPMSWjsFLfVM/7w/85n2szZv6yExqUXsBdz5KT8eoubeyDUDjhLHEslA==", + "dev": true, + "requires": { + "fsevents": "~2.3.2" + } + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "optional": true + }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmmirror.com/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "optional": true + }, + "scroll-into-view-if-needed": { + "version": "2.2.29", + "resolved": "https://registry.npmmirror.com/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.29.tgz", + "integrity": "sha512-hxpAR6AN+Gh53AdAimHM6C8oTN1ppwVZITihix+WqalywBeFcQ6LdQP5ABNl26nX8GTEL7VT+b8lKpdqq65wXg==", + "requires": { + "compute-scroll-into-view": "^1.0.17" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "optional": true + }, + "shallow-equal": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/shallow-equal/-/shallow-equal-1.2.1.tgz", + "integrity": "sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==" + }, + "source-map": { + "version": "0.6.1", + "resolved": 
"https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, + "source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" + }, + "sourcemap-codec": { + "version": "1.4.8", + "resolved": "https://registry.npmmirror.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", + "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==" + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "use-strict": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/use-strict/-/use-strict-1.0.1.tgz", + "integrity": "sha512-IeiWvvEXfW5ltKVMkxq6FvNf2LojMKvB2OCeja6+ct24S1XOmQw2dGr2JyndwACWAGJva9B7yPHwAmeA9QCqAQ==" + }, + "vite": { + "version": "2.9.1", + "resolved": "https://registry.npmmirror.com/vite/-/vite-2.9.1.tgz", + "integrity": "sha512-vSlsSdOYGcYEJfkQ/NeLXgnRv5zZfpAsdztkIrs7AZHV8RCMZQkwjo4DS5BnrYTqoWqLoUe1Cah4aVO4oNNqCQ==", + "dev": true, + "requires": { + "esbuild": "^0.14.27", + "fsevents": "~2.3.2", + "postcss": "^8.4.12", + "resolve": "^1.22.0", + "rollup": "^2.59.0" + } + }, + "vue": { + "version": "3.2.32", + "resolved": "https://registry.npmmirror.com/vue/-/vue-3.2.32.tgz", + "integrity": 
"sha512-6L3jKZApF042OgbCkh+HcFeAkiYi3Lovi8wNhWqIK98Pi5efAMLZzRHgi91v+60oIRxdJsGS9sTMsb+yDpY8Eg==", + "requires": { + "@vue/compiler-dom": "3.2.32", + "@vue/compiler-sfc": "3.2.32", + "@vue/runtime-dom": "3.2.32", + "@vue/server-renderer": "3.2.32", + "@vue/shared": "3.2.32" + } + }, + "vue-demi": { + "version": "0.12.5", + "resolved": "https://registry.npmmirror.com/vue-demi/-/vue-demi-0.12.5.tgz", + "integrity": "sha512-BREuTgTYlUr0zw0EZn3hnhC3I6gPWv+Kwh4MCih6QcAeaTlaIX0DwOVN0wHej7hSvDPecz4jygy/idsgKfW58Q==", + "requires": {} + }, + "vue-types": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/vue-types/-/vue-types-3.0.2.tgz", + "integrity": "sha512-IwUC0Aq2zwaXqy74h4WCvFCUtoV0iSWr0snWnE9TnU18S66GAQyqQbRf2qfJtUuiFsBf6qp0MEwdonlwznlcrw==", + "requires": { + "is-plain-object": "3.0.1" + } + }, + "warning": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/warning/-/warning-4.0.3.tgz", + "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "requires": { + "loose-envify": "^1.0.0" + } + } + } +} diff --git a/demos/speech_web_demo/web_client/package.json b/demos/speech_web_demo/web_client/package.json new file mode 100644 index 00000000..7f28d4c9 --- /dev/null +++ b/demos/speech_web_demo/web_client/package.json @@ -0,0 +1,23 @@ +{ + "name": "paddlespeechwebclient", + "private": true, + "version": "0.0.0", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "ant-design-vue": "^2.2.8", + "axios": "^0.26.1", + "element-plus": "^2.1.9", + "js-audio-recorder": "0.5.7", + "lamejs": "^1.2.1", + "less": "^4.1.2", + "vue": "^3.2.25" + }, + "devDependencies": { + "@vitejs/plugin-vue": "^2.3.0", + "vite": "^2.9.0" + } +} diff --git a/demos/speech_web_demo/web_client/public/favicon.ico b/demos/speech_web_demo/web_client/public/favicon.ico new file mode 100644 index 
0000000000000000000000000000000000000000..342038720d7c5a8fbbef1110d098e50f7a0e6274 GIT binary patch literal 4286 zcmeHL>q`_-6rcWsKK0p;{RKUUa<#NnFx0X%4NHqmOOdT0^MeNMhcKcCJxB~GG0DJ^ zgea7XUIdmwnMP!_GqdlPJL~ScbGqkDEaNzO2a*Wdvz%e>J-_=qXYM)oTv$O^!G2j; z0zQSlm4dKJ5QNoC*_2>oit{AQW)x@k8pvp74>C3I57a=;I`FmHNyV+3xaZPUQ$bi) zZ50=5HNWxkcYqSSc#g~IJ836uQkKA(yPk`;5`(YpgvKd3`JA@3kevQTc>n3PW%2Js z&L7jn>1%K0at6+xA#7rDf{V5C{fG&4I*2nq@sZD&w-M(2KEiui8u)k%{a(yxoWX#` z*<8=%40T;4Z0z+5E_Sg!aM$kxPt&2aDkG0I&X>oHjW$R`!&K`IslRaF7! zT>C2*;u{ZuSd_bHe9?#Z$)E9ooETdw6f~M>oS2#fxv~^y?)B3+Iy?m7o-R|YIvaz~ zwM(WkxT<%8@@#aTCcb_lo|&OR@H;vnI{ZMVLxA;xlKlA{T-D{Erc$6L65y_@*5R}@ zxSuISA-uoilxYmc+{k;%(wtIhq3N2~n^(l?J>Cq;=o5O6^!AuyJ*<{cX2;%|+Kv8k ztUMi|HR0PgP1i*Fd&!RH-+j22-cyk*+0e*|L|Vp zywIunVEd<+(2Xl;K4VYDyyyjMMJmou;}`pG{k*5Cp6V&weVx{M*jd6_oEaPdZ%gBR zF4jNsETG+0vzu&@yL~jq+Q+l3=l+v-_rbm3#~#h@O)d}XlYU1#$W=S|ShnXpK0FV2 z)=+}K^Rm<$%sC47)#bP=splzC;>7P-$~W4=x4H?N6xqGXa>T=1vC9=Dtk3fA2jXHm g@*X=1Qc1zG__h*X2E(u8%pU#Udu04S;BSwA0j&u;7XSbN literal 0 HcmV?d00001 diff --git a/demos/speech_web_demo/web_client/src/App.vue b/demos/speech_web_demo/web_client/src/App.vue new file mode 100644 index 00000000..a70dbf9c --- /dev/null +++ b/demos/speech_web_demo/web_client/src/App.vue @@ -0,0 +1,19 @@ + + + + + diff --git a/demos/speech_web_demo/web_client/src/api/API.js b/demos/speech_web_demo/web_client/src/api/API.js new file mode 100644 index 00000000..0feaa63f --- /dev/null +++ b/demos/speech_web_demo/web_client/src/api/API.js @@ -0,0 +1,29 @@ +export const apiURL = { + ASR_OFFLINE : '/api/asr/offline', // 获取离线语音识别结果 + ASR_COLLECT_ENV : '/api/asr/collectEnv', // 采集环境噪音 + ASR_STOP_RECORD : '/api/asr/stopRecord', // 后端暂停录音 + ASR_RESUME_RECORD : '/api/asr/resumeRecord',// 后端恢复录音 + + NLP_CHAT : '/api/nlp/chat', // NLP闲聊接口 + NLP_IE : '/api/nlp/ie', // 信息抽取接口 + + TTS_OFFLINE : '/api/tts/offline', // 获取TTS音频 + + VPR_RECOG : '/api/vpr/recog', // 声纹识别接口,返回声纹对比相似度 + VPR_ENROLL : '/api/vpr/enroll', // 声纹识别注册接口 + VPR_LIST : '/api/vpr/list', 
// 获取声纹注册的数据列表 + VPR_DEL : '/api/vpr/del', // 删除用户声纹 + VPR_DATA : '/api/vpr/database64?vprId=', // 获取声纹注册数据 bs64格式 + + // websocket + CHAT_SOCKET_RECORD: 'ws://localhost:8010/ws/asr/offlineStream', // ChatBot websocket 接口 + ASR_SOCKET_RECORD: 'ws://localhost:8010/ws/asr/onlineStream', // Stream ASR 接口 + TTS_SOCKET_RECORD: 'ws://localhost:8010/ws/tts/online', // Stream TTS 接口 +} + + + + + + + diff --git a/demos/speech_web_demo/web_client/src/api/ApiASR.js b/demos/speech_web_demo/web_client/src/api/ApiASR.js new file mode 100644 index 00000000..342c5616 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/api/ApiASR.js @@ -0,0 +1,30 @@ +import axios from 'axios' +import {apiURL} from "./API.js" + +// 上传音频文件,获得识别结果 +export async function asrOffline(params){ + const result = await axios.post( + apiURL.ASR_OFFLINE, params + ) + return result +} + +// 上传环境采集文件 +export async function asrCollentEnv(params){ + const result = await axios.post( + apiURL.ASR_OFFLINE, params + ) + return result +} + +// 暂停录音 +export async function asrStopRecord(){ + const result = await axios.get(apiURL.ASR_STOP_RECORD); + return result +} + +// 恢复录音 +export async function asrResumeRecord(){ + const result = await axios.get(apiURL.ASR_RESUME_RECORD); + return result +} \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/api/ApiNLP.js b/demos/speech_web_demo/web_client/src/api/ApiNLP.js new file mode 100644 index 00000000..92259054 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/api/ApiNLP.js @@ -0,0 +1,17 @@ +import axios from 'axios' +import {apiURL} from "./API.js" + +// 获取闲聊对话结果 +export async function nlpChat(text){ + const result = await axios.post(apiURL.NLP_CHAT, { chat : text}); + return result +} + +// 获取信息抽取结果 +export async function nlpIE(text){ + const result = await axios.post(apiURL.NLP_IE, { chat : text}); + return result +} + + + diff --git a/demos/speech_web_demo/web_client/src/api/ApiTTS.js 
b/demos/speech_web_demo/web_client/src/api/ApiTTS.js new file mode 100644 index 00000000..1d23a4bd --- /dev/null +++ b/demos/speech_web_demo/web_client/src/api/ApiTTS.js @@ -0,0 +1,8 @@ +import axios from 'axios' +import {apiURL} from "./API.js" + +export async function ttsOffline(text){ + const result = await axios.post(apiURL.TTS_OFFLINE, { text : text}); + return result +} + diff --git a/demos/speech_web_demo/web_client/src/api/ApiVPR.js b/demos/speech_web_demo/web_client/src/api/ApiVPR.js new file mode 100644 index 00000000..e3ae2f5e --- /dev/null +++ b/demos/speech_web_demo/web_client/src/api/ApiVPR.js @@ -0,0 +1,32 @@ +import axios from 'axios' +import {apiURL} from "./API.js" + +// 注册声纹 +export async function vprEnroll(params){ + const result = await axios.post(apiURL.VPR_ENROLL, params); + return result +} + +// 声纹识别 +export async function vprRecog(params){ + const result = await axios.post(apiURL.VPR_RECOG, params); + return result +} + +// 删除声纹 +export async function vprDel(params){ + const result = await axios.post(apiURL.VPR_DEL, params); + return result +} + +// 获取声纹列表 +export async function vprList(){ + const result = await axios.get(apiURL.VPR_LIST); + return result +} + +// 获取声纹音频 +export async function vprData(params){ + const result = await axios.get(apiURL.VPR_DATA+params); + return result +} diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_大-上传文件.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_大-上传文件.svg new file mode 100644 index 00000000..4c3c8640 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_大-上传文件.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_大-声音波浪.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_大-声音波浪.svg new file mode 100644 index 00000000..dfbdc0e8 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_大-声音波浪.svg @@ -0,0 +1,6 @@ + + + + + + diff --git 
a/demos/speech_web_demo/web_client/src/assets/image/ic_大-语音.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_大-语音.svg new file mode 100644 index 00000000..54571a3e --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_大-语音.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_小-录制语音.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_小-录制语音.svg new file mode 100644 index 00000000..b61f7ac0 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_小-录制语音.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_小-结束.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_小-结束.svg new file mode 100644 index 00000000..01a8dc65 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_小-结束.svg @@ -0,0 +1,3 @@ + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天.svg new file mode 100644 index 00000000..073efd5e --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天_hover.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天_hover.svg new file mode 100644 index 00000000..824f974a --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天_hover.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_播放(按钮).svg b/demos/speech_web_demo/web_client/src/assets/image/ic_播放(按钮).svg new file mode 100644 index 00000000..4dc1461f --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_播放(按钮).svg @@ -0,0 +1,3 @@ + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_暂停(按钮).svg b/demos/speech_web_demo/web_client/src/assets/image/ic_暂停(按钮).svg new file mode 100644 index 00000000..6ede8ea6 --- /dev/null +++ 
b/demos/speech_web_demo/web_client/src/assets/image/ic_暂停(按钮).svg @@ -0,0 +1,3 @@ + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_更换示例.svg b/demos/speech_web_demo/web_client/src/assets/image/ic_更换示例.svg new file mode 100644 index 00000000..d126775d --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/ic_更换示例.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/icon_小-声音波浪.svg b/demos/speech_web_demo/web_client/src/assets/image/icon_小-声音波浪.svg new file mode 100644 index 00000000..3dfed9be --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/icon_小-声音波浪.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/demos/speech_web_demo/web_client/src/assets/image/icon_录制声音小语音1.svg b/demos/speech_web_demo/web_client/src/assets/image/icon_录制声音小语音1.svg new file mode 100644 index 00000000..4fe4f0f7 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/assets/image/icon_录制声音小语音1.svg @@ -0,0 +1,14 @@ + + + icon_录制声音(小语音) + + + + + + + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/assets/image/在线体验-背景@2x.png b/demos/speech_web_demo/web_client/src/assets/image/在线体验-背景@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..66627e1e66a12ba4efa82135bb16962184c033a2 GIT binary patch literal 78525 zcmeHQ2Ut_d_D@1UiV8NcB3QwKxHgm)DZp{Ow^6vZRTO5hteh4?7YqfRR zGRh?IS(!pT_$`Glx&-_k4*pUo3M?u`0sKo9eaE7Se^q0!6vUqukBBa`RkdGgE27z)i-jFKt$>rj0pd`Pyto z`1t*t-=b&a8C}?O)+l+RZCHNo5+;R8qbqCZFywq#s%S8cz00U_(+gp9Nj$GO`wyRB z?^oGxJ*t|94T+5QbW2{py&yUK`SXVl*Jnq$S48@)sQs?u^AGOrf8O^fOq^k!+fb3; z6c$>+iws(1k!S9g+S1r^RBNBQV2|eKi!}{%GVVG)V;2SajsES<8Gt{z0KZb6&<{1XnSk)(W}n;Qu&{@nCAr)g@mLP1#N50D<~5t z+zYM!t4i>yy3~yC3cfKV*w@>sW+wKj&qW5LZ2>fwxFG`VY$oQT~-MRSLf`L#Ye5_%}%qm~5+N1U*Zv;j) zgr<};WR{wh9Bb)zeaG~57IG-_Vl4D!|0=JmRmwvd_AjT5>)Ae~`o#KD70Qok$3lrh z2Pq4OKT;NMkO*10Bl-s*i}TZ%SV)Jf+;)-Uy}`||?V(@c6XCa%pvi9DgW)_*uJWGG 
zrKR6$*Dr~>wV`!RZNQ#^a6Xl;9KqmF`1Hrx^KsL|1uuUzoC^bt?wzwOxh+3@OS#9+ zz6|GcE5BpMyI=77lk69e!`-4c9N!EV=#($1 z3}sxNHAx-rrY^v{b9XEri@Pfl5<4-$g%bJtiV55Z_bT>f?aLf;T*lnxkC|~EgyQuv zwQvei)-+`HH?W@xXExu0Q2g!jCkshP02i0eS`dm89G=Ux3;Z&vK9RC;g(79)xfMbd z4jF_j+-V>;h(`?!#VkCop_qjyAt=GXlMs|(pacV#07@`$37`Z6m%u+N!Qg5{EQ4fp zo~yWTTjEpO@=8avN0`0!)JnK_aSl^ZNNRu8_%)`*Z2V9Uglf218&N@0Gr-*TJ3Ax#$5r!0d+H<|`+O$`$9Q2BPwV?HFb~by9b8&v!dx&9x**yZxI_&Dx`*5uSL2+Vt zayT3<3w=G90g_E0A$j$=@c9-9157VY1~(R;98Y~;smeC39{x=0+@Bi>JN|< zGzAnVQVOl%YA!Ajpi-0YWF%aDhXslgN$lN!z?c_Z`50i<+Vza&ctez3LCgddlaSP@3guSE6Hm*%Pq(og(rOkwpKcbz zqz8v_1WkKxhK=ub4IzfgkN`(taZjJ}mD_h~N(!>aR@{lS_<9}bJuM9B=CI#CA_*+}(_;FBk))?zT z8MQK22d-dI3knu>#%E5D*9{N>;}|GCe0_Q^oH;$#0#77XuU$ZZF2R=NNp?D5z|&W& z!quL1oWWxdY2T>8QS{%z6Nwo}Ssk{Dl!Y54LKg0b5VG)K^KW;9M|_0f^nlK@00v?jo5?Wr!;~Ji$TKR}+52*O?@PH!GG2;^F&#Y1D z#~yF}evH_f5~gEyKA@9FHKx5UULVqWlQD5H9Mhy=L$QTl51cP&Ofpz2*fLS_%K1fW z<+-$9*m3^YRX}jBR%k`ZxKyTcl%HxQB)oZ@S8NhJ>Olit`Q90h;BN4p(QqrvfW(g& zTr-O0sZkt5o`hm~T5qc2?-r*BGFp^rbm-H9UMidT-*nCs`j2I>R&ufhSw*pMtGR|N zz#8yo_fWW2Ir?A?_-*pbUQI|s2TQ<$c-XaP&*56l1L`#19e!S-tKO`GV+jf>C-2UM zYsI(>svO&@S3p(2G=>6UnBJOg0oSU0FxX&G>9`%PHZm3n!{YS^SGZP@prR6)V!aFx z1SEv%atUm(c>Hc8Tq|IOl%(5!zmSK32$RQvFtpd@hrCU`FhM_bvMTG9>8!oqo7V`1 zN8hdOpRcnu$#g$k{|}E-5#ddv#=Kvw`)iSDp02yWV@O*H6m7|X7WxLz%5&WX2w02e zR4-C6G)CefeD@DO;eiFVZkHNZyG2OeVB7i2#`m>(g$=0?R!E4SrkT zGNPbSVhvhNb@Q1iJG4l|Qyg?y6hNdYv{O3+T5W(vzbWt;yBiH=!HsY#XvKzh)(Buk z&x`;@OivjIV1xjSA%GF{z;YD92(bc1FalDjG!(&zvHPEpB69B)SDXHTGY-2Y+#QKO zCO*_9VxAZr@Z5+>yQ`QL9{#RRQrGhfER0hcY!baBr{8(alf)}!_f}I#E33Qx(;t45 z1F2%canfn*eD8ZjZT42$O7&_a=*csB+_Tyv))Iy&`+nlu2$&D&5v}YI+c>iKL5dqS zn=A4>g(r(VLt4OLk(eV$yZqd<3VXei_zqku<}C0wPHit09rx%vB!L&0kW6BF#2b`P z zzH=~edOGtt=FbMM{}B^UiIm70gFxavVewJOgNx^bZ2~_n9@bm^KyoNE2N*IvR@^yD zRy>_^9S9zV7=)(PA!?*80ycobBO?^c17VB67?I1seLr#;Vu*CcNaQla@<1*FxeT!_ z5J)rFrHXPwJk3T~1|eNXF5`c}Wn{IQ>K^F(MeUdet*K;3;!cw14Pqs2Z4YVhgz92y zn5+_EoAJ5rUj8c{sYelN+G%AO>Vo&2>TUH(U!={Mhuv^)SBl^DE_D%}*+`O0f(Nr( 
z#;td@xv}Oj)B^}$zRgpy3eWcaWdw^Sbc_20=`mcH;I+p1u_@_htW%t9^EB=7%-9!J z9=gEcx&adYx#MfJrqn54h7}Ojsk=r;NKI5+xT0Vp30XdzK>)C1zp}eq3Kp%yuRUB&TDP6@ zKL&~x7L%eN1!4zQcat_x4ei<{Bv1d6qj4Drc22DLd`$KX|0%^w6Q?^ zCy#8C973;F_aj)4H&}NxM&4+;^d$LVsOT_}RVHxnHDRzWflAsR>qnA!K8z?~jwM)J zzx3BI$srK0kSXEupw?*p`Y%05%US}&KXynqq(Jn*0uO>3QPc=(Qs6^SlZGsU8Zq_* z_lBaHbY6#|niObIRFjI#D1np~2udKOl3-WyVn9rN#Xm{`lS)%pZZ3%) z&8V~P_Dj!_@E?uU1({V`?fEJ^Qk>AMsoh~N5M)lBe*SAwBfCX_!CDT;!Y`4^JR}Fi z*vqNNZ)|C(Gu1uPQE;+b1=_n#&mzfyIX1MsOmBx9#*US1M1py<8ZfxL4+SJQ+hYr8 z(Prz+wSG<42aOMD)_6vg2YwGnHcyNPFVTE;c_vwrb;{!+;EXX9L%O!+NTu+y70WmS zXziNl7qWkqW7J?;hRo17dm~bY(>05AfSs-jTH5u{Q;%vC-&$uqSdmY1`cqQL;$P*> zaWmlf+Zy!yLD6o^6_g>6vN~~wl+|Uo^lr%7v0}TP_$+|lOi_d^gfd)82xSDPgizK4 z%5a+@NDRNeBO)MV;Zj1#BG?H+7LJr|@e8*Zg2eFa5wfJRO@u6hogif4NcmflW#q{& z-}B(h`megGL;LJs`jobPMUI38U*fc6hpb}=EnTWRPZUU2-P7hjH!3?bt}OWi8IM50 z?6A7a+tPo|@p#xtrqxvncp0B-Ya7i)PYcK+W5l0WEKbXx`=~qbJ4JMEyC_=5t;la~ zDJ?BDt4_xSD+NG;`GISi9kGgP!L?|(GZ{e|u8ZBynQB9FLj%JYb51J?%Wp3-V0XU! zkh>fZM2ph4dE`W{{Iy1NZAspA~gwX z&NjH%oE9oW@WF_51f~=&QNRX9u6A7}CZX!We(oH1DO1g;zSxGmM z{5M;PaAS4<3nhASdd#fKP}ZhzGTVg0q2EQUeeh!Yt;5)pKErdeer@2@FCQ}Q)$7Uv zv+C88M#sgvDeEzu7e3!cWVaIezHGjfvzS3mQo9ZN_6M(iRK;>PJ$_#}8b(h7qx9R@ zI1sW~oGyzJ)NgS#IQIaC_%XD=C=H%E;pbwKYR{QPAI!S(Q~8W;RwrAA;z~pLn41wc zZsA%1DjkD-lg#;SRNa1rXF=+&6wF?0t)RhEvD`j-FGX~xn`+qTqO9D{2}iFc&`Hkw ziRzR&N2AxQ^PSgm$O2`s%>~HFBa-0UIMC2X1;|*{Pj-CqQep`~0=!Ofzn>mx)tT-Jao{H#?!vqD0P} zL+&uczT*wEbL&g`^eg}A0Pj|Zj99u}t`Wv?M{9xt(p+HQJH`$;L`q^^V!_TZX*;bS z!M$#hN-G7Z$UMGChn#@Au(O_~nlp76>;p$X4zgn!%o&JvbwlohtBM|jy*UEzfK44c z8{h;BEa@b-p7ORK2XC?Xk%BgCgL_`WRC=vNzN<=b0zT}VXBx>9(ilG_d?R%n0lbhB- zYt2G}-xXgk=wPE`=pLTxxh58ihMW@9C26!t*&6+b@|1%MPmbFpBK#afnyrBM%<#7F zaw!eS>&0P%OMuWxf&cD`S(xaK0tar1MZa=$t=h|Es}MN@bAmnoFq$Z?=SUAw(tV1j z#fkuII`O13nPWQ$Ju|FbzHP^R$jz=+G1bgunXpY?sNGqU0aI#T#-Q@&s0c|NYw2P^ zt2439NWT{|B2o2Arog?xq}^FYYLYh>VkO?^fkut*otyk+$4`t}bBIxlC$klfY%346 zWA-0@<_cM*bc$?=mK{?&NTGmSZAhF7F+fdcXU@$vSr!0p%z4e#@i(<-GN-&2Tq7EoB 
zu{IuF+T%o|yL8zJT0Nh`X@?sk9n^37^s`3RekI%s z?s~6GGqlb9Gq3n^>L>HO=rillQy`ZHD zBJaK!26@m05VH`l_^C8G!<9Fs>0rFmV+Q^x76D01QFJwy%wUNTTIRhN$AD~~UZM9; z(AYG)a<%v@09k!7sPFpY3XpY0x6x~i1=WTbljf6A2f=aLG2<0_osp!-eh1sbGL9{^ z6DyLW9tN|jB>U-og8xY>lcfZtrVeNzHHke0LX!y8Bpe_@lU(nI(1g%Lga<-XSGs~y z2vMqrGQF+UA5u`-h0=TK0gmWdZZFqwDrp_rR&7)%L5} zl<43!%s8`gEy@}RU%SkW%dNUTdS}SFsLNdrrMC}^Wz=V!^FAkG-|=9sv9_q)PYq&n?lPllGoG(_(CJY2g_wmd_3|*JbD|=L*7iogd5^WHE6DNxB909#~`P z&cO#gr3KULqz7QJmi?#qc9{qo3lw?P*=fJzX>|<>vjH?al9h5mjGS&nRUF-QZW?0& z=lhoxn+xjiBwt9>5ck2@2v(&Zqz^dWdlz==xE~5k64zx-b#=yNJ5M!2*gvYP&>-%& zx9&CrUc2q7)zU8SLheHrnDA@)v+SBK@dGv#O!4e;74l`W4sC#~Afq6)sj~#EpKo2Y zmuyAqi;!3p_da9{fN;&B0d~yLVW(NR9tc0mAGIDMpR9t!Df1>!{`qFfcsVN}6lh`XDrzdjLC^m`X49EviYyvNIM=69jlR+tjC?`fK1WF+!$kcy& z3IS>f$`}}NG5te8WN5}Pn1cyf*EqGg87tpH^@H-3*hf)s^?E1C!bGs1+T6TZT+y)stx9JzNb-( zCM|i$+Pser%%Z9W2Z$FnzB$KitjkjRG!JeDjZ(`Z61LD|8; zE1_*U=^Y(nT(E9{O#sfGo@6($eDNqQ`@mxjN+w~DbUnu&oQ-gU&4zr>=gs+Go4i7l z1>~NA^2y*xg}nTHaa1H}6K`}_loV=q49UyICSE_pj>)U2a))g8orWE=h*?JV$s{pa z_^Nu0riRAZ+o`|hS%t(keBf>Q!fI-jU20(6Q z!BRn`Dkvp|L|u(|U#uT!HloG^vR1`#2FOh#L76h38jP_`(nt>#+KmozdUaz3=n;9bQ=6tR}QoOK`-m~q7$ zAGOT+NxPY2;2cZsZEjzQ?@n@#Xlhb&Mp~1$9AuCPYtj;sPWzA>L2g7!0?3U>OF$Y# zC}blyA}s+a5TR^D8bl}?L2g7^0@5J*XJjKrW<|x1md!cGF{kqkT78!%3lAPChFUZ? 
zSmdR(i=G@3J{y6*fynTmXF^oDC&rvqGj+7em>JT z?!^mspPx@`qfn+dSuL8sYD27h71TPb*H)|~V>B=8UUVUi9xNzL3TkF4j?#!7M2h!* z(S5doS3qOGDa479nGQA)tg@I7c;XCO3~5meEpxuFB;~H9ofUYFcp8>j61HHsQ1H$z zVlMG=qe!q)@NvvoHoDWPB}+b7HT0;Oq#;36qXO z0Z0QZ%~g#cF(BfdS)#Ks&Rv-TW#kMd*sAZ@%bp10d_;r7z5+7ea`6`ImJHRLRD$B9 zdzJYUQdAfGkfJ(8fe?i|JA|kXFM?bj9+gmx>P#n4jOt1{Q8Ljf3Y1KsWTHy~D48IV z2N47K`;v)~qpNiz)^QW^@cdE&-0U`C2I9F$xkLoY42W7Z9Tm^s}oc+kJOyW8lxHD+zlCq)bv4 z?c>g*Wq4O!UiWBBzHf@zY!)oL=4dIlHJ44h)w~eT9ZB)8EIqFXZ++w59)7^SrM0FO z7Pp2}M1H7ROzhcpFxVVPuQ!s(?(nSTrKS_H>3^;%2rt(@yNushZhfR;W} zuEV_!!DT$y7jgQ9FU-alz=i(;xOu2>$E=qLN`ehL4YtGDFU`MBoUAOxOIp_v@SX=P z^&CE2t2N+_BToA6Y`7AH9(Y-XqY0w|ZkxzjE3g+i_D&&O5rTyjMl>I!u&&fqRBb{C zBjOz)Od?()mq?5(D27SoHz962~DBXdRYKH;8UOY~K6baJDRR zV;n26``>o>vAdFAOJG{oPt6Y8=EnSYqsrCOJo7!PZ$vwPu$6UP592vB$jB+}S4U=% z2Rnnd)4`xWTsefnRii!3&vs6DJi52K0+AJgL27^pG)IlNo(?zO_u!q1Wm`Utfm7~p zfY<-_d>+I>9zbG@C{THD`q=Px#L}ge>W=%~1DgXcg2mk?xX#fNO00031000^Q000001E2u_0{{R30RRC20H6W@ z1ONa40RR91Y@h=G1ONa40RR91W&i*H0JCetzW@Lr#Ysd#RCodHo%?fK$93ns@4YjF z89az5L4Y70Bq)-ANJRo^N_JvV(K@kYyUK~fRH_`SDz&vqs&=zg`xmV8%l@#x?N8Z9 zs@A);m8vCEwY5oBcBGX|dlS*Jh)9$IiZo#o6d@2GUc_@A_ulUPoI5kX3;;%uNelpz zO%CQh`u6QU-#({LcYnJ_tH&aobCqh*Ct8msLer|Vdxo9uF1G8|TW_Y>DmAI*64}u6 z0y{GDzOU3-qm<_Q#zAj<`O&$0Mp#%>!HpWE!=xQ;v#HEM(mHz`+}z#&frG7e_lXYqf3LYFVYK#nU|#t+AN? 
z4+y3U0UbkBNeYdL+~%M)I%dI2te?(lr2{I_UC!=k*ZFf-tFgOt>5HSI2hycE-C-JL zbw|f&RXm=H*EsdH23sMSwPC`a&CSoZwT=0MgD0``n-8(L<6;7v5K0qGLrPg4yKe)r z`D7nFW|RpjUR1<5r(z^s@))y$>I_uqq*8mTdwZ=J9@aS`Mo;W0z`DCtq9vI)6_~0% z+M4gV%2tVFw3qqCfq3=z?PBjIoZggcn+;*;ILZlYXfSGAyM1PG6Zyqr+iE^Zu=zJi z>+w33y%&pzw^i@;E5pNwvy#TP+41!SIOjTZHxhx;?Nqf+Wo#EUuJBP^IURPI(AFy@ z9v{YIlOdZmhC(jL2Xh$mSX5It`FN$HIh5HWgH7aeZ>ZdMU)g=Bg<2FVU*=60d~0)V zotM9)8WxwA_GkR%KjU3JWUK{K7F8(By?9c0wRZkf9%ias@#OCLqKIO_}qjZ zVg%3lc<_ms@$J*rgl+hX-vf+8yh1cT$`?T4D5_@8P2j|zu`z&EYG#`Ov%0^3vfA-& zy)pU#HLAnakqswrYXdM^RZl^>Lu*^b)OlWWc4#Q_pw>n%J0Da!@+{Bx+PO?dEluB= z$cAS705dhbI~~UGhHe?pq{F~|Mt!OVv{o`6Ia-ARIDi?W^{S)&t?5@+oNWcHzyC_C z%4zEf9KVafn6F}`+Rz|s(#Dmat34E?({uIBcwAl9!PFS%cM663+^_TJBXDt%ZCT&u zb06H22!E;v)HlcPH8iAZTk8(sQ;Y)6*Qt07ch6KDHb-GL)JO~ z7N*E(?0#TLA*gA1h**3y%;faZP`lDU*)(91Mn2qstgaZN0^fE}y>7bq+{{v_1-dbF z(q3)wUTqxAp%&R}qg#*Ab*#~?!_fU6Uf}Pc@^uj;8|1kB60m9KISTMEVoq>U(kZ{c zyUwbsyNwOav<>Yz1&R7vr6rKkGOUdgiouCy36_E{U3OA?o?dE&h`(*oz9wS06Iyv`BhZD z)GNwMR;y*exJr9=CI3+)+@Kp(9x$Z`9c9ii_SpJL@AbsyOlo-=yX~yv8kVVQ@Ekv$ zvMM|UXfJ3w#cQ<-mtYAXuc3XBI}TDUj#c}KS60C|KWKO!%>U=Sn~;8rG}v!7F!F|;LCqsQj6Ac8~^R2d%So3$d9J$|sb(rI4aTAGa#8b=%tK+m~ zd+-~dOM5nU^yp{rU%jfPi?<)e?+OBDoz~Wg1T(9rly& zbH;d0HfLVM)IEnAFLPAo5~b}z;6G#>;2-AZ8b9;AyoM#c+3T+I{~EAg;Qq5~9`f)o zpQqE=n~LkdJAGnid3yQ!WN|v8Jnrs4D{B+BusHOK({(y$7}m3xLeVablTa2=EbBXt zbJKJ5ZO{B%TVlt<^dB7iZA zoXCXg6*?Q=Wz6*e9-#EvOVrCsKV;fb`nI+H50&!&Zg{wNWoEf5qo!gi`#0Rbg!WAm zmyzq$0DK;{w1&^CIqQ7atC9Bc(F_#;%sTO<>bOz5E7a;Kyg%BlQm{DDxa?L5Q)tHV zgVEZhEd8og*3jTsTPB=$7;DoLpnMPW2RWrIkOb4~N_Pg1dMcw;EgDxLb6=K^EkD_E zz)n3qv%hJdIfal<@_814Bh2!E)3C_rFZrCG@@uCrjC9KwVM)+NKns+6DYVY_jdDIL zxfvazWiSft5uEyyRh~Ze{IR(cy;0w6ZOq+$zU6@FoYRPq8Q@i|Xb~RW>cnKBEy^Oo z1?8A)3{G7$W+OZ#_pN!JmEES-%&m|_d4ll9^&%cW)nC?fhE!#4) zarUfr8K!X4A5T3GQ2!iz)n4W*M0n^d?b#Io_*7+LKdK5B#z!MA-deEo=ib#@H&uK4 zsGo8VD95F5W;+0mPgzL#D*Se6RbSW~ztsO#>ks9pRmvjm){`yMup2j2!oRT~6K{>Q zJuQGGBN1hRhDQcGC-@$BWA5U`h=X_ksgFT*(jOdaU)C`_Ll=4sm5TalrHRr;k{lDA 
zmeCQ1PW9+K1H*R9%31pEN*r&QhMC2wKHN5@^zHN3F&zl7q{Bvr{dkJDt2AwJie_$Il?}cX%Z0c4TPFYa(Bz{rIJ^Zl zk2)??G;8XaI_bnaEVf(H#uNkiG3zUyVceNeRo-ON$n;8kbncvWuU@saO<8T(@pS%v~|wv6D;9UMhTbAyh9ncBzBC$5rd= zOr90*_x9GzI8}|;9jh;L@rQJnHyS^zfyVM%)^)y9S6|=p@|VuSX z%6RtPOp{R2-~k>$Kr+uI^sSXZ3q^Mf4^Qi1R&HGNeMdNkD_|jfFBPk<3zY2-!{E4| zR>vUDbpG3tk$Sipp*({c-{JBQY0jg*qGh};A2<-ERRfy7H|zTcXXod-g-%zzzgxc_ zdK>0B<^=Zco4DN4ex>q)S+Rz6wNx6-m}Of0O(sTuI()TpWz^#J*I9EjtEzK0=rMu) zT^tgqq``I9sLOZmc$Z15%=EdT_i@&-)9}H5jhK7UW|?OzmWh`_dxkd0^!j(`ncsp= zcat=v8?kG{L)0+q3iT|2AOm3Cd@w-*)CyR5;mmbQAIeoXbkuX zj5=U^-2rc-7RRJsThmTkxQ`jlrrWoLcfJ~u9rHN~ut1u((Weo>S~CKI%gB6yXokm0S3P?S(o$c>8XD0E0PxH8GXdTv9e@G&+6)@6h6@+EvvawHX;OHTX|?mbb7J*w z{s=8kI$FPg`TeTn_VKi_UZ#9-hlM2JMjY(4bmCI}8=2N#d@8qHW&0^u{Dj)M=KF znAjDq&1<-g(VnI7TMh-j=^RjJA6mIr^HQ0khmvs-Vmy)FPY3z6h=L{z!tx*joS;Fv zVPnou5)BP+5B|JOT=HUYSYtC)9~zL3@_m`(2#|CpKpWT&sj6T18~k) zy*=?RTMsq-%p_uOk&RFI+y=BYrV-tSMkjPS9{)zwktw+K>+)F4%Z~njV8Dc8CR8)% z#>ept57W>$t}n50GxSd}OK-X=82`=Rl!kwGyzsr@nUqtdUS#tB4DC$=3;9+?ML0$| z6f@OuIs$v_<6FV0k3K6mji*m5KNee%^}_GseZEQDb#A`2Wq?H6YL8?*_Zgwt?MBo1 zz<^?&s?hi^hBkN+d{Q_W zuzLXty+pvsAAUJb2j7SBK2Is%-(UIyNpEoQTF$ukJv96+)VoxBqo^e3@+|9{ZI2(A z<3h6&x?O2HyW#i&0JB=CWkN9%su@BWU~;nPQkRx}5^Z9D*Zqao$y@i-y%lpMBN*>p zP4}e3%u_fchghcFC8kio0wvL`=v!JOu45WaHw@V`Xxd@%7y+~7eLsfqk-DRt*i9b0 zidDXjX28Qj6_&ZfjRdp&;6_bA1n)$bq z=sWE2m)5ZU{`oz==X{xl{5gcP2MwW*RN9a`c;t?chj*Pphj%hD1Dk0zAT+0;L8W|S zz8di1jyM8Q1o?{EF3#jQx-8$~hyWDYcG%B_XAJaL0!%3_qm?=%EVHFp|es=ElF$#9{!SmS;tG~ni1SfY&LNk|3OwY0$5%v zq?}OFFh_O?H~vbjR=?x>&1LP%$cTzBn`{FDJcBF$BI}n^(q}mhj?QP!ewX>U0m%)M;T#+BKme1Ek;`-(F59H723UA7{78uUA}(Z zP8>e03qwxYE-u>HAXvVYREuxmy>}7s71AzSFhLb)qDFk3B=_!b@&0c|$L!ePpqh(b zwuAqCVGW}rWDZHN5SgQVh)g3I7~Qb8miz5gioNV<(U8mK*G?7V%BoD?A764E_a^kX zn>>WJuT3(d*|l@EP3U({CLj+xKa>c1bD7O_jMn<0Cp%cZ%BKdwm1!xC!xEyO6iS+F zg8${I;A&~gmoKZ-;TIIkg~G2{W4?tstd4NtPXrdRWix^2xd%oVh(8!N*XRXfr5srKt7bGATq8*YC;rVgG$O!l~#vc>(}NdIai&YK{XJ>@8!mX zHb<^zcyONhrGQDQWK&fO{VsGA9;!GW3;3=*xu;VxOC7s*^k|Xiq3y*{o9*nJn{IT2^Q70slYhwL 
z%p$A?(i4-+LJM={xnmR80s#JKSON0R44c+4p^O>F>9&q@kZLL05!CS!`4vK>7Fn*0 z2^CE!Y22np=i9DG0~%F=2M(bg{lu%Ic1INkWr>tZv`bhyD@rE%R5*#e#iGV>ng8-h z@@$i0#HAHSLgwA+V4U`kc_{ctmCg%KBX7+9M&nRlOhIsn>I38w{-#%eejD zo6@lUvo;=HnQhX3_!@%iifZMWytA9#0xteF9flWU%i%j}Y)l&ZD9FSlo#fWIVn#V{6)7F9`rEY^q!dQS4^1;iwE8E5qEw{2j)6nf?+`IGJg zS;Ix&Or)=O5SRK6&s?B9m?SHd=LpiYv|vb6k0e=efIFDI`Z_ z;F$Uzp@LWo*YnH%I1%s)_X*s?{Jgx_YnfzcGw< zn!X;R8!#Ko=y7%ruZBh*zECqC&eoXNB>)>-O<`voeNG#8&J~K7P{lA+GTSS|M`d6k zc&W*Y6n>7y@DoBoD=pf8x^xNROAV8UW-s=x4LD?Pz{p(_1LYlBhC6AX6|* z@7PY?GJ3NEXQuxN4YT^puc+E(f2kuIroThcWa^}Bv_MKp0jFtyE^zpdb7|+=Kwr(` z2Or3teCYK1_lLgjH!3`>9etem;&{Nc<_`zrQ%H!U%dzyo09B|}1T>BBNaN&;;|QbZ z9@mKVaJ)y2n8*FKN7DFe7hI@=;PT&%n?%}QB9U6J=(pH>l_rEK{=Mzg-As`llxqX z;)p{cfC;FyrCgLx{$Z?ugJ#I)%gxXUx^7J5I)&6Dj~0Un-sG?ZRu5qrhS6CTR7g(B z0wf*VM2CL=LVEhp+}z9>&$BFU#ZSr#RQh%n;l+Jh^&80KIb?zx9}OR^L_shi*BRVZ zBQT}TD?gZK5#EZ+j9~SJ3(C3Y+X2^QAs<`GMeS6<)eul34G}~LG(?f?#l8v*j({S^ z(x!1BQkCm@jpKj-Ktz;H6{9=UE1Tjm7cb;%;MbbbbN`WOD|wPw(e;dg z(H+-~Lf^Y~tr^d^Xg!LlRsMSez|Ofs1&h~u`%KpVGF6-fn3QUHx0BbhD#q#vdd^2^JG5pco}tM6{58d-9H-W$)OYp7K29 z9~IWHo$o(*haKVK=cxh5*pd7X0HXx3C}s}}vm4pr`fuy1yeu@nVZbhPsES#sn)k#IF89@t3KhOfj~Ie7KeXmjB*FjsDF z+X2(f-qjEo(L?0V#AlxK?3hcq6-I{^BwP|Jj3 zRvu%woM|I;JE7fCmN)Tce!e#6z$5b6FZ4a3@$H12<5o;!afdbd71(auLq^-zn8K($ zG#6U(qJKERN&m8V!Yx0R*3z&do zb*_qi7wIc%A>*vUD}PG+MqgVEAe*Lm+#iD8H!E9`Zy0%tFGheRM>|}he9?zIh4!4r z(U!$;8^7AZIMU{K@^@EV@r@(zf~yoHUzYonH(vw~8sC6W8f53VbRsT&HTrkOzD4EA z8MR@br5anT7aRqwCNd#EC$ZMWtz3?F))ot8>$ePnTsNz`%AH$O@F?$D*lAqYm54-sJX_4SLB zlk?-A&W)mSd#Fh;ae+_E9wmjb*~;f_2fR_dz=(vJL$gNdT#p(jJMO3Llu~EbPRmpz zzY$eF-CsG6RE(c?(r4%x3PzM?aiw z!P%(1OD{5t04uya_~@Ftxl;xABwu+6L{7&OVeB3W6f~?78P1bpPMnxXE-%O2M558S z?u7m5qgI(1eBzaE5`qc=#`4J9qmcULg+R&Xl-K&0_?6AuhE|hQZ zwaNTHcF`&Xn9@sWw<#BXrlc z(o4#ni-$?$yB(}}>7f(aLf?+xTftdork<@LdH~T`^!##u;Fc73_b0mLfThzjDM&rd z3@kInZVn)_63BfESYt>bOoq=GQhDlY}(E8ZpqM;_N7b?E{v2>mJ{7z$EMZ zJjVuLZC#-r<&-M|*rP1KN|)^kV3n5m1HSJGU=Jw3NCZ4L`39wpEgN3GvtU*19twYt@xxj&R~*A+s@W#U 
zQ***(UaOi*-+uPkxBe<#r)HB$yWAq<`j23^abpn6)*e&2*+Z<)ZVfW_pCN-jWW$du ztg(uk9geLT!$94wu3k1p!EKpX&Ye@{kC({=nJWowa7oR2fh%l(m_7xGHJ9QSZXzRE677*W z1R+^0Xfn~hCvi4hW3Es(IxAa*B5Z6nfYFl)6aO$_+?#)|2Er`|OoUujHEfhue~WF6 z;##)ui+++LGK&TMmuZ)1hf@ZM$^7FGupLG7$E^Ii5;H5$4`b-N2YYvmh`f} z>$;d*a>42s`sW*R+BO8a+`l7#1GtB>-{O-wmHWsW!^BQL>ff{5-K{0#zQMK=>|mg` z-b>8q2ea5$w;1Pl!>Cr{)E;V$l?-4a*VnH5l;Lr>Ebg&%f9|1uQ*7__8C$jA$!42b zN1mqwR!GB&vbOHP@!39pildE9TmFdI<~{&B#NEfP|5cPo={L-H*}`y^KJ;I3{dVXl z{$kNq&y9>kzf}{}?25EutC{*!_Q&{_(>Zpc46XkuUBX)Wh4s?lQiE{sr7eTgpLcC5 zT$xsLS*auZ4z!6bv23rzu6Pvq9w(Fh%F`_*@mVmtyfiXdQ?^Q`;zBu#Cv=0cmTb>b zW6aI6$+G<>jczL$WCg#iV=*r?G}OXxp_Q|uE39EkozL&r6rMe4r^Y@?FSDi26*4^GXvd|dvAv&R zMZbzskhOi1iVAfrUj_tpNm)ec@XHz0UDo4`&}p9WlyfbX^v6ERT z%WmC4;OV+_(hb=QHeI_QgdIX*Px#L}ge>W=%~1DgXcg2mk?xX#fNO00031000^Q000001E2u_0{{R30RRC20H6W@ z1ONa40RR91W}pKA1ONa40RR91W&i*H0D(sx<^TX9YDq*vRCodHomp^Q$C;SB&vF-R zmjDQYq(Bm)xJZOHZk8xn5*cSC%PQNMDwE^NWF~Q?Qkmpo%CC8{rjnY<)8t{QQWGbB zn3_y^EZQ!Y?U6SrqHIglLW&Y;k`f7#009yJu@TF~UCue(`Tm2204_jXK$k3tLX#Kw zo^$(j|NZa%_uqZBD!W*{S1B*AlpfyLr0K^F=Gj#6t?6b)%(ZfK?I%WiF@R&*R| z1vb|;@q0F#4?Qom9UZM9x4EaII{`b${N!@@uys4njIFVL)E0!r1`|5lwN~quHcd2; zR#vB!RtZ{40yx0ivfBORp6^%(des*~+k zPO5#u?S||Y2(-GjRV5u~I9Z=o85>1Skv03RavQbP4=NRHf&%LinJW0GiqA=uaNlDcj9|C02~WN0B`zrdmz2-t-@My(IPoA8s3p2 ztBZ^?LZouMkMjOYt@Rn>N8jJMvpIkC=%#ag=s3E2Yj z>%jk7q+^PhS`;p2QQFRFb>&~oi~}V=L3F0ZmasBXtLq?2jWW(=Sn-A`sCC`vMv*Ez zOucEUR&1Cv&`h_ub7y~Ttu=patlbnj?w`{!cEr{KnDp*W*ZX|Fi(}wqu|K_#?U-`( z*@3bA|LX4El&ds8Cb=VS)#?K+HeEHLYD49=M9SZ!tyvGV)T3j)RiX%zL}l9=B&WeB zXOlI0eFo>Jt?jHhJ{2u9s;s4@JC(0bJZU1mNrmQHz_1RjRa4R4?wZE*8b?J>!L&8i zRhfTInYv!3Ub>{zk&3y}Q-r}ubVgDp$lZ@E--3}_3l8fR!J~LD!?2EcCE`+sNK=5R z3AtA>z!vHIw%8In39UD=}f7 zf4<$n-c{ES7TrI=mTcAz;tIA)OKjwZ0Q@nNnIV?r&Y9B1^O@098KEABeiNq!^lWPuZ5l)Ks+X& zM=LrwI2DnMb{V_VYmIY8IqpT}hF4*gG2gXg>9t<= z;K4N$FTObCZq7XR+4vxn=or#&mAllJPH5FvB?=&PQl~KX%}fh~C{<{a6z>p24IwpM zj>?TCJs0DDctKWP?&t}#SU-GlJG`yt#pgs4$BQxC{z4n8K zhM_`xdpD}9AAk?atBiq`Y+OW0}{~U?Y9tLe5QB;8&{kY6maSyLLI=ntK^jdaksB(s` 
zk^`z-TQYI9mPyQriCk<>^apUz0|9!$#|Xy%Bdy)5&D|>Ot}sz#4Bo^3z)UY{gw@bL{;%G&8z)W6$3Pk)tPS$J}JH zXbOcyz`H%G*4~!5nO7fp*b|vV>J3vK$sF_$HySrKI^$aVadY#|!oa}o2|>*1bVXgn zD%zgBreH`)-<<)@3mdd%cGO6B>xcn?|9Kr-l~u0(P&?}C*|VR0cX;0DR_vu0A)eNY zQaER6+`j4f&d(a*`;6Sz)*wl6vFp$$YI{Y zS_O_u4>FUIF!+)QBdv{IT9Yt3de9iPftl>{P`?3s-OG$eCUEKbc5NO6pSeqjLPXjI zgxya@#`(QX6gn8h(g-s-YP_*HlrNk+mw8cbQQmmj^qMMs_3~ZfukO88HPBa`+P!Pw zfO5+#oEYkF zZ{Ln<{mDfpP11yq%E8pkC5n8<{KW)8A$$CI^%5ee))^QZtXGMs0la?=;2y!Fe3aIb z)8060O`5+e3j~qLDlff-HPJ@n+WR6c64M~&pnSqq5<6m3_U-FQ(`qRb0}F*-%6xR;YDXSC=exaz0H8u;S>{Hn8QGA4XvO1YZ0_K0r52goAv?`-BTu1 z?+uRXi|y?VlXIa(o|=8TqJ(K{Q%)hU>usbrJC15ZYBF<)A{NFGfcZ-0;U+OOccHN{ z0lsdL!4kryLWTe-jNn7yrn6%CVhB*;) z$qwYDJ6)rW*Z6MVtM9CrSls-Y^B(;CT~?GZg@Wt&j@bY~wjdQu9_JQp&WP5||4#V2 zvv0WJ(%T&nYLzH+ku-HxQBNt3A`r1gxB3vJq-`X{c)mN3$s`6kI*tla2(7P{g)mT2 ziIEOOg<;tV??9e;1u2R~#_Zs`XKMyOKT8MmkWl7j30If-+^wn!K}v`+{ebxMtCBK} zjm6(@2VoZPZS|Y7F|bOQvQc$sE3Oje&f>f5-c|`yHmdGy#Z|)GS$vn>+bUtoM%A6I zxJsBii|?{~TO~}{sJgQiR|#`x@m+Rr%MQu3wT(3Rra(TLw}S<`ovcZIn=s_kHn7ob8~aCJ zAU5_@BFeISTsNI;QQDQPi0KUL6yGLZ6Ghg!PVDTwQi<^}wB6Ka^N(yuT`huvWfR7j z;!eW&FIZz97NUs6EGjAUa7|r@XeYbX>?Ss)JkNsJS$4FYCTm^}IrzkK_N-0R*DG(h z5bPk|yDlBc25MFGGU~iHt_m$Jtd7Q8MnAEsidJ~-vQ4c~Dpd%J8xzKCaFtgi8z}Fl zCfXm`oEZhCvZLwCCJdp-9W159l0t|dX<0OGxxnC&6*k-=X$fTH%S8V(sZ`bXsQ8H~ zQj&%9;Z;@JS5UP8e}J{mjiIAljOAEvP>r#1PS4n@?|hj18vtTyVfjTEpr=QrhfR23 z>m$16LZ@mHsoeiX_nnRocF|eeH(M$Nwi6#lU&6*7F~)8HpbbI<7$&EDy?xAtSI%Gd zz7Is5a8sY3U0>7mI#6=9^Wz>oeVq~7Uj{Y|)TO=Q9($Crc<>S-2YPyMI; z&R>7WD(r^Joja%etjadv(5!<#@wPw^PgQxbMKH1tGuBq^rOs{Ge(&Z}?c;9(XaB7b zwd{FJGUfGqzB=c*wi5vQ$#5S8j1drY3*9cPrAHYcL@|TFea%nW^S*C7*y2&5v!?-n z=_TuqB~*&kr&jH_JCKhiq(!_yH-1Rul`RCkNuEI}DCU0L`NFS3_@_f@rlX4mEe{Tv?S9+;>q&f$ssvko?`3bnMxTn$@TjP=yExE+S#0mTHzI|d6O3ShdBgb^$ z-ijAB#OtCoy>;tQrp^yXfM>x1LJN^eymkKay3PeF%)2SvyLBrWf+p9@F6sLj&=yjo zHiE1fqR8NJw?lX|$K>c^5Yp-Ul6|xE^2{;iX-^08~M&%5p)BWARA0dq; z+G5#+S^BV-{zOZg${3@qftP`R$l1WKns0M_*@vSKUPPcS%Qe_qKRT z%6b&Yc;3Adyf4&k&R(&j6Az4<}V8%}YG4!bWRZ6!2J(kfyXw=ZEp8 
zJf{g{Hq%!!t`v6!I=Mhv@gnUY#(#+Yk$J>v&V)D{s&Vc4J$Pn))Vq#!W#!4qf6|)B z5NB>E-C(RPsXQl#NAE*S&@W6_xh(#tEwDl3oA!w;*SUv;MPqRX3vIp~ROTB$XM%fOT??)Ia_G8$9mxqI_$1g~A?h^Tm@b<^ z?QSJB_-)4qe`(TLVY)faz=|XcTvIh3skI=OEgX6ybRaki9fWO#DDgk}q(+U!yK~*E zaJj_Vf~B6!0YaSX7-pnzJ!-0j6ka*5g-u+--eS0g=jA`gFe=OWInFh(+<*O;*z~!n zz0&ugLpsFE9EO}-wAoKvqtuiW17oPAA)Xf^(5K!=ofg+r*Pl5Ybh(z#*EmB zCpe4ad4|0Y;%q^wxs%&=1y`UCY%xIQJ@&`=$nOV*@zagTnvu3eJe+u>i~KorW+c;C zoo>k&^w*2#z_AxN0P=#DuNiM^W81?rO3qI;Y0i0iXzN+Po89Z`xudGwS%*^p2ntZ!Lp0aU~G&7;($P0W9L2B zabL|9&2PGu_d7wt;%Hj<8({)@1xD`=toQFxiCc^@38V%aAkPR<=q*y?V6i7K_WNPr z{NT}6IS#D6(wwR~lPHES!Dw%x0?z?{HfFB4b;c#k781R%&Ox?q9t^U1q9-!Y6{gB| zjP13>#t|bCFTf*~KRE}0Ho|Ahl|OunqL6)2imA2}99gEMyzvo{u!)IU+~JMNbJrso z39%S)7t##{PY`<84(?v5u1*YlX^13zQfAq;OGRhSgvETo5k1%s9F|F+OvC~4jFlyw za=?<<9G8QVXUd9_nAZ<7W2w*NVWc9W9wIR?GO+TJam3Pt_~6lG!pJuIE(g8M^JZp7 z_6Z+!F1XKV=UMtV5EY|cCNeej8B<33!V^?MWaKEYAHXF4G&rt$wl(01%`^D7>V_Da zJDB&97?~PuVJ(DqWh_XgsIr*C;_GuOhdRpzp~Mh{J>uEut(v~G-+c$PWLjkYC=)K# z#pS3VDP}Ht2OM6&?$)3l8iK%VI`)2k(`&EAXNi>stV9zeNLe&yJv4p*WKAB`BI+{k zA?5K#xWl`RG9W2r~N;-4(>7NkqBrkDY5>vk?+|H_}XCgAel`C6X}li88SAE}x%k(73BRetv7#65g}4-)YOqRxAOJp)TQ zaUa3}e94#s$I}=2+(Y#DIE#GOIF@h5+gp)@`O?1RWnk}o`ORz#9?L@vtd-F~7?cYm z5kx{1aL9L7H8a5b`7T|RjF*&W)B&|h3cI{zaqR_&d_P~NrrR^IwG|B}^Anw=&KrRV z-+J=FwPkMspYF_89z-!;h-|b60x(w*Ld?Jo=%U6{kx(_sD7tgR&7b`0zB(4(Zk82E z7*S~y<3`R|M)X5$!IvOHBWAKnR2gH82O1HJ;Zrp~R`DF1-xbvqE}!^#Xz=I%P(L<5 z0`RjFwknk!-+~9Uiv>Q3R7+%m$0s=$LI}b{GEVaGiVdP}Kg{78r$tc9Dlfk^$Qn_) zE(i;IaKfIWz5ApKiGvEvQ*gseoNlf)XN%fbIEV~eT3bT=6-gMuf)MAm*RG$7(p8=L zP>5n5XOKxa&xLH01C8T>whCY7IUc!MjLeVCL@IdZ8O~Unbb6$zxVRJJ)y8)uvxg&$ z)lMM_?{N{#i;aOYIAhL9KAj6a)&11I#wA6mbGdXKYPiKl4$*1-EKPF$8&VGc#5}-f zBpXY^>MhOQlUag0b$Cv59}l zd(46mO5-}Ga0nV~;64{jc+ck2LnTYxI1%+!eFh@lE65T9CT+x@VO?-{o$otY#y`yo zYVRde=_L_`b}(C}4NvhKG}}g#E-&s)xyJa%_`*e0Pb+Vu)XEP_0kvV&|Nmd&H|K_^J^g~mV z$KtPEpLjoXO`gf#BQT*XK&3@POz8*M5lK>-Bex_bWjoVO@}FsA9%4O?!ncR;k~Yui 
zdmf}?)S~pQyZGEq+R=MYKj@!EqRLibv-4ITLk}z*b6YlH9@#Ot!Smu@%Xk97c-8V7GGQHqen`T>F7julf${ks#B;W!b9CO{h9VvxTmA zF{lG{EN9(+k?S~ZXW{PThkw-L)PF{vA!xk2=z5!RPn!0slZ(RgtwZAi*nB?-MI_~D}jO-q7Ce72}`BYnXJsIYk1;A3)}&IRMep+om-G=<3S z!evopEL{EOUASOB9QvQ>_Zd20OBW@2{budCp8!?V5UPj7iYWRIoKy7mpt(iXKo%{B zcd5Wti8rFa`5E(&|7TtI4MI;hrfmg&ZXOkKiUf~SNw+i^gYXUOHQx7OruT?g6|gDA zDV_f|YU_K2g8HC8QdfU{pwtjA3e?mW@4Tae%x2XO(yuVDx`+asoopzc-s?D%KO$xJ zSkf)N+yC)}d1|(_hh?iW+|EYyCv$>0jVKvw+0ynGxg#oBLf$F^r{YrD>l3xUd#j^! z#Y^L%{+HggA#U;}fHe^% zU^$AIwB!9Ah8fvVn=T2@j9fi;BiXR!bwA;MXdL^0MU~0mU03&IO`@-GB|)y+@{8c+ zC$CSu3M>~rS8qcbY=@IjSI!twgyV^n?i;eHvQKTkDKWy4;)ljeR0e?Bum*z2BBMN6 z&BLfg@j=Qd!m17{AGW}!A)X&`T~5j>qr8Z>1e0_Ux%Dm#<`NvC+u#vZL3&ta96Pr@ z)sR~#SL?`;Hd#pPBj7nmhU>-Yt_nF7eEk-yk<<0cdGD)E_mZD9SeVF;dD@KPsIUhp z8jr&8qd$JKp@dm()n-f>+_)lx6(3t9W-fBUoH1VF$dPz;ZDy6wRsNeetmAOTA3>Bq zLS$=sw&qryO@D`oO9Y?HO?6Rw`tM!sb=3S}r?|~!6}VG4sQB7zmrsw^k{#5uL9Jy9 z`ov?{mPwVjXgd;1W2`f(u98~dKeBR3+u_lp)gO*#)W@k>Wts}gzW#=q!P}yM&HDnv zhy>Syei^JV?R>frrwAOkzdv?PXCB9Jj&k^XH!F=@xSyST0a5sYHEWuApG^IksXh6D zdJx3vp`QZt7&_Kh;Px)%nf?>%SXmM?yU9c1tu3Qek{I*5pK4nOv1DS*Zp(9=HCPV| zUB!Jf;#tk149dx^OJ-}5g&|3P;DzF$y#L{vZZ;eBWmMg1Hx>RJVJmc#$|ODeUA}%? 
z>dY@tzbA~`Z*qKAx9_qv6(xhd!su94RhCB&-7@$%zpcCdI#y-GI$?am-5|Y$ZN(u7 z{q#gpq~NcFXj4^mC_BKqCpQ~OnrxgROXfG*Ar?j(S|$>roWEQz61G}yfdMBug3sXC z0cg^Pt(eih)6d|ER_7!8z<4&LuJ@1)rZ}q)A46q-ESpbJ>QXL<>G+u>@tHP0H zS90Or=H@=6W?`4QyzW+Om5Tvq$R~kE>8mVAb)bPt^gK3ZJyV>2jYW8&smX76^2q`0 z(9))p?IZn^Viqm2w7-m#VsDf%tiRsI^t{*!+EwV|P0y`V+Jh35O1;xT=?o`W{3@ z0<8Trgb^gML~5Fw?RZPQ^DA`K8ulNYW<{3OfQU?Qwr+`?zZPS3fS;0Q zjou#XSYEJj>7d`uPmnd<-&>`9vN8kxFs5M-_?{ZD{6v_|;Q2ld33Sz1-w)>zW;SB9 zEq0#>lWJ^LI%cC~`JZ)YG6}~NQluTk88q{nBKWS!{?RG+fKF#~%&RhWc~b3@_3DZi zI0ZHl*Kq!PsiNJiz{Za1Lik)d(rz_N1Cyy&bD)$25(yOBM2P!a`5C++O^1Vf=u;N6 z<`mW!oa(f0_|g}~8UUd3eb4D_rCl^>&OyoeHj=E;1t#cm&JHWOoV$ijt1xngWuF1d z2ToBe+I-X?CY*8+WJYtj%w9>@izR1%t-JBsI@=VRlLO0s%Q(GjVH4u8lB{j4r53KR zOx*Z>o&1ChaW^)FTGh&{dUjLLill4j->@Rum3*N89}gz|{>fkezMIvWb)MWku)bJm zs4s@*Yb46vk2J8#JFgI@B+B1I65{oKKjcV`3Knk&Wz}d&1D)QXThchma9Xk@tMk6s zoCTE4n@6EW1@p z_*HTW-WlQyIG>9$Yn&yP7acsM^otqoQc)W-3*{E8Kb-U2 zsGy=QrcP@JHE@y=hEL&A}oB)g$Wkks0swHX$oFoC?rnxrI~zrhlH_l@Gnx%T$$ zVt-4@9YaW)I-4k{gsy6?^&;cwT&=g@mX8t$?}8h$-&BSn$(54Fce<}x@|wA~@cv7w a`~Lyj$ITr5*zXVk0000Ry7aCdiihYt-J26uM?gS!*l-GjRXcNpAZ`ESnIi@m6> zcBy`<`t9zDP*#*eL;i{k1qFpBBQ37_$s_+qi144Gd)Tb`C;Q^6DkTb4Jw<%{nGmn)0yS*ijgI`>Zur|$V}P|LIpAh1Lg=@cv+0=jVA8vM zE{_=1h0bG@1*=t@jDN^5H|)+8Py$GfV#1Ckd8zQ94CYHhN+qnC{l@aK6KNrPd~)VK zV;r~isQ82CCmer`B+4}acJI4c%9sYrsf84BR_Fcr`o_?#hwqUhCCyVvikXJS8%epn zufW0(uP1A7Zqt4Jinh`Iw5v7LF+7l)#q9B&C0av>K9;#60!W z;zGZg8r6wkBAI9CPXn}V|9WQVacM8DvbtKL6zNtEgQ@J($(P*k%yCV_zUxS1>#d?c zZ{EuFT9wHLm7AUH>Ijf8*@|mzEGiI^n>2H(GXATTf$F{CE@9D=O z>OOKi4C4SsC~r_Jfkrrgr{hD@E9f^nwLGR`KhLTS2z?`eFV}EI%xfN@ca7gha)CJR zCu_R1(MskzZr-uXFxDS545-UaOLyYvcsg=tcCP9v9aL_~eUc?NX%Si)%qDo1D>-L> z^!J8|S~6U%5S^v1)T)aQUAYt`cBh_~3(GNnO_M3IX$$c zGD*JwL-&eRGV)|5gY=NuYw~?Eyv|4I`e{Q+th9zLkqLkYeFOJil8krgi}Q8UaQ$3X znFaqpysinW=|MoI3Fh60^n*Y%z3W>32N|&G#4Xo|O7KL=@@UvRSD0`U6aCa_y!)hi zjyQI&&=w!W)?s}}-aVERc^YN@Dp+CK8X)HKF*$0Yai)EPLjNwOF^D70%5ty%Ek2vm zx%d^>pZPpct|3?71Xy?rHD*i|ms5N5y0d0rhVDq)PYT 
zW}2gkuqoos%S_({?>EZB!KTf`6>1Qu%;EGsb1c`);^7;jObr3gA}TEEdK6NF&XWAt zyPl8XL73|1_@JLFEAtD=k^Z73T*No16m=$mg{_|BsJ*+$D1Bu|cv++KaOhA^!2x;p z5|e!F2lQ#6n6=Cn07l<3Te>91XJaBubZncnJ5g_moyuK9x`Qn+?Hxr1xkAHqTJ|8Nj(d)?BE+2?zDcC@x zHl}Iy!A>ZlR0`h1Rgc9HT|4$~qcyD)LoEoxC$VW-vD3^qa;+)SRj z&mt7fQP{e)&`xuRhH0dl`}@dFp!gA1m#i>ddRv5iILDp3=jep&u!5_hJQQ1X zqI3Engy{a2e`B&*1G2Cj?4L_0w7268`6S1@npUIRYjsD$Sj`}dH1vBn;YU0gI zj(*+*PJ(E1em(#Z?gW(`4&}_vO37mN={1Gk^0sN=qp3|Y~k^{ReTbQk`P?+ zlx@Ywd?EcJ7M)x}*@rIVYL)tT5D0$`CwX+4keK*d88!HqEi8$p)t}}Px0XJ9@aEI20-}( zbWuO5ZYJs=;OH!dink`1X#dDX5*LtRJTTB{vl*0gEdA@Ihe9KisGj$Q^tdD~1y$RW z#sI%|=!-*BN82@i3&exT4O4Aczt+^Kz__MrW^_6oPGWIkWRCvDM_|Xrjg}i6f>g09 zy^MEO!=rUUPd}=&04=Yjj)>s=3{kaD$wv6X>b?QL!5mih+^zdNcE$1{jh%re@g80d8IhDny+rcK0_WQa)M|w?oCjU@&|HBaTO+jT1Esshgub|xO!&tBL^?boJ1H}mOV$h%f?Q2$ zKoS@KtLCny@Yv2S)l5ni3I4jUk!Cd~wN7J+UBlR0XOFrZ^U<4b8bheIj^`yt+Y$#S z#VH@cLDI<*fKEru*myD?!uI2G?<_5m6H~ecycbu-r?u~Y;xrsguTIfN{(WoO9b7c{ z)-4a)Wxh);;clcrh3*;=2Hwk!uYB!EQ`Lv&W+cSF_%5$}^p2B@>4veUlsJeeXy_~B zqzB-7TU&tX`h-~cAGM(JC=C((qx24*5pj{8v;bDe!Q=6P^$NM~1B^6qa4$-3tV{jE z?(|&*4G+0qB2i_$Fn}JIq6duo=;+}^zf=ortDFeFa9*TIefI4F6xXQ;UKMuX+4G5w zx~OMx#-#+_AR9dvK1PP7rA-MCFcEg5^Ty@IK41A2A3HlHWQtYIrR26Ju+S*q`6cU$ zP8Mo%Nf;yZq`GV+)Q_+`-x4UBSSUG1iK47-gje1@&=~LhUKMhK@@#(KHVh}~nYE1q ziy8(!sNlcQOjQHkamgHsD)|?xmpt^I8qsPTT-HBl{k4;3So95uODA8|?SXdyxDd&c zXUyHDU$w_dEhAbo?;xRVxpIkMuIMho@SMI8 z`6g3K`TGj#hJ<-r9WdA*zbY2~M}!T~?HyQ>^B#$pMl1-s@q}nTMPF<1HkPng65bUE z!U!5NUt=ZpLl0m|zq#3UXk>@$AxxJq=9Oq0pC5bi&9;7}V?(P5*?LGH4kNnXT&>IzfCjZ#3mU`_>~3z(;S)PK82Vy8qlYa z0$Avvk%+*o`W@sAcRR`PA39y>$HXv#k0b1WN)W54Zs12*-DcEMK6sks(lGbXWzRcC z2We=jK8Ah_rf6`*Nva44E!@E^GKY^+OyFPe1DcniyVMqtQ`8^UKNSUw!$~X$z@zUD z*e%aSQ!$~0vIfz?Ui{R}Vu-z9=+I~1RM!`@ZBLm32;qjIj()HH+rT3udjo_dTUY%#u8@S3IQ~U?TLrx6zuq@mcG)P`0Q?9_6F9gU`;E~4sq^?Lk!@^o^nSaY?9S9u7oR;}Xx69Q`#q$Z zwAX>3620B;RFig`wa-$u4%eCm zRiuw~ot57olAT{|scASS_|-4j?(<=W-Uu$Y=J5_=>xxV$7ME?{JiJ<P%)Lqh+ne>21Ub+p4A!;LlOQWVJRLe-QPgr`PU9jW6x( zATJ+KcYyyO-ZYCV 
zLJo@f!8L;6Ov~$BvfgXcCp?t1@O8ByQE@n@tKR@Yl3r0~9zfT(v224L6n}&Ri^42>S%!W} zayAm;SHxOA#woTaSh8z2xI;hFR&>E~urviWYa0}QuA2R8!t_kYfYW?&q_*miy+oP( zO5gxY5TY*+p3@&TMJ_mv-rr~sD7ow}Qq1Zb`*)@6m@|Tij>~-{t+|$#bnJ!(1YY`O z<+ccar$}i=sLf#z(9QWh&Zh0J`$wteje$IG5ty0c#PVUkD)&(_#3gvs6>sHm8oxUB)%Za~!J;eqgVDN_*J8sku%oK$2uIvd#>J&Im4~#DE1UVv8ei#`E z&F&V&*%r48o>}$a-lW1JjvN?vo;BbLoM=zg*F!qoAwvqs)LEz@<1Ai&;Bn;Vud@y6 zR*ZHu3>*&~j=R0LJeu*}N<-0$?e>x039tbf%4;tx%5fG>)=Q#=BhHjuRbTR4x`Y-r zqi2^n zgNK68UibN=g6X|pVog|%FN=4`^dY6gayWr#e(rpcamqeQj2w7kt8 z^PomsP{SAORD0;9l7N$|f)b~Vi{HC<#|Olygyd}5m{N}ikccL z^P~jF`cd(5C78ar+Ts>{pOqQcsa_H`EP2q>OGoexuz@nhlEQq0frm*XVR?tl0JiCT zrE+Wnt)jl-`{t~wiKm`^XYfK|xp6;bh?%V}jdkn@wNSA}E81tbN^c4L)=5HmkGlU1 zA>wxf20j8f8r>t(a$2%K-g(Tf28HTAgn2|2IDEPIt-k>4X-Q_P`WTPX8WR*)`Yw5{ zH=e1&ekkQcBOUz8lM(91Xqc4GX6%n%H69M*Ne5Ep1Rdx7q!w5H-uK#9Ug1oP%@{p8 z0~ear!`b}-zJ*f)?n(baw!>h45XL;igpHG|AaOk>eN9l1Z2R0UlTqq!!+7YVsbnrL zFWO*p1c*ezkJ@$89>rVOVC~``#Cf4duG;B;6K%J+pRNUufBAFd#K>54<9M(LCQk(; zBY&If{>pi4urK(iX$fj5mc++H5pq7UX%d=DpScmN=vgb~I%~h5eYYbxrsl8w#uJIs z$KBT7UjS2Jb>ZSJEsWXncRXZ)dSr2$dt52Z=rF}*wD=aAd*v%IrrPXyt!eOg6C~tB z@BZS-0D+H#N)XC!FYR&%Ig9eP7N8cmF`H4@kMrzw~t)q|9|_U7Cmw*Z)Sh zSZkRh`S4q-Iv(Vzs5~;W8;T;`46Q?1F-M#_y|Lwb>U?b2j*cXLL44s>_MEWWq#=Qg!IV%n9}6=?pqc%hR4aVqR<~2GOQkm;7BScHT=QAjE7iHN zxAl|^4j@B@BFE`Cr>L^yHPR7u8;tr@AFc{Mz^)K zvg;qj0-@fZ3x*&DDfq+A0_YU7LK)i)Ycr8VSs1s;eLrQsx0KL-`v~;< z>o?{zUhY%_EYI{Kh_dw2+c)M*k;<%{NKA-mIu0FePSe;1r$4uBIbB#&-KMpD6C-ij za`k+8P@5|?05co-AE>Knj>IT6MEKK+saD@w^vQ?`SP${6eD$4HF!5f7yqn!Pq2quZ z-nbK#UxScbbs>RkGy)DUHytH=%wZZN?H!&GAg83tfVPG)tq;ESiLZe}YsIlVq6f3%)AIT~?Cz#J7Km&l@P3aGs~@2bMqa~6CY z#4%=(glIi{ZBFO5$GjCb;}j0GB6V{CO5$=kI7V;+ZF@XyD?0LhxfmOnHt+W8FTi zMe?np%a%~Tj*k2CRX!I}?ex+>eG-o8=LN=Q^Dh1)b31oRg=D5+U~>37*Uwl!xuLkF z18}}67fL)pWTj1m$M)?m#9xCj0%Y9JUnuHd2L?nM6|5TrNsSsJYjm;X(&o`W9(UWu z?M~Izw8X%;kb`}G;LDQ|yU^50nf2xCSjQ+ebzaXfO#|XQK<`s=oNgi34y6l$zmC5V zzFd!oo_TJTwcBq2b7capDwW9eo;Z`(-+_*m3PhvQhNEW`$-`!5vRA#GO?QlpNE{_R 
z_wk=b>lV|(Aj1>3p?sGWu4==v%4saiuQzi<+G84$K+oLeW&X-tu`YD)HMtRg?hN^S z-g5T$x|z(VC{?9Gt7|AiQZ1&+5*496p<_v;5Nd7qKRoX`N?Ao|a=O9mi6Y;nZpf;) znhmkJRBGdJM*-M`I!dv7i==K)tUT*ab7->XYO`9V`>9bhaRl!;rRrE0VtLBg^wX9m z+(0_-`HDy5*@U?WO@E%fhPp_8Da+1PW!;6z;FtJ& z%_wE-!4RrNGl?aBUy%=vNNveHE54Pg(0CbVUikgT(xE@YzdN2CqyT1ZA*QW7$ZnVe zw!XJTa|UYGAO$S%d3)ap)1bsooHc3t<)R8v{ZS=zSR`u($LHHNFEb10`q{=KK3^j` zEf*8F@2hCg`NQN!yCo?|k{NZClY8WDcPX=n5{c--A4n4foyqOu1*ITSRi}!UDik4Ze^$0FbEyR0CA2%TmLS2O`7+ zvTs5FDP+DG+Pvj!r4xj%hqyLZbh4`lWVwKOVW#WQWv)$My@Sa2H-?xs{I5x)*_r?S z2;pRb!hj+X!+1oO9(9F|zYu;>b|k~B1h|L2>u4|kT}Y1;k$N1n{eo1rR>|n8^lM58 zYZn<|WB?T@lvSOWAlL@Q-aILn!<7*4Hpl~3ZQJC3%6A$gFUK|5+1cm_875nZtEsxN zIx;c4(sNjMJ@B%+is81bcHzmwFbHRoQKRWXBkJhf^eT9sD5o=Wv~I!j^oVu%(U#5} ziqTBMqUT@mYQIBPb1462J&*Zg!xYnG1h17N6xL$0xJ~q^LJREeBikDl{8L()UEEMhY4NS1!Uu-_B&=e}k z%#+DPQG|f$?fcEPGZdq%pY?~A}Tg^vv$*m4l%rpA((Rh_S|kG zV3L!tA_|%0r4d4I!^yPpJoGV-s`r%QlfF~|uX!tqGxSeUPDB4NH@EF#tYWrV_p6>f ziP#Q}Py+c+5szPmsaJCM1?hUehF6v|h=-o6fTAlgM=TGAR?iN)jwKsLnUQD=D&&G! zk|2~n2zlsk*?_<&x5gX}B0QROwN zh5M}QV5Qt-ypGXl=|1t@9PInk;RR4GZ4q6_Yuf7M9WY@~nz2z?-5=&xEXiiAJ2;YX zA4%jS%Y5nz(#V3?(Pb5K3%k!b?TnEPtgvhDu2}XxDS_D$-QnZ66;!weH zE4ACwsegAmu;yfiB-+_v!5+A6tGjA#Shh3T7}Re!XMQkooI38#XR17kC}=mVtT zFjb7!&P`1OaGcevTjXUjPNl%X7vUrX_!Jr1vO2K5@x}@EsEaWKAl1qOL0S!=~%g!#*FM5+GR+#Z1lV5f=~ z9Rb(z&4*u2(+k4*iH~s=buDXA2zD;lvZ3d>J0XH1@5Z-@*#y$GHq%qt?;TbO$_K*$heED4K&yc|}P5 z<7DOqy(UMYS(Yig;q9^6m7$Nq!FFGrhkO7+bk|_Kg#)`@xROdt9PS=}T!x7vhhY6{ z#E=ha*PpEQv~9)4jM9N@Gm1X1+c6sPR+&>MfhxerFP2fFUBpm%4lhty`L*7rp=wf{ zCb>w~y zVK32vM1!ZRn+psdNJb#Qd6PXP))hW+Ypl#ah z#XI~4lM9*Zvb4=G5+Z#G$rLUHF#^7XQcurSJe(dc4eqk9pyiVi2@Z5=BiEpRr!~~+ zDkR4UYxHvE3P8K|+M^sA;(T}%} zckh;u97rhALGTq$b+?Fa{(^7btn+DP{xs!h5>Xph|3=ijjJH+88aO*dJenjmw{Nr? 
z27Q1x6>j($$4Rc+te&sRja=ikNN%>hHnG=DObS__zZp=X`IYn!o1rO3QvWzRq1P8D zNaCV8|fZYG>P=5&5`yCr$3>gT?NtbEgr9IKf$xzznqKTCb_wD@R7C*z79(% z%BmN%J&R{n;L>*~hLB@tmcR;n!+L(=2E7-CIu*k|b|MKOw6vZ^5hAy0nCWWf+EUFk zlfI?ePxC4ze6swaI()%kOv)L!mBr-tfDlV>F6svP4aIph@9gPJz5|Z6ALoHN z+BQclAPN+9U0UxJyG{&I=ZoOS^wmISmp{cZc(Cl{V`no#4$RHm5}bHIH=fTyb=y0) z)wd74zNza|_pcUI(UxaVo~}&-yN!e#pP@AM(yIEABhA+{|GQjC@9>59Q@hqIwXl@5 zrzzKNddh-{w0)P+Z_ntFzP^WF+lC7vh;&43=Kbje>JbQ~y?l3=GM@tWeXA&=ydfr9 z%T9&Ga@I$T+#9^upm4ub`K!(Vr687Oeo~|uF&>S!VFrU9u_a_%>d=~Td(J!g=|7V= zV_~r)&ix<|x_vuo?U2|6V{{M}(kaLjED?FK#h0JIsvV0*JX{1Gxvt*WAq+uNLz(W& zrtinL5mTw^@3``{OZD3v>FRizgpHV}jBz1Z0#W?bQ{8k%^6;MsVl#UlV~B{duUf!7 zv-r{QcWivuM|3w<%d-R%P%hvmvdEd$;n~B9PpXE&VJq6R@*mNzniQ(LuTKZEM9Y8f zUq8iX^wN36^tGdboWWCUJLYW!{*Z{(#3P^=b1e*8%c`FLH+W=hqgDd-`bqGXd|!i& z0n~|4nXy7J`iY_+p)(PA`1PGjZ~;1McHBSz_KX56m80s|7>r%d%zOE+bFs z_R%j-Z$YB=A{8Jc{=a{`{4S6hzpfaM(Sz2kVhH;@!>shQtV<4h9=zI}--<(nqoesa oDcg~^{vUo^|5p|dkQegNE;-Jn@j5>EKaVpR2}SX0QDe~m0r+fWD*ylh literal 0 HcmV?d00001 diff --git a/demos/speech_web_demo/web_client/src/assets/image/步骤-箭头切图@2x.png b/demos/speech_web_demo/web_client/src/assets/image/步骤-箭头切图@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..d0cedecce1b6118fc5c29980b9a38435e112635c GIT binary patch literal 5515 zcmV;66?E!}P)Px#L}ge>W=%~1DgXcg2mk?xX#fNO00031000^Q000001E2u_0{{R30RRC20H6W@ z1ONa40RR91QlJ9>1ONa40RR91Z~y=R03;pri~s->^+`lQRCodHom-4#$63ewoYTFt z7kgRTJJU0}Ytj#P6lD_>f?^Vggdrpn4nA<}{q=*-!Tnqxl0~U%b6hr_K zg+z%2hM@QXu##02*%5M#u@gBV2%9XPnV#Ks_UgUNbT_~Mzp766nT+k3+3A_?>8ai6 zI;Sq@RQ=DtzVEBA>U7t7yb$R-%8m^y$;54{ZE$lUpOx z(5Cy1GiT1!8;wT&+O=zqB7_E@UY(npdojzh?&LzWzMh+r&p-csS^ChSLp^}Y0JH|4 zhtHos|HJe1^9RcUdk1SGU`7ID0|4t+u3V|_-@m_6C)~JkqYjYuJkRT$PDd<#ae8{X z4aOD&YhG=F6D!IF&@y4caPI+VP0UQ#kTioLdfgs76C%~P0%W(_HQNNBhPD75EDlQ# zkoAVMXU`rds|0zl%%mPyfIM>KNKG1NKs{o_D)9{`>gx?HbC)tP>*@ zOgk};KMij{b$*B&B~p3bEhutwVPWAO&5p{92|UQ}z4zWy4$2@Ga4Af2prO5QUz#~K zFpX9+SXo1{TZY0|nM*T>f_I>dPD~(%?s7#&9zJ~7HYC7ggIXIfUAPg+C=eAoov77| 
zqSj-$GyLD$ZnxhC{t0=Pn}iP=H;U+Zy6z;z8ro$)2W8|SBUK=cZn2e9z^z-Bmh4m^fl6SwAFNMJy$g2!)jFpEgTJjyVWdJViC zb#5Z<{^ZQe%-4wp^R5dOWh3K?9NJ3>>Ym0{}YZ-}jjIvTgn>3Xs zjZba>gFW^9@x{e6??Zl0D|fdAE1SWgt)a{8#vn1i%l^uSgs48ZTx3YcVeT}UL9uWT zB_8U7B!)ZSc8MhpI!&;@Uz+`a2Og+A#PUXCdf=f=+t>6*#$nyiY-VL8$1@Z>K^oGU z-Dsvm-9pF98q(daW@{4tdy+en#fKB zq-CV8Qp@si*PiaK8|JnAlkm#>-9eZ&^N?_V-cw@*1VhXM|QiG zlW1s+ack^4hIiM#N^lH@dQp=MRYoiWZIO?e-|hh+YQq21M@I0^=(LxMmHGEx>1CqZ^9(EuY1 zTp&ksNO%-keyr2&{wZ?veK0#ds&vQEiA{#ooSX4SM*9&FhX=@vloctNh)#x;W@Ocl z47VlCc>)@#8Ps?l80q>h#!>Mr$jyi5=fD4&)ZS5wZIGu1ZG}J5*D@pF4?%;}w6hr$ z*=Y7jAt@iA#Y&k+oMnQYDrO}08R9$!CnGX5hWPbQy|B3Wi)6`i8kvv}$sZ}ZD9FiG zT^+O`rP_s?VB^1Kq!87qWhD6k%5`LmhJcKaqWa+C;?kd^Am>v0PRCX@t$|HcYS0$? zBY(gkms~_%2BM0r{Gou1r$JK#j3vX|c|FsW_VCZ(d&MexBj}9#z@tAoHgv zr-;MzN7`>G6A2c|Kj|1anJ#L2S;!P3-KJ0juUk0IY7G#xdBjL=Fz;eY+2apWiI_)% z%SonG_iLTbp^u+9aroR$0ZL!`VTk_7coQ_akBFnuUfvzXh2cyJq;n7>B}S!&tlcFr zMaU2bW;*+)i{hn6=G*Oeg^Jt%X=O7wv|-2){gJXI2||-G9?T$VXb=M^gDj{x2zI0n zql`>IyRt3~2FDyKW?7USawlm}?8#Ub>Oy-ZkoWeV9zq08!ES#P`sVs8=#lTAiGzfa3ildh(g1 zJN6l?E4+7krSoNs__wF*?TF3D9r8ztQwACX$mUOOdkCPw(&i5jN<%xEL;*KL8=w)g zdQ6I8XW%7=R6D-X={`KaxcJB0wF4wE(k+u6iN@|fXmxZTXt_abt!d?uwHswYNSCqq z2U_;09-Qz$Obs<|OA{tpIE+V-(-7w$#7&Y<)_Z7!F%>?XD?fx7Gvqm!oe6LuO>v*6 zg7mki_8*mFtS0-Hl^q$pqOBqv2LbIxg2a( zihwSMxpI~0gpBTh>ua;TIJL0Q{ut%tpczW20112C3ZC#z_#=f_oUUlz4ZET^^wSlo zl?Hm0g1rL`MO~sSpjlH{E=W48A*?UZLr^* zB=ks@GEx9k%Sh-)0X+<*yQ0|YGD^;}x1flgWLES${q&G~v!4PoYqeT#P?C|{#Cv~q zakRIW7<8D2rQC|}~0YmJH4@5bfS{cDiGfbL69;X!5pxn>E&SzL-|5cseY{;V> z>FaEEe`IRJF#M4bJWw;QEdebfp`#3PDIa+5{SoKN1Q?A7xM`ZgOCl!Ap#Uobg<7 z>>b^SLqBpdF*Ke+`d_$w<;pj4fc$*IVxzGQ@>FTm{gJVqVPG?4J-n3&P-Y}>nVlpN z%Sd;KjAV#MR;GN)1cs_uHiMG?B8up*H#5c5(AHHPws3!>=&44=XP)kWoC_?me6#TK zIUiCu8m=0`oa$H?(9q^HDHHljrbn~#g-wu)!ia+Uud=?g?8wzY83I|c3%PIvm^AMD z0cJ(d0On7vuKP;oRyM6x%MF!`RN?XPN78vPPiWEsw_O;~1)Y|5B!H=Z{zxDyLB|Ry zO~>%9grzp~J7KSVCSpu07*^uEymX$8R6*<*MKNO&reHQ zogNA(Jr~6ap5fdLG?EHgvd+9OQ`C5woK*%+;N%dWm`9sAq_D8BO 
zCSdHWK{dJp%z-$7l~_uZn5XnqPO+XxtgOkwJ;=_d78aI1NS-l^H>sJCyUia-=OiOz zA9d(X3*e9@bObG-uyiig@uOaD6ff>vUru3JlUZJ+R=7Nldv zW9N??Y&_jDyVRxFtpt?1nK~LVF3Y4#1kjC%^QiK(nqsm}bf3yp{Bx`Qkt`|1M;I+5mM0`S6JC9o9O+KZab>Z`&rP(;c{WPB!8r| zu07=TN6K#Ga{SS4F2?YcswA0C>RgRsnIs2K5!1|OaIVcwUiE$L z;qQ-R-V=wDTDz70NDv7K=o9n;Z~?ghaFDuAk0oKhdEMsO5)oL?uY&M&Z8mv_x6|F6UoK5)D4gScawdIeL zX4W(th3uE!bsZ^cvr-!bSS#SAMV1WW{z$)NquI1PiCT8>5^ESsJX);orRVbkJRRe3riO2kae(=E&3 zNM^AgLCqLKd)7P2R2V?lOg879;p`4Wok!?Vqqw;wxt^w|=O z4IWt83=VAs?Zo&a!RpIJHuc8Y+37!nh0#gBK?GxQ4{eUE^C!h0NoOwLO4e@>Lq3w?}NLls|tlj>K?s-iV5Fyt+C#1>qk_taxQg-J_6nwouB07mlnKjj%-^*6tgC4tTL2L2VINr{ zJPEn{g*aq2)ANg;RFL@R_G*Flp!2R6cVckrhW6-&Oxmbqs;O?8HdCtGZD;l3caKd^ zKccn{GgKJ4oBWZ${rUY<``$XY4It_P3CJv&KS@T4V)_OFg>l;-$vVdWA)$U5S@@yV zt}b_INFSaerkTy)T$|xCUj9gJy6AQPuG?98C)2yH4hgT9y=+BmkFh`Uf02jx@tx@p z96NUGrI+pP5Hl*uX59Rd*cf<6^UpMAr~g6&Xoxty>ZNUvr&43)kNj@EUi=!e@Dn=? zqVUtuR_rh+BUjszqvDUmv|7PP`m1Jh=KUDf-%f4c>DbC9Y3)(>M`ATTi%$8Cxw)CY zg@qe-bqy12*|sD9zBUsDy<_*extW=N0zjGRBNr;lM*c`GeQC!o)4}llk<1$O-b@VP z`y)Be8S88UjaycFEQD#}DAORPX?mgWkMzujX^6~K&0_Svwq_rCUz-8SWXF>fU0mywtm<_4l{sD*snfv@W{m1vBeibLcS^?fZJp{nlro$a zzJR|!QV&d+0if2kg)>iqn6g^j&a=ya`1dS-KJNRB(MKK`=_aSSc4~hDF`W<7ABiY_ z^76%tzdw#33ZIIy(Qs9w0F)h>c3LP+VMzNUFCirU5dfbKcNx#m3Xt3GkIZV{;9-4< z@7cXNp78ZsT>(-v8z~QDq<_yw%0}F)&3YG>mv1}*fZt6Q9`{&%6_}g)J)6yZe`KETdunFp_OGDu!!K!! 
zJA$$O+vbntft}~e;Xm7K-tjNH4MbrnMwU~=t5>h0PZ|YgPn}`yUsythH4Yp&;9pki zbaF;*M?bex2NZn^w7;s0Ys7DT);B)aS+-eLtZ#r0U&mDx&*1Xc&kKyy{CFz|Lq$d= zgPacgeeI;RH@iO)E8zRc(62H_UfRk=kK(n4;*Uf%bpi27rgjgr&wAH`s0V9h!mdfVJCci(D3DQMOnx|oB`2Ch)J@yg4ZbjK_)E~)QQY?HH zkM(Ilv~y#G`#RL7ZQ37+uk1xO5j@WF=l3QZL_Hzfk;Lo%Jsa#x^hy4`NzumZrE!59 zO%rKW4)*2DsdF5<{G?Z>V7{$$Bcv z7P!zlcinZ@Ri=1*vT!r7${)#}*XB1Px#L}ge>W=%~1DgXcg2mk?xX#fNO00031000^Q000001E2u_0{{R30RRC20H6W@ z1ONa40RR91c%TCS1ONa40RR91cmMzZ00`n$?f?KBzez+vRCodHT?v?6Rh2%ky3?H< zLLh;J4v9&K2_Q>E5)dH)1`q->^MS%RE`!l;SY`o(QE?nTKM{~c1(6v<9C5~B@zWtu zVNimC5fBs@z}Uo)kdP1q2_c=emwNO6r|x-u>!qr?mRDU>ZJ)2Z?!D)pbMHCle|M?3 zRUiX^zVoqg=$}NUrvsT0re#LJRAd}n8-;XKifLL#Nq~P+C>@fLjzFXX^&Q}Jpng+R zAnjpF+Ebylj~7|_cmEOWR`~}0fRBww*DnlbcBf^&q-7o)m>mW(381CS8GuF*1%O7W zWy!|@pw$%fDNA1j$BV3m_-}DAJldR+WsC0&R`~LbkT;*|+SVIC8SdZHE$4=z%tyfI z!`K6fpAnsi%FH;g%sHzwIZI2`$4sy>EwT(FeOXhpJh14F;7x0zUQ>%f-Pw1`KZLEj zcF6*O`Vkm+2GSvMBXuM?^!$8WE??Q#@$>oQeLm#7a3DQ|0Ny=pm@NIrTZ27*eZA!C z4&WO<5gv zFZx#S^j_`OkU^K~f^PWx@MLUsuEIz?Rp(n+hP9ZutgA*}FL%lA;mEJpO&-tnAC|r30a;t@_b_Mw;caD{l*) zqP_i1ehmAi@0%|V_X~H)b--dFI_&MAIilzit-yx@nqg;s8sHJLj`guN)7(3QVe*+P zz8Y+aZ0?7Q#<3sz0Z?J^)l20fKze;TEn}@?1Il6BI|9Kl%q*py9HBnw4W%E*=2S`+ zUw*6H6$F8~T-ESfoU7W&ZC8YCdv?oz0kIR=kxjEqNfBrcyh1$IHM%~=^W&{U@GLbz%}_V!2ZTP(t%f)sdec$+ z$!F(X9dyS+kx0?vmPljP$nBSh6ZfR$et=F52OuKElkjdj3lIs1~EgD0)4N>jR% z2;KUr@Jtx^1AseRgKe$abZaU2wy81^rV-DR(|Rjg>dUG?=3}7fBZfQiRqDJ;Zc641 zSRw#_V^Mek!u&m~xy>4GfT4Jx^(gQTjCh&sy?B4_`hs(OfqeL~8-sgX*vd{S2Kbwc z^<452fE0lnZJC`}-r%!MvDOe+j&>TstP(PV`iMjz-8h$h&L!7ZHj*m>_?w>&=b(W{ z0Mu{?k71;#0cR(r(E)t^4DaIvlrW3LF7L8YIlc=v5LIqafA9F=^yZLIPW;4I%G+Htz9rM`SA`RC0^dIPA0Qv9gDLbg2iB-Y)1=5zni6h3IF)M`_>XK;X9-Mv@nZsKcBLI=t z(dT$t$Gj`sX!Yd>^-GT5+?8E&ZAoBdU@2bZ&M$;#g*~zqYc3%jq<)S7f*uAm<>0lTIJ}AK409W z#-gTZUaU`d;(Bfh7wQpUn|l3eIpVV(f#!5UH>W<0e};&MFkJ7>c+se5CLYu5kCi@0tx=0eg=w);|DB=Ng-gZ&SGxmz0&)x z4)@zG@-iy)0s?1VD%sZ?Q;3&+KJx^dG^n3U{1D2`BS*`jAGo6MJzj47Y3y@;>Ni9}^z);jiiN$HQk^AKYX6xk1t2#&!&(^i~JZN|ed 
z>WCSZV9J=hwMpc$AL20wJh5c{u?vfb@2l1OJj#|5Kls93_1uqqF8?jL1;04shV*a1 zfPhDv52}^TAf_CdW~^q&bfk4`$jYug79(DT`nZWrrW_$M;~0R-2S@vvc;N6rntckM zPP;F=^J@98Aq7V0?bz%ddrKDIUXIKC=XK$k=^pthf%Ok7DkzU^r%e(6FeN`yi!xMj zO{qdM`lI&~nKcLC9+nxQG^Lib={4NWa(DmvPhV3_^8Ua5Ozx{rc}Fm&=Fb}-nmW*k zobH&}vI7sNF42nvkDybA^$yT#=<^_&b)3kNxDgkRa@Gy)*_kn=+Wu=VF8+M<2jvHI z1wrkISA6f<@MQ2Fjq|NcCAOXUK0XSEBR%V^e*GH}RE1wg2>Y>8D zSL%Nr{dph$?A{N&)sL7^E?!K8#d7s0%@8?Zflf;~TCF{6&o1~@ zL=4LH-#m=#qs&qK3#+#j6Kgizb7OccqJOG>8HW~))qhUG%)8X4U3JD}kVbryw>m3V zpg(yQz&)pa!DS?OV~Y0Uq%7`Y1#Buxzvh?%kE>vvmMXW!{q z@8^@$=O*g-vt9eyCf8M|6C=1*@xxlYFdmF7fPZ>R*b0C@px{AL!J@s8Ql1-)-1ZYp zk{yu-EO|-mPn?c5_srft*+;z+M{v^q7aM=PA?O3d(2q?Pz}tId0l?c=!E+!QfK*~$ z328K1$spKH-VOvSSE@gBgvfjTBGaGtUFRQNcvDBs;wQ2%G4w-ZkSPzN_ah1xN(Ph; zRD7_kpdsZ5_5oyB|D6mQOG=;M4#iohMxouDqCrXdUuygn!;A?TJ1oES-^2X@+8GKI zo}o~n(Yj6#9-vv^vfkC1e3JT84i}j?)hw+4lqZ0?b_3iR#t(R$p;)?bMI#umEIJoh z4N;&_X-8l*a#2|*PoN{P6VT_$izAM&e^7l3IX&B1zM91k7!pgfD+0KwN#-j^;3!a# zFwl%orK~(8NZT{Yi&nYR3B8NTlC^858(OQok(*=5vw$SE7$u zbiNvZkLW}PW|#RIa4@s(6+nd=Or!aavW;@`Su}o<`qPhQ!;M7*m`ha6ZM};H;RiNZ(8J zpMF;oKa9Eqi7DIB5zHZtXL`-H%!p+z5%I|Y)M1;cpdlo0U{|c)hUvKx$xc&ROllQB z_V1?Y6*__e&U{WSG>Au=rdE#gSnZBTlXDFwrq7G@jYyr?LDxh4^uP*a9%HB@7)Bk# z-JhZjrW(nV65h}xkmONT^D0203@-_N?w)y#w-HGJhT*l0ALHiwpUr&g(eH$~7eXfK zE|??JP@*6KbRe=mS6*dMm6SejvGHv{(+xw>2zD0NB7R=%CK21_ot-ir;NT_aApOXI zWdoN+MkAIYC@b!cPc?#@lzt;_t{4Ln-UX&)@$losII>*r_|X8@e+s9?r#GjE;1&4T zVOi&DAwF0(;`(d=+K6SH@hO{>K5vd|RPsk~t>R~~WB<5Waz-;UGwgdCXJI#pb$Yh5 zGE`D9QK>+Z;V8=y@G8}B$+{L|y);JJZ5J9~uFZUAd4I*vk3aebz(ASvwt;De5J`gP z8>*w1d;*PB8?(G?xl(=Y!=lnaDdX1(fOUFgENT?LtoZ8)Zj$k~#@g`Hg_=gXAssE; z&Tx&p1KPIU3JmK(mFVM6&W0p{YX@f~{#P15Umy2OkLMJf`KcKTRzu z${JS?IeI}6XwF!Z$<*T0oM6`FE79i_nMNd=o@sj}{wJy*$jHnHHek>?vz%hoNcIMX ztImK8mV(YYP_BF>`i%-OuLE1GD)B$z_>XGFqC84>(Ljog8(_AM=%^$U6wmV@(1Gq0 zE79Kv&o1(o*s-|K(1Hc$ zGp6V!tikvl8$0TvT^OSbBh(PjAo2(*G3SnvWK)tF>}d9i=umoNUf32#)h9$_$!q^L7>W%5+n0i|GjN2M=E*>d!UV~f}5 z;L`UvO*;~!8Nbf3`SnaZ8LMi<&+m7y_@mDhhH(c+4l6l&BXW7=bEJA@%IIiwyh`+W 
zGkYVC+$df-GF)=19Y4b&7k++?2k%4;>ZLkmiS<5XTE%%3Q*6&#>-l#V{)bH^YY+8 zCC1JG_I*q;e%7wx1zUPg6}m}#6HKdZis_!ZJ8wwx_CWpzlKtRqzF28zb;-{Ui% z)i~>&yS`js`a9mnD}0`HGlkP12+8V~MBc``I{NaTG1GXJ8NY)1z_yt`YmTqM8S*;e zj6r4ljzt0c%4Bsq%2{Dlrv9dNBHID<XnYQ@2 z%G4vY>4x}t8)vUx{B_1uiv>rn#?^vU<@i}Q*1sNH1Q@RtYUk0XM;dI3jyhTEsa3Ccet3|GVxDf0qe2U)A5lfcCQjKGADZmgJ6&dEjqcs#3* z+C&|xSp8jicVaYaWK`?=bH_G1*Bg^c{ZB}LEwRmXW&9K%jzDDx*C-vQlHiC>2ZXcQOMK&Adysvi!KWi&-c@a)rq6)RwPNR{u`I~}SQtN#`*n3r>Z)=}^4wMqLc@`HXHbyAVSp4gl2V2a|DkpEfn zEiryrE;h~urZkhVC;^mZTt^Hv8m#}QRlX}ndCHnI%$uC?E7a%Yum#srOQi!Q;)S5| zO4Z8f@Irkl#HR^)&DRd-I*Ol;UitzUGK5{9}~@G(PKm)mh|_-=d$UBDy9kR(`a zgv8VUw7l^tXKG36_h5IwepTjL&y>$W`*c)c9joy4$y% zm_#({U|EVGzEv3xRb5MwS7#iLdF4~kljD?UPWyc3E79j#$Dq~&@do#GEBn{~V&jKp zW-BrD)${oJhKmdR z?Z1IsDE=7j_c{!tFl7Nu?vNpJr!^9dLIO@CK?G0v2C^%w!<2KfD(~r3s?Q~Z_ZjmP zW-W-23{H#JXKk{q5`*xXbP6v8Cqnrw_4&E-;8jDP=YC(o z4)&laWX?YnJ+xQbX=Bv7AaWFsqat-{q_+$5+bM!is zdYy~sUVJiq8n20eH=T@=em~2>r<`Bs=lwSF@ypR4x=)V`-KSed3{T0hVNKG~5&)Py z#`#8g`L3?C?B1Q09Xont$ChT6&<$B*melgr01SwE0x4xbYoOTAo_a}74Z zpjj@%8+e|;G@ZHM4ugbo<5=N|b&?rsQY24}G!xW>wvlGDX;S7KV9O<#)?ABQc{A${7@nZLkm6e$<)Ia1x z48Y4MmXEZJe3j&{_)T~R;_(UQ)p2-AXVnPxx;M|p97_#VIiKeQlGf)4ZXJc)S3$Q<%j%Fvy_UF8aiAvv! zGW1(U^+?Oe_7iEHw}>w z6$38KGE;N`dOj@HoVdcF12_t@%}4)Wk1Ti7VH!6ryk#i|3h$|{Lsv= zjca`g{udKJ>;-O`dQ|b?(gq8$Z*Rdo*5l??FUn(hGtXQV^qsKQV67Q!srF=ir-Scs zvKH4?d6j0v8t8bVvY-(;Dvv+6TV@{beVNT6Qw-}5Pd3T&AC+|6XEn=>*@Vd={LQDf zX$UZuat(|53~6rKq60skXKNNGC#L!-d1C6}@;*$>C2clTmu;3p>)OMKxR3e?0D|MT z*I1}rI3grG9T7=IU zB|#%bWq!nZ+PiSe5vF%iJQn0xyv%Yd+wcQ_U2BWXZJQCSr+g_?7QpOa&CB7;uuC4t zNszJeM$}Mz&`@*8cm(#@$M7?(2?f!2cEYq!PQR!NH_&NsepR3ObWb1;e`koSUm5qN zPt`sJ`1?=q!li&ik3>u`!Yw9Lq^0p5EmjINzVD`5VtLAxS;3R#+ENj~>~u|gcxI56B^b%Na7CjQb>_o! zkBdD2E61Qxm1*?;2;|w_(l)d2HF@11n&inJH_5g)Dh@?aUNJt0WA}3O$!HTKowg}kXFs$Akh}5! 
z;Yl-sdrEFNrjVF-*m#`@1Al-=NQbjgzKru{|D6Fj*5_c+MPL}#=2JeHdAFqn1#9$9 z{GZo0t+J6DwP%?m($`joa{%&>H@qTaAGt4cQX+|V2XY&{vHw(n%v+E!FnCet?1QJ5 zzvjkcmDmWjhF|)1c=Utcm*4Eznpu;LKbYo^0N?gE?>u<}-k0 zeDLg1$I5IxQGk<+gH%ie@LN&a@TQ*AlLnW4Cq9DBp_M-kTYvRiS-!SiW*cuX-8%w@ zyi=Yz;TU=E^!MvS2YU-t$f+8@X4m&`2*0vsg)G8Ff~xmreKtahwzpQVU#d!V~=XC}S?Ke)gPn<5-{q@DcHMR7r<^a?A@aiyi{}}o7mOsfNJPS0o zmI0}>?{O1l%b0O;^97%(EwJWCT>xyxFn#!*@ca#{k z?Qa5{q^%#SJAf@T_uUd6fZKw;u=PzjAM5luJX%s$w&GP6;r<898@TcAhhz4YtIk;1 z_dUwyTYsi?6|{Vg|8hwg6J-H@4Dy0)Tjd0-+Xvw>QoY&0mrEi)dW>}8w%FH4j+Q4e z4c|L@qb!+sb!j)fC1FP;Z8RLfxtx9AYvK5CsGN+O`HtJuDTjCNlqq@&SAUyunh@_IN#iO`4{nC*26(%1ZT799!)0~L5P1pLw4d3tT^_w~aZu$O fjBA1~mEP)Px#L}ge>W=%~1DgXcg2mk?xX#fNO00031000^Q000001E2u_0{{R30RRC20H6W@ z1ONa40RR91c%TCS1ONa40RR91cmMzZ00`n$?f?KEI!Q!9RCodHT?>?4Rh2!jyF+3~ zhX4TuBtMdLAP@{7XkfsAKX4cwmoWZdIitb|E|wrJQCTx_0AWVJ0Y4!QiUVs62#U*b zsUu1PDj*W%6Oep>gpd&azYc_uPQTgvyn9}qy7j8Ns=KPYL+T{;?!D)pbI-kJ-`~6M zRdph709v#t9kr}m_G#;pu3nL@1RqIdPc*HwHvWm|U-+PCe|7cNYvY!x{gz|9AMhoTYF09Xgo0wDRn zf|!?vj38Dhlj|n}tYtlP?uFpu9d?k#mz*W{CrMINd_`F!g~7Itcv|q?bli)t$nkLY zF&Lpo!^KlsY4da$U|=ek$hXGDa@F|ih`#logUl@`e{tI=`T4ibO4h5+2)18)+Oo8-mUUkIPep_y|RZ^&cF!3<^aYucPPi8YF6{~H^SbHX`B4uva^%l$JH9n z^oRlY>~E%XdQ$mnTOuc5U2V&q;y}xJj5xjr)5lfb$qj{mTbtaDx?OPT7n5I=*&C+B zumgPNrRkxV?9N5|{}v;2*z40M9MIA3A<*N`lAkR|5>DsCLc`|Jb1qG%zn;ov0QWe9 zmAhPyKqZ?_%`G8aCc``@SC;4azPvJ-q0mp{L5%7%E$ydj*Fi=tX!=|6e%h+7}%iG^7-#PKb1h?|d zC-ZYtEiJe_9sg>NEP}H?N<-#!3+pv*dKPYT`ib1#v4ec{qSKRy>#cje@=XW$oXgX> zy*+Y0hT(Kpu3`;zJj-LqVQZB${u@lc8+Qz!z32JG<)8DP>1o(LUWy?* z+BQ!HmW65Xp*N~N+I7X4@p9&Ze@tF$1_+Tm8)M>|SEoBZ`-1!ksC`5Y z;F1Fc6AGs1Q79m6a#oY=LcdM!lMeatMW-c?TDuz+s{a~Q7|U6(D4n=@v-}c(9cty; z=!`?=dmhscYbKSvOq}oYC>-aL=Vju2pEsQPu>Dj=BFCP0Msg49(I{ej8%6+k(w5C~ z6K;XJyrnJuMgc{ur^x=k=JKE(#|8{z^q*0*IcMYKh3)B6# zY?c2@du1;L38gxWa?m_j-na*KJJ;Xb)-Ins|McW1jR3~`c);ZW+(QX23fq~5#Oo+| z`N-4+g%~L(%8<5qefXvqzBu)~&nH)A9vS#`&;Vmvp4GEO{)j5<8drmbdmJrY26n9E z(k5>#OK%VQXlrlME+_um=aQdV`v(%zX9K457p8mRJo$EvxJgwDacievbF0e78QZu1 
zW&m{TVAkXT1AO84(w#S@@&_!mv-4rf(gGkBtwrGD?nG za7w~ghw3K!v2Nl619?qu21fG?01a7^A%l=>sjjOAW|XgnnP)mteoH^|AKdd4{5x;K z0Pd4Z57yP&HC{I3F(1GsE(9iYw1!+CJ26>DS~xxarOC+fOe4RXQ5+x~Oh| zzkWsfF#vOl0Yz&?apPG?G#TeeH=d?j`nf;6FTIZ#$19$5UtEi0-#UpdubIxfDxD!) z2J@Q^x1{cuOC%qR5Hoo|@d?ebHCE<`H}q*WE36D#Vw^K|1}0 z?%{h0dmG+hP$J)YV#W!h7>SmC$rttb4t?G2~|D+RpHpvQ% z;9V%FQz;^xN6+8+!eoMbO{=<3oQRjTB`nM@MJWz3b>t^{nK-z@^(AG!FaKuFz z>qupu0m(Rt;42RbjFVdW4fJ2HT#jXQZnbYc2A5}!_t$S-lkU;eBdhUFRd#x!!#7B{ zxPnKv<5@X*UYze_a~a2L>1Q6uebb-+68YelKb8Ei`OSxJb@XfzfO|IXxkl3nKU^~{ zuA8aACsX;zx3$#d^1POQv;FrqxVVsrMFnx~#i7O;45}S9lcS47X+P3pz6!aEl^)Ag z<2zN2R+Fp7-;VV$=-zwwNy%?x7Zk3+NsoL1nCxhNRA5arXM#~JZn73uldHxzJqxSJ zRpXnUh1KM$@lDUdYI4>1re|R_xoUjVv#^?+3)$%*I^>W2x_$bwhozh=Esved_61H;4NF^!vxoi zGqhg7(D=G0hdjHdWuA}I&~WSTxtqud?-3!$JK(GM1o^%16ggyescyc`K0es`&zvms z!TEUBhnJaX`z;7Rg!Bx&|3W+F8*kIc>$MNu{)sxCDgcJi$+}sBlsA@Fh#=yF|4O#u z%4Mk+@^c<_tU-*2d=}7beU2WhhgLXT-U&XbnfB)$nAwQ0i2hEHoTZJ7rtQtUYWN+Yvt6M#p12 z>uEpz!)dvQ$j7U(jrSj#^zsJzJ8!cLV7!L&2{cT3I?5by3MSGFH6GRwHrixOh6ap> zY;}y~G1T}F>A!$=uZXtvh`fSt12wDuZ1+puMV7OD0Nx+ZJCpg!d_*nd)$Ko#lfBh) zfRV|9ZA!D=Y(3YEu>hS~!0<2z1D3!;rUHeF=aa|!#UWVA4|UIFDA>}^d_y0_ezSB#{fEKI z@<@sYAL@Va27UK-w+z)R(lv-$n{sg=UHjck_7Yd{Ot@xFT z^-l+$nM9rJzLP9ganmLd{1l>W#m36(*IIwxzmCGL+qWNwLGsKC;E`A-1_<}UeK$iA zev3r9VS6KN<87ES{bu{GqeJ#R^&`o09l_tfLH5T{7pexbIU1E@JrzS-tkPOB%7uK5 zBAz>|3HmJZ_@e^AG#Kcq=R|J2Ijfu5?;MBUJw*StwbNvK&>0?oQsn-XB2PR6ZjJRH zAIh`#TY2}c5P9~+EM;I#$4%yYaEKBUJHw;vvvt24V5?7Sv_DY)!yoX1ET`{iFq@uf z)IOn>NM^hcl5IEVCaXNtvyej;JZ_BXk+mYPhI?qs+jC-e3~K43kh98R2V}Nyy*rX9 zupdd3l3v!hp2zwVQ;?2e0Nj<|*CI2tPkSYW*gNwN)6mki<#}F%_4j@YzL#2X8pWxW z^~~#mI_*0(8<}ADOS|tPatwZC!t47svVa3-;7vn}{Fv?!4=TL<{UKW`u@iEp@%p(> z!6cu=`dH}emdgeiEypkk`dD(OEn1p#qC!D!_{jxq2uRuyC>(l(8eyE2Th5Ysr-8gG zhCnOOApN%$jNVOH)AZIOG{*J~4?HaL*GIE@vi>{{?8I^8WONcbb;mbl^|tF2OxaWR z5P1x1b6o#XXcMQ^n7EF=fO~2B#kYU)pEDlinb!gRozXG5EObrDmIz-KA9nDm(ks}6 zS^b%>WkPqb{zpOW$y%A-_TM+AyCSix^$On4;=fjsA<24A9w!eUPC1&#bci=nf42!D zd>RzfUmwYKsIh&@ap#JkBi2I=JQqZeeLGGc%-RKNny 
z$IEn@q)+F^gjb2OQR;2_c$NCo;kVx9*4$9=Td;O=t%aJln-K!H|MVc>0^Sy}lh7}u zT*t@3jQz*wz<1y|a7@!Mx+}^m(6{pH^*<`u)rRf#Og4zbf>?;fEwoBlnxXYxi(6i! z^v7dr`VtgJJ zJc#Mp%IQnr@)%nA47vM`$vc^Oa|$+0yniJ~W}6|&fVQ?8&xdCJ87TCRwSaaLsz0 z3SkuHjYL+x_Qw>gt5yih-DhmWltJ49fr}BR7}{o= zcm}s8c-&@nQ`>dgKNzP-T=zV^_Bzg~!_>J8-n1y{0L#~8J7bs|ko|UyBEIxz1H$Y- zg-4hyvyuMu{>K+g$MKIzb<&9ToIHYtpi+9ii;$V-P|8t^Y36&rW@teJ*Po85xo9fm z=+!#aX}=va0fFPLn1?1Ytyq(R#%dRhUY>MO#<#Q9_Vbj18@Pkev5H>bS+Tmj+(!7% zrhwopOwmA@_is6%tbHKgsGP@0kjeAmog)36FeQE9h~m>AD=!XNW=wog&U;GIVf%|r z-#d$!T?_P>&mLV=Ci8gQNbpE{AhKr`_t!@r&vxxB<2bY>o@Q0HifxYEYQ*;NWd!q^ zh2~r8<^FYrsHtEt&#&Ttw7nC3OnS@#ssILivnR{ELfn*Rpte=Qr>at+S57zB#s z9mv*`2%5#?kon|A`dw4%o0?fn5iMVXqER;QRk=P7miYS4UmhymZn3K0~xWm#@+PL0tjvR&cx`3M?alZW}7)vlBpu zVugm0VP6v|E2obH*T8s+o{kEmob7u@7DVBY0=Ed zm8J4!0={xfx9pBLI*JZRDzzL3K6o9rS!cfRN86h@YC&vIfBsWJy9~hps+w>Fw@bHR zr>J0AfdydaKte`kA){ivfD!92!MU#rFe9Q^mM@?mdqgCJ8Vud#KPX`}{&0mxh>D ztO=cX2-;8EgYjQq`WT4oJ8)LHg3nL6xl*7JRovLP5YO+?C`P~$GS8+kLr$)#phM*T9(g)}wuIh7mUU62Ks+R z+q`+nCXC9p+9+H&Lap~AmZxpl>U0wLCd+Uj`uuWq_RafIcB5L_KQX@?~l1O z3|0HBZ3FdRfBN_{^eeK?k|3h7%*nZ8;1N{j8;IJQnE+|!LVm_6fnsGA>9dTY`&zzh ztltQ9WftjcWd>+}JdRNLGJ8yXM~Y|9L(%@h)W1gmzw+UC$@&b!RF<=`bY%Vstykv; zFc~K?FkqDtF1aqtaXfVx(q5l0yK#dR6L-#9PRL-qNPjT)yC#+DV)l6u!E@)l4NbI9 z-wdYyHR`YAZ)fqSlB4rZT?cUS>j4{UJ7tbWi_-w*SV=F>girUSUo?7ojAt*#nWN2S zqwMp6f6-H=$8o~NT|4cPnq^rcNukvnNi0UpyKOEzKs;jm--`0+p?sibY! zOD02RNag}e^V63;r$@eX71P5ug8I>?p4qif%R+)_eyZ7L$FikEbtnsA{~B&z`ev9i zBnvIU^hnUhG`^U1(kc4=MGPWfG4ZVl zo)fFXnq;5n*;5K03G!Hy4>fA7KkGP{_BUG};HG%5`xg2*TZ0in;}+8}7jO+3&-hsw zB#nox%pt}{hUWF7&xh|tkKI;d_sWsfO#71oZqcsY&m7IPuRaTY8>s(k@juFb;J70b zJ=oPLn#O?6Vt{;_qLiiGJf{*_cd)t7%h0N&ecWV?2k?s?%W>zLf6vHd85Zd0JhMj` zujAuRpQE$ro|`*%d#I50P!8GfrW_Z@|mZ^p;~Rpl`+(&qzvzGxoPT0D2vhYF$WX4rr0n~QhtHhO)Xnqi;i zSM^nqPbnYyuU365rN6WGuumwx`>^CW^zz@ar`uXHt5Y-W`?{JP&tu(;U(J8Dv>&?9PuWFovbN>`tARGT znnpC91z9}enT*O2bVOJwOKX@Va~@^*zA;}{@t?I%WY0@@}^jdAKAP&$<$QWmO027Xr4jM36B z`DsY~m&i2%l02vY_HqnrUfMw9S>t`20AJ1^9fXV;P?x=e6{! 
z`fqE}Av53o_T*9e$Wz4#X6F1P>4Bs9w+fXQ+7xWP$ktr)8Ml1KRnJ0ZyZn}Z$v<>% zBm4)SURwmNmd{HO?a?Veg7Nh^AP{1l#6STJ-6OD-PZ`al9OCv-y&%Y-r4QdUz<=;> zieKUT2K`$^g(H}PyjjLe;TI@CNau&}1kEbu0MvZ&1BT_Jwx!=Z|A}08)WHe9t5Hmb zM`14B#A{o!RPMq;c~GH1H6}~7MY)u))5CKI1enQ{9!{;Hg?p# zzrQQ}TyLBF2^+4;&S1NsLggZ6#B1r7ro6~C1>TM&DDtm;flGx-&?<@%x#g~O5hPFH zs8md-<-rMYeV&Uv4PEKRZ|OJAe{l2tM;ji^8o1#ij{$;i~w{1 zT#E{TGoFL387=)r_zyGU!XxJ8H6#`r%ayt$9vNMLgAmu8Hm_S&1qPBrOym; zlPSn*>1S@s-dJ@5!~4qn4;|#by(Y*zrcO=pz_WjK}>;WS>3P#{F z-vBc?GaBf|0<69s)mdCx(3lokvo^ASK_6)Pcz5BQ>h3!Kl*Y( zD0m$ok9)-StPj83Jz)o#KWkQk-yNu*=+^-gjxyW-yi9%yJU^&~IFw>gGz)@d{Md3! zKcDW@Pk1kIhcR;aK{K+K*kgCqk{%SmG_ZPgI_8;Iwj} zmufnmzb)&d9UDe>$l(XhN>=-J*Bc)+z%;n@k#wit*W?$NS{3AaE76j#@W8qb{E^9n zW+#8G$f=Xj5P)fuBl)>k<+^;jMno4`tB|EFjQLFaJRkRY)%ac}j{Eco*T=|Rj^E3E z?*Y>jeAjaD)G&b6hgg&Een_qWP@gey?fy33QJF<^w1w6drb$6=JJY{&$3#A`@4gAX zZ_yYscQ>ZAP|7{a)AMl{@--$sej10x9JDI=T1)_Xd(nqme%(7kK61#O$*bD_23?v0 zFzW`8KLK>F06_TSNHKZviY5Byz7h0s)XV<5T$ej%;wV;?~tI`v3>h*0Pxx0c1%VppNgpFrKXa#veeh|sRZB!pq z@@BO0BK*;>^DrgjNWZzH{%fv@Qt7yrny|4)E&x#f43zXyr3a0Y0ZZFNtFo7g^L^e( z*GHp%i=U1>eZPH@Robv2xC|-4)}DtRP7lV5L<_Mtzuk~j;D8pVVjaZ}Ny{_zRfdHb zx`>+02-RPYs-8J#MskDsV5ma+Wmt%>aGI<`GBBA4?C%`g{_A{s#a)A)kxgn#)QXV0|{g z>-c!%^j}AZ`6Yf4=bC*d%P+Ar&VL`H&w33}=7H z0PSepWX(Yw2Ysl%N6+GMXsz^o@@myjWDTSj!`70q_|dpYld69oh{cW+Vsp1iTDLCU z_0=AE7aV>lFzNzOU1-QY_>@L@QOgYAqCD_0-KO~WILcjtB!9p2-ch}>WXhCebrqEn z&E94KF7hfiY907d@2Pl_H46>f6CibBl=H8vjss|&SSLFXwga?o@Hc|rjoUQ-)fxWn qsAqcHW#!mZR$|JGxcK9~%l^6()50000P$d4lvMt8C^+TcQu4F zQqv!UF!I+kw)c0jhd6+g6oCr9P?7)?!qX1ui*iL{p}sKCAGuJ{{W)0z1pLF|=>h}& zt(2Lr0Z`2ig8<5i%Zk}cO5Fm=LByqGWaS`oqChZdEFmc`0hSb#gg|Aap^{+WKOYcj zHjINK)KDG%&s?Mt4CL(T=?;~U@bU2x_mLKN!#GJuK_CzbNw5SMEJorG!}_5;?R>@1 zSl)jns3WlU7^J%=(hUtfmuUCU&C3%8B5C^f5>W2Cy8jW3#{Od{lF1}|?c61##3dzA zsPlFG;l_FzBK}8>|H_Ru_H#!_7$UH4UKo3lKOA}g1(R&|e@}GINYVzX?q=_WLZCgh z)L|eJMce`D0EIwgRaNETDsr+?vQknSGAi=7H00r`QnI%oQnFxm`G2umXso9l+8*&Q z7WqF|$p49js$mdzo^BXpH#gURy=UO;=IMrYc5?@+sR4y_?d*~0^YP7d+y0{}0)zBM 
zIKVM(DBvICK#~7N0a+PY6)7;u=dutmNqK3AlsrUU9U`d;msiucB_|8|2kY=(7XA;G zwDA8AR)VCA#JOkxm#6oHNS^YVuOU;8p$N)2{`;oF|rQ?B~K$%rHDxXs+_G zF5|-uqHZvSzq}L;5Kcy_P+x0${33}Ofb6+TX&=y;;PkEOpz%+_bCw_{<&~ zeLV|!bP%l1qxywfVr9Z9JI+++EO^x>ZuCK);=$VIG1`kxK8F2M8AdC$iOe3cj1fo(ce4l-9 z7*zKy3={MixvUk=enQE;ED~7tv%qh&3lR<0m??@w{ILF|e#QOyPkFYK!&Up7xWNtL zOW%1QMC<3o;G9_S1;NkPB6bqbCOjeztEc6TsBM<(q9((JKiH{01+Ud=uw9B@{;(JJ z-DxI2*{pMq`q1RQc;V8@gYAY44Z!%#W~M9pRxI(R?SJ7sy7em=Z5DbuDlr@*q|25V)($-f}9c#?D%dU^RS<(wz?{P zFFHtCab*!rl(~j@0(Nadvwg8q|4!}L^>d?0al6}Rrv9$0M#^&@zjbfJy_n!%mVHK4 z6pLRIQ^Uq~dnyy$`ay51Us6WaP%&O;@49m&{G3z7xV3dLtt1VTOMYl3UW~Rm{Eq4m zF?Zl_v;?7EFx1_+#WFUXxcK78IV)FO>42@cm@}2I%pVbZqQ}3;p;sDIm&knay03a^ zn$5}Q$G!@fTwD$e(x-~aWP0h+4NRz$KlnO_H2c< z(XX#lPuW_%H#Q+c&(nRyX1-IadKR-%$4FYC0fsCmL9ky3 zKpxyjd^JFR+vg2!=HWf}2Z?@Td`0EG`kU?{8zKrvtsm)|7>pPk9nu@2^z96aU2<#` z2QhvH5w&V;wER?mopu+nqu*n8p~(%QkwSs&*0eJwa zMXR05`OSFpfyRb!Y_+H@O%Y z0=K^y6B8Gcbl?SA)qMP3Z+=C(?8zL@=74R=EVnE?vY!1BQy2@q*RUgRx4yJ$k}MnL zs!?74QciNb-LcG*&o<9=DSL>1n}ZNd)w1z3-0Pd^4ED1{qd=9|!!N?xnXjM!EuylY z5=!H>&hSofh8V?Jofyd!h`xDI1fYAuV(sZwwN~{$a}MX^=+0TH*SFp$vyxmUv7C*W zv^3Gl0+eTFgBi3FVD;$nhcp)ka*4gSskYIqQ&+M}xP9yLAkWzBI^I%zR^l1e?bW_6 zIn{mo{dD=)9@V?s^fa55jh78rP*Ze<3`tRCN4*mpO$@7a^*2B*7N_|A(Ve2VB|)_o z$=#_=aBkhe(ifX}MLT()@5?OV+~7cXC3r!%{QJxriXo9I%*3q4KT4Xxzyd{ z9;_%=W%q!Vw$Z7F3lUnY+1HZ*lO;4;VR2+i4+D(m#01OYq|L_fbnT;KN<^dkkCwtd zF7n+O7KvAw8c`JUh6LmeIrk4`F3o|AagKSMK3))_5Cv~y2Bb2!Ibg9BO7Vkz?pAYX zoI=B}+$R22&IL`NCYUYjrdhwjnMx_v=-Qcx-jmtN>!Zqf|n1^SWrHy zK|MwJ?Z#^>)rfT5YSY{qjZ&`Fjd;^vv&gF-Yj6$9-Dy$<6zeP4s+78gS2|t%Z309b z0^fp~ue_}i`U9j!<|qF92_3oB09NqgAoehQ`)<)dSfKoJl_A6Ec#*Mx9Cpd-p#$Ez z={AM*r-bQs6*z$!*VA4|QE7bf@-4vb?Q+pPKLkY2{yKsw{&udv_2v8{Dbd zm~8VAv!G~s)`O3|Q6vFUV%8%+?ZSVUa(;fhPNg#vab@J*9XE4#D%)$UU-T5`fwjz! 
z6&gA^`OGu6aUk{l*h9eB?opVdrHK>Q@U>&JQ_2pR%}TyOXGq_6s56_`U(WoOaAb+K zXQr#6H}>a-GYs9^bGP2Y&hSP5gEtW+GVC4=wy0wQk=~%CSXj=GH6q z-T#s!BV`xZVxm{~jr_ezYRpqqIcXC=Oq`b{lu`Rt(IYr4B91hhVC?yg{ol4WUr3v9 zOAk2LG>CIECZ-WIs0$N}F#eoIUEtZudc7DPYIjzGqDLWk_A4#(LgacooD z2K4IWs@N`Bddm-{%oy}!k0^i6Yh)uJ1S*90>|bm3TOZxcV|ywHUb(+CeX-o1|LTZM zwU>dY3R&U)T(}5#Neh?-CWT~@{6Ke@sI)uSuzoah8COy)w)B)aslJmp`WUcjdia-0 zl2Y}&L~XfA`uYQboAJ1;J{XLhYjH){cObH3FDva+^8ioOQy%Z=xyjGLmWMrzfFoH; zEi3AG`_v+%)&lDJE;iJWJDI@-X9K5O)LD~j*PBe(wu+|%ar~C+LK1+-+lK=t# z+Xc+J7qp~5q=B~rD!x78)?1+KUIbYr^5rcl&tB-cTtj+e%{gpZZ4G~6r15+d|J(ky zjg@@UzMW0k9@S#W(1H{u;Nq(7llJbq;;4t$awM;l&(2s+$l!Ay9^Ge|34CVhr7|BG z?dAR83smef^frq9V(OH+a+ki#q&-7TkWfFM=5bsGbU(8mC;>QTCWL5ydz9s6k@?+V zcjiH`VI=59P-(-DWXZ~5DH>B^_H~;4$)KUhnmGo*G!Tq8^LjfUDO)lASN*=#AY_yS zqW9UX(VOCO&p@kHdUUgsBO0KhXxn1sprK5h8}+>IhX(nSXZKwlNsjk^M|RAaqmCZB zHBolOHYBas@&{PT=R+?d8pZu zUHfyucQ`(umXSW7o?HQ3H21M`ZJal+%*)SH1B1j6rxTlG3hx1IGJN^M7{$j(9V;MZ zRKybgVuxKo#XVM+?*yTy{W+XHaU5Jbt-UG33x{u(N-2wmw;zzPH&4DE103HV@ER86 z|FZEmQb|&1s5#`$4!Cm}&`^{(4V}OP$bk`}v6q6rm;P!H)W|2i^e{7lTk2W@jo_9q z*aw|U7#+g59Fv(5qI`#O-qPj#@_P>PC#I(GSp3DLv7x-dmYK=C7lPF8a)bxb=@)B1 zUZ`EqpXV2dR}B&r`uM}N(TS99ZT0UB%IN|0H%DcVO#T%L_chrgn#m6%x4KE*IMfjX zJ%4veCEqbXZ`H`F_+fELMC@wuy_ch%t*+Z+1I}wN#C+dRrf2X{1C8=yZ_%Pt6wL_~ zZ2NN-hXOT4P4n$QFO7yYHS-4wF1Xfr-meG9Pn;uK51?hfel`d38k{W)F*|gJLT2#T z<~>spMu4(mul-8Q3*pf=N4DcI)zzjqAgbE2eOT7~&f1W3VsdD44Ffe;3mJp-V@8UC z)|qnPc12o~$X-+U@L_lWqv-RtvB~%hLF($%Ew5w>^NR82qC_0FB z)=hP1-OEx?lLi#jnLzH}a;Nvr@JDO-zQWd}#k^an$Kwml;MrD&)sC5b`s0ZkVyPkb zt}-jOq^%_9>YZe7Y}PhW{a)c39G`kg(P4@kxjcYfgB4XOOcmezdUI7j-!gs7oAo2o zx(Ph{G+YZ`a%~kzK!HTAA5NXE-7vOFRr5oqY$rH>WI6SFvWmahFav!CfRMM3%8J&c z*p+%|-fNS_@QrFr(at!JY9jCg9F-%5{nb5Bo~z@Y9m&SHYV`49GAJjA5h~h4(G!Se zZmK{Bo7ivCfvl}@A-ptkFGcWXAzj3xfl{evi-OG(TaCn1FAHxRc{}B|x+Ua1D=I6M z!C^ZIvK6aS_c&(=OQDZfm>O`Nxsw{ta&yiYPA~@e#c%N>>#rq)k6Aru-qD4(D^v)y z*>Rs;YUbD1S8^D(ps6Jbj0K3wJw>L4m)0e(6Pee3Y?gy9i0^bZO?$*sv+xKV?WBlh zAp*;v6w!a8;A7sLB*g-^<$Z4L7|5jXxxP1}hQZ<55f9<^KJ>^mKlWSGaLcO0=$jem 
zWyZkRwe~u{{tU63DlCaS9$Y4CP4f?+wwa(&1ou)b>72ydrFvm`Rj-0`kBJgK@nd(*Eh!(NC{F-@=FnF&Y!q`7){YsLLHf0_B6aHc# z>WIuHTyJwIH{BJ4)2RtEauC7Yq7Cytc|S)4^*t8Va3HR zg=~sN^tp9re@w=GTx$;zOWMjcg-7X3Wk^N$n;&Kf1RgVG2}2L-(0o)54C509C&77i zrjSi{X*WV=%C17((N^6R4Ya*4#6s_L99RtQ>m(%#nQ#wrRC8Y%yxkH;d!MdY+Tw@r zjpSnK`;C-U{ATcgaxoEpP0Gf+tx);buOMlK=01D|J+ROu37qc*rD(w`#O=3*O*w9?biwNoq3WN1`&Wp8TvKj3C z3HR9ssH7a&Vr<6waJrU zdLg!ieYz%U^bmpn%;(V%%ugMk92&?_XX1K@mwnVSE6!&%P%Wdi7_h`CpScvspMx?N zQUR>oadnG17#hNc$pkTp+9lW+MBKHRZ~74XWUryd)4yd zj98$%XmIL4(9OnoeO5Fnyn&fpQ9b0h4e6EHHw*l68j;>(ya`g^S&y2{O8U>1*>4zR zq*WSI_2o$CHQ?x0!wl9bpx|Cm2+kFMR)oMud1%n2=qn5nE&t@Fgr#=Zv2?}wtEz^T z9rrj=?IH*qI5{G@Rn&}^Z{+TW}mQeb9=8b<_a`&Cm#n%n~ zU47MvCBsdXFB1+adOO)03+nczfWa#vwk#r{o{dF)QWya9v2nv43Zp3%Ps}($lA02*_g25t;|T{A5snSY?3A zrRQ~(Ygh_ebltHo1VCbJb*eOAr;4cnlXLvI>*$-#AVsGg6B1r7@;g^L zFlJ_th0vxO7;-opU@WAFe;<}?!2q?RBrFK5U{*ai@NLKZ^};Ul}beukveh?TQn;$%9=R+DX07m82gP$=}Uo_%&ngV`}Hyv8g{u z3SWzTGV|cwQuFIs7ZDOqO_fGf8Q`8MwL}eUp>q?4eqCmOTcwQuXtQckPy|4F1on8l zP*h>d+cH#XQf|+6c|S{7SF(Lg>bR~l(0uY?O{OEVlaxa5@e%T&xju=o1`=OD#qc16 zSvyH*my(dcp6~VqR;o(#@m44Lug@~_qw+HA=mS#Z^4reBy8iV?H~I;{LQWk3aKK8$bLRyt$g?- + + + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/Content/Header/style.less b/demos/speech_web_demo/web_client/src/components/Content/Header/style.less new file mode 100644 index 00000000..9d026137 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/Content/Header/style.less @@ -0,0 +1,148 @@ +.speech_header { + width: 1200px; + margin: 0 auto; + padding-top: 50px; + // background: url("../../../assets/image/在线体验-背景@2x.png") no-repeat; + box-sizing: border-box; + &::after { + content: ""; + display: block; + clear: both; + visibility: hidden; + } + + ; + + // background: pink; + .speech_header_title { + height: 57px; + font-family: PingFangSC-Medium; + font-size: 38px; + color: #000000; + letter-spacing: 0; + line-height: 57px; + font-weight: 500; + margin-bottom: 15px; + } + + ; 
+ + .speech_header_describe { + height: 26px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #575757; + line-height: 26px; + font-weight: 400; + margin-bottom: 24px; + } + + ; + .speech_header_link_box { + height: 40px; + margin-bottom: 40px; + display: flex; + align-items: center; + }; + .speech_header_link { + display: block; + background: #2932E1; + width: 120px; + height: 40px; + line-height: 40px; + border-radius: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + text-align: center; + font-weight: 500; + margin-right: 20px; + // margin-bottom: 40px; + + &:hover { + opacity: 0.9; + } + + ; + } + + ; + + .speech_header_divider { + width: 1200px; + height: 1px; + background: #D1D1D1; + margin-bottom: 40px; + } + + ; + + .speech_header_content_wrapper { + width: 1200px; + margin: 0 auto; + // background: pink; + margin-bottom: 20px; + display: flex; + justify-content: space-between; + flex-wrap: wrap; + + .speech_header_module { + width: 384px; + background: #FFFFFF; + border: 1px solid rgba(224, 224, 224, 1); + box-shadow: 4px 8px 12px 0px rgba(0, 0, 0, 0.05); + border-radius: 16px; + padding: 30px 34px 0px 34px; + box-sizing: border-box; + display: flex; + margin-bottom: 40px; + .speech_header_background_img { + width: 46px; + height: 46px; + background-size: 46px 46px; + background-repeat: no-repeat; + background-position: center; + margin-right: 20px; + } + + ; + + .speech_header_content { + padding-top: 4px; + margin-bottom: 32px; + + .speech_header_module_title { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 20px; + color: #000000; + letter-spacing: 0; + line-height: 26px; + font-weight: 500; + margin-bottom: 10px; + } + + ; + + .speech_header_module_introduce { + font-family: PingFangSC-Regular; + font-size: 16px; + color: #666666; + letter-spacing: 0; + font-weight: 400; + } + + ; + } + + ; + } + + ; + } + + ; +} + +; + diff --git 
a/demos/speech_web_demo/web_client/src/components/Content/Tail/Tail.vue b/demos/speech_web_demo/web_client/src/components/Content/Tail/Tail.vue new file mode 100644 index 00000000..e69de29b diff --git a/demos/speech_web_demo/web_client/src/components/Content/Tail/style.less b/demos/speech_web_demo/web_client/src/components/Content/Tail/style.less new file mode 100644 index 00000000..e69de29b diff --git a/demos/speech_web_demo/web_client/src/components/Experience.vue b/demos/speech_web_demo/web_client/src/components/Experience.vue new file mode 100644 index 00000000..5620d6af --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/Experience.vue @@ -0,0 +1,50 @@ + + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASR.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASR.vue new file mode 100644 index 00000000..edef6a78 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASR.vue @@ -0,0 +1,154 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASRT.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASRT.vue new file mode 100644 index 00000000..245fddb2 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASRT.vue @@ -0,0 +1,38 @@ + + + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue new file mode 100644 index 00000000..4d3cf3c3 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue @@ -0,0 +1,241 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/style.less 
b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/style.less new file mode 100644 index 00000000..46b33272 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/style.less @@ -0,0 +1,293 @@ +.audioFileIdentification { + width: 1106px; + height: 270px; + // background-color: pink; + padding-top: 40px; + box-sizing: border-box; + display: flex; + // 开始上传 + .public_recognition_speech { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + // 开始上传 + .upload_img { + width: 116px; + height: 116px; + background: #2932E1; + border-radius: 50%; + margin-left: 98px; + cursor: pointer; + margin-bottom: 20px; + display: flex; + justify-content: center; + align-items: center; + .upload_img_back { + width: 34.38px; + height: 30.82px; + background: #2932E1; + background: url("../../../../assets/image/ic_大-上传文件.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 34.38px 30.82px; + cursor: pointer; + } + &:hover { + opacity: 0.9; + }; + + }; + + + .speech_text { + height: 22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + font-weight: 500; + margin-left: 124px; + margin-bottom: 10px; + }; + .speech_text_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #999999; + font-weight: 400; + margin-left: 84px; + }; + }; + // 上传中 + .on_the_cross_speech { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + + .on_the_upload_img { + width: 116px; + height: 116px; + background: #7278F5; + border-radius: 50%; + margin-left: 98px; + cursor: pointer; + margin-bottom: 20px; + display: flex; + justify-content: center; + align-items: center; + + .on_the_upload_img_back { + width: 34.38px; + height: 30.82px; + background: #7278F5; + background: url("../../../../assets/image/ic_大-上传文件.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 34.38px 30.82px; 
+ cursor: pointer; + + }; + }; + + + .on_the_speech_text { + height: 22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + font-weight: 500; + margin-left: 124px; + margin-bottom: 10px; + display: flex; + // justify-content: center; + align-items: center; + .on_the_speech_loading { + display: inline-block; + width: 16px; + height: 16px; + background: #7278F5; + // background: url("../../../../assets/image/ic_开始聊天.svg"); + // background-repeat: no-repeat; + // background-position: center; + // background-size: 16px 16px; + margin-right: 8px; + }; + }; + }; + + //开始识别 + .public_recognition_speech_start { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + position: relative; + .public_recognition_speech_content { + width: 100%; + position: absolute; + top: 40px; + left: 50%; + transform: translateX(-50%); + display: flex; + justify-content: center; + align-items: center; + + .public_recognition_speech_title { + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #000000; + font-weight: 400; + }; + .public_recognition_speech_again { + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #2932E1; + font-weight: 400; + margin-left: 30px; + cursor: pointer; + }; + .public_recognition_speech_play { + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #2932E1; + font-weight: 400; + margin-left: 20px; + cursor: pointer; + }; + }; + .speech_promp { + position: absolute; + top: 112px; + left: 50%; + transform: translateX(-50%); + width: 142px; + height: 44px; + background: #2932E1; + border-radius: 22px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + text-align: center; + line-height: 44px; + font-weight: 500; + cursor: pointer; + }; + + + }; + // 识别中 + .public_recognition_speech_identify { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + position: relative; + 
.public_recognition_speech_identify_box { + width: 143px; + height: 44px; + background: #7278F5; + border-radius: 22px; + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%,-50%); + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + .public_recognition_speech_identify_back_img { + width: 16px; + height: 16px; + // background: #7278F5; + // background: url("../../../../assets/image/ic_开始聊天.svg"); + // background-repeat: no-repeat; + // background-position: center; + // background-size: 16px 16px; + }; + .public_recognition__identify_the_promp { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + margin-left: 12px; + }; + }; + + + + }; + // 重新识别 + .public_recognition_speech_identify_ahain { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + position: relative; + cursor: pointer; + .public_recognition_speech_identify_box_btn { + width: 143px; + height: 44px; + background: #2932E1; + border-radius: 22px; + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%,-50%); + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + .public_recognition__identify_the_btn { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + }; + }; + + + + }; + // 指向 + .public_recognition_point_to { + width: 47px; + height: 67px; + background: url("../../../../assets/image/步骤-箭头切图@2x.png") no-repeat; + background-position: center; + background-size: 47px 67px; + margin-top: 91px; + margin-right: 67px; + }; + // 识别结果 + .public_recognition_result { + width: 680px; + height: 230px; + background: #FAFAFA; + padding: 40px 50px 0px 50px; + div { + &:nth-of-type(1) { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #666666; + line-height: 26px; + font-weight: 500; + margin-bottom: 20px; + }; + &:nth-of-type(2) { + height: 26px; 
+ font-family: PingFangSC-Medium; + font-size: 16px; + color: #666666; + line-height: 26px; + font-weight: 500; + }; + }; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue new file mode 100644 index 00000000..651e8c72 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue @@ -0,0 +1,92 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/style.less new file mode 100644 index 00000000..1fc04b2c --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/style.less @@ -0,0 +1,114 @@ +.endToEndIdentification { + width: 1106px; + height: 270px; + // background-color: pink; + padding-top: 40px; + box-sizing: border-box; + display: flex; + // 开始识别 + .public_recognition_speech { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + + .endToEndIdentification_start_recorder_img { + width: 116px; + height: 116px; + background: #2932E1; + background: url("../../../../assets/image/ic_开始聊天.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 116px 116px; + margin-left: 98px; + cursor: pointer; + margin-bottom: 20px; + &:hover { + background: url("../../../../assets/image/ic_开始聊天_hover.svg"); + + }; + + }; + + .endToEndIdentification_end_recorder_img { + width: 116px; + height: 116px; + background: #2932E1; + border-radius: 50%; + display: flex; + justify-content: center; + align-items: center; + margin-left: 98px; + margin-bottom: 20px; + cursor: pointer; + .endToEndIdentification_end_recorder_img_back { + width: 50px; + height: 50px; + background: 
url("../../../../assets/image/ic_大-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 50px 50px; + + &:hover { + opacity: 0.9; + + }; + }; + + }; + .endToEndIdentification_prompt { + height: 22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + font-weight: 500; + margin-left: 124px; + margin-bottom: 10px; + }; + .speech_text_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #999999; + font-weight: 400; + margin-left: 90px; + }; + }; + // 指向 + .public_recognition_point_to { + width: 47px; + height: 67px; + background: url("../../../../assets/image/步骤-箭头切图@2x.png") no-repeat; + background-position: center; + background-size: 47px 67px; + margin-top: 91px; + margin-right: 67px; + }; + // 识别结果 + .public_recognition_result { + width: 680px; + height: 230px; + background: #FAFAFA; + padding: 40px 50px 0px 50px; + div { + &:nth-of-type(1) { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #666666; + line-height: 26px; + font-weight: 500; + margin-bottom: 20px; + }; + &:nth-of-type(2) { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #666666; + line-height: 26px; + font-weight: 500; + }; + }; + }; + +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue new file mode 100644 index 00000000..761a5c11 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue @@ -0,0 +1,128 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/style.less new file mode 100644 index 00000000..baa89c57 --- /dev/null +++ 
b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/style.less @@ -0,0 +1,112 @@ +.realTime{ + width: 1106px; + height: 270px; + // background-color: pink; + padding-top: 40px; + box-sizing: border-box; + display: flex; + // 开始识别 + .public_recognition_speech { + width: 295px; + height: 230px; + padding-top: 32px; + box-sizing: border-box; + .endToEndIdentification_start_recorder_img { + width: 116px; + height: 116px; + background: #2932E1; + background: url("../../../../assets/image/ic_开始聊天.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 116px 116px; + margin-left: 98px; + cursor: pointer; + margin-bottom: 20px; + &:hover { + background: url("../../../../assets/image/ic_开始聊天_hover.svg"); + + }; + + }; + + .endToEndIdentification_end_recorder_img { + width: 116px; + height: 116px; + background: #2932E1; + border-radius: 50%; + display: flex; + justify-content: center; + align-items: center; + margin-left: 98px; + margin-bottom: 20px; + cursor: pointer; + .endToEndIdentification_end_recorder_img_back { + width: 50px; + height: 50px; + background: url("../../../../assets/image/ic_大-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 50px 50px; + + &:hover { + opacity: 0.9; + + }; + }; + + }; + .endToEndIdentification_prompt { + height: 22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + font-weight: 500; + margin-left: 124px; + margin-bottom: 10px; + }; + .speech_text_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #999999; + font-weight: 400; + margin-left: 105px; + }; + }; + // 指向 + .public_recognition_point_to { + width: 47px; + height: 67px; + background: url("../../../../assets/image/步骤-箭头切图@2x.png") no-repeat; + background-position: center; + background-size: 47px 67px; + margin-top: 91px; + margin-right: 67px; + }; + // 识别结果 + .public_recognition_result { + width: 680px; + height: 230px; + 
background: #FAFAFA; + padding: 40px 50px 0px 50px; + div { + &:nth-of-type(1) { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #666666; + line-height: 26px; + font-weight: 500; + margin-bottom: 20px; + }; + &:nth-of-type(2) { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #666666; + line-height: 26px; + font-weight: 500; + }; + }; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/style.less new file mode 100644 index 00000000..92ce9340 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/style.less @@ -0,0 +1,76 @@ +.speech_recognition { + width: 1200px; + height: 410px; + background: #FFFFFF; + padding: 40px 50px 50px 44px; + position: relative; + .frame { + width: 605px; + height: 50px; + border: 1px solid rgba(238,238,238,1); + border-radius: 25px; + position: absolute; + } + .speech_recognition_mytabs { + .ant-tabs-tab { + position: relative; + display: inline-flex; + align-items: center; + // padding: 12px 0; + font-size: 14px; + background: transparent; + border: 0; + outline: none; + cursor: pointer; + padding: 12px 26px; + box-sizing: border-box; + } + .ant-tabs-tab-active { + height: 50px; + background: #EEEFFD; + border-radius: 25px; + padding: 12px 26px; + box-sizing: border-box; + }; + .speech_recognition .speech_recognition_mytabs .ant-tabs-ink-bar { + position: absolute; + background: transparent !important; + pointer-events: none; + } + .ant-tabs-ink-bar { + position: absolute; + background: transparent !important; + pointer-events: none; + } + .experience .experience_wrapper .experience_content .experience_tabs .ant-tabs-nav::before { + position: absolute; + right: 0; + left: 0; + border-bottom: 1px solid transparent !important; + // border: none; + content: ''; + } + .ant-tabs-top > .ant-tabs-nav::before, .ant-tabs-bottom > 
.ant-tabs-nav::before, .ant-tabs-top > div > .ant-tabs-nav::before, .ant-tabs-bottom > div > .ant-tabs-nav::before { + position: absolute; + right: 0; + left: 0; + border-bottom: 1px solid transparent !important; + // border: none; + content: ''; + } + .ant-tabs-top > .ant-tabs-nav::before, .ant-tabs-bottom > .ant-tabs-nav::before, .ant-tabs-top > div > .ant-tabs-nav::before, .ant-tabs-bottom > div > .ant-tabs-nav::before { + position: absolute; + right: 0; + left: 0; + border-bottom: 1px solid transparent !important; + content: ''; + } + .ant-tabs-nav::before { + position: absolute; + right: 0; + left: 0; + border-bottom: 1px solid transparent !important; + content: ''; + }; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/Chat.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/Chat.vue new file mode 100644 index 00000000..9d356fc8 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/Chat.vue @@ -0,0 +1,298 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/ChatT.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/ChatT.vue new file mode 100644 index 00000000..c37c083f --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/ChatT.vue @@ -0,0 +1,255 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/style.less new file mode 100644 index 00000000..d868fd47 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/style.less @@ -0,0 +1,181 @@ +.voice_chat { + width: 1200px; + height: 410px; + background: #FFFFFF; + position: relative; + // 开始聊天 + .voice_chat_wrapper { + top: 50%; + left: 50%; + transform: translate(-50%,-50%); + position: absolute; + .voice_chat_btn { + width: 
116px; + height: 116px; + margin-left: 54px; + // background: #2932E1; + border-radius: 50%; + cursor: pointer; + background: url("../../../assets/image/ic_开始聊天.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 116px 116px; + margin-bottom: 17px; + &:hover { + width: 116px; + height: 116px; + background: url("../../../assets/image/ic_开始聊天_hover.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 116px 116px; + }; + + }; + .voice_chat_btn_title { + height: 22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + letter-spacing: 0; + text-align: center; + line-height: 22px; + font-weight: 500; + margin-bottom: 10px; + }; + .voice_chat_btn_prompt { + height: 24px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #999999; + letter-spacing: 0; + text-align: center; + line-height: 24px; + font-weight: 400; + }; + }; + .voice_chat_wrapper::after { + content: ""; + display: block; + clear: both; + visibility: hidden; + }; + // 结束聊天 + .voice_chat_dialog_wrapper { + width: 1200px; + height: 410px; + background: #FFFFFF; + position: relative; + .dialog_box { + width: 100%; + height: 410px; + padding: 50px 198px 82px 199px; + box-sizing: border-box; + + .dialog_content { + width: 100%; + height: 268px; + // background: rgb(113, 144, 145); + padding: 0px; + overflow: auto; + li { + list-style-type: none; + margin-bottom: 33px; + display: flex; + align-items: center; + &:last-of-type(1) { + margin-bottom: 0px; + }; + .dialog_content_img_pp { + width: 60px; + height: 60px; + // transform: scaleX(-1); + background: url("../../../assets/image/飞桨头像@2x.png"); + background-repeat: no-repeat; + background-position: center; + background-size: 60px 60px; + margin-right: 20px; + }; + .dialog_content_img_user { + width: 60px; + height: 60px; + transform: scaleX(-1); + background: url("../../../assets/image/用户头像@2x.png"); + background-repeat: no-repeat; + background-position: 
center; + background-size: 60px 60px; + margin-left: 20px; + }; + .dialog_content_dialogue_pp { + height: 50px; + background: #F5F5F5; + border-radius: 25px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #000000; + line-height: 50px; + font-weight: 400; + padding: 0px 16px; + box-sizing: border-box; + }; + .dialog_content_dialogue_user { + height: 50px; + background: rgba(41,50,225,0.90); + border-radius: 25px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #FFFFFF; + line-height: 50px; + font-weight: 400; + padding: 0px 16px; + box-sizing: border-box; + }; + }; + }; + .move_dialogue { + justify-content: flex-end; + }; + + }; + + .btn_end_dialog { + width: 124px; + height: 42px; + line-height: 42px; + background: #FFFFFF; + box-shadow: 0px 4px 16px 0px rgba(0,0,0,0.09); + border-radius: 21px; + padding: 0px 24px; + box-sizing: border-box; + position: absolute; + left: 50%; + bottom: 40px; + transform: translateX(-50%); + display: flex; + justify-content: space-between; + align-items: center; + cursor: pointer; + span { + display: inline-block; + &:nth-of-type(1) { + width: 16px; + height: 16px; + background: url("../../../assets/image/ic_小-结束.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 16px 16px; + + }; + &:nth-of-type(2) { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #F33E3E; + text-align: center; + font-weight: 400; + line-height: 20px; + margin-left: 4px; + }; + }; + }; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IE.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IE.vue new file mode 100644 index 00000000..c7dd04e9 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IE.vue @@ -0,0 +1,125 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IET.vue 
b/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IET.vue new file mode 100644 index 00000000..50eadec7 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IET.vue @@ -0,0 +1,166 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/IE/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/IE/style.less new file mode 100644 index 00000000..988666a2 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/IE/style.less @@ -0,0 +1,179 @@ +.voice_commands { + width: 1200px; + height: 410px; + background: #FFFFFF; + padding: 40px 50px 50px 50px; + box-sizing: border-box; + display: flex; + // 交通报销 + .voice_commands_traffic { + width: 468px; + height: 320px; + .voice_commands_traffic_title { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + letter-spacing: 0; + line-height: 26px; + font-weight: 500; + margin-bottom: 30px; + // background: pink; + }; + .voice_commands_traffic_wrapper { + width: 465px; + height: 264px; + // background: #FAFAFA; + position: relative; + .voice_commands_traffic_wrapper_move { + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%,-50%); + }; + .traffic_btn_img_btn { + width: 116px; + height: 116px; + background: #2932E1; + display: flex; + justify-content: center; + align-items: center; + border-radius: 50%; + cursor: pointer; + margin-bottom: 20px; + margin-left: 84px; + &:hover { + width: 116px; + height: 116px; + background: #7278F5; + + .start_recorder_img{ + width: 50px; + height: 50px; + background: url("../../../assets/image/ic_开始聊天_hover.svg") no-repeat; + background-position: center; + background-size: 50px 50px; + }; + + }; + + .start_recorder_img{ + width: 50px; + height: 50px; + background: url("../../../assets/image/ic_开始聊天.svg") no-repeat; + background-position: center; + background-size: 50px 50px; + }; + + }; + .traffic_btn_prompt { + height: 
22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + font-weight: 500; + margin-bottom: 16px; + margin-left: 110px; + }; + .traffic_btn_list { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 12px; + color: #999999; + font-weight: 400; + width: 112%; + }; + }; + }; + //指向 + .voice_point_to { + width: 47px; + height: 63px; + background: url("../../../assets/image/步骤-箭头切图@2x.png") no-repeat; + background-position: center; + background-size: 47px 63px; + margin-top: 164px; + margin-right: 82px; + }; + //识别结果 + .voice_commands_IdentifyTheResults { + .voice_commands_IdentifyTheResults_title { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + line-height: 26px; + font-weight: 500; + margin-bottom: 30px; + }; + // 显示框 + .voice_commands_IdentifyTheResults_show { + width: 503px; + height: 264px; + background: #FAFAFA; + padding: 40px 0px 0px 50px; + box-sizing: border-box; + .voice_commands_IdentifyTheResults_show_title { + height: 22px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + // text-align: center; + font-weight: 500; + margin-bottom: 30px; + }; + .oice_commands_IdentifyTheResults_show_time { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #666666; + font-weight: 500; + margin-bottom: 12px; + }; + .oice_commands_IdentifyTheResults_show_money { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #666666; + font-weight: 500; + margin-bottom: 12px; + }; + .oice_commands_IdentifyTheResults_show_origin { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #666666; + font-weight: 500; + margin-bottom: 12px; + }; + .oice_commands_IdentifyTheResults_show_destination { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #666666; + font-weight: 500; + }; + }; + //加载状态 + .voice_commands_IdentifyTheResults_show_loading { + width: 503px; + height: 264px; + background: 
#FAFAFA; + padding: 40px 0px 0px 50px; + box-sizing: border-box; + display: flex; + justify-content: center; + align-items: center; + }; + }; + .end_recorder_img { + width: 50px; + height: 50px; + background: url("../../../assets/image/ic_大-声音波浪.svg") no-repeat; + background-position: center; + background-size: 50px 50px; + }; + .end_recorder_img:hover { + opacity: 0.9; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTS.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTS.vue new file mode 100644 index 00000000..13884ef7 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTS.vue @@ -0,0 +1,726 @@ + + + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTST.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTST.vue new file mode 100644 index 00000000..353221f7 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTST.vue @@ -0,0 +1,359 @@ + + + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/style.less new file mode 100644 index 00000000..b5d18965 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/style.less @@ -0,0 +1,369 @@ +.speech_recognition { + width: 1200px; + height: 410px; + background: #FFFFFF; + padding: 40px 0px 50px 50px; + box-sizing: border-box; + display: flex; + .recognition_text { + width: 589px; + height: 320px; + // background: pink; + .recognition_text_header { + margin-bottom: 30px; + display: flex; + justify-content: space-between; + align-items: center; + .recognition_text_title { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + letter-spacing: 0; + line-height: 26px; + font-weight: 500; + }; + .recognition_text_random { + display: 
flex; + align-items: center; + cursor: pointer; + span { + display: inline-block; + &:nth-of-type(1) { + width: 20px; + height: 20px; + background: url("../../../assets/image/ic_更换示例.svg") no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 5px; + + }; + &:nth-of-type(2) { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #2932E1; + letter-spacing: 0; + font-weight: 400; + }; + }; + }; + }; + .recognition_text_field { + width: 589px; + height: 264px; + background: #FAFAFA; + .textToSpeech_content_show_text{ + width: 100%; + height: 264px; + padding: 0px 30px 30px 0px; + box-sizing: border-box; + .ant-input { + height: 208px; + resize: none; + // margin-bottom: 230px; + padding: 21px 20px; + }; + }; + }; + }; + // 指向 + .recognition_point_to { + width: 47px; + height: 63px; + background: url("../../../assets/image/步骤-箭头切图@2x.png") no-repeat; + background-position: center; + background-size: 47px 63px; + margin-top: 164px; + margin-right: 101px; + margin-left: 100px; + margin-top: 164px; + }; + // 语音合成 + .speech_recognition_new { + .speech_recognition_title { + height: 26px; + font-family: PingFangSC-Medium; + font-size: 16px; + color: #000000; + line-height: 26px; + font-weight: 500; + margin-left: 32px; + margin-bottom: 96px; + }; + // 流式合成 + .speech_recognition_streaming { + width: 136px; + height: 44px; + background: #2932E1; + border-radius: 22px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + text-align: center; + line-height: 44px; + margin-bottom: 40px; + cursor: pointer; + &:hover { + opacity: .9; + }; + }; + // 合成中 + .streaming_ing_box { + display: flex; + align-items: center; + height: 44px; + margin-bottom: 40px; + .streaming_ing { + width: 136px; + height: 44px; + background: #7278F5; + border-radius: 22px; + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + + .streaming_ing_img { + width: 16px; + height: 16px; 
+ // background: url("../../../assets/image/ic_小-录制语音.svg"); + // background-repeat: no-repeat; + // background-position: center; + // background-size: 16px 16px; + // margin-right: 12px; + }; + .streaming_ing_text { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + margin-left: 12px; + }; + }; + // 合成时间文字 + .streaming_time { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #000000; + font-weight: 500; + margin-left: 12px; + }; + }; + + + // 暂停播放 + .streaming_suspended_box { + display: flex; + align-items: center; + height: 44px; + margin-bottom: 40px; + .streaming_suspended { + width: 136px; + height: 44px; + background: #2932E1; + border-radius: 22px; + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + + .streaming_suspended_img { + width: 16px; + height: 16px; + background: url("../../../assets/image/ic_暂停(按钮).svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 16px 16px; + margin-right: 12px; + }; + .streaming_suspended_text { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + margin-left: 12px; + }; + + }; + // 暂停获取时间 + .suspended_time { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #000000; + font-weight: 500; + margin-left: 12px; + } + }; + + // 继续播放 + .streaming_continue { + width: 136px; + height: 44px; + background: #2932E1; + border-radius: 22px; + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + margin-bottom: 40px; + .streaming_continue_img { + width: 16px; + height: 16px; + background: url("../../../assets/image/ic_播放(按钮).svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 16px 16px; + margin-right: 12px; + }; + .streaming_continue_text { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + 
font-weight: 500; + }; + }; + + + + + + + // 端到端合成 + .speech_recognition_end_to_end { + width: 136px; + height: 44px; + background: #2932E1; + border-radius: 22px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + text-align: center; + line-height: 44px; + cursor: pointer; + &:hover { + opacity: .9; + }; + }; + // 合成中 + .end_to_end_ing_box { + display: flex; + align-items: center; + height: 44px; + .end_to_end_ing { + width: 136px; + height: 44px; + background: #7278F5; + border-radius: 22px; + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + .end_to_end_ing_img { + width: 16px; + height: 16px; + // background: url("../../../assets/image/ic_小-录制语音.svg"); + // background-repeat: no-repeat; + // background-position: center; + // background-size: 16px 16px; + + }; + .end_to_end_ing_text { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + margin-left: 12px; + }; + }; + // 合成时间文本 + .end_to_end_ing_time { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #000000; + font-weight: 500; + margin-left: 12px; + }; + }; + + + // 暂停播放 + .end_to_end_suspended_box { + display: flex; + align-items: center; + height: 44px; + .end_to_end_suspended { + width: 136px; + height: 44px; + background: #2932E1; + border-radius: 22px; + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + .end_to_end_suspended_img { + width: 16px; + height: 16px; + background: url("../../../assets/image/ic_暂停(按钮).svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 16px 16px; + margin-right: 12px; + }; + .end_to_end_suspended_text { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + }; + }; + // 暂停播放时间 + .end_to_end_ing_suspended_time { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: 
#000000; + font-weight: 500; + margin-left: 12px; + }; + }; + + // 继续播放 + .end_to_end_continue { + width: 136px; + height: 44px; + background: #2932E1; + border-radius: 22px; + display: flex; + justify-content: center; + align-items: center; + cursor: pointer; + .end_to_end_continue_img { + width: 16px; + height: 16px; + background: url("../../../assets/image/ic_播放(按钮).svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 16px 16px; + margin-right: 12px; + }; + .end_to_end_continue_text { + height: 20px; + font-family: PingFangSC-Medium; + font-size: 14px; + color: #FFFFFF; + font-weight: 500; + }; + }; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPR.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPR.vue new file mode 100644 index 00000000..1fe71e4d --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPR.vue @@ -0,0 +1,178 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPRT.vue b/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPRT.vue new file mode 100644 index 00000000..e398da00 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPRT.vue @@ -0,0 +1,335 @@ + + + + + \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/style.less b/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/style.less new file mode 100644 index 00000000..cb3df49e --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/style.less @@ -0,0 +1,419 @@ +.voiceprint { + width: 1200px; + height: 410px; + background: #FFFFFF; + padding: 41px 80px 56px 80px; + box-sizing: border-box; + display: flex; + // 录制声纹 + .voiceprint_recording { + width: 423px; + height: 354px; + margin-right: 66px; + .recording_title { + display: flex; + align-items: center; + margin-bottom: 
20px; + div { + &:nth-of-type(1) { + width: 24px; + height: 24px; + background: rgba(41,50,225,0.70); + font-family: PingFangSC-Regular; + font-size: 16px; + color: #FFFFFF; + letter-spacing: 0; + text-align: center; + line-height: 24px; + font-weight: 400; + margin-right: 16px; + border-radius: 50%; + }; + &:nth-of-type(2) { + height: 26px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #000000; + line-height: 26px; + font-weight: 400; + }; + }; + }; + // 开始录音 + .recording_btn { + width: 143px; + height: 44px; + cursor: pointer; + background: #2932E1; + padding: 0px 24px 0px 21px; + box-sizing: border-box; + border-radius: 22px; + display: flex; + align-items: center; + margin-bottom: 20px; + margin-top: 10px; + + &:hover { + background: #7278F5; + .recording_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_录制声音小语音1.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + } + .recording_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_录制声音小语音1.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + .recording_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 12px; + color: #FFFFFF; + font-weight: 400; + }; + + }; + // 录音中 + .recording_btn_the_recording { + width: 143px; + height: 44px; + cursor: pointer; + background: #7278F5; + padding: 0px 24px 0px 21px; + box-sizing: border-box; + border-radius: 22px; + display: flex; + align-items: center; + justify-content: center; + margin-bottom: 40px; + .recording_img_the_recording { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_小-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + }; + .recording_prompt { + height: 20px; + font-family: PingFangSC-Regular; 
+ font-size: 12px; + color: #FFFFFF; + font-weight: 400; + }; + }; + // 完成录音 + .complete_the_recording_btn { + width: 143px; + height: 44px; + cursor: pointer; + background: #2932E1; + padding: 0px 24px 0px 21px; + box-sizing: border-box; + border-radius: 22px; + display: flex; + align-items: center; + margin-bottom: 40px; + &:hover { + background: #7278F5; + .complete_the_recording_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_小-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + } + .complete_the_recording_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_小-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + .complete_the_recording_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 12px; + color: #FFFFFF; + font-weight: 400; + }; + + }; + // table + .recording_table { + width: 322px; + .recording_table_box { + .ant-table-thead > tr > th { + color: rgba(0, 0, 0, 0.85); + font-weight: 500; + text-align: left; + background: rgba(40,50,225,0.08); + border-bottom: none; + transition: background 0.3s ease; + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #333333; + // text-align: center; + font-weight: 400; + &:nth-of-type(2) { + border-left: 2px solid white; + }; + }; + .ant-table-tbody > tr > td { + border-bottom: 1px solid #f0f0f0; + transition: background 0.3s; + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #333333; + // text-align: center; + font-weight: 400; + }; + }; + }; + // input + .recording_input { + width: 322px; + margin-bottom: 20px; + }; + }; + // 指向 + .recording_point_to { + width: 63px; + height: 47px; + background: url("../../../assets/image//步骤-箭头切图@2x.png"); + background-repeat: no-repeat; + background-position: center; + 
background-size: 63px 47px; + margin-right: 66px; + margin-top: 198px; + }; + //识别声纹 + .voiceprint_identify { + width: 423px; + height: 354px; + .identify_title { + display: flex; + align-items: center; + margin-bottom: 20px; + div { + &:nth-of-type(1) { + width: 24px; + height: 24px; + background: rgba(41,50,225,0.70); + font-family: PingFangSC-Regular; + font-size: 16px; + color: #FFFFFF; + letter-spacing: 0; + text-align: center; + line-height: 24px; + font-weight: 400; + margin-right: 16px; + border-radius: 50%; + }; + &:nth-of-type(2) { + height: 26px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #000000; + line-height: 26px; + font-weight: 400; + }; + }; + }; + // 开始识别 + .identify_btn { + width: 143px; + height: 44px; + cursor: pointer; + background: #2932E1; + padding: 0px 24px 0px 21px; + box-sizing: border-box; + border-radius: 22px; + display: flex; + align-items: center; + margin-bottom: 40px; + margin-top: 10px; + &:hover { + background: #7278F5; + .identify_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_录制声音小语音1.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + } + .identify_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_录制声音小语音1.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + .identify_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 12px; + color: #FFFFFF; + font-weight: 400; + }; + + }; + // 识别中 + .identify_btn_the_recording { + width: 143px; + height: 44px; + cursor: pointer; + background: #7278F5; + padding: 0px 24px 0px 21px; + box-sizing: border-box; + border-radius: 22px; + display: flex; + align-items: center; + justify-content: center; + margin-bottom: 40px; + .identify_img_the_recording { + width: 20px; + height: 20px; + background: 
url("../../../assets/image//icon_录制声音小语音1.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + }; + .recording_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 12px; + color: #FFFFFF; + font-weight: 400; + }; + }; + // 完成识别 + .identify_complete_the_recording_btn { + width: 143px; + height: 44px; + cursor: pointer; + background: #2932E1; + padding: 0px 24px 0px 21px; + box-sizing: border-box; + border-radius: 22px; + display: flex; + align-items: center; + margin-bottom: 40px; + &:hover { + background: #7278F5; + .identify_complete_the_recording_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_小-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + } + .identify_complete_the_recording_img { + width: 20px; + height: 20px; + background: url("../../../assets/image//icon_小-声音波浪.svg"); + background-repeat: no-repeat; + background-position: center; + background-size: 20px 20px; + margin-right: 8.26px; + + }; + .identify_complete_the_recording_prompt { + height: 20px; + font-family: PingFangSC-Regular; + font-size: 12px; + color: #FFFFFF; + font-weight: 400; + }; + + }; + + + + + // 结果 + .identify_result { + width: 422px; + height: 184px; + text-align: center; + line-height: 184px; + background: #FAFAFA; + position: relative; + .identify_result_default { + + font-family: PingFangSC-Regular; + font-size: 16px; + color: #999999; + font-weight: 400; + }; + .identify_result_content { + // text-align: center; + // position: absolute; + // top: 50%; + // left: 50%; + // transform: translate(-50%,-50%); + div { + &:nth-of-type(1) { + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #666666; + font-weight: 400; + margin-bottom: 10px; + }; + &:nth-of-type(2) { + height: 33px; + font-family: PingFangSC-Medium; + font-size: 24px; + color: 
#000000; + font-weight: 500; + }; + }; + }; + }; + }; + .action_btn { + display: inline-block; + height: 22px; + font-family: PingFangSC-Regular; + font-size: 16px; + color: #2932E1; + text-align: center; + font-weight: 400; + cursor: pointer; + }; +}; \ No newline at end of file diff --git a/demos/speech_web_demo/web_client/src/components/style.less b/demos/speech_web_demo/web_client/src/components/style.less new file mode 100644 index 00000000..98f414f1 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/components/style.less @@ -0,0 +1,83 @@ +.experience { + width: 100%; + height: 709px; + // background: url("../assets/image/在线体验-背景@2x.png") no-repeat; + background-size: 100% 709px; + background-position: initial; + // + .experience_wrapper { + width: 1200px; + height: 709px; + margin: 0 auto; + padding: 0px 0px 0px 0px; + box-sizing: border-box; + // background: red; + .experience_title { + height: 42px; + font-family: PingFangSC-Semibold; + font-size: 30px; + color: #000000; + font-weight: 600; + line-height: 42px; + text-align: center; + margin-bottom: 10px; + }; + .experience_describe { + height: 22px; + font-family: PingFangSC-Regular; + font-size: 14px; + color: #666666; + letter-spacing: 0; + text-align: center; + line-height: 22px; + font-weight: 400; + margin-bottom: 30px; + }; + .experience_content { + width: 1200px; + margin: 0 auto; + display: flex; + justify-content: center; + .experience_tabs { + + margin-top: 15px; + + & > .ant-tabs-nav { + margin-bottom: 20px; + + &::before { + content: none; + } + + .ant-tabs-nav-wrap { + justify-content: center; + } + + .ant-tabs-tab { + font-size: 20px; + } + + .ant-tabs-nav-list { + margin-right: -32px; + flex: none; + } + }; + + .ant-tabs-nav::before { + position: absolute; + right: 0; + left: 0; + border-bottom: 1px solid #f6f7fe; + content: ''; + }; + + }; + }; + }; +}; +.experience::after { + content: ""; + display: block; + clear: both; + visibility: hidden; +} \ No newline at end of file diff --git 
a/demos/speech_web_demo/web_client/src/main.js b/demos/speech_web_demo/web_client/src/main.js new file mode 100644 index 00000000..3fbf87c8 --- /dev/null +++ b/demos/speech_web_demo/web_client/src/main.js @@ -0,0 +1,13 @@ +import { createApp } from 'vue' +import ElementPlus from 'element-plus' +import 'element-plus/dist/index.css' +import Antd from 'ant-design-vue'; +import 'ant-design-vue/dist/antd.css'; +import App from './App.vue' +import axios from 'axios' + +const app = createApp(App) +app.config.globalProperties.$http = axios + +app.use(ElementPlus).use(Antd) +app.mount('#app') diff --git a/demos/speech_web_demo/web_client/vite.config.js b/demos/speech_web_demo/web_client/vite.config.js new file mode 100644 index 00000000..dc7e6978 --- /dev/null +++ b/demos/speech_web_demo/web_client/vite.config.js @@ -0,0 +1,28 @@ +import { defineConfig } from 'vite' +import vue from '@vitejs/plugin-vue' + +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [vue()], + css: + { preprocessorOptions: + { css: + { + charset: false + } + } + }, + build: { + assetsInlineLimit: '2048' // 2kb + }, + server: { + host: "0.0.0.0", + proxy: { + "/api": { + target: "http://localhost:8010", + changeOrigin: true, + rewrite: (path) => path.replace(/^\/api/, ""), + }, + }, + }, +}) diff --git a/demos/speech_web_demo/web_client/yarn.lock b/demos/speech_web_demo/web_client/yarn.lock new file mode 100644 index 00000000..4504eab3 --- /dev/null +++ b/demos/speech_web_demo/web_client/yarn.lock @@ -0,0 +1,785 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@ant-design/colors@^6.0.0": + version "6.0.0" + resolved "https://registry.npmmirror.com/@ant-design/colors/-/colors-6.0.0.tgz" + integrity sha512-qAZRvPzfdWHtfameEGP2Qvuf838NhergR35o+EuVyB5XvSA98xod5r4utvi4TJ3ywmevm290g9nsCG5MryrdWQ== + dependencies: + "@ctrl/tinycolor" "^3.4.0" + +"@ant-design/icons-svg@^4.2.1": + version "4.2.1" + resolved "https://registry.npmmirror.com/@ant-design/icons-svg/-/icons-svg-4.2.1.tgz" + integrity sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw== + +"@ant-design/icons-vue@^6.0.0": + version "6.1.0" + resolved "https://registry.npmmirror.com/@ant-design/icons-vue/-/icons-vue-6.1.0.tgz" + integrity sha512-EX6bYm56V+ZrKN7+3MT/ubDkvJ5rK/O2t380WFRflDcVFgsvl3NLH7Wxeau6R8DbrO5jWR6DSTC3B6gYFp77AA== + dependencies: + "@ant-design/colors" "^6.0.0" + "@ant-design/icons-svg" "^4.2.1" + +"@babel/parser@^7.16.4": + version "7.17.9" + resolved "https://registry.npmmirror.com/@babel/parser/-/parser-7.17.9.tgz" + integrity sha512-vqUSBLP8dQHFPdPi9bc5GK9vRkYHJ49fsZdtoJ8EQ8ibpwk5rPKfvNIwChB0KVXcIjcepEBBd2VHC5r9Gy8ueg== + +"@babel/runtime@^7.10.5": + version "7.17.9" + resolved "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.17.9.tgz" + integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg== + dependencies: + regenerator-runtime "^0.13.4" + +"@ctrl/tinycolor@^3.4.0": + version "3.4.1" + resolved "https://registry.npmmirror.com/@ctrl/tinycolor/-/tinycolor-3.4.1.tgz" + integrity sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw== + +"@element-plus/icons-vue@^1.1.4": + version "1.1.4" + resolved "https://registry.npmmirror.com/@element-plus/icons-vue/-/icons-vue-1.1.4.tgz" + integrity sha512-Iz/nHqdp1sFPmdzRwHkEQQA3lKvoObk8azgABZ81QUOpW9s/lUyQVUSh0tNtEPZXQlKwlSh7SPgoVxzrE0uuVQ== + +"@floating-ui/core@^0.6.1": + version "0.6.1" + resolved 
"https://registry.npmmirror.com/@floating-ui/core/-/core-0.6.1.tgz" + integrity sha512-Y30eVMcZva8o84c0HcXAtDO4BEzPJMvF6+B7x7urL2xbAqVsGJhojOyHLaoQHQYjb6OkqRq5kO+zeySycQwKqg== + +"@floating-ui/dom@^0.4.2": + version "0.4.4" + resolved "https://registry.npmmirror.com/@floating-ui/dom/-/dom-0.4.4.tgz" + integrity sha512-0Ulu3B/dqQplUUSqnTx0foSrlYuMN+GTtlJWvNJwt6Fr7/PqmlR/Y08o6/+bxDWr6p3roBJRaQ51MDZsNmEhhw== + dependencies: + "@floating-ui/core" "^0.6.1" + +"@popperjs/core@^2.11.4": + version "2.11.5" + resolved "https://registry.npmmirror.com/@popperjs/core/-/core-2.11.5.tgz" + integrity sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw== + +"@simonwep/pickr@~1.8.0": + version "1.8.2" + resolved "https://registry.npmmirror.com/@simonwep/pickr/-/pickr-1.8.2.tgz" + integrity sha512-/l5w8BIkrpP6n1xsetx9MWPWlU6OblN5YgZZphxan0Tq4BByTCETL6lyIeY8lagalS2Nbt4F2W034KHLIiunKA== + dependencies: + core-js "^3.15.1" + nanopop "^2.1.0" + +"@types/lodash-es@^4.17.6": + version "4.17.6" + resolved "https://registry.npmmirror.com/@types/lodash-es/-/lodash-es-4.17.6.tgz" + integrity sha512-R+zTeVUKDdfoRxpAryaQNRKk3105Rrgx2CFRClIgRGaqDTdjsm8h6IYA8ir584W3ePzkZfst5xIgDwYrlh9HLg== + dependencies: + "@types/lodash" "*" + +"@types/lodash@*", "@types/lodash@^4.14.181": + version "4.14.181" + resolved "https://registry.npmmirror.com/@types/lodash/-/lodash-4.14.181.tgz" + integrity sha512-n3tyKthHJbkiWhDZs3DkhkCzt2MexYHXlX0td5iMplyfwketaOeKboEVBqzceH7juqvEg3q5oUoBFxSLu7zFag== + +"@vitejs/plugin-vue@^2.3.0": + version "2.3.1" + resolved "https://registry.npmmirror.com/@vitejs/plugin-vue/-/plugin-vue-2.3.1.tgz" + integrity sha512-YNzBt8+jt6bSwpt7LP890U1UcTOIZZxfpE5WOJ638PNxSEKOqAi0+FSKS0nVeukfdZ0Ai/H7AFd6k3hayfGZqQ== + +"@vue/compiler-core@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/compiler-core/-/compiler-core-3.2.32.tgz" + integrity 
sha512-bRQ8Rkpm/aYFElDWtKkTPHeLnX5pEkNxhPUcqu5crEJIilZH0yeFu/qUAcV4VfSE2AudNPkQSOwMZofhnuutmA== + dependencies: + "@babel/parser" "^7.16.4" + "@vue/shared" "3.2.32" + estree-walker "^2.0.2" + source-map "^0.6.1" + +"@vue/compiler-dom@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/compiler-dom/-/compiler-dom-3.2.32.tgz" + integrity sha512-maa3PNB/NxR17h2hDQfcmS02o1f9r9QIpN1y6fe8tWPrS1E4+q8MqrvDDQNhYVPd84rc3ybtyumrgm9D5Rf/kg== + dependencies: + "@vue/compiler-core" "3.2.32" + "@vue/shared" "3.2.32" + +"@vue/compiler-sfc@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/compiler-sfc/-/compiler-sfc-3.2.32.tgz" + integrity sha512-uO6+Gh3AVdWm72lRRCjMr8nMOEqc6ezT9lWs5dPzh1E9TNaJkMYPaRtdY9flUv/fyVQotkfjY/ponjfR+trPSg== + dependencies: + "@babel/parser" "^7.16.4" + "@vue/compiler-core" "3.2.32" + "@vue/compiler-dom" "3.2.32" + "@vue/compiler-ssr" "3.2.32" + "@vue/reactivity-transform" "3.2.32" + "@vue/shared" "3.2.32" + estree-walker "^2.0.2" + magic-string "^0.25.7" + postcss "^8.1.10" + source-map "^0.6.1" + +"@vue/compiler-ssr@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/compiler-ssr/-/compiler-ssr-3.2.32.tgz" + integrity sha512-ZklVUF/SgTx6yrDUkaTaBL/JMVOtSocP+z5Xz/qIqqLdW/hWL90P+ob/jOQ0Xc/om57892Q7sRSrex0wujOL2Q== + dependencies: + "@vue/compiler-dom" "3.2.32" + "@vue/shared" "3.2.32" + +"@vue/reactivity-transform@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/reactivity-transform/-/reactivity-transform-3.2.32.tgz" + integrity sha512-CW1W9zaJtE275tZSWIfQKiPG0iHpdtSlmTqYBu7Y62qvtMgKG5yOxtvBs4RlrZHlaqFSE26avLAgQiTp4YHozw== + dependencies: + "@babel/parser" "^7.16.4" + "@vue/compiler-core" "3.2.32" + "@vue/shared" "3.2.32" + estree-walker "^2.0.2" + magic-string "^0.25.7" + +"@vue/reactivity@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/reactivity/-/reactivity-3.2.32.tgz" + integrity 
sha512-4zaDumuyDqkuhbb63hRd+YHFGopW7srFIWesLUQ2su/rJfWrSq3YUvoKAJE8Eu1EhZ2Q4c1NuwnEreKj1FkDxA== + dependencies: + "@vue/shared" "3.2.32" + +"@vue/runtime-core@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/runtime-core/-/runtime-core-3.2.32.tgz" + integrity sha512-uKKzK6LaCnbCJ7rcHvsK0azHLGpqs+Vi9B28CV1mfWVq1F3Bj8Okk3cX+5DtD06aUh4V2bYhS2UjjWiUUKUF0w== + dependencies: + "@vue/reactivity" "3.2.32" + "@vue/shared" "3.2.32" + +"@vue/runtime-dom@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/runtime-dom/-/runtime-dom-3.2.32.tgz" + integrity sha512-AmlIg+GPqjkNoADLjHojEX5RGcAg+TsgXOOcUrtDHwKvA8mO26EnLQLB8nylDjU6AMJh2CIYn8NEgyOV5ZIScQ== + dependencies: + "@vue/runtime-core" "3.2.32" + "@vue/shared" "3.2.32" + csstype "^2.6.8" + +"@vue/server-renderer@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/server-renderer/-/server-renderer-3.2.32.tgz" + integrity sha512-TYKpZZfRJpGTTiy/s6bVYwQJpAUx3G03z4G7/3O18M11oacrMTVHaHjiPuPqf3xQtY8R4LKmQ3EOT/DRCA/7Wg== + dependencies: + "@vue/compiler-ssr" "3.2.32" + "@vue/shared" "3.2.32" + +"@vue/shared@3.2.32": + version "3.2.32" + resolved "https://registry.npmmirror.com/@vue/shared/-/shared-3.2.32.tgz" + integrity sha512-bjcixPErUsAnTQRQX4Z5IQnICYjIfNCyCl8p29v1M6kfVzvwOICPw+dz48nNuWlTOOx2RHhzHdazJibE8GSnsw== + +"@vueuse/core@^8.2.4": + version "8.2.5" + resolved "https://registry.npmmirror.com/@vueuse/core/-/core-8.2.5.tgz" + integrity sha512-5prZAA1Ji2ltwNUnzreu6WIXYqHYP/9U2BiY5mD/650VYLpVcwVlYznJDFcLCmEWI3o3Vd34oS1FUf+6Mh68GQ== + dependencies: + "@vueuse/metadata" "8.2.5" + "@vueuse/shared" "8.2.5" + vue-demi "*" + +"@vueuse/metadata@8.2.5": + version "8.2.5" + resolved "https://registry.npmmirror.com/@vueuse/metadata/-/metadata-8.2.5.tgz" + integrity sha512-Lk9plJjh9cIdiRdcj16dau+2LANxIdFCiTgdfzwYXbflxq0QnMBeOD2qHgKDE7fuVrtPcVWj8VSuZEx1HRfNQA== + +"@vueuse/shared@8.2.5": + version "8.2.5" + resolved 
"https://registry.npmmirror.com/@vueuse/shared/-/shared-8.2.5.tgz" + integrity sha512-lNWo+7sk6JCuOj4AiYM+6HZ6fq4xAuVq1sVckMQKgfCJZpZRe4i8es+ZULO5bYTKP+VrOCtqrLR2GzEfrbr3YQ== + dependencies: + vue-demi "*" + +ant-design-vue@^2.2.8: + version "2.2.8" + resolved "https://registry.npmmirror.com/ant-design-vue/-/ant-design-vue-2.2.8.tgz" + integrity sha512-3graq9/gCfJQs6hznrHV6sa9oDmk/D1H3Oo0vLdVpPS/I61fZPk8NEyNKCHpNA6fT2cx6xx9U3QS63uuyikg/Q== + dependencies: + "@ant-design/icons-vue" "^6.0.0" + "@babel/runtime" "^7.10.5" + "@simonwep/pickr" "~1.8.0" + array-tree-filter "^2.1.0" + async-validator "^3.3.0" + dom-align "^1.12.1" + dom-scroll-into-view "^2.0.0" + lodash "^4.17.21" + lodash-es "^4.17.15" + moment "^2.27.0" + omit.js "^2.0.0" + resize-observer-polyfill "^1.5.1" + scroll-into-view-if-needed "^2.2.25" + shallow-equal "^1.0.0" + vue-types "^3.0.0" + warning "^4.0.0" + +array-tree-filter@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/array-tree-filter/-/array-tree-filter-2.1.0.tgz" + integrity sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw== + +async-validator@^3.3.0: + version "3.5.2" + resolved "https://registry.npmmirror.com/async-validator/-/async-validator-3.5.2.tgz" + integrity sha512-8eLCg00W9pIRZSB781UUX/H6Oskmm8xloZfr09lz5bikRpBVDlJ3hRVuxxP1SxcwsEYfJ4IU8Q19Y8/893r3rQ== + +async-validator@^4.0.7: + version "4.0.7" + resolved "https://registry.npmmirror.com/async-validator/-/async-validator-4.0.7.tgz" + integrity sha512-Pj2IR7u8hmUEDOwB++su6baaRi+QvsgajuFB9j95foM1N2gy5HM4z60hfusIO0fBPG5uLAEl6yCJr1jNSVugEQ== + +axios@^0.26.1: + version "0.26.1" + resolved "https://registry.npmmirror.com/axios/-/axios-0.26.1.tgz" + integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA== + dependencies: + follow-redirects "^1.14.8" + +compute-scroll-into-view@^1.0.17: + version "1.0.17" + resolved 
"https://registry.npmmirror.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz" + integrity sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg== + +copy-anything@^2.0.1: + version "2.0.6" + resolved "https://registry.npmmirror.com/copy-anything/-/copy-anything-2.0.6.tgz" + integrity sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw== + dependencies: + is-what "^3.14.1" + +core-js@^3.15.1: + version "3.22.5" + resolved "https://registry.npmmirror.com/core-js/-/core-js-3.22.5.tgz" + integrity sha512-VP/xYuvJ0MJWRAobcmQ8F2H6Bsn+s7zqAAjFaHGBMc5AQm7zaelhD1LGduFn2EehEcQcU+br6t+fwbpQ5d1ZWA== + +csstype@^2.6.8: + version "2.6.20" + resolved "https://registry.npmmirror.com/csstype/-/csstype-2.6.20.tgz" + integrity sha512-/WwNkdXfckNgw6S5R125rrW8ez139lBHWouiBvX8dfMFtcn6V81REDqnH7+CRpRipfYlyU1CmOnOxrmGcFOjeA== + +dayjs@^1.11.0: + version "1.11.0" + resolved "https://registry.npmmirror.com/dayjs/-/dayjs-1.11.0.tgz" + integrity sha512-JLC809s6Y948/FuCZPm5IX8rRhQwOiyMb2TfVVQEixG7P8Lm/gt5S7yoQZmC8x1UehI9Pb7sksEt4xx14m+7Ug== + +debug@^3.2.6: + version "3.2.7" + resolved "https://registry.npmmirror.com/debug/-/debug-3.2.7.tgz" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +dom-align@^1.12.1: + version "1.12.3" + resolved "https://registry.npmmirror.com/dom-align/-/dom-align-1.12.3.tgz" + integrity sha512-Gj9hZN3a07cbR6zviMUBOMPdWxYhbMI+x+WS0NAIu2zFZmbK8ys9R79g+iG9qLnlCwpFoaB+fKy8Pdv470GsPA== + +dom-scroll-into-view@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/dom-scroll-into-view/-/dom-scroll-into-view-2.0.1.tgz" + integrity sha512-bvVTQe1lfaUr1oFzZX80ce9KLDlZ3iU+XGNE/bz9HnGdklTieqsbmsLHe+rT2XWqopvL0PckkYqN7ksmm5pe3w== + +element-plus@^2.1.9: + version "2.1.9" + resolved "https://registry.npmmirror.com/element-plus/-/element-plus-2.1.9.tgz" + integrity 
sha512-6mWqS3YrmJPnouWP4otzL8+MehfOnDFqDbcIdnmC07p+Z0JkWe/CVKc4Wky8AYC8nyDMUQyiZYvooCbqGuM7pg== + dependencies: + "@ctrl/tinycolor" "^3.4.0" + "@element-plus/icons-vue" "^1.1.4" + "@floating-ui/dom" "^0.4.2" + "@popperjs/core" "^2.11.4" + "@types/lodash" "^4.14.181" + "@types/lodash-es" "^4.17.6" + "@vueuse/core" "^8.2.4" + async-validator "^4.0.7" + dayjs "^1.11.0" + escape-html "^1.0.3" + lodash "^4.17.21" + lodash-es "^4.17.21" + lodash-unified "^1.0.2" + memoize-one "^6.0.0" + normalize-wheel-es "^1.1.2" + +errno@^0.1.1: + version "0.1.8" + resolved "https://registry.npmmirror.com/errno/-/errno-0.1.8.tgz" + integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== + dependencies: + prr "~1.0.1" + +esbuild-android-64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-android-64/-/esbuild-android-64-0.14.36.tgz#fc5f95ce78c8c3d790fa16bc71bd904f2bb42aa1" + integrity sha512-jwpBhF1jmo0tVCYC/ORzVN+hyVcNZUWuozGcLHfod0RJCedTDTvR4nwlTXdx1gtncDqjk33itjO+27OZHbiavw== + +esbuild-android-arm64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-android-arm64/-/esbuild-android-arm64-0.14.36.tgz#44356fbb9f8de82a5cdf11849e011dfb3ad0a8a8" + integrity sha512-/hYkyFe7x7Yapmfv4X/tBmyKnggUmdQmlvZ8ZlBnV4+PjisrEhAvC3yWpURuD9XoB8Wa1d5dGkTsF53pIvpjsg== + +esbuild-darwin-64@0.14.36: + version "0.14.36" + resolved "https://registry.npmmirror.com/esbuild-darwin-64/-/esbuild-darwin-64-0.14.36.tgz" + integrity sha512-kkl6qmV0dTpyIMKagluzYqlc1vO0ecgpviK/7jwPbRDEv5fejRTaBBEE2KxEQbTHcLhiiDbhG7d5UybZWo/1zQ== + +esbuild-darwin-arm64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.36.tgz#2a8040c2e465131e5281034f3c72405e643cb7b2" + integrity sha512-q8fY4r2Sx6P0Pr3VUm//eFYKVk07C5MHcEinU1BjyFnuYz4IxR/03uBbDwluR6ILIHnZTE7AkTUWIdidRi1Jjw== + +esbuild-freebsd-64@0.14.36: + version "0.14.36" + resolved 
"https://registry.yarnpkg.com/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.36.tgz#d82c387b4d01fe9e8631f97d41eb54f2dbeb68a3" + integrity sha512-Hn8AYuxXXRptybPqoMkga4HRFE7/XmhtlQjXFHoAIhKUPPMeJH35GYEUWGbjteai9FLFvBAjEAlwEtSGxnqWww== + +esbuild-freebsd-arm64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.36.tgz#e8ce2e6c697da6c7ecd0cc0ac821d47c5ab68529" + integrity sha512-S3C0attylLLRiCcHiJd036eDEMOY32+h8P+jJ3kTcfhJANNjP0TNBNL30TZmEdOSx/820HJFgRrqpNAvTbjnDA== + +esbuild-linux-32@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-32/-/esbuild-linux-32-0.14.36.tgz#a4a261e2af91986ea62451f2db712a556cb38a15" + integrity sha512-Eh9OkyTrEZn9WGO4xkI3OPPpUX7p/3QYvdG0lL4rfr73Ap2HAr6D9lP59VMF64Ex01LhHSXwIsFG/8AQjh6eNw== + +esbuild-linux-64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-64/-/esbuild-linux-64-0.14.36.tgz#4a9500f9197e2c8fcb884a511d2c9d4c2debde72" + integrity sha512-vFVFS5ve7PuwlfgoWNyRccGDi2QTNkQo/2k5U5ttVD0jRFaMlc8UQee708fOZA6zTCDy5RWsT5MJw3sl2X6KDg== + +esbuild-linux-arm64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.36.tgz#c91c21e25b315464bd7da867365dd1dae14ca176" + integrity sha512-24Vq1M7FdpSmaTYuu1w0Hdhiqkbto1I5Pjyi+4Cdw5fJKGlwQuw+hWynTcRI/cOZxBcBpP21gND7W27gHAiftw== + +esbuild-linux-arm@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-arm/-/esbuild-linux-arm-0.14.36.tgz#90e23bca2e6e549affbbe994f80ba3bb6c4d934a" + integrity sha512-NhgU4n+NCsYgt7Hy61PCquEz5aevI6VjQvxwBxtxrooXsxt5b2xtOUXYZe04JxqQo+XZk3d1gcr7pbV9MAQ/Lg== + +esbuild-linux-mips64le@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.36.tgz#40e11afb08353ff24709fc89e4db0f866bc131d2" + integrity 
sha512-hZUeTXvppJN+5rEz2EjsOFM9F1bZt7/d2FUM1lmQo//rXh1RTFYzhC0txn7WV0/jCC7SvrGRaRz0NMsRPf8SIA== + +esbuild-linux-ppc64le@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.36.tgz#9e8a588c513d06cc3859f9dcc52e5fdfce8a1a5e" + integrity sha512-1Bg3QgzZjO+QtPhP9VeIBhAduHEc2kzU43MzBnMwpLSZ890azr4/A9Dganun8nsqD/1TBcqhId0z4mFDO8FAvg== + +esbuild-linux-riscv64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.36.tgz#e578c09b23b3b97652e60e3692bfda628b541f06" + integrity sha512-dOE5pt3cOdqEhaufDRzNCHf5BSwxgygVak9UR7PH7KPVHwSTDAZHDoEjblxLqjJYpc5XaU9+gKJ9F8mp9r5I4A== + +esbuild-linux-s390x@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.36.tgz#3c9dab40d0d69932ffded0fd7317bb403626c9bc" + integrity sha512-g4FMdh//BBGTfVHjF6MO7Cz8gqRoDPzXWxRvWkJoGroKA18G9m0wddvPbEqcQf5Tbt2vSc1CIgag7cXwTmoTXg== + +esbuild-netbsd-64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.36.tgz#e27847f6d506218291619b8c1e121ecd97628494" + integrity sha512-UB2bVImxkWk4vjnP62ehFNZ73lQY1xcnL5ZNYF3x0AG+j8HgdkNF05v67YJdCIuUJpBuTyCK8LORCYo9onSW+A== + +esbuild-openbsd-64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.36.tgz#c94c04c557fae516872a586eae67423da6d2fabb" + integrity sha512-NvGB2Chf8GxuleXRGk8e9zD3aSdRO5kLt9coTQbCg7WMGXeX471sBgh4kSg8pjx0yTXRt0MlrUDnjVYnetyivg== + +esbuild-sunos-64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-sunos-64/-/esbuild-sunos-64-0.14.36.tgz#9b79febc0df65a30f1c9bd63047d1675511bf99d" + integrity sha512-VkUZS5ftTSjhRjuRLp+v78auMO3PZBXu6xl4ajomGenEm2/rGuWlhFSjB7YbBNErOchj51Jb2OK8lKAo8qdmsQ== + +esbuild-windows-32@0.14.36: + version "0.14.36" + resolved 
"https://registry.yarnpkg.com/esbuild-windows-32/-/esbuild-windows-32-0.14.36.tgz#910d11936c8d2122ffdd3275e5b28d8a4e1240ec" + integrity sha512-bIar+A6hdytJjZrDxfMBUSEHHLfx3ynoEZXx/39nxy86pX/w249WZm8Bm0dtOAByAf4Z6qV0LsnTIJHiIqbw0w== + +esbuild-windows-64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-windows-64/-/esbuild-windows-64-0.14.36.tgz#21b4ce8b42a4efc63f4b58ec617f1302448aad26" + integrity sha512-+p4MuRZekVChAeueT1Y9LGkxrT5x7YYJxYE8ZOTcEfeUUN43vktSn6hUNsvxzzATrSgq5QqRdllkVBxWZg7KqQ== + +esbuild-windows-arm64@0.14.36: + version "0.14.36" + resolved "https://registry.yarnpkg.com/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.36.tgz#ba21546fecb7297667d0052d00150de22c044b24" + integrity sha512-fBB4WlDqV1m18EF/aheGYQkQZHfPHiHJSBYzXIo8yKehek+0BtBwo/4PNwKGJ5T0YK0oc8pBKjgwPbzSrPLb+Q== + +esbuild@^0.14.27: + version "0.14.36" + resolved "https://registry.npmmirror.com/esbuild/-/esbuild-0.14.36.tgz" + integrity sha512-HhFHPiRXGYOCRlrhpiVDYKcFJRdO0sBElZ668M4lh2ER0YgnkLxECuFe7uWCf23FrcLc59Pqr7dHkTqmRPDHmw== + optionalDependencies: + esbuild-android-64 "0.14.36" + esbuild-android-arm64 "0.14.36" + esbuild-darwin-64 "0.14.36" + esbuild-darwin-arm64 "0.14.36" + esbuild-freebsd-64 "0.14.36" + esbuild-freebsd-arm64 "0.14.36" + esbuild-linux-32 "0.14.36" + esbuild-linux-64 "0.14.36" + esbuild-linux-arm "0.14.36" + esbuild-linux-arm64 "0.14.36" + esbuild-linux-mips64le "0.14.36" + esbuild-linux-ppc64le "0.14.36" + esbuild-linux-riscv64 "0.14.36" + esbuild-linux-s390x "0.14.36" + esbuild-netbsd-64 "0.14.36" + esbuild-openbsd-64 "0.14.36" + esbuild-sunos-64 "0.14.36" + esbuild-windows-32 "0.14.36" + esbuild-windows-64 "0.14.36" + esbuild-windows-arm64 "0.14.36" + +escape-html@^1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/escape-html/-/escape-html-1.0.3.tgz" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +estree-walker@^2.0.2: + version "2.0.2" + resolved 
"https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + +follow-redirects@^1.14.8: + version "1.14.9" + resolved "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.14.9.tgz" + integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w== + +fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.2.tgz" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.1.tgz" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +graceful-fs@^4.1.2: + version "4.2.10" + resolved "https://registry.npmmirror.com/graceful-fs/-/graceful-fs-4.2.10.tgz" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/has/-/has-1.0.3.tgz" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +iconv-lite@^0.4.4: + version "0.4.24" + resolved "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.4.24.tgz" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +image-size@~0.5.0: + version "0.5.5" + resolved "https://registry.npmmirror.com/image-size/-/image-size-0.5.5.tgz" + integrity sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ== + +is-core-module@^2.8.1: + version "2.8.1" + resolved "https://registry.npmmirror.com/is-core-module/-/is-core-module-2.8.1.tgz" + 
integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA== + dependencies: + has "^1.0.3" + +is-plain-object@3.0.1: + version "3.0.1" + resolved "https://registry.npmmirror.com/is-plain-object/-/is-plain-object-3.0.1.tgz" + integrity sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g== + +is-what@^3.14.1: + version "3.14.1" + resolved "https://registry.npmmirror.com/is-what/-/is-what-3.14.1.tgz" + integrity sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA== + +js-audio-recorder@0.5.7: + version "0.5.7" + resolved "https://registry.npmmirror.com/js-audio-recorder/-/js-audio-recorder-0.5.7.tgz" + integrity sha512-DIlv30N86AYHr7zGHN0O7V/3Rd8Q6SIJ/MBzVJaT9STWTdhF4E/8fxCX6ZMgRSv8xmx6fEqcFFNPoofmxJD4+A== + +"js-tokens@^3.0.0 || ^4.0.0": + version "4.0.0" + resolved "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +lamejs@^1.2.1: + version "1.2.1" + resolved "https://registry.npmmirror.com/lamejs/-/lamejs-1.2.1.tgz" + integrity sha512-s7bxvjvYthw6oPLCm5pFxvA84wUROODB8jEO2+CE1adhKgrIvVOlmMgY8zyugxGrvRaDHNJanOiS21/emty6dQ== + dependencies: + use-strict "1.0.1" + +less@^4.1.2: + version "4.1.2" + resolved "https://registry.npmmirror.com/less/-/less-4.1.2.tgz" + integrity sha512-EoQp/Et7OSOVu0aJknJOtlXZsnr8XE8KwuzTHOLeVSEx8pVWUICc8Q0VYRHgzyjX78nMEyC/oztWFbgyhtNfDA== + dependencies: + copy-anything "^2.0.1" + parse-node-version "^1.0.1" + tslib "^2.3.0" + optionalDependencies: + errno "^0.1.1" + graceful-fs "^4.1.2" + image-size "~0.5.0" + make-dir "^2.1.0" + mime "^1.4.1" + needle "^2.5.2" + source-map "~0.6.0" + +lodash-es@^4.17.15, lodash-es@^4.17.21: + version "4.17.21" + resolved "https://registry.npmmirror.com/lodash-es/-/lodash-es-4.17.21.tgz" + integrity 
sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== + +lodash-unified@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/lodash-unified/-/lodash-unified-1.0.2.tgz" + integrity sha512-OGbEy+1P+UT26CYi4opY4gebD8cWRDxAT6MAObIVQMiqYdxZr1g3QHWCToVsm31x2NkLS4K3+MC2qInaRMa39g== + +lodash@^4.17.21: + version "4.17.21" + resolved "https://registry.npmmirror.com/lodash/-/lodash-4.17.21.tgz" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.0.0: + version "1.4.0" + resolved "https://registry.npmmirror.com/loose-envify/-/loose-envify-1.4.0.tgz" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +magic-string@^0.25.7: + version "0.25.9" + resolved "https://registry.npmmirror.com/magic-string/-/magic-string-0.25.9.tgz" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== + dependencies: + sourcemap-codec "^1.4.8" + +make-dir@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/make-dir/-/make-dir-2.1.0.tgz" + integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== + dependencies: + pify "^4.0.1" + semver "^5.6.0" + +memoize-one@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/memoize-one/-/memoize-one-6.0.0.tgz" + integrity sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw== + +mime@^1.4.1: + version "1.6.0" + resolved "https://registry.npmmirror.com/mime/-/mime-1.6.0.tgz" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +moment@^2.27.0: + version "2.29.3" + resolved "https://registry.npmmirror.com/moment/-/moment-2.29.3.tgz" + integrity 
sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +nanoid@^3.3.1: + version "3.3.2" + resolved "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.2.tgz" + integrity sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA== + +nanopop@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/nanopop/-/nanopop-2.1.0.tgz" + integrity sha512-jGTwpFRexSH+fxappnGQtN9dspgE2ipa1aOjtR24igG0pv6JCxImIAmrLRHX+zUF5+1wtsFVbKyfP51kIGAVNw== + +needle@^2.5.2: + version "2.9.1" + resolved "https://registry.npmmirror.com/needle/-/needle-2.9.1.tgz" + integrity sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ== + dependencies: + debug "^3.2.6" + iconv-lite "^0.4.4" + sax "^1.2.4" + +normalize-wheel-es@^1.1.2: + version "1.1.2" + resolved "https://registry.npmmirror.com/normalize-wheel-es/-/normalize-wheel-es-1.1.2.tgz" + integrity sha512-scX83plWJXYH1J4+BhAuIHadROzxX0UBF3+HuZNY2Ks8BciE7tSTQ+5JhTsvzjaO0/EJdm4JBGrfObKxFf3Png== + +omit.js@^2.0.0: + version "2.0.2" + resolved "https://registry.npmmirror.com/omit.js/-/omit.js-2.0.2.tgz" + integrity sha512-hJmu9D+bNB40YpL9jYebQl4lsTW6yEHRTroJzNLqQJYHm7c+NQnJGfZmIWh8S3q3KoaxV1aLhV6B3+0N0/kyJg== + +parse-node-version@^1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/parse-node-version/-/parse-node-version-1.0.1.tgz" + integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.npmmirror.com/path-parse/-/path-parse-1.0.7.tgz" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +picocolors@^1.0.0: + version "1.0.0" + 
resolved "https://registry.npmmirror.com/picocolors/-/picocolors-1.0.0.tgz" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.npmmirror.com/pify/-/pify-4.0.1.tgz" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + +postcss@^8.1.10, postcss@^8.4.12: + version "8.4.12" + resolved "https://registry.npmmirror.com/postcss/-/postcss-8.4.12.tgz" + integrity sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg== + dependencies: + nanoid "^3.3.1" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prr@~1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/prr/-/prr-1.0.1.tgz" + integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== + +regenerator-runtime@^0.13.4: + version "0.13.9" + resolved "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +resize-observer-polyfill@^1.5.1: + version "1.5.1" + resolved "https://registry.npmmirror.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz" + integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== + +resolve@^1.22.0: + version "1.22.0" + resolved "https://registry.npmmirror.com/resolve/-/resolve-1.22.0.tgz" + integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== + dependencies: + is-core-module "^2.8.1" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +rollup@^2.59.0: + version "2.70.1" + resolved "https://registry.npmmirror.com/rollup/-/rollup-2.70.1.tgz" + integrity sha512-CRYsI5EuzLbXdxC6RnYhOuRdtz4bhejPMSWjsFLfVM/7w/85n2szZv6yExqUXsBdz5KT8eoubeyDUDjhLHEslA== + 
optionalDependencies: + fsevents "~2.3.2" + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sax@^1.2.4: + version "1.2.4" + resolved "https://registry.npmmirror.com/sax/-/sax-1.2.4.tgz" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +scroll-into-view-if-needed@^2.2.25: + version "2.2.29" + resolved "https://registry.npmmirror.com/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.29.tgz" + integrity sha512-hxpAR6AN+Gh53AdAimHM6C8oTN1ppwVZITihix+WqalywBeFcQ6LdQP5ABNl26nX8GTEL7VT+b8lKpdqq65wXg== + dependencies: + compute-scroll-into-view "^1.0.17" + +semver@^5.6.0: + version "5.7.1" + resolved "https://registry.npmmirror.com/semver/-/semver-5.7.1.tgz" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +shallow-equal@^1.0.0: + version "1.2.1" + resolved "https://registry.npmmirror.com/shallow-equal/-/shallow-equal-1.2.1.tgz" + integrity sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA== + +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.0.2.tgz" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map@^0.6.1, source-map@~0.6.0: + version "0.6.1" + resolved "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sourcemap-codec@^1.4.8: + version "1.4.8" + resolved "https://registry.npmmirror.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + 
+supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +tslib@^2.3.0: + version "2.4.0" + resolved "https://registry.npmmirror.com/tslib/-/tslib-2.4.0.tgz" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +use-strict@1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/use-strict/-/use-strict-1.0.1.tgz" + integrity sha512-IeiWvvEXfW5ltKVMkxq6FvNf2LojMKvB2OCeja6+ct24S1XOmQw2dGr2JyndwACWAGJva9B7yPHwAmeA9QCqAQ== + +vite@^2.9.0: + version "2.9.1" + resolved "https://registry.npmmirror.com/vite/-/vite-2.9.1.tgz" + integrity sha512-vSlsSdOYGcYEJfkQ/NeLXgnRv5zZfpAsdztkIrs7AZHV8RCMZQkwjo4DS5BnrYTqoWqLoUe1Cah4aVO4oNNqCQ== + dependencies: + esbuild "^0.14.27" + postcss "^8.4.12" + resolve "^1.22.0" + rollup "^2.59.0" + optionalDependencies: + fsevents "~2.3.2" + +vue-demi@*: + version "0.12.5" + resolved "https://registry.npmmirror.com/vue-demi/-/vue-demi-0.12.5.tgz" + integrity sha512-BREuTgTYlUr0zw0EZn3hnhC3I6gPWv+Kwh4MCih6QcAeaTlaIX0DwOVN0wHej7hSvDPecz4jygy/idsgKfW58Q== + +vue-types@^3.0.0: + version "3.0.2" + resolved "https://registry.npmmirror.com/vue-types/-/vue-types-3.0.2.tgz" + integrity sha512-IwUC0Aq2zwaXqy74h4WCvFCUtoV0iSWr0snWnE9TnU18S66GAQyqQbRf2qfJtUuiFsBf6qp0MEwdonlwznlcrw== + dependencies: + is-plain-object "3.0.1" + +vue@^3.2.25: + version "3.2.32" + resolved "https://registry.npmmirror.com/vue/-/vue-3.2.32.tgz" + integrity sha512-6L3jKZApF042OgbCkh+HcFeAkiYi3Lovi8wNhWqIK98Pi5efAMLZzRHgi91v+60oIRxdJsGS9sTMsb+yDpY8Eg== + dependencies: + "@vue/compiler-dom" "3.2.32" + "@vue/compiler-sfc" "3.2.32" + "@vue/runtime-dom" "3.2.32" + "@vue/server-renderer" "3.2.32" + "@vue/shared" "3.2.32" + +warning@^4.0.0: + version "4.0.3" + resolved 
"https://registry.npmmirror.com/warning/-/warning-4.0.3.tgz" + integrity sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w== + dependencies: + loose-envify "^1.0.0" diff --git a/demos/speech_web_demo/接口文档.md b/demos/speech_web_demo/接口文档.md new file mode 100644 index 00000000..a811a3f4 --- /dev/null +++ b/demos/speech_web_demo/接口文档.md @@ -0,0 +1,406 @@ +# 接口文档 + +开启服务后可参照: + +http://0.0.0.0:8010/docs + +## ASR + +### 【POST】/asr/offline + +说明:上传16k,16bit wav文件,返回 offline 语音识别模型识别结果 + +返回: JSON + +前端接口: ASR-端到端识别,音频文件识别;语音指令-录音上传 + +示例: + +```json +{ + "code": 0, + "result": "你也喜欢这个天气吗", + "message": "ok" +} +``` + +### 【POST】/asr/offlinefile + +说明:上传16k,16bit wav文件,返回 offline 语音识别模型识别结果 + wav数据的base64 + +返回: JSON + +前端接口: 音频文件识别(播放这段base64还原后记得添加wav头,采样率16k, int16,添加后才能播放) + +示例: + +```json +{ + "code": 0, + "result": { + "asr_result": "今天天气真好", + "wav_base64": "///+//3//f/8/////v/////////////////+/wAA//8AAAEAAQACAAIAAQABAP" + }, + "message": "ok" +} +``` + + +### 【POST】/asr/collectEnv + +说明: 通过采集环境噪音,上传16k, int16 wav文件,来生成后台VAD的能量阈值, 返回阈值结果 + +前端接口:ASR-环境采样 + +返回: JSON + +```json +{ + "code": 0, + "result": 3624.93505859375, + "message": "采集环境噪音成功" +} +``` + +### 【GET】/asr/stopRecord + +说明:通过 GET 请求 /asr/stopRecord, 后台停止接收 offlineStream 中通过 WS协议 上传的数据 + +前端接口:语音聊天-暂停录音(获取NLP,播放TTS时暂停) + +返回: JSON + +```JSON +{ + "code": 0, + "result": null, + "message": "停止成功" +} +``` + +### 【GET】/asr/resumeRecord + +说明:通过 GET 请求 /asr/resumeRecord, 后台停止接收 offlineStream 中通过 WS协议 上传的数据 + +前端接口:语音聊天-恢复录音(TTS播放完毕时,告诉后台恢复录音) + +返回: JSON + +```JSON +{ + "code": 0, + "result": null, + "message": "Online录音恢复" +} +``` + +### 【Websocket】/ws/asr/offlineStream + +说明:通过 WS 协议,将前端音频持续上传到后台,前端采集 16k,Int16 类型的PCM片段,持续上传到后端 + +前端接口:语音聊天-开始录音,持续将麦克风语音传给后端,后端推送语音识别结果 + +返回:后端返回识别结果,offline模型识别结果, 由WS推送 + + +### 【Websocket】/ws/asr/onlineStream + +说明:通过 WS 协议,将前端音频持续上传到后台,前端采集 16k,Int16 类型的PCM片段,持续上传到后端 + +前端接口:ASR-流式识别开始录音,持续将麦克风语音传给后端,后端推送语音识别结果 + 
+返回:后端返回识别结果,online模型识别结果, 由WS推送 + +## NLP + +### 【POST】/nlp/chat + +说明:返回闲聊对话的结果 + +前端接口:语音聊天-获取到ASR识别结果后,向后端获取闲聊文本 + +上传示例: + +```json +{ + "chat": "天气非常棒" +} +``` + +返回示例: + +```json +{ + "code": 0, + "result": "是的,我也挺喜欢的", + "message": "ok" +} +``` + + +### 【POST】/nlp/ie + +说明:返回信息抽取结果 + +前端接口:语音指令-向后端获取信息抽取结果 + +上传示例: + +```json +{ + "chat": "今天我从马来西亚出发去香港花了五十万元" +} +``` + +返回示例: + +```json +{ + "code": 0, + "result": [ + { + "时间": [ + { + "text": "今天", + "start": 0, + "end": 2, + "probability": 0.9817976247505698 + } + ], + "出发地": [ + { + "text": "马来西亚", + "start": 4, + "end": 8, + "probability": 0.974892389414169 + } + ], + "目的地": [ + { + "text": "马来西亚", + "start": 4, + "end": 8, + "probability": 0.7347504438136951 + } + ], + "费用": [ + { + "text": "五十万元", + "start": 15, + "end": 19, + "probability": 0.9679076530644402 + } + ] + } + ], + "message": "ok" +} +``` + + +## TTS + +### 【POST】/tts/offline + +说明:获取TTS离线模型音频 + +前端接口:TTS-端到端合成 + +上传示例: + +```json +{ + "text": "天气非常棒" +} +``` + +返回示例:对应音频对应的 base64 编码 + +```json +{ + "code": 0, + "result": "UklGRrzQAABXQVZFZm10IBAAAAABAAEAwF0AAIC7AAACABAAZGF0YZjQAAADAP7/BAADAAAA...", + "message": "ok" +} +``` + +### 【POST】/tts/online + +说明:流式获取语音合成音频 + +前端接口:流式合成 + +上传示例: +```json +{ + "text": "天气非常棒" +} + +``` + +返回示例: + +二进制PCM片段,16k Int 16类型 + +## VPR + +### 【POST】/vpr/enroll + +说明:声纹注册,通过表单上传 spk_id(字符串,非空), 与 audio (文件) + +前端接口:声纹识别-声纹注册 + +上传示例: + +```text +curl -X 'POST' \ + 'http://0.0.0.0:8010/vpr/enroll' \ + -H 'accept: application/json' \ + -H 'Content-Type: multipart/form-data' \ + -F 'spk_id=啦啦啦啦' \ + -F 'audio=@demo_16k.wav;type=audio/wav' +``` + +返回示例: + +```json +{ + "status": true, + "msg": "Successfully enroll data!" 
+} +``` + +### 【POST】/vpr/recog + +说明:声纹识别,识别文件,提取文件的声纹信息做比对 音频 16k, int 16 wav格式 + +前端接口:声纹识别-上传音频,返回声纹识别结果 + +上传示例: + +```shell +curl -X 'POST' \ + 'http://0.0.0.0:8010/vpr/recog' \ + -H 'accept: application/json' \ + -H 'Content-Type: multipart/form-data' \ + -F 'audio=@demo_16k.wav;type=audio/wav' +``` + +返回示例: + +```json +[ + [ + "啦啦啦啦", + [ + "", + 100 + ] + ], + [ + "test1", + [ + "", + 11.64 + ] + ], + [ + "test2", + [ + "", + 6.09 + ] + ] +] + +``` + + +### 【POST】/vpr/del + +说明: 根据 spk_id 删除用户数据 + +前端接口:声纹识别-删除用户数据 + +上传示例: +```json +{ + "spk_id":"啦啦啦啦" +} +``` + +返回示例 + +```json +{ + "status": true, + "msg": "Successfully delete data!" +} + +``` + + +### 【GET】/vpr/list + +说明:查询用户列表数据,无需参数,返回 spk_id 与 vpr_id + +前端接口:声纹识别-获取声纹数据列表 + +返回示例: + +```json +[ + [ + "test1", + "test2" + ], + [ + 9, + 10 + ] +] + +``` + + +### 【GET】/vpr/data + +说明: 根据 vpr_id 获取用户vpr时使用的音频 + +前端接口:声纹识别-获取vpr对应的音频 + +访问示例: + +```shell +curl -X 'GET' \ + 'http://0.0.0.0:8010/vpr/data?vprId=9' \ + -H 'accept: application/json' +``` + +返回示例: + +对应音频文件 + +### 【GET】/vpr/database64 + +说明: 根据 vpr_id 获取用户vpr时注册使用音频转换成 16k, int16 类型的数组,返回base64编码 + +前端接口:声纹识别-获取vpr对应的音频(注意:播放时需要添加 wav头,16k,int16, 可参考tts播放时添加wav的方式,注意更改采样率) + +访问示例: + +```shell +curl -X 'GET' \ + 'http://localhost:8010/vpr/database64?vprId=12' \ + -H 'accept: application/json' +``` + +返回示例: +```json +{ + "code": 0, + "result":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "message": "ok" +``` + + From 27f2833bf7c7f0d6682753c3fc0e2cb5d5ede37f Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Thu, 16 Jun 2022 07:10:32 +0000 Subject: [PATCH 10/30] format --- .../server/engine/asr/online/paddleinference/asr_engine.py | 1 + 1 file changed, 1 insertion(+) diff --git a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py index b3b31a5a..a450e430 100644 --- a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py +++ 
b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py @@ -352,6 +352,7 @@ class PaddleASRConnectionHanddler: def rescoring(self): ... + class ASRServerExecutor(ASRExecutor): def __init__(self): super().__init__() From 729fe6a09624ff75265f9909d6d048d96531221d Mon Sep 17 00:00:00 2001 From: iftaken Date: Thu, 16 Jun 2022 15:12:06 +0800 Subject: [PATCH 11/30] rename speech_web --- demos/{speech_web_demo => speech_web}/.gitignore | 0 demos/{speech_web_demo => speech_web}/README.MD | 0 .../speech_server/conf/tts_online_application.yaml | 0 ...ws_conformer_wenetspeech_application_faster.yaml | 0 .../speech_server/main.py | 0 .../speech_server/requirements.txt | 0 .../speech_server/src/AudioManeger.py | 0 .../speech_server/src/SpeechBase/asr.py | 0 .../speech_server/src/SpeechBase/nlp.py | 0 .../speech_server/src/SpeechBase/sql_helper.py | 0 .../speech_server/src/SpeechBase/tts.py | 0 .../speech_server/src/SpeechBase/vpr.py | 0 .../speech_server/src/SpeechBase/vpr_encode.py | 0 .../speech_server/src/WebsocketManeger.py | 0 .../speech_server/src/robot.py | 0 .../speech_server/src/util.py | 0 .../web_client/.gitignore | 0 .../web_client/README.md | 0 .../web_client/index.html | 0 .../web_client/package-lock.json | 0 .../web_client/package.json | 0 .../web_client/public/favicon.ico | Bin .../web_client/src/App.vue | 0 .../web_client/src/api/API.js | 0 .../web_client/src/api/ApiASR.js | 0 .../web_client/src/api/ApiNLP.js | 0 .../web_client/src/api/ApiTTS.js | 0 .../web_client/src/api/ApiVPR.js | 0 .../src/assets/image/ic_大-上传文件.svg | 0 .../src/assets/image/ic_大-声音波浪.svg | 0 .../web_client/src/assets/image/ic_大-语音.svg | 0 .../src/assets/image/ic_小-录制语音.svg | 0 .../web_client/src/assets/image/ic_小-结束.svg | 0 .../web_client/src/assets/image/ic_开始聊天.svg | 0 .../src/assets/image/ic_开始聊天_hover.svg | 0 .../src/assets/image/ic_播放(按钮).svg | 0 .../src/assets/image/ic_暂停(按钮).svg | 0 .../web_client/src/assets/image/ic_更换示例.svg | 0 .../src/assets/image/icon_小-声音波浪.svg | 0 
.../assets/image/icon_录制声音小语音1.svg | 0 .../src/assets/image/在线体验-背景@2x.png | Bin .../web_client/src/assets/image/场景齐全@3x.png | Bin .../web_client/src/assets/image/教程丰富@3x.png | Bin .../web_client/src/assets/image/模型全面@3x.png | Bin .../src/assets/image/步骤-箭头切图@2x.png | Bin .../web_client/src/assets/image/用户头像@2x.png | Bin .../web_client/src/assets/image/飞桨头像@2x.png | Bin .../web_client/src/assets/logo.png | Bin .../src/components/Content/Header/Header.vue | 0 .../src/components/Content/Header/style.less | 0 .../web_client/src/components/Content/Tail/Tail.vue | 0 .../src/components/Content/Tail/style.less | 0 .../web_client/src/components/Experience.vue | 0 .../web_client/src/components/SubMenu/ASR/ASR.vue | 0 .../web_client/src/components/SubMenu/ASR/ASRT.vue | 0 .../ASR/AudioFile/AudioFileIdentification.vue | 0 .../src/components/SubMenu/ASR/AudioFile/style.less | 0 .../SubMenu/ASR/EndToEnd/EndToEndIdentification.vue | 0 .../src/components/SubMenu/ASR/EndToEnd/style.less | 0 .../components/SubMenu/ASR/RealTime/RealTime.vue | 0 .../src/components/SubMenu/ASR/RealTime/style.less | 0 .../src/components/SubMenu/ASR/style.less | 0 .../src/components/SubMenu/ChatBot/Chat.vue | 0 .../src/components/SubMenu/ChatBot/ChatT.vue | 0 .../src/components/SubMenu/ChatBot/style.less | 0 .../web_client/src/components/SubMenu/IE/IE.vue | 0 .../web_client/src/components/SubMenu/IE/IET.vue | 0 .../web_client/src/components/SubMenu/IE/style.less | 0 .../web_client/src/components/SubMenu/TTS/TTS.vue | 0 .../web_client/src/components/SubMenu/TTS/TTST.vue | 0 .../src/components/SubMenu/TTS/style.less | 0 .../web_client/src/components/SubMenu/VPR/VPR.vue | 0 .../web_client/src/components/SubMenu/VPR/VPRT.vue | 0 .../src/components/SubMenu/VPR/style.less | 0 .../web_client/src/components/style.less | 0 .../web_client/src/main.js | 0 .../web_client/vite.config.js | 0 .../web_client/yarn.lock | 0 .../{speech_web_demo => speech_web}/接口文档.md | 0 79 files changed, 0 insertions(+), 0 deletions(-) 
rename demos/{speech_web_demo => speech_web}/.gitignore (100%) rename demos/{speech_web_demo => speech_web}/README.MD (100%) rename demos/{speech_web_demo => speech_web}/speech_server/conf/tts_online_application.yaml (100%) rename demos/{speech_web_demo => speech_web}/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml (100%) rename demos/{speech_web_demo => speech_web}/speech_server/main.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/requirements.txt (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/AudioManeger.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/SpeechBase/asr.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/SpeechBase/nlp.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/SpeechBase/sql_helper.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/SpeechBase/tts.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/SpeechBase/vpr.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/SpeechBase/vpr_encode.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/WebsocketManeger.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/robot.py (100%) rename demos/{speech_web_demo => speech_web}/speech_server/src/util.py (100%) rename demos/{speech_web_demo => speech_web}/web_client/.gitignore (100%) rename demos/{speech_web_demo => speech_web}/web_client/README.md (100%) rename demos/{speech_web_demo => speech_web}/web_client/index.html (100%) rename demos/{speech_web_demo => speech_web}/web_client/package-lock.json (100%) rename demos/{speech_web_demo => speech_web}/web_client/package.json (100%) rename demos/{speech_web_demo => speech_web}/web_client/public/favicon.ico (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/App.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/api/API.js (100%) rename 
demos/{speech_web_demo => speech_web}/web_client/src/api/ApiASR.js (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/api/ApiNLP.js (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/api/ApiTTS.js (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/api/ApiVPR.js (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_大-上传文件.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_大-声音波浪.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_大-语音.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_小-录制语音.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_小-结束.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_开始聊天.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_开始聊天_hover.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_播放(按钮).svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_暂停(按钮).svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/ic_更换示例.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/icon_小-声音波浪.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/icon_录制声音小语音1.svg (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/在线体验-背景@2x.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/场景齐全@3x.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/教程丰富@3x.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/模型全面@3x.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/步骤-箭头切图@2x.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/用户头像@2x.png 
(100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/image/飞桨头像@2x.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/assets/logo.png (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/Content/Header/Header.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/Content/Header/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/Content/Tail/Tail.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/Content/Tail/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/Experience.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/ASR.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/ASRT.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/AudioFile/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/EndToEnd/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/RealTime/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ASR/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ChatBot/Chat.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ChatBot/ChatT.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/ChatBot/style.less (100%) rename 
demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/IE/IE.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/IE/IET.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/IE/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/TTS/TTS.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/TTS/TTST.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/TTS/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/VPR/VPR.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/VPR/VPRT.vue (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/SubMenu/VPR/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/components/style.less (100%) rename demos/{speech_web_demo => speech_web}/web_client/src/main.js (100%) rename demos/{speech_web_demo => speech_web}/web_client/vite.config.js (100%) rename demos/{speech_web_demo => speech_web}/web_client/yarn.lock (100%) rename demos/{speech_web_demo => speech_web}/接口文档.md (100%) diff --git a/demos/speech_web_demo/.gitignore b/demos/speech_web/.gitignore similarity index 100% rename from demos/speech_web_demo/.gitignore rename to demos/speech_web/.gitignore diff --git a/demos/speech_web_demo/README.MD b/demos/speech_web/README.MD similarity index 100% rename from demos/speech_web_demo/README.MD rename to demos/speech_web/README.MD diff --git a/demos/speech_web_demo/speech_server/conf/tts_online_application.yaml b/demos/speech_web/speech_server/conf/tts_online_application.yaml similarity index 100% rename from demos/speech_web_demo/speech_server/conf/tts_online_application.yaml rename to demos/speech_web/speech_server/conf/tts_online_application.yaml diff --git 
a/demos/speech_web_demo/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml b/demos/speech_web/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml similarity index 100% rename from demos/speech_web_demo/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml rename to demos/speech_web/speech_server/conf/ws_conformer_wenetspeech_application_faster.yaml diff --git a/demos/speech_web_demo/speech_server/main.py b/demos/speech_web/speech_server/main.py similarity index 100% rename from demos/speech_web_demo/speech_server/main.py rename to demos/speech_web/speech_server/main.py diff --git a/demos/speech_web_demo/speech_server/requirements.txt b/demos/speech_web/speech_server/requirements.txt similarity index 100% rename from demos/speech_web_demo/speech_server/requirements.txt rename to demos/speech_web/speech_server/requirements.txt diff --git a/demos/speech_web_demo/speech_server/src/AudioManeger.py b/demos/speech_web/speech_server/src/AudioManeger.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/AudioManeger.py rename to demos/speech_web/speech_server/src/AudioManeger.py diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/asr.py b/demos/speech_web/speech_server/src/SpeechBase/asr.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/SpeechBase/asr.py rename to demos/speech_web/speech_server/src/SpeechBase/asr.py diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/nlp.py b/demos/speech_web/speech_server/src/SpeechBase/nlp.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/SpeechBase/nlp.py rename to demos/speech_web/speech_server/src/SpeechBase/nlp.py diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/sql_helper.py b/demos/speech_web/speech_server/src/SpeechBase/sql_helper.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/SpeechBase/sql_helper.py rename to 
demos/speech_web/speech_server/src/SpeechBase/sql_helper.py diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/tts.py b/demos/speech_web/speech_server/src/SpeechBase/tts.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/SpeechBase/tts.py rename to demos/speech_web/speech_server/src/SpeechBase/tts.py diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/vpr.py b/demos/speech_web/speech_server/src/SpeechBase/vpr.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/SpeechBase/vpr.py rename to demos/speech_web/speech_server/src/SpeechBase/vpr.py diff --git a/demos/speech_web_demo/speech_server/src/SpeechBase/vpr_encode.py b/demos/speech_web/speech_server/src/SpeechBase/vpr_encode.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/SpeechBase/vpr_encode.py rename to demos/speech_web/speech_server/src/SpeechBase/vpr_encode.py diff --git a/demos/speech_web_demo/speech_server/src/WebsocketManeger.py b/demos/speech_web/speech_server/src/WebsocketManeger.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/WebsocketManeger.py rename to demos/speech_web/speech_server/src/WebsocketManeger.py diff --git a/demos/speech_web_demo/speech_server/src/robot.py b/demos/speech_web/speech_server/src/robot.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/robot.py rename to demos/speech_web/speech_server/src/robot.py diff --git a/demos/speech_web_demo/speech_server/src/util.py b/demos/speech_web/speech_server/src/util.py similarity index 100% rename from demos/speech_web_demo/speech_server/src/util.py rename to demos/speech_web/speech_server/src/util.py diff --git a/demos/speech_web_demo/web_client/.gitignore b/demos/speech_web/web_client/.gitignore similarity index 100% rename from demos/speech_web_demo/web_client/.gitignore rename to demos/speech_web/web_client/.gitignore diff --git a/demos/speech_web_demo/web_client/README.md 
b/demos/speech_web/web_client/README.md similarity index 100% rename from demos/speech_web_demo/web_client/README.md rename to demos/speech_web/web_client/README.md diff --git a/demos/speech_web_demo/web_client/index.html b/demos/speech_web/web_client/index.html similarity index 100% rename from demos/speech_web_demo/web_client/index.html rename to demos/speech_web/web_client/index.html diff --git a/demos/speech_web_demo/web_client/package-lock.json b/demos/speech_web/web_client/package-lock.json similarity index 100% rename from demos/speech_web_demo/web_client/package-lock.json rename to demos/speech_web/web_client/package-lock.json diff --git a/demos/speech_web_demo/web_client/package.json b/demos/speech_web/web_client/package.json similarity index 100% rename from demos/speech_web_demo/web_client/package.json rename to demos/speech_web/web_client/package.json diff --git a/demos/speech_web_demo/web_client/public/favicon.ico b/demos/speech_web/web_client/public/favicon.ico similarity index 100% rename from demos/speech_web_demo/web_client/public/favicon.ico rename to demos/speech_web/web_client/public/favicon.ico diff --git a/demos/speech_web_demo/web_client/src/App.vue b/demos/speech_web/web_client/src/App.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/App.vue rename to demos/speech_web/web_client/src/App.vue diff --git a/demos/speech_web_demo/web_client/src/api/API.js b/demos/speech_web/web_client/src/api/API.js similarity index 100% rename from demos/speech_web_demo/web_client/src/api/API.js rename to demos/speech_web/web_client/src/api/API.js diff --git a/demos/speech_web_demo/web_client/src/api/ApiASR.js b/demos/speech_web/web_client/src/api/ApiASR.js similarity index 100% rename from demos/speech_web_demo/web_client/src/api/ApiASR.js rename to demos/speech_web/web_client/src/api/ApiASR.js diff --git a/demos/speech_web_demo/web_client/src/api/ApiNLP.js b/demos/speech_web/web_client/src/api/ApiNLP.js similarity index 100% rename 
from demos/speech_web_demo/web_client/src/api/ApiNLP.js rename to demos/speech_web/web_client/src/api/ApiNLP.js diff --git a/demos/speech_web_demo/web_client/src/api/ApiTTS.js b/demos/speech_web/web_client/src/api/ApiTTS.js similarity index 100% rename from demos/speech_web_demo/web_client/src/api/ApiTTS.js rename to demos/speech_web/web_client/src/api/ApiTTS.js diff --git a/demos/speech_web_demo/web_client/src/api/ApiVPR.js b/demos/speech_web/web_client/src/api/ApiVPR.js similarity index 100% rename from demos/speech_web_demo/web_client/src/api/ApiVPR.js rename to demos/speech_web/web_client/src/api/ApiVPR.js diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_大-上传文件.svg b/demos/speech_web/web_client/src/assets/image/ic_大-上传文件.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_大-上传文件.svg rename to demos/speech_web/web_client/src/assets/image/ic_大-上传文件.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_大-声音波浪.svg b/demos/speech_web/web_client/src/assets/image/ic_大-声音波浪.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_大-声音波浪.svg rename to demos/speech_web/web_client/src/assets/image/ic_大-声音波浪.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_大-语音.svg b/demos/speech_web/web_client/src/assets/image/ic_大-语音.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_大-语音.svg rename to demos/speech_web/web_client/src/assets/image/ic_大-语音.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_小-录制语音.svg b/demos/speech_web/web_client/src/assets/image/ic_小-录制语音.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_小-录制语音.svg rename to demos/speech_web/web_client/src/assets/image/ic_小-录制语音.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_小-结束.svg b/demos/speech_web/web_client/src/assets/image/ic_小-结束.svg similarity index 100% rename from 
demos/speech_web_demo/web_client/src/assets/image/ic_小-结束.svg rename to demos/speech_web/web_client/src/assets/image/ic_小-结束.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天.svg b/demos/speech_web/web_client/src/assets/image/ic_开始聊天.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天.svg rename to demos/speech_web/web_client/src/assets/image/ic_开始聊天.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天_hover.svg b/demos/speech_web/web_client/src/assets/image/ic_开始聊天_hover.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_开始聊天_hover.svg rename to demos/speech_web/web_client/src/assets/image/ic_开始聊天_hover.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_播放(按钮).svg b/demos/speech_web/web_client/src/assets/image/ic_播放(按钮).svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_播放(按钮).svg rename to demos/speech_web/web_client/src/assets/image/ic_播放(按钮).svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_暂停(按钮).svg b/demos/speech_web/web_client/src/assets/image/ic_暂停(按钮).svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_暂停(按钮).svg rename to demos/speech_web/web_client/src/assets/image/ic_暂停(按钮).svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/ic_更换示例.svg b/demos/speech_web/web_client/src/assets/image/ic_更换示例.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/ic_更换示例.svg rename to demos/speech_web/web_client/src/assets/image/ic_更换示例.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/icon_小-声音波浪.svg b/demos/speech_web/web_client/src/assets/image/icon_小-声音波浪.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/icon_小-声音波浪.svg rename to demos/speech_web/web_client/src/assets/image/icon_小-声音波浪.svg diff --git 
a/demos/speech_web_demo/web_client/src/assets/image/icon_录制声音小语音1.svg b/demos/speech_web/web_client/src/assets/image/icon_录制声音小语音1.svg similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/icon_录制声音小语音1.svg rename to demos/speech_web/web_client/src/assets/image/icon_录制声音小语音1.svg diff --git a/demos/speech_web_demo/web_client/src/assets/image/在线体验-背景@2x.png b/demos/speech_web/web_client/src/assets/image/在线体验-背景@2x.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/在线体验-背景@2x.png rename to demos/speech_web/web_client/src/assets/image/在线体验-背景@2x.png diff --git a/demos/speech_web_demo/web_client/src/assets/image/场景齐全@3x.png b/demos/speech_web/web_client/src/assets/image/场景齐全@3x.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/场景齐全@3x.png rename to demos/speech_web/web_client/src/assets/image/场景齐全@3x.png diff --git a/demos/speech_web_demo/web_client/src/assets/image/教程丰富@3x.png b/demos/speech_web/web_client/src/assets/image/教程丰富@3x.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/教程丰富@3x.png rename to demos/speech_web/web_client/src/assets/image/教程丰富@3x.png diff --git a/demos/speech_web_demo/web_client/src/assets/image/模型全面@3x.png b/demos/speech_web/web_client/src/assets/image/模型全面@3x.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/模型全面@3x.png rename to demos/speech_web/web_client/src/assets/image/模型全面@3x.png diff --git a/demos/speech_web_demo/web_client/src/assets/image/步骤-箭头切图@2x.png b/demos/speech_web/web_client/src/assets/image/步骤-箭头切图@2x.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/步骤-箭头切图@2x.png rename to demos/speech_web/web_client/src/assets/image/步骤-箭头切图@2x.png diff --git a/demos/speech_web_demo/web_client/src/assets/image/用户头像@2x.png b/demos/speech_web/web_client/src/assets/image/用户头像@2x.png similarity index 100% rename from 
demos/speech_web_demo/web_client/src/assets/image/用户头像@2x.png rename to demos/speech_web/web_client/src/assets/image/用户头像@2x.png diff --git a/demos/speech_web_demo/web_client/src/assets/image/飞桨头像@2x.png b/demos/speech_web/web_client/src/assets/image/飞桨头像@2x.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/image/飞桨头像@2x.png rename to demos/speech_web/web_client/src/assets/image/飞桨头像@2x.png diff --git a/demos/speech_web_demo/web_client/src/assets/logo.png b/demos/speech_web/web_client/src/assets/logo.png similarity index 100% rename from demos/speech_web_demo/web_client/src/assets/logo.png rename to demos/speech_web/web_client/src/assets/logo.png diff --git a/demos/speech_web_demo/web_client/src/components/Content/Header/Header.vue b/demos/speech_web/web_client/src/components/Content/Header/Header.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/Content/Header/Header.vue rename to demos/speech_web/web_client/src/components/Content/Header/Header.vue diff --git a/demos/speech_web_demo/web_client/src/components/Content/Header/style.less b/demos/speech_web/web_client/src/components/Content/Header/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/Content/Header/style.less rename to demos/speech_web/web_client/src/components/Content/Header/style.less diff --git a/demos/speech_web_demo/web_client/src/components/Content/Tail/Tail.vue b/demos/speech_web/web_client/src/components/Content/Tail/Tail.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/Content/Tail/Tail.vue rename to demos/speech_web/web_client/src/components/Content/Tail/Tail.vue diff --git a/demos/speech_web_demo/web_client/src/components/Content/Tail/style.less b/demos/speech_web/web_client/src/components/Content/Tail/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/Content/Tail/style.less rename to 
demos/speech_web/web_client/src/components/Content/Tail/style.less diff --git a/demos/speech_web_demo/web_client/src/components/Experience.vue b/demos/speech_web/web_client/src/components/Experience.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/Experience.vue rename to demos/speech_web/web_client/src/components/Experience.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASR.vue b/demos/speech_web/web_client/src/components/SubMenu/ASR/ASR.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASR.vue rename to demos/speech_web/web_client/src/components/SubMenu/ASR/ASR.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASRT.vue b/demos/speech_web/web_client/src/components/SubMenu/ASR/ASRT.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/ASRT.vue rename to demos/speech_web/web_client/src/components/SubMenu/ASR/ASRT.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue b/demos/speech_web/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue rename to demos/speech_web/web_client/src/components/SubMenu/ASR/AudioFile/AudioFileIdentification.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/style.less b/demos/speech_web/web_client/src/components/SubMenu/ASR/AudioFile/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/AudioFile/style.less rename to demos/speech_web/web_client/src/components/SubMenu/ASR/AudioFile/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue 
b/demos/speech_web/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue rename to demos/speech_web/web_client/src/components/SubMenu/ASR/EndToEnd/EndToEndIdentification.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/style.less b/demos/speech_web/web_client/src/components/SubMenu/ASR/EndToEnd/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/EndToEnd/style.less rename to demos/speech_web/web_client/src/components/SubMenu/ASR/EndToEnd/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue b/demos/speech_web/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue rename to demos/speech_web/web_client/src/components/SubMenu/ASR/RealTime/RealTime.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/style.less b/demos/speech_web/web_client/src/components/SubMenu/ASR/RealTime/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/RealTime/style.less rename to demos/speech_web/web_client/src/components/SubMenu/ASR/RealTime/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ASR/style.less b/demos/speech_web/web_client/src/components/SubMenu/ASR/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ASR/style.less rename to demos/speech_web/web_client/src/components/SubMenu/ASR/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/Chat.vue b/demos/speech_web/web_client/src/components/SubMenu/ChatBot/Chat.vue similarity index 100% rename from 
demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/Chat.vue rename to demos/speech_web/web_client/src/components/SubMenu/ChatBot/Chat.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/ChatT.vue b/demos/speech_web/web_client/src/components/SubMenu/ChatBot/ChatT.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/ChatT.vue rename to demos/speech_web/web_client/src/components/SubMenu/ChatBot/ChatT.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/style.less b/demos/speech_web/web_client/src/components/SubMenu/ChatBot/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/ChatBot/style.less rename to demos/speech_web/web_client/src/components/SubMenu/ChatBot/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IE.vue b/demos/speech_web/web_client/src/components/SubMenu/IE/IE.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/IE/IE.vue rename to demos/speech_web/web_client/src/components/SubMenu/IE/IE.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/IE/IET.vue b/demos/speech_web/web_client/src/components/SubMenu/IE/IET.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/IE/IET.vue rename to demos/speech_web/web_client/src/components/SubMenu/IE/IET.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/IE/style.less b/demos/speech_web/web_client/src/components/SubMenu/IE/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/IE/style.less rename to demos/speech_web/web_client/src/components/SubMenu/IE/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTS.vue b/demos/speech_web/web_client/src/components/SubMenu/TTS/TTS.vue similarity index 100% rename from 
demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTS.vue rename to demos/speech_web/web_client/src/components/SubMenu/TTS/TTS.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTST.vue b/demos/speech_web/web_client/src/components/SubMenu/TTS/TTST.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/TTS/TTST.vue rename to demos/speech_web/web_client/src/components/SubMenu/TTS/TTST.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/TTS/style.less b/demos/speech_web/web_client/src/components/SubMenu/TTS/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/TTS/style.less rename to demos/speech_web/web_client/src/components/SubMenu/TTS/style.less diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPR.vue b/demos/speech_web/web_client/src/components/SubMenu/VPR/VPR.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPR.vue rename to demos/speech_web/web_client/src/components/SubMenu/VPR/VPR.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPRT.vue b/demos/speech_web/web_client/src/components/SubMenu/VPR/VPRT.vue similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/VPR/VPRT.vue rename to demos/speech_web/web_client/src/components/SubMenu/VPR/VPRT.vue diff --git a/demos/speech_web_demo/web_client/src/components/SubMenu/VPR/style.less b/demos/speech_web/web_client/src/components/SubMenu/VPR/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/SubMenu/VPR/style.less rename to demos/speech_web/web_client/src/components/SubMenu/VPR/style.less diff --git a/demos/speech_web_demo/web_client/src/components/style.less b/demos/speech_web/web_client/src/components/style.less similarity index 100% rename from demos/speech_web_demo/web_client/src/components/style.less 
rename to demos/speech_web/web_client/src/components/style.less diff --git a/demos/speech_web_demo/web_client/src/main.js b/demos/speech_web/web_client/src/main.js similarity index 100% rename from demos/speech_web_demo/web_client/src/main.js rename to demos/speech_web/web_client/src/main.js diff --git a/demos/speech_web_demo/web_client/vite.config.js b/demos/speech_web/web_client/vite.config.js similarity index 100% rename from demos/speech_web_demo/web_client/vite.config.js rename to demos/speech_web/web_client/vite.config.js diff --git a/demos/speech_web_demo/web_client/yarn.lock b/demos/speech_web/web_client/yarn.lock similarity index 100% rename from demos/speech_web_demo/web_client/yarn.lock rename to demos/speech_web/web_client/yarn.lock diff --git a/demos/speech_web_demo/接口文档.md b/demos/speech_web/接口文档.md similarity index 100% rename from demos/speech_web_demo/接口文档.md rename to demos/speech_web/接口文档.md From 2b6fab33e93facf066169e7e48f5fa0f8bc49f5a Mon Sep 17 00:00:00 2001 From: iftaken Date: Thu, 16 Jun 2022 15:14:07 +0800 Subject: [PATCH 12/30] rm TTS.vue --- .../src/components/SubMenu/TTS/TTS.vue | 726 ------------------ 1 file changed, 726 deletions(-) delete mode 100644 demos/speech_web/web_client/src/components/SubMenu/TTS/TTS.vue diff --git a/demos/speech_web/web_client/src/components/SubMenu/TTS/TTS.vue b/demos/speech_web/web_client/src/components/SubMenu/TTS/TTS.vue deleted file mode 100644 index 13884ef7..00000000 --- a/demos/speech_web/web_client/src/components/SubMenu/TTS/TTS.vue +++ /dev/null @@ -1,726 +0,0 @@ - - - - - - - \ No newline at end of file From 30d43045fb27fdc4e93be494ccbd72cc01ba2375 Mon Sep 17 00:00:00 2001 From: iftaken Date: Thu, 16 Jun 2022 15:17:46 +0800 Subject: [PATCH 13/30] update demo show png --- demos/speech_web/docs/效果展示.png | Bin 0 -> 86155 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 demos/speech_web/docs/效果展示.png diff --git a/demos/speech_web/docs/效果展示.png b/demos/speech_web/docs/效果展示.png 
new file mode 100644 index 0000000000000000000000000000000000000000..5f7997c173a685ff546664925a839527bd639d49 GIT binary patch literal 86155 zcmeFYbyHm3w>1jEL$DAaxCKaXcS!<)kRTxtB)Ge~2X_nZF2UVtf;)`_ZQQ+qhHju6 zxc!{-oYIG` zBI3874p7mHg35wl#eaFP@a}`n@Yd5x3=$$WQWq7Op1I^^(*8hO6`Mse6O}Iu1A%2_ zCwSt$X8=#|gc#qW#ClfNW0L=w|2{g(CvNRmqk-R0hDkpSsJsJQ1^2%aJ$v)}iRoK( zVS~}Vk22%K!`wfH8y`A4yV2;{0G5n#*Wm6)4VCT7S5zo@Cc&z0&y@swsfWELi6V+o z^6=^13X8ucWYsV#5Hn&hATqONOK*a*R9VOsbFLyHE~V4(QF6x3qGnO)v>DyJI9aG8 zq*Gu9EIUtwvG*BI1~wbouyfxoii(NJO-f^DGCnY%ydFM8kv-BkDtYAaD`$|*%&y+} z1z^SibH#_(FOEF|3vbtdW;10@8b-47>QhlMNvZB2&dm0LSvAurXm}=`oc(DVOXtFk z%{Ghe=d#Rbvw8e7uvU3qYpFzxUjM?es4~rRPyR_ksa6(o@o+LT)L4m6TRc%Cg}kB` z;>rF2A?~Mu;NO3`)my6dlbl*~d%LtY%2Xxt&a;rQpS!lZt}U4P1Pa38$LeDF!=1x z@j3KtCp(z`oUgf4a0inM9Ys~h_;b+{hnE*c=@X05;V5PEPo`3O6C#{TZ7golf_#XQ zQ1L8KB@EEV1Mt4);+~^Oek`V^d(!_yrWV@)h1LX(NAu}r&-X9rpMIOwQmX}8)sj)6 zg!QOQJyH6>xACO)Np%ot1N8tcXanOVD!4}&{he_56BT-nkF>l2m2_C)sGjdozr1{b z!Sw#k7tvS2CSN2Q@mT{#^FEbfmWTNTFX!=1F_mGpV_i%4O;H{YM+c5@3QPU%=|JB}RQZrZE)I5#+&(p~VXa{Cj zB3FtoQeNoyB+@+x29?}rFG-nE6Jxa^-}ELKA8`-y?DMqn1f{_=+11gkLtO_jjfv~X z7fBYsp4izCH=}`qsDH<9IS6xPkxUYk1sHD^wKA+ItM+eJ}U=a{coto?FF7%jJ&-k!*<`4`T}jz6$Zu|aXK~3NC6LK)Z|yfzV2xp6YGpfZHSJXt zGvE9tr>0BII}Yl3n%ZF5AYtZgMh27=g#+*_jeijrH@r8mnf#WmqARgVr<-u5wA#?l zb?X|Won#z zzV>(R$awr_6tVO@(x<`&9z%l5;mpBloFWA1muV~C<{LeW5noE~u)M0#j z3Hca1u(IbQUu0kf8HJxsprJgW~aL6ny1Z2b%UB6 zTDVo#Ro=G4f(n^BnV6Y=+IS#t&=U|TNW(dL|JB42Z>ja9hM0K=dxx(Z!O7f-^a<_> z=yY9JQ`CW~oXSq*D-{~GhFG&Gz^&eCOXSqC{aC>@XTNi;ZT2=Qsv>H!c-nMvZc$*s zHPxCcXfWI)iI=n8aK&d;y(8NJesH$M=_+;f5Ed3AS2FZ2F2Bvp+0z*|L4D4DW#T>W z0ri~j`q~MA9YF-o&Ab=9q%L?aH}C%3iW`6D=1U7ptMNJY82}9V4k1?V)$d$y3K2nw zsQV7IKTnL%SkY*(C@@Q~1kq#BMX;){kFW>P!!Ye}&P)&Mg)6=0KZaWmkH$Oa?@S#` zjZU!`rWkJ3LOstr<2|+aC7uc4kcGC${J>W4pAVNyFiZH%;GYwhO)JuE^)UMyUWO>E zwy?U|ItZEu8~Ydw4v2>f#3c{74Zay-8k!r58GsL^4#1ek@nHjDcBLiogn4Z_OMKV7F2#R?UeDB@^nY_stoBsT9Nk>aJla&& zj&927Ty3%y(CfANMSBFbyQ zyRzni^uB5XbcPp8>1KEHeVhZJW2Es_@3JjP)$3La4rT>pE~Yitc_)sQNMzP!hGc?m 
zF4mm#6q?v;Yz&S&t0q7Ppvaak5WoXo>{NPrMBPtaPwh)$NKz#h<8HTh+xAo6vtzWS z+%k2t8n0u}&Dni@al4_k?t4pn(cLU85DcD2pW|Q1m`?!@oK394UG!G~)wD0A{w!E7 zB>(YU;#ygEr#t%H=CyEl7bJ=$B=8%rK6F!hB&?biDz&m~F{?RSG_B@@PL)9I`CvR6 z=N=nMb1FV4Jb#loqTCUy8b>B1Ju#Lk_x*;$w<@Aa!j|4P`;_K%$iMmC^ARVA_#4Zd zvYqG$@oyehTP|yd)A%YwV|<6^n4)CvlERq|(D}C8{dQnjPPYgBdG8XZ*}`E|4wQpY4(Jq1Y05-Tzwt zDiI4UD}bz9-VZLWd6RV6h)92ExT6otC+-XATgc_|ki2<+WrQ%D2A6k(y-XjDN~L@r zT19_y^WJ<%cx@>jYFD+u*G0OHsbZ*n5t8ux8_f%>DgrD?UP(9K$j2)ipF{JN{YWlS z{pIdZKVRVl2_AZ*sHMb0l&4w_-ziY+p^OcV3@B%HQ_)Yn!YRa1_Dx$C=@3B!(!z@wIk$0%b9|iJsi}A0PLIksjGYceg&4pXFloFZJv2M0U1BJhCs9Z zDblgSfBeS_9eW2y!TKw@%-jEXQK905wG;lo{KIqXkHQLX3($!OQUBA0d;{kHzJc(c zNd9jekc;I1CviB*jxPxI#|w~pM9hi{8l~MrJM33^kWJ@9iB?03eB5K(ehv?0Z$g0S zQxf+Om1?irFQ>>i>XjZdi0^by82-`B^+pqM<{bT0;JxwmZ=|x>J*bu-(dHw}SAB zbxoJ}HoL$7#lZevAZ~e^vCYa3=WMermh+-vn7Nm?aOS6Fbg zYKekis=k}yO1@90N5~u15*wZG>SooQTSk1|(DB9cCaYB|8d?!iL14da%N%cwvWGy5 z+emS0rsFnq|K8(dggxMPS^ReABcJ`5P6KpS08?4p9y_e~bE<$Gt#7nQo0I8mUeD=X zyfprG)b+g(vBg|@2)Tfhj7^Psj@E>Nm%EjX9`WA`?2-D(bNt5u=iB3&Zi?Wa1s|fA zR^gE(x&0BMlE(W=rxP3R(8kfaX!uk0{fd=i5rc*GP!6FFoFHOimM%#1A*e+;g&-zC zue#c?stva0rgb?!ei_@)x<%`cNEi^{)8k0CeMCl#-2Q-Tvd-<4g3o05ElVT|h~x>Z zH-xqgXk8N{q}$=bJCV6=oY7*DekPG*xzOKEKW>r?&CQKHkp4cpdF zw-B_^%w^mUpNTRbCf44ny#3G& zxyA0fS?w-WwC{x{2TUbnMmxmfr>SV6W09zweB^;QS3S#v^IJ#fA7_jA++wr zWtr=S&9Lfy#S3e+GH!P}wUHGUGZO@$9EVJ7^tn&xedL1&6rHz1r+@;xWgWZQy&J%Tfrfb=DEKeWYY(0LhYU#hocCP# zE-&OJsADn*i^I$h`s{!Ipx-OWw8|KC=jXny8lqzqp%)B>;_Eb4kRF!89$CmkkXb}+_m!+no{0LRU9efQqC z6D9P{Ljm)9_iy+Vm7b{)uQl&A$mL1H;m-xXRBAdN6M5IP-TWMTHk$=o_k&N^tRa$b zr@Z>1$M7$`uA{D?WoX-wD+BSrCC6bcL+N3AGA84w(=6Z_wQiGLc<|dYfD85%1-Oy ziivBUV>&Ju#g2y=x8^wPy7h}2zLbrLO64|so~}mk*OOK>!owGIoV(yIV7KnWxe9NG zZ$f3C{Mjn^2_j&Q>HLiU@&Os*EuVPuc7>{kzaoT2YBxFrv`vRH>AUVSB^1!-4-&7Z zy&$)vY75ehI5F>$gWwhJ<#;O@z2w{)nFWg5pYxtf)kS&2x-3gPE((nb=rfUfEM25y z*>aDD&4Sm-dFo|)t8po3PmzRQ#=1_69op=E4*+MVeJa#$49fnbLY*b@fLhc=i4deYC{GMInyLXRO_f zFwpaL?L34#Q@vLr7e`|Z 
z1=)88U-?VF55ja$gP+-wxR+V0*TiNL00}P@hHpv~DJL}aeJufNh^Dcx?Jkz8J&|R* zqsc;YRE{PdP0#kp=R77d=0YV|r1TnxcctzQ3MWFybd7|imUyZ|;-vI;#FZARw9Vtd z*v>x1#N!xqT)dmd++uL~W={G`w$rlps8k=D6i%(JdTOTw`VzHYGkYt}K%tPQBudTY zpVwO*rF$>c(h1$xDsNXqr;~#OU60_7_sP$<5ZiIu##+!-_ijxjKZiK)lHkIn?gk3_ znvJG%*((WOyi%>(x~LPcBT>y-Y&VV4YPX5dvH^~LI@Z;xefy3rn8qx&LUSa`)2wOO z0Uj+lA<$^IT51-t+j+AcH!)wOjhA%@e=#|NsXl7a;UQh4*EMSEKC(4XJr;xvZ0l)q z<9_^LP~~#7EofW}+-doec35l_$%8Xypheqc)oLTq`@q} z*L=2fKw|GV8i9gk4Kl}8;fm1+oiRZM`*Ge>ZS#fn36j4uS-DxjGaOY34cpSTR~Af; zm7=%0=w_&T8}OvWWaR7Ktn$KOELp&-Q2^o+m(S*akk98PJ}r$RIj)hdx^wvoEVURi z&OEj~>wdOkDc5*r+U|C^llUB9-~KCwoSR#vQE~X?r;V0BZB1q-;i6jPINoWe$Kd+0 znrBt%d=1zd9ALm$NJweR2S3`NyNyE%Qx%5IlO;aIflotnrpg}~O}alNt3KPG0>z$+ zA&TI3rf>-#MBQdxBd4x+8pz(>=;+Tc5+N@S_P*+fBQiZ3*~N#qsw7bw)jEb0e`<)2 zDd;*S_S#x91gtHwMB0Uz41Zde>33r(B8zRIAlc`_i_M;}yW$n>i&W!=xXkB3WOAd1?GLa>$NmqqxOq{z+N!MF4yGSba_rbjl; zmm}Z4F0IgHW5t!`k*xSXZd7ItvYq&s=A57dkoBF%&{G%{I9+cVD_GNc>ExlZA`U9| z9aB7v46Qux#Wfx$FV%1j>gczn%T9M+^PA1`l$|Rops(24^}4^-=m_wbSyD?O?K!!c zWi@|IQSa!d-&Q~L@(vcwD|1pSVDDFYT}Dl&6q^N1WQj|cG>+A3@{&H0KJcFcJf3xI zR9?sH7*2}7uUtMzt9!W5fXn(-JI(pI&XsyJK{D4Zv<+f;0^R&u}J%yFP^?2dI!KIH2QZdF}Gy_wSO` zS6h}|1@!3_2w}k2w&0%3UeHYEzd#b{x4nk=*-__ZqkauFoxIb}DERqRgJKAe+I*J2 zc1Hx;zsHf25*TFHX=FDf$MJu-Ev?9sLO9r~o>o4#jou-V0|R8uZz$XBVvA(ysdDtn zxm5f|>~dO^K`>y;F&Vkk20x$rKZ=Ra>+6)f;&WfoTco@bK%+WJ6C&5qq6tsFQ!z7ivYLCeqKt7a3Ug!^ejf$Ty%1YI|H?bhLd3_IC z=L??QD~dtl@lg5)KYtzR{aknT_ri_(lR|pM;r;16H{XyTp=g0-!E85?ExlzeD=mG3 z0yA3P&A)-Zx3xm8I_VM&6Dwsyll27-n@wh&^>0OA)=?2P`G)lqHK}T8c>c`*Sc09EbE%$kX!_Um2DDdSNC;vbssUsRpAqP>NPdl2$e}_dSh;Up0v+NgX zfWvgcy*+3r&a8}@?bm6P1cO=D*W0Aui^BHP_6J32g75j3-51%?03WdM!$~ie%rtNI zC|*&epZ46f?d$Uf2ZrIrZB1l$KV=a^ZT4(n#yO-~Kw@^6g`LVJ$T%MmuyJSf%-qIo zVHZ|n6=NeT3@rOKYt~`STYikzDN00X`gdR8Eg8_|w+4U!B_6q92*&=g8iRn&LmG?I z<(4+TNmeys!Brpje(&mogbzBoLD$Cu{0qAN#(~%Y@vb+99Ur9ne<1r(7d?q&))YlT z`o%L0G98QWYqNt{?rRuk%9Ld&o9N2bdA9c#8f@_;>m)3CKl;K51SGs&aH@|=a4fF_ zNpE9WX=vACP{Ljl3rwLi;8Sg$Oz=S=KF-ynscb~4aW3e6l=hV)CRv*yWK?C$t>?za 
z_gWMHcTX3TnGX+O&xS(Del#P%;~Td&*b`P~i#V_>%hVG?|9Eol|3?tqq~D^CD? zu`-z51*1UtF{r_@Sa_R_W)sUjuY@L=oI$otTfz-L;QX&y?`pR7ob57UQX zCa&@uZg{w0R=FNGmuSPV0_DyA3={2DcTfx!J4!|S9wra_Q59AT-Gz0+)}VE^uZ*MO z$6ZPJgBQG9tyw(mo2^`PkFogDG-m(h1JfWm84(YqSftYQv5vDdij?E~l0(;|-Bj>- z{RTp4-G}-O#f|t#uyjfsjl?Lw<;s-lfY`Imy1}`Hb``z*JMU%pB!|UnU6w=^`vN+6 z1<45DX3M$3u+M9OMf;C^58rVFOKt>5hNN$K+e3TB7COLWD@XrudNVF9g=0IUa6;c+ z0FHI1)Q`)<{(Z|fl+2jd`>f4;C68ir_ub}H)KHIjxW)0jf4a+#^?M&>@%;DiH67K1 zrGr(g5A%{av?s^wXkthD#rpd`b4p507Cu9@8U3TxGtpl{#_@flQ@Bu;F3$9+!a4kS z-qymd>$=$m>oVeb%cOWi2Bcn*IgQv(uU)OK^*HF>xBVe0%l$0f@d>M5kVlhOlu=YI{E80 z&tgGFO;-D9HG=j$+RYC}19`xzFAa-2&J)*;p1piSxei9cBbB(<*Fw&NjjiJ0iY=Pp znX3~_N~_owW3OiRg4{9nD)47kih7Rfh}i7*qL!tk)QManA$l&jGBd?b5&N~yl4NQL zVGEDhD$NUd?l*$ITM;aoWcpO#Nvt*B@aRMGEzm~Kz3ywt9DnSgU(2iREuR*~NO^9c zAaFE@3a;H`(6$6(;AW`98b1aKB1`s}X%gsUxqOa|SkSQyFY|sUaTn*XCm0Q1oLl&~WI?+b zNql_#2*b%DcprFcv)nkX;Bzm)=Z6@R4>6o))JaQ-)UWMjyeBm?2{tk}SeYAhYFBB(|84@BxYIcp{owBD8p*0V^Jhl_nHpbXGVlp(rh0S zuk-BZ>Pu8quvCT_ECPH>haawb(}}zXCgc^{Me}XrL^;(TP4@x$+ePx(7xCp<GPoNL>R8>FSGb(m`$zs(7RBT4_!Q8da7KHZkHBe82cK0SB`1K zZZi?&1cN_qq-Z%bDTs9dHwXw7G2YTTZL<_0y3+r#2a}-ZbzcKs0#3roFk=++1eu=u zN^6}PVS*V>puVs^E5u_cq{?EtV6$l0vvYK&ArD(dvJ({O%U06DiFR(HEv!QW)=gb| z;Qiigx7r?EFT+{d4bZmxwJqrp0zF%ueMT$ziryX$g?)E?m?-IT6U)T(2o(;;XmdFk-A*{1EQjPU>uFmtVVIse6-X-$8rzTVBs!AdP^e-rZy5c@W@r5U_7cxg>*B?P`&ZXK( zkwZrHwlc-uT;t-Nv#|N7??PG>_e3S;E5P&%W(9lg@63PA=zRUZ*FH~S945ynL?68N zP?uv|1PLq7@fpU!>qZ_oA^yhdalYVA?@VLf^VYGW=ESNnEd3;lVf*%3Xq7uMqmsX8 zy~w*PSQ<@cQ}Ck6^biX@9pmbL#`0lO&XWz3FNIT;8JP@P?3Rr1+0|atOJos3)t_-G zo96f(mnddFPbJy(2EU@ob0nAM3&ACGd-G3g-qz=}idh8=@>OgwvwZKxEwSW+>>ekg z$!8~s3u&Up0^jB+2-s5u>W~M%;;oC&QadL>+AE;*y3MNJfK4 zwU?f<^=9ky^WeU#_i!S(AF^9*07}ANok*n-3cOVM^)?N1F<&A4n#%8Xmu%^*l<(-) z;rtzzxp#-@=&Pq;9wKJ1s5rIT`=K?965#_br}KQ5BBZJEe1D^`N0BEYWQ?#}>BJ^l zmpgLvfhu9gEo(=JA0|9YPFgo1yUr?c>z!`I7_+eo}~%#!HWebWoU84KLsY~5xZ$^_V7?!Z>+uO4jI0jssybjM`+E55$y7|Pw*r8!eoY@oaIdIY%a*S?|BxBE=CaE8 z6oxU%F*GC!PP07^r!Rk9R|#zobOn<& 
zgkwLj`$5K#@@g|xJDMmhsM%sOu!w=GV6$mKhiE*l%3+j(9pV~88C@8i8Yr}*uORp~$oZo|nxJgW+g z8#_dH0oC>B4?n2c79w9~|2~PU#TA+IEv zSX{HuvNU_Mf#qJ-=cTAi{NZx3>iZm|B$)P@vui~rL?W@R{W{VcQUx22QP;d=7(}wM z8<-ER-JjjXgg-*i!9~H@h%sVpMuJEyZ25497zzQwIj+t{?i-APlLnx;sh)?S!e}>X zKwJ-d2EX0Q79}>e-cF%gT{f$1^H{O!pAhaW0q0P1YWi10RWH-pnv^8i9>fXvvxn`4 z?;BZ5+)hCX$>rZB9xz}+v>a*nIg}8Ezysn%wG?J#3f}fWhhIxK8g1kphr|tLRodIlQ7^T|O8()$ z*t7lkkaX70o&SzFtrSIFG7me9-Vyh_eCE7lN=6~W zXOEo_+~Rf&{XzUGa5GjC=)c+Fb}DM7_B!|_(OHgn2FtzYIUZl!X!HSOW|8bUI&~-R zt;rlAAFbz(3ep~6r_9?taLc%Ys1cNz?vW17WaEq@7&3b_j9Vx(_S{<{d*16XB6o9) zgcPR&t07Cm#-N^KK0CvH1+XMR=0B8(k-W?EeFVUpkCXz7fM&0M=Ug4h3(dP6EHZ5_ z5(&-{3oYR<%r2(t#k@GAJ-E(G=iW>XKDIsIc-WNwnAW}O#XXM~k!4b#JQ5HTnh{kg zWInY4YO&8X__}lD4D6=D{S`Z&C-ax%AmEW7HH7sMqm+zjJCdy?7IcZ!k<*Elh3CcL zC#s>S&t9_ATd?GKI62HK*64N&M({Cxrm|dKTM&KsMfm#9?0DyGlEIOT5lwi$- zjj$K7xUDSv^vB42K5buwy+XY&b4uK}6VgVJiGiDH`U%}s8d+@x52<==A{E(uw+?CW_3(?9);9YYzPuQBPxv*ueab{3PB7q zEvBPEf5OXtg}dlvR;b%@3s}$N+c6ks1bBK~o!HJJ^;t_$vvKeJ&)VK< zbaY{vk>rYbQ2h4I`MJq)yr0~5$^0>F)dh$b__*R3#Q=YbXAe)&K1#c#<&b~LB$^cm z3HdEzJuj}>bo6U;z!9>Nxjocv7mDXEp1(fc8toQptlMc)jGHLesstoNDOJk0J!?Nc zdnVFiIXhXY*~Y26$=_P@J_HxboUs)5g#sluvs3=_4}baYpqfv-?>l|>bG=Wd@5cyi z_QntQdXFL)heQ46qqt(@AYTw$Q9#ARuLFHcE8;n-XqeAV^J~Q&k*>s^Ew;GfSi%4x$ z*FSzNmCLozIkkcD2H953C%snT&Cl%`rvf5X)5E*ZGPQe>2Krg41?t!z)Y!gPru}1u zNz?=S>9@=SmO2raQGW9d>vZ!*HqMh-OMAqW^@!qC+Jbu=mMoLe#cd#~lx)ME==VCK zHZc|~8i%|$)&ru=^7;|sMOr-K{)g_)xAnz)Vq(Gyhcl*bzM~l?Puy7H0jD#?pI5Hs z|7nrae=>bae-0HB11XxWkN&NKpM&Y?HqW^vH6Z)IrotxmxGM^ig>U6qMkR`mbnmb| z|KRh#JE#?6g+4QKLJAKoJJ+5!la&s`qSz|igE0c9%gqs#!D!&N@!++3j&$r39P}At zd|2@IEYH7~E`N6iyq#Y$c);tnArO%b?)g?BBsv`jm<=2}7~_hirhS7hXyG9@9u-Yw zLFS8yXuBB9Nbn2-jgRuLeOb`=Pt>Y+Nj_$anHoj1J!9&R&RQb)Rew+Ezg=Us`-JgB zo9jND0Qi}9gX1=)^EeL^?=!8{X4_0dHBhJLwshRAn29+4Sh583q%(Kdm)$a3o-a@7 zGf%XcpFGMwxfDhSGOXaoD&I>QExkPIUf3(!}&jJa-* z{~MM6J0AbNAO1fal~5cX*lLT}N-Nm}vWC&9{>(iP;a!`|FdSN;3Nh!bR9o0zZ3&+W$4N-lp~>b{X9 
z>F4bdP|%itwp2Mn+p#b7+6-DrUdNxKU!hSGIbm+Pg)~U=M4dEnACZ%g$Lmc$k56WcHH?d~!NBIZwoc$2b})Y<19&T$O$w)&Id|e0 z+1Nn<$(Lb{Q4w z&6j?EHLJXbRP$q2n)S`$(9`Oy#5r?8&@J-OII<_TnfGmRqC%}bw$ zTXSar2Smxo1nHmE#7#eX@j+XvWnT=PHv&Qm>Q8wr|1m+`T7m>sD{oY^M41=Y+`+5l z)!Hj|-QICn#XUBoTh_-H z>wxK7a@4l?B0FEC1NQQ#GDG9CO0oT4t?VdqjvMKp?vI@x%nO^iW&O|0ZQ`8 z(P;hIS$EeDA{_MGfSt*Qend`)-o1uNta{^K`Ge{9;Pn|5345!5{kN}5rFjo4&dQk` z+H2Oa_4{ieqzP#p$?6(#wp~5VO*&UJb)Cus?1fTVr;l-V%$#d`!H!bmxzfJm_tuqo zM$K~`3VBal;C#>Ng7VsHZ^puR`ia@F$l4C?p&;OiDdZJjFmeTY&QRIqCL=eLQN4}) zY_6YI5|W}q?EU=Ashq*mN+*VpyfG-zsx|O)a6VK5n?iZ7j|7|+_~5py<>|ll?u}HB zQ$SS2d-u-FOQHCZ2L_P0HGsN-F)6LR{vfNlN!Kkpl8`o9KZCucr}hDX7`g!DK3q;> z*ILeUKEq&MU-Qn~W~BkoAblHiKKF`^4n7C;Vd?N?Hzzz+vMm3E+kv$>G)!?5uj@1G zSmA;)?ObPI1D_2p*`!cS_e10xoddc=E`31jfcNeQvG>pb$8hhsAl$}!mH8IJW0fc! z7R&E>!eg-H>DTZe<@s0?ApybbWdf#NA4>{kp0o{T3e|K-2`zi4@0%_v{koHa0q7(; zVBn)UwpB6b(`A7((YBMzij^~2soU0cTX2lYd<)nhajhW^v(^^^wjNEch_@lGyE<9v zu8Ud3K`)ptq!rpdh5Tby>-k`Jw`j~ZP^_AtYcqz4bh-~tEe8BRgU3>L4BXUj@d3pR ztGgfeT-LRbQm_+n6Uk&v>v+xy)cLB)A_>U1&L?$4>3l#@v6M3wkH_wdLFUkclX(; zOc-@@onsgYZ7X1Lvi8I9!_8b7M+^NHPj#gjxf}u24`sMEInr9NhBj@&VfuzIr+`@;m zpVo!mEwOu3(gZ@bXQvyJU;-(GL5Y$t(2u#}@;TVGdP3@Pg&Z_E!lJ>0gxwUGaTy-d zPyLa?qw0z))#T7MU`>zM5M-Du!ms`aJ+9e{<|D3ILL(5LO7;yrrg!i|a(c~K?KKtP zA`MCG>eM+);d=v^TpPLf@>-$BGDPtTv11oy}kC(ees66q@Ee6GtF+8C^(Oo)U zQPbIhQ*xM#sqfa(EM6qX&F4qdbX(Ay6_ z_3L1J20zvK{iocuLS#&x9VX+wd24Q4amT>tF>I_QD*pJSs;7CQIO*R_md%zP* z63-dvNK4!Z<+27hm`q)`go*bx6$3NUHo=}^J70M?*;n)xX7`w|zQ=XiSAsFwWQ$=KW=fKB8NytaBEtw;HP3-4nbQlUjZ;=44yJdBLie&&G;;#dKnGDMdJ4N=>J7?*tkM8Xqsm5#=%!Ctxy>H!aRES`} zT;&ul*BmL9oEKavmkI&2fs)WLl2{&!V7u%ue4>EJ)mp=#mFMEIM8c^{#ye?Y_>_k; zHEmGLhCwd|z|{<-#BtkfPZjXADr)g198x=`8@aX;_*5ZK`~%U1ci2f~mp zasD=S_`?G=SsOQTgqUqe+WMe?&p4-KrR2(k9V>i4+uzW}daQ~i2V&BMXT4SrL6}iK z?%W4KCHI!5%^vRw9170nIkd;SV6HB}PiG1F(pc@sRH%!TB$Gjv=O=9AOKtv3d$`ap zuhy>SUk1)yu16j=h_NE1-5e1q{wbDb?budX>|7IWs#ScPULEGhnvuOyh!tQd>)?|LA$!ejPCt>ip zzn{fo5xaY~FIaXdjCRs(MSE?vpQHNArZ6Pmmj8FTn#e$8JA6>QbS+J|hnj0)xNb<@ 
z#q%btWm|?#G6!kF-;+4D{E*IIf&C~)?Hf%@C)`}fK;c2eU8o-1I^QwcNk=#OH;ks0#9Sm z5BK`bY*BBhLPM_YOISBotFBbcHY&{Nocb^oquVlGwpGCltc9AQyr?8~&kkE-q^<#| zFwzRF_^-8M0b78lnN<$^lpe1ufOU98rMW5-tA&#G>fQSjna0QRzzm`|iR9V{rYI>l zOaA3q?&tgzb#ERK4?|!cARKuRV$O6^a%(%r2^GvT1E^mM7kA%`oHp1mO_#IS4O;c{ zR`s@nv|2V_{NiAoNKYia=ywj}Wiw&|Vw~;19q{s7gUi+Vhnkt-O1YA3NRDXlnebP!|H-Y?KIp6C3L>d^T1FtZLDMJA1bJWvyo3UzN{Fh6Odha%0py# zbCnmNO-39>?=)qw_XROcwehM9jT}WQMbKqadxc&kd9jJ@p|!8^|~hKLB4b49l~>_?Riw(dU_R zIc`KD{gU%rc{C*05U`{ha}&Abxvy)XUFRN^6)qP>v59!3JL-GAfB3p# zVc2X~mX>0=r784zK)q5pE(gNf&}=&2Y?`J6(q%(fM)P+aPl%!N)b zUmE!DW>eP|D4k`q)W(CgQ)x-s$G-$ zfEL>3y4+%Idd}a2#$6s(yGw>=5ZLD{4dtQbZmm2UB-&3h#G@o!haujTJH` z+nTTt=oy9oSc9&K<%%pvQYW)%T+5%e1WRu;KoC&Mhtf0Jexdb6*^iWXY@1G_yQ#Xn zojQoRnU55WNc7ZmdSmI2PdMVg+}v^G^@BtdzhRfCGLIyl5Q|$JK`QHUa*2dD$*JQ- z^7idMJ^rw@g%QvE{r>wgw~GuUTB~9JHdIe^ou0yZb3sgo75DJ#I@7+-TQbt5GboPT z_jEeITzy~{n8;bUniJOId<+5JlTtcGVm+5F{GF^(;z?q+bSbTn^;!cE80EFGCb9eH zv8~fs-)AyH#v|t99si}?m&&M(_v!dtOqI(^`x))mb}dHJ)MstJP?3ERE|Y%W9B=m) zWX`|~5dfx1&fQ~D%veyhK7?jg4M8Q(x;)GiG517|J!RWok6L^g(2jDjiNII z;ecInf6C`G1f`z$ZgXB?cG^Oi!fwYdxLRV~@Fy80#WgRP*O)6%7){^n(l{`X$Eb-g zQPDWwBtXS;2RXA;2ENo`ZT958djDODI5ofKa6C;$f@HJzheOf+bF*qzGb@ zZ3#}4pGQj>Jj`gC$Jlay{`r4UGlN&gXswAi^d66b^dd0UY7V#`D&IhSuT)xW|#d`*0#UyUe zJeQUJVgJlxzVq9PSj_&nMM7=MvJ1R&VOlyDBRN`K22jky=DnpJx(uEB^N$rIdz0&D zeggDGfF~cH^ipop!WwOk?pkokhG8;l!Ie|xpc$`fkHWXN-Kxq$u+@c`>vvcBr1(gM zgojKc8@hqMeZH#y;s4I5`+OhnOIOjw+FfhywdWpl&N0S{BhRFAwo)A^uw~Y$8Em#T zyh?3+yVN~zGTQz>UI07rBiTtgvmMN{a>KJtzT4tDkO}Dz3$x-ZOdrF_%sa?4CGaDw z!oFd=Ln|V9#JL~3OLX+BrvcJb1svE>_LQn#Nynux5_Uf) zUXe8#h4e2&{kOOg5n4LC_?dabL~BF~XfaFYZI$KcB?mj)hW4&D&J;BFfiHd1GmcQW zTWX>nfI1KQ>9HA#MN$hJQrWE|SE|DI>~?T^U7h4yoaZ~xrvIpv`%CTgcU8`E8t_-# zby@k=g8Iqez#`&(I|Tk=+K4BiMyVzI89I#9s9*3@HYtWaPwV@pr@aADvNr<&ssxqiu$=ET2O6l^aHRU zjQTP63v4QoW4u$e>=QvC_vzbSsnECaaP$0cY9q-l=*(cH(zLD*)c#foF0lHXOfl+1 zj_Bf7)jS@HPdAoGss&Kf!i7{nSm=XU>Pz%$Yrw(26IM0$fD(*0|MrNK_ znfeh@WGXA^Ewri}8YQW_q4yb=5$ST*CgSS&cyddLl=L&mm5age=jjR>QJeN>bw7*| 
z8*WA_4Gf~yz2o|j+1wxKy?xd3`JVmBiE`^_uI5R?AuHH4r<+#u+%NAA!!m!aW%cKz zxl>kK5>(d$kD;TJcXGif=Y?7}Hr=Vfc0Kit&L6ADjAXSjeg-iYFY^^?r4AnlSh9IT z#WF4gN7#I3nBJ^O`c4ysy**gk=flMtK1|yS$(h;0RI>Sx zgoMxnKBegAqWYW96Ad+4qwKEX?V zbXBOa{5_~I=~Yz=olt2Td=B;C8>R8~<~lf;gJDI7lz z=EtdO9Du5a%k}LvD~U3EXFsx+ko*si43_SvS=?VKsWr(2aqzr&pLysH;QoBIKf5-y zgKztyn&aplL;5==?BaNt7Y#*<6rOWz@|%%nTOEq}ebmA0bK?J@)wLCp-y09-P{Ae{9`<}Xb}C)u!L|^Y z3>v<+zpJR{9vco^zc7EWdS{HqNBo%f!i(w~UH_YpE93m%gW=1dYpvikldJr2lFxnD zf49LGL!%(*#>D(tQ!~XsXd_O1HR*h4_ch1wsABpX_)3J z_yMg=$XME+WL5eu%gtU@x-rsTY6;{~n+YKee&w3-D(>RQ?0{yA$~jm0aF~li`)CfY zsXoxco~kI{>&r2Jq`oNjG>Z%VH@Nt_=NkMBxC7%^nDkYJp3ZR)H3rFq;m2&0w2_x= z;YhkZUqEfu4W%*X(adNKzd&0&Im(_*HNSX$kmE-hrxu!jWhlwUq7^02D9ZH;|AQ8; z4VT{IC*-1iGN6WgI@^!t(giTKJ^-0}?t+seVcC|ebTL9YOlI<(zN7)mM4jU|N^)Sv#I)%aVZVJVAk zrx@HfB$TKqg(b;a6qT^GE35J8QuQ*17)r_j)~W9b?eqVkk;W1U;39_Dn_9|aNUQb@ zc`lcF=I%%2KsOB1k@(A@143y|a`w_#vs`Yye{LKG5Q7~jZ=@4Dr?f|Mm4G>6Du&Eh z_I~WY9oF+zbNnRGRtsVi5>UFC^OL}w;Z%6Z`C&wNuq`=kh|i409fXaotSD7p>9W$; zdxZhbv#r#KF;{0`b}oKCfh}fHiFo3C(gox zvP2zVXCEsY0#PS+YZ$_(%5=FRhFXCzE!Q4~W{YmsO3svY<|cLZ;^F{QeK(pH4e~d> zaNaPsXmX5Pl(OjhUdpxc^Rn;&$@rLgYe%1U+y8@H#N+f(HRI`6Nn|uGGeI>)?x*gq z_}z?YzrF^tg6F{B(2cA>rW#dxM{%FrgjA`0r3_crlJ`H3#>y&uu4YwkV~?M3C4TrG zdfGAc!S0G>N8&Na7WgcfU5c3Il6c~J{{=>pDMGbqg?=ZJaRu-T%HIL9xE>XniVm!o zF?`adjhqkYRYJTgd1=sJ-73kj8%1I2_M!z>P1>_i*wYNoD6M(q=P<)4Gj$4SU z5{%jY*?ODG=|D)$xhH+z-yIha4WBP&JjDBiobe%-r+ez0`eQGTyuX*t^hDwzf@1Yv z4Y22>s)*GNKd5rP(77gL*kn(DcDW7aNcmupU zWw~^=%+vFdsW0Epj~WVdqP5}7!PdNvlq8%BfrZ7ngr9vGqSj5e_YR-Q#!o`kktH~} zV;oh3U0XrQ!!JG@bdZd3FOpoO{D=J<{X=M?rW*iM~gUuSxlmGD|eKxKFgk@rXnIc#E=_s@%*NWX+oQ z^?Cu1$h1@F6c;W)H5!KT@FY|Y0TC9#<(sc;WZE}17N|sVjhqy9$;HTGBqmlB5VFS+ z<@A{RJ0^X%gwXXkzmzo`>+sBE7lfR0ZjK};l+pET)h}D{s*O~!Shoo!!4+1}EEY%` zYf@j!c1G=HISm7i$YCcA{-s2=v63g~3d|?Jj?o-H}XW357Y)VTUT8EG?96-dz&$K&oT~l3d0~N zo7#}sMUoCw&}^K3SiXP62HjtL!p+EL=wG3IMQK1*DsMi`TtjnfG&2260v0yz3O|Me~ZB9tJ?j(+%StujDwl#7|qllsBK&DvBb=;!a~h1a}W{-GS4 
zRx%+|YzslRbFNy#j-8d~Syqud(A+>ts}#r~cPs1Oy-IPcY+`G)z3ERj3|R5Ob>>!X zao(H91uUfQZt>sV*}R?FT{@g+U~bUW7f^{``3R>FhOk%_T91tst;}jgP0g+Z*t0%H zYl)JJreiHY>@fQm*j_z`@O{PO)H)_Nm zqLN6Z%Le4#Ln(}g_Qa^&JJ^eWUH;X*3gKxhZkMequ@U!+3H6U z1x^HrZ3L@Ia8VV;zsCBo5M-l_<~g=Q75pTiOypmD$v09`;W6SpI@xeeSoPq1O0Mu& z>@9r4?ZqkwNnTb`<2zt$>RaPc*wp8KcFaO=)%69N^7r~)!r|WjSp>XBZW~& zo~Xd9UTdBz9Fy(TLd?(?>MwfMYrl5{xmP%Ro<>cTGIx=&3ayd)-qQ_V%i9hix}Uji zm$$6D;4DO~XTLgOpl>{B!j!XpLBA8@zHSu1=Mx%S^7OGyOy^n%JmF$}VB=AtRn$

`rtA=QK1U(^!ONQVKju!Rc$L0~RaZG2?6$rK%rFz! zn_S|U(-jlH3lCx){=VgJWgNLCL1f)+u9LOkmtoJebYohN!p||hvDfgg4l1@@@c?h$@yRtH~>%6yIUZKTcC;`X7Y zYor;9);R$~*B-xzjO*)7uLDkD{0YMik6%U!;BS&yvf1D1;x^8n$vc_%%zF{mr!&21 zpP`ekm$b715%^j?0oRt4E9FJ3l~cnJzWsKKZ#V{9#&pX1*+=4{wiN{Oo5c5SmDsaw!ml_58r6~#Pel=fBF+}1bwr&2bFAz=e6-^xhp$~7%zHHm3^attQsi+K%9Rr8eTpFnN#n`*|T z;%ROW<>tY12;muKp(Z-=p0lZaHpMg1z`?HMt`g&b#2VG1vE=iJ-a4j%_;zoGwhvFv zN1~CTicC2hI@3L;L9fN%?-*M+URd|mdR3FNP#;rAJxqti7yAZN*Z-6q$|nyBQ;CFC zf8sTZO!h*2-!k4dE@f5kJ<&9&Ehd|AGqxFY<5@6xdU1Ny$M*C+7hLWThge_t{Y!B~ zU_z`Im0N)!=A`3E!c(AOaz=QI%lloslfq z9t5;C*gKLeOcIRPHwND;p&lH9tUlOXpw+=Uon*oYxlQx#?HTT{(K?5lX#n$M2h)B+ zDoM`fpz(QL*GxN7m^_3S2q?qWneHvq6s*M>YJzsAnktgq+qg5Mj?S{~R?i7araM)q z9%7+zISX*;v17dkY>Dy?v`;Q(QP z^bhfGwYaZh&vrwAi~Nbm$%FfMuLIHPNxKVMpNRd!LZ^mbjrHYll(>?b09YsrSdGe=V>n6gXM}8ke%R=r?EmIIZ(7y%b~vj<*Ibe0`lwwIJEBE z*M6Gd0=w}-v|q!%#YLizkQD;eWj%gOOSJ!N`o`+1#h>Y!*VAF_p#d zK{)`)B=g)ise$8*^(0U6qQDTLW{lV<@iC$x@jT%m_e#H1c0zytvdY}Qzkx^x z+5vl3qQC+6U%L}o!@QtTK5+oH^0)eUQfwR zT<1KwG&cyTDZe=BDG$dJE}4zw5&AJ6U_ZVDG~iVI324bF6ixxM9IHUvJR}frZYAMp znE!0D|MTObmj?_NWi9HuuP7 z!s$|sBSSj_6}*dbhxq>XLs&Ie;~q)Hi~ls>G)fMm+87YUXF*XMDj8 zJP&)yl5Wl{35lj)<5EysSl=SHO3A1c1Bw|Xxz~PFae5J{9lxgb1w2LCnH7wh|+UH%EP<{#=`5<_rE@Unk-9dBO=yoD_b5bY0Z=scPv zt3vbOopYP38X{Jv;pA!5h%aAXVONf(tR+OM;$HMf{Mvc-9q$a^18nkehs;!RwwiD8 zNl|}|_Js&vwI7$w>*l!gf45^?sy!&eKuy`o^6g41eXI!@sRV51LJKySHqDy_R9OUY zh)$@x3`5D@Pfc4@x<{k9oKDshG*#2*#mC8Nc5yB47A1X$r0@IR&EG z5^2Ne;?_Fa%hGORyVW0Pd}?gdI8$3Cy`5~RAOYm*xG>sLeZ6RqPM&afv(t9c|JR*| z2QSk{?wAd)ZpNB#y=WO9qHr6r9jshP314tNtO|`$P@?#mEeFtT{jrBSqOglRydmg3Clx-Me4B|{h5IDMt>9yM_%{r!wE844IA;Y^Zb{S zDLV$+ZVXvOTZtpOlup-{?KH6bt=J(~)%_IG8ed_zv`}o2Ac;`Uy zSG5Q*ML7C+fKQhUzojB&Kb{3BI{(a~@8da3|JjkORj5MS-q=G10(#`4;jH(`a;&Tt zvopwq|C4m7pyFxVk)xVB!F3)#%~TE+CJ~hMV%G0{uh+SFXj1(VK0*$mQ&!l}BoA@Bxhu`{NT+!Gb&B4m=5;O}Iu2U1KarH+ ze>*AilOsXDD`W#a$j+BURqvnY{=qZhtl$Wde;h`TB+dAolS0{uYe!>#D`yMVR(6a% zd)IaPT{XTKrGMbsE6D_Zv8wQT5nT(Hbj3%A%aBQk*bE8#%CVsDcOTbm=?B4gaXcRT 
zsp0n_r>#XMJY&(R#00nSzbroPt{~t7JCozbO`}qezD#HI`oobYbwNg=8;?@|!~QpV z_IEa?LR0iP{r71uYj$Y5pyC(%>4K-Pi5!zoa#kI?yd_rTGA=&W^GAlMmg@Z?9|Yp5 zxC8vwp9`ss*{($htm8ZpP3+d{H_7q+IQ2X)=yFbBkC@k9K_PU0kUeo>l{B4nf6Mn& zdx-N<1P;^8nRtMWbtax>fInyD`OMGL_1jw6G!t4)`QG}O%y(ZxNrU#ei$9jXs+%4z zlb?k$S9vJ(nP<$ObaaZy#dIJ8TS$>MA>{SGE#ZwGIdH~@SVW*q9zk< zJs}jD{PK5Xwm~&L1z$PYjV#{1;FM}{jhg1dIt69bXtcmzf4Qs0SO>tdmg!^yiYSQN zn#+QE(>;5=_juEsas*TpfAncirSJx^+`61kbVe8Z9^VEYP4$z!gSW7T&9Z6^MBR(j zW9g-~+%tP5Ox1}{DgF-ZVGF_GHZ`B}^*@63^bIq0q0CCmPuW#JKI!rk=F6zWC01Tk*9=P&P!$yYlxQ=itT#})l`n1lGD0c0CL&S4 zTVP3umAzM}m7j|_V$^ygR!6`>W%kF$uqO$(D}(bZCpQxhr<5vzzDMkp%z{x)j2c?X z($Xxn5?Rs@nLQoxA@e}3zgVTY?2}|A2N-RU^H;$v_3=~O7XES&*|pPoA|KBld1JJQ z&3hyHxl4DgUPbPKZ0|9`Awr6O3I!83S%f@)Tbm7)-MDV5w4;D9ey5RHeNS@79#7=7 zQ1<5NM~`&wHiuCS8IT_s?9A;MQw3?u$r^=&%5(v4pI{dB zCY&1U)?#NHC z+5rR?vg5JNv~cXrD4IA^>vfiC;>#~8{yl;C8xsjJ^7aBS0$k444qFC;0uH&=vD4*N zJig|ijNnc8<*9TrH)D@1-}ioGBtVSC7f*YnU<;@38GfpvdbRXNMKR<3j>=m#ZRs`f zU{Wb0f#Z=D^HN{VTB-9QSs_OFmv5()gAEem_RRMUvwA(@q{NS1XX;$% z@0i7UNZ6*JOG8zh?~3n610J7KiH=dEN|4{$O=MHUAK3Z3vo!6;o?JOcXiSk+wm}K< zO5R6CKL?Abtj0M-;gTXKhR#A=EQgs*MuRVs>>iEBjGU=pF@jl#q%yi*oGlbdk2Wb} zwz$j}E@U)?G!hHReshLS%9(Dl-Qv4lUva}$55eI7h?hTR$XC;l2w|!@ky+F5Q>sv2 zs1Rj|5js-h`Y+f#XWfz9oD0i8%78a1pJKYs2LznVdMxcNPd83)L-uTvULEOA*l zUYr5lvw2G^wA!lw&iJNN=GDj03LZI0>qd>Sb1`6fPda|&t`=<};Ag(~(I4LV^HB*} zXB@@vnwq7Nvv8?C$q3`qUV9_EXZApD^38zduE?L*rWq1=DD;>|=Hu7F;acT>uD@qp zi&T|ySi;6$NqUqP9_v)Ks;w#FC!qU7zG(#k9faZA<1Y(AN`_>Uu!PumwchB;ZO)-f zdzF2aVI2=2<@d?v6eDz*y575ge>|PfW7xD|w@MHsb27Z%8GJeI5Ju)7ZuLH{${?df z7Y4}7bE{^ftv2geB3z8jHdK;ic0OKOwMnefoR0l*mgbmV>ZrRw_cB}7Ux4dPS5p3| zs9#s7Bl78{gd$D@Dzw1EXwUxc3m)3F7{{*csMIcIeKbCqv(GIIG;e8c4Cx0ouAi@a zsY=D@PK7Zu_Fn3i=u1LuJ&CNKJd+7%NWeZMR-%5fW z(~7T{iLLkoiO~Kan?3~WEIzc-Q7{L~1*#?3tg4LQkW0Axggb3Fs6VEzSIZ_ipf+z5 zBsvY)>*X^&0b9(px18QS)TYtK<9^9I8fLKOg98B}Cku6}SBKjfcDu+5{@L$>Wm;2Z z+fS+!S~nBM(tm72KcoFAx&Wt+8v*@?x$t^1h^tQBtmli`JFz(;7Ic|yp;aUXIhBaGAm+q;-*b@IEoQoG( 
z>Q@i8!R?)`x4TP!Whvr6W`I-P(v|gQ(J+U@@yE-ywpn`_+F5)D2v5aXURmy)I#%{YTIDk;sxLQn$Xx>oS!{fA zEu5V$Q`pVu^3#Gf*3c@3$=Ud!M-ig*E2kk@B2MkWV%@1HF+I*iFmL({&Ufn=Ci5z4 zK$|?`XiAAcM>Y)MbdO^cDEp&yF#jUJ4@_m~G_%cnuDrr}-qiHC)gV0TCMrKxKEW@t zRN^lRMlAT{3uFkV-mk_$4Vl~Rt#l{Yf=Ex7U$-)`tnw;85e%fCYTcnxd`CCVn0ZQk zvI0(p_Jt?6`3_clb4Ke+Ay*b|RR?S?y&?P$A#mcw9I2*+G&8@#elKTqW^bNP=$5@kj)slp3D9gYTW2bT+ZB3P%51;%5Y`u=F zEKm;hb6Yc=8I&9Qwr?|-}Gb)(JDvskZ^|0iN0sy5W8qyV z`0Ha|2qi%I^71$I!t>*ziPE&C+sLwCIe9`u)^o>yn89jBP`+z&G`ig*o$whXuXN4) z0Tst-gVjz3;AXc%L=`*b*M=>(*I6ry-DETT-SLn8IVWV0Y=B^gkaWk_%bQz6se%AgE?EDS23z_zcI(@`l_6aiQp+YhO@~SrfDRpe0{^@&Yp{%7 ze#JI)D|y`->-s2JcP!7fI0oQe;;|qmFXZ=CESpSRpZH`^+ zsF##I7SHD6p?c1V$e+X3k}@GHzXvUqb+h+aBKm(sLxFcQMAa(-9-9@_%M`gBw8JWL z%aA*@y^vfx$yF__jJ~oYt4o%*5Yi)9fx2$n=gd+Rs2MOD|MbHO?&W_fINHz9c6C^q zKpwiYcm)o7ssS?bvLkFf+biREj}ybfIeQFGQt^;>;m!tT~fImw_HCOw|2&m-@? zB~ubwK>Xg^(omT*xg4vNC`DbyUHmM}PrQk&V=*qt1w1$~sN7{DZ(KG;bxPHP6bryoI2(#AdT;FJa4vK;QmdE*?6w(H~bj9EWJ<~y-bQK zyMfr_bj1Ir3H`Ryuau1GnNi(&1Ph+?s+T6nd5*n#+7C~R3{tL2B2%zkfpdSgr)tC$ zy&Nf>9;U%@?~6`}B*6g}E1ktB%6ykw6vlA}lCZQys`ZhT08!Nx`>lLrAIfQGo_yFy zBKg&yOF-!wur#IJ_Y&Xe_2p~JNdk;BwAspM-Z5ESmg$%fR`S@+;?Gq&ns&+~(JTja z9V%qvdqOZe?w-K#(ry;*w(Gk9x?pyFE!ybkm^@LON`vn4#Dk06AalMd|BLx+bH4>* z+4Uz!a>^;|_&w-`zQ2zPsK{mB-w0w2;JznfBeS^+QG686Eas{(AU7GD`wR;YyP?Lg zZbc75B5^roSx*5_6R)?|0&7WCo$8*4nZM^D^-b0=o}&F-DBpV%IKmGvR6_WpIvr+J z49bg_AE8Xo#_z;H6A~=H8*`s@$gthjuZtx}X!V7GF0Q~1!Kb(Ws2rw{2y&TAOH$vi zgDAz!yX#qGiB*&xS0v&}|CuGWwc?l`3#+pBm;BE$ZNdy8$a-SD1p4CdftY7x(;lr% zml26eozWCj1?HXYON1Z#4RL-PKhdUt%zZVS36h1{({iC3Pa3hoD94V__1t0%oPL{d z%>(y-n&r|Y9%&0MgK~P4JObeJ+MsJ+8$Y)*lUw3)dSYt7R$E~|GBBz2OGuyL)E4hz z*Q<*=73smhD?o>A?EP`Xf?QPNWA||*GH3yc?*lDQzJAs6O100?!U--jn{c+fsS4fe znEmO(28K5xok-y{8{*y3$$N$s5-=Xz=2tpJ_ZKK0vZx?FHl$+y*qXS75f3Jh&?~X} z5oJy<;b3`oFPsu>PfKrcN?R-~4pPCaS{p(}-%p`ff*|L81nvt}fgZyQB4wME0Sz$pRfF`uo!Ze8+Gk`At`w&Ox zNS1b*a3-xAy$56aodmi~PS3}Yy0vlXd$BjBjo>J5#u-hyqkYoFwpeM7xIGifRr0C8}3s-Uo&jMh;l)Z&f!u~Bd@oy 
zHaUW8wIQC~^oN5t17)hKZukNBY_i}AKy*iS23BRZDVGJ3~097N}5=6yzA8o$HMsj{b$)+zLXCB#tgDTqrL3u|D6c75EVXu#y#vYlkz-~Z-Hw#X0|~T1A+Q{? zPk9mrpiYFpj@EPX;Ejsvl&E{*KY)S?(B~y{Gj?ndLaO9nfRSOAz$w&3DLiM)HpAP; z;&KftTO)TUN4j^>-c_r|a>bh{xpYswG>UTGM`bs2G{ON%qOL0gfQKm>|9uhiIHYUZ zpJy`oM03yDofqpLL~igfu*uXS<*P|ZMEXwBDT9$k(Xu=l9xO}YOHOqY+LvG6OcpLE4DO00-&={@@8Msnp+uEM6r zD-gUshgbU-JK(HI=qmaYakUQ!1^cW)4zSwaqC(a_N*Gskt^Q$eE}P%K;4cCnd^M|f z2zJkya|H9qvq*=N5`0~DnE*mG%uL0DGm!lj3JtLcb#1MQ!dyPXS-iKMP%r*(RJ`Dp zA;QoViEE|mvwe`EG2}OmkOHC>lWA3fnYrwEtoIP`}zvq z@>o5#S(vT1kEyce)F_mH%k->q|BKx@)Gr@`{4V|63bwjJJu z`(&*8rO(kJ&g?ITO{XVq^!ScpmGEp#(fiy#EL6X&9?)kyA?2(8l+Y6+*c&XmYwj{| zzjn{=ELJUlcrZUC)w&lnt^fEE^?4Nr`142iy#-|N9ah5KueCe``^$=-^jE!Vg>}n& zGDWE9p|(u6_T9=pvKB*>M8BhHtuu;Kg_RFBOYWX>PE_ ztR70doy{5E>S`+rDc$olbvB?N%)AV=Xvz-*1&gfszesoK1W4T9S5V^yz#s<*5nbZy zVdmhTZo8}3iH?yt7~gTTrjb8kViva`O~g|Jc8rScZFokASjF1tf<6wS&!nIrwkx$p zucA3`UM`~z6ui1>2WV2)oDiMc)mbPR!$hpq7`xMd=o>;9|hu1+!5i6oz zdzKcn7T~}%qCguwjz6+>ue-PATIw7h{{l^-XT1nc*7;z)+TFjp#)w#!$12ChQaeC3 z6RpDX@;UExBfsV^KjQ^o?RWkn)3$Ig+eyM~Uij3uq%OzaYrm6eu=ov>XLRrKS4hMS zKo6+}(_u1zo#l%*%C0%Mb6g=SQT!U-yvmo%_PfSoD?zi~C$K8^I~oVVeLYsX*R>tH{@h>X5eD{W zp5$ihou~u}Swg%8UhFW^JH%-|>5mSlX+W9drzc1&kyh%f#h+ie_1jQAdKW79_s(8` z5m)p%x=OC6#5I_)O-^8(4wq{qHU;M%@{VxHL5n?egVzcrh>)>^q(2My&0faAy?a=% zkMQRR^}xQRUya-RjcEx%nab7wa*GsLz*Ny!|QD00xB-xTQs_FP zt>Z!xt=CEsr!^1ZPgI|QL8*??Wj-Ito*ar3O39&)D0Ez!2T1RF56eIwg7fF17xVYL z^$rz-xo|KHYSlrALN%6nDjwz&N1k$>^MRjhwiWs{u})oStqG16{3g4Ok2*4Q54zZz zhhiQqN5AHo15i;3^_9~Fo?4F#JJgxrQfv=NpE{e{vov)YuMWYtB@YLVY@83Wt?o72 zgJZIHqX*0tFB(;F^o|V@y2_VgMngFES{)zcM(5sY(XyqmrV^WbzRSIO&?0_v|Cf~O z%3kgDyUnil?qBA!Tw~gO7k=&i8euXe053!4Ttuut}XS zX<=!$Cqzs*V1zbnMSWFwx9>bivgJ9 z;BgboSJ*0zr@L&1_+lS!=VQFlWzdwP`MQ(i8(?T-;K}LH=yzP28HMJU5jo*LKgklW zsa0n$8NQ~ef(N)KUA=C^4Z)XX-|M8i1a;2b192xK@i+X&ArCNiCVfRV^MJ#yuAyny29-+_j183YwDv z!&GxT$!HHQ+%!q-R|b4}&nl3BEkig8luq5*<+%8~;;h<+lLu{lrco7-d}5?vb%bg% zLC{OtpsDL+60L~eGWD;=-*L z)3MkzC<=3C;4LM>U(Z+bAoH!o)F5?dx+B7=XrCpzeU0G+ULjk$?^~q!u`|k%^OyIZ 
zwUdT)*>`)4mqI1ouj>)-$&mE#*+U;;nsCXxw-#%YFYs~oT4B55WT3yp-!}g8m14Mq zBxlCyWZM{huUg!LaEj!%=z|2_P_J{EX!zQOIgP0R^768<^%?@N_GVHJO$_H0<; zUq;KbDq>{C{;{-9Gi=mD<)0VQCsuNicPx7^<$L zO93zbTs(A{`$pXCu|?v4JJO|QoM}|>#J^H^5U5|QFI!>K9V#MOn=j(5%UKWp9m)2; zruv^pvZG#pDK?EE1}N$oU7}%P*C}V>xOQc36<1QT_Itiky;#2dmw(mkj;Hh0HXaqm z3E5I<&5T6i;vYTy zuQ>4cpKbc+v2iA-9h&i@IXvpcu|bCVg|I;>rF{2MX`c<=;-iRb^fB|}7n%PZMOZ`S ztI5%U;By5Fz#XIMaZ2ma+c;fh?f8VPPqEIX>3_!+`A1*W2*?;;tV}Lf=h?A$ng6Iw zT{P3mAN7c=LfY*34C-uU|I3(Fb5@Wm0Ouv}kzzlk^+~AG%k?>#!he|wevx_Tx_e4_ zCWp?fz?EC=aj^0#S`7SeVMyK)2cIOT6j#w|d;3pUozM2P7tD+FJv!&4Dr}Zp2J-2e=Y?YuJI*Tg!e=8cqiIeRTqk} zTT46s9It=;$G=*NQIE{^ZuoW@93`9hUjBG}yr{u#9NBpuSyL)|8*o|Ue9X_s%omq4 zV!=7l-fcALDDskw@5Il@qnpRHNBPAUCT|U)P648ST=>zK7=rDk^;5W3h4b{~)$*0k zuE&5N{LD1n`KSB7^!!oLQy-O#Zx2Rnw$3lLMU$FtEVRZ=dOH26cWy4?799*SMy!u_ zX`o)HiEhPnUM~%Aw|~7;5)>m%g13)c+qw+`^e7haY=ulewOjyJFd{3P(< zI@0GhxEQSwf48tF2VTfA+g+?HrM&8(1ayz-m#T*#ZwiaAb-_3xi~eF6^{)cA>o{E1@=$kR zNB#N;BHF|EqX%9MX1TNX4Kg>M^mF}#-$=p2zGW@O&i&g~TO|0{m^0y8fnS{^;4|UV z`76YNujN~HiN#9;B!Aue%tldHtqRfC_xa4>UFPIbQGolVpYv3j7a?#Tv7hU@{{#wOT&PEydN<-nmwhi+ zyF(0JUi0x;;W3$t&rY;GT(QS3PvxJek3ehEP8ueeCmnO|03j>QlP!Cw*mImji*^-jz%E05a zwWXYlkV#1uI)e+rX5F&y;OMWtuZ{t~oT_Dlmn3!a%Q2*#TmHuj;6CuOnz0h%3X(d9 zxVFTt_xy7o_NY_4dryA0GirLtxJ+&U@PS^UZNVH}b4|jFq~Q+FD~!0iNtZ*8J7(xv zbQTEPwbJ#@p&A!oHm#Uu@YIaN0i8AFhT~U8Wv~3LRXn#k%vbxb*J_e2ssW#V8`>G< z2uo#y2gKa?8WdP~TQC6nEr16jr<~>A@g}GdETT#{_d6FvN08>4o0t$?WbaFVg+<&A1SWBbxW|xR0Z| zBinejeZL^rk<7I^J5cAPQ>`0D_u^6$!~KN~D7^)EmWDhk;26909yf5V-MY^C6h~bQ ze!^q+!lwUo;C924EEQe8brnrM-H#VFd43_mfxH)S544SAqV2Qgo0-~fA8r>!Qb@+i z{;GI?FTw1tS#V6xh+8u(^e-T|rTtc`Ax zeGwe7U1{!nL&_cOZrH8R>z;yy-GT@^$kX(y544x&JJS0>a-CBkwC`02F~v~Zrzxi2 znT*CG0o|F*^Jde6g+3d;0N8%ZS|;F+m;c21<*7CjT{4Od8+z{f%EuU3o^JQ$LlZ~K zuAxao)d?@oKi5s3)ef^wRGeoAv9+w#y8%y`QR-;qEjulJ0Td(2bqWrtp@u9zxRrtt zvte=k{5yE6x0^fhnL)-7)cuanYJ%<22=!}c5EkJ=>e?%W&L4zs#`EeZn|*D6=Op7> z=Pm|&m5C(13Xh$!_bw)3)!_t=}a^5b@%5W{@V~iiC&c1!N3N(Ss 
z9JPHWsxlA9)E~a;ZyoELPs%F|e74GlL-c=n5fQvwMc_COfcHfUS89ZthEr(r|B&S0QQrV8FpyttT_IIP`FVXYL(nNY3ahZvoRA3# z)}lUlAirH|3K*IeIWsR;Xkla>s2G`xdn2EFJN~NtVzT?*i;4e})*r8cPKG^O#2%AQ z|HX#+4zE3R&fv>B+JD)-mO_|xdW)hBR)6%l4Y~A0YIrld?iHJA0{L60vT$kqdx+h& zt5jr>);fSlp`pqqyIj*fr4|Ak@KR0imXlo9&C$~}i}ltueG?CGhNX|~WaICG)mW71 z0u0N*w#DS_d^wZem@^_xD6dd0qJvm4U?0AjZ#q4(Yb z1PHw)0VE`Wge0>)_i*kxbMMTr`7txk{Ts;6-uqkMx5~TL`*O*vFq+QE{MR6S>-1TA zVI8-+x-h{f#SUBVo{Z989F`bs{O`g2uRNu*0!S3jGMI?FVP6IX*PH*>6JneeNHXH? ze>DEFdiuM8gC_ptumelOqohm!@!p=x-LUQGrDNlC7UXL9!UxqhE0HYi*5sZ4(wga% zn?z%=|Bmj%v((@x(eM2JGjhM-4OCHVe3*{_N%n^FDMC}#gTx~3s?5{seWhcvx35Zg z%KKwP|HHw4DFAYhngyIvYW1L(U-ftsvayv}5d@|3E`6DQ`jy6jHO~xFnHzhk-h|bm zs;hQH;D6bSrB{L99{BBcG1+(gKAWIZmEsjhr)<*(Np!O6CFKtFWK%1=APVbIpvA!h{WzZ+; zW@9ZA?322Bvx$&j6U}9C%cnn8im%VMrt34@uCeFy<-uhRrry{}K;O|OGIlZU*q)p1 zI7^+IoaSvkL{$E|sdbwEc(E>EG83s}ycl51P(BYv(y@`&ImjYjDN=q-66AximwW(a zqN$MM(g%m+fHpuFHyEQQHj*>MQth&-iiF{qFp4JReXq=MdVXg$;W)Jyk zuFSMg6F1sd(#5+sX3`Z_4Tv?}(l5sNE(V#SC-opfeQ|$59!`nB3DP^m+1I`P%N6VEbs^; zEQ%r|TZ8G@jn~~p#ftq}jPxWBUe;RH2(pM8W-dYSKJiYM)wa0Dt~(Ckd7n-Yv^Exw zOibZl1+j^g`qiRF`-*>w?0&$Ad1Y8l3^EqJ@W}SL_Gg~1KCsq&FQ>uXlcqon$8cZrLwypor|BeY=P`FG1mA%eeo4f1JAVc(3+7C zB$^PiR8d*4qUkleBv`mL`>`+&i(1rPUr9N@o^d-VGFb%KKi0Z7KasByX<*tH!&8AQ zZoIEQnU8%mnWtt>`)t;*alYCOEv6kpe>VvJ(Em7wT44qvlv$^ox;KB?;?ydYW|1L4 zj34nyY$)q>%m)mxM~5mU_2XmAymuNSv+CB<&a>e%=RB5OTME%tS_a<4Wc&j_cp5xS zCp`Nu)#u0`P*k2Aa)<8l0s?-^ZRmAj-2ZKqm&d`M!Dxi#YASOCxnI5UTLVSp6@Txmg$#30Nk#(bu@Il>M# zT{Q1c9IJQ~Hc-1p-H*h-eyQ+@Vy5Mg?c^EIZkU1^D1$PXZfH42MR)xeer4F)%z zS`>*Ku8b8rEVkm(;cpZ_2bLRmI29NcJ$snA4`|vZ!q-7Q>@~a=cI*&`J*lfL*2#^y zy#n$pcfwCfK3rw=DqWh!%|fHtIcq1jJ#k)6RD|cZ6NZz(DGk3 zw~vK@t-Z77VF~_ot3eXq6Gqr1eZCjzr9OJmo0sEq-uj-9)%aA3-_6XS=1+`wAi$$@-nM`ZAa5tt1{$dH|RB`Bz_mmkoSLxyE+w zz6b#U5*Xo2Ux1RxH zac}II3zNssALMiMg~O#F@l!2o0y7x6J^rll&iLj@rsr-| zO)8kxN=b^;pi$CfK5DD;@pv(wzXZwwuiT@o|4dT>Xwl;RMY`UL-}5yJfJg8&Q2_bt zez|h3)3$->vFluS&sNQDfn=jFL<D&UtNQ3@}{QA56_X;f4INAVFqDf5Q%8&aN0?j{XmDT1`KvbEnuV2#_DutkCnb#A9oVq&61PR!*csp> 
zI|KWM!Bi+A#3yH+xCjQA*w0ZX)Mdiw;^n^33%~pK!5jyi2kmzEVvoxgBNyWYt<7dT zP<7_vwTDwr)hmCuPeBE?VTPc?0$vU_TpENqg_gw1Ok_s_j2311ZsM?XG=E8 zB$%jg;Ck+IBs}YllssxS4unj>NPB%lZC@NyeYc3L;GTdsMkUdX;<_~&XhFg`hAYEE zx>Z(;-uI-~fxXy(0wWCKt5p!;UP-Q#PbP5(lei(ND3XvoWI3UT zfTl<5@WZhaI>(XE+2{I3nAG1m>6=&og76{v%}P@A;bMZ=5f;5m)P!zGyhF*vh`e@7 zz|Tz9yR^7FN-6L7#3>2~s={uKx5sjO7kfW^0(zM-H)ma8a~pweA11(VO>9|*_12FQ z1X5vKCa2fi(fFy%q$+zqW05O;RLMhC2)Xbwe$(DgTLPdV)jnV@3@Ds4NvXP&DyB+; zrhazS(3<(aKY-hFq5GAYWDgnscS8Tuy=>{6~CN811CQzlQII)jT8qgb6g<&>+b zZ*wl;d>4QON3gq!xE)R^l<3!u0um^~#Kn*nA&*_WTPZCJ;4dlOy^!yiEAK|)G4$-n z0y}&&%2892kcd+t%&rpH+7=w<8|t!2-i^e!v(+KjoN}XI<|jJ6OL%ef_fV(D^?d-;E2V!1;6!VNgtW zd_K{hm}o{WpZWdjFf_GsWSJfDT+{crPWm_HJ7A)DJ~mc++2M5pqdjdmx&~qM*nF_m z(WL9cfOp{XkvK{o(u2Kv<-Cn*sZooO#^`ztV*6t7`qIefa z+({WG?BgqXqHf|`Zj(yX@H5;2yS#BqK4jZ*?L>Z8x7p>q9R4%+Ud~F!!ESKTJPMRN z5xDB534M>CXT1Qq*Jp|L>*=^U@Qtb6z~`r_UGq04ayt36$?H=6A>mnvD9g%~v<;D- zL3tz#kU>gPju;_?tv>WZAe)YHbG~haWPMdi>mHzatzEGu{{ge5q++VvUsWYiZd#l2 z@8CrEtn%_h;aSt-i^ofn-weoMqDr8hGjSt+Ge2JXg{UbgtgqN5Lg!GMd z8m%;DGoA%45&6ROHqAQ2*_N$%I$?3dvE;EyW-5<-s@k0;QF(He7=T- zhFWxl)b{BD^+({+m?=73F^kma{Z%b}+aW2uOYNr!SPi14sd^!VqiJ341$jU~)yX*R z3R`LZlk!*7<=%jHExX-Q=>tG}V;YcfVY{xUn8C!l_2k|_WcOL-KvVI*k{T7}8zNzQ zbE`9dI&`BPnqLl@-rh?&`v!MGlq#LLpm55=w++~hpSrd2t;KTTtRa||c0h(1aPs=O z%8Rv!?SO)kh@j&L^FXS*p*fyFJvDE)zubtQ@4By^TPsWKuoGB{U3BDpfNyJ>z~hu@LpW*x)$i5_@x9!K0_tiD*2Q(L zXU=b!7hJNEyd$=s9wLYh%(h5=HvCHxecEiAckDg4lK9t0GZ^BIpRw%c%VmJ?HJwqa zzMJZkfJI^bdGx9U^g)#tWe)Xiv`XhS%bjQQJ7GeV&T~clp4)4F!20eNVkh`Za-M&Te!gI&7*;qfF+cxk?(2 z0{kkie8bpwZ#6O-xp_d(N;N;m(N>)zjKHA9AC4Zs>AMW0%E&rhWB=|ta@ITKQluZ0*BW=8;eLLl5MhA#v; zp89q;XA$7pzQzmt@T~7;)F?4Wou_Lc{4{&bm3 zCEZLJu2!5i;)Hh&+<}HBlwR)-JGm$lr@vcH?Nz)d44Las@NXrw8@wY)nK?bQG zIehn@mWueiv0_?P@;!F zaCYO%WsY|#t)V6LDqmhtO_gP$BjEO*^kPMvmtNkz%iHT@NV?=Ex>Klt4BRaP!oQ`- zM$BMhw!CImI-7{878i{KP*h>ns~5-wwM{u&Pd*UjV8Ve#e~F z`PPgv<}X}FOkMgdER?U&Yyw~Sc5VmK$J>Z{g&fVY_Mm6A)KQFq62V`1^h=u~#|vlt zxAc?+>EJa|BQ(qpuGn!X+@f!|^gzq`E!_itAb2_Yoeto&KF1DJfgZ~_Z^I~Jz)S_Q 
zgm8=fsxBuzY_6a<*%em@(I0?dL|vE3JOWkkL7{LSBK~}`P2<0i`|`- zI~9-W#?XP{{&IxBo#2yp=w>_dhbj9fKLAWS%KkUL$m7i9*9&|}wXyt=i! zJCDEj>)yYp1IgI`9`}#B+y9fRE2gmk`wE?^Ez4XUTpYMwGWG3t=yHs0Pd`wBLuE1` zs#g1A&Fc<>nB>j zYohIU!Z>1UMaGVLqq%KHe|&Q6Gw+YH6&_+ZhyW@rbjTU_twyw;Dye4<#SGI7UrNlcc2c6Q{K_7)C4b$P6= zuG`GdK3O!L!@~@HA0;u|ap47|Y+n(Et?g__T*(htAVqTn!E|g^Qe=^L6~s-63bTPN z8;(yqdhF^%tWQoz5s<-Elw7m{7$XyEVIz*P5>V{^*9qUV5go^ERnPTqfGYVS3y#AC z0}W~a_AU;nIgsR+`fly8rL5UQ!z)c9t<7)0`hGsv^4W>y*L)IeHK-Y6y>2*bmCr}) zT4KpeVf?5rZsyN%WEwKxsjuq)$4Y!D1XpF!^gD3K2fe%suPjQglpXE z#Z0Cz_qdI#6rp+C?Ow+`_}P^#XdV13g#;5_pC5=O4??>?GjU%Zv^EC3wd4V0YYVv? zb2stoa%;EyJh<(R)bcg4{oyv!9fY6ou-RZzij6bWW)ooNJTPIH`6_=MJ)}m9P4GLC z?8U)Lz9Qrnm-98ad3bmRf@_BheM6#1dpl5$cL1|?qJb}#PG}}5cp@LYiLx`(s}}df)bH*D)vm(yy)!2Yb>JZ} z2(Z`<{T%CosJkW6TzZpyiBk5(meV<@;45~{?HA?aPs#>`^}L$%B5GsS85m{2^i1Z^L%D?W5>ViUzwZ@&h@|(=*DA$j6poe7zNBbOpKd8%Gk=yJw zZ?aAMwlDHRkb{X8bd6PSQ6xGy^|J?kd~K)?Zg+Llp9LKab${y!4VYMTUhbrx9x00W ziK&_MYGk+;7){VQ*v#SJr`po>Gt3D^0fIZ{g;tNNkWTTy@yH8V)aU)z@d)|c3|j6y zNc-_7xn9V%7fP-avdQldG9d`)v8x5XYEiJQnF}|tuW8_Y*Bi^1I_JK_4)oAog(8%s z$VN0@E(=LuS2%gY#7C_r^U<*ItE7-d;yfIzOUVt6vAO~#x!OZ*f(p`eS+#%~taR~7l5dR1OoQmsR6;QbRbUwL$cQ>QiO1)c^(d=<@PKQp^lu~W(|$FjBiG9QQJvrXE6 z6e|ws1~3|9~Jr~FjA=|PM98iIn19vro4O?QQ-Y2Sc z*AE(~+32kM1emsX--&~{(PZI)0KMGiuGt>(l~QXSv?VJh7$zQs1yj*A=SoUo{`4!!#kF_yCxp?N5~{JsKYy ztn?KQift)hI9v=FwGB8dOySllUXsC$y%FP{{ozL`v2}bFH&utZt=UFZ0YoEh6dNv) zhJ$b124|DN5;KSCJU#=vLC3hBfA_B_Zac>5>A&0*>iS8LA1O;vqM=SPZZ}pjelOBm zw5}7Vn3hW&0_{5k_caSqR_rmQ4ZOD++NoH9f<t|ugVw)?JX+UAuEEeQg#PwZWWc2XpwZ3B6{=(g1mGF|^WglC0SRQgfDOMT< znl-vr$kU0o7|gsktVYis0uOP|w-hc_lJYC`#H<}yvy0sDH59}Ku!3i#(_wn{joWzS z@XznNSf&q}MXCdau8VJDk@i2Uert=iGo}PnPlXH5ua#_u3mf7DuTb0x)5Uq)R-k2- zs)<*%rOL7`1+A(Q++iAHb0rgEG}nlp_BEuR_#%wtB! 
z+?D3k?1G5bT-w@(I1BEKX-fd5b+v3@fa%`#=_Lz}kD7U^6Vyh4-tEJ~{M=86y-eIT zQtnOlUDVfxZ;AY1;?-|gD!7yNxwY|V+jqQ}^il|wlTMb%uvUM(;(v-ju-%W`{xoPl z(i*hLFNoCK$itj_l{cE@E1h=;oJ$7_Dls&_2ky;7><@d!hD3z5-vLrg_%0e2n}lJ~ zf$70Rv8C?JSz==vl+PXET97AaHQ(@)15Sw-?@{m5yqVw2898e%Yfv;;P+UI`3f%^k zEp1k51H~}Tf|?%dvDb%29)n3bWZ&Uj6%#EJtgBkHHo)s(#Hg10=D2h`%X5Kp?v}fr z!*M`X8h9EEc*%)b0!+77I`^kP^`VvqU0tQPWw!M5mgIyRHSQ;rY*Y}Q;rb5+{N}|$ zYOe<(5Z*iN-0HQZ43Gd@+RG0%tP}FC+Q_hc27qq(ygR&u8I4&8RGP|F;q<-oqgk1R z(a2TiI-g`*gBaZe#E#%!S`dc=Yywk5zvdOkuhm;k>|c1Z7<01F$S^&FAfp~Gg~i~j zY3aJ^X5s*De18U6jVSw1-VgNZ$EhP|R>p0#2OKrVp+*sR`jC5UD~xq{sR>I6#JQC)|C4B zw5wjFDkUlQgqK17YbQ0CCi>N)-cf~Ez0ohwWQU3$$iuY6vkwmtev$ZeJDc!^8*#L_ zk&0pn2vK6pa=5vFXLd6bkP_{_j9EOqC=b!Hdlk=sIPmJY80#Ao>fSf zJnoGCKZo67`qEG1tfM z`J~`Sht39mAvp)Ej$Vz0ub#NuNL=?>xUR+M5RNDWK@Un5y0l8OZm5_RvkgB_*R_d% zN8_`V1Xmi6oHk5r2GO!;;$RaYiZ zzpw`;VTV_{x=$H5)>FOtwcmMf>x|ublVn8AF*;685t&z4(~vEpOlW~Vz^NERS#lVr z9-G-M0ld-PgX|wZE?AjH7|A9J&!Z zCyLoOPp(fH@nNo;J*7krNfK-Aw~7ElscFgG6_uL(NQFK3^jcye3+O zXgF431mZe(iZ>mG_ti5y^C^-Ot*jK7h~8R2WFszry^SGAE#Eaq*E|1H&H_N-KgD_F z{O=y&{-?$gGY3-ytsw?QR&G>P5HrY(Ax%?4UCZ5h&m8$tn9T$s&cPdNo=c{pF#8n;8#L&;L3p4C3I(wR*@%XP6PvOHdpzLwK&c zvRu!?XVoH_=zJgllwq)5YtulNB)GWGd4{r;uK5akD99C6{J7g_nGT#ViXH7=R8M7F z7+hera#k$hD*D{Yg~jG3I6fVCIdNY-d=w}ej8Eu4K-G*%vF@$~v81@+z4I7CH8Pc3 z$_Tt+Jnn9gCrP%;R{I@iOq;UP_1MJrN)v<%f|s6Bb8ICd^Oc@QS5}HQL0?EMlSh1q z?;%2-6cqLb)BSv!7BE#90Gq;mmPkq1D(FnN3QFnXmif<2NWXBfzn4EdK(jN&Y={*?*d1;IIlW%a{OEb(fX z6MIt58U6G>OB>SLK!)Alh)>~`|0CB0N^H5=NzbCg{2%p8Qc#>y_nu$S+-ScH_Ss*A zXhO+g$=(jRHGk2MDjR;|}bl3QAS>_B+1kDAkYb2^?o_BXr?9s4Rl|2z>bvnXTknrXS zR)OVrj)gY4aJeGsgLXzG?9G&U#)PI4A}1IMw9N3+iD!)DxYFIH**1QSdNGJ)Lm{;+ z1>Ol-`t8is_*VRER%l5k1gop0Cb9}VuR0ec_*zr3+vxpuXj`@)nd}|lx&t;xWjGL^ zhnPaSfrIQW*7Nc^L#oBhML_1@O|*tJ(g%oPLu&`IsaGyznet{b?p7i<61@wmSAnYX z22o2rtr$6yJ6$hF>c7psg;#z1)5rbIeJ%az4&g%p$xeHDScAX`>vRM+_e6B?|5+sY zm<`1J0z2GV{{riqdAZYmwd~U z_W`J>->W|1lczrV;e@I>hpR2zt#k*<29#$h&(R!uy|E=0Eo7R-`C0W$Zns3V1PPr+ 
zcI3M#P!q0KYbkdsP2*FO?O2+oH&0gg9m=dW^4wC-oF1uf=XWrY+Hp^0>(0`t zV3rh7H>#>Ko!QT;_89pck!o1(tKqCP&?e|rX4NI=eq!gxOUAYn+JN@$n$^eZvUI8Q zIRCf+{yBH5uK)-&EgEd(`>ugeQHU!a^9L%2duaPUioAmRAJSU zzu6cd9*(-E$85*?K^89$!+l43tHSfDH)26xv%7W8f>1U=y{Z#|dnEiRp47)P-aGpV z-*gcZbS{jihgi=M+Zeob;Eba0t`fK|OmJ57$s`9RpskzZQuW6iqB#wuKDJ0dZb4$g z{;YD<$pFP7_gVAeOaCfM8K@e?)BffSC4RE}@?F@wfi-0rS-kc=fF{TF(8x2nfU|FTmR)~mKU81M5zC%O7L+<% zIYp_rnlETN#Ta_#-iS;|MIzQnl5WmDY2!S6DCld__|4pAmjm zUAz;B_pbk~F1|!IPgw<)Zrk^r7kP$3^wGBRQ;S?;iAuS~Wme?10JE(YdKypcyt?9i zJ1>jiaQHK|3H^qRr9RTxp%(BlVkW(P>BUZwsJp>qtYzvNC{aO*4e0H=_Vj-I~1Oo#%A3U#BY7F;krvmUq3Z(jA(t`C1Z3BI zf?I$W(!h(3f!GT3q4Hm(LHH9_9`tSN_`bOBVeK@06RiB@X&`}k;Z6FiJA!^cu=rY> z>|-r`d;Uw^Vtr4F(-!7o?zx<%vrJ)@jervzAwgCrArIWr;u^QM(Xp0w zR_fQc2=?Px`>E1-27%1%Zkop$Y^t1e+^5xRvmBHJDVIgT2XFcOd`OwF@%Mc9S=+Pv zTw{rAOJqYE0e9b1s8~?&;Y_52H|}gA(u(N2C0sg~%=c5VPb0J>p}-%%D}yI29L6q6 z>HNy^V8ymk(N~N86epI%7T%wV0*qDQRporaeNl$S$O#+W0n}=ywHEZV7axA>Fvi}i z5Fc8sbE0037GV#S15>8_F84|jUbw5-PKM5P_tUJWy7+S3cM0Rkl&5SX5>54^*5s%K zA`Dh#+_6$zNZUMLR`-X;;D=ES^GAx8YfaPTH`o8HAiD{l2)f;M9X1)nc<2-LD%p=h z2EkQ$Z_qreh!Pfr6scsId3drOx&bK`03%9rd~2?{9$Y2>X^zQj$ThY2NhNZ;dIj4JHY;G4N-C zY4oaIF>JJ+diUs2x8RsWd-(kZyE$E{Qn>-$D&Mm? 
zR&)LqcQg0_FjNg=wU zjI|mN_OLY>)}%Qx0NvYjc02ls_VtSZn6XP6caPM_uIeT#z9;iP)g-V*vWaz$pVu=D zD#*)kRuzO-tP1Y zg|uTcW-aXW)@0WZ<~Za|lFskhLqy%F%_brU__~0Bcd}LL0EPypNGgZFNJaontX7fY zKB;U9Yv&+qGi*mg2{_xWGpHx{23k@bcj06m!_~ba`#Z(XTQgXCmS^t5)B2thSRGtm zJW5>qxZ7_8m>LQjY+QNuFT?=gkh#oiXE}1?5&$=cFvzKMTA{N~2wuW#A)HvV@CVgs zbZ+KS;_Us0st;MtE32d#od6XmcWGW>fgh<0-q2v<*TKNrVG2pGh09Mp7{?zJG*~t> z5BTjqRpNLR!;?5K?mjclg==SIID|n-5#GxQ==?jWRtoudbd&`wUfKA4jW!q<2%P)a zPLo~z;~vmXW}BakCNQXnKgDu+Y-VX48VHs<`nn(O=%;?&5>~B(1vqQv=K3pNCUXso z_tF_>zE?Q`lF+hNUR>N5@jF<)w-P0@IhwWnW^*|bf#E!hthEX*<{zUfva&18AQ^|Y z>d_sQHB)su9e6K!aJ(?wc~+Jd*#gvk4p`l2_$PL@|d2Ypfct&+ng=stuE?yuL)BAYnV)i06i9YUaR^ zMUMVUQA(01(3{MwH5McHeKs?Ahcz#dd%1@~`qT?l2YT+lxv~>0D7kyXkD--LNb&S` zmH{HL5qdDrr*R{$uwf@bklUvTSD)CrtFrSISwD+<)=c!}QEg1sxcq7>ogYyDq=?&Z z>r37HD#y>KlJS8p{gB>#WG=j4yj!GeaK^|j=}4yJ?Hs+L;$Zk~7Zon~K27|cIs1CS2B6#Q|y-NrHv7$$yffKds0OoL{I zusUI9yQ_f0+dki;mY^`!GEGGrleSu^*)$&36WS+U^?b|vmORMJqefTp$b`IKMXU_8 zDgqj>X9PyePAX7|)ZYFc#h)DE2==iLsW&Sr3(Vi} zprN=hjOQ4u!0?8~7`w*#^ICn6BE17A-CFb)vdEOr%sghUTjK*R43&{qy7?PeV~Hw- z#U!hJ%jDTW*7PlT<*I%4;JUA2c=ycv)T9$y!~ zGx?5T#rCW}s$3l~AFtIF!~?I?qRk zo8v2-U}>=1Ekp-O(lF(oX-R%&0tCx-8OjdP1w3pYvH-FxEx~6@ILqN1RPp&=xYbxdOc7CaMG5^uEd>-prCq4@kB*?nTmWb3 zGrjHW{s^J{=bqoFITHeDL#tfKBr{deM!JO{7q0yczMT5iH*c$A!tt+&TlF4*B|cN2 z|EoFnS8m0{Xr#XSUB&4y-1;9-Vg@HL{g>?B6a8lz_7BkW#ut9z`u{)s?ic&z&JF$Z zT7OJafAKy&I_fl3<&^bz>;DzoQJNBN%!T0Nef4py}8w+G)FX;WsL+Sw3KZ#{* zG;e^&M@NfQI=6-X02=`cn91VB*Chs9!4CX?zwU*XCkP^8E4Hf{|GY^~`TG_B!kJeTfcV`UjQTGF&wz>D0)|^TJ@xAD{df5K4q!Hq zrEq_+mi;xRa11hk190)L4MqBw|0}lrgb~Qv<2k(lD*_EXZ$R6Nc}-rL<464a)%}wI zIAE|brAfB%UukAWvjIxIUL3;O4-`Fa^}xBpw-f4%ho zE$`oq?|-}Z?>puHt-H6|8;*9LXX@dGpJS;4XHE+*4{V{{#;h6o;Z!T?xcixlUy<*r z3_pkzQn(kz5+@-t%;QpKhvq-WEj++$U<*LC(*e;}BcqZ1F8xJAzc+xWWOxVtne%ehmT3rrqRocvKB^VN?UPzOsn|EuHB58jsOX zFv*Ke{6GxuJj5Ck6SE>`HpA)L(%^{!#ILCUhNf?|^4IZZ_q2!SniWm^Jy0>j-*!Wkx3k-dgc77uKFt&@WRKSepw;z!}x2kT6csvvmC+V9YvDY1_ zS?h4IK(k1<`RnFYipMv0GzVqN4EUg&#n59C7{DamjCU?um2%g)eovO$X$v|+qNH<#@c2@Ym3}1G57^ByIIFPT 
zB|_|djb7YT8|iLkoq^S~^8iE6toy$jfk&8L zD80z=_7&Uj^@ANUZepw7JQs&9DeQ)=YtGi$8~E+aqedc43qxL8^v8GR@!FIINeM zt%kR(+0NXaATNu&cp3w_xXMoewtGnW`kk=Qu%-crel<5 zGO9ueh47=4y-60e*R_^xXM32I%J7U{@?hA(^Wh_l;ol&bL3oibF~V&*To}I85sFT7 z9y|+7QUY^$WL)#9$OBVgY@_VwGUc;gNj!j?b^=qYrkrwN4nsV)qd#6|FJQjC=B+t8 z#F)>3we%iZ3`-*?(UZf=^WHm7Zc^r+2^@Y$K|n#LhYI31Q>atXik>XeGe1R5?|`?- z<^eT50Ok;SqYDlkw8joz-^@f=P{Oo?IL;$HOjw9O`b1iKWFiM|W00}~dU8I_J%C%1 z!oZjTEw{CJb3H^8s17gf=e?N)n;o*4w!yFN24PJU)-Af|KFQ13$a?rO>W*&Mok~1(V zi30k8&c`OZPFjL>OD2F3#mOHpqa5b8`A70Ktlg7o(ZGm88DK?lL72Whx8{Qy$K_6f zY0aEl&IABvHze(?&asU}1D(1isbGR*t$5rpc&>hqxu39JZL4%PQ08da!bYA*T~a)Z z4`wSa1Db@oQoOdFKo1hEi%WqiF+_^Tk`4zwHGIo+d$#VS3>moo;eh1Pt}h;=2BRo6 zkqO*xz{D%$u+f;I?Dd?3x#vM+5vksqBcyJ{mhrpMiKiXGg3JQdQ?C`qzYBkQbf?oo!T1tm<|ABy z1di4E^3zXr9RXC&m6e~1K7jO1;>FGD+WJus9+ZmV=aUhKJ~8oi&g&`8P;+Gu5!1-U z$@IWLU5yAy!WFo2C=V%8?kE@4h?gYup$guVMM@@Y$-T>pNKz}Uo;EAa)MN@uEJ_b! z_b#-pTm6kA3B1>HSMoxV7~a~?8W>s&NafFC0j5Kpj0xBwuv9m*ymWD6fLCEd{d;9m-9;j2a}#Xe^9e9!Y9!d>P!0e z8!OjAB0_%!Ou1%fp6nXI?ml}Cyj7=L@Q|Dya(x#X>$RWD5b4@bTdU2mjqi1mn$YC* zWct#PXY&yt2HdIL@fvHQ>@?zwDUFw=t~oSIg2yWixh*7+g*%XiAv!7dRm_litx7XB zaU_vI&7i0VUXA|ghMn-k-rSA^@2f#C2hNY3H6nr8#BgWDNc$DQBx2_sLI)@e^!FU> z#yXI6BZ^ei}G={6LYACc)UgUNgLQCDmQsxtuyJiXdmhagd zPQ3e?NS8=9OcAT{Q#g}y?}mEZSA7A3(ruE5fkBa!r0W(naN86z3EfKTXGWksEUJlc zev6>zZ6;K(F?j@2LMtXOsX63A74SRIAc>>hKuLiSSppWhlT9SeQH680$&s*ZL|IN0 ze|Why>L`~3=JGP=>3#o@>yeUPg`g5;|8H{d?uSi$%zb1+zqfAneSe z$EQNLR{ZuT$#6F~diKSF_3X#Cb9PTYJf5xp9*7QmJm{pB%2Dn|#<1cs43PB{2x=4< zfq&cP7VgC|Ecy4W(BdZGg3JQuDxH%-Acy%&6-;)^-|h=5uT=zLSzwqqcGB?_@3+Yr zwW5h@{CB8AX$=Mixw-7`J9rzZLln44+Iz@DZcXYCJ=-O@S~>^6>7=Pjwh^1^K1$S~ zzGCd){=A@x!IiCmpryK$nC|s~0!y-DlZ*vzKZ0`t=<2^ADz=hJ^b>B7!nlG^X=?uXu9j ze5;Y7tfoU&xlzkAs8BZ8&jw1um7S#rzONX#x3@)s-+o*aS_`x-3u_#u;yT$%a>B;P zUks+pgy^gfCX;qvl9s!b{0Poc{AB>0J-D(`?!Zdscb<_nXC~(B-~;y~sT7x;c^M7m zf9y$lptO;gx6I(?`3+3oI3vroqNn=SO@6d+8rXE>*b?Me6kcCaeLE(n-lB5)dg4T+ zcv8yc8)jCT9Kw6{cUdmqiRCj8IkX}1YxRV%`4(>u$k#(tOIT-@hP+*aX&-8ll0nFu z^jpbEcJ4b0F}c24pc9 
zd5P`o#&h)d!V9mF?Az7w>vrp-cYWSpLDh;Ix4#%0s)8A!0~!5(YCy%w2PQ(My^v2b zfb^GQ;iK&2?^Q9{HFk3fi?3h46r+ecIw_=vu~yY=-rWU&P8R*$!1g%{s~_NijLjnK z`cZqhoOeGpt1hR#qU>Ur5cn(&+5dwl(T0G8h_$0}b7dxX*`7F!Zv_kN*=!6OsAV4@ zE8XYgQIVDdGDRKk!%ssFl&qsNj4){{7e9yVi(U#zTqa7HbSglPo|_1NCzD{F8X6O1 zsHyxl$6WYttIvrmQIT`?A={wh(a$lo!{6U2;{6Dl>qmD4gy#=RzE@N-x_NpAv2A)5 zNfqCw-fkiyxK5b%QhWTx_wg$kd~#FTVd~l3Y*9OEomg_Xq?_~bo|0cBj;rgGzD3;i zYgVVCjYKi|Po;&jz%QTF1RIk!I@9PSwz?PLw>chp4^9+)jUdD?XFsA?AtsP~*(q1i zTCA>_PN*`3qQa-~grMavC@T9Vr5BomJ;QYE%x%Q&ys>QGEg2cxh>R+_^#NjM$waoL zH-M+-rIzP7EZ>zn^xQ2Ai=<(y(p>7|P{K;}+@*r_-M=uxYTUEhfZ3A2cHM2tUH`XA ztv$Um-(u;_ZO9JDga~nSeR+_-cZ3<52BusuHS`~N^grs#IvuikcM+=lBa4J3%DK*Z z@63T`4z?25B*!9F5@N1>&w4#qYNJ&Q*_InSJMTOs0q3J0uC~>yIWO&pCt`1NT(b&q zGn#8#sBrYOvGgOI>cPS&WA?MR7)htPfgfI!kt#|Sbx0#>6>6-K@Htb!$;#NUPYEBj z!+>f{Px40_=}?nQ`uwRU`MADpibwpto+WK}_QJJlWhv`rMyfz4_`Wk0X%@v7E~LPl z;F)dN65ktnz1L;jCXG#-ML`2h3MHiNE^9nChtzM5SaUO)$8-724xqEeE0vNB@SmsW zc2=gjWTMuhi_yPd^aKX=?R{VKHXX#$=~fO&dJBnUp8^IF-24{~$r44DxlZImDO}6q z3F@#i-~h*@X9)RCIS+j$lRCj8ygyh6FiW9B#7(eQk)^+^tSt$I!q%ay;qz4Q-0LiZ zLT7_-NbQMnb3af|3>*myH;{VMB}k>(Vsznic8x)%2w&~OU9J#8iKCvosV|=}H(U|J zbuU8X-mxR(bRh36xi<+T`;^YQXC!<@ZJGG7jk#9c>Ij0 zBM%R{#P8Nf_cN;Cty4v1tlz(K;(?K7{J}yN{Pe0lndA8->RHC3<%GOny(-|NWsL8z z2W}cmN&EVLe$(53ylD`(54_wv8R7Dyf_AsQc>W`-DQPGBOsr6$wskWJVL}&)e6csa zwDK4Lmdr88Q*AJ_aUI96I`+a}kQh(1IXEoDkFP`*SJEARPk|Ry?=MxOQyKjEssi+D?`kY%5PeI3$Rg0Qd7a8?IdbM1YJ&*tF11;ho4j5XP==z!!HM(R1na2L__q{mSV zw^R5G8}9uSA7eaH(qq7Oa&YE)w_fJO~ab6B@ zns5%6RFAuII+Xn5%F*a3nNtq6ne!3h3S-}f^3>#fw2)yW-C`-a8J~C5QDi-pqr?$a zBV;~eVNq1!g&A{8o7)^7^?H zL7&;`uh4?5ous29fr@B5TN$bksY3+}XXWX;E0(EY(7YM14QN*}X7?Wyb>zET_Ry$z%E=ry2 zfKu|SZzb=Ba5Ewg*gaQ9>>A5`guT`&N9&cG6_YpARHU3j$R75;RLJuGgq~&oBt2?-a0JGu8kKJ1Y?j;38mFnDG8O5Zd6*jQ6z_< zI|W26L_|coB&8X;1eB5#n4!Ce5*T6-*o*gNM*Tj{KIiN|_P)-!-v79sd05X{_r2EL z>vt2H0Kk=+t`osQ3Oxvcu!wv1UR$WmwfSXZ-k-P7P5rMcf7H{62vx5(i}pS_owGGU 
z3XH4d0rOe9YVkmk{XVm<-4hJ`YEvIW74rbrfEO%0=*v#uD$+-nnvI&p3ltOoHC%B5ESEtrz=y){|LE&iYH=pZ%<-Bj+_`MUNS+yyn&Dyi*VJn9}y$5uB9b zyJx%a>%_aXD753Q5);&yAOi2+uD1K81{+S^3oUCtzC6|Epo;P|cU|BrsJZ*Vgp=>s zGSYa^I-T>w&}v4X2S?8K9z8ng94d#b)v%Cl6(n=5sc(O%7?|zX`NTtik~-E?JO-zHoWAl>frWU0yfV}54{D6w-$Sa*?mc}q0yoL}o2p*E{dQye}c;O(ud+vVNxSoLza7m=&d6 z5z{(DuT>BmD4pA*dNfZj8pP`ckHQyM7}@V@=YVGwQP1+8=x_9y0lnx=`Gy?*M=Ffh zXLkqJ`#=4hC}U8>)tV`}Qq*nTq%kC5PTDW^&@o59ybbC7mtA z`+-U6yI|49q&7BH+wS|MjF>mPTAlr&Sr!D2`2I}4=S*_j@QL?%^V-5kk2m29>x=O< z657d<-i#`@Oda1|vg>@&W7bnL_cbUiTvUPhl>Zv{O|DW;Gml%lB{yU?syQiWBZp>q zSG_i03eXff#d^7<3Per5{=|HyJMtxKdpX%!A1Nx8d;bwbvCKoSD)s&848vku5c#l| zYt~L`FwKI3tX7XH-rk-5{LqwXskviOT%&sBE9NepaJn(LcnSnVLgtS-` zI&{35Vz)#+=F^#e?od~{{`gvr9_vd};Rth*j>=f5-uKQ=22UU=g(jTz6zk-M09e6|%;SZIW zyP>-W8Br1`-K%!O1U{ckY>l`lr@7nSP?_MP)=Bas;>Dhd1YW8PrfU8BOKbR8&=P}8 z_WHj5fGG{1TI?T))3-U_Xle}i&b!^VDKI$JyyggdsGC0?lY&0U#^O0FVz-aPfvX>gu-vSaa?mGRD_}DrrkI}^%C-F zkrE9-OC-;?fMmz)RI!JooGi>tpl*bt!l?qAKT`4F1zNeF`r_>{HxzV zBMT(n?(G)ed9~6xD--~gmJTSYBM~g)IX94E$dR8DDooyNNj^*OaYiXqKlQ0n^wx`8 z-<0~SOy|2vW47f5C*RcF9FF3R?lfjdZ@IdyXI6(!ZhyC^5ytrOOpv2!he;fNCFyhATbB8BlFNNp;^hIyrYRb4 zv0b;VhQGb9u5A_|oi*caY1z^bx40Tp2m2#=_USA$^K~m>pC6Vym9;?!KeaL|>q0eD zzg3^*xxy0uXVTkV;&H2rePUGU-QNQnxI8;T0d(gXs^CAm<{+93WcQz2PyZ2)#icd1 z_&{{TX>Hu<$dSQ^N4c2FBBGAGyKXcDVrj_Mmj!=N54-;wPamY&XHBjDsYb>=KAg3$ z6DT^+4D(ugelXzYMYaR1*lCK_@i4WCtk0*dvWXTPCI z{5JHP1E{&oP}NJnXIyZ_;yM1s0^ulzKER5&`VQgve7@Qe;HHrL7=)s9&o%2r{Tt<+S*{ zes!A(wIUE4i4}OhXyAS7k#dXRqEG+`F@IGcY9ChE8lh@p*p^Wg5vSfq6{p$v!o*3# zKnKdEucbBDmTq97p{s7@#G$6v6qE0rlrbw?^axgVwTibMR?$^%IEcHLM1fI}n(4_T2HJ zpBJ+t(o1O%>H9@1zT)y=?`zOR%aA@27b~!O_E?6(n>XwGHHGWN`<3^t*Gx5Z`x4)u zh9D7b&O@tNX8ZU9#yRI`MDo9!ICK3W_jrGYE5qUBbSi;7yrbXptnSTdI_2R*391jb z3!3Yz)l4}>RZSfR8*As^>|Yh6`}n!_%-n>`9nRliz-s?;%{WvrzL^lUlFlbNip&K&)`q6HhO^=Qk(nAJR$)`c zHwNs@L+E^TmTyB(P)Y|2@{zKgT+yrpd+yYT@`e3NPBIn~#{E!_Fzx_#*2lNWbB^{Yfl}e&$Q|SO3_fZWe0l}FDHX~O zoW`ubARlf<&aBqPY;qPY8+P;*4P~c&ogb^ksxYw+V4O>H=l>YxA^yw3*NvOQrQNY* 
z<=pV05Z#4o>e5_iW0CcbB+j)>Fr}2et;Ig_305#X@rutg!9oycDSUoI4bM09z za4w2RD!S>*-AdKVgOu${$xDb;U-N%TigDI3L-qQ02A2NRA07UOyXNc%OF{nyCbOc3 zy-wVEjSFfTw!1+^WHmjBDBNy(ijQWv|EPM$!3Bk^ww2`kI)gj3%7nEfB-^RI%H?s* zxQFQT7xwQ*?9O((J$ZU@i~M7=jLdIG4NcX^#+K9CbesJZzurD4aonLF?z=(&ms=Hf z;V2l$)_c3A8j3^^MV5JD0`wQYgbGGDKS;ruFm~rpd3?E4^I)**2At|}%iC+j=V!EK zm%Bd~cHhm9=Fy6y5nUZr+^A!4$xD>@UWfiMfdxY)i#?$J#;f1Axlttn>tH>T=AL)L z?&dHB3canj-%-T`FDCM-4527jd4^j7BM1J2lYyCb7s72Ii`cnGQ!sOaR-)dOk2`FS zSK&eQOqk_xK>v@$x`4eCHT^{!w}qJT*)$P-6yY4XnhXz;f{7Q8U2MLeg8RlX-$D@J z$R2uj3u^=UDieSJ;U1$=Qg!4|s4m2?=;kkCpW+4qpxzSVWgPhvPF3)k zBwQqe|SeKl>DSi8+2(zsZZ^h!A0V2oe z!5#K^9qeoI31V)I-;iA6xewxO2@ba!S!0HnlR}LC#;1TMPqo#RvgA>87!g}9@n3XE z0}-gnD4e1FVhO$SPhXR7mW#ce zhT46J{VOz*M49@Mf6!0y2x_TLd1Tm&OE)?s*UkHFEY@a3U>hkN z^Xs+fF{3q2%qYQrs~haU4hQT*B4&;nUO8%yj()x9{~Dip>9m$ij#u zoc7j-H3a=^p4Zagyl)sFwAt`whsoNq(w#~wL!TS#jMytNuAdEr^-t0ZxZq0orNl=u z&~P$pnS?9f`{Nlky&U=&%2O-rueup-YKvv2IhNe=?ly zAr0en`Bksj4};-!@9VY{Bw`Lv!YHAH;lvy7U*1(?_Gh11jvy2BG?k}249MXP(X<%i z{=DA)qSk_`Z_q_3U$P5=N}WE>B?o`gKzIL^r|a{ek(8ELw~yUbAMWpaJ16sH@)DNM zf0}r`41Yj1-i14J@`M-cE_A(4>N|WwrsPXaB|}#pKoc~p6>$bH=Pp_mMptf7SAOlq+A+tLEiN6 z`F>BAi_PhHNU&8h%OlznXh^HT$4a`57?PrPk;SZx#I|fq>q9m}@|@3VcsUrW0oY5# zaA`EIm<)as&sp8tgsyoJ>sZ4H>C1fF;4~i+oQ8HNdRacnP_uGZOqE^T_+H6I{p4wl z>?i#@Tb)nBc$emrH&*=7o8?GzzOUQ88#OF>?yrpFzCu3KW0mkDAQ-O<@Nl2OnpaU` z>gq9-KSy8m4k2di2DE+nc3qPPc-#iF zVulM1BoI}Iq+1A{4IEac?npc4)yqt5gWWc40D;OXneH15yL32#ffq1Vb%gVmvJyy< zL$8#uuU3FfF)`!=$0Bk(WK`DIHyiQRsIE6W6DfC|=BzS92Inv7kqxTArtc{^#QoyV zcXiZG0OA;{H7_0}zmK_;4C@kzfkmH)kve~5lMLnWg}&;y3|VabxzcNbH+GC_n68MbXrkpVTal ztOV_hqDVMg_ek!V>2UuwaZk?s8)rPmHs# zD17m+%W`JR7Y?>GBmC-?AZ}CTkPsx2Q0cVPa}CbRojjhlX`;I1>wwcWDG+jQv{W0e zOBk7!s4t;IQswRDi!)B(Ivi0J9F9m4GR*VlEA@@TwAs;FZ9Q||Xm9?yF)wLC<68KB z$F+MWe`+b3?XZ?ej03_o&PC-B<8dg_frD{fmuzv=wlVh{YNWcu`JzU&diZ4X#&BvS zdKms0Q|c^LDpkpuWF)tN$|*e5E1POJ@$+ z`#^c%1P^<3wLAPHeifR%a-;7!I~EJT9U1sm{7!^t47;KWozKefy%CNe{)J&qr|vPTyW$4+pNq5kZkjh> zl{?F2 
zH&v8ck>@V?jxI3uwZKnH_d<3<0~F^i`|7tTdf7F%`)}O7V7E24gT7?4dbg(kMS)&g z3HLZn6QFabzS}Q9H|*i*==8`cOgV#kjuL*|SWzweUp*!iTC0jsISurzrrMp2Wn zHhSzV+K=$UjJ`t$y3am`LLGskr}MZc3mxEXKl#Q+F|Ar-<#v!QZHZHe&+YB39z4?( z?{4~};c%6;L}Bd7vjKji>n20iKSs?l-wUT4mnjxUbITIj4BRXN!f9{9KZ^5j%~s2K z&8DWeFvSct3FOr6z7haXB|K%7)f=vwffCJ=H^)DpLv`O6;Z|^~eQDBUBfaX@y>s)e zIXCvM`$IMm4IRi%8pD2=%2xrCJ-}SawS>ysx##7YN@mldf)+RIcumTb31?#9M01i} z=&E@wS{zE;Tj>4H$W{@P#C!ktY&VH>^Til1uC4Te$5Xy`-xMyHC_A<#$U3fY7K(p^ z^CeYQB~VibR-N&5!(}je)fh28FNQh_~Hr8J|e`9$6SFKRMvts@7Zc2_cr<#WzG&d6e zwuwYst83{#{b=BukF|llT-8g{dNg3ADth_H@#2fkb)?sek#UwKMPomhoK`DSJD-Kd zDtL)z3{i(yy~p#bv^xMLMiXZ&19_S|5)sW*s@3fOHL9-LiH_ErN*=@1wAH) zWFvt_VQc=%CnR0>vl_d#!N!{QPgXfz1KQPl0U~1Z)eKphXDtSpz=a;ed{{*EO!w2#zskbIJ_IB1%Mq@xc-1_>L@e2`AgW}&%HTi{Q~cT| zpLGZXTwb($J9dqv!3781pUmuj{1j*HIZax&RW3(D>NI+VPe4VqfAnKzdG&_=s@2!G zm{Be*nj10F&fryy)$S?=S-TE(Zveb*aHE(z^8+S ztGIEFnYpnb=p=b>aa@lL?>Tgel5JNg&x;juzHwzJ1*_0tng@*s)KmIl-MdkGa?q$p z+%`T$cCof%Bw_J9O^P||8at9buau#oPO3OrM9uI%PQ~Z}45XH@*l7{0K2`G-CcT~~ zE22wpNfW74nsQxKIJ-M8n^>NlKGw;aYT!YteYN%2t6^Ut0cEQ6Ul<=A|MkQ$b`1hmGg zm@vG~fa^b{*ok?wd2wji(7xs;wJwMe5m9|qHGqt)I49NOXazTHiWHDYKD( zs4tG(-*CJ)@P<+HbL<&@@V6h0QO~?G*!bRD=lMWqFmC;iV`{UC*jEKvHkXF#$;l`wrut-r`?Yj8(-P0VpbjYM2;$Jo&F!D|RyXbv zN#|Cu*?V}e=PB>$1nb2W}~#P{U&MY14^YRLHC!OWa6*s;>M|L`2Q`ke(lH9)P!<{;b>{VS74$LW+l4+SCFSvQ z`%%oZu^7&i*UwX$*QPRZmYO%E&JJr9WgVDBg=VFkhYy}bfA%)+Z{X#Zuh$bOu3x`3 zG>>FiSj7Lr$nW{Hek*h0gQpVIBl{0)|4kf}uLW!DAH#oe`V6g>dE~0x^YF)UDT|ob zIO<0clpl;n6EMq1X*I1I&X<)zW9m>oWJ#JoR?sYrM;*@Y!`d*NZDWA5*4g_*4xjg$-wBTzzamE`BQU8^p%51Nr0 zQ$UAdUi~6WqL_iU8%&>L*!46~=*+`nhxUJ1E+t{qm#-t}*=Y4C>$#M}P2Pty$6vs9 z(5Fal8Ca!j35bgQae5B(i7W_gvEY#{E9?g6`dMh4jrrzRyo8qMIG=RCA^MO&>*+ABc>3(X5s(jR`L zs#Ou<*7pp*zi{(p^%C_%qBH?*F|QiM=BCR+Sc^*UKWBXo|0Dxq_{0K&_Z+NgN+yl8 zn+iAWW4`Wqev(zEabq{)6T(&(-ilg1BK*4W?wp2Zer?!C^FE}zvO|&!>Kkgqe5U`S`x+LqT~q>&w@;KD|bZiwT1FEoO?G_onnEVUiqb9LCzJ*l*(z+&rl5a~@;H zS_fnqpF?C_;1k|0WghH3r}M*Z{90zb9T>SD=6Vtvjy$D)z{|qpN=syxG&;spH^bO} 
zy=~MA<_+B#)5lSe8xPj|qu^y(Uz%}$0OBb^Bk#INvng{{mRac`vKX29K}F_!<*7&< zpKhIo!=lWn4Eu*RgkG%JO5Ozl?g*UP#Uns~AaRNFl{Q>}OM-%#rKv#9dt_)sIKeAz z@tMO^erUq=-w3L3{vZ*M1?^AILS=%STSsr1`@8ag=oOD9Qq$<^bJFC@lg%+iU~Gym zF0V8?2=&}QEa+&mnN zaUOrdkhP$C-#UanR{uOY<>~jTY_EY{CGUaO4f@vpHwli%!mxpz0^t4g5>1pDJ^{71 zty|c>a#C_eh!)nJMNZ1zmZq>yORdJNInj@e-kUQ^2T|un_5^nd!dcFUu+sy9E#T_cgLRc}IWl(XWFz$F$y4z}L#73!xX-~Wv@jBe9 z9g&0?Lm%I2GQArpp$YGQRjh-YX3-^K%U5liv+{V&3bKOl<1AW7jFKcnOS7p-4K|%~^nUvCH28WR-tm{2`lpkK9F}#P z=&-6L9}Ji;Lm79T*7R&t2KJ1s;#07ptlAbVqcCIlJT|i+J1s#{dfdhR_o`tLEUk3B zZN=7=5Ua;D+Wf9sd9Z3pT~t@`XHc$1DYN%Y;yBixfdYA@ourucY?Q0V|5tP`|FO)K zw*8|1>6Y|-PBZm^miCN=m%uUmnlHe{nN=9ZW|{RpMz!*LW>H4lyvEc^w{6qy05^>e zL}p{q1*-v(k$VD@x;u()Mcb4C2p_Qo7$WC0W}O92Pea9(!F4fa1}Ewc$VGjp$lx5U zsazyBHWFWWs=Le<>|APHcrIdBZD3rQr^wMFCajb!FC$}*gJ{V9$)cWCZ>dA9(;}k2 z45l{fy_y9L=Y78{A^n?H=mKgx%DN}h>>MpG8xz&9P#8{dl9`+O=K4$S4HZWSzWavT z?AYlYRE%{{1NopU$!Sme`%_%DiMs%-;FiDrZLB=`Rp|o$((wnsLT9Cyu=yjb!2DaC z_?jMMmQ9s80xlauy>5rb5 zDNuH%C#LL=Bqsa>_!ZIda_~4-H~%q%R*>-QZHd<-!LITbA3<3Of)V^m?H=Os`%~PT z@ev1`;o|=mChHW6X=_Ie*?(i_wfO!KUDX~)@7}l`8f3pY`p0kkv!iKdf9gZ6U!_OOHjInK&Q0GT5$RJ05+ zBf8Vqet;q&7hMecY4uYXdOld2Zk40$3hgT}8!(N6nF4sMTunnSo+;r<*R8QY*g|g} z@#{E`rP89cHo={h^{nFI{AI6Y4@9wz;m&lF&AFLjcU0EiHn_8Cg0^* zekp*Qu~WDm_A4@-33;dagl2%^5!yN(oBAOWGpZz9Dw!bZQgT3z;SfBiL1D#-n8oF z+Sf8LSYq%u6xp6UPjmCpot2wEE)cx+`8mO`2vE4V314gKJB7i(^7o=+oco)cN4#7- zcG1X1(4(^$bY(zLUe_8$iOxhjScU65=P|{)){@+wBpa~&l$8mGcMCYq8pZkSm6B4d zt*-tAou5a9RLoW;!3AWrQn1i!Msc`tm^X{A=8kkaMSGXkXLb{;nvv z2tNU%;WBzWR~gSN2DR)1OQv~s=8T9(r!-Yx+}=7<6aYX+STAl z8PBOOO-yh^f{t4QY3DQ146dn=x^9SPoL_O^P#5w$mirn?$z zCKKN%O!b+?DTr7F1b_e_-{e<-t#uwn#bH=A$h}yq6er@nKA~+EGsSr@r=13I{}bDD zHJig8Ir{cZS1F%_i0_WyggPEzI2aF`qwxP*7%=f zVz5TcM6OmmYC0#l|8s3IfXX1(aCcr)Lrv`wpnF**Xp}5M(B1t=!55+PvMWVl4Go>Q z$B-|W#k$o8dO_%s$>vug7|n@A_&}@MeaECA-h6;%MGy!s|8H+=+`s?Uwe>ts82_hh zCPm1-s}=S55o^DN7LD&;!Plb&U9xv481@?--5&5vbE9a0KdZ}-Q@BqzXCR&P2dBC3UxUweL$h9{6_NoEG%e8{?S|==2xHI}-5-|s 
zmC}2CB-UeT4ta}~&pLW{ny)&~=r~nOxm92O7d4YgYs=0%GFfgP($lPWKZf%tAQeT! zkyueJ#&M7cy>H`?bNGae&6h2^h)>i|fH zf06+R0DV#S8&gK-4QRdAJG~{tCsldeRZvnH=K5dPRl;?GL$HP{P|^Qc*P;b6Ukfq9 z*kFjw9ph4np+hzD(pPA}_22~nULjxMwNyHTQSk!+0j~FYspONY&PkA5e!T6q2(QeZb2ml^-ZA1y+vx9(2QU3l;_5NCy zf1K<1^Dk!pJS1puWjHj?nVO`wXng!3XqYLvI{+CZ$G2?GYm?)Me4BYLnx=wkAwP4j zO)i`R9SDxe@;kBnNoW1fy~6LVXOVn*<}}HUY}Q&DU5vVEd+Z`Z!)rPeQdo~F6ri#6 zjWhtI<23-#*X0Gd_ECoW`3$?WvU`y~e_*Cy{cB~0kd3K^x| z>AW3fU1s6`ULj6II1gdu&lgH!j(oWIIXa!-3Ki1ir19dbpB<8<+qOzw{{Dq${9Esl zE4M}QasEhhpk_;ragH3Z&_=PGph-11dlWWl(b@zRN->lx7m9OeyS3^HfHmj9sdK9{^# zf;a|5@fBFhha2qQ^?H)?vyn(@|C-2W2dX4#7m@1AH|&};M_<_aoEHYv)!xGciBqix zkiT{(Ku>O{&(6Szyy_bu;_qj4zI&84kk$@;H)dVFIGxI5=&@M12hu0W$;m8nV}!pH z^?$}WlnJ>ZD?qLmk~U>zWc*b8T9bA=tJS6UG*j=+a`m$8qD4}us^aah%VLAkL9qZO z^72n`8CnuC_no?l>WKsOrL^{wR|t{L2>@P%GjHr=3MDiHp2BMg1fw?wUA@0+!*Gt3 zKuv`?&A}Vf6_=u?bA?z*=!cZYSOC>Rygx}kcxCzdrY!Ix4`kSguufMJ4UJvI)-ffl zdFD$?X%tjPyX0w&4Y(yqvgeGT2!Y3Bi0seC zI)J0ue9{u&m?r4^?4nutlF-Kw^!~Ni7VB~MR`ZO4YQEfEdM?8ckbNHPy}|z+E9`Nt zy0U)8vKn8IFu<}Mpe~plNF|0#>7)kU+f&ai8r&P`nj0zD+ z+3Bsi*44Yt`w^pF^uKn{JTL>dui+Hg*|Qzp11`e=WKVS8vzjt4GI?|bz(*A{g);5T zM85Yx>bqV1rN{HKZg`s@P$Ep~VOErdGsWv#?6fpodpCv%-V#O_SyGPjgO?Ko^Sp6(~}{dgifIQwZJ@cm-~x; zb$1{l74)Kh!-Fs-2=pYvu@#hfg+C1PmK)B}N;*S9$!e>5D#fB0Qbk@yK}m^OqqxpW zK-AnAa%DG#1*zwtV9Pv_#Cy8Ws-X2g0PGWo8QFivJNc1T$@@q7$S$}{J6@WC$M)eT zklM4sEL&YSV77@YJ&?1ZsUk`-0zEnUA1wT-^u;y}3=FL60!cT0<3$+A6-G$#n?^q@ z?Enxsbh#KQy%0bD;B1l%uVw!xfMU37-kWv~S0che^mkSG)Mc@|cJuiCrRJZ7ae4P& zlIWC_=09cKttyE-L-8iR{et7^sVAWP=K{4b#ZmvyNOn-&Ji9^6ex&=t$PIq&m`%Tc za~yv2=X_@mfZ~g7%rD|jJXR1Ld^`b2{HHQXUyq!VlqLvRlzB;h!!qcP51ayb#ksiM zf0fGp{uW~y@Z~xeX>_mwr9VC(W)6N$=v2LN#F>H8zyVzL9z-F#gQTgUO|oE*U^OB&#V|1$4?Cu0YY;*v?J53#!P zx8Lvx5)YF5Lsbd?k60^q&9}c_M{e(QTY6?D9N=_yv9hxINw#9PfK}d0#91+lQmk45 zXfWmgC+n`rWSpj^<^TZjrKUytf?kO&z|f6(wVcf02|P0xFq7s2X(|w2KHF2_EDbZQ zQD*HxDm3SEU%31x>-hVp z8{~6VZC$qWR8YcfF57?!xHPlqmpT53i@Ry^Vi7>G?p3C%rec&qJE#XNGxGtpB7`u( 
z<%)vEGF0|Px2n>1uKY59Kps5#=9Xzohn(Sorvyc2>#uwl^#+dV1^^`O`*6ef{%bS$ z1TK4kwp=t|UET*gpGl<=KvGUXjtdd-plJepxrY z>!|M+JK>8wmhYC6fJjcI72O*3&jP-N2>hL-@+81Iks3h?TvMV0h%@^%(GP*$bc66k zP(Tc(?F=XC-je7e#q_Y6z)2z~Clb6s6^gq8_nquI&w>`7> z0oY|_@2fP$OMzdkaA`F1p8Y(n`00Kg+f@ZYx1s-%~u+;#N&7OCXA5Q}7s!(vL zr0(Qp$WLxd06reT>o}{-P-gL07=8ksb8ijrkYm?7-BU8Y0LYSm%Jl#O5*iSaQ-!e{ z7uJ`%(#Gv4FMTQ4kFxLzGtg?Oq;ce8ERAX(lGoIH$oRc}{%Fw9&@e5Fizla`AXM5- zlJOA+Hd%_t_uUNUj;=J>0H*`Lv%$~Mao5iepCK(>Bp?tHO0GBrHcySk^9JMBx9j_h zZEPUdd2|$Divz_uqA5jl!xZZ*^7oIWGICUY43vjlrdVM))8&Ct*`1SaXcItP_t^r_ zdmF(5n&K0{NMA4<0bbj4GPVn+!#!grjcb^N=!jlq7f-#{jnq4cLVR%oHuLEL;4gbM zZZxyyxZWycx?l$oZXj2jMw+gkLVpk5T<8NP(cXbG94h*DRjA8jWM_D@!ofDNs#u)H z&$z}orTygcRZ&**o>Lu@VmlymQT+Y;_sK3#qfbT~dppaUcv%>rxH}O5K>mx{+$EcF z;$JSX;^Wabxa6ahj9ASF39+F0@7)i=7;O&%DDp{Zr+)fvQgE}V$#Fm|9lZi<#5+%M2AD&pxO+5{DAdVwS<)Z|z_gtf z`3wTe?*q6R;huU(RseRDfpZa2$@hnvA?No{;|lSEVqLEk)>25zH*R;s#aPe zZ#uES4A_0^s%kthl*c|SNnSydKeLcBQ++9;p5XoJVREQuu`Q6+H^&jI?ZETMv|9D>kf2 zZuUJ%Kg)oYThUjf)j~@(@K~ziJ$&#Wq56Fl5=ah1|2<$b7Lle*rLP`16zyrEXs>Qz zg59jox+4XXiOYAnlJ!RDE?T~`Ty+uL7I`cB%>k=X#DGrpBt2B3jQ02tU-B<7a~0<9 z-dPTPUAz~F@2c_d^~lQraQelR5P!%C9tk-p9V=Yb_2Z~)*9Ud-NS1e<>lT$c=Q25| zDiE{03OkE4DKA;?cmT+D&-kRdN=&rOv3c2L3$>UwO|p-+ccm|>aJk=CHSIY;C?QMd z^bQ8^lo62!K*6Y!Ni68wgi0A~A6{etlPR10sRemzZ3g3D_jZ%?rI_rjL|J`J)e`k$ zKi!@z#(Nyp(#*FgVh((Str?8;W}-?2^|mWGlsguc8LI969da{GOsA^3*|oduK?_VJ z5zFH2GarC`L$M zgrB1;-h4i1uE<@`vgR^T3l-I`<|KgApsCY^kSG3%rLsYo0Ew01aYyw9d^!u zFLN~sZK@^NRiHR}*3DjvmLPji!5f!x{SVJN=`!fARMtB#r42XRA2Qy&pi8w{`5|B)<~I5u6fX~U=_N|!hgo^VkJX# z&W!kJkxy@!E)Z@2f2RGU8?}16n4)+}_9qmZUf22~I?WwNy$7KpImhlMXu6Y8c2fwk zy584HL*r9iuv8@{bUUy}7ZpgB{MreEA;Rb+5oP9Ni|!iorBPZ!<yV#Calzb$qZQusYWt*QGPk~&in|E8gnk;6A8(Pi)w=06dIQz5PU+j@%}$6q3e-; zm$FRKSb8ujy~!$e)at25H!iSW&|-O|DwdX5cwRk+*QRrp+snXr8tAl{<7|Wq3M`Oa z?jGH)=bQqrCrOqIo>>?%}E3Ud>h#a#V! 
z+Nj<%E99bjptoO@oU{C%P&>{k1Vs|q0u!>^ctMS-%wc-u{8t0%nwIC>xB_nRXQ#(N zh$vo|>!kD2T4c_aL4wu%RLp{6272wh6feETPnb-cE1ugtN5C8Dof+~3LW15l`YUl* zbnav5eZRa(D68kaVIk?7xA;my@@M?YCs~Rh9o&{m^g0a6U70)?IqA}isw*E`1-T*j zyKH+52sN6d#}W~*)EG0A<$W{o^@AeSRQOYQNGKRB$)oY_4t+{_2X)Uy$aS~X&kO3y zo?wNL@g!@ME1kr%b?*(c2qh&9h^);#`w?=Fq9UESga~n^` zCi@>sKZ8H6r6$yiyhiit*crcsNwLhlDCNhgDppA{L95UPi;s~K*+!u+*V|dRs0E!f zMKjmxgBjwy?}ZvdT?QuKR%jTm+6ilZ2p4(F$?K2CDVT5w7wiC^8XRnw2JXGZ7BO*b{e zmDv7-xpCH2jUB(lv#&CA$4tx8Q%-(@>3mZ5;y3SL95K*WYA>9-i-`=#SM4k$k3ZoX zH~%KW?>_o2SDErm6~K7i`>-TllaTEJ+C0p&8AC0-IwQ>vBrnXZ+xv@mb3Esf-rOd6Vh?EdZ?j%3Huu1fYA2fSW<$*ETx`o-qmDjEeRTI+uy!5=hEgxDlXdBpZa&IT2kojfX+@IJ){%4%m@-4u6PhMQFk zNJ=v0#csF`MOMfMT15+~c;`xuIE(&bKmLFqh@pC1oBB$cM+ddP&Uxh-(|JikGXbS# z)Gcx5^eXkcqzJeqTqeTuV}{JcTr6Ds)slEu6neJ{#kjssrHy`xkr#zbtXE0!5k4!k z8G@#Syv&dsQ7^Tilj(5*cJjcIq56p`_+{@b0Om;6Z{pQ#~bbTngS=oY$rbVy@hE%bD1e1WFlZvY~*CRt)-IK z$bW;VuU)I~Vv4DX4)tl(opa=d%=&2?;toXGE45}YDq6nlKkSJKyz zKF7Hi#e;|c_oLC{r{FO7v6Ophela>V+PbVnqV*f?8Sso=RxL}98FND2_Bhdz01W^~ z+p4amrmov?;D2gv*3vYrmv?*|O^&l;r~uaJ<|clNUrHVY{IUIh>f>20HiA!kUuw(0};>8VxNMQ7;GQI27)d#QEiAthramXIj=3byAvA=28f4tnHeh z&BhGWpTp*0s3mhRGg6Lp^%xBBE0^#a&Qen96{SE%Ax-tP=xx7Q*82nZhK|c-)Uw%H z@7U>vSN~l7wwWD}aXu=lb`GV36e#-47d~`84~A@&G=?n<1?I=+@w|gk#0V24zbP#A zv9hK%q$t^jdzI$q-R3`2Sx#y)v-mSmw<~HBA+~0FA;W-=e)k`8e{MamZ0EW*J^1YO z0rfxUXc!HmvXxWsWf#S0uxnk_QCBziu&%lPVB9-Q4NqZA1J8KCU~?HyYqVQ6lxPs5 zMK9!<>p{Kvcvc7&luc3pO2bu+0-5%RW_Q9xTx~>CUF(uE68)9R@Kv!(wPA(pk2X{h zg-eV&@sdqB-I&7WKB_7Z#?EnSD2LbTn>AAN883dBi=y|WG@e@6bN!jFR3(?qZPb=Q zajVl{sGZOq(|vNUlxDbJW>s-Byr_qFX=ihJc{+u$06Bqa_|Yo6ym&HIy@_IyuIN?6 z?wOs~3LMEgr_sjbiLL+ zslL%OqQ||&da}WbS@x?T#hU{`+9y;WOyPXQZiI zPo(Fc<2Ve9l~^QM$qL zj?FWpEJx#w#?|o)6WMF4!9`MLgKn~H>lygj2)muEmAuFKwl_en4sx?c(@;OA{1_Zk z?pK$cjuMTo%TSNX!EX~0_3Lcws^susR2X>9AnIMQlbxNgoo1iAccy=MzUGc2*WB(G;Ot3v z$v`X}2&11E0?2&7sqV`CH$3=ozY^9`h=>f^6ILPG9>b*lvJtwGT@oT^2!!YgkrQMi z!~J?ZZO<#OBIw*s0}rjBjkuY52;uWWi4^>_+6Y#pC817`#eTnw&BE>t))B}=eV6m* 
zum}d3o%1=93o5F-Sf~D(V#7)4Yi-t9lGo=cNGMr@-aucKf6ipQzF(L7$r!R_>9yC9 z3}yMSptDLdqHWIOS5@0kH0G>O_@K*W%5b)ahoQW|Xg46EL1)^_@RCJbA8ETlHWZc_ zloOx5l7&PT)_y}Y^xho&NulGsXPnhuOusXXU-@Gf_SJi2_j*IA&WO6j(#gD5n=xx& zLsTY8nDVt|h|Yzf!l@{<_Y`c#TA!|{txxE6si@}&D1JKFz@v4iD=>3)_0GlKTVtm0 zX}$Nh(fi^*ZNJs#mqBf5_~Ae~(d2%+{Rv@Hic^`=61yPM;!SI!E`%v%~40BFQ@H z>jc;EYR04z{g0S6UQGhef2|eF=JQ{S{LyzDkNQ3%Km5c$0^rYYeINii@8=}{5ckiu z5(FZS`Zj(jxUX}$Q~b}D{~r7CXb1ICL@_!?MyvRjz){~Pqt!Wn6j9{y{py}eo&0mM z|BUS?2|eQ5$9(@~%D+tcudVd&N%`+1^zTXeSA6}q!2f@LQh*(a=X<1zScXLJqvC9a z7fUbYcyIQ_(A>OV_t$@GtX%!kpGB?B13k1yXcHql-?fo_75kDcS{nKzHgPNd{kqmi zBuD6*+$x}{doE%|UO?Ys9IT%Fmkhxt;3WdL4nl7qp>{rF zfTBXr^q%_Lzxk6GR+MD4*4G^VeE7F=_Ce9c z<10szQXAYdiDICMN)$^LZWg=D(-^*%VO9 z^ybMU>ydN?3GnYkC2VDgf!JaxdqZwgzYP8hY<1X0fl@ zuu^!ZHMO{HDVCE{kYw)b0`S$XEaoukYv@#D5JrQGL^IFU@}$!KF=eSdz`0lESXkQqnN z6D+@e9ly^_!0Kj7a=d)Ri2q}+Ie>Y5rnj;pW)W3j1k86QHRg6;G0_TG43$2+_HQ25B4v=d#Wzp$P!cj Date: Thu, 16 Jun 2022 07:15:55 +0000 Subject: [PATCH 14/30] add blank between characters for vits, test=tts --- examples/csmsc/vits/conf/default.yaml | 8 +- examples/csmsc/vits/local/preprocess.sh | 4 + examples/csmsc/vits/local/synthesize_e2e.sh | 6 +- examples/csmsc/vits/run.sh | 5 +- examples/ljspeech/voc0/local/synthesize.sh | 3 +- .../t2s/exps/fastspeech2/normalize.py | 24 +----- .../t2s/exps/fastspeech2/preprocess.py | 9 -- .../t2s/exps/gan_vocoder/normalize.py | 24 +----- .../t2s/exps/gan_vocoder/preprocess.py | 9 -- .../t2s/exps/speedyspeech/normalize.py | 23 ------ .../t2s/exps/speedyspeech/preprocess.py | 9 -- paddlespeech/t2s/exps/tacotron2/preprocess.py | 9 -- .../t2s/exps/transformer_tts/normalize.py | 24 +----- .../t2s/exps/transformer_tts/preprocess.py | 9 -- paddlespeech/t2s/exps/vits/normalize.py | 82 +++++++++++++------ paddlespeech/t2s/exps/vits/preprocess.py | 9 -- paddlespeech/t2s/exps/vits/synthesize_e2e.py | 12 ++- paddlespeech/t2s/exps/vits/train.py | 13 ++- paddlespeech/t2s/exps/waveflow/preprocess.py | 5 -- 
paddlespeech/t2s/exps/waveflow/synthesize.py | 2 - paddlespeech/t2s/frontend/zh_frontend.py | 42 ++++++++-- paddlespeech/t2s/models/vits/vits.py | 6 +- 22 files changed, 136 insertions(+), 201 deletions(-) diff --git a/examples/csmsc/vits/conf/default.yaml b/examples/csmsc/vits/conf/default.yaml index 47af780d..32f995cc 100644 --- a/examples/csmsc/vits/conf/default.yaml +++ b/examples/csmsc/vits/conf/default.yaml @@ -178,6 +178,8 @@ generator_first: False # whether to start updating generator first ########################################################## # OTHER TRAINING SETTING # ########################################################## -max_epoch: 1000 # number of epochs -num_snapshots: 10 # max number of snapshots to keep while training -seed: 777 # random seed number +num_snapshots: 10 # max number of snapshots to keep while training +train_max_steps: 250000 # Number of training steps. == total_iters / ngpus, total_iters = 1000000 +save_interval_steps: 1000 # Interval steps to save checkpoint. +eval_interval_steps: 250 # Interval steps to evaluate the network. 
+seed: 777 # random seed number diff --git a/examples/csmsc/vits/local/preprocess.sh b/examples/csmsc/vits/local/preprocess.sh index 1d3ae593..1cd6d1f9 100755 --- a/examples/csmsc/vits/local/preprocess.sh +++ b/examples/csmsc/vits/local/preprocess.sh @@ -4,6 +4,7 @@ stage=0 stop_stage=100 config_path=$1 +add_blank=$2 if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then # get durations from MFA's result @@ -44,6 +45,7 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then --feats-stats=dump/train/feats_stats.npy \ --phones-dict=dump/phone_id_map.txt \ --speaker-dict=dump/speaker_id_map.txt \ + --add-blank=${add_blank} \ --skip-wav-copy python3 ${BIN_DIR}/normalize.py \ @@ -52,6 +54,7 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then --feats-stats=dump/train/feats_stats.npy \ --phones-dict=dump/phone_id_map.txt \ --speaker-dict=dump/speaker_id_map.txt \ + --add-blank=${add_blank} \ --skip-wav-copy python3 ${BIN_DIR}/normalize.py \ @@ -60,5 +63,6 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then --feats-stats=dump/train/feats_stats.npy \ --phones-dict=dump/phone_id_map.txt \ --speaker-dict=dump/speaker_id_map.txt \ + --add-blank=${add_blank} \ --skip-wav-copy fi diff --git a/examples/csmsc/vits/local/synthesize_e2e.sh b/examples/csmsc/vits/local/synthesize_e2e.sh index edbb07bf..3f3bf651 100755 --- a/examples/csmsc/vits/local/synthesize_e2e.sh +++ b/examples/csmsc/vits/local/synthesize_e2e.sh @@ -3,9 +3,12 @@ config_path=$1 train_output_path=$2 ckpt_name=$3 +add_blank=$4 + stage=0 stop_stage=0 + if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then FLAGS_allocator_strategy=naive_best_fit \ FLAGS_fraction_of_gpu_memory_to_use=0.01 \ @@ -14,5 +17,6 @@ if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then --ckpt=${train_output_path}/checkpoints/${ckpt_name} \ --phones_dict=dump/phone_id_map.txt \ --output_dir=${train_output_path}/test_e2e \ - --text=${BIN_DIR}/../sentences.txt + --text=${BIN_DIR}/../sentences.txt \ + --add-blank=${add_blank} fi 
diff --git a/examples/csmsc/vits/run.sh b/examples/csmsc/vits/run.sh index 80e56e7c..c284b7b2 100755 --- a/examples/csmsc/vits/run.sh +++ b/examples/csmsc/vits/run.sh @@ -10,6 +10,7 @@ stop_stage=100 conf_path=conf/default.yaml train_output_path=exp/default ckpt_name=snapshot_iter_153.pdz +add_blank=true # with the following command, you can choose the stage range you want to run # such as `./run.sh --stage 0 --stop-stage 0` @@ -18,7 +19,7 @@ source ${MAIN_ROOT}/utils/parse_options.sh || exit 1 if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then # prepare data - ./local/preprocess.sh ${conf_path} || exit -1 + ./local/preprocess.sh ${conf_path} ${add_blank}|| exit -1 fi if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then @@ -32,5 +33,5 @@ fi if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then # synthesize_e2e, vocoder is pwgan - CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize_e2e.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1 + CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize_e2e.sh ${conf_path} ${train_output_path} ${ckpt_name} ${add_blank}|| exit -1 fi diff --git a/examples/ljspeech/voc0/local/synthesize.sh b/examples/ljspeech/voc0/local/synthesize.sh index 1d5e1183..11874e49 100755 --- a/examples/ljspeech/voc0/local/synthesize.sh +++ b/examples/ljspeech/voc0/local/synthesize.sh @@ -8,5 +8,4 @@ python ${BIN_DIR}/synthesize.py \ --input=${input_mel_path} \ --output=${train_output_path}/wavs/ \ --checkpoint_path=${train_output_path}/checkpoints/${ckpt_name} \ - --ngpu=1 \ - --verbose \ No newline at end of file + --ngpu=1 \ No newline at end of file diff --git a/paddlespeech/t2s/exps/fastspeech2/normalize.py b/paddlespeech/t2s/exps/fastspeech2/normalize.py index 8ec20ebf..92d10832 100644 --- a/paddlespeech/t2s/exps/fastspeech2/normalize.py +++ b/paddlespeech/t2s/exps/fastspeech2/normalize.py @@ -58,30 +58,8 @@ def main(): "--phones-dict", type=str, default=None, help="phone vocabulary file.") parser.add_argument( "--speaker-dict", type=str, 
default=None, help="speaker id map file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. (default=1)") - args = parser.parse_args() - # set logger - if args.verbose > 1: - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - elif args.verbose > 0: - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - else: - logging.basicConfig( - level=logging.WARN, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - logging.warning('Skip DEBUG/INFO messages') + args = parser.parse_args() dumpdir = Path(args.dumpdir).expanduser() # use absolute path diff --git a/paddlespeech/t2s/exps/fastspeech2/preprocess.py b/paddlespeech/t2s/exps/fastspeech2/preprocess.py index eac75f98..0045c5a3 100644 --- a/paddlespeech/t2s/exps/fastspeech2/preprocess.py +++ b/paddlespeech/t2s/exps/fastspeech2/preprocess.py @@ -209,11 +209,6 @@ def main(): parser.add_argument("--config", type=str, help="fastspeech2 config file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. 
(default=1)") parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") @@ -248,10 +243,6 @@ def main(): with open(args.config, 'rt') as f: config = CfgNode(yaml.safe_load(f)) - if args.verbose > 1: - print(vars(args)) - print(config) - sentences, speaker_set = get_phn_dur(dur_file) merge_silence(sentences) diff --git a/paddlespeech/t2s/exps/gan_vocoder/normalize.py b/paddlespeech/t2s/exps/gan_vocoder/normalize.py index ba95d3ed..4cb7e41c 100644 --- a/paddlespeech/t2s/exps/gan_vocoder/normalize.py +++ b/paddlespeech/t2s/exps/gan_vocoder/normalize.py @@ -47,30 +47,8 @@ def main(): default=False, action="store_true", help="whether to skip the copy of wav files.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. (default=1)") - args = parser.parse_args() - # set logger - if args.verbose > 1: - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - elif args.verbose > 0: - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - else: - logging.basicConfig( - level=logging.WARN, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - logging.warning('Skip DEBUG/INFO messages') + args = parser.parse_args() dumpdir = Path(args.dumpdir).expanduser() # use absolute path diff --git a/paddlespeech/t2s/exps/gan_vocoder/preprocess.py b/paddlespeech/t2s/exps/gan_vocoder/preprocess.py index 54636796..05c65768 100644 --- a/paddlespeech/t2s/exps/gan_vocoder/preprocess.py +++ b/paddlespeech/t2s/exps/gan_vocoder/preprocess.py @@ -167,11 +167,6 @@ def main(): required=True, help="directory to dump feature files.") parser.add_argument("--config", type=str, help="vocoder config file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. 
(default=1)") parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") parser.add_argument( @@ -197,10 +192,6 @@ def main(): with open(args.config, 'rt') as f: config = CfgNode(yaml.safe_load(f)) - if args.verbose > 1: - print(vars(args)) - print(config) - sentences, speaker_set = get_phn_dur(dur_file) merge_silence(sentences) diff --git a/paddlespeech/t2s/exps/speedyspeech/normalize.py b/paddlespeech/t2s/exps/speedyspeech/normalize.py index 249a4d6d..f29466f6 100644 --- a/paddlespeech/t2s/exps/speedyspeech/normalize.py +++ b/paddlespeech/t2s/exps/speedyspeech/normalize.py @@ -50,11 +50,6 @@ def main(): "--tones-dict", type=str, default=None, help="tone vocabulary file.") parser.add_argument( "--speaker-dict", type=str, default=None, help="speaker id map file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. (default=1)") parser.add_argument( "--use-relative-path", @@ -63,24 +58,6 @@ def main(): help="whether use relative path in metadata") args = parser.parse_args() - # set logger - if args.verbose > 1: - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - elif args.verbose > 0: - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - else: - logging.basicConfig( - level=logging.WARN, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - logging.warning('Skip DEBUG/INFO messages') - dumpdir = Path(args.dumpdir).expanduser() # use absolute path dumpdir = dumpdir.resolve() diff --git a/paddlespeech/t2s/exps/speedyspeech/preprocess.py b/paddlespeech/t2s/exps/speedyspeech/preprocess.py index aa7608d6..e4084c14 100644 --- a/paddlespeech/t2s/exps/speedyspeech/preprocess.py +++ b/paddlespeech/t2s/exps/speedyspeech/preprocess.py @@ -195,11 +195,6 @@ def main(): parser.add_argument("--config", type=str, help="fastspeech2 
config file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. (default=1)") parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") @@ -230,10 +225,6 @@ def main(): with open(args.config, 'rt') as f: config = CfgNode(yaml.safe_load(f)) - if args.verbose > 1: - print(vars(args)) - print(config) - sentences, speaker_set = get_phn_dur(dur_file) merge_silence(sentences) diff --git a/paddlespeech/t2s/exps/tacotron2/preprocess.py b/paddlespeech/t2s/exps/tacotron2/preprocess.py index 6137da7f..c27b9769 100644 --- a/paddlespeech/t2s/exps/tacotron2/preprocess.py +++ b/paddlespeech/t2s/exps/tacotron2/preprocess.py @@ -184,11 +184,6 @@ def main(): parser.add_argument("--config", type=str, help="fastspeech2 config file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. (default=1)") parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") @@ -223,10 +218,6 @@ def main(): with open(args.config, 'rt') as f: config = CfgNode(yaml.safe_load(f)) - if args.verbose > 1: - print(vars(args)) - print(config) - sentences, speaker_set = get_phn_dur(dur_file) merge_silence(sentences) diff --git a/paddlespeech/t2s/exps/transformer_tts/normalize.py b/paddlespeech/t2s/exps/transformer_tts/normalize.py index 87e975b8..e5f052c6 100644 --- a/paddlespeech/t2s/exps/transformer_tts/normalize.py +++ b/paddlespeech/t2s/exps/transformer_tts/normalize.py @@ -51,30 +51,8 @@ def main(): "--phones-dict", type=str, default=None, help="phone vocabulary file.") parser.add_argument( "--speaker-dict", type=str, default=None, help="speaker id map file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. 
(default=1)") - args = parser.parse_args() - # set logger - if args.verbose > 1: - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - elif args.verbose > 0: - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - else: - logging.basicConfig( - level=logging.WARN, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - logging.warning('Skip DEBUG/INFO messages') + args = parser.parse_args() # check directory existence dumpdir = Path(args.dumpdir).resolve() diff --git a/paddlespeech/t2s/exps/transformer_tts/preprocess.py b/paddlespeech/t2s/exps/transformer_tts/preprocess.py index 28ca3de6..2ebd5ecc 100644 --- a/paddlespeech/t2s/exps/transformer_tts/preprocess.py +++ b/paddlespeech/t2s/exps/transformer_tts/preprocess.py @@ -186,11 +186,6 @@ def main(): type=str, help="yaml format configuration file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. 
(default=1)") parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") @@ -210,10 +205,6 @@ def main(): _C = Configuration(_C) config = _C.clone() - if args.verbose > 1: - print(vars(args)) - print(config) - phone_id_map_path = dumpdir / "phone_id_map.txt" speaker_id_map_path = dumpdir / "speaker_id_map.txt" diff --git a/paddlespeech/t2s/exps/vits/normalize.py b/paddlespeech/t2s/exps/vits/normalize.py index 6fc8adb0..5881ae95 100644 --- a/paddlespeech/t2s/exps/vits/normalize.py +++ b/paddlespeech/t2s/exps/vits/normalize.py @@ -16,6 +16,7 @@ import argparse import logging from operator import itemgetter from pathlib import Path +from typing import List import jsonlines import numpy as np @@ -23,6 +24,50 @@ from sklearn.preprocessing import StandardScaler from tqdm import tqdm from paddlespeech.t2s.datasets.data_table import DataTable +from paddlespeech.t2s.utils import str2bool + +INITIALS = [ + 'b', 'p', 'm', 'f', 'd', 't', 'n', 'l', 'g', 'k', 'h', 'zh', 'ch', 'sh', + 'r', 'z', 'c', 's', 'j', 'q', 'x' +] +INITIALS += ['y', 'w', 'sp', 'spl', 'spn', 'sil'] + + +def intersperse(lst, item): + result = [item] * (len(lst) * 2 + 1) + result[1::2] = lst + return result + + +def insert_after_character(lst, item): + result = [item] + for phone in lst: + result.append(phone) + if phone not in INITIALS: + # finals has tones + assert phone[-1] in "12345" + result.append(item) + return result + + +def add_blank(phones: List[str], + filed: str="character", + blank_token: str=""): + if filed == "phone": + """ + add blank after phones + input: ["n", "i3", "h", "ao3", "m", "a5"] + output: ["n", "", "i3", "", "h", "", "ao3", "", "m", "", "a5"] + """ + phones = intersperse(phones, blank_token) + elif filed == "character": + """ + add blank after characters + input: ["n", "i3", "h", "ao3"] + output: ["n", "i3", "", "h", "ao3", "", "m", "a5"] + """ + phones = insert_after_character(phones, blank_token) + return phones def main(): @@ -58,29 +103,12 @@ def main(): 
parser.add_argument( "--speaker-dict", type=str, default=None, help="speaker id map file.") parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. (default=1)") - args = parser.parse_args() + "--add-blank", + type=str2bool, + default=True, + help="whether to add blank between phones") - # set logger - if args.verbose > 1: - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - elif args.verbose > 0: - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - else: - logging.basicConfig( - level=logging.WARN, - format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" - ) - logging.warning('Skip DEBUG/INFO messages') + args = parser.parse_args() dumpdir = Path(args.dumpdir).expanduser() # use absolute path @@ -135,13 +163,19 @@ def main(): else: wav_path = wave - phone_ids = [vocab_phones[p] for p in item['phones']] + phones = item['phones'] + text_lengths = item['text_lengths'] + if args.add_blank: + phones = add_blank(phones, filed="character") + text_lengths = len(phones) + + phone_ids = [vocab_phones[p] for p in phones] spk_id = vocab_speaker[item["speaker"]] record = { "utt_id": item['utt_id'], "text": phone_ids, - "text_lengths": item['text_lengths'], + "text_lengths": text_lengths, 'feats': str(feats_path), "feats_lengths": item['feats_lengths'], "wave": str(wav_path), diff --git a/paddlespeech/t2s/exps/vits/preprocess.py b/paddlespeech/t2s/exps/vits/preprocess.py index 6aa139fb..f89ab356 100644 --- a/paddlespeech/t2s/exps/vits/preprocess.py +++ b/paddlespeech/t2s/exps/vits/preprocess.py @@ -197,11 +197,6 @@ def main(): parser.add_argument("--config", type=str, help="fastspeech2 config file.") - parser.add_argument( - "--verbose", - type=int, - default=1, - help="logging level. higher is more logging. 
(default=1)") parser.add_argument( "--num-cpu", type=int, default=1, help="number of process.") @@ -236,10 +231,6 @@ def main(): with open(args.config, 'rt') as f: config = CfgNode(yaml.safe_load(f)) - if args.verbose > 1: - print(vars(args)) - print(config) - sentences, speaker_set = get_phn_dur(dur_file) merge_silence(sentences) diff --git a/paddlespeech/t2s/exps/vits/synthesize_e2e.py b/paddlespeech/t2s/exps/vits/synthesize_e2e.py index c82e5c03..33a41375 100644 --- a/paddlespeech/t2s/exps/vits/synthesize_e2e.py +++ b/paddlespeech/t2s/exps/vits/synthesize_e2e.py @@ -23,6 +23,7 @@ from yacs.config import CfgNode from paddlespeech.t2s.exps.syn_utils import get_frontend from paddlespeech.t2s.exps.syn_utils import get_sentences from paddlespeech.t2s.models.vits import VITS +from paddlespeech.t2s.utils import str2bool def evaluate(args): @@ -55,6 +56,7 @@ def evaluate(args): output_dir = Path(args.output_dir) output_dir.mkdir(parents=True, exist_ok=True) merge_sentences = False + add_blank = args.add_blank N = 0 T = 0 @@ -62,7 +64,9 @@ def evaluate(args): with timer() as t: if args.lang == 'zh': input_ids = frontend.get_input_ids( - sentence, merge_sentences=merge_sentences) + sentence, + merge_sentences=merge_sentences, + add_blank=add_blank) phone_ids = input_ids["phone_ids"] elif args.lang == 'en': input_ids = frontend.get_input_ids( @@ -125,6 +129,12 @@ def parse_args(): help="text to synthesize, a 'utt_id sentence' pair per line.") parser.add_argument("--output_dir", type=str, help="output dir.") + parser.add_argument( + "--add-blank", + type=str2bool, + default=True, + help="whether to add blank between phones") + args = parser.parse_args() return args diff --git a/paddlespeech/t2s/exps/vits/train.py b/paddlespeech/t2s/exps/vits/train.py index dbda8b71..1a68d132 100644 --- a/paddlespeech/t2s/exps/vits/train.py +++ b/paddlespeech/t2s/exps/vits/train.py @@ -211,13 +211,18 @@ def train_sp(args, config): generator_first=config.generator_first, 
output_dir=output_dir) - trainer = Trainer(updater, (config.max_epoch, 'epoch'), output_dir) + trainer = Trainer( + updater, + stop_trigger=(config.train_max_steps, "iteration"), + out=output_dir) if dist.get_rank() == 0: - trainer.extend(evaluator, trigger=(1, "epoch")) - trainer.extend(VisualDL(output_dir), trigger=(1, "iteration")) + trainer.extend( + evaluator, trigger=(config.eval_interval_steps, 'iteration')) + trainer.extend(VisualDL(output_dir), trigger=(1, 'iteration')) trainer.extend( - Snapshot(max_size=config.num_snapshots), trigger=(1, 'epoch')) + Snapshot(max_size=config.num_snapshots), + trigger=(config.save_interval_steps, 'iteration')) print("Trainer Done!") trainer.run() diff --git a/paddlespeech/t2s/exps/waveflow/preprocess.py b/paddlespeech/t2s/exps/waveflow/preprocess.py index ef3a2917..c7034aea 100644 --- a/paddlespeech/t2s/exps/waveflow/preprocess.py +++ b/paddlespeech/t2s/exps/waveflow/preprocess.py @@ -143,8 +143,6 @@ if __name__ == "__main__": nargs=argparse.REMAINDER, help="options to overwrite --config file and the default config, passing in KEY VALUE pairs" ) - parser.add_argument( - "-v", "--verbose", action="store_true", help="print msg") config = get_cfg_defaults() args = parser.parse_args() @@ -153,8 +151,5 @@ if __name__ == "__main__": if args.opts: config.merge_from_list(args.opts) config.freeze() - if args.verbose: - print(config.data) - print(args) create_dataset(config.data, args.input, args.output) diff --git a/paddlespeech/t2s/exps/waveflow/synthesize.py b/paddlespeech/t2s/exps/waveflow/synthesize.py index 53715b01..a3190c6e 100644 --- a/paddlespeech/t2s/exps/waveflow/synthesize.py +++ b/paddlespeech/t2s/exps/waveflow/synthesize.py @@ -72,8 +72,6 @@ if __name__ == "__main__": nargs=argparse.REMAINDER, help="options to overwrite --config file and the default config, passing in KEY VALUE pairs" ) - parser.add_argument( - "-v", "--verbose", action="store_true", help="print msg") args = parser.parse_args() if args.config: diff 
--git a/paddlespeech/t2s/frontend/zh_frontend.py b/paddlespeech/t2s/frontend/zh_frontend.py index 129aa944..143ccbc1 100644 --- a/paddlespeech/t2s/frontend/zh_frontend.py +++ b/paddlespeech/t2s/frontend/zh_frontend.py @@ -29,6 +29,29 @@ from paddlespeech.t2s.frontend.generate_lexicon import generate_lexicon from paddlespeech.t2s.frontend.tone_sandhi import ToneSandhi from paddlespeech.t2s.frontend.zh_normalization.text_normlization import TextNormalizer +INITIALS = [ + 'b', 'p', 'm', 'f', 'd', 't', 'n', 'l', 'g', 'k', 'h', 'zh', 'ch', 'sh', + 'r', 'z', 'c', 's', 'j', 'q', 'x' +] +INITIALS += ['y', 'w', 'sp', 'spl', 'spn', 'sil'] + + +def intersperse(lst, item): + result = [item] * (len(lst) * 2 + 1) + result[1::2] = lst + return result + + +def insert_after_character(lst, item): + result = [item] + for phone in lst: + result.append(phone) + if phone not in INITIALS: + # finals has tones + # assert phone[-1] in "12345" + result.append(item) + return result + class Frontend(): def __init__(self, @@ -280,12 +303,15 @@ class Frontend(): print("----------------------------") return phonemes - def get_input_ids(self, - sentence: str, - merge_sentences: bool=True, - get_tone_ids: bool=False, - robot: bool=False, - print_info: bool=False) -> Dict[str, List[paddle.Tensor]]: + def get_input_ids( + self, + sentence: str, + merge_sentences: bool=True, + get_tone_ids: bool=False, + robot: bool=False, + print_info: bool=False, + add_blank: bool=False, + blank_token: str="") -> Dict[str, List[paddle.Tensor]]: phonemes = self.get_phonemes( sentence, merge_sentences=merge_sentences, @@ -299,6 +325,10 @@ class Frontend(): for part_phonemes in phonemes: phones, tones = self._get_phone_tone( part_phonemes, get_tone_ids=get_tone_ids) + + if add_blank: + phones = insert_after_character(phones, blank_token) + if tones: tone_ids = self._t2id(tones) tone_ids = paddle.to_tensor(tone_ids) diff --git a/paddlespeech/t2s/models/vits/vits.py b/paddlespeech/t2s/models/vits/vits.py index 
ab8eda26..5c476be7 100644 --- a/paddlespeech/t2s/models/vits/vits.py +++ b/paddlespeech/t2s/models/vits/vits.py @@ -227,11 +227,7 @@ class VITS(nn.Layer): lids (Optional[Tensor]): Language index tensor (B,) or (B, 1). forward_generator (bool): Whether to forward generator. Returns: - Dict[str, Any]: - - loss (Tensor): Loss scalar tensor. - - stats (Dict[str, float]): Statistics to be monitored. - - weight (Tensor): Weight tensor to summarize losses. - - optim_idx (int): Optimizer index (0 for G and 1 for D). + """ if forward_generator: return self._forward_generator( From 357b177232eac9cbac939e18394ec9b531f53689 Mon Sep 17 00:00:00 2001 From: iftaken Date: Thu, 16 Jun 2022 15:34:26 +0800 Subject: [PATCH 15/30] rename readme and fixed conflict --- demos/speech_web/{README.MD => README_cn.md} | 0 paddlespeech/server/bin/paddlespeech_server.py | 4 ---- 2 files changed, 4 deletions(-) rename demos/speech_web/{README.MD => README_cn.md} (100%) diff --git a/demos/speech_web/README.MD b/demos/speech_web/README_cn.md similarity index 100% rename from demos/speech_web/README.MD rename to demos/speech_web/README_cn.md diff --git a/paddlespeech/server/bin/paddlespeech_server.py b/paddlespeech/server/bin/paddlespeech_server.py index 57d72887..175e8ffb 100644 --- a/paddlespeech/server/bin/paddlespeech_server.py +++ b/paddlespeech/server/bin/paddlespeech_server.py @@ -45,10 +45,6 @@ app.add_middleware( allow_credentials=True, allow_methods=["*"], allow_headers=["*"]) -<<<<<<< HEAD -======= - ->>>>>>> develop @cli_server_register( name='paddlespeech_server.start', description='Start the service') From 0f8e9cdd32cef875200a9f0c90cc1dd5630a82aa Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Thu, 16 Jun 2022 08:41:10 +0000 Subject: [PATCH 16/30] add init file --- .../server/engine/asr/online/onnx/__init__.py | 13 +++++++++++++ .../engine/asr/online/paddleinference/__init__.py | 13 +++++++++++++ .../server/engine/asr/online/python/__init__.py | 13 +++++++++++++ 3 files changed, 39 
insertions(+) create mode 100644 paddlespeech/server/engine/asr/online/onnx/__init__.py create mode 100644 paddlespeech/server/engine/asr/online/python/__init__.py diff --git a/paddlespeech/server/engine/asr/online/onnx/__init__.py b/paddlespeech/server/engine/asr/online/onnx/__init__.py new file mode 100644 index 00000000..c747d3e7 --- /dev/null +++ b/paddlespeech/server/engine/asr/online/onnx/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/paddlespeech/server/engine/asr/online/paddleinference/__init__.py b/paddlespeech/server/engine/asr/online/paddleinference/__init__.py index e69de29b..c747d3e7 100644 --- a/paddlespeech/server/engine/asr/online/paddleinference/__init__.py +++ b/paddlespeech/server/engine/asr/online/paddleinference/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/paddlespeech/server/engine/asr/online/python/__init__.py b/paddlespeech/server/engine/asr/online/python/__init__.py new file mode 100644 index 00000000..c747d3e7 --- /dev/null +++ b/paddlespeech/server/engine/asr/online/python/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
\ No newline at end of file From 704b5f8bc4bb7b5fef963b83d60fe3758a4a5b94 Mon Sep 17 00:00:00 2001 From: huangyuxin Date: Fri, 17 Jun 2022 03:51:47 +0000 Subject: [PATCH 17/30] fix win len in ds2 server --- demos/streaming_asr_server/conf/ws_ds2_application.yaml | 4 ++-- paddlespeech/server/conf/ws_application.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index d19bd26d..5391e5e4 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -39,11 +39,11 @@ asr_online: summary: True # False -> do not show predictor config chunk_buffer_conf: - frame_duration_ms: 80 + frame_duration_ms: 85 shift_ms: 40 sample_rate: 16000 sample_width: 2 window_n: 7 # frame shift_n: 4 # frame - window_ms: 20 # ms + window_ms: 25 # ms shift_ms: 10 # ms diff --git a/paddlespeech/server/conf/ws_application.yaml b/paddlespeech/server/conf/ws_application.yaml index 43d83f2d..243f549b 100644 --- a/paddlespeech/server/conf/ws_application.yaml +++ b/paddlespeech/server/conf/ws_application.yaml @@ -39,11 +39,11 @@ asr_online: summary: True # False -> do not show predictor config chunk_buffer_conf: - frame_duration_ms: 80 + frame_duration_ms: 85 shift_ms: 40 sample_rate: 16000 sample_width: 2 window_n: 7 # frame shift_n: 4 # frame - window_ms: 20 # ms + window_ms: 25 # ms shift_ms: 10 # ms From f0ae81b9120bb6243e2a0533e41634bfe9e8ffe2 Mon Sep 17 00:00:00 2001 From: freeliuzc Date: Fri, 17 Jun 2022 11:52:46 +0800 Subject: [PATCH 18/30] Update install_cn.md Update "docker installation_zh" chapter. 
--- docs/source/install_cn.md | 26 +++++++------------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/docs/source/install_cn.md b/docs/source/install_cn.md index 5a967f40..f59cafa2 100644 --- a/docs/source/install_cn.md +++ b/docs/source/install_cn.md @@ -131,25 +131,13 @@ pip install . -i https://pypi.tuna.tsinghua.edu.cn/simple 为了避免各种环境配置问题,我们非常推荐你使用 docker 容器。如果你不想使用 docker,但是可以使用拥有 root 权限的 Ubuntu 系统,你也可以完成**困难**方式的安装。 ### 选择1: 使用Docker容器(推荐) -Docker 是一种开源工具,用于在和系统本身环境相隔离的环境中构建、发布和运行各类应用程序。你可以访问 [hub.docker.com](https://hub.docker.com) 来下载各种版本的 docker,目前已经有适用于 `PaddleSpeech` 的 docker 提供在了该网站上。Docker 镜像需要使用 Nvidia GPU,所以你也需要提前安装好 [nvidia-docker](https://github.com/NVIDIA/nvidia-docker) 。 -你需要完成几个步骤来启动docker: -- 下载 docker 镜像: - 例如,拉取 paddle2.2.0 镜像: -```bash -sudo nvidia-docker pull registry.baidubce.com/paddlepaddle/paddle:2.2.0-gpu-cuda10.2-cudnn7 -``` -- 克隆 `PaddleSpeech` 仓库 -```bash -git clone https://github.com/PaddlePaddle/PaddleSpeech.git -``` -- 启动 docker 镜像 -```bash -sudo nvidia-docker run --net=host --ipc=host --rm -it -v $(pwd)/PaddleSpeech:/PaddleSpeech registry.baidubce.com/paddlepaddle/paddle:2.2.0-gpu-cuda10.2-cudnn7 /bin/bash -``` -- 进入 PaddleSpeech 目录 -```bash -cd /PaddleSpeech -``` +Docker 是一种开源工具,用于在和系统本身环境相隔离的环境中构建、发布和运行各类应用程序。如果您没有 Docker 运行环境,请参考 [Docker 官网](https://www.docker.com/)进行安装,如果您准备使用 GPU 版本镜像,还需要提前安装好 [nvidia-docker](https://github.com/NVIDIA/nvidia-docker) 。 + +PaddleSpeech 提供了带有最新代码的 docker 镜像供您使用,您只需要**拉取 docker 镜像 **,然后**运行 docker 镜像**,无需其他任何额外操作,即可开始使用 PaddleSpeech 的所有功能。 + +在 [Docker Hub](https://hub.docker.com/repository/docker/paddlecloud/paddlespeech) 中获取这些镜像及相应的使用指南,包括 CPU、GPU、ROCm 版本。 + +如果您对自动化制作docker镜像感兴趣,或有自定义需求,请访问 [PaddlePaddle/PaddleCloud](https://github.com/PaddlePaddle/PaddleCloud/tree/main/tekton) 做进一步了解。 完成这些以后,你就可以在 docker 容器中执行训练、推理和超参 fine-tune。 ### 选择2: 使用有 root 权限的 Ubuntu - 使用apt安装 `build-essential` From cf9254f42e2fb4026487f6ae0dd97586f21ab892 Mon Sep 17 00:00:00 2001 From: 
freeliuzc Date: Fri, 17 Jun 2022 12:06:34 +0800 Subject: [PATCH 19/30] Update install.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update chapter “Running in Docker Container (Recommend)”. --- docs/source/install.md | 25 +++++-------------------- 1 file changed, 5 insertions(+), 20 deletions(-) diff --git a/docs/source/install.md b/docs/source/install.md index e3ea74b2..eeaf6d46 100644 --- a/docs/source/install.md +++ b/docs/source/install.md @@ -139,28 +139,13 @@ pip install . -i https://pypi.tuna.tsinghua.edu.cn/simple To avoid the trouble of environment setup, running in a Docker container is highly recommended. Otherwise, if you work on `Ubuntu` with `root` privilege, you can still complete the installation. ### Choice 1: Running in Docker Container (Recommend) -Docker is an open-source tool to build, ship, and run distributed applications in an isolated environment. A Docker image for this project has been provided in [hub.docker.com](https://hub.docker.com) with dependencies of cuda and cudnn installed. This Docker image requires the support of NVIDIA GPU, so please make sure its availability and the [nvidia-docker](https://github.com/NVIDIA/nvidia-docker) has been installed. +Docker is an open-source tool to build, ship, and run distributed applications in an isolated environment. If you do not have a Docker environment, please refer to [Docker](https://www.docker.com/). If you will use GPU version, you also need to install [nvidia-docker](https://github.com/NVIDIA/nvidia-docker). -Take several steps to launch the Docker image: -- Download the Docker image +We provides built docker images with latest code. All you have to do is to **pull the docker image** and **run the docker image**. Then you can enjoy PaddleSpeech without any extra action. 
-For example, pull paddle 2.2.0 image: -```bash -sudo nvidia-docker pull registry.baidubce.com/paddlepaddle/paddle:2.2.0-gpu-cuda10.2-cudnn7 -``` -- Clone this repository -```bash -git clone https://github.com/PaddlePaddle/PaddleSpeech.git -``` -- Run the Docker image -```bash -sudo nvidia-docker run --net=host --ipc=host --rm -it -v $(pwd)/PaddleSpeech:/PaddleSpeech registry.baidubce.com/paddlepaddle/paddle:2.2.0-gpu-cuda10.2-cudnn7 /bin/bash -``` -- Enter PaddleSpeech directory. -```bash -cd /PaddleSpeech -``` -Now you can execute training, inference, and hyper-parameters tuning in Docker container. +Get these images and guidance in [docker hub](https://hub.docker.com/repository/docker/paddlecloud/paddlespeech), including CPU, GPU, ROCm environment versions. + +If you have some customized requirements about automatic building docker images, you can get it in github repo [PaddlePaddle/PaddleCloud](https://github.com/PaddlePaddle/PaddleCloud/tree/main/tekton). ### Choice 2: Running in Ubuntu with Root Privilege - Install `build-essential` by apt From 59a78f2a4648430227def9e872bbf612817c6db9 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Fri, 17 Jun 2022 04:40:11 +0000 Subject: [PATCH 20/30] ds2 wenetspeech to onnx and support streaming asr server --- demos/streaming_asr_server/.gitignore | 2 + .../conf/ws_ds2_application.yaml | 6 +- .../local/rtf_from_log.py | 40 ++++++ demos/streaming_asr_server/local/test.sh | 21 +++ .../{ => local}/websocket_client.py | 5 +- paddlespeech/cli/utils.py | 5 +- paddlespeech/resource/pretrained_models.py | 128 ++++++++++++++---- .../server/conf/ws_ds2_application.yaml | 8 +- .../server/engine/asr/online/onnx/__init__.py | 2 +- .../asr/online/paddleinference/__init__.py | 2 +- .../engine/asr/online/python/__init__.py | 2 +- speechx/examples/ds2_ol/onnx/README.md | 2 +- .../ds2_ol/onnx/local/onnx_infer_shape.py | 2 + speechx/examples/ds2_ol/onnx/local/ort_opt.py | 45 ++++++ speechx/examples/ds2_ol/onnx/run.sh | 21 ++- 15 files changed, 
242 insertions(+), 49 deletions(-) create mode 100644 demos/streaming_asr_server/.gitignore create mode 100755 demos/streaming_asr_server/local/rtf_from_log.py create mode 100755 demos/streaming_asr_server/local/test.sh rename demos/streaming_asr_server/{ => local}/websocket_client.py (94%) create mode 100755 speechx/examples/ds2_ol/onnx/local/ort_opt.py diff --git a/demos/streaming_asr_server/.gitignore b/demos/streaming_asr_server/.gitignore new file mode 100644 index 00000000..0f09019d --- /dev/null +++ b/demos/streaming_asr_server/.gitignore @@ -0,0 +1,2 @@ +exp + diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index a4e6e9a1..e7ce59c2 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -11,7 +11,7 @@ port: 8090 # protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. protocol: 'websocket' -engine_list: ['asr_online-inference'] +engine_list: ['asr_online-onnx'] ################################################################################# @@ -21,7 +21,7 @@ engine_list: ['asr_online-inference'] ################################### ASR ######################################### ################### speech task: asr; engine_type: online-inference ####################### asr_online-inference: - model_type: 'deepspeech2online_aishell' + model_type: 'deepspeech2online_wenetspeech' am_model: # the pdmodel file of am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' @@ -53,7 +53,7 @@ asr_online-inference: ################################### ASR ######################################### ################### speech task: asr; engine_type: online-onnx ####################### asr_online-onnx: - model_type: 'deepspeech2online_aishell' + model_type: 'deepspeech2online_wenetspeech' am_model: # the pdmodel file of onnx am static 
model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' diff --git a/demos/streaming_asr_server/local/rtf_from_log.py b/demos/streaming_asr_server/local/rtf_from_log.py new file mode 100755 index 00000000..a5634388 --- /dev/null +++ b/demos/streaming_asr_server/local/rtf_from_log.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +import argparse + +if __name__ == '__main__': + parser = argparse.ArgumentParser(prog=__doc__) + parser.add_argument( + '--logfile', type=str, required=True, help='ws client log file') + + args = parser.parse_args() + + rtfs = [] + with open(args.logfile, 'r') as f: + for line in f: + if 'RTF=' in line: + # udio duration: 6.126, elapsed time: 3.471978187561035, RTF=0.5667610492264177 + line = line.strip() + beg = line.index("audio") + line = line[beg:] + + items = line.split(',') + vals = [] + for elem in items: + if "RTF=" in elem: + continue + _, val = elem.split(":") + vals.append(eval(val)) + keys = ['T', 'P'] + meta = dict(zip(keys, vals)) + + rtfs.append(meta) + + T = 0.0 + P = 0.0 + n = 0 + for m in rtfs: + n += 1 + T += m['T'] + P += m['P'] + + print(f"RTF: {P/T}, utts: {n}") diff --git a/demos/streaming_asr_server/local/test.sh b/demos/streaming_asr_server/local/test.sh new file mode 100755 index 00000000..d70dd336 --- /dev/null +++ b/demos/streaming_asr_server/local/test.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +if [ $# != 1 ];then + echo "usage: $0 wav_scp" + exit -1 +fi + +scp=$1 + +# calc RTF +# wav_scp can generate from `speechx/examples/ds2_ol/aishell` + +exp=exp +mkdir -p $exp + +python3 local/websocket_client.py --server_ip 127.0.0.1 --port 8090 --wavscp $scp &> $exp/log.rsl + +python3 local/rtf_from_log.py --logfile $exp/log.rsl + + + \ No newline at end of file diff --git a/demos/streaming_asr_server/websocket_client.py b/demos/streaming_asr_server/local/websocket_client.py similarity index 94% rename from demos/streaming_asr_server/websocket_client.py rename to 
demos/streaming_asr_server/local/websocket_client.py index 8e1f19a5..03712402 100644 --- a/demos/streaming_asr_server/websocket_client.py +++ b/demos/streaming_asr_server/local/websocket_client.py @@ -1,3 +1,4 @@ +#!/usr/bin/python # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,9 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -#!/usr/bin/python -# -*- coding: UTF-8 -*- -# script for calc RTF: grep -rn RTF log.txt | awk '{print $NF}' | awk -F "=" '{sum += $NF} END {print "all time",sum, "audio num", NR, "RTF", sum/NR}' +# calc avg RTF(NOT Accurate): grep -rn RTF log.txt | awk '{print $NF}' | awk -F "=" '{sum += $NF} END {print "all time",sum, "audio num", NR, "RTF", sum/NR}' import argparse import asyncio import codecs diff --git a/paddlespeech/cli/utils.py b/paddlespeech/cli/utils.py index 21c887e9..0161629e 100644 --- a/paddlespeech/cli/utils.py +++ b/paddlespeech/cli/utils.py @@ -25,10 +25,10 @@ from typing import Dict import paddle import requests +import soundfile as sf import yaml from paddle.framework import load -import paddlespeech.audio from . 
import download from .entry import commands try: @@ -282,7 +282,8 @@ def _note_one_stat(cls_name, params={}): if 'audio_file' in params: try: - _, sr = paddlespeech.audio.load(params['audio_file']) + # recursive import cased by: utils.DATA_HOME + _, sr = sf.read(params['audio_file']) except Exception: sr = -1 diff --git a/paddlespeech/resource/pretrained_models.py b/paddlespeech/resource/pretrained_models.py index c3cef499..37303331 100644 --- a/paddlespeech/resource/pretrained_models.py +++ b/paddlespeech/resource/pretrained_models.py @@ -135,15 +135,21 @@ asr_dynamic_pretrained_models = { }, }, "deepspeech2online_wenetspeech-zh-16k": { - '1.0': { + '1.0.3': { 'url': - 'https://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.2.model.tar.gz', + 'http://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.3.model.tar.gz', 'md5': - 'b0c77e7f8881e0a27b82127d1abb8d5f', + 'cfe273793e68f790f742b411c98bc75e', 'cfg_path': 'model.yaml', 'ckpt_path': 'exp/deepspeech2_online/checkpoints/avg_10', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_10.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_10.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', 'lm_url': 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', 'lm_md5': @@ -170,14 +176,22 @@ asr_dynamic_pretrained_models = { '1.0.2': { 'url': 'http://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.2.model.tar.gz', - 'md5': '4dd42cfce9aaa54db0ec698da6c48ec5', - 'cfg_path': 'model.yaml', - 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_1', - 'model':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', - 'params':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', - 'onnx_model': 'onnx/model.onnx', - 'lm_url':'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', - 'lm_md5':'29e02312deb2e59b3c8686c7966d4fe3' + 'md5': + 
'4dd42cfce9aaa54db0ec698da6c48ec5', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' }, }, "deepspeech2offline_librispeech-en-16k": { @@ -241,14 +255,44 @@ asr_static_pretrained_models = { '1.0.2': { 'url': 'http://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.2.model.tar.gz', - 'md5': '4dd42cfce9aaa54db0ec698da6c48ec5', - 'cfg_path': 'model.yaml', - 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_1', - 'model':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', - 'params':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', - 'onnx_model': 'onnx/model.onnx', - 'lm_url':'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', - 'lm_md5':'29e02312deb2e59b3c8686c7966d4fe3' + 'md5': + '4dd42cfce9aaa54db0ec698da6c48ec5', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' + }, + }, + "deepspeech2online_wenetspeech-zh-16k": { + '1.0.3': { + 'url': + 'http://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.3.model.tar.gz', + 'md5': + 'cfe273793e68f790f742b411c98bc75e', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_10', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_10.jit.pdmodel', + 'params': + 
'exp/deepspeech2_online/checkpoints/avg_10.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' }, }, } @@ -258,14 +302,44 @@ asr_onnx_pretrained_models = { '1.0.2': { 'url': 'http://paddlespeech.bj.bcebos.com/s2t/aishell/asr0/asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.2.model.tar.gz', - 'md5': '4dd42cfce9aaa54db0ec698da6c48ec5', - 'cfg_path': 'model.yaml', - 'ckpt_path':'exp/deepspeech2_online/checkpoints/avg_1', - 'model':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', - 'params':'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', - 'onnx_model': 'onnx/model.onnx', - 'lm_url':'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', - 'lm_md5':'29e02312deb2e59b3c8686c7966d4fe3' + 'md5': + '4dd42cfce9aaa54db0ec698da6c48ec5', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_1', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_1.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' + }, + }, + "deepspeech2online_wenetspeech-zh-16k": { + '1.0.3': { + 'url': + 'http://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.3.model.tar.gz', + 'md5': + 'cfe273793e68f790f742b411c98bc75e', + 'cfg_path': + 'model.yaml', + 'ckpt_path': + 'exp/deepspeech2_online/checkpoints/avg_10', + 'model': + 'exp/deepspeech2_online/checkpoints/avg_10.jit.pdmodel', + 'params': + 'exp/deepspeech2_online/checkpoints/avg_10.jit.pdiparams', + 'onnx_model': + 'onnx/model.onnx', + 'lm_url': + 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm', + 'lm_md5': + '29e02312deb2e59b3c8686c7966d4fe3' }, }, } diff --git 
a/paddlespeech/server/conf/ws_ds2_application.yaml b/paddlespeech/server/conf/ws_ds2_application.yaml index 430e6fd1..e7ce59c2 100644 --- a/paddlespeech/server/conf/ws_ds2_application.yaml +++ b/paddlespeech/server/conf/ws_ds2_application.yaml @@ -11,7 +11,7 @@ port: 8090 # protocol = ['websocket'] (only one can be selected). # websocket only support online engine type. protocol: 'websocket' -engine_list: ['asr_online-inference'] +engine_list: ['asr_online-onnx'] ################################################################################# @@ -21,7 +21,7 @@ engine_list: ['asr_online-inference'] ################################### ASR ######################################### ################### speech task: asr; engine_type: online-inference ####################### asr_online-inference: - model_type: 'deepspeech2online_aishell' + model_type: 'deepspeech2online_wenetspeech' am_model: # the pdmodel file of am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' @@ -53,7 +53,7 @@ asr_online-inference: ################################### ASR ######################################### ################### speech task: asr; engine_type: online-onnx ####################### asr_online-onnx: - model_type: 'deepspeech2online_aishell' + model_type: 'deepspeech2online_wenetspeech' am_model: # the pdmodel file of onnx am static model [optional] am_params: # the pdiparams file of am static model [optional] lang: 'zh' @@ -81,4 +81,4 @@ asr_online-onnx: window_n: 7 # frame shift_n: 4 # frame window_ms: 20 # ms - shift_ms: 10 # ms \ No newline at end of file + shift_ms: 10 # ms diff --git a/paddlespeech/server/engine/asr/online/onnx/__init__.py b/paddlespeech/server/engine/asr/online/onnx/__init__.py index c747d3e7..97043fd7 100644 --- a/paddlespeech/server/engine/asr/online/onnx/__init__.py +++ b/paddlespeech/server/engine/asr/online/onnx/__init__.py @@ -10,4 +10,4 @@ # distributed under the License is distributed on an "AS IS" BASIS, 
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. \ No newline at end of file +# limitations under the License. diff --git a/paddlespeech/server/engine/asr/online/paddleinference/__init__.py b/paddlespeech/server/engine/asr/online/paddleinference/__init__.py index c747d3e7..97043fd7 100644 --- a/paddlespeech/server/engine/asr/online/paddleinference/__init__.py +++ b/paddlespeech/server/engine/asr/online/paddleinference/__init__.py @@ -10,4 +10,4 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. \ No newline at end of file +# limitations under the License. diff --git a/paddlespeech/server/engine/asr/online/python/__init__.py b/paddlespeech/server/engine/asr/online/python/__init__.py index c747d3e7..97043fd7 100644 --- a/paddlespeech/server/engine/asr/online/python/__init__.py +++ b/paddlespeech/server/engine/asr/online/python/__init__.py @@ -10,4 +10,4 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. \ No newline at end of file +# limitations under the License. 
diff --git a/speechx/examples/ds2_ol/onnx/README.md b/speechx/examples/ds2_ol/onnx/README.md index 566a4597..eaea8b6e 100644 --- a/speechx/examples/ds2_ol/onnx/README.md +++ b/speechx/examples/ds2_ol/onnx/README.md @@ -9,7 +9,7 @@ Please make sure [Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX) and The example test with these packages installed: ``` -paddle2onnx 0.9.8rc0 # develop af4354b4e9a61a93be6490640059a02a4499bc7a +paddle2onnx 0.9.8 # develop 62c5424e22cd93968dc831216fc9e0f0fce3d819 paddleaudio 0.2.1 paddlefsl 1.1.0 paddlenlp 2.2.6 diff --git a/speechx/examples/ds2_ol/onnx/local/onnx_infer_shape.py b/speechx/examples/ds2_ol/onnx/local/onnx_infer_shape.py index c41e66b7..2d364c25 100755 --- a/speechx/examples/ds2_ol/onnx/local/onnx_infer_shape.py +++ b/speechx/examples/ds2_ol/onnx/local/onnx_infer_shape.py @@ -492,6 +492,8 @@ class SymbolicShapeInference: skip_infer = node.op_type in [ 'If', 'Loop', 'Scan', 'SplitToSequence', 'ZipMap', \ # contrib ops + + 'Attention', 'BiasGelu', \ 'EmbedLayerNormalization', \ 'FastGelu', 'Gelu', 'LayerNormalization', \ diff --git a/speechx/examples/ds2_ol/onnx/local/ort_opt.py b/speechx/examples/ds2_ol/onnx/local/ort_opt.py new file mode 100755 index 00000000..8e995bcf --- /dev/null +++ b/speechx/examples/ds2_ol/onnx/local/ort_opt.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +import argparse + +import onnxruntime as ort + +# onnxruntime optimizer. 
+# https://onnxruntime.ai/docs/performance/graph-optimizations.html +# https://onnxruntime.ai/docs/api/python/api_summary.html#api + + +def parse_arguments(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--model_in', required=True, type=str, help='Path to onnx model.') + parser.add_argument( + '--opt_level', + required=True, + type=int, + default=0, + choices=[0, 1, 2], + help='Path to onnx model.') + parser.add_argument( + '--model_out', required=True, help='path to save the optimized model.') + parser.add_argument('--debug', default=False, help='output debug info.') + return parser.parse_args() + + +if __name__ == '__main__': + args = parse_arguments() + + sess_options = ort.SessionOptions() + + # Set graph optimization level + print(f"opt level: {args.opt_level}") + if args.opt_level == 0: + sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_BASIC + elif args.opt_level == 1: + sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_EXTENDED + else: + sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL + + # To enable model serialization after graph optimization set this + sess_options.optimized_model_filepath = args.model_out + + session = ort.InferenceSession(args.model_in, sess_options) diff --git a/speechx/examples/ds2_ol/onnx/run.sh b/speechx/examples/ds2_ol/onnx/run.sh index 57cd9416..583abda4 100755 --- a/speechx/examples/ds2_ol/onnx/run.sh +++ b/speechx/examples/ds2_ol/onnx/run.sh @@ -5,10 +5,11 @@ set -e . 
path.sh stage=0 -stop_stage=100 -#tarfile=asr0_deepspeech2_online_wenetspeech_ckpt_1.0.2.model.tar.gz -tarfile=asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.1.model.tar.gz -model_prefix=avg_1.jit +stop_stage=50 +tarfile=asr0_deepspeech2_online_wenetspeech_ckpt_1.0.2.model.tar.gz +#tarfile=asr0_deepspeech2_online_aishell_fbank161_ckpt_1.0.1.model.tar.gz +model_prefix=avg_10.jit +#model_prefix=avg_1.jit model=${model_prefix}.pdmodel param=${model_prefix}.pdiparams @@ -80,6 +81,14 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ];then fi +if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ] ;then + # ort graph optimize + ./local/ort_opt.py --model_in $exp/model.onnx --opt_level 0 --model_out $exp/model.ort.opt.onnx + + ./local/infer_check.py --input_file $input_file --model_type $model_type --model_dir $dir --model_prefix $model_prefix --onnx_model $exp/model.ort.opt.onnx +fi + + # aishell rnn hidden is 1024 # wenetspeech rnn hiddn is 2048 if [ $model_type == 'aishell' ];then @@ -90,9 +99,9 @@ else echo "not support: $model_type" exit -1 fi - -if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ] ;then + +if [ ${stage} -le 51 ] && [ ${stop_stage} -ge 51 ] ;then # wenetspeech ds2 model execed 2GB limit, will error. 
# simplifying onnx model ./local/onnx_opt.sh $exp/model.onnx $exp/model.opt.onnx "$input_shape" From d21e6d8adb827877abfa680b25123d475412f59b Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Fri, 17 Jun 2022 04:56:01 +0000 Subject: [PATCH 21/30] fix window ms config --- demos/streaming_asr_server/conf/ws_ds2_application.yaml | 4 ++-- demos/streaming_asr_server/local/websocket_client.py | 2 ++ demos/streaming_asr_server/test.sh | 2 -- paddlespeech/server/conf/ws_ds2_application.yaml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/demos/streaming_asr_server/conf/ws_ds2_application.yaml b/demos/streaming_asr_server/conf/ws_ds2_application.yaml index e7ce59c2..f3f60b96 100644 --- a/demos/streaming_asr_server/conf/ws_ds2_application.yaml +++ b/demos/streaming_asr_server/conf/ws_ds2_application.yaml @@ -45,7 +45,7 @@ asr_online-inference: sample_width: 2 window_n: 7 # frame shift_n: 4 # frame - window_ms: 20 # ms + window_ms: 25 # ms shift_ms: 10 # ms @@ -80,5 +80,5 @@ asr_online-onnx: sample_width: 2 window_n: 7 # frame shift_n: 4 # frame - window_ms: 20 # ms + window_ms: 25 # ms shift_ms: 10 # ms diff --git a/demos/streaming_asr_server/local/websocket_client.py b/demos/streaming_asr_server/local/websocket_client.py index 03712402..51ae7a2f 100644 --- a/demos/streaming_asr_server/local/websocket_client.py +++ b/demos/streaming_asr_server/local/websocket_client.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# calc avg RTF(NOT Accurate): grep -rn RTF log.txt | awk '{print $NF}' | awk -F "=" '{sum += $NF} END {print "all time",sum, "audio num", NR, "RTF", sum/NR}' +# python3 websocket_client.py --server_ip 127.0.0.1 --port 8290 --punc.server_ip 127.0.0.1 --punc.port 8190 --wavfile ./zh.wav +# python3 websocket_client.py --server_ip 127.0.0.1 --port 8290 --wavfile ./zh.wav import argparse import asyncio import codecs diff --git a/demos/streaming_asr_server/test.sh b/demos/streaming_asr_server/test.sh index f09068d4..67a5ec4c 100755 --- a/demos/streaming_asr_server/test.sh +++ b/demos/streaming_asr_server/test.sh @@ -3,11 +3,9 @@ wget -c https://paddlespeech.bj.bcebos.com/PaddleAudio/zh.wav # read the wav and pass it to only streaming asr service # If `127.0.0.1` is not accessible, you need to use the actual service IP address. -# python3 websocket_client.py --server_ip 127.0.0.1 --port 8290 --wavfile ./zh.wav paddlespeech_client asr_online --server_ip 127.0.0.1 --port 8090 --input ./zh.wav # read the wav and call streaming and punc service # If `127.0.0.1` is not accessible, you need to use the actual service IP address. 
-# python3 websocket_client.py --server_ip 127.0.0.1 --port 8290 --punc.server_ip 127.0.0.1 --punc.port 8190 --wavfile ./zh.wav paddlespeech_client asr_online --server_ip 127.0.0.1 --port 8290 --punc.server_ip 127.0.0.1 --punc.port 8190 --input ./zh.wav diff --git a/paddlespeech/server/conf/ws_ds2_application.yaml b/paddlespeech/server/conf/ws_ds2_application.yaml index e7ce59c2..f3f60b96 100644 --- a/paddlespeech/server/conf/ws_ds2_application.yaml +++ b/paddlespeech/server/conf/ws_ds2_application.yaml @@ -45,7 +45,7 @@ asr_online-inference: sample_width: 2 window_n: 7 # frame shift_n: 4 # frame - window_ms: 20 # ms + window_ms: 25 # ms shift_ms: 10 # ms @@ -80,5 +80,5 @@ asr_online-onnx: sample_width: 2 window_n: 7 # frame shift_n: 4 # frame - window_ms: 20 # ms + window_ms: 25 # ms shift_ms: 10 # ms From c6b846c4fe348940bf917a9578cfaf0d800f4b6a Mon Sep 17 00:00:00 2001 From: freeliuzc Date: Fri, 17 Jun 2022 14:21:32 +0800 Subject: [PATCH 22/30] Update install_cn.md update doc --- docs/source/install_cn.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/install_cn.md b/docs/source/install_cn.md index f59cafa2..74c91fa6 100644 --- a/docs/source/install_cn.md +++ b/docs/source/install_cn.md @@ -133,7 +133,7 @@ pip install . 
-i https://pypi.tuna.tsinghua.edu.cn/simple ### 选择1: 使用Docker容器(推荐) Docker 是一种开源工具,用于在和系统本身环境相隔离的环境中构建、发布和运行各类应用程序。如果您没有 Docker 运行环境,请参考 [Docker 官网](https://www.docker.com/)进行安装,如果您准备使用 GPU 版本镜像,还需要提前安装好 [nvidia-docker](https://github.com/NVIDIA/nvidia-docker) 。 -PaddleSpeech 提供了带有最新代码的 docker 镜像供您使用,您只需要**拉取 docker 镜像 **,然后**运行 docker 镜像**,无需其他任何额外操作,即可开始使用 PaddleSpeech 的所有功能。 +我们提供了包含最新 PaddleSpeech 代码的 docker 镜像,并预先安装好了所有的环境和库依赖,您只需要**拉取并运行 docker 镜像**,无需其他任何额外操作,即可开始享用 PaddleSpeech 的所有功能。 在 [Docker Hub](https://hub.docker.com/repository/docker/paddlecloud/paddlespeech) 中获取这些镜像及相应的使用指南,包括 CPU、GPU、ROCm 版本。 From 7fc82ef1c1a5811ef1fb456e04e39920f55beebc Mon Sep 17 00:00:00 2001 From: freeliuzc Date: Fri, 17 Jun 2022 14:29:40 +0800 Subject: [PATCH 23/30] Update install.md update doc --- docs/source/install.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/install.md b/docs/source/install.md index eeaf6d46..ac48d88b 100644 --- a/docs/source/install.md +++ b/docs/source/install.md @@ -141,7 +141,7 @@ To avoid the trouble of environment setup, running in a Docker container is high ### Choice 1: Running in Docker Container (Recommend) Docker is an open-source tool to build, ship, and run distributed applications in an isolated environment. If you do not have a Docker environment, please refer to [Docker](https://www.docker.com/). If you will use GPU version, you also need to install [nvidia-docker](https://github.com/NVIDIA/nvidia-docker). -We provides built docker images with latest code. All you have to do is to **pull the docker image** and **run the docker image**. Then you can enjoy PaddleSpeech without any extra action. +We provide docker images containing the latest PaddleSpeech code, and all environment and package dependencies are pre-installed. All you have to do is to **pull and run the docker image**. Then you can enjoy PaddleSpeech without any extra steps. 
Get these images and guidance in [docker hub](https://hub.docker.com/repository/docker/paddlecloud/paddlespeech), including CPU, GPU, ROCm environment versions. From efaeb14a145d5a698c39af3f5aae7c21f2db3e07 Mon Sep 17 00:00:00 2001 From: freeliuzc Date: Fri, 17 Jun 2022 14:32:38 +0800 Subject: [PATCH 24/30] Update install_cn.md Add space aside Docker --- docs/source/install_cn.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/install_cn.md b/docs/source/install_cn.md index 74c91fa6..345e79bb 100644 --- a/docs/source/install_cn.md +++ b/docs/source/install_cn.md @@ -130,7 +130,7 @@ pip install . -i https://pypi.tuna.tsinghua.edu.cn/simple - 选择 2: 使用`Ubuntu` ,并且拥有 root 权限。 为了避免各种环境配置问题,我们非常推荐你使用 docker 容器。如果你不想使用 docker,但是可以使用拥有 root 权限的 Ubuntu 系统,你也可以完成**困难**方式的安装。 -### 选择1: 使用Docker容器(推荐) +### 选择1: 使用 Docker 容器(推荐) Docker 是一种开源工具,用于在和系统本身环境相隔离的环境中构建、发布和运行各类应用程序。如果您没有 Docker 运行环境,请参考 [Docker 官网](https://www.docker.com/)进行安装,如果您准备使用 GPU 版本镜像,还需要提前安装好 [nvidia-docker](https://github.com/NVIDIA/nvidia-docker) 。 我们提供了包含最新 PaddleSpeech 代码的 docker 镜像,并预先安装好了所有的环境和库依赖,您只需要**拉取并运行 docker 镜像**,无需其他任何额外操作,即可开始享用 PaddleSpeech 的所有功能。 From fe345409bb469fb77f5feed381e748f7a9ebfad2 Mon Sep 17 00:00:00 2001 From: KP <109694228@qq.com> Date: Fri, 17 Jun 2022 14:42:28 +0800 Subject: [PATCH 25/30] Fix circular import error in paddlespeech.cli.utils and paddlespeech.audio --- paddlespeech/audio/utils/__init__.py | 4 +-- paddlespeech/cli/utils.py | 33 +------------------ paddlespeech/resource/resource.py | 2 +- .../engine/asr/online/onnx/asr_engine.py | 2 +- .../asr/online/paddleinference/asr_engine.py | 2 +- .../engine/asr/online/python/asr_engine.py | 2 +- .../engine/asr/paddleinference/asr_engine.py | 2 +- 7 files changed, 8 insertions(+), 39 deletions(-) diff --git a/paddlespeech/audio/utils/__init__.py b/paddlespeech/audio/utils/__init__.py index 742f9f8e..f1e5deb0 100644 --- a/paddlespeech/audio/utils/__init__.py +++ 
b/paddlespeech/audio/utils/__init__.py @@ -11,8 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from ...cli.utils import DATA_HOME -from ...cli.utils import MODEL_HOME +from ...utils.env import DATA_HOME +from ...utils.env import MODEL_HOME from .download import decompress from .download import download_and_decompress from .download import load_state_dict_from_url diff --git a/paddlespeech/cli/utils.py b/paddlespeech/cli/utils.py index 21c887e9..d9c092f8 100644 --- a/paddlespeech/cli/utils.py +++ b/paddlespeech/cli/utils.py @@ -30,6 +30,7 @@ from paddle.framework import load import paddlespeech.audio from . import download +from ..utils.env import CONF_HOME from .entry import commands try: from .. import __version__ @@ -161,38 +162,6 @@ def load_state_dict_from_url(url: str, path: str, md5: str=None) -> os.PathLike: return load(os.path.join(path, os.path.basename(url))) -def _get_user_home(): - return os.path.expanduser('~') - - -def _get_paddlespcceh_home(): - if 'PPSPEECH_HOME' in os.environ: - home_path = os.environ['PPSPEECH_HOME'] - if os.path.exists(home_path): - if os.path.isdir(home_path): - return home_path - else: - raise RuntimeError( - 'The environment variable PPSPEECH_HOME {} is not a directory.'. 
- format(home_path)) - else: - return home_path - return os.path.join(_get_user_home(), '.paddlespeech') - - -def _get_sub_home(directory): - home = os.path.join(_get_paddlespcceh_home(), directory) - if not os.path.exists(home): - os.makedirs(home) - return home - - -PPSPEECH_HOME = _get_paddlespcceh_home() -MODEL_HOME = _get_sub_home('models') -CONF_HOME = _get_sub_home('conf') -DATA_HOME = _get_sub_home('datasets') - - def _md5(text: str): '''Calculate the md5 value of the input text.''' md5code = hashlib.md5(text.encode()) diff --git a/paddlespeech/resource/resource.py b/paddlespeech/resource/resource.py index 15112ba7..45707eb4 100644 --- a/paddlespeech/resource/resource.py +++ b/paddlespeech/resource/resource.py @@ -18,8 +18,8 @@ from typing import List from typing import Optional from ..cli.utils import download_and_decompress -from ..cli.utils import MODEL_HOME from ..utils.dynamic_import import dynamic_import +from ..utils.env import MODEL_HOME from .model_alias import model_alias task_supported = ['asr', 'cls', 'st', 'text', 'tts', 'vector'] diff --git a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py index aab29f78..06793164 100644 --- a/paddlespeech/server/engine/asr/online/onnx/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/onnx/asr_engine.py @@ -23,7 +23,6 @@ from yacs.config import CfgNode from paddlespeech.cli.asr.infer import ASRExecutor from paddlespeech.cli.log import logger -from paddlespeech.cli.utils import MODEL_HOME from paddlespeech.resource import CommonTaskResource from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer from paddlespeech.s2t.modules.ctc import CTCDecoder @@ -31,6 +30,7 @@ from paddlespeech.s2t.transform.transformation import Transformation from paddlespeech.s2t.utils.utility import UpdateConfig from paddlespeech.server.engine.base_engine import BaseEngine from paddlespeech.server.utils import onnx_infer +from 
paddlespeech.utils.env import MODEL_HOME __all__ = ['PaddleASRConnectionHanddler', 'ASRServerExecutor', 'ASREngine'] diff --git a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py index a450e430..efb726aa 100644 --- a/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/paddleinference/asr_engine.py @@ -23,7 +23,6 @@ from yacs.config import CfgNode from paddlespeech.cli.asr.infer import ASRExecutor from paddlespeech.cli.log import logger -from paddlespeech.cli.utils import MODEL_HOME from paddlespeech.resource import CommonTaskResource from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer from paddlespeech.s2t.modules.ctc import CTCDecoder @@ -31,6 +30,7 @@ from paddlespeech.s2t.transform.transformation import Transformation from paddlespeech.s2t.utils.utility import UpdateConfig from paddlespeech.server.engine.base_engine import BaseEngine from paddlespeech.server.utils.paddle_predictor import init_predictor +from paddlespeech.utils.env import MODEL_HOME __all__ = ['PaddleASRConnectionHanddler', 'ASRServerExecutor', 'ASREngine'] diff --git a/paddlespeech/server/engine/asr/online/python/asr_engine.py b/paddlespeech/server/engine/asr/online/python/asr_engine.py index c22cbbe5..daa9fc50 100644 --- a/paddlespeech/server/engine/asr/online/python/asr_engine.py +++ b/paddlespeech/server/engine/asr/online/python/asr_engine.py @@ -23,7 +23,6 @@ from yacs.config import CfgNode from paddlespeech.cli.asr.infer import ASRExecutor from paddlespeech.cli.log import logger -from paddlespeech.cli.utils import MODEL_HOME from paddlespeech.resource import CommonTaskResource from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer from paddlespeech.s2t.modules.ctc import CTCDecoder @@ -36,6 +35,7 @@ from paddlespeech.server.engine.asr.online.ctc_endpoint import OnlineCTCEndpoint from 
paddlespeech.server.engine.asr.online.ctc_search import CTCPrefixBeamSearch from paddlespeech.server.engine.base_engine import BaseEngine from paddlespeech.server.utils.paddle_predictor import init_predictor +from paddlespeech.utils.env import MODEL_HOME __all__ = ['PaddleASRConnectionHanddler', 'ASRServerExecutor', 'ASREngine'] diff --git a/paddlespeech/server/engine/asr/paddleinference/asr_engine.py b/paddlespeech/server/engine/asr/paddleinference/asr_engine.py index 1a3b4620..572004eb 100644 --- a/paddlespeech/server/engine/asr/paddleinference/asr_engine.py +++ b/paddlespeech/server/engine/asr/paddleinference/asr_engine.py @@ -21,7 +21,6 @@ from yacs.config import CfgNode from paddlespeech.cli.asr.infer import ASRExecutor from paddlespeech.cli.log import logger -from paddlespeech.cli.utils import MODEL_HOME from paddlespeech.resource import CommonTaskResource from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer from paddlespeech.s2t.modules.ctc import CTCDecoder @@ -29,6 +28,7 @@ from paddlespeech.s2t.utils.utility import UpdateConfig from paddlespeech.server.engine.base_engine import BaseEngine from paddlespeech.server.utils.paddle_predictor import init_predictor from paddlespeech.server.utils.paddle_predictor import run_model +from paddlespeech.utils.env import MODEL_HOME __all__ = ['ASREngine', 'PaddleASRConnectionHandler'] From fc5f0b14e06dae7f930492265bb6d25379ee92e6 Mon Sep 17 00:00:00 2001 From: KP <109694228@qq.com> Date: Fri, 17 Jun 2022 14:50:46 +0800 Subject: [PATCH 26/30] Fix circular import error in paddlespeech.cli.utils and paddlespeech.audio --- paddlespeech/utils/env.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 paddlespeech/utils/env.py diff --git a/paddlespeech/utils/env.py b/paddlespeech/utils/env.py new file mode 100644 index 00000000..18e8ca45 --- /dev/null +++ b/paddlespeech/utils/env.py @@ -0,0 +1,33 @@ +import os + + +def _get_user_home(): + return 
os.path.expanduser('~') + + +def _get_paddlespcceh_home(): + if 'PPSPEECH_HOME' in os.environ: + home_path = os.environ['PPSPEECH_HOME'] + if os.path.exists(home_path): + if os.path.isdir(home_path): + return home_path + else: + raise RuntimeError( + 'The environment variable PPSPEECH_HOME {} is not a directory.'. + format(home_path)) + else: + return home_path + return os.path.join(_get_user_home(), '.paddlespeech') + + +def _get_sub_home(directory): + home = os.path.join(_get_paddlespcceh_home(), directory) + if not os.path.exists(home): + os.makedirs(home) + return home + + +PPSPEECH_HOME = _get_paddlespcceh_home() +MODEL_HOME = _get_sub_home('models') +CONF_HOME = _get_sub_home('conf') +DATA_HOME = _get_sub_home('datasets') From 220bcffac8eea7ef6948bcb817e3b636257831c1 Mon Sep 17 00:00:00 2001 From: KP <109694228@qq.com> Date: Fri, 17 Jun 2022 15:08:15 +0800 Subject: [PATCH 27/30] Fix circular import error in paddlespeech.cli.utils and paddlespeech.audio --- paddlespeech/utils/env.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/paddlespeech/utils/env.py b/paddlespeech/utils/env.py index 18e8ca45..03c8757b 100644 --- a/paddlespeech/utils/env.py +++ b/paddlespeech/utils/env.py @@ -1,3 +1,16 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import os From 80adf54a1038bbf300537c18eb93a2cb8743c281 Mon Sep 17 00:00:00 2001 From: iftaken Date: Fri, 17 Jun 2022 15:29:06 +0800 Subject: [PATCH 28/30] remove error url for paddlepaddle --- demos/speech_web/README_cn.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demos/speech_web/README_cn.md b/demos/speech_web/README_cn.md index cfcbe559..0de4ed51 100644 --- a/demos/speech_web/README_cn.md +++ b/demos/speech_web/README_cn.md @@ -65,7 +65,7 @@ yarn dev --port 8011 ## Docker启动 ### 后端docker -后端docker使用[paddlepaddle官方docker](https://www.paddlepaddle.org.cn/),这里演示CPU版本 +后端docker使用[paddlepaddle官方docker](https://www.paddlepaddle.org.cn),这里演示CPU版本 ``` # 拉取PaddleSpeech项目 cd PaddleSpeechServer From b452be3d8deeb1cdf4ce0378e6f28a4276d8ef0c Mon Sep 17 00:00:00 2001 From: KP <109694228@qq.com> Date: Fri, 17 Jun 2022 15:31:54 +0800 Subject: [PATCH 29/30] Fix circular import error in paddlespeech.cli.utils and paddlespeech.audio --- paddlespeech/cli/asr/infer.py | 2 +- paddlespeech/cli/st/infer.py | 2 +- paddlespeech/cls/models/panns/panns.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/paddlespeech/cli/asr/infer.py b/paddlespeech/cli/asr/infer.py index a943ccfa..24839a89 100644 --- a/paddlespeech/cli/asr/infer.py +++ b/paddlespeech/cli/asr/infer.py @@ -26,11 +26,11 @@ import paddle import soundfile from yacs.config import CfgNode +from ...utils.env import MODEL_HOME from ..download import get_path_from_url from ..executor import BaseExecutor from ..log import logger from ..utils import CLI_TIMER -from ..utils import MODEL_HOME from ..utils import stats_wrapper from ..utils import timer_register from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer diff --git a/paddlespeech/cli/st/infer.py b/paddlespeech/cli/st/infer.py index e1ce181a..4e099c40 100644 --- a/paddlespeech/cli/st/infer.py +++ b/paddlespeech/cli/st/infer.py @@ -26,10 +26,10 @@ import soundfile from kaldiio import WriteHelper from yacs.config 
import CfgNode +from ...utils.env import MODEL_HOME from ..executor import BaseExecutor from ..log import logger from ..utils import download_and_decompress -from ..utils import MODEL_HOME from ..utils import stats_wrapper from paddlespeech.s2t.frontend.featurizer.text_featurizer import TextFeaturizer from paddlespeech.s2t.utils.utility import UpdateConfig diff --git a/paddlespeech/cls/models/panns/panns.py b/paddlespeech/cls/models/panns/panns.py index 4befe7aa..37deae80 100644 --- a/paddlespeech/cls/models/panns/panns.py +++ b/paddlespeech/cls/models/panns/panns.py @@ -16,8 +16,8 @@ import os import paddle.nn as nn import paddle.nn.functional as F -from paddlespeech.audio.utils import MODEL_HOME from paddlespeech.audio.utils.download import load_state_dict_from_url +from paddlespeech.utils.env import MODEL_HOME __all__ = ['CNN14', 'CNN10', 'CNN6', 'cnn14', 'cnn10', 'cnn6'] From 63ad0469798363cddab2667f83d7e7fe16044a45 Mon Sep 17 00:00:00 2001 From: iftaken Date: Fri, 17 Jun 2022 16:39:13 +0800 Subject: [PATCH 30/30] del dead link --- demos/speech_web/web_client/README.md | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 demos/speech_web/web_client/README.md diff --git a/demos/speech_web/web_client/README.md b/demos/speech_web/web_client/README.md deleted file mode 100644 index c0793a82..00000000 --- a/demos/speech_web/web_client/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Vue 3 + Vite - -This template should help get you started developing with Vue 3 in Vite. The template uses Vue 3 `