fix optional

branch: pull/3965/head
author: liyulingyue, 8 months ago
parent: 226cfac0bd
commit: 8fab6ff462

@@ -13,3 +13,7 @@
 # limitations under the License.
 import _locale
 _locale._getdefaultlocale = (lambda *args: ['en_US', 'utf8'])
+__version__ = '0.0.0'
+__commit__ = '9cf8c1985a98bb380c183116123672976bdfe5c9'

@ -74,7 +74,7 @@ class BaseEncoder(nn.Layer):
concat_after: bool=False, concat_after: bool=False,
static_chunk_size: int=0, static_chunk_size: int=0,
use_dynamic_chunk: bool=False, use_dynamic_chunk: bool=False,
global_cmvn: paddle.nn.Layer=None, global_cmvn: Optional[nn.Layer]=None,
use_dynamic_left_chunk: bool=False, use_dynamic_left_chunk: bool=False,
max_len: int=5000): max_len: int=5000):
""" """
@ -366,7 +366,7 @@ class TransformerEncoder(BaseEncoder):
concat_after: bool=False, concat_after: bool=False,
static_chunk_size: int=0, static_chunk_size: int=0,
use_dynamic_chunk: bool=False, use_dynamic_chunk: bool=False,
global_cmvn: nn.Layer=None, global_cmvn: Optional[nn.Layer]=None,
use_dynamic_left_chunk: bool=False, ): use_dynamic_left_chunk: bool=False, ):
""" Construct TransformerEncoder """ Construct TransformerEncoder
See Encoder for the meaning of each parameter. See Encoder for the meaning of each parameter.
@ -440,7 +440,7 @@ class ConformerEncoder(BaseEncoder):
concat_after: bool=False, concat_after: bool=False,
static_chunk_size: int=0, static_chunk_size: int=0,
use_dynamic_chunk: bool=False, use_dynamic_chunk: bool=False,
global_cmvn: nn.Layer=None, global_cmvn: Optional[nn.Layer]=None,
use_dynamic_left_chunk: bool=False, use_dynamic_left_chunk: bool=False,
positionwise_conv_kernel_size: int=1, positionwise_conv_kernel_size: int=1,
macaron_style: bool=True, macaron_style: bool=True,
@ -541,7 +541,7 @@ class SqueezeformerEncoder(nn.Layer):
adaptive_scale: bool=True, adaptive_scale: bool=True,
activation_type: str="swish", activation_type: str="swish",
init_weights: bool=True, init_weights: bool=True,
global_cmvn: paddle.nn.Layer=None, global_cmvn: Optional[nn.Layer]=None,
normalize_before: bool=False, normalize_before: bool=False,
use_dynamic_chunk: bool=False, use_dynamic_chunk: bool=False,
concat_after: bool=False, concat_after: bool=False,

@@ -68,7 +68,7 @@ class Tacotron2(nn.Layer):
 postnet_layers: int=5,
 postnet_chans: int=512,
 postnet_filts: int=5,
-output_activation: str=None,
+output_activation: Optional[str]=None,
 use_batch_norm: bool=True,
 use_concate: bool=True,
 use_residual: bool=False,

@@ -13,7 +13,9 @@
 # limitations under the License.
 # Modified from espnet(https://github.com/espnet/espnet)
 """Fastspeech2 related modules for paddle"""
+from optparse import Option
 from typing import Dict
+from typing import Optional
 from typing import Sequence
 from typing import Tuple
@@ -199,7 +201,7 @@ class TransformerTTS(nn.Layer):
 encoder_concat_after: bool=False,
 decoder_concat_after: bool=False,
 reduction_factor: int=1,
-spk_embed_dim: int=None,
+spk_embed_dim: Optional[int]=None,
 spk_embed_integration_type: str="add",
 use_gst: bool=False,
 gst_tokens: int=10,
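
The change applies the standard typing convention that a parameter whose default is None should be annotated Optional[T] rather than T, with the matching "from typing import Optional" import added where it was missing. Below is a minimal, self-contained sketch of the pattern; it uses a hypothetical GlobalCMVN placeholder class instead of paddle.nn.Layer so it runs without Paddle installed, and does not reproduce the repository's actual encoder code.

from typing import Optional


class GlobalCMVN:
    """Placeholder standing in for paddle.nn.Layer (illustration only)."""

    def __call__(self, x):
        # A real module would normalize x here; the placeholder passes it through.
        return x


class Encoder:
    # Before the commit: global_cmvn: paddle.nn.Layer = None
    # After the commit:  global_cmvn: Optional[nn.Layer] = None
    def __init__(self, global_cmvn: Optional[GlobalCMVN] = None):
        self.global_cmvn = global_cmvn

    def forward(self, x):
        # The Optional annotation matches this None check.
        if self.global_cmvn is not None:
            x = self.global_cmvn(x)
        return x


enc = Encoder()                        # parameter omitted, defaults to None
enc_with_cmvn = Encoder(GlobalCMVN())  # parameter supplied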
