Merge pull request #899 from Jackwaterveg/fix_bug

Revert the CTC loss to the Paddle 2.1.2 behavior and fix the export-model bug
pull/905/head
Hui Zhang 3 years ago committed by GitHub
commit ac04615af5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -362,19 +362,11 @@ def ctc_loss(logits,
label_lengths,
blank=0,
reduction='mean',
norm_by_times=True,
norm_by_batchsize=False,
norm_by_total_logits_len=False):
norm_by_times=True):
#logger.info("my ctc loss with norm by times")
## https://github.com/PaddlePaddle/Paddle/blob/f5ca2db2cc/paddle/fluid/operators/warpctc_op.h#L403
loss_out = paddle.fluid.layers.warpctc(
logits,
labels,
blank,
norm_by_times,
input_lengths,
label_lengths,
norm_by_batchsize, )
loss_out = paddle.fluid.layers.warpctc(logits, labels, blank, norm_by_times,
input_lengths, label_lengths)
loss_out = paddle.fluid.layers.squeeze(loss_out, [-1])
assert reduction in ['mean', 'sum', 'none']

@@ -219,10 +219,10 @@ class DeepSpeech2Model(nn.Layer):
The model built from pretrained result.
"""
model = cls(
#feat_size=dataloader.collate_fn.feature_size,
feat_size=dataloader.dataset.feature_size,
#dict_size=dataloader.collate_fn.vocab_size,
dict_size=dataloader.dataset.vocab_size,
feat_size=dataloader.collate_fn.feature_size,
#feat_size=dataloader.dataset.feature_size,
dict_size=dataloader.collate_fn.vocab_size,
#dict_size=dataloader.dataset.vocab_size,
num_conv_layers=config.model.num_conv_layers,
num_rnn_layers=config.model.num_rnn_layers,
rnn_size=config.model.rnn_layer_size,

Loading…
Cancel
Save