From 1d163ad15f7bd37799c7015024cbebb110680b95 Mon Sep 17 00:00:00 2001
From: Xinghai Sun
Date: Thu, 31 Aug 2017 12:22:27 +0800
Subject: [PATCH] Fix a serious bug in the bidirectional simple RNN for DS2.

---
 cloud/pcloud_submit.sh |  4 ++--
 layer.py               | 16 ++++++++++------
 2 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/cloud/pcloud_submit.sh b/cloud/pcloud_submit.sh
index a7fb42cb..3c9a1c26 100644
--- a/cloud/pcloud_submit.sh
+++ b/cloud/pcloud_submit.sh
@@ -1,6 +1,6 @@
 TRAIN_MANIFEST="cloud/cloud.manifest.train"
 DEV_MANIFEST="cloud/cloud.manifest.dev"
-CLOUD_MODEL_DIR="/pfs/dlnel/home/USERNAME/deepspeech2/model"
+CLOUD_MODEL_DIR="./checkpoints"
 BATCH_SIZE=256
 NUM_GPU=8
 NUM_NODE=1
@@ -11,7 +11,7 @@ DS2_PATH=${PWD%/*}
 cp -f pcloud_train.sh ${DS2_PATH}
 paddlecloud submit \
--image bootstrapper:5000/wanghaoshuang/pcloud_ds2:latest \
+-image bootstrapper:5000/paddlepaddle/pcloud_ds2:latest \
 -jobname ${JOB_NAME} \
 -cpu ${NUM_GPU} \
 -gpu ${NUM_GPU} \
diff --git a/layer.py b/layer.py
index 3b492645..ef25c0a1 100644
--- a/layer.py
+++ b/layer.py
@@ -55,16 +55,20 @@ def bidirectional_simple_rnn_bn_layer(name, input, size, act):
     :rtype: LayerOutput
     """
     # input-hidden weights shared across bi-directional rnn.
-    input_proj = paddle.layer.fc(
+    input_proj_forward = paddle.layer.fc(
         input=input, size=size, act=paddle.activation.Linear(), bias_attr=False)
-    # batch norm is only performed on input-state projection
-    input_proj_bn = paddle.layer.batch_norm(
-        input=input_proj, act=paddle.activation.Linear())
+    input_proj_backward = paddle.layer.fc(
+        input=input, size=size, act=paddle.activation.Linear(), bias_attr=False)
+    # batch norm is only performed on input-state projection
+    input_proj_bn_forward = paddle.layer.batch_norm(
+        input=input_proj_forward, act=paddle.activation.Linear())
+    input_proj_bn_backward = paddle.layer.batch_norm(
+        input=input_proj_backward, act=paddle.activation.Linear())
     # forward and backward in time
     forward_simple_rnn = paddle.layer.recurrent(
-        input=input_proj_bn, act=act, reverse=False)
+        input=input_proj_bn_forward, act=act, reverse=False)
     backward_simple_rnn = paddle.layer.recurrent(
-        input=input_proj_bn, act=act, reverse=True)
+        input=input_proj_bn_backward, act=act, reverse=True)
     return paddle.layer.concat(input=[forward_simple_rnn, backward_simple_rnn])
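
Note on the fix: before this patch, both the reverse=False and the
reverse=True recurrent layers consumed the same input_proj_bn, so the
forward and backward directions were forced to share a single input
projection and a single set of batch-norm statistics. The patch gives each
direction its own paddle.layer.fc and paddle.layer.batch_norm, which
matches the standard bidirectional RNN formulation.

Below is a minimal usage sketch of the corrected layer, assuming
PaddlePaddle's legacy v2 API (import paddle.v2 as paddle). The input name
audio_spectrogram, the 161-dimensional per-timestep features, and the
size/act values are illustrative assumptions, not part of this patch.

    import paddle.v2 as paddle

    from layer import bidirectional_simple_rnn_bn_layer

    # paddle.v2 requires global initialization before layers are built.
    paddle.init(use_gpu=False, trainer_count=1)

    # Hypothetical input: a sequence of 161-dim spectrogram frames.
    audio = paddle.layer.data(
        name='audio_spectrogram',
        type=paddle.data_type.dense_vector_sequence(161))

    # After the fix, each direction owns its projection and batch-norm
    # parameters; the outputs are concatenated per time step.
    bi_rnn = bidirectional_simple_rnn_bn_layer(
        name='bi_rnn_1', input=audio, size=1024,
        act=paddle.activation.BRelu())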