forked from apeterswu/RL4NMT
-
Notifications
You must be signed in to change notification settings - Fork 0
/
test_zhen.sh
37 lines (33 loc) · 1.03 KB
/
test_zhen.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
#!/usr/bin/env bash
# Decode the WMT17 Zh->En test set with a series of transformer checkpoints
# (training steps 100000..120000, stride 500). For each step, a throwaway
# model dir is built containing just that checkpoint plus a `checkpoint`
# index file, and t2t-decoder is run against it.
#
# Usage: test_zhen.sh <unused> [beam_size]   (beam size is read from $2)
# Must be launched from the directory containing ./rl4nmt/ — all paths
# below are relative to the launch directory.
set -euo pipefail

export PYTHONPATH=./rl4nmt/:${PYTHONPATH:-}
export CUDA_VISIBLE_DEVICES=7

binFile=./rl4nmt/tensor2tensor/bin
beamsize=${2:-6}   # NOTE(review): reads $2, not $1 — confirm intended arg position
PROBLEM=translate_zhen_wmt17
MODEL=transformer
HPARAMS=zhen_wmt17_transformer_rl_delta_setting_random
DATA_DIR=../transformer_data/zhen
USR_DIR=../rl4nmt/zhen_wmt17
ROOT_MODEL=./rl4nmt/model/${HPARAMS}

for ii in {100000..120000..500}; do
  tmpdir=${ROOT_MODEL}_${ii}
  # ${tmpdir:?} aborts rather than expanding `rm -rf` over an empty path.
  rm -rf -- "${tmpdir:?}"
  mkdir -p -- "$tmpdir"
  cp -- "${ROOT_MODEL}/model.ckpt-${ii}"* "$tmpdir/"

  # Write the TF checkpoint index file in place. The original script cd'd
  # into $tmpdir and back via relative paths, which broke on the second
  # iteration because every path here is relative to the launch directory.
  {
    echo "model_checkpoint_path: \"model.ckpt-${ii}\""
    echo "all_model_checkpoint_paths: \"model.ckpt-${ii}\""
  } > "$tmpdir/checkpoint"

  cp -- "$DATA_DIR/test.zh" "$tmpdir/"
  echo "$ii"

  "${binFile}/t2t-decoder" \
    --t2t_usr_dir="$USR_DIR" \
    --data_dir="$DATA_DIR" \
    --problems="$PROBLEM" \
    --model="$MODEL" \
    --hparams_set="$HPARAMS" \
    --output_dir="$tmpdir" \
    --decode_hparams="beam_size=${beamsize},alpha=1.1,batch_size=32" \
    --decode_from_file="$tmpdir/test.zh" \
    --worker_gpu=1
done