
Commit b136dfb

Merge pull request bytedance#68 from bytedance/move-inference-code

separate inference and training

2 parents: 5eef7e2 + 8cc3edb

File tree: 111 files changed (+353, -283 lines)

Note: this is a large commit, so some diff content is hidden by default and not shown below.

.gitignore

Lines changed: 3 additions & 0 deletions
@@ -141,3 +141,6 @@ cython_debug/
 test_case/
 build/
 *.pb
+.vscode
+*.bin
+*.idx

CMakeLists.txt

Lines changed: 7 additions & 7 deletions
@@ -47,11 +47,11 @@ if(DEBUG_MODE)
   message(STATUS "Debug computation result")
 endif()

-add_subdirectory(kernels)
-add_subdirectory(tools)
-add_subdirectory(proto)
-add_subdirectory(model)
-add_subdirectory(example)
+add_subdirectory(lightseq/inference/kernels)
+add_subdirectory(lightseq/inference/tools)
+add_subdirectory(lightseq/inference/proto)
+add_subdirectory(lightseq/inference/model)
+add_subdirectory(examples/inference/cpp)
 add_subdirectory(3rdparty/pybind11)
-add_subdirectory(pywrapper)
-add_subdirectory(server)
+add_subdirectory(lightseq/inference/pywrapper)
+add_subdirectory(lightseq/inference/server)
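
With the add_subdirectory paths now rooted at lightseq/inference/ and examples/inference/cpp, a standard out-of-source build from the repository root should still pick up every target. A minimal sketch, assuming the required toolchain (CUDA, protobuf, etc.) is already installed and leaving project options at their defaults; the build directory name and the plain cmake/make invocation are assumptions for illustration, not taken from this commit:

# Assumed out-of-source build from the repo root (not part of this commit);
# CMake resolves the new lightseq/inference/* subdirectories relative to here.
mkdir -p build && cd build
cmake ..
make -j"$(nproc)"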

README.md

Lines changed: 29 additions & 170 deletions
4 files renamed without changes.

docs/training/images/features.png
104 KB, renamed without changes

5 more files renamed without changes.

lightseq/training/examples/deepspeed/README.md renamed to examples/training/deepspeed/README.md

Lines changed: 1 addition & 1 deletion

lightseq/training/examples/deepspeed/ds_fairseq.py renamed to examples/training/deepspeed/ds_fairseq.py

Lines changed: 2 additions & 2 deletions
@@ -8,8 +8,8 @@
 from fairseq import tasks, distributed_utils
 from fairseq.logging import metrics

-from ds_fairseq_data import BatchIterator
-from ds_fairseq_argument import gen_ds_fairseq_arg
+from examples.training.deepspeed.ds_fairseq_data import BatchIterator
+from examples.training.deepspeed.ds_fairseq_argument import gen_ds_fairseq_arg


 best_bleu = 0.0
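
The imports are now package-style paths rooted at the repository top level, so the repo root has to be importable when the script runs; the updated ds_fairseq_wmt14en2de.sh below handles that by cd'ing into the root before calling deepspeed. A minimal stand-alone sketch of the same launch, where the checkout path and the explicit PYTHONPATH export are assumptions for illustration:

# Minimal launch sketch (assumed checkout path and PYTHONPATH export;
# the real script instead cd's to the repo root, see the diff below).
cd /path/to/lightseq                      # hypothetical checkout location
export PYTHONPATH=$PWD:${PYTHONPATH:-}    # make examples.training.* importable
deepspeed examples/training/deepspeed/ds_fairseq.py \
    /tmp/wmt14_en_de/ \
    --user-dir examples/training/fairseq/fs_modules
# ...remaining fairseq flags as in ds_fairseq_wmt14en2de.sh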

lightseq/training/examples/deepspeed/ds_fairseq_wmt14en2de.sh renamed to examples/training/deepspeed/ds_fairseq_wmt14en2de.sh

Lines changed: 6 additions & 6 deletions
@@ -1,17 +1,17 @@
 #!/usr/bin/env bash
 set -ex
 THIS_DIR=$(dirname $(readlink -f $0))
-cd $THIS_DIR/../..
+cd $THIS_DIR/../../..

-if [ ! -d "wmt14_en_de" ]; then
+if [ ! -d "/tmp/wmt14_en_de" ]; then
     echo "Downloading dataset"
-    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz
-    tar -zxvf databin_wmt14_en_de.tar.gz && rm databin_wmt14_en_de.tar.gz
+    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz -P /tmp
+    tar -zxvf /tmp/databin_wmt14_en_de.tar.gz -C /tmp && rm /tmp/databin_wmt14_en_de.tar.gz
 fi

 deepspeed ${THIS_DIR}/ds_fairseq.py \
-    ./wmt14_en_de/ \
-    --user-dir ./examples/fairseq/fs_modules \
+    /tmp/wmt14_en_de/ \
+    --user-dir ${THIS_DIR}/../fairseq/fs_modules \
     --arch ls_transformer_wmt_en_de_big_t2t --share-decoder-input-output-embed \
     --optimizer ls_adam --adam-betas '(0.9, 0.98)' --clip-norm 0.0 \
     --lr 5e-4 --lr-scheduler inverse_sqrt --warmup-updates 4000 \

lightseq/training/examples/fairseq/README.md renamed to examples/training/fairseq/README.md

Lines changed: 2 additions & 2 deletions

lightseq/training/examples/fairseq/ls_fairseq_wmt14en2de.sh renamed to examples/training/fairseq/ls_fairseq_wmt14en2de.sh

Lines changed: 5 additions & 5 deletions
@@ -1,15 +1,15 @@
 #!/usr/bin/env bash
 set -ex
 THIS_DIR=$(dirname $(readlink -f $0))
-cd $THIS_DIR/../../
+cd $THIS_DIR/../../..

-if [ ! -d "wmt14_en_de" ]; then
+if [ ! -d "/tmp/wmt14_en_de" ]; then
     echo "Downloading dataset"
-    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz
-    tar -zxvf databin_wmt14_en_de.tar.gz && rm databin_wmt14_en_de.tar.gz
+    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz -P /tmp
+    tar -zxvf /tmp/databin_wmt14_en_de.tar.gz -C /tmp && rm /tmp/databin_wmt14_en_de.tar.gz
 fi

-lightseq-train ./wmt14_en_de/ \
+lightseq-train /tmp/wmt14_en_de/ \
     --task translation \
     --arch ls_transformer_wmt_en_de_big_t2t --share-decoder-input-output-embed \
     --optimizer ls_adam --adam-betas '(0.9, 0.98)' \

lightseq/training/examples/fairseq/native_fairseq_wmt14en2de.sh renamed to examples/training/fairseq/native_fairseq_wmt14en2de.sh

Lines changed: 6 additions & 5 deletions
@@ -1,14 +1,15 @@
 #!/usr/bin/env bash
 set -ex
 THIS_DIR=$(dirname $(readlink -f $0))
-cd $THIS_DIR/../../
+cd $THIS_DIR/../../..

-if [ ! -d "wmt14_en_de" ]; then
-    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz
-    tar -zxvf databin_wmt14_en_de.tar.gz && rm databin_wmt14_en_de.tar.gz
+if [ ! -d "/tmp/wmt14_en_de" ]; then
+    echo "Downloading dataset"
+    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz -P /tmp
+    tar -zxvf /tmp/databin_wmt14_en_de.tar.gz -C /tmp && rm /tmp/databin_wmt14_en_de.tar.gz
 fi

-fairseq-train ./wmt14_en_de/ \
+fairseq-train /tmp/wmt14_en_de/ \
     --arch transformer_wmt_en_de_big_t2t --share-decoder-input-output-embed \
     --optimizer adam --adam-betas '(0.9, 0.98)' --clip-norm 0.0 \
     --lr 5e-4 --lr-scheduler inverse_sqrt --warmup-updates 4000 \
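
All three training scripts above now share the same dataset convention: the WMT14 En-De binaries are downloaded and extracted once under /tmp and reused on later runs. Pulled out of the scripts, the guarded download reduces to the sketch below (URL and paths are taken from the diffs; the DATA_DIR variable is introduced here only for illustration):

# Download-once pattern used by the updated scripts (DATA_DIR is an
# illustrative name, not used in the scripts themselves).
DATA_DIR=/tmp/wmt14_en_de
if [ ! -d "$DATA_DIR" ]; then
    echo "Downloading dataset"
    wget http://sf3-ttcdn-tos.pstatp.com/obj/nlp-opensource/lightseq/wmt_data/databin_wmt14_en_de.tar.gz -P /tmp
    tar -zxvf /tmp/databin_wmt14_en_de.tar.gz -C /tmp && rm /tmp/databin_wmt14_en_de.tar.gz
fi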
