@@ -155,37 +155,6 @@ function install_yaml_cpp() {
   cd "$BWD" || exit
 }

-function install_sentencepiece() {
-  SENTENCEPIECE_SRC_DIR=$BASE_DIR/third-party/sentencepiece
-  SENTENCEPIECE_BUILD_DIR=$DEPS_DIR/sentencepiece-build
-
-  if [ ! -d "$SENTENCEPIECE_SRC_DIR" ] ; then
-    echo -e "${COLOR_GREEN}[ INFO ] Cloning sentencepiece repo ${COLOR_OFF}"
-    git clone https://github.com/google/sentencepiece.git "$SENTENCEPIECE_SRC_DIR"
-    cd $SENTENCEPIECE_SRC_DIR
-    git checkout tags/v0.1.99
-  fi
-
-  if [ ! -d "$SENTENCEPIECE_BUILD_DIR" ] ; then
-    echo -e "${COLOR_GREEN}[ INFO ] Building sentencepiece ${COLOR_OFF}"
-
-    mkdir $SENTENCEPIECE_BUILD_DIR
-    cd $SENTENCEPIECE_BUILD_DIR
-    cmake $SENTENCEPIECE_SRC_DIR
-    make -i $(nproc)
-    if [ "$PLATFORM" = "Linux" ]; then
-      sudo make install
-      sudo ldconfig -v
-    elif [ "$PLATFORM" = "Mac" ]; then
-      make install
-    fi
-
-    echo -e "${COLOR_GREEN}[ INFO ] sentencepiece is installed ${COLOR_OFF}"
-  fi
-
-  cd "$BWD" || exit
-}
-
 function build_llama_cpp() {
   BWD=$(pwd)
   LLAMA_CPP_SRC_DIR=$BASE_DIR/third-party/llama.cpp
@@ -208,14 +177,23 @@ function prepare_test_files() {
   if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin" ]; then
     wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
   fi
-  if [ ! -f "${EX_DIR}/aot_inductor/llama_handler/stories15M.so" ]; then
-    local HANDLER_DIR=${EX_DIR}/aot_inductor/llama_handler/
-    if [ ! -f "${HANDLER_DIR}/stories15M.pt" ]; then
-      wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
+  # PT2.2 torch.export does not support Mac
+  if [ "$PLATFORM" = "Linux" ]; then
+    if [ ! -f "${EX_DIR}/aot_inductor/llama_handler/stories15M.so" ]; then
+      local HANDLER_DIR=${EX_DIR}/aot_inductor/llama_handler/
+      if [ ! -f "${HANDLER_DIR}/stories15M.pt" ]; then
+        wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
+      fi
+      local LLAMA_SO_DIR=${BASE_DIR}/third-party/llama2.so/
+      PYTHONPATH=${LLAMA_SO_DIR}:${PYTHONPATH} python ${BASE_DIR}/../examples/cpp/aot_inductor/llama2/compile.py --checkpoint ${HANDLER_DIR}/stories15M.pt ${HANDLER_DIR}/stories15M.so
+    fi
+    if [ ! -f "${EX_DIR}/aot_inductor/resnet_handler/resne50_pt2.so" ]; then
+      local HANDLER_DIR=${EX_DIR}/aot_inductor/resnet_handler/
+      cd ${HANDLER_DIR}
+      python ${BASE_DIR}/../examples/cpp/aot_inductor/resnet/resnet50_torch_export.py
     fi
-    local LLAMA_SO_DIR=${BASE_DIR}/third-party/llama2.so/
-    PYTHONPATH=${LLAMA_SO_DIR}:${PYTHONPATH} python ${BASE_DIR}/../examples/cpp/aot_inductor/llama2/compile.py --checkpoint ${HANDLER_DIR}/stories15M.pt ${HANDLER_DIR}/stories15M.so
   fi
+  cd "$BWD" || exit
 }

 function build() {
@@ -401,7 +379,6 @@ install_folly
 install_kineto
 install_libtorch
 install_yaml_cpp
-install_sentencepiece
 build_llama_cpp
 prepare_test_files
 build
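
For reference, the new Linux-only branch in prepare_test_files calls resnet50_torch_export.py to produce the AOTInductor shared library. Below is a minimal, hypothetical sketch of the kind of torch.export/AOTInductor compile that script relies on, assuming PyTorch 2.2-era APIs; the model setup, device, and output filename are illustrative and may differ from the actual script in examples/cpp/aot_inductor/resnet.

```python
# Hypothetical sketch of an AOTInductor export step (PyTorch 2.2-era API).
# Names and the output path are illustrative, not the repo's actual script.
import torch
import torchvision.models as models

# Load a pretrained ResNet50 in eval mode and prepare example inputs
# so torch.export can trace a static graph.
model = models.resnet50(weights=models.ResNet50_Weights.DEFAULT).eval()
example_inputs = (torch.randn(1, 3, 224, 224),)

# aot_compile exports the model and lowers it through AOTInductor,
# emitting a shared library that a C++ handler can load at runtime.
so_path = torch._export.aot_compile(
    model,
    example_inputs,
    options={"aot_inductor.output_path": "resnet50_pt2.so"},
)
print(so_path)
```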