Skip to content

Commit 146832a

Browse files
author
udaij12
committed
Merge branch 'master' into m1
2 parents a7ccdec + 616c1ad commit 146832a

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

68 files changed

+722
-547
lines changed

.github/workflows/ci-cpu-cpp.yml

+33-8
Original file line numberDiff line numberDiff line change
@@ -16,17 +16,42 @@ jobs:
1616
strategy:
1717
fail-fast: false
1818
matrix:
19-
os: [ubuntu-20.04, macOS-latest]
19+
os: [ubuntu-20.04]
2020
steps:
21+
# - name: Setup Python for M1
22+
# if: matrix.os == 'macos-14'
23+
# uses: actions/setup-python@v5
24+
# with:
25+
# python-version: '3.10'
26+
- name: Setup Python for all other OS
27+
if: matrix.os != 'macos-14'
28+
uses: actions/setup-python@v5
29+
with:
30+
python-version: 3.9
31+
architecture: x64
32+
- name: Setup Java 17
33+
uses: actions/setup-java@v3
34+
with:
35+
distribution: 'zulu'
36+
java-version: '17'
2137
- name: Checkout TorchServe
22-
uses: actions/checkout@v2
23-
- name: Install libtorch - macOS
24-
if: matrix.os == 'macOS-latest'
25-
run: |
26-
brew install libtorch
38+
uses: actions/checkout@v3
39+
with:
40+
submodules: recursive
41+
# - name: Install libtorch - macOS
42+
# if: matrix.os == 'macOS-latest'
43+
# run: |
44+
# brew install libtorch
2745
- name: Install dependencies
2846
run: |
29-
python ts_scripts/install_dependencies.py --environment=dev --cpp
47+
sudo apt update && python ts_scripts/install_dependencies.py --environment=dev --cpp
48+
- name: Install TorchServe
49+
run: |
50+
python ts_scripts/install_from_src.py
51+
- name: Print Env
52+
run: |
53+
python ts_scripts/print_env_info.py
3054
- name: Build
3155
run: |
32-
cd cpp && ./build.sh
56+
cd cpp && rm -rf _build && sudo mkdir /mnt/_build && sudo chmod 777 /mnt/_build && mkdir _build && sudo mount --bind /mnt/_build _build
57+
./build.sh

.github/workflows/torchserve-nightly-build.yml

+4
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,10 @@ jobs:
1010
steps:
1111
- name: Setup Conda
1212
uses: s-weigand/setup-conda@v1
13+
with:
14+
activate-conda: true
15+
update-conda: false
16+
python-version: "3.9"
1317
- run: conda --version
1418
- run: conda install -y conda-build anaconda-client
1519
- name: Checkout TorchServe

.gitmodules

-3
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,6 @@
11
[submodule "third_party/google/rpc"]
22
path = third_party/google/rpc
33
url = https://github.com/googleapis/googleapis.git
4-
[submodule "cpp/third-party/llama.cpp"]
5-
path = cpp/third-party/llama.cpp
6-
url = https://github.com/ggerganov/llama.cpp.git
74
[submodule "cpp/third-party/llama2.c"]
85
path = cpp/third-party/llama2.c
96
url = https://github.com/karpathy/llama2.c

MANIFEST.in

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
include ts/frontend/model-server.jar
22
include ts/cpp/bin/*
33
include ts/cpp/lib/*
4-
include ts/cpp/resources/logging.config
4+
include ts/cpp/resources/logging.yaml
55
include PyPiDescription.rst
66
include ts/configs/*
7-
include ts/version.txt
7+
include ts/version.txt

cpp/CMakeLists.txt

+33-12
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ project(torchserve_cpp VERSION 0.1)
44
set(CMAKE_CXX_STANDARD 17)
55
set(CMAKE_CXX_STANDARD_REQUIRED True)
66
set(CMAKE_CXX_EXTENSIONS OFF)
7-
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W -Wall -Wextra -fPIC -D_GLIBCXX_USE_CXX11_ABI=1")
7+
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W -Wall -Wextra -fPIC")
88

99
find_program(CLANG_TIDY_EXE NAMES "clang-tidy" REQUIRED)
1010
set(CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_EXE}")
@@ -18,35 +18,56 @@ if(CLANG_FORMAT_EXE)
1818
${PROJECT_SOURCE_DIR}/test/*.hh
1919
)
2020

21-
add_custom_target(format
22-
COMMAND
23-
${CLANG_FORMAT_EXE} -i -style=google ${ALL_CXX_SOURCE_FILES}
24-
)
2521
endif()
2622

2723

2824
find_package(Boost REQUIRED)
29-
find_package(folly REQUIRED)
30-
find_package(fmt REQUIRED)
31-
find_package(gflags REQUIRED)
3225
find_package(Torch REQUIRED)
3326

27+
include(FetchContent)
28+
29+
FetchContent_Declare(
30+
spdlog
31+
GIT_REPOSITORY https://github.com/gabime/spdlog
32+
GIT_TAG v1.13.0
33+
)
34+
FetchContent_GetProperties(spdlog)
35+
36+
if(NOT spdlog_POPULATED)
37+
message(STATUS "Fetching spdlog...")
38+
FetchContent_Populate(spdlog)
39+
add_subdirectory(${spdlog_SOURCE_DIR} ${spdlog_BINARY_DIR})
40+
endif()
41+
42+
43+
FetchContent_Declare(
44+
json
45+
GIT_REPOSITORY https://github.com/nlohmann/json
46+
GIT_TAG v3.11.3
47+
)
48+
FetchContent_GetProperties(json)
49+
50+
if(NOT json_POPULATED)
51+
message(STATUS "Fetching json...")
52+
FetchContent_Populate(json)
53+
add_subdirectory(${json_SOURCE_DIR} ${json_BINARY_DIR})
54+
endif()
55+
56+
include_directories("${json_SOURCE_DIR}/include" "${spdlog_SOURCE_DIR}/include")
57+
3458
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")
3559

3660
include_directories(${TORCH_INCLUDE_DIRS})
37-
include_directories(${FOLLY_INCLUDE_DIRS})
3861
include_directories(${GTEST_INCLUDE_DIRS})
3962
include_directories(${GMOCK_INCLUDE_DIRS})
4063

4164
include_directories("${CMAKE_INSTALL_PREFIX}/_deps")
4265
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
4366

44-
set(FOLLY_LIBRARIES Folly::folly)
45-
4667
# add subdirectories in src
4768
add_subdirectory(src/utils)
4869
add_subdirectory(src/backends)
4970
add_subdirectory(src/examples)
5071
add_subdirectory(test)
5172

52-
FILE(COPY src/resources/logging.config DESTINATION "${CMAKE_INSTALL_PREFIX}/resources")
73+
FILE(COPY src/resources/logging.yaml DESTINATION "${CMAKE_INSTALL_PREFIX}/resources")

cpp/README.md

+18-15
Original file line numberDiff line numberDiff line change
@@ -3,38 +3,41 @@
33
* C++17
44
* GCC version: gcc-9
55
* cmake version: 3.18+
6+
* Linux
67
## Installation and Running TorchServe CPP
8+
These installation instructions assume that TorchServe is already installed through pip/conda/source. If this is not the case, install it after the `Install dependencies` step through your preferred method.
79

810
### Install dependencies
911
```
1012
cd serve
1113
python ts_scripts/install_dependencies.py --cpp --environment dev [--cuda=cu121|cu118]
1214
```
1315
### Building the backend
16+
Don't forget to install or update TorchServe at this point if it wasn't previously installed. E.g. with:
1417
```
15-
## Dev Build
16-
cd cpp
17-
./build.sh [-g cu121|cu118]
18-
19-
## Install TorchServe from source
20-
cd ..
2118
python ts_scripts/install_from_src.py
2219
```
23-
### Set Environment Var
24-
#### On Mac
25-
```
26-
export DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
27-
```
28-
#### On Ubuntu
20+
21+
Then build the backend:
2922
```
30-
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
23+
## Dev Build
24+
cd cpp
25+
./build.sh [-g cu121|cu118]
3126
```
3227

3328
### Run TorchServe
3429
```
3530
mkdir model_store
31+
export LD_LIBRARY_PATH=`python -c "import torch;from pathlib import Path;p=Path(torch.__file__);print(f\"{(p.parent / 'lib').as_posix()}:{(p.parents[1] / 'nvidia/nccl/lib').as_posix()}\")"`:$LD_LIBRARY_PATH
3632
torchserve --ncs --start --model-store model_store
3733
```
34+
35+
### Clean the build directory
36+
To clean the build directory in order to rebuild from scratch, simply delete the cpp/_build directory with
37+
```
38+
rm -rf cpp/_build
39+
```
40+
3841
## Backend
3942
The TorchServe C++ backend can run as a process, similar to the [TorchServe Python backend](https://github.com/pytorch/serve/tree/master/ts). By default, TorchServe supports TorchScript models in the C++ backend. Other platforms, such as MXNet and ONNX, can be supported through custom handlers following the TorchScript example [src/backends/handler/torch_scripted_handler.hh](https://github.com/pytorch/serve/blob/master/cpp/src/backends/handler/torch_scripted_handler.hh).
4043
### Custom Handler
@@ -89,11 +92,11 @@ python -c "import ts; from pathlib import Path; print((Path(ts.__file__).parent
8992
3. Make sure that the conda/venv environment you activate during the build is the same one you use to run TorchServe.
9093
9194
Q: Build on Mac fails with `Library not loaded: @rpath/libomp.dylib`
92-
A: Install libomp with brew and link in /usr/local/lib
95+
A: Install libomp with brew and link in /usr/local/lib
9396
```bash
9497
brew install libomp
9598
sudo ln -s /opt/homebrew/opt/libomp/lib/libomp.dylib /usr/local/lib/libomp.dylib
9699
```
97100
98101
Q: When loading a handler which uses a model exported with torch._export.aot_compile the handler dies with "error: Error in dlopen: MODEL.SO : undefined symbol: SOME_SYMBOL".
99-
A: Make sure that you are using matching libtorch and Pytorch versions for inference and export, respectively.
102+
A: Make sure that you are using matching libtorch and Pytorch versions for inference and export, respectively.

0 commit comments

Comments
 (0)