diff --git a/.gitignore b/.gitignore index a34c0d5753..2eb6e5e1a2 100644 --- a/.gitignore +++ b/.gitignore @@ -47,4 +47,7 @@ python/fastdeploy/code_version.py *.pdmodel *.pdiparams *.pdiparams.info -log.txt \ No newline at end of file +log.txt +serving/build +serving/build.encrypt +serving/build.encrypt.auth \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index ae6937ebaf..6b10ab7eaa 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -392,7 +392,11 @@ if(ENABLE_TRT_BACKEND) if(EXISTS "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib") file(REMOVE_RECURSE "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib") endif() - find_package(Python COMPONENTS Interpreter Development REQUIRED) + + if (NOT Python_EXECUTABLE) + find_package(Python COMPONENTS Interpreter Development REQUIRED) + endif() + message(STATUS "Copying ${TRT_DIRECTORY}/lib to ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib ...") execute_process(COMMAND ${Python_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/copy_directory.py ${TRT_DIRECTORY}/lib ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib) file(GLOB_RECURSE TRT_STATIC_LIBS ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib/*.a) @@ -515,12 +519,9 @@ if(MSVC) endif() target_link_libraries(${LIBRARY_NAME} ${DEPEND_LIBS}) -# Note(qiuyanjun): Currently, we need to manually link the whole -# leveldb static lib into fastdeploy lib if PADDLEINFERENCE_WITH_ENCRYPT_AUTH -# is 'ON'. Will remove this policy while the bug of paddle inference lib with -# auth & encrypt fixed. 
+ if(ENABLE_PADDLE_BACKEND) - enable_paddle_encrypt_auth_link_policy(${LIBRARY_NAME}) + set_paddle_encrypt_auth_link_policy(${LIBRARY_NAME}) endif() if(ANDROID) diff --git a/FastDeploy.cmake.in b/FastDeploy.cmake.in index c39546ab3c..4b1e9c2d39 100644 --- a/FastDeploy.cmake.in +++ b/FastDeploy.cmake.in @@ -63,7 +63,8 @@ set(WITH_ANDROID_JAVA @WITH_ANDROID_JAVA@) set(WITH_ANDROID_TENSOR_FUNCS @WITH_ANDROID_TENSOR_FUNCS@) # encryption and auth -set(PADDLEINFERENCE_WITH_ENCRYPT_AUTH @PADDLEINFERENCE_WITH_ENCRYPT_AUTH@) +set(PADDLEINFERENCE_WITH_ENCRYPT @PADDLEINFERENCE_WITH_ENCRYPT@) +set(PADDLEINFERENCE_WITH_AUTH @PADDLEINFERENCE_WITH_AUTH@) set(FASTDEPLOY_LIBS "") set(FASTDEPLOY_INCS "") @@ -152,15 +153,21 @@ if(ENABLE_PADDLE_BACKEND) else() set(DNNL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mkldnn/lib/libmkldnn.so.0") set(IOMP_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mklml/lib/libiomp5.so") - if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH) - set(FDMODEL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so") - set(FDMODEL_AUTH_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_auth.so") - set(FDMODEL_MODEL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0") - set(LEVELDB_LIB_DIR "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/leveldb/lib/") - list(APPEND FASTDEPLOY_LIBS ${FDMODEL_LIB} ${FDMODEL_AUTH_LIB} ${FDMODEL_MODEL_LIB}) - # link_directories(LEVELDB_LIB_DIR) - # list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto -lleveldb) - list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto) + set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so") + set(FDMODEL_MODEL_LIB 
"${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0") + set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so") + if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_MODEL_LIB})) + set(PADDLEINFERENCE_WITH_ENCRYPT ON CACHE BOOL "" FORCE) + list(APPEND FASTDEPLOY_LIBS ${FDMODEL_LIB} ${FDMODEL_MODEL_LIB}) + endif() + if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_AUTH_LIB})) + set(PADDLEINFERENCE_WITH_AUTH ON CACHE BOOL "" FORCE) + list(APPEND FASTDEPLOY_LIBS ${FDMODEL_AUTH_LIB}) + endif() + if(PADDLEINFERENCE_WITH_ENCRYPT OR PADDLEINFERENCE_WITH_AUTH) + if(WITH_KUNLUNXIN) + list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto) + endif() endif() endif() list(APPEND FASTDEPLOY_LIBS ${PADDLE_LIB}) diff --git a/cmake/paddle_inference.cmake b/cmake/paddle_inference.cmake index 0aa715d7ac..bf79ad8202 100755 --- a/cmake/paddle_inference.cmake +++ b/cmake/paddle_inference.cmake @@ -22,7 +22,6 @@ endif() # Custom options for Paddle Inference backend option(PADDLEINFERENCE_DIRECTORY "Directory of custom Paddle Inference library" OFF) -option(PADDLEINFERENCE_WITH_ENCRYPT_AUTH "Whether the Paddle Inference is built with FD encryption and auth" OFF) set(PADDLEINFERENCE_PROJECT "extern_paddle_inference") set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference) @@ -43,44 +42,15 @@ if(PADDLEINFERENCE_DIRECTORY) endif() include_directories(${PADDLEINFERENCE_INC_DIR}) -if(WIN32) - set(PADDLEINFERENCE_COMPILE_LIB - "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib" - CACHE FILEPATH "paddle_inference compile library." 
FORCE) - set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib") - set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib") - set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib") - set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib") -elseif(APPLE) - set(PADDLEINFERENCE_COMPILE_LIB - "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib" - CACHE FILEPATH "paddle_inference compile library." FORCE) - set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2") - set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so") - set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib") - set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib") -else() - set(PADDLEINFERENCE_COMPILE_LIB - "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" - CACHE FILEPATH "paddle_inference compile library." 
FORCE) - set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2") - set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so") - set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so") - set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so") - if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH) - set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so") - set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so") - set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0") - set(LEVELDB_LIB_DIR "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb/lib") - endif() -endif(WIN32) if(PADDLEINFERENCE_DIRECTORY) # Use custom Paddle Inference libs. if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference") file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference") endif() - find_package(Python COMPONENTS Interpreter Development REQUIRED) + if(NOT Python_EXECUTABLE) + find_package(Python COMPONENTS Interpreter Development REQUIRED) + endif() message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...") if(WIN32) execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install) @@ -166,6 +136,51 @@ else() endif(PADDLEINFERENCE_DIRECTORY) +# check libs +set(PADDLEINFERENCE_WITH_AUTH OFF) +set(PADDLEINFERENCE_WITH_ENCRYPT OFF) +if(WIN32) + set(PADDLEINFERENCE_COMPILE_LIB + "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib" + CACHE FILEPATH "paddle_inference compile library." 
FORCE) + set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib") + set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib") + set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib") + set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib") +elseif(APPLE) + set(PADDLEINFERENCE_COMPILE_LIB + "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib" + CACHE FILEPATH "paddle_inference compile library." FORCE) + set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2") + set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so") + set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib") + set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib") +else() + set(PADDLEINFERENCE_COMPILE_LIB + "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" + CACHE FILEPATH "paddle_inference compile library." FORCE) + set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2") + set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so") + set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so") + set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so") + # Check whether the encrypt and auth tools exist. Only supports PADDLEINFERENCE_DIRECTORY now. 
+ if(PADDLEINFERENCE_DIRECTORY) + set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so") + set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0") + set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so") + set(FDMODEL_LEVELDB_LIB_DIR "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb") + set(FDMODEL_LEVELDB_LIB_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb/lib/libleveldb.a") + if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_MODEL_LIB})) + set(PADDLEINFERENCE_WITH_ENCRYPT ON CACHE BOOL "" FORCE) + message(STATUS "Detected ${FDMODEL_LIB} and ${FDMODEL_MODEL_LIB} exist, force PADDLEINFERENCE_WITH_ENCRYPT=${PADDLEINFERENCE_WITH_ENCRYPT}") + endif() + if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_AUTH_LIB})) + set(PADDLEINFERENCE_WITH_AUTH ON CACHE BOOL "" FORCE) + message(STATUS "Detected ${FDMODEL_LIB} and ${FDMODEL_AUTH_LIB} exist, force PADDLEINFERENCE_WITH_AUTH=${PADDLEINFERENCE_WITH_AUTH}") + endif() + endif() +endif(WIN32) + # Path Paddle Inference ELF lib file if(UNIX AND (NOT APPLE) AND (NOT ANDROID)) add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME}) @@ -198,28 +213,37 @@ set_property(TARGET external_omp PROPERTY IMPORTED_LOCATION add_dependencies(external_omp ${PADDLEINFERENCE_PROJECT}) set(ENCRYPT_AUTH_LIBS ) -if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH) +if(PADDLEINFERENCE_WITH_ENCRYPT) add_library(external_fdmodel STATIC IMPORTED GLOBAL) set_property(TARGET external_fdmodel PROPERTY IMPORTED_LOCATION ${FDMODEL_LIB}) - add_library(external_fdmodel_auth STATIC IMPORTED GLOBAL) - set_property(TARGET external_fdmodel_auth PROPERTY IMPORTED_LOCATION - ${FDMODEL_AUTH_LIB}) + 
add_library(external_fdmodel_model STATIC IMPORTED GLOBAL) set_property(TARGET external_fdmodel_model PROPERTY IMPORTED_LOCATION ${FDMODEL_MODEL_LIB}) - add_dependencies(external_fdmodel ${PADDLEINFERENCE_PROJECT}) - add_dependencies(external_fdmodel_auth ${PADDLEINFERENCE_PROJECT}) - add_dependencies(external_fdmodel_model ${PADDLEINFERENCE_PROJECT}) - list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel external_fdmodel_auth external_fdmodel_model) + list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel external_fdmodel_model) +endif() + +if(PADDLEINFERENCE_WITH_AUTH) + add_library(external_fdmodel_auth STATIC IMPORTED GLOBAL) + set_property(TARGET external_fdmodel_auth PROPERTY IMPORTED_LOCATION + ${FDMODEL_AUTH_LIB}) + list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel_auth) endif() -function(enable_paddle_encrypt_auth_link_policy LIBRARY_NAME) - if(ENABLE_PADDLE_BACKEND AND PADDLEINFERENCE_WITH_ENCRYPT_AUTH) - link_directories(${LEVELDB_LIB_DIR}) - target_link_libraries(${LIBRARY_NAME} ${ENCRYPT_AUTH_LIBS} -lssl -lcrypto) - target_link_libraries(${LIBRARY_NAME} ${LEVELDB_LIB_DIR}/libleveldb.a) - set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS - "-Wl,--whole-archive ${LEVELDB_LIB_DIR}/libleveldb.a -Wl,-no-whole-archive") +function(set_paddle_encrypt_auth_link_policy LIBRARY_NAME) + if(ENABLE_PADDLE_BACKEND AND (PADDLEINFERENCE_WITH_ENCRYPT OR PADDLEINFERENCE_WITH_AUTH)) + target_link_libraries(${LIBRARY_NAME} ${ENCRYPT_AUTH_LIBS}) + # Note(qiuyanjun): Currently, for XPU, we need to manually link the whole + # leveldb static lib into fastdeploy lib if PADDLEINFERENCE_WITH_ENCRYPT + # or PADDLEINFERENCE_WITH_AUTH is 'ON'. Will remove this policy while + # the bug of paddle inference lib with auth & encrypt fixed. 
+ if((EXISTS ${FDMODEL_LEVELDB_LIB_LIB}) AND WITH_KUNLUNXIN) + target_link_libraries(${LIBRARY_NAME} -lssl -lcrypto) + link_directories(${FDMODEL_LEVELDB_LIB_DIR}) + target_link_libraries(${LIBRARY_NAME} ${FDMODEL_LEVELDB_LIB_LIB}) + set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS + "-Wl,--whole-archive ${FDMODEL_LEVELDB_LIB_LIB} -Wl,-no-whole-archive") + endif() endif() endfunction() diff --git a/cmake/summary.cmake b/cmake/summary.cmake index 935e5910b1..9e53c3be65 100755 --- a/cmake/summary.cmake +++ b/cmake/summary.cmake @@ -62,6 +62,8 @@ function(fastdeploy_summary) endif() if(ENABLE_PADDLE_BACKEND) message(STATUS " Paddle Inference version : ${PADDLEINFERENCE_VERSION}") + message(STATUS " PADDLE_WITH_ENCRYPT : ${PADDLEINFERENCE_WITH_ENCRYPT}") + message(STATUS " PADDLE_WITH_AUTH : ${PADDLEINFERENCE_WITH_AUTH}") endif() if(ENABLE_POROS_BACKEND) message(STATUS " Poros version : ${POROS_VERSION}") diff --git a/scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh b/scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh new file mode 100755 index 0000000000..527d2f21c3 --- /dev/null +++ b/scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh @@ -0,0 +1,106 @@ +#!/usr/bin/env bash +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -e +set +x + +# ------------------------------------------------------------------------------- +# readonly global variables +# ------------------------------------------------------------------------------- +readonly ROOT_PATH=$(pwd) +readonly BUILD_ROOT=build/Linux +readonly BUILD_DIR="${BUILD_ROOT}/x86_64_gpu" + +# ------------------------------------------------------------------------------- +# tasks +# ------------------------------------------------------------------------------- +__make_build_dir() { + if [ ! -d "${BUILD_DIR}" ]; then + echo "-- [INFO] BUILD_DIR: ${BUILD_DIR} not exists, setup manually ..." + if [ ! -d "${BUILD_ROOT}" ]; then + mkdir -p "${BUILD_ROOT}" && echo "-- [INFO] Created ${BUILD_ROOT} !" + fi + mkdir -p "${BUILD_DIR}" && echo "-- [INFO] Created ${BUILD_DIR} !" + else + echo "-- [INFO] Found BUILD_DIR: ${BUILD_DIR}" + fi +} + +__check_cxx_envs() { + if [ $LDFLAGS ]; then + echo "-- [INFO] Found LDFLAGS: ${LDFLAGS}, \c" + echo "unset it before crossing compiling ${BUILD_DIR}" + unset LDFLAGS + fi + if [ $CPPFLAGS ]; then + echo "-- [INFO] Found CPPFLAGS: ${CPPFLAGS}, \c" + echo "unset it before crossing compiling ${BUILD_DIR}" + unset CPPFLAGS + fi + if [ $CPLUS_INCLUDE_PATH ]; then + echo "-- [INFO] Found CPLUS_INCLUDE_PATH: ${CPLUS_INCLUDE_PATH}, \c" + echo "unset it before crossing compiling ${BUILD_DIR}" + unset CPLUS_INCLUDE_PATH + fi + if [ $C_INCLUDE_PATH ]; then + echo "-- [INFO] Found C_INCLUDE_PATH: ${C_INCLUDE_PATH}, \c" + echo "unset it before crossing compiling ${BUILD_DIR}" + unset C_INCLUDE_PATH + fi +} + +__build_fastdeploy_linux_x86_64_gpu_shared_custom_paddle() { + + local FASDEPLOY_INSTALL_DIR="${ROOT_PATH}/${BUILD_DIR}/fastdeploy_install" + cd "${BUILD_DIR}" && echo "-- [INFO] Working Dir: ${PWD}" + + cmake -DCMAKE_BUILD_TYPE=Release \ + -DWITH_GPU=ON \ + -DTRT_DIRECTORY=${TRT_DIRECTORY} \ + -DCUDA_DIRECTORY=${CUDA_DIRECTORY} \ + -DENABLE_ORT_BACKEND=ON \ + -DENABLE_TRT_BACKEND=ON \ + 
-DENABLE_PADDLE_BACKEND=ON \ + -DPADDLEINFERENCE_DIRECTORY=${PADDLEINFERENCE_DIRECTORY} \ + -DPADDLEINFERENCE_VERSION=${PADDLEINFERENCE_VERSION} \ + -DENABLE_OPENVINO_BACKEND=ON \ + -DENABLE_PADDLE2ONNX=ON \ + -DENABLE_VISION=OFF \ + -DENABLE_BENCHMARK=OFF \ + -DBUILD_EXAMPLES=OFF \ + -DPython_EXECUTABLE=/usr/bin/python3 \ + -DCMAKE_INSTALL_PREFIX=${FASDEPLOY_INSTALL_DIR} \ + -DLIBRARY_NAME=fastdeploy_runtime \ + -Wno-dev ../../.. && make -j8 && make install + + echo "-- [INFO][built][x86_64_gpu][${FASDEPLOY_INSTALL_DIR}]" + echo "-- [INFO][${PADDLEINFERENCE_DIRECTORY}][${PADDLEINFERENCE_VERSION}]" +} + +main() { + __make_build_dir + __check_cxx_envs + __build_fastdeploy_linux_x86_64_gpu_shared_custom_paddle + exit 0 +} + +main + +# Usage: +# export PADDLEINFERENCE_DIRECTORY=xxx +# export PADDLEINFERENCE_VERSION=xxx +# export CUDA_DIRECTORY=/usr/local/cuda +# export TRT_DIRECTORY=/home/qiuyanjun/TensorRT-8.5.2.2 +# ./scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh \ No newline at end of file diff --git a/serving/Dockerfile b/serving/Dockerfile index cfc368afdb..e7afb432c8 100755 --- a/serving/Dockerfile +++ b/serving/Dockerfile @@ -80,4 +80,9 @@ COPY build/fastdeploy_install/* /opt/fastdeploy/ # Set environment variable ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib/:/opt/fastdeploy/third_libs/install/tensorrt/lib/:/opt/fastdeploy/third_libs/install/opencv/lib64/:$LD_LIBRARY_PATH" -ENV PATH="/opt/tritonserver/bin:$PATH" \ No newline at end of file +ENV PATH="/opt/tritonserver/bin:$PATH" + +ENV http_proxy= +ENV https_proxy= +ENV no_proxy= +ENV TZ=Asia/Shanghai \ No newline at end of file diff --git a/serving/Dockerfile_CUDA_11_2_TRT_8_5_PADDLE_2_4_2 b/serving/Dockerfile_CUDA_11_2_TRT_8_5_PADDLE_2_4_2 index e5d509fe74..39d47d74fb 100644 --- 
a/serving/Dockerfile_CUDA_11_2_TRT_8_5_PADDLE_2_4_2 +++ b/serving/Dockerfile_CUDA_11_2_TRT_8_5_PADDLE_2_4_2 @@ -58,5 +58,5 @@ RUN python3 -m pip install https://paddle-wheel.bj.bcebos.com/2.4.2/linux/linux- COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/ COPY build/fastdeploy_install /opt/fastdeploy/ -ENV LD_LIBRARY_PATH="/opt/TensorRT-8.5.2.2/lib/:/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH" +ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH" ENV PATH="/opt/tritonserver/bin:$PATH" diff --git a/serving/Dockerfile_CUDA_11_4_TRT_8_4 b/serving/Dockerfile_CUDA_11_4_TRT_8_4 index 22087b1c80..1be59c8d10 100644 --- a/serving/Dockerfile_CUDA_11_4_TRT_8_4 +++ b/serving/Dockerfile_CUDA_11_4_TRT_8_4 @@ -55,5 +55,5 @@ RUN python3 -m pip install paddlepaddle-gpu==2.4.1.post112 -f https://www.paddle COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/ COPY build/fastdeploy_install /opt/fastdeploy/ -ENV 
LD_LIBRARY_PATH="/opt/TensorRT-8.4.1.5/lib/:/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH" +ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH" ENV PATH="/opt/tritonserver/bin:$PATH" diff --git a/serving/scripts/build_fd_xpu_encrypt_auth.sh b/serving/scripts/build_fd_xpu_encrypt_auth.sh index 2fd1e2b1a0..824147b7fb 100755 --- a/serving/scripts/build_fd_xpu_encrypt_auth.sh +++ b/serving/scripts/build_fd_xpu_encrypt_auth.sh @@ -41,8 +41,11 @@ docker run -i --rm --name build_fd_xpu_auth_dev \ python setup.py build; python setup.py bdist_wheel; cd /workspace/fastdeploy; + wget ${PADDLEINFERENCE_URL} && tar -zxvf ${PADDLEINFERENCE_URL##*/} + mv ${PADDLEINFERENCE_URL##*/} paddle_inference + PADDLEINFERENCE_DIRECTORY=${PWD}/paddle_inference rm -rf build; mkdir build; cd build; - cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${PWD}/fastdeploy_install -DWITH_KUNLUNXIN=ON -DENABLE_PADDLE_BACKEND=ON -DPADDLEINFERENCE_URL=${PADDLEINFERENCE_URL} -DPADDLEINFERENCE_WITH_ENCRYPT_AUTH=ON -DENABLE_VISION=ON -DENABLE_BENCHMARK=ON -DLIBRARY_NAME=fastdeploy_runtime; + cmake .. 
-DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${PWD}/fastdeploy_install -DWITH_KUNLUNXIN=ON -DENABLE_PADDLE_BACKEND=ON -DPADDLEINFERENCE_DIRECTORY=${PADDLEINFERENCE_DIRECTORY} -DENABLE_BENCHMARK=ON -DLIBRARY_NAME=fastdeploy_runtime; make -j`nproc`; make install; # fix the link error of libbkcl.so diff --git a/serving/scripts/build_triton_fd_backend.sh b/serving/scripts/build_triton_fd_backend.sh new file mode 100755 index 0000000000..a96166d928 --- /dev/null +++ b/serving/scripts/build_triton_fd_backend.sh @@ -0,0 +1,8 @@ +# This script offers a demo to build triton fastdeploy backend only. + +cd serving +rm -rf build && mkdir build + +cd build +cmake .. -DFASTDEPLOY_DIR=${FD_GPU_SDK} -DTRITON_COMMON_REPO_TAG=r21.10 -DTRITON_CORE_REPO_TAG=r21.10 -DTRITON_BACKEND_REPO_TAG=r21.10; +make -j`nproc` \ No newline at end of file