diff --git a/.gitignore b/.gitignore
index dde3895..16a60a0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,4 @@
 .DS_Store
 *.pyc
+build/
+build_*/
diff --git a/README.md b/README.md
index d69069f..ce49f59 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,6 @@ Here mainly describes how to deploy PaddlePaddle to the mobile end, as well as s
 - [Build PaddlePaddle for Raspberry Pi3](https://github.com/PaddlePaddle/Paddle/blob/develop/doc/howto/cross_compiling/cross_compiling_for_raspberry_cn.md)
 - Build PaddlePaddle for PX2
 - How to build PaddlePaddle mobile inference library with minimum size.
-- How to build PaddlePaddle with NNPACK.
 
 ## Deployment optimization methods
 - [Merge batch normalization before deploying the model to the mobile.](./tool/merge_batch_normalization/README.md)
diff --git a/benchmark/tool/C/CMakeLists.txt b/benchmark/tool/C/CMakeLists.txt
new file mode 100644
index 0000000..819cc93
--- /dev/null
+++ b/benchmark/tool/C/CMakeLists.txt
@@ -0,0 +1,50 @@
+cmake_minimum_required(VERSION 3.0)
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/")
+
+
+if(ANDROID_ABI)
+  set(ANDROID_COMPILER_FLAGS -ffunction-sections -fdata-sections)
+  set(ANDROID_LINKER_FLAGS -Wl,--gc-sections)
+
+  if(ANDROID_ABI STREQUAL "armeabi-v7a")
+    set(CMAKE_SYSROOT "${ANDROID_STANDALONE_TOOLCHAIN}/sysroot")
+    set(ANDROID_TOOLCHAIN_PREFIX
+        "${ANDROID_STANDALONE_TOOLCHAIN}/bin/arm-linux-androideabi")
+    list(APPEND ANDROID_COMPILER_FLAGS -mfpu=neon)
+    list(APPEND ANDROID_LINKER_FLAGS -Wl,--fix-cortex-a8)
+  elseif(ANDROID_ABI STREQUAL "arm64-v8a")
+    set(CMAKE_SYSROOT "${ANDROID_STANDALONE_TOOLCHAIN}/sysroot")
+    set(ANDROID_TOOLCHAIN_PREFIX
+        "${ANDROID_STANDALONE_TOOLCHAIN}/bin/aarch64-linux-android")
+    list(APPEND ANDROID_COMPILER_FLAGS -march=armv8-a)
+  endif()
+  string(REPLACE ";" " " ANDROID_COMPILER_FLAGS "${ANDROID_COMPILER_FLAGS}")
+  string(REPLACE ";" " " ANDROID_LINKER_FLAGS "${ANDROID_LINKER_FLAGS}")
+
+  set(CMAKE_C_FLAGS "${ANDROID_COMPILER_FLAGS} ${CMAKE_C_FLAGS}" CACHE STRING "C flags")
+  set(CMAKE_CXX_FLAGS "${ANDROID_COMPILER_FLAGS} ${CMAKE_CXX_FLAGS}" CACHE STRING "CXX flags")
+  set(CMAKE_SHARED_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}" CACHE STRING "shared linker flags")
+  set(CMAKE_EXE_LINKER_FLAGS "-pie -fPIE ${ANDROID_LINKER_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}" CACHE STRING "executable linker flags")
+
+  set(CMAKE_C_COMPILER ${ANDROID_TOOLCHAIN_PREFIX}-gcc CACHE PATH "C compiler" FORCE)
+  set(CMAKE_CXX_COMPILER ${ANDROID_TOOLCHAIN_PREFIX}-g++ CACHE PATH "CXX compiler" FORCE)
+endif()
+project(inference CXX C)
+include(FindPaddle)
+
+aux_source_directory(. SRC_LIST)
+add_executable(${PROJECT_NAME} ${SRC_LIST})
+
+set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS)
+set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS)
+if(ANDROID_ABI)
+  target_link_libraries(${PROJECT_NAME}
+                        -Wl,--start-group -Wl,--whole-archive -lpaddle_capi_layers
+                        -Wl,--no-whole-archive -lpaddle_capi_engine -Wl,--end-group
+                        ${THIRD_PARTY_LIBRARYS})
+else()
+  target_link_libraries(${PROJECT_NAME}
+                        -Wl,--start-group -Wl,--whole-archive -lpaddle_capi_whole
+                        -Wl,--no-whole-archive -Wl,--end-group
+                        ${THIRD_PARTY_LIBRARYS} -lrt -ldl -lpthread)
+endif()
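The CMakeLists.txt above expects `ANDROID_STANDALONE_TOOLCHAIN` to point at a standalone NDK toolchain whose `bin/` directory provides `arm-linux-androideabi-gcc` or `aarch64-linux-android-gcc`. Below is a minimal sketch of creating such toolchains with the NDK's `make_standalone_toolchain.py`; the NDK path and API level are assumptions, and any NDK release that still ships GCC (r17c or earlier) should work, since the toolchain file invokes `${ANDROID_TOOLCHAIN_PREFIX}-gcc`/`-g++`.

```
# Hypothetical NDK location; adjust to your setup.
NDK_ROOT=your/path/to/android-ndk

# Standalone toolchain for armeabi-v7a (prefix: arm-linux-androideabi-)
$NDK_ROOT/build/tools/make_standalone_toolchain.py \
    --arch arm --api 21 \
    --install-dir your/path/to/arm_standalone_toolchain

# Standalone toolchain for arm64-v8a (prefix: aarch64-linux-android-)
$NDK_ROOT/build/tools/make_standalone_toolchain.py \
    --arch arm64 --api 21 \
    --install-dir your/path/to/arm64_standalone_toolchain
```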
diff --git a/benchmark/tool/C/FindPaddle.cmake b/benchmark/tool/C/FindPaddle.cmake
new file mode 100644
index 0000000..1b695e6
--- /dev/null
+++ b/benchmark/tool/C/FindPaddle.cmake
@@ -0,0 +1,23 @@
+set(PADDLE_ROOT $ENV{PADDLE_ROOT} CACHE PATH "Paddle Path")
+find_path(PADDLE_INC NAMES capi.h PATHS ${PADDLE_ROOT}/include/paddle)
+find_library(PADDLE_LIB NAMES paddle_capi_shared PATHS ${PADDLE_ROOT}/lib/${ANDROID_ABI})
+if(PADDLE_INC AND PADDLE_LIB)
+  message(STATUS "Found PaddlePaddle (include: ${PADDLE_INC}; library: ${PADDLE_LIB})")
+else()
+  message(FATAL_ERROR "Cannot find PaddlePaddle on ${PADDLE_ROOT}")
+endif()
+include_directories(${PADDLE_ROOT}/include)
+
+set(THIRD_PARTY_LIBRARYS)
+list(APPEND THIRD_PARTY_LIBRARYS -lglog -lgflags -lprotobuf -lz)
+link_directories(${PADDLE_ROOT}/lib/${ANDROID_ABI})
+link_directories(${PADDLE_ROOT}/third_party/gflags/lib/${ANDROID_ABI})
+link_directories(${PADDLE_ROOT}/third_party/glog/lib/${ANDROID_ABI})
+link_directories(${PADDLE_ROOT}/third_party/protobuf/lib/${ANDROID_ABI})
+link_directories(${PADDLE_ROOT}/third_party/zip/lib/${ANDROID_ABI})
+
+find_library(OPENBLAS NAMES openblas PATHS ${PADDLE_ROOT}/third_party/openblas/lib/${ANDROID_ABI})
+if(OPENBLAS)
+  list(APPEND THIRD_PARTY_LIBRARYS -lopenblas)
+  link_directories(${PADDLE_ROOT}/third_party/openblas/lib/${ANDROID_ABI})
+endif()
\ No newline at end of file
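FindPaddle.cmake initializes `PADDLE_ROOT` from the environment when it is not passed on the command line, so the install path can also be exported once per shell instead of repeating `-DPADDLE_ROOT` for every build directory. A minimal sketch, with placeholder paths:

```
# Placeholder for the `make install` output directory described in step 1 of the README below.
export PADDLE_ROOT=your/path/to/paddle_install_output

# cmake then picks PADDLE_ROOT up from the environment, so -DPADDLE_ROOT can be omitted:
cmake .. -DANDROID_ABI=armeabi-v7a \
         -DANDROID_STANDALONE_TOOLCHAIN=your/path/to/arm_standalone_toolchain \
         -DCMAKE_BUILD_TYPE=MinSizeRel
```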
diff --git a/benchmark/tool/C/README.md b/benchmark/tool/C/README.md
index 243575c..c1db5fa 100644
--- a/benchmark/tool/C/README.md
+++ b/benchmark/tool/C/README.md
@@ -6,8 +6,34 @@ The demo can be run from the command line and used to test the inference perform
 
 ## Android
 To compile and run this demo in the Android environment, follow these steps:
-1. Refer to [this document](https://github.com/PaddlePaddle/Paddle/blob/develop/doc/howto/cross_compiling/cross_compiling_for_android_cn.md) to compile the paddle of android version.
-2. Compile this inference.cc to an executable program for the Android environment.
+1. Refer to [this document](https://github.com/PaddlePaddle/Paddle/blob/develop/doc/howto/cross_compiling/cross_compiling_for_android_cn.md) to build the Android version of PaddlePaddle. Running `make install` generates an output directory with three subdirectories: `include`, `lib`, and `third_party`.
+2. Compile inference.cc into an executable program for the Android environment as follows.
+   - armeabi-v7a
+     ```
+     mkdir build
+     cd build
+
+     cmake .. \
+           -DANDROID_ABI=armeabi-v7a \
+           -DANDROID_STANDALONE_TOOLCHAIN=your/path/to/arm_standalone_toolchain \
+           -DPADDLE_ROOT=your/path/to/the/output/directory/from/step/1 \
+           -DCMAKE_BUILD_TYPE=MinSizeRel
+
+     make
+     ```
+   - arm64-v8a
+     ```
+     mkdir build
+     cd build
+
+     cmake .. \
+           -DANDROID_ABI=arm64-v8a \
+           -DANDROID_STANDALONE_TOOLCHAIN=your/path/to/arm64_standalone_toolchain \
+           -DPADDLE_ROOT=your/path/to/the/output/directory/from/step/1 \
+           -DCMAKE_BUILD_TYPE=MinSizeRel
+
+     make
+     ```
 3. Run the demo program by logging into the Android environment via adb and specifying the paddle model from the command line.
 ```
 ./inference --merged_model ./model/mobilenet.paddle --input_size 150528
diff --git a/benchmark/tool/C/inference.cc b/benchmark/tool/C/inference.cc
index 01084b7..515ee1f 100644
--- a/benchmark/tool/C/inference.cc
+++ b/benchmark/tool/C/inference.cc
@@ -76,7 +76,7 @@ int main(int argc, char* argv[]) {
 
   {
     Timer time("init paddle");
-    char* argv[] = {"--use_gpu=False"};
+    char* argv[] = {(char*)"--use_gpu=False"};
     if (paddle_init(1, (char**)argv) != kPD_NO_ERROR) {
       std::cout << "paddle init error!" << std::endl;
     }
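Step 3 of benchmark/tool/C/README.md runs the benchmark on a device over adb. A minimal sketch of that step, assuming the binary from step 2 sits in `build/` and the merged model file is available locally; the `/data/local/tmp` staging directory is an assumption, not part of this patch:

```
# Push the demo binary and the merged model to a writable directory on the device.
adb shell mkdir -p /data/local/tmp/model
adb push build/inference /data/local/tmp/
adb push mobilenet.paddle /data/local/tmp/model/
adb shell chmod +x /data/local/tmp/inference

# Run the benchmark on the device with the same flags as in the README.
adb shell "cd /data/local/tmp && ./inference --merged_model ./model/mobilenet.paddle --input_size 150528"
```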