bump up paddle versions (deepjavalibrary#2116)

lanking520 authored Nov 3, 2022
1 parent b8b132d commit 857c9b3
Showing 11 changed files with 78 additions and 19 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/continuous.yml
@@ -17,7 +17,7 @@ jobs:
runs-on: ${{ matrix.operating-system }}
strategy:
matrix:
operating-system: [ ubuntu-18.04, macos-latest ]
operating-system: [ ubuntu-18.04, macos-12 ]

steps:
- uses: actions/checkout@v3

@@ -94,6 +94,12 @@ public void load(Path modelPath, String prefix, Map<String, ?> options) throws I
JniUtils.switchIrOptim(
config, ArgumentsUtil.booleanValue(options, "SwitchIrOptim"));
}
if (options.containsKey("enableONNXRuntime")) {
JniUtils.enableONNXRuntime(config);
}
if (options.containsKey("enableOrtOptimization")) {
JniUtils.enableOrtOptimization(config);
}
}
paddlePredictor = new PaddlePredictor(JniUtils.createPredictor(config));
JniUtils.deleteConfig(config);
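
The two new options above are plain string flags read from the model's load options, so they can be passed through the standard Criteria builder, as the updated example further down in this commit does. A minimal sketch, assuming the PaddlePaddle engine and model zoo are on the classpath; the class and method names here are illustrative, while the artifact id, filter, and option keys are taken from this commit:

import java.io.IOException;
import java.nio.file.Paths;

import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;

public final class OnnxRuntimeOptionExample {

    // Classify a local image with the Paddle mask classifier, routing execution
    // through ONNX Runtime via the options added in this commit.
    public static Classifications classify(String imagePath)
            throws IOException, ModelException, TranslateException {
        Image img = ImageFactory.getInstance().fromFile(Paths.get(imagePath));
        Criteria<Image, Classifications> criteria =
                Criteria.builder()
                        .setTypes(Image.class, Classifications.class)
                        .optArtifactId("ai.djl.paddlepaddle:mask_classification")
                        .optFilter("flavor", "server")
                        .optOption("enableONNXRuntime", "true")
                        .optOption("enableOrtOptimization", "true")
                        .build();
        try (ZooModel<Image, Classifications> model = criteria.loadModel();
                Predictor<Image, Classifications> predictor = model.newPredictor()) {
            return predictor.predict(img);
        }
    }
}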

@@ -107,6 +107,14 @@ public static void useFeedFetchOp(long config) {
PaddleLibrary.LIB.useFeedFetchOp(config);
}

public static void enableONNXRuntime(long config) {
PaddleLibrary.LIB.analysisConfigEnableONNXRuntime(config);
}

public static void enableOrtOptimization(long config) {
PaddleLibrary.LIB.analysisConfigEnableORTOptimization(config);
}

public static void deleteConfig(long config) {
PaddleLibrary.LIB.deleteAnalysisConfig(config);
}

@@ -70,6 +70,8 @@ public static void loadLibrary() {
loadLinuxDependencies(libName);
} else if (System.getProperty("os.name").startsWith("Win")) {
loadWindowsDependencies(libName);
} else if (System.getProperty("os.name").startsWith("Mac")) {
loadMacOsDependencies(libName);
}
logger.debug("Now loading " + libName);
System.load(libName); // NOPMD
@@ -91,7 +93,13 @@ public static void loadLinuxDependencies(String libName) {
+ libDir
+ ", the current one is set to: "
+ Utils.getenv("LD_LIBRARY_PATH"));
List<String> names = Arrays.asList("libdnnl.so.2", "libiomp5.so", "libmklml_intel.so");
List<String> names =
Arrays.asList(
"libdnnl.so.2",
"libiomp5.so",
"libmklml_intel.so",
"libonnxruntime.so",
"libpaddle2onnx.so");
names.forEach(
name -> {
Path path = libDir.resolve(name);
@@ -108,7 +116,24 @@ public static void loadLinuxDependencies(String libName)

public static void loadWindowsDependencies(String libName) {
Path libDir = Paths.get(libName).getParent();
List<String> names = Arrays.asList("openblas.dll", "mkldnn.dll");
List<String> names =
Arrays.asList("openblas.dll", "mkldnn.dll", "onnxruntime.dll", "paddle2onnx.dll");
names.forEach(
name -> {
Path path = libDir.resolve(name);
if (Files.isRegularFile(path)) {
String lib = path.toAbsolutePath().toString();
logger.debug("Now loading " + lib);
System.load(lib);
} else {
logger.debug(name + " is not found, skip loading...");
}
});
}

public static void loadMacOsDependencies(String libName) {
Path libDir = Paths.get(libName).getParent();
List<String> names = Arrays.asList("libonnxruntime.dylib", "libpaddle2onnx.dylib");
names.forEach(
name -> {
Path path = libDir.resolve(name);

@@ -54,6 +54,10 @@ private PaddleLibrary() {}

native void analysisConfigRemovePass(long handle, String pass);

native void analysisConfigEnableONNXRuntime(long handle);

native void analysisConfigEnableORTOptimization(long handle);

native void useFeedFetchOp(long handle);

native void deleteAnalysisConfig(long handle);

@@ -109,6 +109,8 @@ private static Predictor<Image, Classifications> getClassifier()
.setTypes(Image.class, Classifications.class)
.optArtifactId("ai.djl.paddlepaddle:mask_classification")
.optFilter("flavor", "server")
.optOption("enableONNXRuntime", "true")
.optOption("enableOrtOptimization", "true")
.build();
ZooModel<Image, Classifications> model = criteria.loadModel();
return model.newPredictor();
@@ -121,6 +123,8 @@ private static DetectedObjects detectFaces(Image img)
.setTypes(Image.class, DetectedObjects.class)
.optArtifactId("ai.djl.paddlepaddle:face_detection")
.optFilter("flavor", "server")
.optOption("enableONNXRuntime", "true")
.optOption("enableOrtOptimization", "true")
.build();

try (ZooModel<Image, DetectedObjects> model = criteria.loadModel();

6 changes: 3 additions & 3 deletions engines/paddlepaddle/paddlepaddle-native/build.cmd
@@ -7,9 +7,9 @@
set FILEPATH="paddle"

if "%1" == "cpu" (
set DOWNLOAD_URL="https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/CPU/x86-64_vs2017_avx_openblas/paddle_inference.zip"
) else if "%1" == "cu110" (
set DOWNLOAD_URL="https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/GPU/x86-64_vs2017_avx_mkl_cuda11.0_cudnn8/paddle_inference.zip"
set DOWNLOAD_URL="https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Windows/CPU/x86-64_avx-openblas-vs2017/paddle_inference.zip"
) else if "%1" == "cu112" (
set DOWNLOAD_URL="https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Windows/GPU/x86-64_cuda11.2_cudnn8.2.1_trt8.0.1.6_mkl_avx_vs2019/paddle_inference.zip"
)

if exist %FILEPATH% (

14 changes: 7 additions & 7 deletions engines/paddlepaddle/paddlepaddle-native/build.gradle
@@ -53,12 +53,12 @@ task prepareNativeLibs() {
delete "${buildDir}/native"

def files = [
"cpu/linux" : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/CPU/gcc5.4_avx_openblas/paddle_inference.tgz",
"cu102/linux": "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda10.2_cudnn8.1.1_trt7.2.3.4/paddle_inference.tgz",
"cu112/linux": "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda11.2_cudnn8.2.1_trt8.0.3.4/paddle_inference.tgz",
"cpu/osx" : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/MacOS/CPU/x86-64_clang_avx_openb/paddle_inference_install_dir.tgz",
"cpu/win" : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/CPU/x86-64_vs2017_avx_openblas/paddle_inference.zip",
"cu110/win" : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/GPU/x86-64_vs2017_avx_mkl_cuda11.0_cudnn8/paddle_inference.zip"
"cpu/linux" : "https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Linux/CPU/gcc5.4_avx_openblas/paddle_inference.tgz",
"cu102/linux": "https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda10.2_cudnn8.1.1_trt7.2.3.4/paddle_inference.tgz",
"cu112/linux": "https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda11.2_cudnn8.2.1_trt8.0.3.4/paddle_inference.tgz",
"cpu/osx" : "https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/MacOS/CPU/x86-64_clang_avx_openblas/paddle_inference_install_dir.tgz",
"cpu/win" : "https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Windows/CPU/x86-64_avx-openblas-vs2017/paddle_inference.zip",
"cu112/win" : "https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Windows/GPU/x86-64_cuda11.2_cudnn8.2.1_trt8.0.1.6_mkl_avx_vs2019/paddle_inference.zip"
]

def downloadDir = file("${buildDir}/download")
@@ -124,7 +124,7 @@ task uploadS3 {
"${buildDir}/native/cu112/linux/native/lib/",
"${buildDir}/native/cpu/osx/native/lib/",
"${buildDir}/native/cpu/win/native/lib/",
"${buildDir}/native/cu110/win/native/lib/"
"${buildDir}/native/cu112/win/native/lib/"
]
uploadDirs.each { item ->
fileTree(item).files.name.each {

10 changes: 5 additions & 5 deletions engines/paddlepaddle/paddlepaddle-native/build.sh
@@ -14,29 +14,29 @@ pushd $WORK_DIR

echo "Trying to find paddle folder..."

# https://www.paddlepaddle.org.cn/inference/v2.2/user_guides/download_lib.html 2.2.2
# https://www.paddlepaddle.org.cn/inference/v2.3/user_guides/download_lib.html 2.3.2

if [[ ! -d "paddle" ]]; then
echo "Folder not found. Downloading C++ package..."
if [[ $PLATFORM == 'linux' ]]; then
if [[ $1 == "cpu" ]]; then
curl -s https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/CPU/gcc5.4_avx_openblas/paddle_inference.tgz -o paddle.tgz
curl -s https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Linux/CPU/gcc5.4_avx_openblas/paddle_inference.tgz -o paddle.tgz
tar -xvzf paddle.tgz
mv paddle_inference paddle
elif [[ $1 == "cu102" ]]; then
curl -s https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda10.2_cudnn7.6.5_trt6.0.1.5/paddle_inference.tgz -o paddle.tgz
curl -s https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda10.2_cudnn7.6.5_trt6.0.1.5/paddle_inference.tgz -o paddle.tgz
tar -xvzf paddle.tgz
mv paddle_inference paddle
elif [[ $1 == "cu112" ]]; then
curl -s https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda11.2_cudnn8.2.1_trt8.0.3.4/paddle_inference.tgz -o paddle.tgz
curl -s https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda11.2_cudnn8.2.1_trt8.0.3.4/paddle_inference.tgz -o paddle.tgz
tar -xvzf paddle.tgz
mv paddle_inference paddle
else
echo "$1 is not supported."
exit 1
fi
elif [[ $PLATFORM == 'darwin' ]]; then
curl -s https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/MacOS/CPU/x86-64_clang_avx_openb/paddle_inference_install_dir.tgz -o paddle.tgz
curl -s https://paddle-inference-lib.bj.bcebos.com/2.3.2/cxx_c/MacOS/CPU/x86-64_clang_avx_openblas/paddle_inference_install_dir.tgz -o paddle.tgz
tar -xvzf paddle.tgz
mv paddle_inference_install_dir paddle
else

@@ -81,6 +81,18 @@ JNIEXPORT void JNICALL Java_ai_djl_paddlepaddle_jni_PaddleLibrary_analysisConfig
config_ptr->pass_builder()->DeletePass(djl::utils::jni::GetStringFromJString(env, jpass));
}

JNIEXPORT void JNICALL Java_ai_djl_paddlepaddle_jni_PaddleLibrary_analysisConfigEnableONNXRuntime
(JNIEnv* env, jobject jthis, jlong jhandle) {
auto* config_ptr = reinterpret_cast<paddle::AnalysisConfig*>(jhandle);
config_ptr->EnableONNXRuntime();
}

JNIEXPORT void JNICALL Java_ai_djl_paddlepaddle_jni_PaddleLibrary_analysisConfigEnableORTOptimization
(JNIEnv* env, jobject jthis, jlong jhandle) {
auto* config_ptr = reinterpret_cast<paddle::AnalysisConfig*>(jhandle);
config_ptr->EnableORTOptimization();
}

JNIEXPORT void JNICALL Java_ai_djl_paddlepaddle_jni_PaddleLibrary_deleteAnalysisConfig(
JNIEnv* env, jobject jthis, jlong jhandle) {
const auto* config_ptr = reinterpret_cast<paddle::AnalysisConfig*>(jhandle);

2 changes: 1 addition & 1 deletion gradle.properties
@@ -19,7 +19,7 @@ tflite_version=2.6.2
dlr_version=1.6.0
trt_version=8.4.1
onnxruntime_version=1.13.1
paddlepaddle_version=2.2.2
paddlepaddle_version=2.3.2
sentencepiece_version=0.1.96
tokenizers_version=0.12.0
fasttext_version=0.9.2