【PaddlePaddle Hackathon】31. Add a new frontend language for Paddle Inference #37162

Merged: 28 commits into PaddlePaddle:develop from chenyanlann:java_inference, Feb 10, 2022

Commits (28)
b811747
【PaddlePaddle Hackathon - RFC submission】31 Add a new frontend language for Paddle Inference
chenyanlann Oct 20, 2021
66ade3d
Rename 31为 Paddle Inference 添加新的前端语言-.md to 31 为 Paddle Inference 添加新…
chenyanlann Oct 20, 2021
5754c4b
Merge branch 'PaddlePaddle:develop' into develop
chenyanlann Nov 3, 2021
ee15919
Merge branch 'PaddlePaddle:develop' into develop
chenyanlann Nov 12, 2021
9f20117
Delete 31 为 Paddle Inference 添加新的前端语言.md
chenyanlann Nov 12, 2021
82fd6d8
Merge branch 'PaddlePaddle:develop' into java_inference
chenyanlann Nov 12, 2021
2b72abf
Add paddle inference java api
chenyanlann Nov 12, 2021
421d759
Add readme.md
chenyanlann Nov 12, 2021
f67573c
Update build.sh
chenyanlann Nov 16, 2021
e51c391
Modify paddle inference java api
chenyanlann Nov 16, 2021
5cf5259
re-lint
chenyanlann Nov 17, 2021
65724d1
fix: remove redundant files; add PD_TensorDestroy function; unify naming
chenyanlann Nov 25, 2021
a2f9b4b
fix: add PD_PredictorDestroy function; modify test code
chenyanlann Nov 25, 2021
51d5084
Add: check whether the tensor pointer is null
chenyanlann Nov 26, 2021
67a61a9
Fix: fix memory reclamation errors; modify Java function exceptions; add…
zeuswuwuwuwu Nov 26, 2021
8e68f56
re-lint
chenyanlann Nov 26, 2021
a453a60
fix: fix predictor's memory release
chenyanlann Nov 27, 2021
34ec990
Add: add CMakeLists.txt; modify build.sh
chenyanlann Nov 29, 2021
6878542
Fix: add finalize and config destroy
chenyanlann Dec 10, 2021
a6d2b3b
Add: config finalize
chenyanlann Dec 10, 2021
2166f34
Fix: finalize
chenyanlann Dec 10, 2021
710f5ef
Merge branch 'PaddlePaddle:develop' into java_inference
chenyanlann Dec 10, 2021
d503c4e
re-lint
chenyanlann Dec 13, 2021
3035862
re-lint
chenyanlann Dec 13, 2021
7c4f0bd
Merge branch 'java_inference' of https://github.com/chenyanlann/Paddl…
chenyanlann Dec 13, 2021
107f673
Merge branch 'PaddlePaddle:develop' into java_inference
chenyanlann Dec 21, 2021
0e28531
Mobile directory
chenyanlann Dec 21, 2021
430ae2f
Merge branch 'PaddlePaddle:develop' into java_inference
chenyanlann Feb 10, 2022
5 changes: 5 additions & 0 deletions paddle/fluid/inference/javaapi/CMakeLists.txt
@@ -0,0 +1,5 @@
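# jni_path, jni_sub_path, paddle_path and paddle_inference_lib are expected to
# be exported as environment variables by build.sh (below) before cmake runs.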
include_directories($ENV{jni_path} $ENV{jni_sub_path} $ENV{paddle_path})
find_library(PADDLE_INFERENCE_C libpaddle_inference_c.so HINTS $ENV{paddle_inference_lib})
aux_source_directory(native JNI_SRCS)
add_library(paddle_inference SHARED ${JNI_SRCS})
target_link_libraries(paddle_inference ${PADDLE_INFERENCE_C})
23 changes: 23 additions & 0 deletions paddle/fluid/inference/javaapi/build.sh
@@ -0,0 +1,23 @@
#!/bin/bash
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

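# Usage: bash build.sh <paddle_inference_install_dir> <jni_include_dir> <jni_platform_include_dir>
# Example (hypothetical paths): bash build.sh ./paddle_inference $JAVA_HOME/include $JAVA_HOME/include/linux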
mkdir -p build && cd build
export library_path=$1
export jni_path=$2
export jni_sub_path=$3
mkldnn_lib=$library_path"/third_party/install/mkldnn/lib"
mklml_lib=$library_path"/third_party/install/mklml/lib"
export paddle_inference_lib=$library_path"/paddle/lib"
export paddle_path=$library_path"/paddle/include"
export LD_LIBRARY_PATH=$mkldnn_lib:$mklml_lib:$paddle_inference_lib:$LD_LIBRARY_PATH
cmake .. && make
#g++ -fPIC -D_REENTRANT -I $jni_path -I $jni_sub_path -I $paddle_path -L $paddle_inference_lib -c com_baidu_paddle_inference_Predictor.cpp com_baidu_paddle_inference_Config.cpp com_baidu_paddle_inference_Tensor.cpp
#g++ -shared -I $paddle_path -L $paddle_inference_lib com_baidu_paddle_inference_Config.o com_baidu_paddle_inference_Predictor.o com_baidu_paddle_inference_Tensor.o -o libpaddle_inference.so -lpaddle_inference_c

cd ../src/main/java/com/baidu/paddle/inference
javac Config.java Predictor.java Tensor.java
cd ../../../../../../../
cp ./build/libpaddle_inference.so libpaddle_inference.so
pwd
jar cvf JavaInference.jar -C src/main/java/ .
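The script leaves libpaddle_inference.so beside JavaInference.jar. A minimal, hypothetical sketch of how a caller would load the JNI bridge at runtime (the demo class name is illustrative, not part of this PR):

// Hypothetical loader: assumes libpaddle_inference.so is on java.library.path,
// e.g. the JVM was started with -Djava.library.path=. from the javaapi directory.
public class JavaInferenceDemo {
    static {
        // Binds the native methods compiled from the JNI sources in native/.
        System.loadLibrary("paddle_inference");
    }

    public static void main(String[] args) {
        System.out.println("paddle_inference JNI bridge loaded");
    }
}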
257 changes: 257 additions & 0 deletions paddle/fluid/inference/javaapi/native/com_baidu_paddle_inference_Config.cpp
@@ -0,0 +1,257 @@
// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "com_baidu_paddle_inference_Config.h"
#include <iostream>
#include "jni_convert_util.h" // NOLINT

#include "pd_inference_api.h" // NOLINT

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_cppConfigDestroy(
JNIEnv*, jobject, jlong cppPaddleConfigPointer) {
PD_ConfigDestroy(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
}

// 1. create Config

JNIEXPORT jlong JNICALL Java_com_baidu_paddle_inference_Config_createCppConfig(
JNIEnv* env, jobject obj) {
jlong cppPaddleConfigPointer = reinterpret_cast<jlong>(PD_ConfigCreate());
return cppPaddleConfigPointer;
}

JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_inference_Config_isCppConfigValid(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag =
PD_ConfigIsValid(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

// 2. combined model settings

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppModel(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jstring modelFile,
jstring paramsFile) {
PD_ConfigSetModel(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jstring_to_cpp_string(env, modelFile).c_str(),
jstring_to_cpp_string(env, paramsFile).c_str());
}

// 3. model path setters and getters

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppModelDir(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jstring modelDir) {
PD_ConfigSetModelDir(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jstring_to_cpp_string(env, modelDir).c_str());
}

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppProgFile(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jstring progFile) {
PD_ConfigSetProgFile(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jstring_to_cpp_string(env, progFile).c_str());
}

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppParamsFile(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer,
jstring paramsFile) {
PD_ConfigSetParamsFile(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jstring_to_cpp_string(env, paramsFile).c_str());
}

JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_modelDir(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
return cpp_string_to_jstring(
env, PD_ConfigGetModelDir(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
}

JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_progFile(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
return cpp_string_to_jstring(
env, PD_ConfigGetProgFile(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
}

JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_paramsFile(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
return cpp_string_to_jstring(
env, PD_ConfigGetParamsFile(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
}

// 4. cpu settings

JNIEXPORT void JNICALL
Java_com_baidu_paddle_inference_Config_setCpuMathLibraryNumThreads(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer,
jint mathThreadsNum) {
int math_threads_num = static_cast<int>(mathThreadsNum);
PD_ConfigSetCpuMathLibraryNumThreads(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer), math_threads_num);
}

JNIEXPORT jint JNICALL
Java_com_baidu_paddle_inference_Config_cpuMathLibraryNumThreads(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
jint mathThreadsNum =
static_cast<jint>(PD_ConfigGetCpuMathLibraryNumThreads(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
return mathThreadsNum;
}

// 5. MKLDNN settings

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableMKLDNN(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
PD_ConfigEnableMKLDNN(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
}

JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Config_mkldnnEnabled(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag = PD_ConfigMkldnnEnabled(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

JNIEXPORT void JNICALL
Java_com_baidu_paddle_inference_Config_enableMkldnnBfloat16(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
PD_ConfigEnableMkldnnBfloat16(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
}

JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_inference_Config_mkldnnBfloat16Enabled(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag = PD_ConfigMkldnnBfloat16Enabled(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

// 6. gpu setting
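// memorySize is the initial GPU memory pool size in MB; it maps to the
// memory_pool_init_size_mb parameter of PD_ConfigEnableUseGpu.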

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableUseGpu(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jlong memorySize,
jint deviceId) {
PD_ConfigEnableUseGpu(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
static_cast<uint64_t>(memorySize), static_cast<int32_t>(deviceId));
}

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_disableGpu(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
PD_ConfigDisableGpu(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
}

JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Config_useGpu(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag =
PD_ConfigUseGpu(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

JNIEXPORT jint JNICALL Java_com_baidu_paddle_inference_Config_gpuDeviceId(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
int device_id = PD_ConfigGpuDeviceId(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return static_cast<jint>(device_id);
}

JNIEXPORT jint JNICALL
Java_com_baidu_paddle_inference_Config_memoryPoolInitSizeMb(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
int memory_pool_init_size_mb = PD_ConfigMemoryPoolInitSizeMb(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return static_cast<jint>(memory_pool_init_size_mb);
}

JNIEXPORT jfloat JNICALL
Java_com_baidu_paddle_inference_Config_fractionOfGpuMemoryForPool(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
float fraction_of_gpu_memory_for_pool = PD_ConfigFractionOfGpuMemoryForPool(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return static_cast<jfloat>(fraction_of_gpu_memory_for_pool);
}

// 7. TensorRT settings: TODO

// 8. optim setting

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_switchIrOptim(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jboolean flag) {
PD_ConfigSwitchIrOptim(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jboolean_to_cpp_bool(env, flag));
}

JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Config_irOptim(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag =
PD_ConfigIrOptim(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_switchIrDebug(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jboolean flag) {
PD_ConfigSwitchIrDebug(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jboolean_to_cpp_bool(env, flag));
}

// 9. enable memory optimization

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableMemoryOptim(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jboolean flag) {
PD_ConfigEnableMemoryOptim(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
jboolean_to_cpp_bool(env, flag));
}

JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_inference_Config_memoryOptimEnabled(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag = PD_ConfigMemoryOptimEnabled(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

// 10. profile setting

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableProfile(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
PD_ConfigEnableProfile(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
}

JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_inference_Config_profileEnabled(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
bool flag = PD_ConfigProfileEnabled(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
return cpp_bool_to_jboolean(env, flag);
}

// 11. log setting

JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_disableGlogInfo(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
PD_ConfigDisableGlogInfo(
reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
}

// 12. view config configuration

JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_summary(
JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
return cpp_string_to_jstring(
env,
PD_ConfigSummary(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
}
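The JNI symbol names above fix the native method names on com.baidu.paddle.inference.Config (Java_com_baidu_paddle_inference_Config_<method>). A condensed sketch of the implied Java side, assuming plain forwarding wrappers; the actual Config.java in this PR may carry extra bookkeeping:

package com.baidu.paddle.inference;

// Sketch inferred from the JNI symbols above, using the standard JNI type
// mapping (jlong -> long, jstring -> String, jboolean -> boolean, jint -> int).
public class Config {
    private long cppPaddleConfigPointer;  // raw PD_Config* held by this object

    public Config() {
        cppPaddleConfigPointer = createCppConfig();
    }

    // Illustrative wrapper (name not from this PR): free the PD_Config once.
    public void destroyNativeConfig() {
        if (cppPaddleConfigPointer != 0) {
            cppConfigDestroy(cppPaddleConfigPointer);
            cppPaddleConfigPointer = 0;
        }
    }

    // A few of the native declarations bound to the C++ functions above.
    private native long createCppConfig();
    private native void cppConfigDestroy(long cppPaddleConfigPointer);
    private native boolean isCppConfigValid(long cppPaddleConfigPointer);
    private native void setCppModel(long ptr, String modelFile, String paramsFile);
    private native void enableUseGpu(long ptr, long memorySize, int deviceId);
    private native void enableMKLDNN(long ptr);
    private native String summary(long ptr);
}

Because the C++ side defines no overloads, the short JNI names resolve without signature mangling, which is why each Java method name appears exactly once.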