Skip to content

Commit

Permalink
[JitLayer]Pybind Function and hide ExecutorEngine and PEEngine (#44984)
Browse files Browse the repository at this point in the history
* Pybind Function and hide ExecutorEngine and PEEngine

* Remove FunctionNames in compilation_unit
  • Loading branch information
0x45f committed Aug 9, 2022
1 parent cd0b03c commit 2832ab2
Show file tree
Hide file tree
Showing 8 changed files with 44 additions and 41 deletions.
9 changes: 7 additions & 2 deletions paddle/fluid/jit/function_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -73,12 +73,17 @@ void ShareIntoScope(const std::vector<std::string> &ordered_input_names,
void ShareParamsIntoScope(const std::vector<std::string> &param_names,
const Name2VariableMap &params_dict,
framework::Scope *scope) {
VLOG(3) << "param_names size: " << param_names.size();
for (size_t i = 0; i < param_names.size(); ++i) {
std::string name = param_names[i];
PADDLE_ENFORCE_EQ(params_dict.count(name),
1,
phi::errors::InvalidArgument(
"Parameter named %s is not exist in param_names. "
"Please check that your model was saved correctly",
name));

auto &param = params_dict.find(name)->second;
auto &dense_tensor = param->Get<DenseTensor>();
VLOG(3) << "share into scope: " << name;
auto *var = scope->Var(name);
auto *dst_tensor = var->GetMutable<DenseTensor>();
*dst_tensor = dense_tensor;
Expand Down
8 changes: 8 additions & 0 deletions paddle/fluid/jit/layer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,14 @@ const std::shared_ptr<jit::FunctionInfo>& Layer::FunctionInfo(
return info_map_.at(name);
}

std::vector<std::string> Layer::FunctionNames() const {
  // Collect the name (key) of every function registered in info_map_.
  std::vector<std::string> names;
  names.reserve(info_map_.size());
  for (const auto& entry : info_map_) {
    names.push_back(entry.first);
  }
  return names;
}

#define PD_SPECIALZE_ATTRIBUTE_TYPE(T) \
template <> \
T Layer::Attribute<T>(const std::string& name) const { \
Expand Down
2 changes: 2 additions & 0 deletions paddle/fluid/jit/layer.h
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,8 @@ class Layer {
const std::shared_ptr<jit::FunctionInfo>& FunctionInfo(
const std::string& name) const;

std::vector<std::string> FunctionNames() const;

private:
Name2VariableMap params_map_;
Name2VariableMap attrs_map_;
Expand Down
5 changes: 3 additions & 2 deletions paddle/fluid/pybind/eager_functions.cc
Original file line number Diff line number Diff line change
Expand Up @@ -372,8 +372,9 @@ static PyObject* eager_api_jit_function_call(PyObject* self,
PyObject* args,
PyObject* kwargs) {
EAGER_TRY
std::shared_ptr<jit::BaseEngine> function =
CastPyArg2BaseEngine(PyTuple_GET_ITEM(args, 0), 0);

std::shared_ptr<jit::Function> function =
CastPyArg2JitFunction(PyTuple_GET_ITEM(args, 0), 0);
std::vector<paddle::experimental::Tensor> ins =
CastPyArg2VectorOfTensor(PyTuple_GET_ITEM(args, 1), 1);
std::vector<paddle::experimental::Tensor> outs = (*function)(ins);
Expand Down
19 changes: 7 additions & 12 deletions paddle/fluid/pybind/eager_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,7 @@ limitations under the License. */
#include "paddle/fluid/framework/convert_utils.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/scope_guard.h"
#include "paddle/fluid/jit/engine/executor_engine.h"
#include "paddle/fluid/jit/engine/pe_engine.h"
#include "paddle/fluid/jit/function.h"
#include "paddle/fluid/memory/allocation/allocator.h"
#include "paddle/fluid/operators/py_func_op.h"
#include "paddle/fluid/operators/utils.h"
Expand Down Expand Up @@ -54,8 +53,7 @@ extern PyTypeObject* g_customplace_pytype;
extern PyTypeObject* g_framework_tensor_pytype;
extern PyTypeObject* g_framework_lodtensorarray_pytype;
extern PyTypeObject* g_custom_op_kernel_ctx_pytype;
extern PyTypeObject* g_executor_engine_pytype;
extern PyTypeObject* g_pe_engine_pytype;
extern PyTypeObject* g_jit_function_pytype;

int TensorDtype2NumpyDtype(phi::DataType dtype) {
switch (dtype) {
Expand Down Expand Up @@ -232,14 +230,11 @@ std::shared_ptr<imperative::VarBase> CastPyArg2VarBase(PyObject* obj,
return py::cast<std::shared_ptr<imperative::VarBase>>(obj);
}

std::shared_ptr<jit::BaseEngine> CastPyArg2BaseEngine(PyObject* obj,
ssize_t arg_pos) {
if (PyObject_IsInstance(
obj, reinterpret_cast<PyObject*>(g_executor_engine_pytype))) {
return ::pybind11::handle(obj).cast<std::shared_ptr<jit::ExecutorEngine>>();
} else if (PyObject_IsInstance(
obj, reinterpret_cast<PyObject*>(g_pe_engine_pytype))) {
return ::pybind11::handle(obj).cast<std::shared_ptr<jit::PEEngine>>();
std::shared_ptr<jit::Function> CastPyArg2JitFunction(PyObject* obj,
ssize_t arg_pos) {
if (PyObject_IsInstance(obj,
reinterpret_cast<PyObject*>(g_jit_function_pytype))) {
return ::pybind11::handle(obj).cast<std::shared_ptr<jit::Function>>();
} else {
PADDLE_THROW(platform::errors::InvalidArgument(
"argument (position %d) must be "
Expand Down
6 changes: 3 additions & 3 deletions paddle/fluid/pybind/eager_utils.h
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ typedef SSIZE_T ssize_t;
#include "paddle/fluid/eager/hooks.h"
#include "paddle/fluid/framework/lod_tensor.h"
#include "paddle/fluid/framework/tensor.h"
#include "paddle/fluid/jit/engine/base_engine.h"
#include "paddle/fluid/jit/function.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/phi/common/backend.h"
#include "paddle/phi/common/data_type.h"
Expand Down Expand Up @@ -75,8 +75,8 @@ framework::proto::VarType::Type CastPyArg2ProtoType(PyObject* obj,
std::unordered_map<std::wstring, int> CastPyArg2Vocab(PyObject* obj,
ssize_t arg_pos);
std::vector<std::string> CastPyArg2Strings(PyObject* obj, ssize_t arg_pos);
std::shared_ptr<jit::BaseEngine> CastPyArg2BaseEngine(PyObject* obj,
ssize_t arg_pos);
std::shared_ptr<jit::Function> CastPyArg2JitFunction(PyObject* obj,
ssize_t arg_pos);

PyObject* ToPyObject(int value);
PyObject* ToPyObject(uint32_t value);
Expand Down
25 changes: 8 additions & 17 deletions paddle/fluid/pybind/jit.cc
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,7 @@ limitations under the License. */
#include "paddle/fluid/imperative/layer.h"
#include "paddle/fluid/platform/place.h"

#include "paddle/fluid/jit/engine/executor_engine.h"
#include "paddle/fluid/jit/engine/pe_engine.h"
#include "paddle/fluid/jit/function.h"
#include "paddle/fluid/jit/function_schema.h"
#include "paddle/fluid/jit/layer.h"
#include "paddle/fluid/jit/serializer.h"
Expand All @@ -29,26 +28,18 @@ namespace py = pybind11;
namespace paddle {
namespace pybind {

PyTypeObject *g_executor_engine_pytype = nullptr;
PyTypeObject *g_pe_engine_pytype = nullptr;
PyTypeObject *g_jit_function_pytype = nullptr;
using Variable = paddle::framework::Variable;

void BindJit(pybind11::module *m) {
py::class_<jit::Layer>(*m, "Layer", R"DOC(Layer Class.)DOC")
.def("function_dict",
&jit::Layer::EngineMap,
py::return_value_policy::reference);
.def("function_names", &jit::Layer::FunctionNames)
.def("function", &jit::Layer::Function)
.def("function_info", &jit::Layer::FunctionInfo);

py::class_<jit::ExecutorEngine, std::shared_ptr<jit::ExecutorEngine>>
executor_engine(*m, "ExecutorEngine", R"DOC(ExecutorEngine Class.)DOC");
g_executor_engine_pytype =
reinterpret_cast<PyTypeObject *>(executor_engine.ptr());
executor_engine.def("info", &jit::ExecutorEngine::Info);

py::class_<jit::PEEngine, std::shared_ptr<jit::PEEngine>> pe_engine(
*m, "PEEngine", R"DOC(PEEngine Class.)DOC");
g_pe_engine_pytype = reinterpret_cast<PyTypeObject *>(pe_engine.ptr());
pe_engine.def("info", &jit::PEEngine::Info);
py::class_<jit::Function, std::shared_ptr<jit::Function>> function(
*m, "Function", R"DOC(Function Class.)DOC");
g_jit_function_pytype = reinterpret_cast<PyTypeObject *>(function.ptr());

py::class_<jit::FunctionInfo, std::shared_ptr<jit::FunctionInfo>>(
*m, "FunctionInfo", R"DOC(FunctionInfo Class.)DOC")
Expand Down
11 changes: 6 additions & 5 deletions python/paddle/jit/layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,18 +26,19 @@ def __init__(self):

def load(self, load_path, place):
self.cpp_layer = Load(load_path, place)
function_dict = self.cpp_layer.function_dict()

for name, function in function_dict.items():
self.functions[name] = Function(function)
for name in self.cpp_layer.function_names():
function = self.cpp_layer.function(name)
info = self.cpp_layer.function_info(name)
self.functions[name] = Function(function, info)
setattr(self, name, self.functions[name])


class Function():

def __init__(self, function):
def __init__(self, function, info):
self.function = function
self.info = FunctionInfo(function.info())
self.info = FunctionInfo(info)

def __call__(self, *args):
return core.eager.jit_function_call(self.function, args)
Expand Down

0 comments on commit 2832ab2

Please sign in to comment.