From 2832ab22f3f19929e9cdf8149965ed38ab94de58 Mon Sep 17 00:00:00 2001
From: WangZhen <23097963+0x45f@users.noreply.github.com>
Date: Tue, 9 Aug 2022 14:05:59 +0800
Subject: [PATCH] [JitLayer] Pybind Function and hide ExecutorEngine and PEEngine (#44984)

* Pybind Function and hide ExecutorEngine and PEEngine

* Remove FunctionNames in compilation_unit
---
 paddle/fluid/jit/function_utils.cc     |  9 +++++++--
 paddle/fluid/jit/layer.cc              |  8 ++++++++
 paddle/fluid/jit/layer.h               |  2 ++
 paddle/fluid/pybind/eager_functions.cc |  5 +++--
 paddle/fluid/pybind/eager_utils.cc     | 19 +++++++------------
 paddle/fluid/pybind/eager_utils.h      |  6 +++---
 paddle/fluid/pybind/jit.cc             | 25 ++++++++-----------------
 python/paddle/jit/layer.py             | 11 ++++++-----
 8 files changed, 44 insertions(+), 41 deletions(-)

diff --git a/paddle/fluid/jit/function_utils.cc b/paddle/fluid/jit/function_utils.cc
index 83da12d2652a3..c3811935a52cc 100644
--- a/paddle/fluid/jit/function_utils.cc
+++ b/paddle/fluid/jit/function_utils.cc
@@ -73,12 +73,17 @@ void ShareIntoScope(const std::vector<std::string> &ordered_input_names,
 void ShareParamsIntoScope(const std::vector<std::string> &param_names,
                           const Name2VariableMap &params_dict,
                           framework::Scope *scope) {
-  VLOG(3) << "param_names size: " << param_names.size();
   for (size_t i = 0; i < param_names.size(); ++i) {
     std::string name = param_names[i];
+    PADDLE_ENFORCE_EQ(params_dict.count(name),
+                      1,
+                      phi::errors::InvalidArgument(
+                          "Parameter named %s does not exist in params_dict. "
+                          "Please check that your model was saved correctly.",
+                          name));
+
     auto &param = params_dict.find(name)->second;
     auto &dense_tensor = param->Get<DenseTensor>();
-    VLOG(3) << "share into scope: " << name;
     auto *var = scope->Var(name);
     auto *dst_tensor = var->GetMutable<DenseTensor>();
     *dst_tensor = dense_tensor;
diff --git a/paddle/fluid/jit/layer.cc b/paddle/fluid/jit/layer.cc
index 6cf8b98b1cbcb..868b5a3ee250a 100644
--- a/paddle/fluid/jit/layer.cc
+++ b/paddle/fluid/jit/layer.cc
@@ -68,6 +68,14 @@ const std::shared_ptr<jit::FunctionInfo>& Layer::FunctionInfo(
   return info_map_.at(name);
 }
 
+std::vector<std::string> Layer::FunctionNames() const {
+  std::vector<std::string> names;
+  for (auto it = info_map_.begin(); it != info_map_.end(); ++it) {
+    names.emplace_back(it->first);
+  }
+  return names;
+}
+
 #define PD_SPECIALZE_ATTRIBUTE_TYPE(T)                    \
   template <>                                             \
   T Layer::Attribute(const std::string& name) const {     \
diff --git a/paddle/fluid/jit/layer.h b/paddle/fluid/jit/layer.h
index 6f92ac44d6379..8a4001cf89c16 100644
--- a/paddle/fluid/jit/layer.h
+++ b/paddle/fluid/jit/layer.h
@@ -70,6 +70,8 @@ class Layer {
   const std::shared_ptr<jit::FunctionInfo>& FunctionInfo(
       const std::string& name) const;
 
+  std::vector<std::string> FunctionNames() const;
+
  private:
   Name2VariableMap params_map_;
   Name2VariableMap attrs_map_;
diff --git a/paddle/fluid/pybind/eager_functions.cc b/paddle/fluid/pybind/eager_functions.cc
index 62cfc330ae3ff..9596551136c20 100644
--- a/paddle/fluid/pybind/eager_functions.cc
+++ b/paddle/fluid/pybind/eager_functions.cc
@@ -372,8 +372,9 @@ static PyObject* eager_api_jit_function_call(PyObject* self,
                                              PyObject* args,
                                              PyObject* kwargs) {
   EAGER_TRY
-  std::shared_ptr<jit::BaseEngine> function =
-      CastPyArg2BaseEngine(PyTuple_GET_ITEM(args, 0), 0);
+
+  std::shared_ptr<jit::Function> function =
+      CastPyArg2JitFunction(PyTuple_GET_ITEM(args, 0), 0);
   std::vector<paddle::experimental::Tensor> ins =
       CastPyArg2VectorOfTensor(PyTuple_GET_ITEM(args, 1), 1);
   std::vector<paddle::experimental::Tensor> outs = (*function)(ins);
diff --git a/paddle/fluid/pybind/eager_utils.cc b/paddle/fluid/pybind/eager_utils.cc
index 82e1fa873f8d1..a92ddf388c220 100644
--- a/paddle/fluid/pybind/eager_utils.cc
+++ b/paddle/fluid/pybind/eager_utils.cc
@@ -22,8 +22,7 @@ limitations under the License. */
 #include "paddle/fluid/framework/convert_utils.h"
 #include "paddle/fluid/framework/scope.h"
 #include "paddle/fluid/framework/scope_guard.h"
-#include "paddle/fluid/jit/engine/executor_engine.h"
-#include "paddle/fluid/jit/engine/pe_engine.h"
+#include "paddle/fluid/jit/function.h"
 #include "paddle/fluid/memory/allocation/allocator.h"
 #include "paddle/fluid/operators/py_func_op.h"
 #include "paddle/fluid/operators/utils.h"
@@ -54,8 +53,7 @@ extern PyTypeObject* g_customplace_pytype;
 extern PyTypeObject* g_framework_tensor_pytype;
 extern PyTypeObject* g_framework_lodtensorarray_pytype;
 extern PyTypeObject* g_custom_op_kernel_ctx_pytype;
-extern PyTypeObject* g_executor_engine_pytype;
-extern PyTypeObject* g_pe_engine_pytype;
+extern PyTypeObject* g_jit_function_pytype;
 
 int TensorDtype2NumpyDtype(phi::DataType dtype) {
   switch (dtype) {
@@ -232,14 +230,11 @@ std::shared_ptr<imperative::VarBase> CastPyArg2VarBase(PyObject* obj,
   return py::cast<std::shared_ptr<imperative::VarBase>>(obj);
 }
 
-std::shared_ptr<jit::BaseEngine> CastPyArg2BaseEngine(PyObject* obj,
-                                                      ssize_t arg_pos) {
-  if (PyObject_IsInstance(
-          obj, reinterpret_cast<PyObject*>(g_executor_engine_pytype))) {
-    return ::pybind11::handle(obj).cast<std::shared_ptr<jit::ExecutorEngine>>();
-  } else if (PyObject_IsInstance(
-                 obj, reinterpret_cast<PyObject*>(g_pe_engine_pytype))) {
-    return ::pybind11::handle(obj).cast<std::shared_ptr<jit::PEEngine>>();
+std::shared_ptr<jit::Function> CastPyArg2JitFunction(PyObject* obj,
+                                                     ssize_t arg_pos) {
+  if (PyObject_IsInstance(obj,
+                          reinterpret_cast<PyObject*>(g_jit_function_pytype))) {
+    return ::pybind11::handle(obj).cast<std::shared_ptr<jit::Function>>();
   } else {
     PADDLE_THROW(platform::errors::InvalidArgument(
         "argument (position %d) must be "
diff --git a/paddle/fluid/pybind/eager_utils.h b/paddle/fluid/pybind/eager_utils.h
index 94e8ce4e04aa4..df959b9abf4f1 100644
--- a/paddle/fluid/pybind/eager_utils.h
+++ b/paddle/fluid/pybind/eager_utils.h
@@ -20,7 +20,7 @@ typedef SSIZE_T ssize_t;
 #include "paddle/fluid/eager/hooks.h"
 #include "paddle/fluid/framework/lod_tensor.h"
 #include "paddle/fluid/framework/tensor.h"
-#include "paddle/fluid/jit/engine/base_engine.h"
+#include "paddle/fluid/jit/function.h"
 #include "paddle/fluid/platform/place.h"
 #include "paddle/phi/common/backend.h"
 #include "paddle/phi/common/data_type.h"
@@ -75,8 +75,8 @@ framework::proto::VarType::Type CastPyArg2ProtoType(PyObject* obj,
 std::unordered_map<std::wstring, int> CastPyArg2Vocab(PyObject* obj,
                                                       ssize_t arg_pos);
 std::vector<std::string> CastPyArg2Strings(PyObject* obj, ssize_t arg_pos);
-std::shared_ptr<jit::BaseEngine> CastPyArg2BaseEngine(PyObject* obj,
-                                                      ssize_t arg_pos);
+std::shared_ptr<jit::Function> CastPyArg2JitFunction(PyObject* obj,
+                                                     ssize_t arg_pos);
 
 PyObject* ToPyObject(int value);
 PyObject* ToPyObject(uint32_t value);
diff --git a/paddle/fluid/pybind/jit.cc b/paddle/fluid/pybind/jit.cc
index 752b5a3021af5..a9c844093d1a5 100644
--- a/paddle/fluid/pybind/jit.cc
+++ b/paddle/fluid/pybind/jit.cc
@@ -18,8 +18,7 @@ limitations under the License. */
 
 #include "paddle/fluid/imperative/layer.h"
 #include "paddle/fluid/platform/place.h"
-#include "paddle/fluid/jit/engine/executor_engine.h"
-#include "paddle/fluid/jit/engine/pe_engine.h"
+#include "paddle/fluid/jit/function.h"
 #include "paddle/fluid/jit/function_schema.h"
 #include "paddle/fluid/jit/layer.h"
 #include "paddle/fluid/jit/serializer.h"
@@ -29,26 +28,18 @@ namespace py = pybind11;
 namespace paddle {
 namespace pybind {
 
-PyTypeObject *g_executor_engine_pytype = nullptr;
-PyTypeObject *g_pe_engine_pytype = nullptr;
+PyTypeObject *g_jit_function_pytype = nullptr;
 using Variable = paddle::framework::Variable;
 
 void BindJit(pybind11::module *m) {
   py::class_<jit::Layer>(*m, "Layer", R"DOC(Layer Class.)DOC")
-      .def("function_dict",
-           &jit::Layer::EngineMap,
-           py::return_value_policy::reference);
+      .def("function_names", &jit::Layer::FunctionNames)
+      .def("function", &jit::Layer::Function)
+      .def("function_info", &jit::Layer::FunctionInfo);
 
-  py::class_<jit::ExecutorEngine, std::shared_ptr<jit::ExecutorEngine>>
-      executor_engine(*m, "ExecutorEngine", R"DOC(ExecutorEngine Class.)DOC");
-  g_executor_engine_pytype =
-      reinterpret_cast<PyTypeObject *>(executor_engine.ptr());
-  executor_engine.def("info", &jit::ExecutorEngine::Info);
-
-  py::class_<jit::PEEngine, std::shared_ptr<jit::PEEngine>> pe_engine(
-      *m, "PEEngine", R"DOC(PEEngine Class.)DOC");
-  g_pe_engine_pytype = reinterpret_cast<PyTypeObject *>(pe_engine.ptr());
-  pe_engine.def("info", &jit::PEEngine::Info);
+  py::class_<jit::Function, std::shared_ptr<jit::Function>> function(
+      *m, "Function", R"DOC(Function Class.)DOC");
+  g_jit_function_pytype = reinterpret_cast<PyTypeObject *>(function.ptr());
 
   py::class_<jit::FunctionInfo, std::shared_ptr<jit::FunctionInfo>>(
       *m, "FunctionInfo", R"DOC(FunctionInfo Class.)DOC")
diff --git a/python/paddle/jit/layer.py b/python/paddle/jit/layer.py
index 4aee7a8f5c02a..97b598948500b 100644
--- a/python/paddle/jit/layer.py
+++ b/python/paddle/jit/layer.py
@@ -26,18 +26,19 @@ def __init__(self):
 
     def load(self, load_path, place):
         self.cpp_layer = Load(load_path, place)
-        function_dict = self.cpp_layer.function_dict()
 
-        for name, function in function_dict.items():
-            self.functions[name] = Function(function)
+        for name in self.cpp_layer.function_names():
+            function = self.cpp_layer.function(name)
+            info = self.cpp_layer.function_info(name)
+            self.functions[name] = Function(function, info)
             setattr(self, name, self.functions[name])
 
 
 class Function():
-    def __init__(self, function):
+    def __init__(self, function, info):
         self.function = function
-        self.info = FunctionInfo(function.info())
+        self.info = FunctionInfo(info)
 
     def __call__(self, *args):
         return core.eager.jit_function_call(self.function, args)
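
A minimal usage sketch of the reworked Python-side wrapper after this patch, assuming the module in python/paddle/jit/layer.py is importable as paddle.jit.layer and a model has already been saved; the load path and the exposed function name "forward" are placeholders, real names come from the saved program:

# Sketch only: path and "forward" are hypothetical examples.
import paddle
from paddle.jit.layer import Layer

place = paddle.CPUPlace()  # or paddle.CUDAPlace(0)

layer = Layer()
layer.load("./saved_infer_model", place)  # placeholder path

# load() now iterates the new function_names()/function()/function_info()
# bindings, wraps each jit::Function, and exposes it both in layer.functions
# and as an attribute on the Python Layer object.
print(layer.functions.keys())

x = paddle.rand([2, 4])  # placeholder input shape
outs = layer.forward(x)  # dispatches through core.eager.jit_function_call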