
Commit

Merge pull request #4725 from dzhwinter/fix/scope
change: NewVar() to Var()
dzhwinter committed Oct 15, 2017
2 parents ec783d6 + 54793e3 commit e593113
Showing 36 changed files with 131 additions and 112 deletions.
2 changes: 1 addition & 1 deletion doc/design/block.md
@@ -243,7 +243,7 @@ class SymbolTable {
// TODO determine whether name is generated by python or C++.
// Currently assume that a unique name will be generated by C++ if the
// argument name is left default.
VarDesc* NewVar(const string& name="");
VarDesc* Var(const string& name="");

// find a VarDesc by name, if recursive is true, find parent's SymbolTable
// recursively.
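The TODO above notes that the C++ side is expected to generate a unique name when the `name` argument is left at its default. Below is a minimal sketch of one way this could work, using the same address-plus-counter trick that `Scope::Var()` adopts in `paddle/framework/scope.cc` later in this commit; the `SymbolTable` here is a reduced, hypothetical stand-in rather than the real class.

```cpp
#include <cstdio>
#include <memory>
#include <string>
#include <unordered_map>
#include <utility>

// Reduced stand-in for the VarDesc/SymbolTable pair discussed in block.md.
struct VarDesc {
  explicit VarDesc(std::string n) : name(std::move(n)) {}
  std::string name;
};

class SymbolTable {
 public:
  // If `name` is empty, generate a unique one from the table's address and
  // the number of entries so far -- mirroring Scope::Var() further below.
  VarDesc* Var(const std::string& name = "") {
    std::string key = name;
    if (key.empty()) {
      char buf[64];
      std::snprintf(buf, sizeof(buf), "%p.%zu",
                    static_cast<const void*>(this), vars_.size());
      key = buf;
    }
    auto& slot = vars_[key];
    if (!slot) slot.reset(new VarDesc(key));
    return slot.get();
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<VarDesc>> vars_;
};
```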
8 changes: 4 additions & 4 deletions doc/design/scope.md
@@ -37,7 +37,7 @@ Scope is an association of a name to variable. All variables belong to `Scope`.
```cpp
class Scope {
public:
Variable* NewVar(const std::string& name);
Variable* Var(const std::string& name);
const Variable* FindVar(const std::string& name) const;

private:
@@ -98,7 +98,7 @@ class Scope {
Variable* FindVar(const std::string& name) const;

// return if already contains same name variable.
Variable* NewVar(const std::string& name);
Variable* Var(const std::string& name);

private:
std::shared_ptr<Scope> parent_;
@@ -107,7 +107,7 @@
```
## Only scope can create a variable
To ensure `only scope can create a variable`, we should mark `Variable`'s constructor as a private member function, and Scope is a friend class of Variable. And then only `NewVar` can construct `Variable`.
To ensure `only scope can create a variable`, we should mark `Variable`'s constructor as a private member function, and Scope is a friend class of Variable. And then only `Var` can construct `Variable`.
## When scope destroyed, all variables inside this scope should be destroyed together
@@ -121,4 +121,4 @@ Also, as the parent scope is a `shared_ptr`, we can only `Create()` a scope shar
## Orthogonal interface
`FindVar` will return `nullptr` when `name` is not found. It can be used as `Contains` method. `NewVar` will return an `Error` when there is a name conflict locally. Combine `FindVar` and `NewVar`, we can implement `NewVar` easily.
`FindVar` will return `nullptr` when `name` is not found. It can be used as `Contains` method. `Var` will return an `Error` when there is a name conflict locally. Combine `FindVar` and `Var`, we can implement `Var` easily.
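The scope.md design above makes three claims: only `Scope` can construct a `Variable` (private constructor plus friendship), a scope's variables are destroyed together with the scope while the parent is kept alive through a `shared_ptr`, and `Var` can be built on top of `FindVar`. A minimal sketch of those ideas follows, with member types simplified and the local get-or-create behaviour taken from `paddle/framework/scope.cc` below (rather than the `Error` return mentioned in the text); it is an illustration of the pattern, not the actual PaddlePaddle implementation.

```cpp
#include <memory>
#include <string>
#include <unordered_map>

class Scope;  // forward declaration so Variable can befriend it

class Variable {
 private:
  Variable() = default;  // private: only Scope may construct a Variable
  friend class Scope;
};

class Scope {
 public:
  // Get-or-create: returns the local variable named `name`, creating it
  // first if it does not exist. Built on top of the lookup, as the
  // "orthogonal interface" section suggests.
  Variable* Var(const std::string& name) {
    if (Variable* v = FindVarLocally(name)) return v;
    auto* v = new Variable();  // allowed because Scope is a friend
    vars_[name].reset(v);
    return v;
  }

  // Returns nullptr when `name` is not found, searching parents
  // recursively, so it doubles as a Contains() check.
  Variable* FindVar(const std::string& name) const {
    if (Variable* v = FindVarLocally(name)) return v;
    return parent_ ? parent_->FindVar(name) : nullptr;
  }

 private:
  Variable* FindVarLocally(const std::string& name) const {
    auto it = vars_.find(name);
    return it == vars_.end() ? nullptr : it->second.get();
  }

  // Variables are owned by the scope and die with it; the parent is held
  // by shared_ptr so an enclosing scope outlives its children.
  std::unordered_map<std::string, std::unique_ptr<Variable>> vars_;
  std::shared_ptr<Scope> parent_;
};
```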
2 changes: 1 addition & 1 deletion doc/design/tensor_array.md
@@ -161,7 +161,7 @@ class TensorArray:
@name: str
the name of the variable to output.
'''
tensor = NewVar(name)
tensor = Var(name)
tensor_array_stack(self.name, tensor)
return tensor

14 changes: 12 additions & 2 deletions paddle/framework/backward.cc
@@ -280,12 +280,21 @@ static void CreateGradVarInBlock(
auto ops = block_desc->AllOps();
for (size_t op_index = grad_op_start_index; op_index < ops.size();
++op_index) {
// <<<<<<< HEAD
// for (const auto& output : ops[op_index]->Outputs()) {
// for (const auto& real_output : output.second) {
// if (!block_desc->HasVar(real_output)) {
// block_desc->Var(real_output);
// }
// }
// }
// =======
ForEachVarName(ops[op_index]->Outputs(),
[&](const std::string& grad_var_name) {
if (block_desc->HasVar(grad_var_name)) {
return false;
}
block_desc->NewVar(grad_var_name);
block_desc->Var(grad_var_name);
auto it = param_name_map.find(grad_var_name);
if (it == param_name_map.end()) {
return false;
@@ -297,6 +306,7 @@ static void CreateGradVarInBlock(
grad_record.op_idx_ = static_cast<int>(op_index);
return false; /* not break */
});
// >>>>>>> origin/develop
}
}

@@ -448,7 +458,7 @@ AppendBackward(ProgramDescBind& program_desc, const VarDescBind& target,
for (auto& ptr : backward_op_descs) {
all_ops.push_back(std::move(ptr));
}
root_block->NewVar(fill_one_op_out);
root_block->Var(fill_one_op_out);

// create grad_var for all blocks in this program
CreateGradVarInBlock(&retv, root_block, forward_op_num, grad_to_var);
15 changes: 9 additions & 6 deletions paddle/framework/block_desc.cc
@@ -18,19 +18,22 @@ limitations under the License. */
namespace paddle {
namespace framework {

VarDescBind *BlockDescBind::NewVar(const std::string &name) {
VarDescBind *BlockDescBind::Var(const std::string &name) {
need_update_ = true;
auto it = vars_.find(name);
PADDLE_ENFORCE(it == vars_.end(), "Duplicated variable %s", name);
auto var = new VarDescBind(name);
if (it != vars_.end()) {
return it->second.get();
}
auto *var = new VarDescBind(name);
vars_[name].reset(var);
return var;
}

VarDescBind *BlockDescBind::Var(const std::string &name) const {
VarDescBind *BlockDescBind::FindVar(const std::string &name) const {
auto it = vars_.find(name);
PADDLE_ENFORCE(it != vars_.end(),
"Can not find variable %s in current block.", name);
if (it == vars_.end()) {
return nullptr;
}
return it->second.get();
}

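The block_desc.cc change above gives the two methods distinct roles: `Var()` is now get-or-create (it returns the existing descriptor instead of raising "Duplicated variable"), while `FindVar()` is a pure lookup that returns `nullptr` on a miss instead of enforcing. A hedged illustration of how a caller might rely on this is below; it assumes compilation inside the Paddle source tree so that `paddle/framework/block_desc.h` is available, and the helper function itself is hypothetical.

```cpp
#include <string>

#include "paddle/framework/block_desc.h"

// Hypothetical helper mirroring call sites such as CreateGradVarInBlock in
// backward.cc: with the new semantics the HasVar() guard is optional,
// because Var() silently returns an already-registered descriptor.
void EnsureVar(paddle::framework::BlockDescBind* block,
               const std::string& name) {
  // Old pattern (still valid):
  //   if (!block->HasVar(name)) block->NewVar(name);
  block->Var(name);  // creates on the first call, is a no-op afterwards

  // Pure queries should now go through FindVar() and handle nullptr,
  // rather than relying on an enforce failure.
  if (block->FindVar(name) == nullptr) {
    // unreachable here, since Var(name) was just called above
  }
}
```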
4 changes: 2 additions & 2 deletions paddle/framework/block_desc.h
@@ -40,9 +40,9 @@ class BlockDescBind {

int32_t Parent() const { return desc_->parent_idx(); }

VarDescBind *NewVar(const std::string &name_bytes);
VarDescBind *Var(const std::string &name_bytes);

VarDescBind *Var(const std::string &name_bytes) const;
VarDescBind *FindVar(const std::string &name_bytes) const;

bool HasVar(const std::string &var_name) const;

4 changes: 2 additions & 2 deletions paddle/framework/executor.cc
@@ -66,7 +66,7 @@ void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id) {

// Instantiate all the vars in the global scope
for (auto& var : block.vars()) {
scope->NewVar(var.name());
scope->Var(var.name());
}

Scope& local_scope = scope->NewScope();
@@ -78,7 +78,7 @@ void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id) {
for (auto& var : block.ops(i).outputs()) {
for (auto& argu : var.arguments()) {
if (local_scope.FindVar(argu) == nullptr) {
local_scope.NewVar(argu);
local_scope.Var(argu);
}
}
}
8 changes: 7 additions & 1 deletion paddle/framework/executor_test.cc
@@ -46,10 +46,16 @@ void AddOp(const std::string& type, const VariableNameMap& inputs,
// insert output
for (auto kv : outputs) {
for (auto v : kv.second) {
// <<<<<<< HEAD
// auto var = block->Var(v);
// var->SetType(VarDesc::LOD_TENSOR);
// var->SetDataType(paddle::framework::DataType::FP32);
// =======
if (!block->HasVar(v)) {
auto var = block->NewVar(v);
auto var = block->Var(v);
var->SetDataType(paddle::framework::DataType::FP32);
}
// >>>>>>> origin/develop
}
}

4 changes: 2 additions & 2 deletions paddle/framework/operator.h
@@ -403,11 +403,11 @@ class CompileTimeInferShapeContext : public InferShapeContext {

private:
DDim GetDim(const std::string& name) const override {
return framework::make_ddim(block_.Var(name)->Shape());
return framework::make_ddim(block_.FindVar(name)->Shape());
}

void SetDim(const std::string& name, const DDim& dim) override {
block_.Var(name)->SetShape(framework::vectorize(dim));
block_.FindVar(name)->SetShape(framework::vectorize(dim));
}

const OpDescBind& op_;
14 changes: 7 additions & 7 deletions paddle/framework/operator_test.cc
@@ -84,7 +84,7 @@ TEST(OperatorBase, all) {
paddle::framework::Scope scope;

auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
scope.NewVar("OUT1");
scope.Var("OUT1");
ASSERT_EQ(paddle::framework::op_run_num, 0);
op->Run(scope, device_context);
ASSERT_EQ(paddle::framework::op_run_num, 1);
@@ -237,12 +237,12 @@ TEST(OpKernel, multi_inputs) {

paddle::platform::CPUDeviceContext cpu_device_context;
paddle::framework::Scope scope;
scope.NewVar("x0")->GetMutable<Tensor>();
scope.NewVar("x1")->GetMutable<Tensor>();
scope.NewVar("x2")->GetMutable<Tensor>();
scope.NewVar("k0")->GetMutable<Tensor>();
scope.NewVar("y0")->GetMutable<Tensor>();
scope.NewVar("y1")->GetMutable<Tensor>();
scope.Var("x0")->GetMutable<Tensor>();
scope.Var("x1")->GetMutable<Tensor>();
scope.Var("x2")->GetMutable<Tensor>();
scope.Var("k0")->GetMutable<Tensor>();
scope.Var("y0")->GetMutable<Tensor>();
scope.Var("y1")->GetMutable<Tensor>();

auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
op->Run(scope, cpu_device_context);
10 changes: 5 additions & 5 deletions paddle/framework/scope.cc
@@ -31,7 +31,7 @@ Scope& Scope::NewScope() const {
return *kids_.back();
}

Variable* Scope::NewVar(const std::string& name) {
Variable* Scope::Var(const std::string& name) {
auto iter = vars_.find(name);
if (iter != vars_.end()) {
return iter->second;
@@ -42,8 +42,8 @@ Variable* Scope::NewVar(const std::string& name) {
return v;
}

Variable* Scope::NewVar() {
return NewVar(string::Sprintf("%p.%d", this, vars_.size()));
Variable* Scope::Var() {
return Var(string::Sprintf("%p.%d", this, vars_.size()));
}

Variable* Scope::FindVar(const std::string& name) const {
@@ -71,8 +71,8 @@ framework::Scope& GetGlobalScope() {
static std::unique_ptr<framework::Scope> g_scope{nullptr};
std::call_once(feed_variable_flag, [&]() {
g_scope.reset(new framework::Scope());
g_scope->NewVar("feed_value");
g_scope->NewVar("fetch_value");
g_scope->Var("feed_value");
g_scope->Var("fetch_value");
});
return *(g_scope.get());
}
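`GetGlobalScope()` above initializes a process-wide scope lazily and pre-creates the `feed_value` and `fetch_value` variables, guarded by `std::call_once` so that concurrent callers only build it once. A self-contained sketch of that initialization pattern, with a generic stand-in payload instead of `framework::Scope`, is:

```cpp
#include <memory>
#include <mutex>
#include <string>
#include <unordered_map>

// Generic stand-in used only to keep the sketch self-contained.
struct DemoScope {
  std::unordered_map<std::string, int> vars;
  void Var(const std::string& name) { vars.emplace(name, 0); }
};

DemoScope& GetGlobalDemoScope() {
  static std::once_flag flag;
  static std::unique_ptr<DemoScope> g_scope;
  std::call_once(flag, [] {
    g_scope.reset(new DemoScope());
    g_scope->Var("feed_value");   // pre-create the well-known variables,
    g_scope->Var("fetch_value");  // mirroring the real GetGlobalScope()
  });
  return *g_scope;
}
```

A function-local `static DemoScope` (the Meyers singleton) would give the same thread-safe lazy initialization in C++11; `std::call_once` is used here simply to mirror the code shown above.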
4 changes: 2 additions & 2 deletions paddle/framework/scope.h
@@ -45,10 +45,10 @@ class Scope {
Scope& NewScope() const;

/// Create a variable with given name if it doesn't exist.
Variable* NewVar(const std::string& name);
Variable* Var(const std::string& name);

/// Create a variable with a scope-unique name.
Variable* NewVar();
Variable* Var();

/// Find a variable in the scope or any of its ancestors. Returns
/// nullptr if cannot find.
8 changes: 4 additions & 4 deletions paddle/framework/scope_test.cc
@@ -23,8 +23,8 @@ TEST(Scope, VarsShadowing) {
Scope& ss1 = s.NewScope();
Scope& ss2 = s.NewScope();

Variable* v0 = s.NewVar("a");
Variable* v1 = ss1.NewVar("a");
Variable* v0 = s.Var("a");
Variable* v1 = ss1.Var("a");

EXPECT_NE(v0, v1);

@@ -40,7 +40,7 @@ TEST(Scope, FindVar) {
EXPECT_EQ(nullptr, s.FindVar("a"));
EXPECT_EQ(nullptr, ss.FindVar("a"));

ss.NewVar("a");
ss.Var("a");

EXPECT_EQ(nullptr, s.FindVar("a"));
EXPECT_NE(nullptr, ss.FindVar("a"));
@@ -49,7 +49,7 @@ TEST(Scope, FindScope) {
TEST(Scope, FindScope) {
Scope s;
Scope& ss = s.NewScope();
Variable* v = s.NewVar("a");
Variable* v = s.Var("a");

EXPECT_EQ(&s, s.FindScope(v));
EXPECT_EQ(&s, ss.FindScope(v));
2 changes: 1 addition & 1 deletion paddle/operators/cond_op.cc
@@ -134,7 +134,7 @@ void CondOp::PrepareDataForSubnet(
for (int i = 0; i < BRANCH_NUM; ++i) {
for (auto& output : (*sub_net_op_[i]).Outputs()) {
for (auto& var_name : output.second) {
sub_scopes[i]->NewVar(var_name);
sub_scopes[i]->Var(var_name);
}
}
}
4 changes: 2 additions & 2 deletions paddle/operators/dynamic_recurrent_op.cc
@@ -30,7 +30,7 @@ namespace detail {
inline void CreateVariables(Scope& scope,
const std::vector<std::string>& var_names) {
for (const auto& name : var_names) {
scope.NewVar(name);
scope.Var(name);
}
}

@@ -136,7 +136,7 @@ void DynamicRecurrentOp::WriteStepInputs() const {
auto& step_scope = cache_.GetScope(step);
Variable* var = step_scope.FindVar(item.first);
if (var == nullptr) {
var = step_scope.NewVar(item.first);
var = step_scope.Var(item.first);
}
var->GetMutable<LoDTensor>()->ShareDataWith<value_type>(tensor);
}
4 changes: 2 additions & 2 deletions paddle/operators/dynamic_recurrent_op_test.cc
@@ -36,7 +36,7 @@ void OpDescNewVar(const std::string& param_name,
// create a LoD tensor in scope with specific dims
LoDTensor* CreateVar(Scope& scope, std::string name, framework::DDim dims,
const platform::Place& place) {
auto* var = scope.NewVar(name);
auto* var = scope.Var(name);
auto* tensor = var->GetMutable<LoDTensor>();
tensor->Resize(dims);
tensor->mutable_data<float>(place);
@@ -85,7 +85,7 @@ class DynamicRecurrentOpTestHelper : public ::testing::Test {

void CreateGlobalVariables() {
platform::CPUPlace place;
scope.NewVar("step_scopes");
scope.Var("step_scopes");
CreateVar(scope, "boot_mem", framework::make_ddim({10, 20}), place);
CreateVar(scope, "out0", framework::make_ddim({10, 20}), place);
auto* in0 = CreateVar(scope, "in0", framework::make_ddim({10, 8}), place);
10 changes: 5 additions & 5 deletions paddle/operators/recurrent_op.cc
@@ -70,14 +70,14 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope,
// the weight are located in parent scope
for (auto& var_name : input.second) {
if (!step_scope.FindVar(var_name)) {
step_scope.NewVar(var_name)->GetMutable<LoDTensor>();
step_scope.Var(var_name)->GetMutable<LoDTensor>();
}
}
}
// create stepnet's outputs
for (const auto& output : (*stepnet_)->Outputs()) {
for (auto& var_name : output.second) {
step_scope.NewVar(var_name);
step_scope.Var(var_name);
}
}
step_scopes->emplace_back(&step_scope);
@@ -87,7 +87,7 @@

void RecurrentAlgorithm::InitMemories(Scope* step_scope) const {
for (auto& attr : arg_->memories) {
auto* pre_mem = step_scope->NewVar(attr.pre_var)->GetMutable<LoDTensor>();
auto* pre_mem = step_scope->Var(attr.pre_var)->GetMutable<LoDTensor>();
PADDLE_ENFORCE(step_scope->FindVar(attr.boot_var) != nullptr,
"memory [%s]'s boot variable [%s] not exists", attr.var,
attr.boot_var);
@@ -167,9 +167,9 @@ void RecurrentGradientAlgorithm::LinkBootMemoryGradients(
"memory variable [%s] does not exists", attr.var);
PADDLE_ENFORCE(step_scope->FindVar(attr.boot_var) != nullptr,
"boot variable [%s] does not exists", attr.boot_var);
auto* mem_grad = step_scope->NewVar(attr.var)->GetMutable<LoDTensor>();
auto* mem_grad = step_scope->Var(attr.var)->GetMutable<LoDTensor>();
auto* boot_mem_grad =
step_scope->NewVar(attr.boot_var)->GetMutable<LoDTensor>();
step_scope->Var(attr.boot_var)->GetMutable<LoDTensor>();
boot_mem_grad->Resize(mem_grad->dims());
boot_mem_grad->ShareDataWith<float>(*mem_grad);
}
2 changes: 1 addition & 1 deletion paddle/operators/rnn/recurrent_op_utils.cc
@@ -40,7 +40,7 @@ void SegmentInputs(const std::vector<Scope*>& step_scopes,
f::DDim step_dims = slice_ddim(dims, 1, dims.size());
for (size_t j = 0; j < seq_len; j++) {
Tensor* step_input =
step_scopes[j]->NewVar(inlinks[i])->GetMutable<Tensor>();
step_scopes[j]->Var(inlinks[i])->GetMutable<Tensor>();
// The input of operators of each step is Tensor here.
// Maybe need to modify Slice function.
*step_input = input->Slice<float>(j, j + 1);
8 changes: 4 additions & 4 deletions paddle/pybind/protobuf.cc
@@ -145,16 +145,16 @@ void BindBlockDesc(py::module &m) {
py::return_value_policy::reference)
.def("prepend_op", &BlockDescBind::PrependOp,
py::return_value_policy::reference)
.def("new_var",
.def("var",
[](BlockDescBind &self, py::bytes byte_name) {
std::string name = byte_name;
return self.NewVar(name);
return self.Var(name);
},
py::return_value_policy::reference)
.def("var",
.def("find_var",
[](BlockDescBind &self, py::bytes byte_name) {
std::string name = byte_name;
return self.Var(name);
return self.FindVar(name);
},
py::return_value_policy::reference)
.def("all_vars", &BlockDescBind::AllVars,