Cherry-pick-PR41407, fix device_id bug for final_state op in multiprocess testcase (PaddlePaddle#41407)

* support final_state in multiprocess

* fix no place.device

* set device_id in eager_gen
pangyoki committed Apr 6, 2022
1 parent 5b85f3d commit 2963187
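
The fix: the generated final_state forward code did not set the CUDA device id from the expected place before calling the forward API, which broke the final_state ops in the multiprocess test case. Below is a hypothetical repro sketch of that scenario (not part of this PR; it assumes at least two visible GPUs and an eager-mode build with final_state ops):

```python
# Hypothetical repro sketch (not from the PR): run an eager-mode op in
# spawned workers pinned to different GPUs. Before this fix, the generated
# final_state forward code never called SetDeviceId for the expected place.
import multiprocessing as mp


def worker(device_id):
    import paddle  # import inside the worker so the spawn context stays clean
    paddle.set_device("gpu:%d" % device_id)
    x = paddle.ones([2, 2])
    y = x + x  # dispatched to final_state_add in eager mode (see math_op_patch diff below)
    print(device_id, y.sum().numpy())


if __name__ == "__main__":
    ctx = mp.get_context("spawn")
    workers = [ctx.Process(target=worker, args=(i,)) for i in range(2)]
    for p in workers:
        p.start()
    for p in workers:
        p.join()
```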
Showing 3 changed files with 16 additions and 4 deletions.
@@ -194,6 +194,16 @@ class {} : public egr::GradNodeBase {{
 // Get Input AutoGradMeta
 {}
+// Set Device Id
+auto place = egr::Controller::Instance().GetExpectedPlace();
+if (paddle::platform::is_gpu_place(place)) {{
+#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
+phi::backends::gpu::SetDeviceId(place.device);
+#else
+PADDLE_THROW(paddle::platform::errors::PreconditionNotMet(
+    "PaddlePaddle should compile with GPU if use CUDAPlace."));
+#endif
+}}
 // Forward API Call
 {}
 // Get Outputs
@@ -284,6 +294,7 @@ class {} : public egr::GradNodeBase {{
 #include "paddle/fluid/platform/profiler/event_tracing.h"
 #include "paddle/fluid/eager/amp_utils.h"
 #include "paddle/fluid/eager/eager_amp_auto_cast.h"
+#include "paddle/phi/backends/gpu/gpu_info.h"
 {}
 {}
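
For context, the doubled braces (`{{`, `}}`) in the two hunks above are `str.format` escapes: this C++ block lives inside a Python template string in the eager code generator, and the bare `{}` slots are filled per generated API. A minimal sketch of that mechanism (`FORWARD_BODY_TEMPLATE`, `forward_call`, and the example call string are illustrative, not the generator's real identifiers):

```python
# Minimal sketch of how the eager code generator fills a C++ template string.
# FORWARD_BODY_TEMPLATE and forward_call are illustrative names; the C++ text
# is the device-id block added in the hunk above.
FORWARD_BODY_TEMPLATE = """\
  // Set Device Id
  auto place = egr::Controller::Instance().GetExpectedPlace();
  if (paddle::platform::is_gpu_place(place)) {{
#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
    phi::backends::gpu::SetDeviceId(place.device);
#else
    PADDLE_THROW(paddle::platform::errors::PreconditionNotMet(
        "PaddlePaddle should compile with GPU if use CUDAPlace."));
#endif
  }}
  // Forward API Call
  {forward_call}
"""


def render_forward_body(forward_call: str) -> str:
    # str.format turns '{{' / '}}' back into literal braces and fills the slot.
    return FORWARD_BODY_TEMPLATE.format(forward_call=forward_call)


if __name__ == "__main__":
    # The call string below is made up for illustration only.
    print(render_forward_body("auto api_result = some_final_state_api(x, y);"))
```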
python/paddle/fluid/dygraph/math_op_patch.py (4 additions, 1 deletion)
@@ -270,7 +270,10 @@ def __impl__(self, other_var):
 
 # 4. calculation
 axis = -1
-math_op = getattr(_C_ops, op_type)
+if framework._in_eager_mode_ and op_type == 'elementwise_add':
+    math_op = getattr(_C_ops, 'final_state_add')
+else:
+    math_op = getattr(_C_ops, op_type)
 return math_op(self, other_var, 'axis', axis)
 
 comment = OpProtoHolder.instance().get_op_proto(op_type).comment
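
The effect of the hunk above, restated as a standalone helper (a paraphrase for readability, not code from the repository; `framework._in_eager_mode_`, `op_type`, and the `_C_ops` attribute names are taken from the diff):

```python
# Paraphrase of the new dispatch in __impl__: in eager (dygraph) mode the
# '+' operator resolves to the final_state add kernel, while every other
# op_type keeps the legacy getattr path.
from paddle import _C_ops
from paddle.fluid import framework


def resolve_math_op(op_type):
    if framework._in_eager_mode_ and op_type == 'elementwise_add':
        return getattr(_C_ops, 'final_state_add')
    return getattr(_C_ops, op_type)
```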
python/paddle/fluid/tests/unittests/test_inplace.py (1 addition, 3 deletions)
@@ -103,9 +103,7 @@ def func_test_backward_success_2(self):
 
 var_b[1:2] = 3 # var_b is modified inplace before using it
 
-var_c = paddle.add(
-    var_b,
-    var_b) # Here, the grad op of sum doesn't use the value of var_b
+var_c = var_b + var_b # Here, the grad op of sum doesn't use the value of var_b
 loss = var_c.sum()
 
 var_b[1:2] = 3 # var_b is modified inplace after using it
