Commit

rename the fuse op, test=allcase (#34120)
FeixLiu committed Jul 14, 2021
1 parent 14fd6cf commit 6febe5f
Showing 5 changed files with 13 additions and 13 deletions.
[Changed file: C++ operator definition and registration (file path not shown)]
@@ -10,7 +10,7 @@ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/softmax_mask_fuse_upper_triangle_op.h"
#include "paddle/fluid/operators/fused_softmax_mask_upper_triangle_op.h"
#include "paddle/fluid/framework/generator.h"
#include "paddle/fluid/framework/op_registry.h"
namespace paddle {
@@ -82,7 +82,7 @@ class SoftmaxMaskFuseUpperTriangleGradOpMaker

protected:
void Apply(GradOpPtr<T> op) const override {
op->SetType("softmax_mask_fuse_upper_triangle_grad");
op->SetType("fused_softmax_mask_upper_triangle_grad");
op->SetInput("Softmax", this->Output("Out"));
op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
@@ -94,13 +94,13 @@ class SoftmaxMaskFuseUpperTriangleGradOpMaker

namespace ops = paddle::operators;
REGISTER_OPERATOR(
-softmax_mask_fuse_upper_triangle, ops::SoftmaxMaskFuseUpperTriangleOp,
+fused_softmax_mask_upper_triangle, ops::SoftmaxMaskFuseUpperTriangleOp,
ops::SoftmaxMaskFuseUpperTriangleOpMaker,
ops::SoftmaxMaskFuseUpperTriangleGradOpMaker<paddle::framework::OpDesc>,
ops::SoftmaxMaskFuseUpperTriangleGradOpMaker<paddle::imperative::OpBase>);
-REGISTER_OPERATOR(softmax_mask_fuse_upper_triangle_grad,
+REGISTER_OPERATOR(fused_softmax_mask_upper_triangle_grad,
ops::SoftmaxMaskFuseUpperTriangleOpGrad);
-REGISTER_OP_CPU_KERNEL(softmax_mask_fuse_upper_triangle,
+REGISTER_OP_CPU_KERNEL(fused_softmax_mask_upper_triangle,
ops::SoftmaxMaskFuseUpperTriangleCPUKernel<
paddle::platform::CPUDeviceContext, float>,
ops::SoftmaxMaskFuseUpperTriangleCPUKernel<
[Changed file: CUDA kernel registration (file path not shown)]
@@ -31,7 +31,7 @@ limitations under the License. */
#include "paddle/fluid/framework/generator.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/memory/memcpy.h"
#include "paddle/fluid/operators/softmax_mask_fuse_upper_triangle_op.h"
#include "paddle/fluid/operators/fused_softmax_mask_upper_triangle_op.h"
#include "paddle/fluid/platform/float16.h"

namespace paddle {
@@ -534,12 +534,12 @@ class SoftmaxMaskFuseUpperTriangleGradKernel : public framework::OpKernel<T> {
namespace ops = paddle::operators;
namespace plat = paddle::platform;
REGISTER_OP_CUDA_KERNEL(
-softmax_mask_fuse_upper_triangle,
+fused_softmax_mask_upper_triangle,
ops::SoftmaxMaskFuseUpperTriangleKernel<plat::CUDADeviceContext,
plat::float16>,
ops::SoftmaxMaskFuseUpperTriangleKernel<plat::CUDADeviceContext, float>);
REGISTER_OP_CUDA_KERNEL(
-softmax_mask_fuse_upper_triangle_grad,
+fused_softmax_mask_upper_triangle_grad,
ops::SoftmaxMaskFuseUpperTriangleGradKernel<plat::CUDADeviceContext,
plat::float16>,
ops::SoftmaxMaskFuseUpperTriangleGradKernel<plat::CUDADeviceContext,
[Changed file: Python unit test for the op (file path not shown)]
@@ -42,7 +42,7 @@ def _get_softmax_upper(x, fp16=True):
"core is not compiled with CUDA")
class TestSoftmaxMaskFuseOp(OpTest):
def setUp(self):
self.op_type = "softmax_mask_fuse_upper_triangle"
self.op_type = "fused_softmax_mask_upper_triangle"
x = np.random.random((1, 1, 32, 32)).astype("float16")
self.inputs = {'X': x}
rst = _get_softmax_upper(x)
@@ -59,7 +59,7 @@ def test_check_grad(self):
"core is not compiled with CUDA")
class TestSoftmaxMaskFuseOp1(OpTest):
def setUp(self):
self.op_type = "softmax_mask_fuse_upper_triangle"
self.op_type = "fused_softmax_mask_upper_triangle"
x = np.random.random((1, 1, 32, 32))
self.inputs = {'X': x}
rst = _get_softmax_upper(x)
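The test's reference helper _get_softmax_upper is only partially visible in this diff. Purely for orientation, here is a minimal NumPy sketch of what an upper-triangle masked softmax typically computes; the -10000 fill value, the casting details, and the helper body are assumptions, not the test file's actual code.

import numpy as np

def _get_softmax_upper_sketch(x, fp16=True):
    # Hypothetical reference (not the test's actual helper): fill the strictly
    # upper triangle of the last two dims with a large negative value, then
    # apply softmax along the last axis.
    x = x.astype("float32")
    seq_len = x.shape[-1]
    upper = np.triu(np.ones((seq_len, seq_len), dtype=bool), k=1)
    masked = np.where(upper, -10000.0, x)
    masked = masked - masked.max(axis=-1, keepdims=True)  # numerical stability
    exp = np.exp(masked)
    out = exp / exp.sum(axis=-1, keepdims=True)
    return out.astype("float16") if fp16 else out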
[Changed file: Python API wrapper (file path not shown)]
@@ -28,15 +28,15 @@ def softmax_mask_fuse_upper_triangle(x):
:return: the result of softmax mask fuse (upper triangle)
"""
if in_dygraph_mode():
-out = core.ops.softmax_mask_fuse_upper_triangle(x)
+out = core.ops.fused_softmax_mask_upper_triangle(x)
return out

-helper = LayerHelper('softmax_mask_fuse_upper_triangle', **locals())
+helper = LayerHelper('fused_softmax_mask_upper_triangle', **locals())

out = helper.create_variable_for_type_inference(dtype=x.dtype)

helper.append_op(
-type='softmax_mask_fuse_upper_triangle',
+type='fused_softmax_mask_upper_triangle',
inputs={'X': [x]},
outputs={'Out': [out]})
return out
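For completeness, a hedged usage sketch of the wrapper above after the rename: the public Python function keeps its original name, and only the op type it dispatches to changes. The paddle.incubate import path and the GPU requirement are inferred from the surrounding files, not stated in this diff.

import numpy as np
import paddle

# The fused kernel is exercised only under CUDA in the unit test above.
paddle.set_device("gpu")

# Attention scores shaped [batch, heads, seq_len, seq_len], as in the test.
x = paddle.to_tensor(np.random.random((1, 1, 32, 32)).astype("float16"))

# Assumed public entry point; the underlying op type is now
# "fused_softmax_mask_upper_triangle".
out = paddle.incubate.softmax_mask_fuse_upper_triangle(x)
print(out.shape)  # [1, 1, 32, 32]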
