Skip to content

Commit

Permalink
fix log_softmax when any input dimension has size 0 (empty tensor)
Browse files Browse the repository at this point in the history
  • Loading branch information
juncaipeng committed Aug 5, 2021
1 parent 7a38b76 commit ea60301
Showing 1 changed file with 9 additions and 4 deletions.
13 changes: 9 additions & 4 deletions paddle/fluid/operators/log_softmax_op.h
Original file line number Diff line number Diff line change
Expand Up @@ -131,8 +131,10 @@ class LogSoftmaxKernel : public framework::OpKernel<T> {
// allocate memory on device.
Out->mutable_data<T>(context.GetPlace());

LogSoftmaxFunctor<DeviceContext, T>()(
context.template device_context<DeviceContext>(), X, Out, axis);
if (X->numel() != 0) {
LogSoftmaxFunctor<DeviceContext, T>()(
context.template device_context<DeviceContext>(), X, Out, axis);
}
}
};

Expand Down Expand Up @@ -183,8 +185,11 @@ class LogSoftmaxGradKernel : public framework::OpKernel<T> {
// allocate memory on device.
dX->mutable_data<T>(context.GetPlace());

LogSoftmaxGradFunctor<DeviceContext, T>()(
context.template device_context<DeviceContext>(), Out, dOut, dX, axis);
if (Out->numel() != 0) {
LogSoftmaxGradFunctor<DeviceContext, T>()(
context.template device_context<DeviceContext>(), Out, dOut, dX,
axis);
}
}
};

Expand Down

1 comment on commit ea60301

@paddle-bot-old
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Congratulations! Your pull request passed all required CI. You could ask reviewer(s) to approve and merge. 🎉

Please sign in to comment.