Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix _hiden_size to _hidden_size #60485

Merged
merged 1 commit into the base branch from the contributor's branch on
Jan 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions test/dygraph_to_static/seq2seq_dygraph_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def __init__(
):
super().__init__(dtype)

self._hiden_size = hidden_size
self._hidden_size = hidden_size
self._param_attr = param_attr
self._bias_attr = bias_attr
self._gate_activation = gate_activation or paddle.nn.functional.sigmoid
Expand All @@ -52,13 +52,13 @@ def __init__(

self._weight = self.create_parameter(
attr=self._param_attr,
shape=[self._input_size + self._hiden_size, 4 * self._hiden_size],
shape=[self._input_size + self._hidden_size, 4 * self._hidden_size],
dtype=self._dtype,
)

self._bias = self.create_parameter(
attr=self._bias_attr,
shape=[4 * self._hiden_size],
shape=[4 * self._hidden_size],
dtype=self._dtype,
is_bias=True,
)
Expand Down
18 changes: 9 additions & 9 deletions test/dygraph_to_static/test_fallback.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,15 +36,15 @@ def unsupport_func(x):
return paddle.to_tensor(t)


class SuppportNet(paddle.nn.Layer):
class SupportNet(paddle.nn.Layer):
def __init__(self):
super().__init__()

def forward(self, x):
return support_func(x)


class UnsuppportNet(paddle.nn.Layer):
class UnsupportNet(paddle.nn.Layer):
def __init__(self):
super().__init__()

Expand Down Expand Up @@ -76,8 +76,8 @@ def test_case_func_fallback(self):
np.testing.assert_allclose(output.numpy(), unsupport_func(self.x))

def test_case_net_fallback(self):
s_net = SuppportNet()
u_net = UnsuppportNet()
s_net = SupportNet()
u_net = UnsupportNet()
np.testing.assert_allclose(
paddle.jit.to_static(s_net)(self.x).numpy(), 4
)
Expand All @@ -92,8 +92,8 @@ def test_case_net_fallback(self):

@test_ast_only
def test_case_net_error(self):
s_net = SuppportNet()
u_net = UnsuppportNet()
s_net = SupportNet()
u_net = UnsupportNet()
np.testing.assert_allclose(
paddle.jit.to_static(s_net)(self.x).numpy(), 4
)
Expand All @@ -111,7 +111,7 @@ def test_case_training(self):
build_strategy = paddle.static.BuildStrategy()
build_strategy.build_cinn_pass = True
u_net = paddle.jit.to_static(
UnsuppportNet(), build_strategy=build_strategy
UnsupportNet(), build_strategy=build_strategy
)
u_net.eval()
np.testing.assert_allclose(u_net(self.x).numpy(), [1, 1])
Expand All @@ -122,7 +122,7 @@ def test_case_save_error(self):
"""
test the save will raise error.
"""
u_net = UnsuppportNet()
u_net = UnsupportNet()
u_net = paddle.jit.to_static(
u_net, input_spec=[paddle.static.InputSpec(name='x', shape=[1])]
)
Expand All @@ -133,7 +133,7 @@ def test_case_save_error_2(self):
"""
test the save will raise error.
"""
u_net = UnsuppportNet()
u_net = UnsupportNet()
build_strategy = paddle.static.BuildStrategy()
build_strategy.build_cinn_pass = True
u_net = paddle.jit.to_static(u_net, build_strategy=build_strategy)
Expand Down