[CodeStyle][NPU] use np.testing.assert_allclose instead of self.assertTrue(np.allclose(...)) (part 1) #44988

Merged: 14 commits merged on Aug 17, 2022
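The change is mechanical throughout this PR: each self.assertTrue(np.allclose(...), msg=...) call becomes a direct np.testing.assert_allclose(..., err_msg=...) call, which on failure reports the mismatched elements and tolerances instead of a bare "False is not true". One caveat motivates the explicit rtol arguments below: np.allclose defaults to rtol=1e-05 and atol=1e-08, while np.testing.assert_allclose defaults to the stricter rtol=1e-07 and atol=0, so the old tolerances must be spelled out to keep the checks equivalent. A minimal sketch of the pattern (the arrays and message are illustrative, not from the PR):

import numpy as np

actual = np.array([1.0, 2.0])
expected = np.array([1.0, 2.0000001])

# Before: a failure reports only "False is not true" plus the msg text.
# self.assertTrue(np.allclose(actual, expected), msg='accuracy check failed')

# After: a failure prints mismatch counts and the offending values.
# rtol/atol restate np.allclose's defaults, since assert_allclose's own
# defaults (rtol=1e-07, atol=0) are stricter.
np.testing.assert_allclose(actual, expected, rtol=1e-05, atol=1e-08,
                           err_msg='accuracy check failed')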
@@ -123,13 +123,13 @@ def convert_to_fp16(self, model_name, model_data_url, model_data_md5,
res_fp16 = self.run_models(save_model_dir, model_filename,
params_filename, input_data, True)

self.assertTrue(
np.allclose(res_fp32,
res_fp16,
rtol=1e-5,
atol=1e-08,
equal_nan=True),
msg='Failed to test the accuracy of the fp32 and fp16 model.')
np.testing.assert_allclose(
res_fp32,
res_fp16,
rtol=1e-05,
atol=1e-08,
equal_nan=True,
err_msg='Failed to test the accuracy of the fp32 and fp16 model.')

try:
os.system("rm -rf {}".format(save_model_dir))
4 changes: 2 additions & 2 deletions python/paddle/fluid/contrib/tests/test_correlation.py
@@ -114,7 +114,7 @@ def test_check_output(self):
},
fetch_list=[out.name, loss.name])

self.assertTrue(np.allclose(res[0], out_np))
np.testing.assert_allclose(res[0], out_np, rtol=1e-05, atol=1e-8)


class Net(fluid.dygraph.Layer):
@@ -159,7 +159,7 @@ def test_check_output(self):
corr_pd = Net('corr_pd')
y = corr_pd(x1, x2)
out = y.numpy()
self.assertTrue(np.allclose(out, out_np))
np.testing.assert_allclose(out, out_np, rtol=1e-05, atol=1e-8)


if __name__ == '__main__':
10 changes: 6 additions & 4 deletions python/paddle/fluid/contrib/tests/test_model_cast_to_bf16.py
@@ -132,10 +132,12 @@ def _graph_common(self, _amp_fun, startup_prog=None):
amp_fun=_amp_fun,
startup_prog=startup_prog)

self.assertTrue(
np.allclose(cutf(static_ret_bf16), cutf(static_ret), 1e-2))
self.assertTrue(
np.allclose(cutf(static_ret_bf16), cutf(ret_fp32bf16), 1e-2))
np.testing.assert_allclose(cutf(static_ret_bf16),
cutf(static_ret),
rtol=0.01)
np.testing.assert_allclose(cutf(static_ret_bf16),
cutf(ret_fp32bf16),
rtol=0.01)

with self.static_graph():
t = layers.data(name='t', shape=[size, size], dtype='float32')
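In the test_model_cast_to_bf16 hunk above, the old code passed 1e-2 as the third positional argument of np.allclose, which is rtol; the replacement simply spells that out as rtol=0.01. A small sketch with illustrative values:

import numpy as np

a = np.array([1.000, 2.000])
b = np.array([1.005, 2.010])

# The third positional argument of np.allclose is rtol, so the old
# np.allclose(x, y, 1e-2) was a 1% relative-tolerance check.
assert np.allclose(a, b, 1e-2)

# The rewrite states the same tolerance by keyword.
np.testing.assert_allclose(a, b, rtol=0.01)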
@@ -228,18 +228,18 @@ def do_test(use_nesterov=False, optimizer=""):
use_nesterov=use_nesterov,
optimizer=optimizer)

self.assertTrue(np.allclose(np.array(train_loss_fp16),
np.array(train_loss_fp32),
rtol=1e-02,
atol=1e-05,
equal_nan=True),
msg='Failed to train in pure FP16.')
self.assertTrue(np.allclose(np.array(test_loss_fp16),
np.array(test_loss_fp32),
rtol=1e-02,
atol=1e-05,
equal_nan=True),
msg='Failed to test in pure FP16.')
np.testing.assert_allclose(np.array(train_loss_fp16),
np.array(train_loss_fp32),
rtol=0.01,
atol=1e-05,
equal_nan=True,
err_msg='Failed to train in pure FP16.')
np.testing.assert_allclose(np.array(test_loss_fp16),
np.array(test_loss_fp32),
rtol=0.01,
atol=1e-05,
equal_nan=True,
err_msg='Failed to test in pure FP16.')

do_test(use_nesterov=False)
do_test(use_nesterov=True)
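A note on the equal_nan=True flag carried over in the pure-FP16 hunk above: np.allclose treats NaN as unequal to everything, including itself, unless equal_nan=True is passed, while np.testing.assert_allclose already defaults to equal_nan=True; the PR keeps the flag explicit so the intent stays visible. An illustrative sketch (values are made up):

import numpy as np

loss_fp16 = np.array([0.5, np.nan])
loss_fp32 = np.array([0.5, np.nan])

# np.allclose defaults to equal_nan=False, so matching NaNs still fail.
assert not np.allclose(loss_fp16, loss_fp32)

# With equal_nan=True (also assert_allclose's default), NaNs in the same
# positions compare equal and the check passes.
np.testing.assert_allclose(loss_fp16, loss_fp32, rtol=0.01, atol=1e-05,
                           equal_nan=True)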
9 changes: 6 additions & 3 deletions python/paddle/fluid/contrib/tests/test_weight_decay_extend.py
@@ -185,9 +185,12 @@ def test_weight_decay(self):
param_sum2 = self.check_weight_decay2(place, model)

for i in range(len(param_sum1)):
self.assertTrue(
np.allclose(param_sum1[i], param_sum2[i]),
"Current place: {}, i: {}, sum1: {}, sum2: {}".format(
np.testing.assert_allclose(
param_sum1[i],
param_sum2[i],
rtol=1e-05,
err_msg='Current place: {}, i: {}, sum1: {}, sum2: {}'.
format(
place, i, param_sum1[i]
[~np.isclose(param_sum1[i], param_sum2[i])],
param_sum2[i]
28 changes: 17 additions & 11 deletions python/paddle/fluid/tests/custom_op/test_custom_tanh_double_grad.py
@@ -77,18 +77,24 @@ def func_double_grad_dynamic(self):
custom_ops.custom_tanh, device, dtype, x)
pd_out, pd_dx_grad, pd_dout = custom_tanh_double_grad_dynamic(
paddle.tanh, device, dtype, x)
self.assertTrue(
np.allclose(out, pd_out),
"custom op out: {},\n paddle api out: {}".format(
np.testing.assert_allclose(
out,
pd_out,
rtol=1e-05,
err_msg='custom op out: {},\n paddle api out: {}'.format(
out, pd_out))
self.assertTrue(
np.allclose(dx_grad, pd_dx_grad),
"custom op dx grad: {},\n paddle api dx grad: {}".format(
dx_grad, pd_dx_grad))
self.assertTrue(
np.allclose(dout, pd_dout),
"custom op out grad: {},\n paddle api out grad: {}".format(
dout, pd_dout))
np.testing.assert_allclose(
dx_grad,
pd_dx_grad,
rtol=1e-05,
err_msg='custom op dx grad: {},\n paddle api dx grad: {}'.
format(dx_grad, pd_dx_grad))
np.testing.assert_allclose(
dout,
pd_dout,
rtol=1e-05,
err_msg='custom op out grad: {},\n paddle api out grad: {}'.
format(dout, pd_dout))

def test_func_double_grad_dynamic(self):
fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
9 changes: 6 additions & 3 deletions python/paddle/fluid/tests/test_if_else_op.py
@@ -201,9 +201,12 @@ def compare_ifelse_op_and_numpy(self, place):
fetch_list=[out])
o2 = self.numpy_cal()

self.assertTrue(
np.allclose(o1, o2, atol=1e-8),
"IfElse result : " + str(o1) + "\n Numpy result :" + str(o2))
np.testing.assert_allclose(
o1,
o2,
rtol=1e-05,
atol=1e-08,
)

def test_cpu(self):
self.compare_ifelse_op_and_numpy(fluid.CPUPlace())
7 changes: 4 additions & 3 deletions python/paddle/fluid/tests/unittests/benchmark.py
@@ -46,9 +46,10 @@ def _assert_cpu_gpu_same(self, cpu_outs, gpu_outs, fetch_list, atol):
actual_t = np.array(item_gpu_out)
var_name = variable if isinstance(
variable, six.string_types) else variable.name
self.assertTrue(
np.allclose(actual_t, expect_t, atol=atol), "Output (" +
var_name + ") has diff" + str(actual_t) + "\n" + str(expect_t))
np.testing.assert_allclose(actual_t,
expect_t,
rtol=1e-05,
atol=atol)
self.assertListEqual(actual.lod(), expect.lod(),
"Output (" + var_name + ") has different lod")

@@ -105,12 +105,11 @@ def check_results(self, no_pass_rets, pass_rets):
if out_var_no_pass is None:
self.assertTrue(out_var_pass is None)
else:
self.assertTrue(
np.allclose(out_var_no_pass,
out_var_pass,
rtol=self.rtol,
atol=self.atol,
equal_nan=self.equal_nan))
np.testing.assert_allclose(out_var_no_pass,
out_var_pass,
rtol=self.rtol,
atol=self.atol,
equal_nan=self.equal_nan)

@classmethod
def _to_var_names(cls, names_or_vars):
@@ -104,9 +104,7 @@ def test_transformed_static_result(self):
self.dygraph_func = func
dygraph_res = self.get_dygraph_output()
static_res = self.get_static_output()
self.assertTrue(np.allclose(dygraph_res, static_res),
msg='dygraph is {}\n static_res is {}'.format(
dygraph_res, static_res))
np.testing.assert_allclose(dygraph_res, static_res, rtol=1e-05)


# 1. test Apis that inherit from layers.Layer
@@ -252,9 +250,7 @@ def get_static_output(self):
def test_transformed_static_result(self):
dygraph_res = self.get_dygraph_output()
static_res = self.get_static_output()
self.assertTrue(np.allclose(dygraph_res, static_res),
msg='dygraph is {}\n static_res is \n{}'.format(
dygraph_res, static_res))
np.testing.assert_allclose(dygraph_res, static_res, rtol=1e-05)


class TestDygraphBasicApi_BilinearTensorProduct(TestDygraphBasicApi):
@@ -419,9 +415,7 @@ def get_static_output(self):
def test_transformed_static_result(self):
dygraph_res = self.get_dygraph_output()
static_res = self.get_static_output()
self.assertTrue(np.allclose(dygraph_res, static_res),
msg='dygraph is {}\n static_res is \n{}'.format(
dygraph_res, static_res))
np.testing.assert_allclose(dygraph_res, static_res, rtol=1e-05)


class TestDygraphBasicApi_ExponentialDecay(TestDygraphBasicApi_CosineDecay):
32 changes: 17 additions & 15 deletions python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py
@@ -181,12 +181,8 @@ def test_train(self):
self.data_reader)
dygraph_loss, dygraph_ppl = self.train_dygraph(self.bert_config,
self.data_reader)
self.assertTrue(np.allclose(static_loss, dygraph_loss),
msg="static_loss: {} \n dygraph_loss: {}".format(
static_loss, dygraph_loss))
self.assertTrue(np.allclose(static_ppl, dygraph_ppl),
msg="static_ppl: {} \n dygraph_ppl: {}".format(
static_ppl, dygraph_ppl))
np.testing.assert_allclose(static_loss, dygraph_loss, rtol=1e-05)
np.testing.assert_allclose(static_ppl, dygraph_ppl, rtol=1e-05)

self.verify_predict()

@@ -200,19 +196,25 @@ def verify_predict(self):
for dy_res, st_res, dy_jit_res, predictor_res in zip(
dygraph_pred_res, static_pred_res, dygraph_jit_pred_res,
predictor_pred_res):
self.assertTrue(
np.allclose(st_res, dy_res),
"dygraph_res: {},\n static_res: {}".format(
np.testing.assert_allclose(
st_res,
dy_res,
rtol=1e-05,
err_msg='dygraph_res: {},\n static_res: {}'.format(
dy_res[~np.isclose(st_res, dy_res)],
st_res[~np.isclose(st_res, dy_res)]))
self.assertTrue(
np.allclose(st_res, dy_jit_res),
"dygraph_jit_res: {},\n static_res: {}".format(
np.testing.assert_allclose(
st_res,
dy_jit_res,
rtol=1e-05,
err_msg='dygraph_jit_res: {},\n static_res: {}'.format(
dy_jit_res[~np.isclose(st_res, dy_jit_res)],
st_res[~np.isclose(st_res, dy_jit_res)]))
self.assertTrue(
np.allclose(st_res, predictor_res),
"dygraph_jit_res: {},\n static_res: {}".format(
np.testing.assert_allclose(
st_res,
predictor_res,
rtol=1e-05,
err_msg='dygraph_jit_res: {},\n static_res: {}'.format(
predictor_res[~np.isclose(st_res, predictor_res)],
st_res[~np.isclose(st_res, predictor_res)]))
break
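Several conversions (test_weight_decay_extend, test_bert above, and test_bmn below) keep an err_msg built with a ~np.isclose mask, so a failure prints only the mismatched elements rather than the full arrays. A self-contained sketch with made-up values:

import numpy as np

st_res = np.array([1.0, 2.0, 3.0])
dy_res = np.array([1.0, 2.5, 3.0])  # one element deliberately differs

# ~np.isclose(...) is a boolean mask of the elements that disagree;
# indexing with it keeps the error message short on large arrays.
mask = ~np.isclose(st_res, dy_res)
try:
    np.testing.assert_allclose(
        st_res,
        dy_res,
        rtol=1e-05,
        err_msg='static: {}, dygraph: {}'.format(st_res[mask], dy_res[mask]))
except AssertionError as e:
    print(e)  # the report includes "static: [2.], dygraph: [2.5]"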
44 changes: 28 additions & 16 deletions python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py
@@ -705,11 +705,14 @@ def test_train(self):

static_res = self.train_bmn(self.args, self.place, to_static=True)
dygraph_res = self.train_bmn(self.args, self.place, to_static=False)
self.assertTrue(
np.allclose(dygraph_res, static_res),
"dygraph_res: {},\n static_res: {}".format(
np.testing.assert_allclose(
dygraph_res,
static_res,
rtol=1e-05,
err_msg='dygraph_res: {},\n static_res: {}'.format(
dygraph_res[~np.isclose(dygraph_res, static_res)],
static_res[~np.isclose(dygraph_res, static_res)]))
static_res[~np.isclose(dygraph_res, static_res)]),
atol=1e-8)

# Prediction needs trained models, so put `test_predict` at last of `test_train`
self.verify_predict()
@@ -728,21 +731,30 @@ def verify_predict(self):
for dy_res, st_res, dy_jit_res, predictor_res in zip(
dygraph_pred_res, static_pred_res, dygraph_jit_pred_res,
predictor_pred_res):
self.assertTrue(
np.allclose(st_res, dy_res),
"dygraph_res: {},\n static_res: {}".format(
np.testing.assert_allclose(
st_res,
dy_res,
rtol=1e-05,
err_msg='dygraph_res: {},\n static_res: {}'.format(
dy_res[~np.isclose(st_res, dy_res)],
st_res[~np.isclose(st_res, dy_res)]))
self.assertTrue(
np.allclose(st_res, dy_jit_res),
"dygraph_jit_res: {},\n static_res: {}".format(
st_res[~np.isclose(st_res, dy_res)]),
atol=1e-8)
np.testing.assert_allclose(
st_res,
dy_jit_res,
rtol=1e-05,
err_msg='dygraph_jit_res: {},\n static_res: {}'.format(
dy_jit_res[~np.isclose(st_res, dy_jit_res)],
st_res[~np.isclose(st_res, dy_jit_res)]))
self.assertTrue(
np.allclose(st_res, predictor_res),
"dygraph_jit_res: {},\n static_res: {}".format(
st_res[~np.isclose(st_res, dy_jit_res)]),
atol=1e-8)
np.testing.assert_allclose(
st_res,
predictor_res,
rtol=1e-05,
err_msg='dygraph_jit_res: {},\n static_res: {}'.format(
predictor_res[~np.isclose(st_res, predictor_res)],
st_res[~np.isclose(st_res, predictor_res)]))
st_res[~np.isclose(st_res, predictor_res)]),
atol=1e-8)
break

def predict_dygraph(self, data):
@@ -230,9 +230,12 @@ def run_static_mode(self):
def test_transformed_static_result(self):
static_res = self.run_static_mode()
dygraph_res = self.run_dygraph_mode()
self.assertTrue(np.allclose(dygraph_res, static_res),
msg='dygraph res is {}\nstatic_res is {}'.format(
dygraph_res, static_res))
np.testing.assert_allclose(
dygraph_res,
static_res,
rtol=1e-05,
err_msg='dygraph res is {}\nstatic_res is {}'.format(
dygraph_res, static_res))


class TestContinueInForAtEnd(TestContinueInFor):
@@ -54,9 +54,11 @@ def test_cache(self):
prev_out, (tuple, list)) else prev_out.numpy()
cur_out_numpy = cur_out[0].numpy() if isinstance(
cur_out, (tuple, list)) else cur_out.numpy()
self.assertTrue(
np.allclose(prev_out_numpy, cur_out_numpy),
msg=
np.testing.assert_allclose(
prev_out_numpy,
cur_out_numpy,
rtol=1e-05,
err_msg=
'Output in previous batch is {}\n Output in current batch is \n{}'
.format(prev_out_numpy, cur_out_numpy))
self.assertEqual(prev_ops, cur_ops)
@@ -106,9 +108,12 @@ def train(self, to_static=False):
def test_with_optimizer(self):
dygraph_loss = self.train_dygraph()
static_loss = self.train_static()
self.assertTrue(np.allclose(dygraph_loss, static_loss),
msg='dygraph is {}\n static_res is \n{}'.format(
dygraph_loss, static_loss))
np.testing.assert_allclose(
dygraph_loss,
static_loss,
rtol=1e-05,
err_msg='dygraph is {}\n static_res is \n{}'.format(
dygraph_loss, static_loss))


def simple_func(x):