Skip to content

Commit

Permalink
Refine UnitTest which have acc issue or unspport issue in sp
Browse files Browse the repository at this point in the history
1.Skip elementwise/relational op unittest if input/output have INF which sp cannot
handle
2.Set different tolerance in Layernom/SoftMax/GRU unittest if SP supported

Type: Bug Fix
Issue: 37103
Signed-off-by: Feiyue Chen <Feiyue.Chen@verisilicon.com>
  • Loading branch information
chenfeiyue-cfy committed Nov 17, 2023
1 parent a24d2be commit ef801f4
Show file tree
Hide file tree
Showing 9 changed files with 48 additions and 17 deletions.
1 change: 1 addition & 0 deletions include/tim/vx/context.h
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ class Context {
virtual std::shared_ptr<Graph> CreateGraph(const CompileOption& options) = 0;

virtual bool isClOnly() = 0;
virtual bool isSpOnly() = 0;

static std::shared_ptr<Context> Create();
};
Expand Down
4 changes: 4 additions & 0 deletions src/tim/vx/context.cc
Original file line number Diff line number Diff line change
Expand Up @@ -58,5 +58,9 @@ bool ContextImpl::isClOnly() {
return VSI_NN_HW_EVIS_NONE == context_->config.evis.ver;
}

bool ContextImpl::isSpOnly() {
return 0 != context_->config.support_stream_processor;
}

} // namespace vx
} // namespace tim
3 changes: 2 additions & 1 deletion src/tim/vx/context_private.h
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ class ContextImpl : public Context {
std::shared_ptr<Graph> CreateGraph() override;
std::shared_ptr<Graph> CreateGraph(const CompileOption&) override;
bool isClOnly() override;

bool isSpOnly() override;

protected:
vsi_nn_context_t context_;
};
Expand Down
2 changes: 2 additions & 0 deletions src/tim/vx/ops/elementwise_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
TEST(FloorDiv, shape_1_fp32) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
if (ctx->isSpOnly()) GTEST_SKIP();

tim::vx::ShapeType io_shape({1});
tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32,
Expand Down Expand Up @@ -135,6 +136,7 @@ TEST(FloorDiv, shape_5_1_broadcast_uint8) {
TEST(Div, shape_1_fp32) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
if (ctx->isSpOnly()) GTEST_SKIP();

tim::vx::ShapeType io_shape({1});
tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32,
Expand Down
3 changes: 2 additions & 1 deletion src/tim/vx/ops/grucell_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ std::shared_ptr<tim::vx::Tensor> make_empty_tensor(
TEST(GRUCell, unit_4) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
float tolerance = ctx->isSpOnly() ? 1e-4f : 1e-5f;

uint32_t num_units = 2;
uint32_t feature = 4;
Expand Down Expand Up @@ -118,5 +119,5 @@ TEST(GRUCell, unit_4) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
EXPECT_TRUE(ArraysMatch(golden, output, tolerance));
}
9 changes: 6 additions & 3 deletions src/tim/vx/ops/layernormalization_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
TEST(LayerNorm, axis_0_shape_3_6_1_float) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
float tolerance = ctx->isSpOnly() ? 0.01 : 1e-5f;

tim::vx::ShapeType io_shape({3, 6, 1});
tim::vx::ShapeType param_shape({6});
Expand Down Expand Up @@ -81,12 +82,13 @@ TEST(LayerNorm, axis_0_shape_3_6_1_float) {

std::vector<float> output(18);
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
EXPECT_TRUE(ArraysMatch(golden, output, tolerance));
}

TEST(LayerNorm, axis_0_shape_2_3_6_1_float) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
float tolerance = ctx->isSpOnly() ? 0.01 : 1e-5f;

tim::vx::ShapeType io_shape({2, 3, 6, 1});
tim::vx::ShapeType param_shape({6});
Expand Down Expand Up @@ -139,12 +141,13 @@ TEST(LayerNorm, axis_0_shape_2_3_6_1_float) {

std::vector<float> output(36);
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
EXPECT_TRUE(ArraysMatch(golden, output, tolerance));
}

TEST(LayerNorm, axis_2_shape_4_2_3_1_float) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
float tolerance = ctx->isSpOnly() ? 0.01 : 1e-5f;

tim::vx::ShapeType io_shape({4, 2, 3, 1});
tim::vx::ShapeType param_shape({1,1,3,1});
Expand Down Expand Up @@ -194,7 +197,7 @@ TEST(LayerNorm, axis_2_shape_4_2_3_1_float) {

std::vector<float> output(24);
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
EXPECT_TRUE(ArraysMatch(golden, output, tolerance));
}

#if 0
Expand Down
5 changes: 5 additions & 0 deletions src/tim/vx/ops/relational_operations_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ TEST(Equal, shape_1_uint8) {

TEST(NotEqual, shape_5_fp32) {
auto ctx = tim::vx::Context::Create();
if (ctx->isSpOnly()) GTEST_SKIP();
auto graph = ctx->CreateGraph();

tim::vx::ShapeType io_shape({5});
Expand Down Expand Up @@ -101,6 +102,7 @@ TEST(NotEqual, shape_5_fp32) {
TEST(Less, shape_5_1_fp32) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
if (ctx->isSpOnly()) GTEST_SKIP();

tim::vx::ShapeType io_shape({1,5});
tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32,
Expand Down Expand Up @@ -136,6 +138,7 @@ TEST(Less, shape_5_1_fp32) {
TEST(GreaterOrEqual, shape_5_2_1_fp32) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
if (ctx->isSpOnly()) GTEST_SKIP();

tim::vx::ShapeType io_shape({5,2,1});
tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32,
Expand Down Expand Up @@ -175,6 +178,7 @@ TEST(GreaterOrEqual, shape_5_2_1_fp32) {
TEST(Greater, shape_5_2_1_1_fp32) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
if (ctx->isSpOnly()) GTEST_SKIP();

tim::vx::ShapeType io_shape({5,2,1,1});
tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32,
Expand Down Expand Up @@ -214,6 +218,7 @@ TEST(Greater, shape_5_2_1_1_fp32) {
TEST(LessOrEqual, shape_1_5_2_1_1_fp32) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
if (ctx->isSpOnly()) GTEST_SKIP();

tim::vx::ShapeType io_shape({1,5,2,1,1});
tim::vx::TensorSpec input_spec(tim::vx::DataType::FLOAT32,
Expand Down
35 changes: 24 additions & 11 deletions src/tim/vx/ops/softmax_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,10 @@ TEST(Softmax, shape_3_1_float_axis_0) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
if (!ctx->isSpOnly())
EXPECT_EQ(golden, output);
else
EXPECT_TRUE(ArraysMatch(golden, output, 1e-3f));
}

TEST(Softmax, shape_3_4_float_axis_0) {
Expand Down Expand Up @@ -96,8 +99,10 @@ TEST(Softmax, shape_3_4_float_axis_0) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_EQ(golden, output);
// EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
if (!ctx->isSpOnly())
EXPECT_EQ(golden, output);
else
EXPECT_TRUE(ArraysMatch(golden, output, 1e-3f));
}

TEST(Softmax, shape_3_4_float_axis_1) {
Expand Down Expand Up @@ -136,8 +141,10 @@ TEST(Softmax, shape_3_4_float_axis_1) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_EQ(golden, output);
// EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
if (!ctx->isSpOnly())
EXPECT_EQ(golden, output);
else
EXPECT_TRUE(ArraysMatch(golden, output, 1e-3f));
}

TEST(Softmax, shape_3_3_2_float_axis_0) {
Expand Down Expand Up @@ -182,8 +189,10 @@ TEST(Softmax, shape_3_3_2_float_axis_0) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_EQ(golden, output);
// EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
if (!ctx->isSpOnly())
EXPECT_EQ(golden, output);
else
EXPECT_TRUE(ArraysMatch(golden, output, 1e-3f));
}

TEST(Softmax, shape_3_3_2_float_axis_1) {
Expand Down Expand Up @@ -228,8 +237,10 @@ TEST(Softmax, shape_3_3_2_float_axis_1) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_EQ(golden, output);
// EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
if (!ctx->isSpOnly())
EXPECT_EQ(golden, output);
else
EXPECT_TRUE(ArraysMatch(golden, output, 1e-3f));
}

TEST(Softmax, shape_3_3_2_float_axis_2) {
Expand Down Expand Up @@ -274,6 +285,8 @@ TEST(Softmax, shape_3_3_2_float_axis_2) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_EQ(golden, output);
// EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
if (!ctx->isSpOnly())
EXPECT_EQ(golden, output);
else
EXPECT_TRUE(ArraysMatch(golden, output, 1e-3f));
}
3 changes: 2 additions & 1 deletion src/tim/vx/ops/unidirectional_sequence_gru_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ std::shared_ptr<tim::vx::Tensor> make_empty_tensor(
TEST(UnidirectionalSequenceGRU, unit_3) {
auto ctx = tim::vx::Context::Create();
auto graph = ctx->CreateGraph();
float tolerance = ctx->isSpOnly() ? 1e-4f : 1e-5f;

const int timesteps = 1;
const int batchs = 1;
Expand Down Expand Up @@ -125,5 +126,5 @@ TEST(UnidirectionalSequenceGRU, unit_3) {

std::vector<float> output(golden.size());
EXPECT_TRUE(output_tensor->CopyDataFromTensor(output.data()));
EXPECT_TRUE(ArraysMatch(golden, output, 1e-5f));
EXPECT_TRUE(ArraysMatch(golden, output, tolerance));
}

0 comments on commit ef801f4

Please sign in to comment.