[GPU] Add unit tests for resample_onnx kernel (openvinotoolkit#27893)
### Details:
- *Check padding values for blocked formats when the feature size is not aligned to 16.*

### Tickets:
 - *157149*
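
For background, a minimal standalone sketch (plain C++, not the OpenVINO/cldnn API; the feature count of 24 and the 16/32 block sizes come from the test shapes below, everything else is illustrative): blocked layouts such as `b_fs_yx_fsv16` or the `*_fsv32` variants round the feature dimension up to the block size, so a tensor with 24 features carries padded feature slots whose values the kernel must still write consistently.

```cpp
#include <cstddef>
#include <iostream>

// Round a feature count up to the next multiple of the block size,
// mirroring how fsv16/fsv32 blocked layouts pad the feature dimension.
static std::size_t aligned_features(std::size_t features, std::size_t block) {
    return (features + block - 1) / block * block;
}

int main() {
    const std::size_t features = 24;        // not a multiple of 16, as in the 1x24x13x13 cases
    const std::size_t blocks[] = {16, 32};  // fsv16 and fsv32 blocking
    for (std::size_t block : blocks) {
        const std::size_t padded = aligned_features(features, block);
        std::cout << "fsv" << block << ": " << features << " -> " << padded
                  << " (" << (padded - features) << " padded feature slots per spatial position)\n";
    }
    return 0;
}
```

The new 1x24x13x13 -> 1x24x26x26 fsv16/fsv32 cases exercise exactly this misaligned situation, comparing the forced `resample_onnx` implementation against `resample_ref` element by element.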
jade-cho authored Dec 4, 2024
1 parent 571e98d commit b2a2270
Showing 1 changed file with 116 additions and 0 deletions.
116 changes: 116 additions & 0 deletions src/plugins/intel_gpu/tests/unit/test_cases/resample_gpu_test.cpp
@@ -2198,6 +2198,91 @@ struct resample_opt_random_test_ext : resample_opt_random_test
}
};

struct resample_onnx_random_test : resample_opt_random_test {
    template <typename T>
    void compare(const memory::ptr out_ref, const memory::ptr out_opt) {
        auto ref_count = out_ref->count();
        auto opt_count = out_opt->count();

        ASSERT_EQ(ref_count, opt_count);

        mem_lock<T> ref_ptr{out_ref, get_test_stream()};
        mem_lock<T> opt_ptr{out_opt, get_test_stream()};
        for (size_t i = 0; i < ref_count; ++i) {
            ASSERT_NEAR(static_cast<float>(opt_ptr[i]), static_cast<float>(ref_ptr[i]), 1.e-1f);
        }
    }

    void execute_compare(const resample_opt_random_test_params& params, bool check_result) {
        auto& engine = get_test_engine();

        const format origin_format = format::dimension(params.in_format) == 4 ? format::bfyx : format::bfzyx;
        auto in_layout = layout(params.input_type, origin_format, params.input_size);
        auto in_mem = engine.allocate_memory(in_layout);
        fill_random(in_mem);

        // Reference topology: input in plain bfyx/bfzyx, reordered to params.in_format and run with the resample_ref kernel
        cldnn::topology topo;
        topo.add(input_layout("in", in_layout));
        topo.add(reorder("in_to_input_type", input_info("in"), params.in_format, params.input_type));
        auto prim = resample("resample", input_info("in_to_input_type"), params.output_size, params.num_filter, params.operation_type);
        topo.add(prim);
        topo.add(reorder("res_to_bfyx", input_info("resample"), origin_format, params.input_type));

        ExecutionConfig config = get_test_default_config(engine);
        config.set_property(ov::intel_gpu::custom_outputs(std::vector<std::string>{"resample", "res_to_bfyx"}));
        ov::intel_gpu::ImplementationDesc resample_impl = { params.in_format, "resample_ref", impl_types::ocl };
        config.set_property(ov::intel_gpu::force_implementations(ov::intel_gpu::ImplForcingMap{ { "resample", resample_impl } }));

        network net(engine, topo, config);
        net.set_input_data("in", in_mem);

        // first execution of ref
        auto result = net.execute();
        auto output = result.at("resample").get_memory();

        cldnn::topology topo_opt;
        topo_opt.add(input_layout("in", in_layout));
        topo_opt.add(reorder("in_to_input_type", input_info("in"), params.in_format, params.input_type));
        auto prim_opt = resample("resample_opt", input_info("in_to_input_type"), params.output_size, params.num_filter, params.operation_type);
        topo_opt.add(prim_opt);
        topo_opt.add(reorder("res_to_bfyx", input_info("resample_opt"), origin_format, params.input_type));

        ExecutionConfig config_opt = get_test_default_config(engine);
        config_opt.set_property(ov::intel_gpu::custom_outputs(std::vector<std::string>{"resample_opt", "res_to_bfyx"}));
        ov::intel_gpu::ImplementationDesc resample_opt_impl = { params.in_format, "resample_onnx", impl_types::ocl };
        config_opt.set_property(ov::intel_gpu::force_implementations(ov::intel_gpu::ImplForcingMap{ { "resample_opt", resample_opt_impl } }));

        cldnn::network::ptr net_opt = get_network(engine, topo_opt, config_opt, get_test_stream_ptr(), false);

        // Use in_mem from ref network
        net_opt->set_input_data("in", in_mem);

        // first execution of opt
        auto result_opt = net_opt->execute();
        auto output_opt = result_opt.at("resample_opt").get_memory();
        if (!format::is_simple_data_format(params.in_format)) {
            ASSERT_FALSE(format::is_simple_data_format(result_opt.at("resample_opt").get_memory()->get_layout().format));
        }

        // Compare not only the outputs but also the padding values for blocked formats.
        if (check_result) {
            // Check data_types
            if (params.input_type == data_types::f32) {
                compare<float>(output, output_opt);
            } else if (params.input_type == data_types::f16) {
                compare<ov::float16>(output, output_opt);
            } else if (params.input_type == data_types::i8) {
                compare<int8_t>(output, output_opt);
            } else if (params.input_type == data_types::u8) {
                compare<uint8_t>(output, output_opt);
            } else {
                FAIL() << "Unsupported data type: " << static_cast<size_t>(params.input_type);
            }
        }
    }
};

TEST_P(resample_opt_random_test, random) {
    auto param = GetParam();
    execute_compare(param, true, false);
@@ -2213,6 +2298,37 @@ TEST_P(resample_opt_random_test_ext, DISABLED_random) {
    execute_perf_test(param, "resample_ref", true);
}

TEST_P(resample_onnx_random_test, random) {
    auto param = GetParam();
    execute_compare(param, true);
}

INSTANTIATE_TEST_SUITE_P(resample_onnx_smoke_not_aligned,
                         resample_onnx_random_test,
                         testing::ValuesIn(
                             std::vector<resample_opt_random_test_params>{
                                 { data_types::f16, {1, 24, 13, 13}, {1, 24, 26, 26}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::b_fs_yx_fsv32, format::b_fs_yx_fsv32, {}, {}},
                                 { data_types::f16, {1, 24, 13, 13}, {1, 24, 26, 26}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_yx_bsv32_fsv16, format::bs_fs_yx_bsv32_fsv16, {}, {}},
                                 { data_types::f16, {1, 24, 13, 13}, {1, 24, 26, 26}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_yx_bsv32_fsv32, format::bs_fs_yx_bsv32_fsv32, {}, {}},
                                 { data_types::f16, {1, 24, 13, 13}, {1, 24, 26, 26}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_yx_bsv16_fsv16, format::bs_fs_yx_bsv16_fsv16, {}, {}},
                                 { data_types::f16, {1, 24, 13, 13}, {1, 24, 26, 26}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::b_fs_yx_fsv16, format::b_fs_yx_fsv32, {}, {}},

                                 { data_types::f16, {1, 9, 13, 13, 5}, {1, 9, 26, 26, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::b_fs_zyx_fsv16, format::b_fs_zyx_fsv16, {}, {}},
                                 { data_types::f32, {1, 9, 13, 13, 5}, {1, 9, 26, 26, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::b_fs_zyx_fsv16, format::b_fs_zyx_fsv16, {}, {}},
                                 { data_types::f16, {16, 9, 7, 7, 5}, {16, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv16_fsv16, format::bs_fs_zyx_bsv16_fsv16, {}, {}},
                                 { data_types::f32, {16, 9, 7, 7, 5}, {16, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv16_fsv16, format::bs_fs_zyx_bsv16_fsv16, {}, {}},
                                 { data_types::f16, {32, 9, 7, 7, 5}, {32, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv32_fsv16, format::bs_fs_zyx_bsv32_fsv16, {}, {}},
                                 { data_types::f32, {32, 9, 7, 7, 5}, {32, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv32_fsv16, format::bs_fs_zyx_bsv32_fsv16, {}, {}},

                                 { data_types::i8, {1, 9, 13, 13, 5}, {1, 9, 26, 26, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::b_fs_zyx_fsv32, format::b_fs_zyx_fsv32, {}, {}},
                                 { data_types::u8, {1, 9, 13, 13, 5}, {1, 9, 26, 26, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::b_fs_zyx_fsv32, format::b_fs_zyx_fsv32, {}, {}},
                                 { data_types::i8, {16, 9, 7, 7, 5}, {16, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv16_fsv32, format::bs_fs_zyx_bsv16_fsv32, {}, {}},
                                 { data_types::u8, {16, 9, 7, 7, 5}, {16, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv16_fsv32, format::bs_fs_zyx_bsv16_fsv32, {}, {}},
                                 { data_types::i8, {32, 9, 7, 7, 5}, {32, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv32_fsv32, format::bs_fs_zyx_bsv32_fsv32, {}, {}},
                                 { data_types::u8, {32, 9, 7, 7, 5}, {32, 9, 14, 14, 5}, 1, resample::InterpolateOp::InterpolateMode::LINEAR_ONNX, 1, format::bs_fs_zyx_bsv32_fsv32, format::bs_fs_zyx_bsv32_fsv32, {}, {}},
                             }
                         ));

INSTANTIATE_TEST_SUITE_P(resample_opt_smoke_nearest,
                         resample_opt_random_test,
                         testing::ValuesIn(