Merge remote-tracking branch 'upstream/master' into itikhono/transformations/ngraph_to_ov_refactoring
itikhono committed Sep 28, 2023
2 parents 47cf4e4 + 886be26 commit f03dff4
Showing 4 changed files with 11 additions and 1 deletion.
@@ -213,6 +213,9 @@ bool ConvolutionKernel_b_fs_yx_fsv_16_32_imad_dw::ValidateAutoTuneParams(const c
                                                                           const AutoTuneParams& tparams) const {
     bool valid_tune_params = true;

+    if (!IsSIMDSizeSupported(params.engineInfo, tparams.simd))
+        return false;
+
     auto total_lws = tparams.simd * tparams.lws0 * tparams.lws1;
     valid_tune_params &= total_lws <= params.engineInfo.maxWorkGroupSize;

@@ -183,6 +183,10 @@ bool GemmKernelMMADint8::Validate(const Params& params, const optional_params& o
         (input1_type != Datatype::UINT8 && input1_type != Datatype::INT8))
         return false;

+    GemmTuningData tuning_data = SetTuningParams(gmm_params);
+    if (!IsSIMDSizeSupported(params.engineInfo, tuning_data.simd_size))
+        return false;
+
     return true;
 }
 }  // namespace kernel_selector
@@ -161,6 +161,9 @@ bool GemmKernelMMADslmInt8::Validate(const Params& params, const optional_params
     if (HasLeftovers(tuning_data))
         return false;

+    if (!IsSIMDSizeSupported(params.engineInfo, tuning_data.simd_size))
+        return false;
+
     if ((input0_type != Datatype::UINT8 && input0_type != Datatype::INT8) ||
         (input1_type != Datatype::UINT8 && input1_type != Datatype::INT8))
         return false;
@@ -83,7 +83,7 @@ GemmKernelTiledOpt::GemmTuningData GemmKernelTiledOpt::SetTuningParams(const gem

     bool leftovers = m_size % tuning_data.tile_m_size || k_size % tuning_data.tile_k_size || n_size % tuning_data.tile_n_size;

-    if (leftovers || total_batches > 1 || params.transpose_input0 || params.transpose_input1) {
+    if (leftovers || total_batches > 1 || params.transpose_input0 || params.transpose_input1 || !IsSIMDSizeSupported(params.engineInfo, 8)) {
         tuning_data.simd_size = 16;
         tuning_data.tile_n_size = tuning_data.simd_size;
         tuning_data.tile_k_size = tuning_data.simd_size;
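
Each hunk above adds the same guard: a kernel's validation or tuning path now rejects a configuration when the chosen SIMD width is not supported by the target device, instead of assuming SIMD 8 is always available. Below is a minimal sketch of what an IsSIMDSizeSupported-style check could look like, assuming the engine info carries a list of supported SIMD widths; the EngineInfo struct and supportedSimdSizes field shown here are illustrative assumptions, and the real helper is defined elsewhere in kernel_selector and is not part of this commit.

// Illustrative sketch only; not the kernel_selector implementation.
#include <algorithm>
#include <cstddef>
#include <vector>

struct EngineInfo {
    std::vector<std::size_t> supportedSimdSizes;  // e.g. {8, 16, 32}; assumed field
};

// True when the requested SIMD width is one the device can execute.
inline bool IsSIMDSizeSupported(const EngineInfo& info, std::size_t simd) {
    const auto& sizes = info.supportedSimdSizes;
    return std::find(sizes.begin(), sizes.end(), simd) != sizes.end();
}

A Validate() implementation would call such a helper before accepting tuning parameters, as the hunks above do with tparams.simd and tuning_data.simd_size.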
