Skip to content

Commit

Permalink
Remove InferenceEngine::Layout
Browse files Browse the repository at this point in the history
  • Loading branch information
riverlijunjie committed Oct 26, 2023
1 parent 3eeccb6 commit 75bd1c7
Show file tree
Hide file tree
Showing 6 changed files with 2 additions and 29 deletions.
1 change: 0 additions & 1 deletion src/core/src/runtime/itensor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ bool ITensor::is_continuous() const {
if (get_element_type().bitwidth() < 8)
// OpenVINO doesn't support strides for lp types
return true;

const auto& shape = get_shape();
const auto& type = get_element_type();
std::vector<size_t> strides(shape.size());
Expand Down
21 changes: 0 additions & 21 deletions src/plugins/intel_cpu/src/node.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1206,27 +1206,6 @@ bool Node::isFusedWith(Type fusedNodeType) const {
return false;
}

InferenceEngine::Layout Node::getWeightsLayoutByDims(SizeVector dims, bool isGrouped) {
    // Map the rank of a weights tensor to the legacy InferenceEngine layout enum.
    // For 5D/6D tensors the grouped-convolution flag selects the G-prefixed layout;
    // any rank above 6 falls back to the generic BLOCKED layout.
    const auto rank = dims.size();
    if (rank == 0)
        return InferenceEngine::Layout::SCALAR;
    if (rank == 1)
        return InferenceEngine::Layout::C;
    if (rank == 2)
        return InferenceEngine::Layout::NC;
    if (rank == 3)
        return InferenceEngine::Layout::CHW;
    if (rank == 4)
        return InferenceEngine::Layout::OIHW;
    if (rank == 5)
        return isGrouped ? InferenceEngine::Layout::GOIHW : InferenceEngine::Layout::OIDHW;
    if (rank == 6)
        return isGrouped ? InferenceEngine::Layout::GOIDHW : InferenceEngine::Layout::BLOCKED;
    return InferenceEngine::Layout::BLOCKED;
}

dnnl::memory::format_tag Node::getWeightsFormatTagByDims(const SizeVector& dims) const {
switch (dims.size()) {
case 1:
Expand Down
1 change: 0 additions & 1 deletion src/plugins/intel_cpu/src/node.h
Original file line number Diff line number Diff line change
Expand Up @@ -630,7 +630,6 @@ class Node {

virtual std::vector<dnnl::memory::format_tag> getAvailableFormatsForDims(const Shape& dims) const;

InferenceEngine::Layout getWeightsLayoutByDims(InferenceEngine::SizeVector dims, bool isGrouped);
dnnl::memory::format_tag getWeightsFormatTagByDims(const InferenceEngine::SizeVector& dims) const;

/**
Expand Down
4 changes: 0 additions & 4 deletions src/plugins/intel_cpu/src/nodes/adaptive_pooling.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -105,10 +105,6 @@ void AdaptivePooling::initSupportedPrimitiveDescriptors() {
// we support only fp32 currently
precision = Precision::FP32;

InferenceEngine::LayerConfig config;
config.inConfs.resize(2);
config.outConfs.resize((algorithm == Algorithm::AdaptivePoolingAvg ? 1 : 2));

std::vector<LayoutType> dataFormats{ LayoutType::ncsp };
const auto &inDims = getInputShapeAtPort(0).getDims();
if (inDims[1] != Shape::UNDEFINED_DIM && inDims[1] != 1) {
Expand Down
2 changes: 1 addition & 1 deletion src/plugins/intel_cpu/src/nodes/proposal.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ void Proposal::execute(dnnl::stream strm) {

InferenceEngine::Extensions::Cpu::XARCH::proposal_exec(probabilitiesData, anchorsData, inProbDims,
{imgHeight, imgWidth, scaleHeight, scaleWidth}, anchors.data(), roi_indices.data(), outRoiData, outProbData, conf);
} catch (const InferenceEngine::Exception& e) {
} catch (const ov::Exception& e) {
std::string errorMsg = e.what();
IE_THROW() << errorMsg;
}
Expand Down
2 changes: 1 addition & 1 deletion src/plugins/intel_cpu/src/plugin.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -853,7 +853,7 @@ ov::SupportedOpsMap Engine::query_model(const std::shared_ptr<const ov::Model>&
std::unique_ptr<Node> ptr;
try {
ptr.reset(Node::factory().create(op, context));
} catch (const InferenceEngine::Exception&) {
} catch (const ov::Exception&) {
return false;
}
return true;
Expand Down

0 comments on commit 75bd1c7

Please sign in to comment.