Commit b85980e: Build fix

vurusovs committed Dec 13, 2023
1 parent 72ecef3 commit b85980e
Showing 2 changed files with 21 additions and 21 deletions.
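The first file replaces the deprecated ngraph:: aliases (ngraph::op::PadType, ngraph::Node, ngraph::opset1::Constant, and so on) with their ov:: counterparts; the second removes a stray "wss" prefix typed in front of a copyright header, evidently the build break named in the commit title. A minimal sketch of the rename pattern, assuming OpenVINO 2023.x headers (illustration only, not part of the diff):

    #include <openvino/core/node.hpp>
    #include <openvino/core/type.hpp>
    #include <openvino/op/constant.hpp>
    #include <openvino/op/util/attr_types.hpp>

    // Was: ngraph::is_type<ngraph::opset1::Constant>(node) and
    // ngraph::op::PadType::EXPLICIT. The opset1 ops resolve to their
    // versioned definitions, e.g. opset1::Constant == ov::op::v0::Constant.
    bool is_constant_node(const ov::Node* node) {
        const auto pad = ov::op::PadType::EXPLICIT;  // enum moved from ngraph::op to ov::op
        (void)pad;  // present only to show the enum rename
        return ov::is_type<ov::op::v0::Constant>(node);
    }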
@@ -28,7 +28,7 @@ std::string ConvolutionLayerCPUTest::getTestCaseName(const testing::TestParamInf
InputShape inputShape;
std::string targetDevice;
std::tie(convParams, netType, inType, outType, inputShape, targetDevice) = basicParamsSet;
-ngraph::op::PadType padType;
+ov::op::PadType padType;
InferenceEngine::SizeVector kernel, stride, dilation;
std::vector<ptrdiff_t> padBegin, padEnd;
size_t convOutChannels;
@@ -93,11 +93,11 @@ void ConvolutionLayerCPUTest::checkBiasFusing(ov::CompiledModel& execNet) const
ASSERT_TRUE(foundConv) << "Can't find Convolution node";
}

-std::shared_ptr<ngraph::Node> ConvolutionLayerCPUTest::modifyGraph(const ngraph::element::Type& ngPrc,
-ngraph::ParameterVector& params,
-const std::shared_ptr<ngraph::Node>& lastNode) {
+std::shared_ptr<ov::Node> ConvolutionLayerCPUTest::modifyGraph(const ov::element::Type& ngPrc,
+ov::ParameterVector& params,
+const std::shared_ptr<ov::Node>& lastNode) {
auto retNode = CpuTestWithFusing::modifyGraph(ngPrc, params, lastNode);
-std::shared_ptr<ngraph::Node> opToShapeInfer = nullptr;
+std::shared_ptr<ov::Node> opToShapeInfer = nullptr;
for (auto& targetShapes : targetStaticShapes) {
for (size_t i = targetShapes.size(); i < params.size(); ++i) {
const auto& shape = params[i]->get_output_partial_shape(0);
@@ -109,13 +109,13 @@ std::shared_ptr<ngraph::Node> ConvolutionLayerCPUTest::modifyGraph(const ngraph:
// operation, and it has to match the convolution output shape. So the most suitable solution here is to
// perform shape inference on the convolution node
if (!opToShapeInfer) {
-ngraph::OutputVector inputsForShapeInfer;
+ov::OutputVector inputsForShapeInfer;
for (size_t j = 0; j < lastNode->get_input_size(); j++) {
-if (ngraph::is_type<ngraph::opset1::Constant>(lastNode->get_input_node_ptr(j))) {
+if (ov::is_type<ov::op::v0::Constant>(lastNode->get_input_node_ptr(j))) {
inputsForShapeInfer.push_back(lastNode->get_input_node_shared_ptr(j));
} else {
inputsForShapeInfer.push_back(
-std::make_shared<ngraph::opset1::Parameter>(lastNode->get_input_element_type(j),
+std::make_shared<ov::op::v0::Parameter>(lastNode->get_input_element_type(j),
lastNode->get_input_partial_shape(j)));
}
}
@@ -169,7 +169,7 @@ void ConvolutionLayerCPUTest::SetUp() {
selectedType = makeSelectedTypeStr(selectedType, netType);
}

-ngraph::op::PadType padType;
+ov::op::PadType padType;
InferenceEngine::SizeVector stride;
std::vector<ptrdiff_t> padBegin, padEnd;
size_t convOutChannels;
@@ -636,7 +636,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_GEMM_2D() {
::testing::ValuesIn(padEnds2d()),
::testing::ValuesIn(dilations2d()),
::testing::ValuesIn(numOutChannels_Gemm()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_GEMM_2D;
}

@@ -648,7 +648,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_GEMM_2D_
::testing::ValuesIn(padEnds2d()),
::testing::Values(SizeVector{2, 2}),
::testing::ValuesIn(numOutChannels_Gemm()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_GEMM_2D_dilated;
}

@@ -660,7 +660,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_GEMM_1D() {
::testing::ValuesIn(padEnds1d()),
::testing::ValuesIn(dilations1d()),
::testing::ValuesIn(numOutChannels_Gemm()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_GEMM_1D;
}

@@ -672,7 +672,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_2D() {
::testing::ValuesIn(padEnds2d()),
::testing::ValuesIn(dilations2d()),
::testing::ValuesIn(numOutChannels()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_2D;
}

@@ -684,7 +684,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_2D_dilat
::testing::ValuesIn(padEnds2d()),
::testing::Values(SizeVector{2, 2}),
::testing::ValuesIn(numOutChannels()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_2D_dilated;
}

@@ -696,7 +696,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_GEMM_3D() {
::testing::ValuesIn(padEnds3d()),
::testing::ValuesIn(dilations3d()),
::testing::ValuesIn(numOutChannels_Gemm()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_GEMM_3D;
}

@@ -708,7 +708,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_GEMM_3D_
::testing::ValuesIn(padEnds3d()),
::testing::Values(SizeVector{2, 2, 2}),
::testing::ValuesIn(numOutChannels_Gemm()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_GEMM_3D_dilated;
}

@@ -720,7 +720,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_3D() {
::testing::ValuesIn(padEnds3d()),
::testing::ValuesIn(dilations3d()),
::testing::ValuesIn(numOutChannels()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_3D;
}

@@ -732,7 +732,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_3D_dilat
::testing::ValuesIn(padEnds3d()),
::testing::Values(SizeVector{2, 2, 2}),
::testing::ValuesIn(numOutChannels()),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_3D_dilated;
}

@@ -744,7 +744,7 @@ const convParams_ExplicitPadding_1x1_Type& convParams_ExplicitPadding_1x1_1D() {
::testing::Values(std::vector<ptrdiff_t>({0})),
::testing::Values(SizeVector({1})),
::testing::Values(63),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_1x1_1D;
}

@@ -756,7 +756,7 @@ const convParams_ExplicitPadding_1x1_Type& convParams_ExplicitPadding_1x1_2D() {
::testing::Values(std::vector<ptrdiff_t>({0, 0})),
::testing::Values(SizeVector({1, 1})),
::testing::Values(63),
-::testing::Values(ngraph::op::PadType::EXPLICIT));
+::testing::Values(ov::op::PadType::EXPLICIT));
return convParams_ExplicitPadding_1x1_2D;
}
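
Aside: the convParams_* factories above feed ::testing::Combine tuples into parameterized suites. A self-contained sketch of that pattern (all names here are illustrative, not taken from the diff):

    #include <cstddef>
    #include <gtest/gtest.h>
    #include <tuple>
    #include <vector>

    // Tuple mirrors the shape of the convolution parameter sets:
    // (output channels, pad begin).
    using PadParams = std::tuple<std::size_t, std::vector<std::ptrdiff_t>>;

    class PadParamsTest : public ::testing::TestWithParam<PadParams> {};

    TEST_P(PadParamsTest, ChannelsArePositive) {
        std::size_t channels;
        std::vector<std::ptrdiff_t> padBegin;
        std::tie(channels, padBegin) = GetParam();
        EXPECT_GT(channels, 0u);
    }

    // Combine() builds the cartesian product of the value generators,
    // exactly as the convParams_* functions do for the real suites.
    INSTANTIATE_TEST_SUITE_P(SketchSuite,
                             PadParamsTest,
                             ::testing::Combine(::testing::Values(std::size_t{63}),
                                                ::testing::Values(std::vector<std::ptrdiff_t>{0, 0})));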

@@ -1,4 +1,4 @@
-wss// Copyright (C) 2022 Intel Corporation
+// Copyright (C) 2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

