From b85980ea3a4854e6c9b53ceef919007860e5e19f Mon Sep 17 00:00:00 2001
From: vurusovs
Date: Wed, 13 Dec 2023 11:08:47 +0400
Subject: [PATCH] Build fix

---
 .../classes/convolution.cpp                   | 40 +++++++++----------
 .../single_layer_tests/dynamic/gather.cpp     |  2 +-
 2 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/src/plugins/intel_cpu/tests/functional/single_layer_tests/classes/convolution.cpp b/src/plugins/intel_cpu/tests/functional/single_layer_tests/classes/convolution.cpp
index d6c83c16ba5978..cc8731556fb093 100644
--- a/src/plugins/intel_cpu/tests/functional/single_layer_tests/classes/convolution.cpp
+++ b/src/plugins/intel_cpu/tests/functional/single_layer_tests/classes/convolution.cpp
@@ -28,7 +28,7 @@ std::string ConvolutionLayerCPUTest::getTestCaseName(const testing::TestParamIn
     InputShape inputShape;
     std::string targetDevice;
     std::tie(convParams, netType, inType, outType, inputShape, targetDevice) = basicParamsSet;
-    ngraph::op::PadType padType;
+    ov::op::PadType padType;
     InferenceEngine::SizeVector kernel, stride, dilation;
     std::vector<ptrdiff_t> padBegin, padEnd;
     size_t convOutChannels;
@@ -93,11 +93,11 @@ void ConvolutionLayerCPUTest::checkBiasFusing(ov::CompiledModel& execNet) const
     ASSERT_TRUE(foundConv) << "Can't find Convolution node";
 }
 
-std::shared_ptr<ngraph::Node> ConvolutionLayerCPUTest::modifyGraph(const ngraph::element::Type& ngPrc,
-                                                                   ngraph::ParameterVector& params,
-                                                                   const std::shared_ptr<ngraph::Node>& lastNode) {
+std::shared_ptr<ov::Node> ConvolutionLayerCPUTest::modifyGraph(const ov::element::Type& ngPrc,
+                                                               ov::ParameterVector& params,
+                                                               const std::shared_ptr<ov::Node>& lastNode) {
     auto retNode = CpuTestWithFusing::modifyGraph(ngPrc, params, lastNode);
-    std::shared_ptr<ngraph::Node> opToShapeInfer = nullptr;
+    std::shared_ptr<ov::Node> opToShapeInfer = nullptr;
     for (auto& targetShapes : targetStaticShapes) {
         for (size_t i = targetShapes.size(); i < params.size(); ++i) {
             const auto& shape = params[i]->get_output_partial_shape(0);
@@ -109,13 +109,13 @@ std::shared_ptr<ngraph::Node> ConvolutionLayerCPUTest::modifyGraph(const ngraph:
             // operation, and it has to match the convolution output shape. So the most suitable solution here is to
             // perform shape inference on the convolution node
             if (!opToShapeInfer) {
-                ngraph::OutputVector inputsForShapeInfer;
+                ov::OutputVector inputsForShapeInfer;
                 for (size_t j = 0; j < lastNode->get_input_size(); j++) {
-                    if (ngraph::is_type<ngraph::opset1::Constant>(lastNode->get_input_node_ptr(j))) {
+                    if (ov::is_type<ov::op::v0::Constant>(lastNode->get_input_node_ptr(j))) {
                         inputsForShapeInfer.push_back(lastNode->get_input_node_shared_ptr(j));
                     } else {
                         inputsForShapeInfer.push_back(
-                            std::make_shared<ngraph::opset1::Parameter>(lastNode->get_input_element_type(j),
+                            std::make_shared<ov::op::v0::Parameter>(lastNode->get_input_element_type(j),
                                                                     lastNode->get_input_partial_shape(j)));
                     }
                 }
@@ -169,7 +169,7 @@ void ConvolutionLayerCPUTest::SetUp() {
         selectedType = makeSelectedTypeStr(selectedType, netType);
     }
 
-    ngraph::op::PadType padType;
+    ov::op::PadType padType;
     InferenceEngine::SizeVector stride;
     std::vector<ptrdiff_t> padBegin, padEnd;
     size_t convOutChannels;
@@ -636,7 +636,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_GEMM_2D() {
         ::testing::ValuesIn(padEnds2d()),
         ::testing::ValuesIn(dilations2d()),
         ::testing::ValuesIn(numOutChannels_Gemm()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_GEMM_2D;
 }
 
@@ -648,7 +648,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_GEMM_2D_
         ::testing::ValuesIn(padEnds2d()),
         ::testing::Values(SizeVector{2, 2}),
         ::testing::ValuesIn(numOutChannels_Gemm()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_GEMM_2D_dilated;
 }
 
@@ -660,7 +660,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_GEMM_1D() {
         ::testing::ValuesIn(padEnds1d()),
         ::testing::ValuesIn(dilations1d()),
         ::testing::ValuesIn(numOutChannels_Gemm()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_GEMM_1D;
 }
 
@@ -672,7 +672,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_2D() {
         ::testing::ValuesIn(padEnds2d()),
         ::testing::ValuesIn(dilations2d()),
         ::testing::ValuesIn(numOutChannels()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_2D;
 }
 
@@ -684,7 +684,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_2D_dilat
         ::testing::ValuesIn(padEnds2d()),
         ::testing::Values(SizeVector{2, 2}),
         ::testing::ValuesIn(numOutChannels()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_2D_dilated;
 }
 
@@ -696,7 +696,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_GEMM_3D() {
         ::testing::ValuesIn(padEnds3d()),
         ::testing::ValuesIn(dilations3d()),
         ::testing::ValuesIn(numOutChannels_Gemm()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_GEMM_3D;
 }
 
@@ -708,7 +708,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_GEMM_3D_
         ::testing::ValuesIn(padEnds3d()),
         ::testing::Values(SizeVector{2, 2, 2}),
         ::testing::ValuesIn(numOutChannels_Gemm()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_GEMM_3D_dilated;
 }
 
@@ -720,7 +720,7 @@ const convParams_ExplicitPaddingType& convParams_ExplicitPadding_3D() {
         ::testing::ValuesIn(padEnds3d()),
         ::testing::ValuesIn(dilations3d()),
         ::testing::ValuesIn(numOutChannels()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_3D;
 }
 
@@ -732,7 +732,7 @@ const convParams_ExplicitPaddingDilatedType& convParams_ExplicitPadding_3D_dilat
         ::testing::ValuesIn(padEnds3d()),
         ::testing::Values(SizeVector{2, 2, 2}),
         ::testing::ValuesIn(numOutChannels()),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_3D_dilated;
 }
 
@@ -744,7 +744,7 @@ const convParams_ExplicitPadding_1x1_Type& convParams_ExplicitPadding_1x1_1D() {
         ::testing::Values(std::vector<ptrdiff_t>({0})),
         ::testing::Values(SizeVector({1})),
         ::testing::Values(63),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_1x1_1D;
 }
 
@@ -756,7 +756,7 @@ const convParams_ExplicitPadding_1x1_Type& convParams_ExplicitPadding_1x1_2D() {
         ::testing::Values(std::vector<ptrdiff_t>({0, 0})),
         ::testing::Values(SizeVector({1, 1})),
         ::testing::Values(63),
-        ::testing::Values(ngraph::op::PadType::EXPLICIT));
+        ::testing::Values(ov::op::PadType::EXPLICIT));
     return convParams_ExplicitPadding_1x1_2D;
 }
 
diff --git a/src/plugins/intel_gpu/tests/functional/single_layer_tests/dynamic/gather.cpp b/src/plugins/intel_gpu/tests/functional/single_layer_tests/dynamic/gather.cpp
index 1ac088dd8dc50c..1f32a7356a47aa 100644
--- a/src/plugins/intel_gpu/tests/functional/single_layer_tests/dynamic/gather.cpp
+++ b/src/plugins/intel_gpu/tests/functional/single_layer_tests/dynamic/gather.cpp
@@ -1,4 +1,4 @@
-wss// Copyright (C) 2022 Intel Corporation
+// Copyright (C) 2022 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
 