Skip to content

Commit

Permalink
[CONFORMANCE][IE TESTS] Move Summary from ngraph:: to ov:: (openvino…
Browse files Browse the repository at this point in the history
…toolkit#15539)

* [CONFORMANCE] Fix issue with overflowed crash

* [CONFORMANCE][IE TESTS] Move Summary from ngraph:: to ov::

* Revert "[CONFORMANCE] Fix issue with overflowed crash"

This reverts commit a1f1677.

* Apply comments

* Fix build

* Update run_conformance.py
  • Loading branch information
iefode authored Feb 9, 2023
1 parent 6078c95 commit 8213a8f
Show file tree
Hide file tree
Showing 6 changed files with 69 additions and 70 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,12 @@ OpGenerator getOpGeneratorMap();

static const std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> createFunctions() {
std::vector<std::pair<ov::DiscreteTypeInfo, std::shared_ptr<ov::Model>>> res;
auto opsets = ov::test::utils::OpSummary::getInstance().getOpSets();
auto opsets = ov::get_available_opsets();
auto opGenerator = getOpGeneratorMap();
std::set<ngraph::NodeTypeInfo> opsInfo;
for (const auto& opset : opsets) {
for (const auto& opset_pair : opsets) {
std::string opset_version = opset_pair.first;
const ov::OpSet& opset = opset_pair.second();
const auto &type_info_set = opset.get_type_info_set();
opsInfo.insert(type_info_set.begin(), type_info_set.end());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@

#include "behavior/ov_plugin/caching_tests.hpp"

#include "openvino/pass/manager.hpp"

#include "common_test_utils/file_utils.hpp"
#include "functional_test_utils/skip_tests_config.hpp"
#include "functional_test_utils/summary/api_summary.hpp"
Expand Down Expand Up @@ -289,7 +291,7 @@ void CompileModelLoadFromFileTestBase::SetUp() {
}
m_cacheFolderName = ss.str();
core->set_property(ov::cache_dir());
ngraph::pass::Manager manager;
ov::pass::Manager manager;
manager.register_pass<ov::pass::Serialize>(m_modelName, m_weightsName);
manager.run_passes(ngraph::builder::subgraph::makeConvPoolRelu(
{1, 3, 227, 227}, InferenceEngine::details::convertPrecision(InferenceEngine::Precision::FP32)));
Expand Down Expand Up @@ -372,7 +374,7 @@ void CompileModelLoadFromMemoryTestBase::SetUp() {
}
m_cacheFolderName = ss.str();
core->set_property(ov::cache_dir());
ngraph::pass::Manager manager;
ov::pass::Manager manager;
manager.register_pass<ov::pass::Serialize>(m_modelName, m_weightsName);
manager.run_passes(ngraph::builder::subgraph::makeConvPoolRelu(
{1, 3, 227, 227},
Expand Down Expand Up @@ -455,7 +457,7 @@ TEST_P(CompileModelLoadFromMemoryTestBase, CanLoadFromMemoryWithoutExecption) {
}

TEST_P(CompileModelLoadFromMemoryTestBase, CanLoadFromMemoryWithoutWeightsANdExecption) {
ngraph::pass::Manager manager;
ov::pass::Manager manager;
std::shared_ptr<ov::Model> model;
{
auto data = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{3, 1, 2});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
#include <process.h>
#endif

#include "openvino/pass/manager.hpp"
#include "openvino/core/preprocess/pre_post_process.hpp"
#include "openvino/pass/serialize.hpp"
#include "transformations/convert_precision.hpp"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@

#include "summary.hpp"

#include "openvino/opsets/opset.hpp"
#include "openvino/openvino.hpp"
#include "openvino/opsets/opset10.hpp"

namespace ov {
namespace test {
namespace utils {
Expand All @@ -25,10 +29,9 @@ class OpSummary : public virtual Summary {
private:
static OpSummary *p_instance;
static bool extractBody;
std::vector<ngraph::OpSet> opsets;
std::map<ngraph::NodeTypeInfo, PassRate> opsStats = {};
std::map<ov::NodeTypeInfo, PassRate> opsStats = {};

std::string getOpVersion(const ngraph::NodeTypeInfo &type_info);
std::string getOpVersion(const ov::NodeTypeInfo &type_info);

protected:
OpSummary();
Expand All @@ -38,23 +41,19 @@ class OpSummary : public virtual Summary {
public:
static OpSummary &getInstance();

std::map<ngraph::NodeTypeInfo, PassRate> getOPsStats() { return opsStats; }

std::vector<ngraph::OpSet> getOpSets() {
return opsets;
}
std::map<ov::NodeTypeInfo, PassRate> getOPsStats() { return opsStats; }

static void setExtractBody(bool val) { extractBody = val; }
static bool getExtractBody() { return extractBody; }

std::map<std::string, PassRate> getStatisticFromReport();
void saveReport() override;

void updateOPsStats(const std::shared_ptr<ngraph::Function> &function, const PassRate::Statuses &status);
void updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &function, const bool implStatus);
void updateOPsStats(const std::shared_ptr<ov::Model> &model, const PassRate::Statuses &status);
void updateOPsImplStatus(const std::shared_ptr<ov::Model> &model, const bool implStatus);

void updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status);
void updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus);
void updateOPsStats(const ov::NodeTypeInfo &op, const PassRate::Statuses &status);
void updateOPsImplStatus(const ov::NodeTypeInfo &op, const bool implStatus);
};

} // namespace utils
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
#include <map>
#include <fstream>

#include "ngraph/ngraph.hpp"
#include "openvino/openvino.hpp"

#include "common_test_utils/test_constants.hpp"
#include "common_test_utils/common_utils.hpp"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,11 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <algorithm>

#include <pugixml.hpp>


#include "functional_test_utils/summary/op_summary.hpp"
#include "common_test_utils/file_utils.hpp"

Expand All @@ -27,17 +30,6 @@ void OpSummaryDestroyer::initialize(OpSummary *p) {

OpSummary::OpSummary() {
reportFilename = CommonTestUtils::OP_REPORT_FILENAME;
// TODO: replace to get_available_opsets()
opsets.push_back(ngraph::get_opset1());
opsets.push_back(ngraph::get_opset2());
opsets.push_back(ngraph::get_opset3());
opsets.push_back(ngraph::get_opset4());
opsets.push_back(ngraph::get_opset5());
opsets.push_back(ngraph::get_opset6());
opsets.push_back(ngraph::get_opset7());
opsets.push_back(ngraph::get_opset8());
opsets.push_back(ngraph::get_opset9());
opsets.push_back(ngraph::get_opset10());
}

OpSummary &OpSummary::getInstance() {
Expand All @@ -48,7 +40,7 @@ OpSummary &OpSummary::getInstance() {
return *p_instance;
}

void OpSummary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::Statuses &status) {
void OpSummary::updateOPsStats(const ov::NodeTypeInfo &op, const PassRate::Statuses &status) {
auto it = opsStats.find(op);
if (opsStats.find(op) == opsStats.end()) {
opsStats.insert({op, PassRate()});
Expand Down Expand Up @@ -89,7 +81,7 @@ void OpSummary::updateOPsStats(const ngraph::NodeTypeInfo &op, const PassRate::S
}
}

void OpSummary::updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool implStatus) {
void OpSummary::updateOPsImplStatus(const ov::NodeTypeInfo &op, const bool implStatus) {
auto it = opsStats.find(op);
if (it != opsStats.end()) {
if (!it->second.isImplemented && implStatus) {
Expand All @@ -101,13 +93,14 @@ void OpSummary::updateOPsImplStatus(const ngraph::NodeTypeInfo &op, const bool i
}
}

std::string OpSummary::getOpVersion(const ngraph::NodeTypeInfo &type_info) {
for (size_t i = 0; i < opsets.size(); i++) {
if (opsets[i].contains_type(type_info)) {
return std::to_string(i+1);
}
std::string OpSummary::getOpVersion(const ov::NodeTypeInfo &type_info) {
std::string opset_name = "opset", version = type_info.get_version();
auto pos = version.find(opset_name);
if (pos == std::string::npos) {
return "undefined";
} else {
return version.substr(pos + opset_name.size());
}
return "undefined";
}

std::map<std::string, PassRate> OpSummary::getStatisticFromReport() {
Expand Down Expand Up @@ -136,41 +129,41 @@ std::map<std::string, PassRate> OpSummary::getStatisticFromReport() {
return oldOpsStat;
}

void OpSummary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function, const PassRate::Statuses &status) {
if (function->get_parameters().empty()) {
void OpSummary::updateOPsStats(const std::shared_ptr<ov::Model> &model, const PassRate::Statuses &status) {
if (model->get_parameters().empty()) {
return;
}
bool isFunctionalGraph = false;
for (const auto &op : function->get_ordered_ops()) {
if (!ngraph::is_type<ngraph::op::Parameter>(op) &&
!ngraph::is_type<ngraph::op::Constant>(op) &&
!ngraph::is_type<ngraph::op::Result>(op)) {
for (const auto &op : model->get_ordered_ops()) {
if (!std::dynamic_pointer_cast<ov::op::v0::Parameter>(op) &&
!std::dynamic_pointer_cast<ov::op::v0::Constant>(op) &&
!std::dynamic_pointer_cast<ov::op::v0::Result>(op)) {
isFunctionalGraph = true;
break;
}
}

for (const auto &op : function->get_ordered_ops()) {
if ((ngraph::is_type<ngraph::op::Parameter>(op) ||
ngraph::is_type<ngraph::op::Constant>(op) ||
ngraph::is_type<ngraph::op::Result>(op)) && isFunctionalGraph) {
for (const auto &op : model->get_ordered_ops()) {
if (std::dynamic_pointer_cast<ov::op::v0::Parameter>(op) ||
std::dynamic_pointer_cast<ov::op::v0::Constant>(op) ||
std::dynamic_pointer_cast<ov::op::v0::Result>(op) || isFunctionalGraph) {
continue;
}
if (extractBody) {
if (ngraph::is_type<ngraph::op::TensorIterator>(op)) {
if (std::dynamic_pointer_cast<ov::op::v0::TensorIterator>(op)) {
updateOPsStats(op->get_type_info(), status);
auto ti = ngraph::as_type_ptr<ngraph::op::TensorIterator>(op);
auto ti = ov::as_type_ptr<ov::op::v0::TensorIterator>(op);
auto ti_body = ti->get_function();
updateOPsStats(ti_body, status);
} else if (ngraph::is_type<ngraph::op::v5::Loop>(op)) {
} else if (std::dynamic_pointer_cast<ov::op::v5::Loop>(op)) {
updateOPsStats(op->get_type_info(), status);
auto loop = ngraph::as_type_ptr<ngraph::op::v5::Loop>(op);
auto loop = ov::as_type_ptr<ov::op::v5::Loop>(op);
auto loop_body = loop->get_function();
updateOPsStats(loop_body, status);
} else if (ngraph::is_type<ngraph::op::v8::If>(op)) {
} else if (std::dynamic_pointer_cast<ov::op::v8::If>(op)) {
updateOPsStats(op->get_type_info(), status);
auto if_op = ngraph::as_type_ptr<ngraph::op::v8::If>(op);
std::vector<std::shared_ptr<ngraph::Function>> bodies;
auto if_op = ov::as_type_ptr<ov::op::v8::If>(op);
std::vector<std::shared_ptr<ov::Model>> bodies;
for (size_t i = 0; i < if_op->get_internal_subgraphs_size(); i++) {
auto if_body = if_op->get_function(i);
updateOPsStats(if_body, status);
Expand All @@ -181,33 +174,33 @@ void OpSummary::updateOPsStats(const std::shared_ptr<ngraph::Function> &function
}
}

void OpSummary::updateOPsImplStatus(const std::shared_ptr<ngraph::Function> &function, const bool implStatus) {
if (function->get_parameters().empty()) {
void OpSummary::updateOPsImplStatus(const std::shared_ptr<ov::Model> &model, const bool implStatus) {
if (model->get_parameters().empty()) {
return;
}
bool isFunctionalGraph = false;
for (const auto &op : function->get_ordered_ops()) {
if (!ngraph::is_type<ngraph::op::Parameter>(op) &&
!ngraph::is_type<ngraph::op::Constant>(op) &&
!ngraph::is_type<ngraph::op::Result>(op)) {
for (const auto &op : model->get_ordered_ops()) {
if (!std::dynamic_pointer_cast<ov::op::v0::Parameter>(op) &&
!std::dynamic_pointer_cast<ov::op::v0::Constant>(op) &&
!std::dynamic_pointer_cast<ov::op::v0::Result>(op)) {
isFunctionalGraph = true;
break;
}
}

for (const auto &op : function->get_ordered_ops()) {
if ((ngraph::is_type<ngraph::op::Parameter>(op) ||
ngraph::is_type<ngraph::op::Constant>(op) ||
ngraph::is_type<ngraph::op::Result>(op)) && isFunctionalGraph) {
for (const auto &op : model->get_ordered_ops()) {
if ((std::dynamic_pointer_cast<ov::op::v0::Parameter>(op) ||
std::dynamic_pointer_cast<ov::op::v0::Constant>(op) ||
std::dynamic_pointer_cast<ov::op::v0::Result>(op)) && isFunctionalGraph) {
continue;
} else if (ngraph::is_type<ngraph::op::TensorIterator>(op)) {
} else if (std::dynamic_pointer_cast<ov::op::v0::TensorIterator>(op)) {
updateOPsImplStatus(op->get_type_info(), implStatus);
auto ti = ngraph::as_type_ptr<ngraph::op::TensorIterator>(op);
auto ti = ov::as_type_ptr<ov::op::v0::TensorIterator>(op);
auto ti_body = ti->get_function();
updateOPsImplStatus(ti_body, implStatus);
} else if (ngraph::is_type<ngraph::op::v5::Loop>(op)) {
} else if (std::dynamic_pointer_cast<ov::op::v5::Loop>(op)) {
updateOPsImplStatus(op->get_type_info(), implStatus);
auto loop = ngraph::as_type_ptr<ngraph::op::v5::Loop>(op);
auto loop = ov::as_type_ptr<ov::op::v5::Loop>(op);
auto loop_body = loop->get_function();
updateOPsImplStatus(loop_body, implStatus);
} else {
Expand Down Expand Up @@ -249,8 +242,10 @@ void OpSummary::saveReport() {

std::string outputFilePath = outputFolder + std::string(CommonTestUtils::FileSeparator) + filename;

std::set<ngraph::NodeTypeInfo> opsInfo;
for (const auto &opset : opsets) {
std::set<ov::NodeTypeInfo> opsInfo;
for (const auto &opset_pair : get_available_opsets()) {
std::string opset_version = opset_pair.first;
const ov::OpSet& opset = opset_pair.second();
const auto &type_info_set = opset.get_type_info_set();
opsInfo.insert(type_info_set.begin(), type_info_set.end());
}
Expand Down

0 comments on commit 8213a8f

Please sign in to comment.