Skip to content

Commit

Permalink
Remove ExecutableNetwork::QueryState (openvinotoolkit#8034)
Browse files Browse the repository at this point in the history
* removed ExecutableNetwork::QueryState from code

* removed ExecutableNetwork::QueryState from tests (not checked)

* buildable version

* remove unneeded change and fix cpplint error

* remove extra space

* remove QueryState from GNAExecutableNetwork

* cleaned up GNA QueryState tests in tests_deprecated (no replacement needed, since the API is deprecated)

* fix tests after merge

* remove tests again after merge

* fixed tests with _REGULAR_API suffix
  • Loading branch information
sadolini authored and openvino-dev-samples committed Nov 24, 2021
1 parent eca208e commit 52bd59c
Show file tree
Hide file tree
Showing 27 changed files with 54 additions and 419 deletions.
7 changes: 0 additions & 7 deletions inference-engine/src/gna_plugin/gna_executable_network.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -67,13 +67,6 @@ class GNAExecutableNetwork : public InferenceEngine::IExecutableNetworkInternal
return std::make_shared<GNAInferRequest>(plg, inputs, outputs);
}

/// @deprecated Use InferRequest::QueryState instead.
/// @brief Forwards the memory-state query to the underlying GNA plugin.
/// @return The variable states reported by the plugin.
INFERENCE_ENGINE_DEPRECATED("Use InferRequest::QueryState instead")
std::vector<InferenceEngine::IVariableStateInternal::Ptr> QueryState() override {
    IE_SUPPRESS_DEPRECATED_START
    auto states = plg->QueryState();  // plg's QueryState is itself deprecated, hence the suppression
    IE_SUPPRESS_DEPRECATED_END
    return states;
}

void Export(const std::string &modelFileName) override {
plg->Export(modelFileName);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -190,18 +190,6 @@ class INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) {
*/
INFERENCE_ENGINE_DEPRECATED("Use ExecutableNetwork::CreateInferRequest instead")
InferRequest::Ptr CreateInferRequestPtr();

/**
* @deprecated Use InferRequest::QueryState instead
* @brief Gets state control interface for given executable network.
*
* State control essential for recurrent networks
*
* @return A vector of Memory State objects
*/
INFERENCE_ENGINE_DEPRECATED("Use InferRequest::QueryState instead")
std::vector<VariableState> QueryState();
IE_SUPPRESS_DEPRECATED_END
};

} // namespace InferenceEngine
Original file line number Diff line number Diff line change
Expand Up @@ -65,16 +65,6 @@ ExecutableNetwork::operator IExecutableNetwork::Ptr() {
return std::make_shared<ExecutableNetworkBase>(_impl);
}

std::vector<VariableState> ExecutableNetwork::QueryState() {
std::vector<VariableState> controller;
EXEC_NET_CALL_STATEMENT({
for (auto&& state : _impl->QueryState()) {
controller.emplace_back(VariableState{_so, state});
}
});
return controller;
}

InferRequest ExecutableNetwork::CreateInferRequest() {
EXEC_NET_CALL_STATEMENT(return {_so, _impl->CreateInferRequest()});
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,10 +87,6 @@ std::shared_ptr<ngraph::Function> IExecutableNetworkInternal::GetExecGraphInfo()
IE_THROW(NotImplemented);
}

// Default implementation: querying memory states is not supported unless a
// derived executable network overrides this method.
std::vector<std::shared_ptr<IVariableStateInternal>> IExecutableNetworkInternal::QueryState() {
    IE_THROW(NotImplemented);
}

void IExecutableNetworkInternal::SetPointerToPlugin(const std::shared_ptr<IInferencePlugin>& plugin) {
_plugin = plugin;
}
Expand Down
6 changes: 0 additions & 6 deletions inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -283,12 +283,6 @@ bool MKLDNNExecNetwork::CanProcessDynBatch(const InferenceEngine::CNNNetwork &ne
return true;
}

IE_SUPPRESS_DEPRECATED_START
// Returns the memory states collected for this network. Deprecated path —
// states are intended to be queried via InferRequest::QueryState instead.
std::vector<IVariableStateInternal::Ptr> MKLDNNExecNetwork::QueryState() {
    return memoryStates;
}
IE_SUPPRESS_DEPRECATED_END

void MKLDNNExecNetwork::Export(std::ostream& modelStream) {
CNNNetworkSerializer serializer(modelStream, extensionManager);
serializer <<_network;
Expand Down
3 changes: 0 additions & 3 deletions inference-engine/src/mkldnn_plugin/mkldnn_exec_network.h
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,6 @@ class MKLDNNExecNetwork: public InferenceEngine::ExecutableNetworkThreadSafeDefa

std::shared_ptr<ngraph::Function> GetExecGraphInfo() override;

INFERENCE_ENGINE_DEPRECATED("Use InferRequest::QueryState instead")
std::vector<InferenceEngine::IVariableStateInternal::Ptr> QueryState() override;

void Export(std::ostream& modelStream) override;

protected:
Expand Down
30 changes: 12 additions & 18 deletions inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -60,26 +60,20 @@ void MKLDNNPlugin::MKLDNNInferRequest::CreateInferRequest() {
// Save all MemoryLayer data tensors. Will use insight about mechanics
// of MemoryLayer implementation. It uses output edge of MemoryLayer
// producer as storage for tensor to keep it between infer calls.
IE_SUPPRESS_DEPRECATED_START
if (execNetwork->_numRequests > 1 || execNetwork->QueryState().size() == 0) {
for (auto &node : graph->GetNodes()) {
if (node->getType() == MemoryInput) {
auto memoryNode = dynamic_cast<MKLDNNMemoryInputNode*>(node.get());
auto state_store = memoryNode->getStore();
auto state_name = memoryNode->getId();

// Remove suffix with pair ID. Internal information.
auto suffix_idx = state_name.find("/id=");
if (suffix_idx != std::string::npos)
state_name = state_name.substr(0, suffix_idx);

memoryStates.emplace_back(new MKLDNNVariableState(state_name, state_store));
}
for (auto& node : graph->GetNodes()) {
if (node->getType() == MemoryInput) {
auto memoryNode = dynamic_cast<MKLDNNMemoryInputNode*>(node.get());
auto state_store = memoryNode->getStore();
auto state_name = memoryNode->getId();

// Remove suffix with pair ID. Internal information.
auto suffix_idx = state_name.find("/id=");
if (suffix_idx != std::string::npos)
state_name = state_name.substr(0, suffix_idx);

memoryStates.emplace_back(new MKLDNNVariableState(state_name, state_store));
}
} else {
memoryStates = execNetwork->QueryState();
}
IE_SUPPRESS_DEPRECATED_END
}

MKLDNNPlugin::MKLDNNInferRequest::~MKLDNNInferRequest() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,13 +115,6 @@ class INFERENCE_ENGINE_API_CLASS(IExecutableNetworkInternal)
*/
virtual std::shared_ptr<ngraph::Function> GetExecGraphInfo();

/**
* @deprecated Need to implement GetVariablesInfo for ExecutableNetwork
* @brief Queries memory states.
* @return Returns memory states
*/
virtual std::vector<std::shared_ptr<IVariableStateInternal>> QueryState();

/**
* @brief Sets the pointer to plugin internal.
* @param[in] plugin The plugin
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,6 @@ TEST(ExecutableNetworkTests, throwsOnUninitializedGetExecGraphInfo) {
ASSERT_THROW(exec.GetExecGraphInfo(), InferenceEngine::NotAllocated);
}

// An ExecutableNetwork that was never initialized by a Core/plugin must report
// NotAllocated when its (deprecated) QueryState method is called.
TEST(ExecutableNetworkTests, throwsOnUninitializedQueryState) {
    IE_SUPPRESS_DEPRECATED_START
    ExecutableNetwork exec;
    ASSERT_THROW(exec.QueryState(), InferenceEngine::NotAllocated);
    IE_SUPPRESS_DEPRECATED_END
}

TEST(ExecutableNetworkTests, throwsOnUninitializedSetConfig) {
ExecutableNetwork exec;
ASSERT_THROW(exec.SetConfig({{}}), InferenceEngine::NotAllocated);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,16 +65,14 @@ class ImportMemoryTest : public testing::WithParamInterface<exportImportNetworkP
}
auto importedNetwork = core->ImportNetwork(inputStream, targetDevice, configuration);
std::vector<std::string> queryToState;
IE_SUPPRESS_DEPRECATED_START
for (const auto &query_state : executableNetwork.QueryState()) {
InferenceEngine::InferRequest importInfer = importedNetwork.CreateInferRequest();
for (const auto &query_state : importInfer.QueryState()) {
queryToState.push_back(query_state.GetName());
}
for (const auto &next_memory : importedNetwork.QueryState()) {
for (const auto &next_memory : importInfer.QueryState()) {
ASSERT_TRUE(std::find(queryToState.begin(), queryToState.end(), next_memory.GetName()) != queryToState.end())
<< "State " << next_memory.GetName() << " expected to be in memory states but it is not!";
}
IE_SUPPRESS_DEPRECATED_END
InferenceEngine::InferRequest importInfer = importedNetwork.CreateInferRequest();
importInfer.Infer();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,7 @@ TEST_P(Basic_LSTM_S, CompareWithRefImpl_LowLatencyTransformation) {
manager.register_pass<ngraph::pass::LowLatency2>(); // LowLatency enables UnrollTI
manager.run_passes(function);
LoadNetwork();
IE_SUPPRESS_DEPRECATED_START
auto states = executableNetwork.QueryState();
auto states = inferRequest.QueryState();
for (auto& state : states) {
auto name = state.GetName();
if (name.find("cell_state_1") != std::string::npos) {
Expand All @@ -58,7 +57,6 @@ TEST_P(Basic_LSTM_S, CompareWithRefImpl_LowLatencyTransformation) {
GTEST_FAIL() << "unknown memory state";
}
}
IE_SUPPRESS_DEPRECATED_END
// Run and compare
Infer();
const auto& actualOutputs = GetOutputs();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,98 +31,6 @@ InferenceEngine::ExecutableNetwork InferRequestVariableStateTest::PrepareNetwork
return ie->LoadNetwork(net, deviceName);
}

// Checks that a freshly prepared network exposes exactly two variable states
// and that every reported state name appears in the expected `statesToQuery` list.
TEST_P(InferRequestVariableStateTest, smoke_VariableState_QueryState) {
    IE_SUPPRESS_DEPRECATED_START
    auto network = PrepareNetwork();

    const auto memoryStates = network.QueryState();
    ASSERT_TRUE(memoryStates.size() == 2) << "Incorrect number of VariableStates";

    for (const auto& memoryState : memoryStates) {
        const auto stateName = memoryState.GetName();
        const bool isKnown = std::find(statesToQuery.begin(), statesToQuery.end(), stateName) != statesToQuery.end();
        ASSERT_TRUE(isKnown) << "State " << stateName << "expected to be in memory states but it is not!";
    }
    IE_SUPPRESS_DEPRECATED_END
}

// Verifies that SetState actually overwrites a variable state's contents:
// every state is reset, filled with a constant via SetState, then read back
// with GetState and compared element-wise.
TEST_P(InferRequestVariableStateTest, smoke_VariableState_SetState) {
    IE_SUPPRESS_DEPRECATED_START
    auto executableNet = PrepareNetwork();
    const float new_state_val = 13.0f;
    for (auto &&state : executableNet.QueryState()) {
        state.Reset();
        auto state_val = state.GetState();
        auto element_count = state_val->size();

        // std::vector replaces the original raw new[]/delete[] pair: it is
        // exception-safe (no leak if blob allocation throws) and the fill
        // loop collapses into the constructor.
        std::vector<float> new_state_data(element_count, new_state_val);
        auto stateBlob = make_blob_with_precision(state_val->getTensorDesc());
        stateBlob->allocate();
        std::memcpy(stateBlob->buffer(), new_state_data.data(), element_count * sizeof(float));
        state.SetState(stateBlob);
    }

    for (auto &&state : executableNet.QueryState()) {
        auto lastState = state.GetState();
        auto last_state_size = lastState->size();
        auto last_state_data = lastState->cbuffer().as<float *>();
        ASSERT_TRUE(last_state_size != 0) << "State size should not be 0";

        // size_t index avoids the signed/unsigned comparison of the original.
        for (size_t i = 0; i < last_state_size; i++) {
            EXPECT_NEAR(new_state_val, last_state_data[i], 1e-5);
        }
    }
    IE_SUPPRESS_DEPRECATED_END
}

// Verifies Reset on a single state: after every state is set to a constant,
// resetting only the first state zeroes it while all the others must keep
// their previously written value.
TEST_P(InferRequestVariableStateTest, smoke_VariableState_Reset) {
    IE_SUPPRESS_DEPRECATED_START
    auto executableNet = PrepareNetwork();
    const float new_state_val = 13.0f;
    for (auto &&state : executableNet.QueryState()) {
        state.Reset();
        auto state_val = state.GetState();
        auto element_count = state_val->size();

        // std::vector replaces the original raw new[]/delete[] pair: it is
        // exception-safe (no leak if blob allocation throws) and the fill
        // loop collapses into the constructor.
        std::vector<float> new_state_data(element_count, new_state_val);
        auto stateBlob = make_blob_with_precision(state_val->getTensorDesc());
        stateBlob->allocate();
        std::memcpy(stateBlob->buffer(), new_state_data.data(), element_count * sizeof(float));

        state.SetState(stateBlob);
    }

    // Reset only the first state; the remaining states must stay untouched.
    executableNet.QueryState().front().Reset();

    auto states = executableNet.QueryState();
    for (size_t i = 0; i < states.size(); ++i) {  // size_t: avoids signed/unsigned mix
        auto lastState = states[i].GetState();
        auto last_state_size = lastState->size();
        auto last_state_data = lastState->cbuffer().as<float *>();

        ASSERT_TRUE(last_state_size != 0) << "State size should not be 0";

        // First state was reset to zeros; the others keep the written constant.
        const float expected = (i == 0) ? 0.f : new_state_val;
        for (size_t j = 0; j < last_state_size; ++j) {
            EXPECT_NEAR(expected, last_state_data[j], 1e-5);
        }
    }
    IE_SUPPRESS_DEPRECATED_END
}

TEST_P(InferRequestVariableStateTest, inferreq_smoke_VariableState_QueryState) {
auto executableNet = PrepareNetwork();
auto inferReq = executableNet.CreateInferRequest();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ void DetectNetworkBatch::LoadNetwork() {
functionRefs = ngraph::clone_function(*cnnNetwork.getFunction());
ConfigureNetwork();
executableNetwork = core->LoadNetwork(cnnNetwork, targetDevice, configuration);
inferRequest = executableNetwork.CreateInferRequest();
}

TEST_P(DetectNetworkBatch, InferWithOneInput) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,10 @@ namespace ConfigurationTestsDefinitions {
ConfigureNetwork();
cnnNetwork.setBatchSize(max_batch_size);
executableNetwork = core->LoadNetwork(cnnNetwork, targetDevice, configuration);
inferRequest = executableNetwork.CreateInferRequest();
}

void DynamicBatchTest::Infer() {
inferRequest = executableNetwork.CreateInferRequest();
inputs.clear();

for (int i = 0; i < batch_sizes.size(); i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -344,6 +344,7 @@ void LayerTestsCommon::LoadNetwork() {
CoreConfiguration(this);
ConfigureNetwork();
executableNetwork = core->LoadNetwork(cnnNetwork, targetDevice, configuration);
inferRequest = executableNetwork.CreateInferRequest();
}

void LayerTestsCommon::GenerateInputs() {
Expand All @@ -361,8 +362,6 @@ void LayerTestsCommon::GenerateInputs() {
}

void LayerTestsCommon::Infer() {
inferRequest = executableNetwork.CreateInferRequest();

const auto& inputsInfo = executableNetwork.GetInputsInfo();
const auto& functionParams = function->get_parameters();
for (int i = 0; i < functionParams.size(); ++i) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ namespace LayerTestsDefinitions {
CoreConfiguration(this);
ConfigureNetwork();
executableNetwork = core->LoadNetwork(cnnNetwork, targetDevice, configuration);
inferRequest = executableNetwork.CreateInferRequest();
}
GenerateInputs();
for (int64_t i = 0; i < iteration_count; ++i) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,8 +103,8 @@ namespace SubgraphTestsDefinitions {
SKIP_IF_CURRENT_TEST_IS_DISABLED()

LoadNetwork();
IE_SUPPRESS_DEPRECATED_START
auto states = executableNetwork.QueryState();

auto states = inferRequest.QueryState();
for (auto& state : states) {
auto name = state.GetName();
if (name == "memory_1") {
Expand All @@ -119,7 +119,6 @@ namespace SubgraphTestsDefinitions {
GTEST_FAIL() << "unknown memory state";
}
}
IE_SUPPRESS_DEPRECATED_END
GenerateInputs();
Infer();
switchToNgraphFriendlyModel();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@

namespace SubgraphTestsDefinitions {
void DelayedCopyTestBase::InitMemory() {
IE_SUPPRESS_DEPRECATED_START
auto states = executableNetwork.QueryState();
auto states = inferRequest.QueryState();
for (auto& state : states) {
auto name = state.GetName();
if (name.find("id") != std::string::npos) {
Expand All @@ -18,7 +17,6 @@ namespace SubgraphTestsDefinitions {
GTEST_FAIL() << "unknown memory state";
}
}
IE_SUPPRESS_DEPRECATED_END
}

void DelayedCopyTestBase::Run() {
Expand Down
Loading

0 comments on commit 52bd59c

Please sign in to comment.