// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <ie_metric_helpers.hpp>
#include <common_test_utils/test_constants.hpp>
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_icore.hpp"
#include "unit_test_utils/mocks/mock_iinfer_request.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/impl/mock_inference_plugin_internal.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iexecutable_network_internal.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_ivariable_state_internal.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp"
#include <ie_core.hpp>
#include <multi-device/multi_device_config.hpp>
#include <ngraph_functions/subgraph_builders.hpp>
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "plugin/mock_auto_device_plugin.hpp"
#include "mock_common.hpp"
#include <thread>

using ::testing::_;
using ::testing::StrEq;
using ::testing::Return;
using ::testing::NiceMock;
using Config = std::map<std::string, std::string>;
using namespace MockMultiDevice;

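// Test parameters: whether startup fallback is expected to be triggered, and the AUTO plugin
// configuration used when loading the network.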
using ConfigParams = std::tuple<bool, Config>;

// Matcher that succeeds when every key/value pair of subMap is present in the matched map.
MATCHER_P(MapContains, subMap, "Check if all the elements of the subMap are contained in the map.") {
    if (subMap.empty())
        return true;
    for (auto& item : subMap) {
        auto key = item.first;
        auto value = item.second;
        auto dest = arg.find(key);
        if (dest == arg.end()) {
            return false;
        } else if (dest->second != value) {
            return false;
        }
    }
    return true;
}
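
// Fixture that wires the AUTO (MULTI) plugin to a mocked ICore and a mocked executable network,
// so calls to ICore::LoadNetwork can be observed without touching real CPU/GPU devices.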
class AutoStartupFallback : public ::testing::TestWithParam<ConfigParams> {
public:
    std::shared_ptr<ngraph::Function> function;
    InferenceEngine::CNNNetwork cnnNet;
    std::shared_ptr<NiceMock<MockICore>> core;
    std::shared_ptr<NiceMock<MockMultiDeviceInferencePlugin>> plugin;

    // mock exeNetwork helper
    ov::SoPtr<IExecutableNetworkInternal> mockExeNetwork;
    // config for the AUTO device
    std::map<std::string, std::string> config;
    std::vector<DeviceInformation> metaDevices;
    std::shared_ptr<NiceMock<MockIInferRequestInternal>> inferReqInternal;
    size_t optimalNum;

public:
    void TearDown() override {
        core.reset();
        plugin.reset();
        mockExeNetwork = {};
        config.clear();
        metaDevices.clear();
        inferReqInternal.reset();
    }

    void SetUp() override {
        // prepare mockExeNetwork
        auto mockIExeNet = std::make_shared<NiceMock<MockIExecutableNetworkInternal>>();
        mockExeNetwork = {mockIExeNet, {}};
        // prepare mock ICore and cnnNetwork for loading
        core = std::make_shared<NiceMock<MockICore>>();
        plugin = std::make_shared<NiceMock<MockMultiDeviceInferencePlugin>>();
        function = ngraph::builder::subgraph::makeConvPoolRelu();
        cnnNet = InferenceEngine::CNNNetwork(function);
        // replace the plugin's core with the mock ICore
        plugin->SetCore(core);
        // make the mock execNetwork usable: return a mock infer request and report sane metrics
        inferReqInternal = std::make_shared<NiceMock<MockIInferRequestInternal>>();
        ON_CALL(*mockIExeNet.get(), CreateInferRequest()).WillByDefault(Return(inferReqInternal));
        IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, optimalNum, 1);
        ON_CALL(*mockIExeNet.get(), GetMetric(StrEq(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))))
            .WillByDefault(Return(optimalNum));
        IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, supportConfigs, {});
        ON_CALL(*core, GetMetric(_, StrEq(METRIC_KEY(SUPPORTED_CONFIG_KEYS)), _))
            .WillByDefault(Return(supportConfigs));
        ON_CALL(*core, GetConfig(_, StrEq(GPU_CONFIG_KEY(MAX_NUM_THREADS))))
            .WillByDefault(Return(12));
    }
};

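// With ENABLE_STARTUP_FALLBACK=YES, AUTO is expected to additionally load the network on CPU with a
// LATENCY performance hint (so inference can start while the selected GPU network is still being
// prepared); with NO, only the GPU LoadNetwork call should happen.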
TEST_P(AutoStartupFallback, releaseResource) {
    // get the test parameters
    bool startup_fallback;
    Config config;
    std::tie(startup_fallback, config) = this->GetParam();
    // exercise the AUTO plugin
    plugin->SetName("AUTO");

    ON_CALL(*core, LoadNetwork(::testing::Matcher<const InferenceEngine::CNNNetwork&>(_),
                ::testing::Matcher<const std::string&>(_),
                ::testing::Matcher<const Config&>(_))).WillByDefault(Return(mockExeNetwork));

    metaDevices = {{CommonTestUtils::DEVICE_CPU, {}, -1}, {CommonTestUtils::DEVICE_GPU, {}, -1}};
    ON_CALL(*plugin, ParseMetaDevices(_, _)).WillByDefault(Return(metaDevices));
    ON_CALL(*plugin, GetValidDevice)
        .WillByDefault([this](const std::vector<DeviceInformation>& metaDevices, const std::string& netPrecision) {
            std::list<DeviceInformation> devices(metaDevices.begin(), metaDevices.end());
            return devices;
        });
    ON_CALL(*plugin, SelectDevice(_, _, _)).WillByDefault(Return(metaDevices[1]));

    // the selected device (GPU) must always be loaded exactly once
    EXPECT_CALL(
        *core,
        LoadNetwork(::testing::Matcher<const InferenceEngine::CNNNetwork&>(_),
                    ::testing::Matcher<const std::string&>(CommonTestUtils::DEVICE_GPU),
                    ::testing::Matcher<const std::map<std::string, std::string>&>(_)))
        .Times(1);
    if (startup_fallback) {
        // with startup fallback enabled, CPU must also be loaded once, with the LATENCY hint set
        std::map<std::string, std::string> test_map = {{"PERFORMANCE_HINT", "LATENCY"}};
        EXPECT_CALL(
            *core,
            LoadNetwork(::testing::Matcher<const InferenceEngine::CNNNetwork&>(_),
                        ::testing::Matcher<const std::string&>(CommonTestUtils::DEVICE_CPU),
                        ::testing::Matcher<const std::map<std::string, std::string>&>(MapContains(test_map))))
            .Times(1);
    }

    ASSERT_NO_THROW(plugin->LoadExeNetworkImpl(cnnNet, config));
}
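
// Two parameter sets: startup fallback enabled (ENABLE_STARTUP_FALLBACK=YES) and disabled (NO).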
const std::vector<ConfigParams> testConfigs = {ConfigParams {true, {{"ENABLE_STARTUP_FALLBACK", "YES"}}},
                                               ConfigParams {false, {{"ENABLE_STARTUP_FALLBACK", "NO"}}}
                                              };

INSTANTIATE_TEST_SUITE_P(smoke_Auto_StartupFallback,
                         AutoStartupFallback,
                         ::testing::ValuesIn(testConfigs));
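
// To run just this suite from the unit-test binary (the binary name depends on the build target and
// is shown here only as an illustration):
//   ./<auto_plugin_unit_tests> --gtest_filter='smoke_Auto_StartupFallback*'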