Skip to content
Open
6 changes: 4 additions & 2 deletions src/inference/dev_api/openvino/runtime/icore.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ class OPENVINO_RUNTIME_API ICore {
* operation
* @return A pointer to compiled model
*/
virtual ov::SoPtr<ov::ICompiledModel> compile_model(const std::string& model_path,
virtual ov::SoPtr<ov::ICompiledModel> compile_model(const std::filesystem::path& model_path,
const std::string& device_name,
const ov::AnyMap& config) const = 0;

Expand Down Expand Up @@ -267,7 +267,9 @@ class OPENVINO_RUNTIME_API ICore {
* @param keep_core_property Whether to return core-level properties
* @return map of properties that are supported by device
*/
virtual AnyMap get_supported_property(const std::string& full_device_name, const AnyMap& properties, const bool keep_core_property = true) const = 0;
virtual AnyMap get_supported_property(const std::string& full_device_name,
const AnyMap& properties,
const bool keep_core_property = true) const = 0;

virtual bool device_supports_model_caching(const std::string& device_name) const = 0;

Expand Down
73 changes: 22 additions & 51 deletions src/inference/include/openvino/runtime/core.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -260,17 +260,12 @@ class OPENVINO_RUNTIME_API Core {
*/
CompiledModel compile_model(const std::string& model_path, const AnyMap& properties = {});

template <class Path, std::enable_if_t<std::is_same_v<Path, std::filesystem::path>>* = nullptr>
auto compile_model(const Path& model_path, const AnyMap& properties = {}) const {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Instead of removing it, use `if constexpr` on string vs. wstring so the correct overload is resolved:

if constexpr (std::is_same_v<typename Path::value_type, wchar_t>)
return compile_model(model_path.wstring(), properties);
else
return compile_model(model_path.string(), properties);
}
CompiledModel compile_model(const std::filesystem::path& model_path, const AnyMap& properties = {});

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
CompiledModel compile_model(const std::wstring& model_path, const AnyMap& properties = {});
#endif
template <class Path, std::enable_if_t<std::is_constructible_v<std::string, Path>>* = nullptr>
CompiledModel compile_model(const Path& model_path, const AnyMap& properties = {}) {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How is wstring version is handled?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

After I moved the implementation to std::filesystem::path, the "wstring version" was just explicitly calling the "std::filesystem::path version" without any helpers like wstring_to_string. Now it's done implicitly.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

wstring -> path conversion may not work correctly in some cases; a test should be created to confirm it (see file_path.cpp, where there is a dedicated constructor).

It is safer to keep the wstring version and make the call using the ov::util::make_path conversion.

return compile_model(std::string(model_path), properties);
}
/// @}

/**
Expand All @@ -281,34 +276,25 @@ class OPENVINO_RUNTIME_API Core {
* especially for cases when caching is enabled and cached model is available
*
* @tparam Properties Should be the pack of `std::pair<std::string, ov::Any>` types
* @param model_path path to model with string or wstring
* @param model_path path to model
* @param properties Optional pack of pairs: (property name, property value) relevant only for this
* load operation
*
* @return A compiled model
* @{
*/
template <typename... Properties>
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const std::string& model_path,
template <class Path, class... Properties, std::enable_if_t<std::is_constructible_v<std::string, Path>>* = nullptr>
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const Path& model_path,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What about wstring version?

Properties&&... properties) {
return compile_model(model_path, AnyMap{std::forward<Properties>(properties)...});
return compile_model(std::string(model_path), AnyMap{std::forward<Properties>(properties)...});
}

template <class Path, class... Properties, std::enable_if_t<std::is_same_v<Path, std::filesystem::path>>* = nullptr>
auto compile_model(const Path& model_path, Properties&&... properties) {
if constexpr (std::is_same_v<typename Path::value_type, wchar_t>)
return compile_model(model_path.wstring(), std::forward<Properties>(properties)...);
else
return compile_model(model_path.string(), std::forward<Properties>(properties)...);
}

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
template <typename... Properties>
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const std::wstring& model_path,
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const std::filesystem::path& model_path,
Properties&&... properties) {
return compile_model(model_path, AnyMap{std::forward<Properties>(properties)...});
}
#endif

/// @}

/**
Expand All @@ -329,19 +315,14 @@ class OPENVINO_RUNTIME_API Core {
const std::string& device_name,
const AnyMap& properties = {});

template <class Path, std::enable_if_t<std::is_same_v<Path, std::filesystem::path>>* = nullptr>
auto compile_model(const Path& model_path, const std::string& device_name, const AnyMap& properties = {}) {
if constexpr (std::is_same_v<typename Path::value_type, wchar_t>)
return compile_model(model_path.wstring(), device_name, properties);
else
return compile_model(model_path.string(), device_name, properties);
}

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
CompiledModel compile_model(const std::wstring& model_path,
CompiledModel compile_model(const std::filesystem::path& model_path,
const std::string& device_name,
const AnyMap& properties = {});
#endif
const AnyMap& config);

template <class Path, std::enable_if_t<std::is_constructible_v<std::string, Path>>* = nullptr>
CompiledModel compile_model(const Path& model_path, const std::string& device_name, const AnyMap& properties = {}) {
return compile_model(std::string(model_path), device_name, properties);
}
/// @}

/**
Expand All @@ -359,29 +340,19 @@ class OPENVINO_RUNTIME_API Core {
* @return A compiled model.
* @{
*/
template <typename... Properties>
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const std::string& model_path,
template <class Path, class... Properties, std::enable_if_t<std::is_constructible_v<Path, std::string>>* = nullptr>
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const Path& model_path,
const std::string& device_name,
Properties&&... properties) {
return compile_model(model_path, device_name, AnyMap{std::forward<Properties>(properties)...});
}

template <class Path, class... Properties, std::enable_if_t<std::is_same_v<Path, std::filesystem::path>>* = nullptr>
auto compile_model(const Path& model_path, const std::string& device_name, Properties&&... properties) {
if constexpr (std::is_same_v<typename Path::value_type, wchar_t>)
return compile_model(model_path.wstring(), device_name, std::forward<Properties>(properties)...);
else
return compile_model(model_path.string(), device_name, std::forward<Properties>(properties)...);
return compile_model(std::string(model_path), device_name, AnyMap{std::forward<Properties>(properties)...});
}

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
template <typename... Properties>
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const std::wstring& model_path,
util::EnableIfAllStringAny<CompiledModel, Properties...> compile_model(const std::filesystem::path& model_path,
const std::string& device_name,
Properties&&... properties) {
return compile_model(model_path, device_name, AnyMap{std::forward<Properties>(properties)...});
}
#endif
/// @}

/**
Expand Down
26 changes: 11 additions & 15 deletions src/inference/src/cpp/core.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -113,34 +113,30 @@ CompiledModel Core::compile_model(const std::shared_ptr<const ov::Model>& model,
});
}

CompiledModel Core::compile_model(const std::string& model_path, const AnyMap& config) {
CompiledModel Core::compile_model(const std::filesystem::path& model_path, const AnyMap& config) {
OV_ITT_SCOPED_REGION_BASE(ov::itt::domains::OV, "Compile model");
return compile_model(model_path, ov::default_device_name, config);
}

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
CompiledModel Core::compile_model(const std::wstring& model_path, const AnyMap& config) {
OV_ITT_SCOPED_REGION_BASE(ov::itt::domains::OV, "Compile model");
return compile_model(ov::util::wstring_to_string(model_path), config);
}
#endif

CompiledModel Core::compile_model(const std::string& model_path, const std::string& device_name, const AnyMap& config) {
CompiledModel Core::compile_model(const std::filesystem::path& model_path,
const std::string& device_name,
const AnyMap& config) {
OV_ITT_SCOPED_REGION_BASE(ov::itt::domains::OV, "Compile model");
OV_CORE_CALL_STATEMENT({
auto exec = _impl->compile_model(model_path, device_name, config);
return {exec._ptr, exec._so};
});
}

#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
CompiledModel Core::compile_model(const std::wstring& model_path,
const std::string& device_name,
const AnyMap& config) {
CompiledModel Core::compile_model(const std::string& model_path, const AnyMap& config) {
OV_ITT_SCOPED_REGION_BASE(ov::itt::domains::OV, "Compile model");
return compile_model(ov::util::wstring_to_string(model_path), device_name, config);
return compile_model(ov::util::make_path(model_path), ov::default_device_name, config);
}

CompiledModel Core::compile_model(const std::string& model_path, const std::string& device_name, const AnyMap& config) {
OV_ITT_SCOPED_REGION_BASE(ov::itt::domains::OV, "Compile model");
return compile_model(ov::util::make_path(model_path), device_name, config);
}
#endif

CompiledModel Core::compile_model(const std::string& model,
const ov::Tensor& weights,
Expand Down
15 changes: 9 additions & 6 deletions src/inference/src/dev/core_impl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ std::filesystem::path extract_weight_path(const std::string& compiled_properties
}
}

using model_hint_t = std::variant<std::shared_ptr<const ov::Model>, std::string>;
using model_hint_t = std::variant<std::shared_ptr<const ov::Model>, std::filesystem::path>;

ov::SoPtr<ov::ICompiledModel> import_compiled_model(const ov::Plugin& plugin,
const ov::SoPtr<ov::IRemoteContext>& context,
Expand Down Expand Up @@ -257,13 +257,13 @@ ov::SoPtr<ov::ICompiledModel> import_compiled_model(const ov::Plugin& plugin,
cfg[ov::hint::model.name()] = model_ptr;
}
},
[&cfg, &plugin](const std::string& model_path) {
[&cfg, &plugin](const std::filesystem::path& model_path) {
if (cfg.count(ov::weights_path.name()) == 0 &&
ov::util::contains(plugin.get_property(ov::supported_properties), ov::weights_path)) {
std::filesystem::path weights_path{model_path};
weights_path.replace_extension(".bin");
if (ov::util::file_exists(weights_path)) {
cfg[ov::weights_path.name()] = weights_path.string();
cfg[ov::weights_path.name()] = ov::util::path_to_string(weights_path);
}
}
}};
Expand Down Expand Up @@ -899,7 +899,7 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model(const std::shared_ptr<
return res;
}

ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model(const std::string& model_path,
ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model(const std::filesystem::path& model_path,
const std::string& device_name,
const ov::AnyMap& config) const {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::LoadTime, "Core::compile_model::Path");
Expand All @@ -915,7 +915,7 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model(const std::string& mod
// Skip caching for proxy plugin. HW plugin will load network from the cache
CoreConfig::remove_core(parsed.m_config);
emplace_cache_dir_if_supported(parsed.m_config, plugin, cache_dir);
CacheContent cache_content{cache_manager, parsed.m_core_config.get_enable_mmap(), util::make_path(model_path)};
CacheContent cache_content{cache_manager, parsed.m_core_config.get_enable_mmap(), model_path};
cache_content.m_blob_id =
ov::ModelCache::compute_hash(cache_content.m_model_path, create_compile_config(plugin, parsed.m_config));
const auto lock = m_cache_guard.get_hash_lock(cache_content.m_blob_id);
Expand Down Expand Up @@ -1732,7 +1732,10 @@ std::shared_ptr<ov::Model> ov::CoreImpl::read_model(const std::string& modelPath
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::ReadTime, "CoreImpl::read_model from file");
auto local_core_config = m_core_config;
local_core_config.set(properties, {});
return ov::util::read_model(modelPath, binPath, get_extensions_copy(), local_core_config.get_enable_mmap());
return ov::util::read_model(ov::util::make_path(modelPath),
ov::util::make_path(binPath),
get_extensions_copy(),
local_core_config.get_enable_mmap());
}

std::shared_ptr<ov::Model> ov::CoreImpl::read_model(const std::string& model,
Expand Down
2 changes: 1 addition & 1 deletion src/inference/src/dev/core_impl.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,7 @@ class CoreImpl : public ov::ICore, public std::enable_shared_from_this<ov::ICore
const ov::SoPtr<ov::IRemoteContext>& context,
const ov::AnyMap& config = {}) const override;

ov::SoPtr<ov::ICompiledModel> compile_model(const std::string& model_path,
ov::SoPtr<ov::ICompiledModel> compile_model(const std::filesystem::path& model_path,
const std::string& device_name,
const ov::AnyMap& config) const override;

Expand Down
5 changes: 3 additions & 2 deletions src/inference/src/dev/plugin.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
#include "openvino/runtime/internal_properties.hpp"
#include "openvino/runtime/properties.hpp"
#include "openvino/util/common_util.hpp"
#include "openvino/util/file_util.hpp"

#define OV_PLUGIN_CALL_STATEMENT(...) \
OPENVINO_ASSERT(m_ptr != nullptr, "OpenVINO Runtime Plugin was not initialized."); \
Expand Down Expand Up @@ -53,9 +54,9 @@ ov::SoPtr<ov::ICompiledModel> ov::Plugin::compile_model(const std::shared_ptr<co
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model, properties), m_so});
}

ov::SoPtr<ov::ICompiledModel> ov::Plugin::compile_model(const std::string& model_path,
ov::SoPtr<ov::ICompiledModel> ov::Plugin::compile_model(const std::filesystem::path& model_path,
const ov::AnyMap& properties) const {
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(model_path, properties), m_so});
OV_PLUGIN_CALL_STATEMENT(return {m_ptr->compile_model(ov::util::path_to_string(model_path), properties), m_so});
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I assume this path conversion will be remove when plugin interface will be updated?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes

}

ov::SoPtr<ov::ICompiledModel> ov::Plugin::compile_model(const std::shared_ptr<const ov::Model>& model,
Expand Down
4 changes: 2 additions & 2 deletions src/inference/src/dev/plugin.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ class Plugin {
SoPtr<ov::ICompiledModel> compile_model(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& properties) const;

SoPtr<ov::ICompiledModel> compile_model(const std::string& model_path, const ov::AnyMap& properties) const;
SoPtr<ov::ICompiledModel> compile_model(const std::filesystem::path& model_path,
const ov::AnyMap& properties) const;

SoPtr<ov::ICompiledModel> compile_model(const std::shared_ptr<const ov::Model>& model,
const ov::SoPtr<ov::IRemoteContext>& context,
Expand Down Expand Up @@ -84,4 +85,3 @@ class Plugin {
};

} // namespace ov

27 changes: 7 additions & 20 deletions src/inference/src/model_reader.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -113,31 +113,19 @@ void update_v10_model(std::shared_ptr<ov::Model>& model, bool frontendMode = fal
namespace ov {
namespace util {

std::shared_ptr<ov::Model> read_model(const std::string& modelPath,
const std::string& binPath,
std::shared_ptr<ov::Model> read_model(const std::filesystem::path& model_path,
const std::filesystem::path& bin_path,
const std::vector<ov::Extension::Ptr>& extensions,
bool enable_mmap) {
// Fix unicode name
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
std::wstring model_path = ov::util::string_to_wstring(modelPath.c_str());
#else
std::string model_path = modelPath;
#endif

// Try to load with FrontEndManager
ov::frontend::FrontEndManager manager;
ov::frontend::FrontEnd::Ptr FE;
ov::frontend::InputModel::Ptr inputModel;

ov::AnyVector params{model_path};
ov::AnyVector params{model_path.native()};

if (!binPath.empty()) {
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
const std::wstring& weights_path = ov::util::string_to_wstring(binPath.c_str());
#else
const std::string& weights_path = binPath;
#endif
params.emplace_back(weights_path);
if (!bin_path.empty()) {
params.emplace_back(bin_path.native());
}
params.emplace_back(enable_mmap);

Expand All @@ -153,14 +141,13 @@ std::shared_ptr<ov::Model> read_model(const std::string& modelPath,
return model;
}

const auto fileExt = modelPath.substr(modelPath.find_last_of(".") + 1);
std::string FEs;
for (const auto& fe_name : manager.get_available_front_ends())
FEs += fe_name + " ";
OPENVINO_THROW("Unable to read the model: ",
modelPath,
model_path,
" Please check that model format: ",
fileExt,
model_path.extension(),
" is supported and the model is correct.",
" Available frontends: ",
FEs);
Expand Down
8 changes: 4 additions & 4 deletions src/inference/src/model_reader.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,15 @@ namespace util {

/**
* @brief Reads model
* @param modelPath path to Model file
* @param binPath optional path for model weights. If empty for IR we will find bin file with the model name.
* @param model_path path to Model file
* @param bin_path optional path for model weights. If empty for IR we will find bin file with the model name.
* if bin file with the same name was not found, will load IR without weights.
* @param extensions vector with OpenVINO extensions
* @param enable_mmap boolean to enable/disable `mmap` use in Frontend
* @return Shared pointer to ov::Model
*/
std::shared_ptr<ov::Model> read_model(const std::string& modelPath,
const std::string& binPath,
std::shared_ptr<ov::Model> read_model(const std::filesystem::path& model_path,
const std::filesystem::path& bin_path,
const std::vector<ov::Extension::Ptr>& extensions,
bool enable_mmap);

Expand Down
5 changes: 3 additions & 2 deletions src/plugins/auto/src/auto_schedule.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -320,8 +320,9 @@ void AutoSchedule::try_to_compile_model(AutoCompileContext& context, const std::
try {
auto compile_start_time = std::chrono::high_resolution_clock::now();
if (!(m_context->m_model_path.empty())) {
context.m_compiled_model =
m_context->m_ov_core->compile_model(m_context->m_model_path, device, device_config);
context.m_compiled_model = m_context->m_ov_core->compile_model(ov::util::make_path(m_context->m_model_path),
device,
device_config);
} else {
context.m_compiled_model = m_context->m_ov_core->compile_model(model, device, device_config);
}
Expand Down
Loading
Loading