diff --git a/src/inference/src/dev/core_impl.cpp b/src/inference/src/dev/core_impl.cpp
index 32b43f346e9e44..6238fb6265082e 100644
--- a/src/inference/src/dev/core_impl.cpp
+++ b/src/inference/src/dev/core_impl.cpp
@@ -736,6 +736,7 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model(const std::shared_ptr<
     ov::AnyMap config_with_batch = config;
     // if auto-batching is applicable, the below function will patch the device name and config accordingly:
     auto model = apply_auto_batching(model_, deviceName, config_with_batch);
+    apply_rt_info(model_, config_with_batch);
 
     auto parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch, is_proxy_device(device_name));
     auto plugin = get_plugin(parsed._deviceName);
@@ -769,6 +770,7 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::compile_model(const std::shared_ptr<
     ov::AnyMap config_with_batch = config;
     // if auto-batching is applicable, the below function will patch the device name and config accordingly:
     auto model = apply_auto_batching(model_, deviceName, config_with_batch);
+    apply_rt_info(model_, config_with_batch);
 
     auto parsed = parseDeviceNameIntoConfig(deviceName, config_with_batch, is_proxy_device(deviceName));
     auto plugin = get_plugin(parsed._deviceName);
@@ -1098,6 +1100,17 @@ std::shared_ptr<const ov::Model> ov::CoreImpl::apply_auto_batching(const std::sh
     return ov::details::apply_batch_affinity(model, deviceNameWithoutBatch);
 }
 
+void ov::CoreImpl::apply_rt_info(const std::shared_ptr<const ov::Model>& model,
+                                 ov::AnyMap& config) const {
+    if (model->has_rt_info({"runtime_options", "ACTIVATIONS_SCALE_FACTOR"})) {
+        if (config.find("ACTIVATIONS_SCALE_FACTOR") == config.end()) {
+            const auto activations_scale_factor =
+                model->get_rt_info<float>({"runtime_options", "ACTIVATIONS_SCALE_FACTOR"});
+            config.insert(ov::hint::activations_scale_factor(activations_scale_factor));
+        }
+    }
+}
+
 void ov::CoreImpl::set_property(const std::string& device_name, const AnyMap& properties) {
     OPENVINO_ASSERT(device_name.find("HETERO:") != 0,
                     "set_property is supported only for HETERO itself (without devices). "
diff --git a/src/inference/src/dev/core_impl.hpp b/src/inference/src/dev/core_impl.hpp
index 7cf12f3ba3280c..27096180e471e7 100644
--- a/src/inference/src/dev/core_impl.hpp
+++ b/src/inference/src/dev/core_impl.hpp
@@ -200,6 +200,9 @@ class CoreImpl : public ov::ICore, public std::enable_shared_from_this<ov::ICore
                                           std::string& deviceName,
                                           ov::AnyMap& config) const;
 
+    void apply_rt_info(const std::shared_ptr<const ov::Model>& model,
+                       ov::AnyMap& config) const;
+
     /*
      * @brief Register plugins according to the build configuration
      */
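
In effect, the patch teaches `CoreImpl::compile_model` to look up `runtime_options / ACTIVATIONS_SCALE_FACTOR` in the model's `rt_info` and, when that property is not already present in the compile-time config, forward it as `ov::hint::activations_scale_factor`. Below is a minimal sketch of how the change is expected to behave from the public API side; it is not part of the diff, and the model path, device name, and scale values are made up for illustration (whether a given plugin actually honors the hint depends on the device):

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    auto model = core.read_model("model.xml");  // hypothetical model file

    // Store the hint in the model's rt_info under runtime_options
    // (value and nesting path chosen to match what apply_rt_info reads).
    model->set_rt_info(8.0f, "runtime_options", "ACTIVATIONS_SCALE_FACTOR");

    // No ACTIVATIONS_SCALE_FACTOR in the explicit config, so apply_rt_info()
    // should copy the rt_info value into the config as
    // ov::hint::activations_scale_factor before the plugin compiles the model.
    auto compiled = core.compile_model(model, "GPU");

    // An explicit property is expected to win over the rt_info value,
    // since apply_rt_info() only inserts the hint when the key is absent.
    auto compiled_override =
        core.compile_model(model, "GPU", ov::hint::activations_scale_factor(4.0f));
}
```

The "config wins over rt_info" behavior follows directly from the `config.find("ACTIVATIONS_SCALE_FACTOR") == config.end()` guard in `apply_rt_info`.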