diff --git a/onnxruntime/core/providers/openvino/backends/basic_backend.cc b/onnxruntime/core/providers/openvino/backends/basic_backend.cc
index 9da6e5945ab83..2400480ab9965 100644
--- a/onnxruntime/core/providers/openvino/backends/basic_backend.cc
+++ b/onnxruntime/core/providers/openvino/backends/basic_backend.cc
@@ -545,6 +545,8 @@ void BasicBackend::Infer(OrtKernelContext* ctx) {
       std::cout << "Inference successful" << std::endl;
     }
 
+    OVInferRequestPtr infer_request_ = infer_request;
+
     // Once the inference is completed, the infer_request becomes free and is placed back into pool of infer_requests_
     inferRequestsQueue_->putIdleRequest(std::move(infer_request));
 #ifndef NDEBUG
@@ -552,7 +554,7 @@ void BasicBackend::Infer(OrtKernelContext* ctx) {
     if (openvino_ep::backend_utils::IsDebugEnabled()) {
       inferRequestsQueue_->printstatus();  // Printing the elements of infer_requests_ vector pool only in debug mode
       std::string& hw_target = global_context_.device_type;
-      printPerformanceCounts(infer_request, std::cout, hw_target);
+      printPerformanceCounts(std::move(infer_request_), std::cout, hw_target);
     }
 #endif
 #endif
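
The diff keeps a second reference to the infer request (`OVInferRequestPtr infer_request_ = infer_request;`) before the original handle is moved back into the idle-request pool, so the debug-only `printPerformanceCounts` call no longer reads from a moved-from pointer. Below is a minimal, self-contained C++ sketch of that copy-before-move pattern; it assumes `OVInferRequestPtr` is a `std::shared_ptr` alias (which the copy-then-move usage suggests), and every type and function in it (`FakeInferRequest`, `FakeRequestQueue`, `printPerformanceCountsSketch`) is invented for illustration, not taken from ONNX Runtime or OpenVINO.

// Minimal copy-before-move sketch (hypothetical types, not ORT/OpenVINO code).
#include <iostream>
#include <memory>
#include <queue>
#include <string>
#include <utility>

struct FakeInferRequest {
  std::string name;
};
using FakeInferRequestPtr = std::shared_ptr<FakeInferRequest>;

class FakeRequestQueue {
 public:
  // Takes ownership of the handle that is moved in, like putIdleRequest in the diff.
  void putIdleRequest(FakeInferRequestPtr req) { idle_.push(std::move(req)); }

 private:
  std::queue<FakeInferRequestPtr> idle_;
};

// Stand-in for the debug-only reporting call; dereferences the pointer it is given.
void printPerformanceCountsSketch(FakeInferRequestPtr req, std::ostream& os) {
  os << "perf counts for " << req->name << '\n';
}

int main() {
  FakeRequestQueue queue;
  auto infer_request = std::make_shared<FakeInferRequest>();
  infer_request->name = "req-0";

  // Keep a second shared_ptr before the original is moved back into the pool,
  // mirroring `OVInferRequestPtr infer_request_ = infer_request;` in the diff.
  FakeInferRequestPtr infer_request_copy = infer_request;

  // `infer_request` is empty (null) after this move.
  queue.putIdleRequest(std::move(infer_request));

  // The debug-only call still has a valid pointer through the copy;
  // using `infer_request` here instead would dereference a null shared_ptr.
  printPerformanceCountsSketch(std::move(infer_request_copy), std::cout);
  return 0;
}

Because copying a shared_ptr only bumps the reference count, the pooled request object stays alive for the duration of the debug call and the extra reference is released when the copy goes out of scope.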