---
 libavfilter/dnn/dnn_backend_openvino.c | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/libavfilter/dnn/dnn_backend_openvino.c b/libavfilter/dnn/dnn_backend_openvino.c
index a8a02d7589..9f3c696e0a 100644
--- a/libavfilter/dnn/dnn_backend_openvino.c
+++ b/libavfilter/dnn/dnn_backend_openvino.c
@@ -432,13 +432,6 @@ static DNNReturnType execute_model_ov(RequestItem *request, Queue *inferenceq)
     ctx = &task->ov_model->ctx;
 
     if (task->async) {
-        if (ff_queue_size(inferenceq) < ctx->options.batch_size) {
-        if (ff_safe_queue_push_front(task->ov_model->request_queue, request) < 0) {
-                av_log(ctx, AV_LOG_ERROR, "Failed to push back request_queue.\n");
-                return DNN_ERROR;
-            }
-            return DNN_SUCCESS;
-        }
         ret = fill_model_input_ov(task->ov_model, request);
         if (ret != DNN_SUCCESS) {
             return ret;
@@ -793,6 +786,11 @@ DNNReturnType ff_dnn_execute_model_async_ov(const DNNModel *model, const char *i
         return DNN_ERROR;
     }
 
+    if (ff_queue_size(ov_model->inference_queue) < ctx->options.batch_size) {
+        // not enough inference items queued for a batch
+        return DNN_SUCCESS;
+    }
+
     request = ff_safe_queue_pop_front(ov_model->request_queue);
     if (!request) {
         av_log(ctx, AV_LOG_ERROR, "unable to get infer request.\n");
-- 
2.17.1
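
The gist of the change: the batch-size gate moves out of execute_model_ov() and into ff_dnn_execute_model_async_ov(), so the async entry point simply returns DNN_SUCCESS until enough inference items are queued to fill a batch, and only then pops an infer request and calls fill_model_input_ov(). A minimal, self-contained sketch of that gating pattern follows; SimpleQueue, queue_size() and BATCH_SIZE are hypothetical stand-ins for illustration, not FFmpeg's ff_queue API or ctx->options.batch_size.

#include <stdio.h>

/* Hypothetical stand-ins for illustration only; the real code uses
 * ff_queue_size() on the model's inference_queue and compares it
 * against ctx->options.batch_size. */
#define BATCH_SIZE 4

typedef struct SimpleQueue { int size; } SimpleQueue;

static int queue_size(const SimpleQueue *q) { return q->size; }

/* Mirrors the relocated check: return success early (the item stays
 * queued) until a full batch has accumulated, then dispatch it. */
static int execute_model_async(SimpleQueue *inference_queue)
{
    if (queue_size(inference_queue) < BATCH_SIZE) {
        /* not enough inference items queued for a batch */
        return 0;                        /* DNN_SUCCESS in the real code */
    }
    printf("dispatching a batch of %d items\n", queue_size(inference_queue));
    inference_queue->size = 0;           /* stand-in for popping the batch */
    return 0;
}

int main(void)
{
    SimpleQueue q = { 0 };
    for (int frame = 1; frame <= 9; frame++) {
        q.size++;                        /* one inference item per frame */
        execute_model_async(&q);
    }
    return 0;
}

Run, this sketch dispatches roughly once per BATCH_SIZE queued items, which matches the behaviour the patch gives the async path: frames keep queueing cheaply, and an infer request is only popped and filled once a full batch is available.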
