
[FFmpeg-devel,v3,4/7] libavfilter: Remove Async Flag from DNN Filter Side

Message ID 20210824053151.14305-4-shubhanshu.e01@gmail.com
State Superseded
Series [FFmpeg-devel,v3,1/7] lavfi/dnn: Task-based Inference in Native Backend

Checks

Context               Check    Description
andriy/make_x86       success  Make finished
andriy/make_fate_x86  success  Make fate finished
andriy/make_ppc       success  Make finished
andriy/make_fate_ppc  success  Make fate finished

Commit Message

Shubhanshu Saxena Aug. 24, 2021, 5:31 a.m. UTC
Remove the async flag from the filter's perspective after the unification
of async and sync modes in the DNN backend.

Signed-off-by: Shubhanshu Saxena <shubhanshu.e01@gmail.com>
---
 doc/filters.texi                 | 14 ++++----------
 libavfilter/dnn/dnn_backend_tf.c |  7 +++++++
 libavfilter/dnn_filter_common.c  |  7 -------
 libavfilter/dnn_filter_common.h  |  2 +-
 4 files changed, 12 insertions(+), 18 deletions(-)
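
With this change, async execution is controlled through the backend_configs
string rather than a dedicated filter option. As an illustrative sketch (the
filter choice, model file, and tensor names are placeholders, not taken from
this patch), disabling async on the TensorFlow backend would look roughly like:

  # async is now a key inside backend_configs; 0 forces sync execution
  ffmpeg -i in.mp4 -vf dnn_processing=dnn_backend=tensorflow:model=model.pb:input=x:output=y:backend_configs=async=0 out.mp4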

Patch

diff --git a/doc/filters.texi b/doc/filters.texi
index b902aca12d..d99368e64b 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -10283,11 +10283,8 @@  and the second line is the name of label id 1, etc.
 The label id is considered as name if the label file is not provided.
 
 @item backend_configs
-Set the configs to be passed into backend
-
-@item async
-use DNN async execution if set (default: set),
-roll back to sync execution if the backend does not support async.
+Set the configs to be passed into backend. To use async execution, set async (default: set).
+Roll back to sync execution if the backend does not support async.
 
 @end table
 
@@ -10339,15 +10336,12 @@  Set the input name of the dnn network.
 Set the output name of the dnn network.
 
 @item backend_configs
-Set the configs to be passed into backend
+Set the configs to be passed into backend. To use async execution, set async (default: set).
+Roll back to sync execution if the backend does not support async.
 
 For tensorflow backend, you can set its configs with @option{sess_config} options,
 please use tools/python/tf_sess_config.py to get the configs of TensorFlow backend for your system.
 
-@item async
-use DNN async execution if set (default: set),
-roll back to sync execution if the backend does not support async.
-
 @end table
 
 @subsection Examples
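
Since backend_configs is an opaque string handed to the chosen backend, the
TensorFlow-specific sess_config mentioned above travels the same way. A hedged
example (the model, tensor names, and hex blob are placeholders; the real blob
comes from tools/python/tf_sess_config.py):

  # pass a TensorFlow session config through backend_configs
  ffmpeg -i in.jpg -vf dnn_processing=dnn_backend=tensorflow:model=espcn.pb:input=x:output=y:backend_configs=sess_config=0xHEXCONFIG out.jpg
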
diff --git a/libavfilter/dnn/dnn_backend_tf.c b/libavfilter/dnn/dnn_backend_tf.c
index 4a0b561f29..906934d8c0 100644
--- a/libavfilter/dnn/dnn_backend_tf.c
+++ b/libavfilter/dnn/dnn_backend_tf.c
@@ -884,6 +884,13 @@  DNNModel *ff_dnn_load_model_tf(const char *model_filename, DNNFunctionType func_
         ctx->options.nireq = av_cpu_count() / 2 + 1;
     }
 
+#if !HAVE_PTHREAD_CANCEL
+    if (ctx->options.async) {
+        ctx->options.async = 0;
+        av_log(filter_ctx, AV_LOG_WARNING, "pthread is not supported, roll back to sync.\n");
+    }
+#endif
+
     tf_model->request_queue = ff_safe_queue_create();
     if (!tf_model->request_queue) {
         goto err;
diff --git a/libavfilter/dnn_filter_common.c b/libavfilter/dnn_filter_common.c
index 455eaa37f4..3045ce0131 100644
--- a/libavfilter/dnn_filter_common.c
+++ b/libavfilter/dnn_filter_common.c
@@ -84,13 +84,6 @@  int ff_dnn_init(DnnContext *ctx, DNNFunctionType func_type, AVFilterContext *fil
         return AVERROR(EINVAL);
     }
 
-#if !HAVE_PTHREAD_CANCEL
-    if (ctx->async) {
-        ctx->async = 0;
-        av_log(filter_ctx, AV_LOG_WARNING, "pthread is not supported, roll back to sync.\n");
-    }
-#endif
-
     return 0;
 }
 
diff --git a/libavfilter/dnn_filter_common.h b/libavfilter/dnn_filter_common.h
index 4d92c1dc36..635ae631c1 100644
--- a/libavfilter/dnn_filter_common.h
+++ b/libavfilter/dnn_filter_common.h
@@ -46,7 +46,7 @@  typedef struct DnnContext {
     { "output",             "output name of the model",   OFFSET(model_outputnames_string), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },\
     { "backend_configs",    "backend configs",            OFFSET(backend_options),  AV_OPT_TYPE_STRING,    { .str = NULL }, 0, 0, FLAGS },\
     { "options", "backend configs (deprecated, use backend_configs)", OFFSET(backend_options),  AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS | AV_OPT_FLAG_DEPRECATED},\
-    { "async",              "use DNN async inference",    OFFSET(async),            AV_OPT_TYPE_BOOL,      { .i64 = 1},     0, 1, FLAGS},
+    { "async",              "use DNN async inference (ignored, use backend_configs='async=1')",    OFFSET(async),            AV_OPT_TYPE_BOOL,      { .i64 = 1},     0, 1, FLAGS},
 
 
 int ff_dnn_init(DnnContext *ctx, DNNFunctionType func_type, AVFilterContext *filter_ctx);
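
The filter-level async option itself stays in the option table so that existing
command lines keep parsing, but, per the updated help string, it is ignored;
the effective switch is the async key inside backend_configs. A sketch with
placeholder model and tensor names (dnn_detect with the OpenVINO backend is
only an example here):

  # still accepted after this patch, but a no-op on the filter side
  ffmpeg -i in.mp4 -vf dnn_detect=dnn_backend=openvino:model=net.xml:input=data:output=detection_out:async=0 -f null -
  # effective equivalent, read by the backend
  ffmpeg -i in.mp4 -vf dnn_detect=dnn_backend=openvino:model=net.xml:input=data:output=detection_out:backend_configs=async=0 -f null -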