[FFmpeg-devel] dnn: rename function from dnn_execute_layer_pad to avfilter_dnn_execute_layer_pad

Submitted by Guo, Yejun on Aug. 1, 2019, 6:18 a.m.

Details

Message ID 1564640324-759-1-git-send-email-yejun.guo@intel.com
State New
Headers show

Commit Message

Guo, Yejun Aug. 1, 2019, 6:18 a.m.
background:
DNN (deep neural network) is a sub module of libavfilter, and FATE/dnn
is the unit test suite for the DNN module, with one unit test per dnn layer.
The unit tests are not based on the APIs exported by libavfilter,
they just directly call into the functions within DNN submodule.
(It is not easy for the test to call libavfilter APIs to verify dnn layers.)

There is an issue when running the following commands:
build$ ../ffmpeg/configure --disable-static --enable-shared
make
make fate-dnn-layer-pad

And part of error message:
tests/dnn/dnn-layer-pad-test.o: In function `test_with_mode_symmetric':
/work/media/ffmpeg/build/src/tests/dnn/dnn-layer-pad-test.c:73: undefined reference to `dnn_execute_layer_pad'

The root cause is that function dnn_execute_layer_pad is a LOCAL symbol
in libavfilter.so, and so the linker could not find it when building dnn-layer-pad-test.

To check it, just run: readelf -s libavfilter/libavfilter.so | grep dnn

There are three possible methods to fix this issue.
1). delete the dnn unit tests from FATE.
I don't think it is a good choice.

2). ask people to build a static library if they want to use FATE.

3). export the dnn layer functions.
And that is what this patch is for.

Besides renaming the function so that it is exported from the .so library, we
also need to add the option -Wl,-rpath when building the dnn unit tests, so the
path is written into the test binary and helps locate the .so libraries during execution.

Signed-off-by: Guo, Yejun <yejun.guo@intel.com>
---
 libavfilter/dnn/dnn_backend_native.c           | 2 +-
 libavfilter/dnn/dnn_backend_native_layer_pad.c | 2 +-
 libavfilter/dnn/dnn_backend_native_layer_pad.h | 2 +-
 tests/dnn/Makefile                             | 4 +++-
 tests/dnn/dnn-layer-pad-test.c                 | 6 +++---
 5 files changed, 9 insertions(+), 7 deletions(-)

Comments

Paul B Mahol Aug. 1, 2019, 9:05 a.m.
I do not like this.

On Thu, Aug 1, 2019 at 8:22 AM Guo, Yejun <yejun.guo@intel.com> wrote:

> background:
> DNN (deep neural network) is a sub module of libavfilter, and FATE/dnn
> is unit test for the DNN module, one unit test for one dnn layer.
> The unit tests are not based on the APIs exported by libavfilter,
> they just directly call into the functions within DNN submodule.
> (It is not easy for the test to call libavfilter APIs to verify dnn
> layers.)
>
> There is an issue when run the following command:
> build$ ../ffmpeg/configure --disable-static --enable-shared
> make
> make fate-dnn-layer-pad
>
> And part of error message:
> tests/dnn/dnn-layer-pad-test.o: In function `test_with_mode_symmetric':
> /work/media/ffmpeg/build/src/tests/dnn/dnn-layer-pad-test.c:73: undefined
> reference to `dnn_execute_layer_pad'
>
> The root cause is that function dnn_execute_layer_pad is a LOCAL symbol
> in libavfilter.so, and so the linker could not find it when build
> dnn-layer-pad-test.
>
> To check it, just run: readelf -s libavfilter/libavfilter.so | grep dnn
>
> There are three possible mothods to fix this issue.
> 1). delete the dnn unit tests from FATE.
> I don't think it is a good choice.
>
> 2). ask people to build static library if he wants to use FATE.
>
> 3). export the dnn layer functions.
> And it is what this patch for.
>
> Besides the function rename to export from .so library, we also need
> to add option -Wl,-rpath to build the dnn unit tests, so the path is
> written into the test binary and help to find the .so libraries during
> execution.
>
> Signed-off-by: Guo, Yejun <yejun.guo@intel.com>
> ---
>  libavfilter/dnn/dnn_backend_native.c           | 2 +-
>  libavfilter/dnn/dnn_backend_native_layer_pad.c | 2 +-
>  libavfilter/dnn/dnn_backend_native_layer_pad.h | 2 +-
>  tests/dnn/Makefile                             | 4 +++-
>  tests/dnn/dnn-layer-pad-test.c                 | 6 +++---
>  5 files changed, 9 insertions(+), 7 deletions(-)
>
> diff --git a/libavfilter/dnn/dnn_backend_native.c
> b/libavfilter/dnn/dnn_backend_native.c
> index 09c583b..fa412e0 100644
> --- a/libavfilter/dnn/dnn_backend_native.c
> +++ b/libavfilter/dnn/dnn_backend_native.c
> @@ -377,7 +377,7 @@ DNNReturnType ff_dnn_execute_model_native(const
> DNNModel *model, DNNData *output
>              break;
>          case MIRROR_PAD:
>              pad_params = (LayerPadParams *)network->layers[layer].params;
> -            dnn_execute_layer_pad(network->layers[layer - 1].output,
> network->layers[layer].output,
> +            avfilter_dnn_execute_layer_pad(network->layers[layer -
> 1].output, network->layers[layer].output,
>                                    pad_params, 1, cur_height, cur_width,
> cur_channels);
>              cur_height = cur_height + pad_params->paddings[1][0] +
> pad_params->paddings[1][1];
>              cur_width = cur_width + pad_params->paddings[2][0] +
> pad_params->paddings[2][1];
> diff --git a/libavfilter/dnn/dnn_backend_native_layer_pad.c
> b/libavfilter/dnn/dnn_backend_native_layer_pad.c
> index 5417d73..526ddfe 100644
> --- a/libavfilter/dnn/dnn_backend_native_layer_pad.c
> +++ b/libavfilter/dnn/dnn_backend_native_layer_pad.c
> @@ -48,7 +48,7 @@ static int after_get_buddy(int given, int border,
> LayerPadModeParam mode)
>      }
>  }
>
> -void dnn_execute_layer_pad(const float *input, float *output, const
> LayerPadParams *params, int number, int height, int width, int channel)
> +void avfilter_dnn_execute_layer_pad(const float *input, float *output,
> const LayerPadParams *params, int number, int height, int width, int
> channel)
>  {
>      int32_t before_paddings;
>      int32_t after_paddings;
> diff --git a/libavfilter/dnn/dnn_backend_native_layer_pad.h
> b/libavfilter/dnn/dnn_backend_native_layer_pad.h
> index 0fbe652..3f93d6d 100644
> --- a/libavfilter/dnn/dnn_backend_native_layer_pad.h
> +++ b/libavfilter/dnn/dnn_backend_native_layer_pad.h
> @@ -35,6 +35,6 @@ typedef struct LayerPadParams{
>      float constant_values;
>  } LayerPadParams;
>
> -void dnn_execute_layer_pad(const float *input, float *output, const
> LayerPadParams *params, int number, int height, int width, int channel);
> +void avfilter_dnn_execute_layer_pad(const float *input, float *output,
> const LayerPadParams *params, int number, int height, int width, int
> channel);
>
>  #endif
> diff --git a/tests/dnn/Makefile b/tests/dnn/Makefile
> index b2e6680..a34f2e7 100644
> --- a/tests/dnn/Makefile
> +++ b/tests/dnn/Makefile
> @@ -4,8 +4,10 @@ DNNTESTOBJS  := $(DNNTESTOBJS:%=$(DNNTESTSDIR)%)
> $(DNNTESTPROGS:%=$(DNNTESTSDIR)
>  DNNTESTPROGS := $(DNNTESTPROGS:%=$(DNNTESTSDIR)/%-test$(EXESUF))
>  -include $(wildcard $(DNNTESTOBJS:.o=.d))
>
> +LDDNNFLAGS =
> -Wl,-rpath=:libpostproc:libswresample:libswscale:libavfilter:libavdevice:libavformat:libavcodec:libavutil:libavresample
> +
>  $(DNNTESTPROGS): %$(EXESUF): %.o $(FF_DEP_LIBS)
> -       $(LD) $(LDFLAGS) $(LDEXEFLAGS) $(LD_O) $(filter %.o,$^)
> $(FF_EXTRALIBS) $(ELIBS)
> +       $(LD) $(LDFLAGS) $(LDDNNFLAGS) $(LDEXEFLAGS) $(LD_O) $(filter
> %.o,$^) $(FF_EXTRALIBS) $(ELIBS)
>
>  testclean::
>         $(RM) $(addprefix $(DNNTESTSDIR)/,$(CLEANSUFFIXES) *-test$(EXESUF))
> diff --git a/tests/dnn/dnn-layer-pad-test.c
> b/tests/dnn/dnn-layer-pad-test.c
> index 28a49eb..078a2f4 100644
> --- a/tests/dnn/dnn-layer-pad-test.c
> +++ b/tests/dnn/dnn-layer-pad-test.c
> @@ -70,7 +70,7 @@ static int test_with_mode_symmetric(void)
>      params.paddings[3][0] = 0;
>      params.paddings[3][1] = 0;
>
> -    dnn_execute_layer_pad(input, output, &params, 1, 4, 4, 3);
> +    avfilter_dnn_execute_layer_pad(input, output, &params, 1, 4, 4, 3);
>
>      for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
>          if (fabs(output[i] - expected_output[i]) > EPSON) {
> @@ -123,7 +123,7 @@ static int test_with_mode_reflect(void)
>      params.paddings[3][0] = 0;
>      params.paddings[3][1] = 0;
>
> -    dnn_execute_layer_pad(input, output, &params, 3, 2, 2, 3);
> +    avfilter_dnn_execute_layer_pad(input, output, &params, 3, 2, 2, 3);
>
>      for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
>          if (fabs(output[i] - expected_output[i]) > EPSON) {
> @@ -177,7 +177,7 @@ static int test_with_mode_constant(void)
>      params.paddings[3][0] = 1;
>      params.paddings[3][1] = 2;
>
> -    dnn_execute_layer_pad(input, output, &params, 1, 2, 2, 3);
> +    avfilter_dnn_execute_layer_pad(input, output, &params, 1, 2, 2, 3);
>
>      for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
>          if (fabs(output[i] - expected_output[i]) > EPSON) {
> --
> 2.7.4
>
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Paul B Mahol Aug. 1, 2019, 9:09 a.m.
Why test uses internal function, why was this allowed to be committed at
all?
Who is reviewing this mess?

Why test does not use normal filtergraph?

>
Nicolas George Aug. 1, 2019, 9:09 a.m.
Paul B Mahol (12019-08-01):
> I do not like this.

I agree. The av namespace is for public functions.

Regards,
Guo, Yejun Aug. 1, 2019, 12:14 p.m.
> -----Original Message-----
> From: ffmpeg-devel [mailto:ffmpeg-devel-bounces@ffmpeg.org] On Behalf Of
> Nicolas George
> Sent: Thursday, August 01, 2019 5:10 PM
> To: FFmpeg development discussions and patches <ffmpeg-devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH] dnn: rename function from
> dnn_execute_layer_pad to avfilter_dnn_execute_layer_pad
> 
> Paul B Mahol (12019-08-01):
> > I do not like this.
> 
> I agree. The av namespace is for public functions.

sorry that the test uses the internal function at unit test level.
I'll send out a patch to revert it since it is not allowed, I'll then consider a better solution for the test.

My intention was to verify the dnn layers separately and directly, so one test for each layer was my choice.
And so I can add more dnn layers and its unit test one by one, and then add dnn generic filters at proper time.

I now know it is not the right choice and I'll plan to add filter test into FATE after a meaningful filter is enabled.

> 
> Regards,
> 
> --
>   Nicolas George
Pedro Arthur Aug. 1, 2019, 4 p.m.
Hi,

Em qui, 1 de ago de 2019 às 06:36, Paul B Mahol <onemda@gmail.com> escreveu:
>
> Why test uses internal function, why was this allowed to be committed at
> all?
> Who is reviewing this mess?
>
> Why test does not use normal filtergraph?
>
I was responsible for pushing the patch; thanks for pointing out the issues.
Could you provide more details regarding the proper way to make the test?

Thanks.

> >
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Paul B Mahol Aug. 1, 2019, 4:14 p.m.
On Thu, Aug 1, 2019 at 6:07 PM Pedro Arthur <bygrandao@gmail.com> wrote:

> Hi,
>
> Em qui, 1 de ago de 2019 às 06:36, Paul B Mahol <onemda@gmail.com>
> escreveu:
> >
> > Why test uses internal function, why was this allowed to be committed at
> > all?
> > Who is reviewing this mess?
> >
> > Why test does not use normal filtergraph?
> >
> I was responsible for pushing the patch, thanks for point out the issues.
> Could you provide more details regarding the proper way to make the test?
>
>
I believe Hendrik already pointed to right answer, in another thread.

Patch hide | download patch | download mbox

diff --git a/libavfilter/dnn/dnn_backend_native.c b/libavfilter/dnn/dnn_backend_native.c
index 09c583b..fa412e0 100644
--- a/libavfilter/dnn/dnn_backend_native.c
+++ b/libavfilter/dnn/dnn_backend_native.c
@@ -377,7 +377,7 @@  DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *output
             break;
         case MIRROR_PAD:
             pad_params = (LayerPadParams *)network->layers[layer].params;
-            dnn_execute_layer_pad(network->layers[layer - 1].output, network->layers[layer].output,
+            avfilter_dnn_execute_layer_pad(network->layers[layer - 1].output, network->layers[layer].output,
                                   pad_params, 1, cur_height, cur_width, cur_channels);
             cur_height = cur_height + pad_params->paddings[1][0] + pad_params->paddings[1][1];
             cur_width = cur_width + pad_params->paddings[2][0] + pad_params->paddings[2][1];
diff --git a/libavfilter/dnn/dnn_backend_native_layer_pad.c b/libavfilter/dnn/dnn_backend_native_layer_pad.c
index 5417d73..526ddfe 100644
--- a/libavfilter/dnn/dnn_backend_native_layer_pad.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_pad.c
@@ -48,7 +48,7 @@  static int after_get_buddy(int given, int border, LayerPadModeParam mode)
     }
 }
 
-void dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel)
+void avfilter_dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel)
 {
     int32_t before_paddings;
     int32_t after_paddings;
diff --git a/libavfilter/dnn/dnn_backend_native_layer_pad.h b/libavfilter/dnn/dnn_backend_native_layer_pad.h
index 0fbe652..3f93d6d 100644
--- a/libavfilter/dnn/dnn_backend_native_layer_pad.h
+++ b/libavfilter/dnn/dnn_backend_native_layer_pad.h
@@ -35,6 +35,6 @@  typedef struct LayerPadParams{
     float constant_values;
 } LayerPadParams;
 
-void dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel);
+void avfilter_dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel);
 
 #endif
diff --git a/tests/dnn/Makefile b/tests/dnn/Makefile
index b2e6680..a34f2e7 100644
--- a/tests/dnn/Makefile
+++ b/tests/dnn/Makefile
@@ -4,8 +4,10 @@  DNNTESTOBJS  := $(DNNTESTOBJS:%=$(DNNTESTSDIR)%) $(DNNTESTPROGS:%=$(DNNTESTSDIR)
 DNNTESTPROGS := $(DNNTESTPROGS:%=$(DNNTESTSDIR)/%-test$(EXESUF))
 -include $(wildcard $(DNNTESTOBJS:.o=.d))
 
+LDDNNFLAGS = -Wl,-rpath=:libpostproc:libswresample:libswscale:libavfilter:libavdevice:libavformat:libavcodec:libavutil:libavresample
+
 $(DNNTESTPROGS): %$(EXESUF): %.o $(FF_DEP_LIBS)
-	$(LD) $(LDFLAGS) $(LDEXEFLAGS) $(LD_O) $(filter %.o,$^) $(FF_EXTRALIBS) $(ELIBS)
+	$(LD) $(LDFLAGS) $(LDDNNFLAGS) $(LDEXEFLAGS) $(LD_O) $(filter %.o,$^) $(FF_EXTRALIBS) $(ELIBS)
 
 testclean::
 	$(RM) $(addprefix $(DNNTESTSDIR)/,$(CLEANSUFFIXES) *-test$(EXESUF))
diff --git a/tests/dnn/dnn-layer-pad-test.c b/tests/dnn/dnn-layer-pad-test.c
index 28a49eb..078a2f4 100644
--- a/tests/dnn/dnn-layer-pad-test.c
+++ b/tests/dnn/dnn-layer-pad-test.c
@@ -70,7 +70,7 @@  static int test_with_mode_symmetric(void)
     params.paddings[3][0] = 0;
     params.paddings[3][1] = 0;
 
-    dnn_execute_layer_pad(input, output, &params, 1, 4, 4, 3);
+    avfilter_dnn_execute_layer_pad(input, output, &params, 1, 4, 4, 3);
 
     for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
         if (fabs(output[i] - expected_output[i]) > EPSON) {
@@ -123,7 +123,7 @@  static int test_with_mode_reflect(void)
     params.paddings[3][0] = 0;
     params.paddings[3][1] = 0;
 
-    dnn_execute_layer_pad(input, output, &params, 3, 2, 2, 3);
+    avfilter_dnn_execute_layer_pad(input, output, &params, 3, 2, 2, 3);
 
     for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
         if (fabs(output[i] - expected_output[i]) > EPSON) {
@@ -177,7 +177,7 @@  static int test_with_mode_constant(void)
     params.paddings[3][0] = 1;
     params.paddings[3][1] = 2;
 
-    dnn_execute_layer_pad(input, output, &params, 1, 2, 2, 3);
+    avfilter_dnn_execute_layer_pad(input, output, &params, 1, 2, 2, 3);
 
     for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
         if (fabs(output[i] - expected_output[i]) > EPSON) {