I do not like this.

On Thu, Aug 1, 2019 at 8:22 AM Guo, Yejun <yejun....@intel.com> wrote:
> background:
> DNN (deep neural network) is a submodule of libavfilter, and FATE/dnn
> is the unit test suite for the DNN module, with one unit test per dnn layer.
> The unit tests are not based on the APIs exported by libavfilter;
> they call directly into the functions within the DNN submodule.
> (It is not easy for the tests to verify dnn layers through the
> libavfilter APIs.)
>
> There is an issue when running the following commands:
> build$ ../ffmpeg/configure --disable-static --enable-shared
> make
> make fate-dnn-layer-pad
>
> Part of the error message:
> tests/dnn/dnn-layer-pad-test.o: In function `test_with_mode_symmetric':
> /work/media/ffmpeg/build/src/tests/dnn/dnn-layer-pad-test.c:73: undefined reference to `dnn_execute_layer_pad'
>
> The root cause is that the function dnn_execute_layer_pad is a LOCAL symbol
> in libavfilter.so, so the linker cannot find it when building
> dnn-layer-pad-test.
>
> To check it, just run: readelf -s libavfilter/libavfilter.so | grep dnn
>
> There are three possible methods to fix this issue:
> 1). delete the dnn unit tests from FATE.
>     I don't think it is a good choice.
>
> 2). ask people to build a static library if they want to use FATE.
>
> 3). export the dnn layer functions.
>     That is what this patch does.
>
> Besides renaming the function so that it is exported from the .so library,
> we also need to add the option -Wl,-rpath when linking the dnn unit tests,
> so the library paths are written into the test binary and help the loader
> find the .so libraries during execution.
>
> Signed-off-by: Guo, Yejun <yejun....@intel.com>
> ---
>  libavfilter/dnn/dnn_backend_native.c           | 2 +-
>  libavfilter/dnn/dnn_backend_native_layer_pad.c | 2 +-
>  libavfilter/dnn/dnn_backend_native_layer_pad.h | 2 +-
>  tests/dnn/Makefile                             | 4 +++-
>  tests/dnn/dnn-layer-pad-test.c                 | 6 +++---
>  5 files changed, 9 insertions(+), 7 deletions(-)
>
> diff --git a/libavfilter/dnn/dnn_backend_native.c b/libavfilter/dnn/dnn_backend_native.c
> index 09c583b..fa412e0 100644
> --- a/libavfilter/dnn/dnn_backend_native.c
> +++ b/libavfilter/dnn/dnn_backend_native.c
> @@ -377,7 +377,7 @@ DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *output
>              break;
>          case MIRROR_PAD:
>              pad_params = (LayerPadParams *)network->layers[layer].params;
> -            dnn_execute_layer_pad(network->layers[layer - 1].output, network->layers[layer].output,
> +            avfilter_dnn_execute_layer_pad(network->layers[layer - 1].output, network->layers[layer].output,
>                                    pad_params, 1, cur_height, cur_width, cur_channels);
>              cur_height = cur_height + pad_params->paddings[1][0] + pad_params->paddings[1][1];
>              cur_width = cur_width + pad_params->paddings[2][0] + pad_params->paddings[2][1];
> diff --git a/libavfilter/dnn/dnn_backend_native_layer_pad.c b/libavfilter/dnn/dnn_backend_native_layer_pad.c
> index 5417d73..526ddfe 100644
> --- a/libavfilter/dnn/dnn_backend_native_layer_pad.c
> +++ b/libavfilter/dnn/dnn_backend_native_layer_pad.c
> @@ -48,7 +48,7 @@ static int after_get_buddy(int given, int border, LayerPadModeParam mode)
>      }
>  }
>
> -void dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel)
> +void avfilter_dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel)
>  {
>      int32_t before_paddings;
>      int32_t after_paddings;
> diff --git a/libavfilter/dnn/dnn_backend_native_layer_pad.h b/libavfilter/dnn/dnn_backend_native_layer_pad.h
> index 0fbe652..3f93d6d 100644
> --- a/libavfilter/dnn/dnn_backend_native_layer_pad.h
> +++ b/libavfilter/dnn/dnn_backend_native_layer_pad.h
> @@ -35,6 +35,6 @@ typedef struct LayerPadParams{
>      float constant_values;
>  } LayerPadParams;
>
> -void dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel);
> +void avfilter_dnn_execute_layer_pad(const float *input, float *output, const LayerPadParams *params, int number, int height, int width, int channel);
>
>  #endif
> diff --git a/tests/dnn/Makefile b/tests/dnn/Makefile
> index b2e6680..a34f2e7 100644
> --- a/tests/dnn/Makefile
> +++ b/tests/dnn/Makefile
> @@ -4,8 +4,10 @@ DNNTESTOBJS := $(DNNTESTOBJS:%=$(DNNTESTSDIR)%) $(DNNTESTPROGS:%=$(DNNTESTSDIR)
>  DNNTESTPROGS := $(DNNTESTPROGS:%=$(DNNTESTSDIR)/%-test$(EXESUF))
>  -include $(wildcard $(DNNTESTOBJS:.o=.d))
>
> +LDDNNFLAGS = -Wl,-rpath=:libpostproc:libswresample:libswscale:libavfilter:libavdevice:libavformat:libavcodec:libavutil:libavresample
> +
>  $(DNNTESTPROGS): %$(EXESUF): %.o $(FF_DEP_LIBS)
> -	$(LD) $(LDFLAGS) $(LDEXEFLAGS) $(LD_O) $(filter %.o,$^) $(FF_EXTRALIBS) $(ELIBS)
> +	$(LD) $(LDFLAGS) $(LDDNNFLAGS) $(LDEXEFLAGS) $(LD_O) $(filter %.o,$^) $(FF_EXTRALIBS) $(ELIBS)
>
>  testclean::
>  	$(RM) $(addprefix $(DNNTESTSDIR)/,$(CLEANSUFFIXES) *-test$(EXESUF))
> diff --git a/tests/dnn/dnn-layer-pad-test.c b/tests/dnn/dnn-layer-pad-test.c
> index 28a49eb..078a2f4 100644
> --- a/tests/dnn/dnn-layer-pad-test.c
> +++ b/tests/dnn/dnn-layer-pad-test.c
> @@ -70,7 +70,7 @@ static int test_with_mode_symmetric(void)
>      params.paddings[3][0] = 0;
>      params.paddings[3][1] = 0;
>
> -    dnn_execute_layer_pad(input, output, &params, 1, 4, 4, 3);
> +    avfilter_dnn_execute_layer_pad(input, output, &params, 1, 4, 4, 3);
>
>      for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
>          if (fabs(output[i] - expected_output[i]) > EPSON) {
> @@ -123,7 +123,7 @@ static int test_with_mode_reflect(void)
>      params.paddings[3][0] = 0;
>      params.paddings[3][1] = 0;
>
> -    dnn_execute_layer_pad(input, output, &params, 3, 2, 2, 3);
> +    avfilter_dnn_execute_layer_pad(input, output, &params, 3, 2, 2, 3);
>
>      for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
>          if (fabs(output[i] - expected_output[i]) > EPSON) {
> @@ -177,7 +177,7 @@ static int test_with_mode_constant(void)
>      params.paddings[3][0] = 1;
>      params.paddings[3][1] = 2;
>
> -    dnn_execute_layer_pad(input, output, &params, 1, 2, 2, 3);
> +    avfilter_dnn_execute_layer_pad(input, output, &params, 1, 2, 2, 3);
>
>      for (int i = 0; i < sizeof(output) / sizeof(float); i++) {
>          if (fabs(output[i] - expected_output[i]) > EPSON) {
> --
> 2.7.4
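
For reference, the visibility problem described in the commit message above can
be inspected directly against a shared build. This is only a sketch, run from
the build directory; it assumes the symbol is hidden because libavfilter's
version script only marks the library's public prefixes (such as avfilter_*)
as global, and that the test binary ends up at tests/dnn/dnn-layer-pad-test:

    # Before the patch the symbol shows up with LOCAL binding, which is why
    # the test binary cannot link against the shared library; after the
    # rename the avfilter_-prefixed symbol should be listed as GLOBAL.
    readelf -s libavfilter/libavfilter.so | grep dnn_execute_layer_pad

    # After the patch, the rpath added in tests/dnn/Makefile can be checked
    # in the test binary's dynamic section (RPATH/RUNPATH entry).
    readelf -d tests/dnn/dnn-layer-pad-test | grep -iE 'rpath|runpath'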