Add a test case that interleaves inference requests from multiple
models. The interleave test loads and starts all models, then launches
inference requests for the models using the available queue-pairs.

Operations sequence when testing with N models and R reps,

(load + start) x N -> (enqueue + dequeue) x N x R ...
        -> (stop + unload) x N

Test can be executed by selecting "inference_interleave" test.

Signed-off-by: Srikanth Yalavarthi <syalavarthi@marvell.com>
---
 app/test-mldev/meson.build                 |   1 +
 app/test-mldev/ml_options.c                |   3 +-
 app/test-mldev/test_inference_interleave.c | 114 +++++++++++++++++++++
 3 files changed, 117 insertions(+), 1 deletion(-)
 create mode 100644 app/test-mldev/test_inference_interleave.c

diff --git a/app/test-mldev/meson.build b/app/test-mldev/meson.build
index 475d76d126..41d22fb22c 100644
--- a/app/test-mldev/meson.build
+++ b/app/test-mldev/meson.build
@@ -18,6 +18,7 @@ sources = files(
         'test_model_ops.c',
         'test_inference_common.c',
         'test_inference_ordered.c',
+        'test_inference_interleave.c',
 )
 
 deps += ['mldev']
diff --git a/app/test-mldev/ml_options.c b/app/test-mldev/ml_options.c
index b7215a7e88..f9e3ce8e6f 100644
--- a/app/test-mldev/ml_options.c
+++ b/app/test-mldev/ml_options.c
@@ -156,7 +156,8 @@ ml_dump_test_options(const char *testname)
                printf("\n");
        }
 
-       if (strcmp(testname, "inference_ordered") == 0) {
+       if ((strcmp(testname, "inference_ordered") == 0) ||
+           (strcmp(testname, "inference_interleave") == 0)) {
                printf("\t\t--filelist         : comma separated list of model, input and output\n"
                       "\t\t--repetitions      : number of inference repetitions\n");
                printf("\n");
diff --git a/app/test-mldev/test_inference_interleave.c b/app/test-mldev/test_inference_interleave.c
new file mode 100644
index 0000000000..9cf4cfa197
--- /dev/null
+++ b/app/test-mldev/test_inference_interleave.c
@@ -0,0 +1,114 @@
+/* SPDX-License-Identifier: BSD-3-Clause
+ * Copyright (c) 2022 Marvell.
+ */
+
+#include <rte_common.h>
+#include <rte_launch.h>
+
+#include "ml_common.h"
+#include "test_inference_common.h"
+
+static int
+test_inference_interleave_driver(struct ml_test *test, struct ml_options *opt)
+{
+       struct test_inference *t;
+       uint16_t fid = 0;
+       int ret = 0;
+
+       t = ml_test_priv(test);
+
+       ret = ml_inference_mldev_setup(test, opt);
+       if (ret != 0)
+               return ret;
+
+       ret = ml_inference_mem_setup(test, opt);
+       if (ret != 0)
+               return ret;
+
+       /* load and start all models */
+       for (fid = 0; fid < opt->nb_filelist; fid++) {
+               ret = ml_model_load(test, opt, &t->model[fid], fid);
+               if (ret != 0)
+                       goto error;
+
+               ret = ml_model_start(test, opt, &t->model[fid], fid);
+               if (ret != 0)
+                       goto error;
+
+               ret = ml_inference_iomem_setup(test, opt, fid);
+               if (ret != 0)
+                       goto error;
+       }
+
+       /* launch inference requests */
+       ret = ml_inference_launch_cores(test, opt, 0, opt->nb_filelist - 1);
+       if (ret != 0) {
+               ml_err("failed to launch cores");
+               goto error;
+       }
+
+       rte_eal_mp_wait_lcore();
+
+       /* stop and unload all models */
+       for (fid = 0; fid < opt->nb_filelist; fid++) {
+               ret = ml_inference_result(test, opt, fid);
+               if (ret != ML_TEST_SUCCESS)
+                       goto error;
+
+               ml_inference_iomem_destroy(test, opt, fid);
+
+               ret = ml_model_stop(test, opt, &t->model[fid], fid);
+               if (ret != 0)
+                       goto error;
+
+               ret = ml_model_unload(test, opt, &t->model[fid], fid);
+               if (ret != 0)
+                       goto error;
+       }
+
+       ml_inference_mem_destroy(test, opt);
+
+       ret = ml_inference_mldev_destroy(test, opt);
+       if (ret != 0)
+               return ret;
+
+       t->cmn.result = ML_TEST_SUCCESS;
+
+       return 0;
+
+error:
+       ml_inference_mem_destroy(test, opt);
+       for (fid = 0; fid < opt->nb_filelist; fid++) {
+               ml_inference_iomem_destroy(test, opt, fid);
+               ml_model_stop(test, opt, &t->model[fid], fid);
+               ml_model_unload(test, opt, &t->model[fid], fid);
+       }
+
+       t->cmn.result = ML_TEST_FAILED;
+
+       return ret;
+}
+
+static int
+test_inference_interleave_result(struct ml_test *test, struct ml_options *opt)
+{
+       struct test_inference *t;
+
+       RTE_SET_USED(opt);
+
+       t = ml_test_priv(test);
+
+       return t->cmn.result;
+}
+
/* Ops table for the "inference_interleave" test: reuses the common
 * inference capability/option checks and setup/teardown hooks, and plugs
 * in the interleave-specific driver and result callbacks.
 */
static const struct ml_test_ops inference_interleave = {
	.cap_check = test_inference_cap_check,
	.opt_check = test_inference_opt_check,
	.opt_dump = test_inference_opt_dump,
	.test_setup = test_inference_setup,
	.test_destroy = test_inference_destroy,
	.test_driver = test_inference_interleave_driver,
	.test_result = test_inference_interleave_result,
};

/* Make the test selectable by name on the test-mldev command line. */
ML_TEST_REGISTER(inference_interleave);
-- 
2.17.1

Reply via email to